From 5f8de423f190bbb79a62f804151bc24824fa32d8 Mon Sep 17 00:00:00 2001 From: "Matt A. Tobin" Date: Fri, 2 Feb 2018 04:16:08 -0500 Subject: Add m-esr52 at 52.6.0 --- python/PyECC/MANIFEST.in | 1 + python/PyECC/README.md | 29 + python/PyECC/ecc/Key.py | 320 + python/PyECC/ecc/Rabbit.py | 270 + python/PyECC/ecc/SecurityViolationException.py | 2 + python/PyECC/ecc/__init__.py | 0 python/PyECC/ecc/curves.py | 81 + python/PyECC/ecc/eccrypt.py | 65 + python/PyECC/ecc/ecdsa.py | 153 + python/PyECC/ecc/elliptic.py | 381 + python/PyECC/ecc/encoding.py | 178 + python/PyECC/ecc/performance.py | 50 + python/PyECC/ecc/primes.py | 82 + python/PyECC/ecc/shacrypt.py | 38 + python/PyECC/setup.py | 77 + python/README | 21 + python/altgraph/MANIFEST.in | 9 + python/altgraph/PKG-INFO | 216 + python/altgraph/README.txt | 6 + python/altgraph/altgraph/Dot.py | 299 + python/altgraph/altgraph/Graph.py | 677 + python/altgraph/altgraph/GraphAlgo.py | 147 + python/altgraph/altgraph/GraphStat.py | 73 + python/altgraph/altgraph/GraphUtil.py | 137 + python/altgraph/altgraph/ObjectGraph.py | 202 + python/altgraph/altgraph/__init__.py | 135 + python/altgraph/altgraph_tests/__init__.py | 1 + python/altgraph/altgraph_tests/test_altgraph.py | 45 + python/altgraph/altgraph_tests/test_dot.py | 370 + python/altgraph/altgraph_tests/test_graph.py | 644 + python/altgraph/altgraph_tests/test_graphstat.py | 70 + python/altgraph/altgraph_tests/test_graphutil.py | 140 + .../altgraph/altgraph_tests/test_object_graph.py | 349 + python/altgraph/doc/Makefile | 89 + .../altgraph/doc/_build/doctrees/changelog.doctree | Bin 0 -> 31601 bytes python/altgraph/doc/_build/doctrees/core.doctree | Bin 0 -> 9753 bytes python/altgraph/doc/_build/doctrees/dot.doctree | Bin 0 -> 51820 bytes .../doc/_build/doctrees/environment.pickle | Bin 0 -> 1265957 bytes python/altgraph/doc/_build/doctrees/graph.doctree | Bin 0 -> 88126 bytes .../altgraph/doc/_build/doctrees/graphalgo.doctree | Bin 0 -> 9685 bytes 
.../altgraph/doc/_build/doctrees/graphstat.doctree | Bin 0 -> 9392 bytes .../altgraph/doc/_build/doctrees/graphutil.doctree | Bin 0 -> 20473 bytes python/altgraph/doc/_build/doctrees/index.doctree | Bin 0 -> 9378 bytes .../altgraph/doc/_build/doctrees/license.doctree | Bin 0 -> 5203 bytes .../doc/_build/doctrees/objectgraph.doctree | Bin 0 -> 45814 bytes python/altgraph/doc/_build/html/.buildinfo | 4 + .../doc/_build/html/_sources/changelog.txt | 176 + python/altgraph/doc/_build/html/_sources/core.txt | 26 + python/altgraph/doc/_build/html/_sources/dot.txt | 224 + python/altgraph/doc/_build/html/_sources/graph.txt | 299 + .../doc/_build/html/_sources/graphalgo.txt | 26 + .../doc/_build/html/_sources/graphstat.txt | 25 + .../doc/_build/html/_sources/graphutil.txt | 55 + python/altgraph/doc/_build/html/_sources/index.txt | 41 + .../altgraph/doc/_build/html/_sources/license.txt | 25 + .../doc/_build/html/_sources/objectgraph.txt | 134 + .../doc/_build/html/_static/ajax-loader.gif | Bin 0 -> 673 bytes python/altgraph/doc/_build/html/_static/basic.css | 537 + .../doc/_build/html/_static/comment-bright.png | Bin 0 -> 3500 bytes .../doc/_build/html/_static/comment-close.png | Bin 0 -> 3578 bytes .../altgraph/doc/_build/html/_static/comment.png | Bin 0 -> 3445 bytes .../altgraph/doc/_build/html/_static/doctools.js | 238 + .../doc/_build/html/_static/down-pressed.png | Bin 0 -> 368 bytes python/altgraph/doc/_build/html/_static/down.png | Bin 0 -> 363 bytes python/altgraph/doc/_build/html/_static/file.png | Bin 0 -> 392 bytes python/altgraph/doc/_build/html/_static/jquery.js | 2 + python/altgraph/doc/_build/html/_static/minus.png | Bin 0 -> 199 bytes python/altgraph/doc/_build/html/_static/nature.css | 245 + python/altgraph/doc/_build/html/_static/plus.png | Bin 0 -> 199 bytes .../altgraph/doc/_build/html/_static/pygments.css | 62 + .../doc/_build/html/_static/searchtools.js | 622 + .../altgraph/doc/_build/html/_static/underscore.js | 31 + 
.../doc/_build/html/_static/up-pressed.png | Bin 0 -> 372 bytes python/altgraph/doc/_build/html/_static/up.png | Bin 0 -> 363 bytes .../altgraph/doc/_build/html/_static/websupport.js | 808 + python/altgraph/doc/_build/html/changelog.html | 271 + python/altgraph/doc/_build/html/core.html | 130 + python/altgraph/doc/_build/html/dot.html | 332 + python/altgraph/doc/_build/html/genindex.html | 604 + python/altgraph/doc/_build/html/graph.html | 491 + python/altgraph/doc/_build/html/graphalgo.html | 134 + python/altgraph/doc/_build/html/graphstat.html | 130 + python/altgraph/doc/_build/html/graphutil.html | 162 + python/altgraph/doc/_build/html/index.html | 142 + python/altgraph/doc/_build/html/license.html | 136 + python/altgraph/doc/_build/html/objectgraph.html | 283 + python/altgraph/doc/_build/html/objects.inv | Bin 0 -> 954 bytes python/altgraph/doc/_build/html/py-modindex.html | 139 + python/altgraph/doc/_build/html/search.html | 105 + python/altgraph/doc/_build/html/searchindex.js | 1 + python/altgraph/doc/changelog.rst | 185 + python/altgraph/doc/conf.py | 209 + python/altgraph/doc/core.rst | 26 + python/altgraph/doc/dot.rst | 224 + python/altgraph/doc/graph.rst | 305 + python/altgraph/doc/graphalgo.rst | 26 + python/altgraph/doc/graphstat.rst | 25 + python/altgraph/doc/graphutil.rst | 55 + python/altgraph/doc/index.rst | 41 + python/altgraph/doc/license.rst | 25 + python/altgraph/doc/objectgraph.rst | 146 + python/altgraph/setup.cfg | 36 + python/altgraph/setup.py | 867 + python/bitstring/PKG-INFO | 122 + python/bitstring/README.txt | 99 + python/bitstring/bitstring.py | 4234 ++++ python/bitstring/doc/bitstring_manual.pdf | Bin 0 -> 439327 bytes python/bitstring/release_notes.txt | 1523 ++ python/bitstring/setup.py | 44 + python/bitstring/test/smalltestfile | 1 + python/bitstring/test/test.m1v | Bin 0 -> 125300 bytes python/bitstring/test/test_bitarray.py | 310 + python/bitstring/test/test_bits.py | 378 + python/bitstring/test/test_bitstore.py | 37 + 
python/bitstring/test/test_bitstream.py | 3940 ++++ python/bitstring/test/test_bitstring.py | 97 + python/bitstring/test/test_constbitstream.py | 121 + python/blessings/LICENSE | 19 + python/blessings/MANIFEST.in | 3 + python/blessings/PKG-INFO | 426 + python/blessings/README.rst | 399 + python/blessings/blessings/__init__.py | 450 + python/blessings/blessings/tests.py | 231 + python/blessings/setup.cfg | 5 + python/blessings/setup.py | 42 + python/blessings/tox.ini | 7 + python/compare-locales/compare_locales/__init__.py | 1 + python/compare-locales/compare_locales/checks.py | 438 + python/compare-locales/compare_locales/commands.py | 154 + python/compare-locales/compare_locales/compare.py | 638 + python/compare-locales/compare_locales/parser.py | 521 + python/compare-locales/compare_locales/paths.py | 398 + .../compare_locales/tests/__init__.py | 49 + .../tests/data/bug121341.properties | 68 + .../compare_locales/tests/data/test.properties | 14 + .../compare_locales/tests/data/triple-license.dtd | 38 + .../compare_locales/tests/test_checks.py | 403 + .../compare_locales/tests/test_compare.py | 90 + .../compare_locales/tests/test_dtd.py | 86 + .../compare_locales/tests/test_ini.py | 115 + .../compare_locales/tests/test_merge.py | 265 + .../compare_locales/tests/test_properties.py | 95 + .../compare_locales/tests/test_util.py | 29 + .../compare_locales/tests/test_webapps.py | 41 + python/compare-locales/compare_locales/util.py | 11 + python/compare-locales/compare_locales/webapps.py | 235 + python/compare-locales/docs/glossary.rst | 26 + python/compare-locales/docs/index.rst | 191 + python/compare-locales/mach_commands.py | 81 + python/compare-locales/moz.build | 16 + python/configobj/PKG-INFO | 47 + python/configobj/configobj.py | 2468 +++ python/configobj/setup.py | 83 + python/configobj/validate.py | 1450 ++ python/devtools/migrate-l10n/README.rst | 16 + python/devtools/migrate-l10n/migrate/__init__.py | 0 .../devtools/migrate-l10n/migrate/conf/bug1294186 | 22 + 
.../migrate-l10n/migrate/conf/bug1308500_1309191 | 97 + python/devtools/migrate-l10n/migrate/main.py | 261 + .../migrate-l10n/migrate/tests/__init__.py | 0 python/eme/gen-eme-voucher.py | 633 + python/futures/CHANGES | 89 + python/futures/LICENSE | 21 + python/futures/MANIFEST.in | 5 + python/futures/PKG-INFO | 16 + python/futures/concurrent/__init__.py | 3 + python/futures/concurrent/futures/__init__.py | 23 + python/futures/concurrent/futures/_base.py | 605 + python/futures/concurrent/futures/process.py | 359 + python/futures/concurrent/futures/thread.py | 134 + python/futures/crawl.py | 74 + python/futures/docs/Makefile | 88 + python/futures/docs/conf.py | 194 + python/futures/docs/index.rst | 347 + python/futures/docs/make.bat | 112 + python/futures/primes.py | 50 + python/futures/setup.cfg | 12 + python/futures/setup.py | 27 + python/futures/test_futures.py | 724 + python/futures/tox.ini | 8 + python/gdbpp/gdbpp/__init__.py | 28 + python/gdbpp/gdbpp/linkedlist.py | 49 + python/gdbpp/gdbpp/owningthread.py | 24 + python/gdbpp/gdbpp/smartptr.py | 55 + python/gdbpp/gdbpp/string.py | 19 + python/gdbpp/gdbpp/tarray.py | 30 + python/gdbpp/gdbpp/thashtable.py | 143 + python/jsmin/jsmin/__init__.py | 238 + python/jsmin/jsmin/test.py | 394 + python/jsmin/setup.cfg | 5 + python/jsmin/setup.py | 42 + python/lldbutils/README.txt | 221 + python/lldbutils/lldbutils/__init__.py | 13 + python/lldbutils/lldbutils/content.py | 21 + python/lldbutils/lldbutils/general.py | 105 + python/lldbutils/lldbutils/gfx.py | 130 + python/lldbutils/lldbutils/layout.py | 20 + python/lldbutils/lldbutils/utils.py | 70 + python/mach/README.rst | 13 + python/mach/bash-completion.sh | 29 + python/mach/docs/commands.rst | 145 + python/mach/docs/driver.rst | 51 + python/mach/docs/index.rst | 75 + python/mach/docs/logging.rst | 100 + python/mach/docs/settings.rst | 140 + python/mach/mach/__init__.py | 0 python/mach/mach/base.py | 46 + python/mach/mach/commands/__init__.py | 0 
python/mach/mach/commands/commandinfo.py | 53 + python/mach/mach/commands/settings.py | 132 + python/mach/mach/config.py | 461 + python/mach/mach/decorators.py | 353 + python/mach/mach/dispatcher.py | 453 + python/mach/mach/locale/en_US/LC_MESSAGES/alias.mo | Bin 0 -> 193 bytes python/mach/mach/locale/en_US/LC_MESSAGES/alias.po | 9 + python/mach/mach/logging.py | 256 + python/mach/mach/main.py | 594 + python/mach/mach/mixin/__init__.py | 0 python/mach/mach/mixin/logging.py | 55 + python/mach/mach/mixin/process.py | 175 + python/mach/mach/registrar.py | 126 + python/mach/mach/terminal.py | 75 + python/mach/mach/test/__init__.py | 0 python/mach/mach/test/common.py | 45 + python/mach/mach/test/providers/__init__.py | 0 python/mach/mach/test/providers/basic.py | 23 + python/mach/mach/test/providers/conditions.py | 53 + .../mach/mach/test/providers/conditions_invalid.py | 16 + python/mach/mach/test/providers/throw.py | 29 + python/mach/mach/test/providers/throw2.py | 13 + python/mach/mach/test/test_conditions.py | 83 + python/mach/mach/test/test_config.py | 297 + python/mach/mach/test/test_dispatcher.py | 61 + python/mach/mach/test/test_entry_point.py | 61 + python/mach/mach/test/test_error_output.py | 39 + python/mach/mach/test/test_logger.py | 47 + python/mach/setup.py | 38 + python/mach_commands.py | 158 + python/macholib/MANIFEST.in | 8 + python/macholib/PKG-INFO | 275 + python/macholib/README.txt | 8 + python/macholib/doc/MachO.rst | 19 + python/macholib/doc/MachoOGraph.rst | 14 + python/macholib/doc/MachoOStandalone.rst | 13 + python/macholib/doc/Makefile | 130 + python/macholib/doc/SymbolTable.rst | 24 + python/macholib/doc/_build/doctrees/MachO.doctree | Bin 0 -> 7229 bytes .../doc/_build/doctrees/MachoOGraph.doctree | Bin 0 -> 6109 bytes .../doc/_build/doctrees/MachoOStandalone.doctree | Bin 0 -> 7099 bytes .../doc/_build/doctrees/SymbolTable.doctree | Bin 0 -> 8138 bytes .../macholib/doc/_build/doctrees/changelog.doctree | Bin 0 -> 45133 bytes 
python/macholib/doc/_build/doctrees/dyld.doctree | Bin 0 -> 42349 bytes python/macholib/doc/_build/doctrees/dylib.doctree | Bin 0 -> 9129 bytes .../doc/_build/doctrees/environment.pickle | Bin 0 -> 1281784 bytes .../macholib/doc/_build/doctrees/framework.doctree | Bin 0 -> 9557 bytes python/macholib/doc/_build/doctrees/index.doctree | Bin 0 -> 11612 bytes .../macholib/doc/_build/doctrees/license.doctree | Bin 0 -> 4977 bytes .../macholib/doc/_build/doctrees/macho_o.doctree | Bin 0 -> 5186 bytes python/macholib/doc/_build/doctrees/ptypes.doctree | Bin 0 -> 44118 bytes .../macholib/doc/_build/doctrees/scripts.doctree | Bin 0 -> 5399 bytes python/macholib/doc/_build/html/.buildinfo | 4 + python/macholib/doc/_build/html/MachO.html | 132 + python/macholib/doc/_build/html/MachoOGraph.html | 125 + .../macholib/doc/_build/html/MachoOStandalone.html | 125 + python/macholib/doc/_build/html/SymbolTable.html | 136 + python/macholib/doc/_build/html/_sources/MachO.txt | 19 + .../doc/_build/html/_sources/MachoOGraph.txt | 14 + .../doc/_build/html/_sources/MachoOStandalone.txt | 13 + .../doc/_build/html/_sources/SymbolTable.txt | 24 + .../doc/_build/html/_sources/changelog.txt | 242 + python/macholib/doc/_build/html/_sources/dyld.txt | 159 + python/macholib/doc/_build/html/_sources/dylib.txt | 33 + .../doc/_build/html/_sources/framework.txt | 34 + python/macholib/doc/_build/html/_sources/index.txt | 59 + .../macholib/doc/_build/html/_sources/license.txt | 23 + .../macholib/doc/_build/html/_sources/macho_o.txt | 13 + .../macholib/doc/_build/html/_sources/ptypes.txt | 157 + .../macholib/doc/_build/html/_sources/scripts.txt | 35 + .../doc/_build/html/_static/ajax-loader.gif | Bin 0 -> 673 bytes python/macholib/doc/_build/html/_static/basic.css | 537 + .../doc/_build/html/_static/comment-bright.png | Bin 0 -> 3500 bytes .../doc/_build/html/_static/comment-close.png | Bin 0 -> 3578 bytes .../macholib/doc/_build/html/_static/comment.png | Bin 0 -> 3445 bytes 
.../macholib/doc/_build/html/_static/doctools.js | 238 + .../doc/_build/html/_static/down-pressed.png | Bin 0 -> 368 bytes python/macholib/doc/_build/html/_static/down.png | Bin 0 -> 363 bytes python/macholib/doc/_build/html/_static/file.png | Bin 0 -> 392 bytes python/macholib/doc/_build/html/_static/jquery.js | 2 + python/macholib/doc/_build/html/_static/minus.png | Bin 0 -> 199 bytes python/macholib/doc/_build/html/_static/nature.css | 245 + python/macholib/doc/_build/html/_static/plus.png | Bin 0 -> 199 bytes .../macholib/doc/_build/html/_static/pygments.css | 62 + .../doc/_build/html/_static/searchtools.js | 622 + .../macholib/doc/_build/html/_static/underscore.js | 31 + .../doc/_build/html/_static/up-pressed.png | Bin 0 -> 372 bytes python/macholib/doc/_build/html/_static/up.png | Bin 0 -> 363 bytes .../macholib/doc/_build/html/_static/websupport.js | 808 + python/macholib/doc/_build/html/changelog.html | 385 + python/macholib/doc/_build/html/dyld.html | 267 + python/macholib/doc/_build/html/dylib.html | 145 + python/macholib/doc/_build/html/framework.html | 147 + python/macholib/doc/_build/html/genindex.html | 365 + python/macholib/doc/_build/html/index.html | 170 + python/macholib/doc/_build/html/license.html | 140 + python/macholib/doc/_build/html/macho_o.html | 122 + python/macholib/doc/_build/html/objects.inv | Bin 0 -> 692 bytes python/macholib/doc/_build/html/ptypes.html | 317 + python/macholib/doc/_build/html/py-modindex.html | 154 + python/macholib/doc/_build/html/scripts.html | 156 + python/macholib/doc/_build/html/search.html | 105 + python/macholib/doc/_build/html/searchindex.js | 1 + python/macholib/doc/changelog.rst | 242 + python/macholib/doc/conf.py | 275 + python/macholib/doc/dyld.rst | 159 + python/macholib/doc/dylib.rst | 33 + python/macholib/doc/framework.rst | 34 + python/macholib/doc/index.rst | 59 + python/macholib/doc/license.rst | 23 + python/macholib/doc/macho_o.rst | 13 + python/macholib/doc/ptypes.rst | 157 + 
python/macholib/doc/scripts.rst | 35 + python/macholib/macholib/MachO.py | 398 + python/macholib/macholib/MachOGraph.py | 131 + python/macholib/macholib/MachOStandalone.py | 147 + python/macholib/macholib/SymbolTable.py | 95 + python/macholib/macholib/__init__.py | 10 + python/macholib/macholib/__main__.py | 73 + python/macholib/macholib/_cmdline.py | 44 + python/macholib/macholib/dyld.py | 176 + python/macholib/macholib/dylib.py | 42 + python/macholib/macholib/framework.py | 42 + python/macholib/macholib/itergraphreport.py | 73 + python/macholib/macholib/mach_o.py | 1311 ++ python/macholib/macholib/macho_dump.py | 48 + python/macholib/macholib/macho_find.py | 17 + python/macholib/macholib/macho_standalone.py | 26 + python/macholib/macholib/ptypes.py | 290 + python/macholib/macholib/util.py | 245 + python/macholib/macholib_tests/__init__.py | 1 + .../macholib/macholib_tests/binaries/src/build.py | 22 + python/macholib/macholib_tests/test_MachO.py | 15 + python/macholib/macholib_tests/test_MachOGraph.py | 15 + .../macholib_tests/test_MachOStandalone.py | 15 + python/macholib/macholib_tests/test_SymbolTable.py | 15 + .../macholib/macholib_tests/test_command_line.py | 147 + python/macholib/macholib_tests/test_dyld.py | 450 + python/macholib/macholib_tests/test_dylib.py | 38 + python/macholib/macholib_tests/test_framework.py | 88 + .../macholib_tests/test_itergraphreport.py | 15 + python/macholib/macholib_tests/test_mach_o.py | 21 + python/macholib/macholib_tests/test_ptypes.py | 191 + python/macholib/setup.cfg | 42 + python/macholib/setup.py | 867 + python/mock-1.0.0/LICENSE.txt | 26 + python/mock-1.0.0/MANIFEST.in | 2 + python/mock-1.0.0/PKG-INFO | 208 + python/mock-1.0.0/README.txt | 177 + python/mock-1.0.0/docs/changelog.txt | 725 + python/mock-1.0.0/docs/compare.txt | 628 + python/mock-1.0.0/docs/conf.py | 209 + python/mock-1.0.0/docs/examples.txt | 1063 + python/mock-1.0.0/docs/getting-started.txt | 479 + python/mock-1.0.0/docs/helpers.txt | 583 + 
python/mock-1.0.0/docs/index.txt | 411 + python/mock-1.0.0/docs/magicmock.txt | 258 + python/mock-1.0.0/docs/mock.txt | 842 + python/mock-1.0.0/docs/patch.txt | 636 + python/mock-1.0.0/docs/sentinel.txt | 58 + python/mock-1.0.0/html/.doctrees/changelog.doctree | Bin 0 -> 282659 bytes python/mock-1.0.0/html/.doctrees/compare.doctree | Bin 0 -> 56915 bytes python/mock-1.0.0/html/.doctrees/examples.doctree | Bin 0 -> 167478 bytes .../html/.doctrees/getting-started.doctree | Bin 0 -> 70942 bytes python/mock-1.0.0/html/.doctrees/index.doctree | Bin 0 -> 98784 bytes python/mock-1.0.0/html/.doctrees/magicmock.doctree | Bin 0 -> 75713 bytes python/mock-1.0.0/html/.doctrees/mock.doctree | Bin 0 -> 152111 bytes .../html/.doctrees/mocksignature.doctree | Bin 0 -> 42324 bytes python/mock-1.0.0/html/.doctrees/patch.doctree | Bin 0 -> 123511 bytes python/mock-1.0.0/html/.doctrees/sentinel.doctree | Bin 0 -> 10632 bytes python/mock-1.0.0/html/_sources/changelog.txt | 725 + python/mock-1.0.0/html/_sources/compare.txt | 628 + python/mock-1.0.0/html/_sources/examples.txt | 1063 + .../mock-1.0.0/html/_sources/getting-started.txt | 479 + python/mock-1.0.0/html/_sources/index.txt | 411 + python/mock-1.0.0/html/_sources/magicmock.txt | 258 + python/mock-1.0.0/html/_sources/mock.txt | 842 + python/mock-1.0.0/html/_sources/mocksignature.txt | 262 + python/mock-1.0.0/html/_sources/patch.txt | 636 + python/mock-1.0.0/html/_sources/sentinel.txt | 58 + python/mock-1.0.0/html/_static/adctheme.css | 757 + python/mock-1.0.0/html/_static/basic.css | 540 + .../html/_static/breadcrumb_background.png | Bin 0 -> 136 bytes python/mock-1.0.0/html/_static/default.css | 256 + python/mock-1.0.0/html/_static/doctools.js | 247 + python/mock-1.0.0/html/_static/documentation.png | Bin 0 -> 412 bytes python/mock-1.0.0/html/_static/file.png | Bin 0 -> 392 bytes python/mock-1.0.0/html/_static/header_sm_mid.png | Bin 0 -> 159 bytes python/mock-1.0.0/html/_static/jquery.js | 154 + 
python/mock-1.0.0/html/_static/minus.png | Bin 0 -> 199 bytes python/mock-1.0.0/html/_static/mobile.css | 17 + python/mock-1.0.0/html/_static/plus.png | Bin 0 -> 199 bytes python/mock-1.0.0/html/_static/pygments.css | 62 + python/mock-1.0.0/html/_static/scrn1.png | Bin 0 -> 108046 bytes python/mock-1.0.0/html/_static/scrn2.png | Bin 0 -> 121395 bytes .../html/_static/searchfield_leftcap.png | Bin 0 -> 855 bytes .../mock-1.0.0/html/_static/searchfield_repeat.png | Bin 0 -> 158 bytes .../html/_static/searchfield_rightcap.png | Bin 0 -> 530 bytes python/mock-1.0.0/html/_static/searchtools.js | 560 + python/mock-1.0.0/html/_static/sidebar.js | 148 + .../mock-1.0.0/html/_static/title_background.png | Bin 0 -> 132 bytes python/mock-1.0.0/html/_static/toc.js | 20 + python/mock-1.0.0/html/_static/triangle_closed.png | Bin 0 -> 181 bytes python/mock-1.0.0/html/_static/triangle_left.png | Bin 0 -> 195 bytes python/mock-1.0.0/html/_static/triangle_open.png | Bin 0 -> 191 bytes python/mock-1.0.0/html/_static/underscore.js | 23 + python/mock-1.0.0/html/changelog.html | 839 + python/mock-1.0.0/html/compare.html | 672 + python/mock-1.0.0/html/examples.html | 1006 + python/mock-1.0.0/html/genindex.html | 479 + python/mock-1.0.0/html/getting-started.html | 510 + python/mock-1.0.0/html/index.html | 529 + python/mock-1.0.0/html/magicmock.html | 347 + python/mock-1.0.0/html/mock.html | 875 + python/mock-1.0.0/html/mocksignature.html | 352 + python/mock-1.0.0/html/objects.inv | Bin 0 -> 711 bytes python/mock-1.0.0/html/output.txt | 126 + python/mock-1.0.0/html/patch.html | 648 + python/mock-1.0.0/html/search.html | 99 + python/mock-1.0.0/html/searchindex.js | 1 + python/mock-1.0.0/html/sentinel.html | 156 + python/mock-1.0.0/mock.py | 2356 ++ python/mock-1.0.0/setup.cfg | 12 + python/mock-1.0.0/setup.py | 72 + python/mock-1.0.0/tests/__init__.py | 3 + python/mock-1.0.0/tests/_testwith.py | 181 + python/mock-1.0.0/tests/support.py | 41 + python/mock-1.0.0/tests/support_with.py | 93 + 
python/mock-1.0.0/tests/testcallable.py | 158 + python/mock-1.0.0/tests/testhelpers.py | 940 + python/mock-1.0.0/tests/testmagicmethods.py | 486 + python/mock-1.0.0/tests/testmock.py | 1351 ++ python/mock-1.0.0/tests/testpatch.py | 1790 ++ python/mock-1.0.0/tests/testsentinel.py | 33 + python/mock-1.0.0/tests/testwith.py | 16 + python/mock-1.0.0/tox.ini | 40 + python/moz.build | 88 + python/mozboot/README.rst | 19 + python/mozboot/bin/bootstrap-msys2.vbs | 116 + python/mozboot/bin/bootstrap.py | 170 + python/mozboot/mozboot/__init__.py | 0 python/mozboot/mozboot/android.py | 270 + python/mozboot/mozboot/archlinux.py | 223 + python/mozboot/mozboot/base.py | 452 + python/mozboot/mozboot/bootstrap.py | 437 + python/mozboot/mozboot/centosfedora.py | 153 + python/mozboot/mozboot/debian.py | 188 + python/mozboot/mozboot/freebsd.py | 63 + python/mozboot/mozboot/gentoo.py | 33 + python/mozboot/mozboot/mach_commands.py | 67 + python/mozboot/mozboot/mozillabuild.py | 77 + python/mozboot/mozboot/openbsd.py | 45 + python/mozboot/mozboot/osx.py | 577 + python/mozboot/mozboot/util.py | 20 + python/mozboot/mozboot/windows.py | 95 + python/mozboot/setup.py | 16 + python/mozboot/support/ConEmu.xml | 897 + python/mozbuild/TODO | 3 + python/mozbuild/dumbmake/__init__.py | 0 python/mozbuild/dumbmake/dumbmake.py | 122 + python/mozbuild/dumbmake/test/__init__.py | 0 python/mozbuild/dumbmake/test/test_dumbmake.py | 106 + python/mozbuild/mozbuild/__init__.py | 0 python/mozbuild/mozbuild/action/__init__.py | 0 python/mozbuild/mozbuild/action/buildlist.py | 52 + python/mozbuild/mozbuild/action/cl.py | 124 + python/mozbuild/mozbuild/action/dump_env.py | 10 + python/mozbuild/mozbuild/action/explode_aar.py | 72 + python/mozbuild/mozbuild/action/file_generate.py | 108 + .../mozbuild/action/generate_browsersearch.py | 131 + .../mozbuild/action/generate_searchjson.py | 23 + .../mozbuild/action/generate_suggestedsites.py | 147 + .../mozbuild/action/generate_symbols_file.py | 91 + 
python/mozbuild/mozbuild/action/jar_maker.py | 17 + python/mozbuild/mozbuild/action/make_dmg.py | 37 + .../mozbuild/action/output_searchplugins_list.py | 21 + .../mozbuild/mozbuild/action/package_fennec_apk.py | 150 + python/mozbuild/mozbuild/action/preprocessor.py | 18 + .../mozbuild/action/process_define_files.py | 94 + .../mozbuild/action/process_install_manifest.py | 120 + python/mozbuild/mozbuild/action/test_archive.py | 565 + python/mozbuild/mozbuild/action/webidl.py | 19 + python/mozbuild/mozbuild/action/xpccheck.py | 83 + python/mozbuild/mozbuild/action/xpidl-process.py | 94 + python/mozbuild/mozbuild/action/zip.py | 39 + python/mozbuild/mozbuild/android_version_code.py | 167 + python/mozbuild/mozbuild/artifacts.py | 1089 + python/mozbuild/mozbuild/backend/__init__.py | 26 + .../mozbuild/mozbuild/backend/android_eclipse.py | 267 + python/mozbuild/mozbuild/backend/base.py | 317 + python/mozbuild/mozbuild/backend/common.py | 567 + .../mozbuild/mozbuild/backend/configenvironment.py | 199 + python/mozbuild/mozbuild/backend/cpp_eclipse.py | 698 + python/mozbuild/mozbuild/backend/fastermake.py | 165 + python/mozbuild/mozbuild/backend/mach_commands.py | 132 + python/mozbuild/mozbuild/backend/recursivemake.py | 1513 ++ .../backend/templates/android_eclipse/.classpath | 10 + .../com.android.ide.eclipse.adt.ApkBuilder.launch | 8 + ...droid.ide.eclipse.adt.PreCompilerBuilder.launch | 8 + ...d.ide.eclipse.adt.ResourceManagerBuilder.launch | 8 + .../org.eclipse.jdt.core.javabuilder.launch | 8 + .../templates/android_eclipse/AndroidManifest.xml | 11 + .../backend/templates/android_eclipse/gen/tmp | 1 + .../backend/templates/android_eclipse/lint.xml | 5 + .../templates/android_eclipse/project.properties | 14 + .../.not_an_android_resource | 5 + python/mozbuild/mozbuild/backend/tup.py | 344 + python/mozbuild/mozbuild/backend/visualstudio.py | 582 + python/mozbuild/mozbuild/base.py | 850 + python/mozbuild/mozbuild/codecoverage/__init__.py | 0 
.../mozbuild/mozbuild/codecoverage/chrome_map.py | 105 + python/mozbuild/mozbuild/codecoverage/packager.py | 43 + python/mozbuild/mozbuild/compilation/__init__.py | 0 .../mozbuild/mozbuild/compilation/codecomplete.py | 63 + python/mozbuild/mozbuild/compilation/database.py | 252 + python/mozbuild/mozbuild/compilation/util.py | 54 + python/mozbuild/mozbuild/compilation/warnings.py | 376 + python/mozbuild/mozbuild/config_status.py | 182 + python/mozbuild/mozbuild/configure/__init__.py | 935 + .../mozbuild/configure/check_debug_ranges.py | 62 + python/mozbuild/mozbuild/configure/constants.py | 103 + python/mozbuild/mozbuild/configure/help.py | 45 + python/mozbuild/mozbuild/configure/libstdcxx.py | 81 + python/mozbuild/mozbuild/configure/lint.py | 78 + python/mozbuild/mozbuild/configure/lint_util.py | 52 + python/mozbuild/mozbuild/configure/options.py | 485 + python/mozbuild/mozbuild/configure/util.py | 226 + python/mozbuild/mozbuild/controller/__init__.py | 0 python/mozbuild/mozbuild/controller/building.py | 680 + python/mozbuild/mozbuild/controller/clobber.py | 237 + python/mozbuild/mozbuild/doctor.py | 293 + python/mozbuild/mozbuild/dotproperties.py | 83 + python/mozbuild/mozbuild/frontend/__init__.py | 0 python/mozbuild/mozbuild/frontend/context.py | 2292 ++ python/mozbuild/mozbuild/frontend/data.py | 1113 + python/mozbuild/mozbuild/frontend/emitter.py | 1416 ++ python/mozbuild/mozbuild/frontend/gyp_reader.py | 248 + python/mozbuild/mozbuild/frontend/mach_commands.py | 218 + python/mozbuild/mozbuild/frontend/reader.py | 1408 ++ python/mozbuild/mozbuild/frontend/sandbox.py | 308 + python/mozbuild/mozbuild/html_build_viewer.py | 120 + python/mozbuild/mozbuild/jar.py | 597 + .../mozbuild/locale/en-US/LC_MESSAGES/mozbuild.mo | Bin 0 -> 301 bytes .../mozbuild/locale/en-US/LC_MESSAGES/mozbuild.po | 8 + python/mozbuild/mozbuild/mach_commands.py | 1603 ++ python/mozbuild/mozbuild/makeutil.py | 186 + python/mozbuild/mozbuild/milestone.py | 75 + 
python/mozbuild/mozbuild/mozconfig.py | 485 + python/mozbuild/mozbuild/mozconfig_loader | 80 + python/mozbuild/mozbuild/mozinfo.py | 160 + python/mozbuild/mozbuild/preprocessor.py | 805 + python/mozbuild/mozbuild/pythonutil.py | 25 + .../resources/html-build-viewer/index.html | 475 + python/mozbuild/mozbuild/shellutil.py | 209 + python/mozbuild/mozbuild/sphinx.py | 200 + python/mozbuild/mozbuild/test/__init__.py | 0 .../test/action/data/invalid/region.properties | 12 + .../data/package_fennec_apk/assets/asset.txt | 1 + .../action/data/package_fennec_apk/classes.dex | 1 + .../test/action/data/package_fennec_apk/input1.ap_ | Bin 0 -> 503 bytes .../data/package_fennec_apk/input1/res/res.txt | 1 + .../data/package_fennec_apk/input1/resources.arsc | 1 + .../test/action/data/package_fennec_apk/input2.apk | Bin 0 -> 1649 bytes .../package_fennec_apk/input2/assets/asset.txt | 1 + .../data/package_fennec_apk/input2/assets/omni.ja | 1 + .../data/package_fennec_apk/input2/classes.dex | 1 + .../data/package_fennec_apk/input2/lib/lib.txt | 1 + .../data/package_fennec_apk/input2/res/res.txt | 1 + .../data/package_fennec_apk/input2/resources.arsc | 1 + .../data/package_fennec_apk/input2/root_file.txt | 1 + .../action/data/package_fennec_apk/lib/lib.txt | 1 + .../test/action/data/package_fennec_apk/omni.ja | 1 + .../action/data/package_fennec_apk/root_file.txt | 1 + .../test/action/data/valid-zh-CN/region.properties | 37 + .../mozbuild/test/action/test_buildlist.py | 89 + .../test/action/test_generate_browsersearch.py | 55 + .../test/action/test_package_fennec_apk.py | 70 + python/mozbuild/mozbuild/test/backend/__init__.py | 0 python/mozbuild/mozbuild/test/backend/common.py | 156 + .../library1/resources/values/strings.xml | 1 + .../data/android_eclipse/main1/AndroidManifest.xml | 1 + .../data/android_eclipse/main2/AndroidManifest.xml | 1 + .../data/android_eclipse/main2/assets/dummy.txt | 1 + .../backend/data/android_eclipse/main2/extra.jar | 1 + 
.../android_eclipse/main2/res/values/strings.xml | 1 + .../data/android_eclipse/main3/AndroidManifest.xml | 1 + .../backend/data/android_eclipse/main3/a/A.java | 1 + .../backend/data/android_eclipse/main3/b/B.java | 1 + .../backend/data/android_eclipse/main3/c/C.java | 1 + .../test/backend/data/android_eclipse/main4 | 1 + .../test/backend/data/android_eclipse/moz.build | 37 + .../backend/data/android_eclipse/subdir/moz.build | 13 + .../subdir/submain/AndroidManifest.xml | 1 + .../backend/data/binary-components/bar/moz.build | 2 + .../backend/data/binary-components/foo/moz.build | 1 + .../test/backend/data/binary-components/moz.build | 10 + .../test/backend/data/branding-files/bar.ico | 0 .../test/backend/data/branding-files/foo.ico | 0 .../test/backend/data/branding-files/moz.build | 12 + .../test/backend/data/branding-files/sub/quux.png | 0 .../mozbuild/test/backend/data/build/app/moz.build | 54 + .../mozbuild/test/backend/data/build/bar.ini | 1 + .../mozbuild/test/backend/data/build/bar.js | 2 + .../mozbuild/test/backend/data/build/bar.jsm | 1 + .../mozbuild/test/backend/data/build/baz.ini | 2 + .../mozbuild/test/backend/data/build/baz.jsm | 2 + .../test/backend/data/build/components.manifest | 2 + .../mozbuild/test/backend/data/build/foo.css | 2 + .../mozbuild/test/backend/data/build/foo.ini | 1 + .../mozbuild/test/backend/data/build/foo.js | 1 + .../mozbuild/test/backend/data/build/foo.jsm | 1 + .../mozbuild/test/backend/data/build/jar.mn | 11 + .../mozbuild/test/backend/data/build/moz.build | 68 + .../mozbuild/test/backend/data/build/prefs.js | 1 + .../mozbuild/test/backend/data/build/qux.ini | 5 + .../mozbuild/test/backend/data/build/qux.jsm | 5 + .../mozbuild/test/backend/data/build/resource | 1 + .../mozbuild/test/backend/data/build/resource2 | 1 + .../mozbuild/test/backend/data/build/subdir/bar.js | 1 + .../mozbuild/test/backend/data/defines/moz.build | 14 + .../test/backend/data/dist-files/install.rdf | 0 .../mozbuild/test/backend/data/dist-files/main.js 
| 0 .../test/backend/data/dist-files/moz.build | 8 + .../test/backend/data/exports-generated/dom1.h | 0 .../test/backend/data/exports-generated/foo.h | 0 .../test/backend/data/exports-generated/gfx.h | 0 .../test/backend/data/exports-generated/moz.build | 12 + .../test/backend/data/exports-generated/mozilla1.h | 0 .../mozbuild/test/backend/data/exports/dom1.h | 0 .../mozbuild/test/backend/data/exports/dom2.h | 0 .../mozbuild/test/backend/data/exports/foo.h | 0 .../mozbuild/test/backend/data/exports/gfx.h | 0 .../mozbuild/test/backend/data/exports/moz.build | 8 + .../mozbuild/test/backend/data/exports/mozilla1.h | 0 .../mozbuild/test/backend/data/exports/mozilla2.h | 0 .../mozbuild/test/backend/data/exports/pprio.h | 0 .../test/backend/data/final_target/both/moz.build | 6 + .../data/final_target/dist-subdir/moz.build | 5 + .../data/final_target/final-target/moz.build | 5 + .../test/backend/data/final_target/moz.build | 5 + .../backend/data/final_target/xpi-name/moz.build | 5 + .../test/backend/data/generated-files/foo-data | 0 .../backend/data/generated-files/generate-bar.py | 0 .../backend/data/generated-files/generate-foo.py | 0 .../test/backend/data/generated-files/moz.build | 12 + .../test/backend/data/generated_includes/moz.build | 5 + .../test/backend/data/host-defines/moz.build | 14 + .../data/install_substitute_config_files/moz.build | 6 + .../install_substitute_config_files/sub/foo.h.in | 1 + .../install_substitute_config_files/sub/moz.build | 7 + .../test/backend/data/ipdl_sources/bar/moz.build | 10 + .../test/backend/data/ipdl_sources/foo/moz.build | 10 + .../test/backend/data/ipdl_sources/moz.build | 10 + .../test/backend/data/jar-manifests/moz.build | 8 + .../bar/baz/dummy_file_for_nonempty_directory | 0 .../foo/dummy_file_for_nonempty_directory | 0 .../test/backend/data/local_includes/moz.build | 5 + .../test/backend/data/resources/bar.res.in | 0 .../test/backend/data/resources/cursor.cur | 0 .../test/backend/data/resources/desktop1.ttf | 0 
.../test/backend/data/resources/desktop2.ttf | 0 .../test/backend/data/resources/extra.manifest | 0 .../mozbuild/test/backend/data/resources/font1.ttf | 0 .../mozbuild/test/backend/data/resources/font2.ttf | 0 .../mozbuild/test/backend/data/resources/foo.res | 0 .../test/backend/data/resources/mobile.ttf | 0 .../mozbuild/test/backend/data/resources/moz.build | 9 + .../test/backend/data/resources/test.manifest | 0 .../mozbuild/test/backend/data/sdk-files/bar.ico | 0 .../mozbuild/test/backend/data/sdk-files/foo.ico | 0 .../mozbuild/test/backend/data/sdk-files/moz.build | 11 + .../test/backend/data/sdk-files/sub/quux.png | 0 .../mozbuild/test/backend/data/sources/bar.c | 0 .../mozbuild/test/backend/data/sources/bar.cpp | 0 .../mozbuild/test/backend/data/sources/bar.mm | 0 .../mozbuild/test/backend/data/sources/bar.s | 0 .../mozbuild/test/backend/data/sources/baz.S | 0 .../mozbuild/test/backend/data/sources/foo.S | 0 .../mozbuild/test/backend/data/sources/foo.asm | 0 .../mozbuild/test/backend/data/sources/foo.c | 0 .../mozbuild/test/backend/data/sources/foo.cpp | 0 .../mozbuild/test/backend/data/sources/foo.mm | 0 .../mozbuild/test/backend/data/sources/moz.build | 21 + .../mozbuild/test/backend/data/stub0/Makefile.in | 4 + .../test/backend/data/stub0/dir1/Makefile.in | 7 + .../test/backend/data/stub0/dir1/moz.build | 5 + .../test/backend/data/stub0/dir2/moz.build | 4 + .../test/backend/data/stub0/dir3/Makefile.in | 7 + .../test/backend/data/stub0/dir3/moz.build | 4 + .../mozbuild/test/backend/data/stub0/moz.build | 7 + .../data/substitute_config_files/Makefile.in | 0 .../backend/data/substitute_config_files/foo.in | 1 + .../backend/data/substitute_config_files/moz.build | 5 + .../child/another-file.sjs | 0 .../test-manifest-shared-support/child/browser.ini | 6 + .../child/data/one.txt | 0 .../child/data/two.txt | 0 .../test-manifest-shared-support/child/test_sub.js | 0 .../test-manifest-shared-support/mochitest.ini | 8 + .../data/test-manifest-shared-support/moz.build 
| 5 + .../test-manifest-shared-support/support-file.txt | 0 .../data/test-manifest-shared-support/test_foo.js | 0 .../mochitest1.ini | 4 + .../mochitest2.ini | 4 + .../moz.build | 7 + .../test_bar.js | 0 .../test_foo.js | 0 .../instrumentation.ini | 1 + .../test-manifests-package-tests/mochitest.ini | 1 + .../data/test-manifests-package-tests/mochitest.js | 0 .../data/test-manifests-package-tests/moz.build | 10 + .../test-manifests-package-tests/not_packaged.java | 0 .../data/test-manifests-written/dir1/test_bar.js | 0 .../data/test-manifests-written/dir1/xpcshell.ini | 3 + .../data/test-manifests-written/mochitest.ini | 3 + .../data/test-manifests-written/mochitest.js | 0 .../backend/data/test-manifests-written/moz.build | 9 + .../data/test-manifests-written/xpcshell.ini | 4 + .../data/test-manifests-written/xpcshell.js | 0 .../mozbuild/test/backend/data/test_config/file.in | 3 + .../test/backend/data/test_config/moz.build | 3 + .../backend/data/variable_passthru/Makefile.in | 0 .../test/backend/data/variable_passthru/moz.build | 23 + .../test/backend/data/variable_passthru/test1.c | 0 .../test/backend/data/variable_passthru/test1.cpp | 0 .../test/backend/data/variable_passthru/test1.mm | 0 .../test/backend/data/variable_passthru/test2.c | 0 .../test/backend/data/variable_passthru/test2.cpp | 0 .../test/backend/data/variable_passthru/test2.mm | 0 .../test/backend/data/visual-studio/dir1/bar.cpp | 0 .../test/backend/data/visual-studio/dir1/foo.cpp | 0 .../test/backend/data/visual-studio/dir1/moz.build | 9 + .../test/backend/data/visual-studio/moz.build | 7 + .../data/xpidl/config/makefiles/xpidl/Makefile.in | 0 .../mozbuild/test/backend/data/xpidl/moz.build | 6 + .../mozbuild/test/backend/test_android_eclipse.py | 153 + .../mozbuild/mozbuild/test/backend/test_build.py | 233 + .../test/backend/test_configenvironment.py | 63 + .../mozbuild/test/backend/test_recursivemake.py | 942 + .../mozbuild/test/backend/test_visualstudio.py | 64 + 
python/mozbuild/mozbuild/test/common.py | 50 + .../mozbuild/mozbuild/test/compilation/__init__.py | 0 .../mozbuild/test/compilation/test_warnings.py | 241 + python/mozbuild/mozbuild/test/configure/common.py | 279 + .../test/configure/data/decorators.configure | 44 + .../mozbuild/test/configure/data/empty_mozconfig | 0 .../mozbuild/test/configure/data/extra.configure | 13 + .../test/configure/data/imply_option/imm.configure | 32 + .../configure/data/imply_option/infer.configure | 24 + .../configure/data/imply_option/infer_ko.configure | 31 + .../configure/data/imply_option/negative.configure | 34 + .../configure/data/imply_option/simple.configure | 24 + .../configure/data/imply_option/values.configure | 24 + .../test/configure/data/included.configure | 53 + .../mozbuild/test/configure/data/moz.configure | 174 + .../test/configure/data/set_config.configure | 43 + .../test/configure/data/set_define.configure | 43 + .../test/configure/data/subprocess.configure | 23 + python/mozbuild/mozbuild/test/configure/lint.py | 65 + .../test/configure/test_checks_configure.py | 940 + .../mozbuild/test/configure/test_compile_checks.py | 403 + .../mozbuild/test/configure/test_configure.py | 1273 ++ .../mozbuild/mozbuild/test/configure/test_lint.py | 132 + .../mozbuild/test/configure/test_moz_configure.py | 93 + .../mozbuild/test/configure/test_options.py | 852 + .../test/configure/test_toolchain_configure.py | 1271 ++ .../test/configure/test_toolchain_helpers.py | 437 + .../test/configure/test_toolkit_moz_configure.py | 67 + .../mozbuild/mozbuild/test/configure/test_util.py | 558 + .../mozbuild/mozbuild/test/controller/__init__.py | 0 .../mozbuild/test/controller/test_ccachestats.py | 208 + .../mozbuild/test/controller/test_clobber.py | 213 + python/mozbuild/mozbuild/test/data/Makefile | 0 python/mozbuild/mozbuild/test/data/bad.properties | 12 + .../mozbuild/mozbuild/test/data/test-dir/Makefile | 0 .../mozbuild/test/data/test-dir/with/Makefile | 0 
.../test/data/test-dir/with/without/with/Makefile | 0 .../test/data/test-dir/without/with/Makefile | 0 .../mozbuild/mozbuild/test/data/valid.properties | 11 + python/mozbuild/mozbuild/test/frontend/__init__.py | 0 .../test/frontend/data/android-res-dirs/dir1/foo | 0 .../test/frontend/data/android-res-dirs/moz.build | 9 + .../frontend/data/binary-components/bar/moz.build | 2 + .../frontend/data/binary-components/foo/moz.build | 1 + .../test/frontend/data/binary-components/moz.build | 10 + .../test/frontend/data/branding-files/bar.ico | 0 .../test/frontend/data/branding-files/baz.png | 0 .../test/frontend/data/branding-files/foo.xpm | 0 .../test/frontend/data/branding-files/moz.build | 13 + .../test/frontend/data/branding-files/quux.icns | 0 .../data/config-file-substitution/moz.build | 6 + .../crate-dependency-path-resolution/Cargo.toml | 18 + .../crate-dependency-path-resolution/moz.build | 18 + .../shallow/Cargo.toml | 6 + .../the/depths/Cargo.toml | 9 + .../mozbuild/test/frontend/data/defines/moz.build | 14 + .../frontend/data/dist-files-missing/install.rdf | 0 .../frontend/data/dist-files-missing/moz.build | 8 + .../test/frontend/data/dist-files/install.rdf | 0 .../mozbuild/test/frontend/data/dist-files/main.js | 0 .../test/frontend/data/dist-files/moz.build | 8 + .../test/frontend/data/exports-generated/foo.h | 0 .../test/frontend/data/exports-generated/moz.build | 8 + .../frontend/data/exports-generated/mozilla1.h | 0 .../frontend/data/exports-missing-generated/foo.h | 0 .../data/exports-missing-generated/moz.build | 5 + .../test/frontend/data/exports-missing/foo.h | 0 .../test/frontend/data/exports-missing/moz.build | 6 + .../test/frontend/data/exports-missing/mozilla1.h | 0 .../mozbuild/test/frontend/data/exports/bar.h | 0 .../mozbuild/test/frontend/data/exports/baz.h | 0 .../mozbuild/test/frontend/data/exports/dom1.h | 0 .../mozbuild/test/frontend/data/exports/dom2.h | 0 .../mozbuild/test/frontend/data/exports/dom3.h | 0 
.../mozbuild/test/frontend/data/exports/foo.h | 0 .../mozbuild/test/frontend/data/exports/gfx.h | 0 .../mozbuild/test/frontend/data/exports/mem.h | 0 .../mozbuild/test/frontend/data/exports/mem2.h | 0 .../mozbuild/test/frontend/data/exports/moz.build | 13 + .../mozbuild/test/frontend/data/exports/mozilla1.h | 0 .../mozbuild/test/frontend/data/exports/mozilla2.h | 0 .../mozbuild/test/frontend/data/exports/pprio.h | 0 .../mozbuild/test/frontend/data/exports/pprthred.h | 0 .../bug_component/bad-assignment/moz.build | 2 + .../bug_component/different-matchers/moz.build | 4 + .../data/files-info/bug_component/final/moz.build | 3 + .../bug_component/final/subcomponent/moz.build | 2 + .../data/files-info/bug_component/moz.build | 2 + .../data/files-info/bug_component/simple/moz.build | 2 + .../data/files-info/bug_component/static/moz.build | 5 + .../test/frontend/data/files-info/moz.build | 0 .../data/files-test-metadata/default/module.js | 0 .../data/files-test-metadata/default/moz.build | 6 + .../default/tests/reftests/reftest-stylo.list | 2 + .../default/tests/reftests/reftest.list | 1 + .../default/tests/reftests/test1-ref.html | 0 .../default/tests/reftests/test1.html | 0 .../default/tests/xpcshell/test_default_mod.js | 0 .../default/tests/xpcshell/xpcshell.ini | 1 + .../frontend/data/files-test-metadata/moz.build | 4 + .../data/files-test-metadata/simple/base.cpp | 0 .../files-test-metadata/simple/browser/browser.ini | 1 + .../files-test-metadata/simple/browser/test_mod.js | 0 .../data/files-test-metadata/simple/moz.build | 22 + .../data/files-test-metadata/simple/src/module.jsm | 0 .../data/files-test-metadata/simple/src/moz.build | 3 + .../files-test-metadata/simple/tests/mochitest.ini | 2 + .../files-test-metadata/simple/tests/moz.build | 1 + .../simple/tests/test_general.html | 0 .../simple/tests/test_specific.html | 0 .../data/files-test-metadata/tagged/moz.build | 15 + .../data/files-test-metadata/tagged/src/bar.jsm | 0 .../tagged/src/submodule/foo.js | 0 
.../files-test-metadata/tagged/tests/mochitest.ini | 3 + .../files-test-metadata/tagged/tests/test_bar.js | 0 .../tagged/tests/test_simple.html | 0 .../tagged/tests/test_specific.html | 0 .../files-test-metadata/tagged/tests/xpcshell.ini | 1 + .../final-target-pp-files-non-srcdir/moz.build | 7 + .../data/generated-files-absolute-script/moz.build | 9 + .../data/generated-files-absolute-script/script.py | 0 .../data/generated-files-method-names/moz.build | 13 + .../data/generated-files-method-names/script.py | 0 .../data/generated-files-no-inputs/moz.build | 9 + .../data/generated-files-no-inputs/script.py | 0 .../generated-files-no-python-script/moz.build | 8 + .../generated-files-no-python-script/script.rb | 0 .../data/generated-files-no-script/moz.build | 8 + .../test/frontend/data/generated-files/moz.build | 5 + .../test/frontend/data/generated-sources/a.cpp | 0 .../test/frontend/data/generated-sources/b.cc | 0 .../test/frontend/data/generated-sources/c.cxx | 0 .../test/frontend/data/generated-sources/d.c | 0 .../test/frontend/data/generated-sources/e.m | 0 .../test/frontend/data/generated-sources/f.mm | 0 .../test/frontend/data/generated-sources/g.S | 0 .../test/frontend/data/generated-sources/h.s | 0 .../test/frontend/data/generated-sources/i.asm | 0 .../test/frontend/data/generated-sources/moz.build | 37 + .../frontend/data/generated_includes/moz.build | 5 + .../test/frontend/data/host-defines/moz.build | 14 + .../mozbuild/test/frontend/data/host-sources/a.cpp | 0 .../mozbuild/test/frontend/data/host-sources/b.cc | 0 .../mozbuild/test/frontend/data/host-sources/c.cxx | 0 .../mozbuild/test/frontend/data/host-sources/d.c | 0 .../mozbuild/test/frontend/data/host-sources/e.mm | 0 .../mozbuild/test/frontend/data/host-sources/f.mm | 0 .../test/frontend/data/host-sources/moz.build | 25 + .../frontend/data/include-basic/included.build | 4 + .../test/frontend/data/include-basic/moz.build | 7 + .../data/include-file-stack/included-1.build | 4 + 
.../data/include-file-stack/included-2.build | 4 + .../frontend/data/include-file-stack/moz.build | 5 + .../test/frontend/data/include-missing/moz.build | 5 + .../data/include-outside-topsrcdir/relative.build | 4 + .../include-relative-from-child/child/child.build | 4 + .../include-relative-from-child/child/child2.build | 4 + .../child/grandchild/grandchild.build | 4 + .../data/include-relative-from-child/parent.build | 4 + .../data/include-topsrcdir-relative/moz.build | 5 + .../data/include-topsrcdir-relative/sibling.build | 4 + .../data/inheriting-variables/bar/moz.build | 5 + .../data/inheriting-variables/foo/baz/moz.build | 7 + .../data/inheriting-variables/foo/moz.build | 7 + .../frontend/data/inheriting-variables/moz.build | 10 + .../test/frontend/data/ipdl_sources/bar/moz.build | 10 + .../test/frontend/data/ipdl_sources/foo/moz.build | 10 + .../test/frontend/data/ipdl_sources/moz.build | 10 + .../data/jar-manifests-multiple-files/moz.build | 8 + .../test/frontend/data/jar-manifests/moz.build | 7 + .../frontend/data/library-defines/liba/moz.build | 5 + .../frontend/data/library-defines/libb/moz.build | 7 + .../frontend/data/library-defines/libc/moz.build | 5 + .../frontend/data/library-defines/libd/moz.build | 5 + .../test/frontend/data/library-defines/moz.build | 9 + .../bar/baz/dummy_file_for_nonempty_directory | 0 .../foo/dummy_file_for_nonempty_directory | 0 .../test/frontend/data/local_includes/moz.build | 5 + .../frontend/data/missing-local-includes/moz.build | 5 + .../data/multiple-rust-libraries/moz.build | 27 + .../data/multiple-rust-libraries/rust1/Cargo.toml | 15 + .../data/multiple-rust-libraries/rust1/moz.build | 4 + .../data/multiple-rust-libraries/rust2/Cargo.toml | 15 + .../data/multiple-rust-libraries/rust2/moz.build | 4 + .../mozbuild/test/frontend/data/program/moz.build | 15 + .../frontend/data/reader-error-bad-dir/moz.build | 5 + .../frontend/data/reader-error-basic/moz.build | 5 + .../data/reader-error-empty-list/moz.build | 5 + 
.../data/reader-error-error-func/moz.build | 6 + .../data/reader-error-included-from/child.build | 4 + .../data/reader-error-included-from/moz.build | 5 + .../data/reader-error-missing-include/moz.build | 5 + .../data/reader-error-outside-topsrcdir/moz.build | 5 + .../reader-error-read-unknown-global/moz.build | 5 + .../data/reader-error-repeated-dir/moz.build | 7 + .../data/reader-error-script-error/moz.build | 5 + .../frontend/data/reader-error-syntax/moz.build | 5 + .../data/reader-error-write-bad-value/moz.build | 5 + .../reader-error-write-unknown-global/moz.build | 7 + .../reader-relevant-mozbuild/d1/every-level/a/file | 0 .../d1/every-level/a/moz.build | 0 .../reader-relevant-mozbuild/d1/every-level/b/file | 0 .../d1/every-level/b/moz.build | 0 .../d1/every-level/moz.build | 0 .../data/reader-relevant-mozbuild/d1/file1 | 0 .../data/reader-relevant-mozbuild/d1/file2 | 0 .../data/reader-relevant-mozbuild/d1/moz.build | 0 .../d1/no-intermediate-moz-build/child/file | 0 .../d1/no-intermediate-moz-build/child/moz.build | 0 .../d1/parent-is-far/dir1/dir2/dir3/file | 0 .../d1/parent-is-far/moz.build | 0 .../data/reader-relevant-mozbuild/d2/dir1/file | 0 .../reader-relevant-mozbuild/d2/dir1/moz.build | 0 .../data/reader-relevant-mozbuild/d2/dir2/file | 0 .../reader-relevant-mozbuild/d2/dir2/moz.build | 0 .../data/reader-relevant-mozbuild/d2/moz.build | 0 .../frontend/data/reader-relevant-mozbuild/file | 0 .../data/reader-relevant-mozbuild/moz.build | 0 .../data/rust-library-dash-folding/Cargo.toml | 15 + .../data/rust-library-dash-folding/moz.build | 18 + .../rust-library-invalid-crate-type/Cargo.toml | 15 + .../data/rust-library-invalid-crate-type/moz.build | 18 + .../data/rust-library-name-mismatch/Cargo.toml | 12 + .../data/rust-library-name-mismatch/moz.build | 18 + .../data/rust-library-no-cargo-toml/moz.build | 18 + .../data/rust-library-no-lib-section/Cargo.toml | 12 + .../data/rust-library-no-lib-section/moz.build | 18 + 
.../rust-library-no-profile-section/Cargo.toml | 12 + .../data/rust-library-no-profile-section/moz.build | 18 + .../data/rust-library-non-abort-panic/Cargo.toml | 14 + .../data/rust-library-non-abort-panic/moz.build | 18 + .../mozbuild/test/frontend/data/sdk-files/bar.ico | 0 .../mozbuild/test/frontend/data/sdk-files/baz.png | 0 .../mozbuild/test/frontend/data/sdk-files/foo.xpm | 0 .../test/frontend/data/sdk-files/moz.build | 12 + .../test/frontend/data/sdk-files/quux.icns | 0 .../mozbuild/test/frontend/data/sources-just-c/d.c | 0 .../mozbuild/test/frontend/data/sources-just-c/e.m | 0 .../mozbuild/test/frontend/data/sources-just-c/g.S | 0 .../mozbuild/test/frontend/data/sources-just-c/h.s | 0 .../test/frontend/data/sources-just-c/i.asm | 0 .../test/frontend/data/sources-just-c/moz.build | 27 + .../mozbuild/test/frontend/data/sources/a.cpp | 0 .../mozbuild/test/frontend/data/sources/b.cc | 0 .../mozbuild/test/frontend/data/sources/c.cxx | 0 .../mozbuild/test/frontend/data/sources/d.c | 0 .../mozbuild/test/frontend/data/sources/e.m | 0 .../mozbuild/test/frontend/data/sources/f.mm | 0 .../mozbuild/test/frontend/data/sources/g.S | 0 .../mozbuild/test/frontend/data/sources/h.s | 0 .../mozbuild/test/frontend/data/sources/i.asm | 0 .../mozbuild/test/frontend/data/sources/moz.build | 37 + .../frontend/data/templates/templates.mozbuild | 21 + .../data/test-harness-files-root/moz.build | 4 + .../frontend/data/test-harness-files/mochitest.ini | 1 + .../frontend/data/test-harness-files/mochitest.py | 1 + .../frontend/data/test-harness-files/moz.build | 7 + .../frontend/data/test-harness-files/runtests.py | 1 + .../test/frontend/data/test-harness-files/utils.py | 1 + .../data/test-install-shared-lib/moz.build | 12 + .../data/test-linkables-cxx-link/moz.build | 11 + .../data/test-linkables-cxx-link/one/foo.cpp | 0 .../data/test-linkables-cxx-link/one/moz.build | 9 + .../data/test-linkables-cxx-link/three/moz.build | 5 + .../data/test-linkables-cxx-link/two/foo.c | 0 
.../data/test-linkables-cxx-link/two/moz.build | 9 + .../absolute-support.ini | 4 + .../data/test-manifest-absolute-support/foo.txt | 1 + .../data/test-manifest-absolute-support/moz.build | 4 + .../test-manifest-absolute-support/test_file.js | 0 .../test/frontend/data/test-manifest-dupes/bar.js | 0 .../test/frontend/data/test-manifest-dupes/foo.js | 0 .../data/test-manifest-dupes/mochitest.ini | 7 + .../frontend/data/test-manifest-dupes/moz.build | 4 + .../frontend/data/test-manifest-dupes/test_baz.js | 0 .../included-reftest.list | 1 + .../data/test-manifest-emitted-includes/moz.build | 1 + .../reftest-stylo.list | 3 + .../test-manifest-emitted-includes/reftest.list | 2 + .../frontend/data/test-manifest-empty/empty.ini | 2 + .../frontend/data/test-manifest-empty/moz.build | 4 + .../test_inactive.html | 0 .../data/test-manifest-install-includes/common.ini | 1 + .../test-manifest-install-includes/mochitest.ini | 4 + .../data/test-manifest-install-includes/moz.build | 4 + .../test-manifest-install-includes/test_foo.html | 1 + .../data/test-manifest-install-subdir/moz.build | 4 + .../data/test-manifest-install-subdir/subdir.ini | 5 + .../test-manifest-install-subdir/test_foo.html | 1 + .../data/test-manifest-just-support/foo.txt | 1 + .../test-manifest-just-support/just-support.ini | 2 + .../data/test-manifest-just-support/moz.build | 4 + .../a11y-support/dir1/bar | 0 .../test-manifest-keys-extracted/a11y-support/foo | 0 .../data/test-manifest-keys-extracted/a11y.ini | 4 + .../data/test-manifest-keys-extracted/browser.ini | 4 + .../data/test-manifest-keys-extracted/chrome.ini | 4 + .../test-manifest-keys-extracted/crashtest.list | 1 + .../data/test-manifest-keys-extracted/metro.ini | 3 + .../test-manifest-keys-extracted/mochitest.ini | 5 + .../data/test-manifest-keys-extracted/moz.build | 12 + .../reftest-stylo.list | 2 + .../data/test-manifest-keys-extracted/reftest.list | 1 + .../data/test-manifest-keys-extracted/test_a11y.js | 0 
.../test-manifest-keys-extracted/test_browser.js | 0 .../test-manifest-keys-extracted/test_chrome.js | 0 .../data/test-manifest-keys-extracted/test_foo.py | 0 .../test-manifest-keys-extracted/test_metro.js | 0 .../test-manifest-keys-extracted/test_mochitest.js | 0 .../test-manifest-keys-extracted/test_xpcshell.js | 0 .../data/test-manifest-keys-extracted/xpcshell.ini | 6 + .../data/test-manifest-missing-manifest/moz.build | 4 + .../moz.build | 4 + .../xpcshell.ini | 4 + .../test-manifest-missing-test-file/mochitest.ini | 1 + .../data/test-manifest-missing-test-file/moz.build | 4 + .../child/mochitest.ini | 4 + .../child/test_foo.js | 0 .../moz.build | 4 + .../support-file.txt | 0 .../child/another-file.sjs | 0 .../test-manifest-shared-missing/child/browser.ini | 6 + .../child/data/one.txt | 0 .../child/data/two.txt | 0 .../test-manifest-shared-missing/child/test_sub.js | 0 .../test-manifest-shared-missing/mochitest.ini | 9 + .../data/test-manifest-shared-missing/moz.build | 5 + .../test-manifest-shared-missing/support-file.txt | 0 .../data/test-manifest-shared-missing/test_foo.js | 0 .../child/another-file.sjs | 0 .../test-manifest-shared-support/child/browser.ini | 6 + .../child/data/one.txt | 0 .../child/data/two.txt | 0 .../test-manifest-shared-support/child/test_sub.js | 0 .../test-manifest-shared-support/mochitest.ini | 8 + .../data/test-manifest-shared-support/moz.build | 5 + .../test-manifest-shared-support/support-file.txt | 0 .../data/test-manifest-shared-support/test_foo.js | 0 .../test-manifest-unmatched-generated/moz.build | 4 + .../test-manifest-unmatched-generated/test.ini | 4 + .../test-manifest-unmatched-generated/test_foo | 0 .../data/test-python-unit-test-missing/moz.build | 4 + .../moz.build | 10 + .../frontend/data/test-symbols-file-objdir/foo.py | 0 .../data/test-symbols-file-objdir/moz.build | 13 + .../frontend/data/test-symbols-file/foo.symbols | 1 + .../test/frontend/data/test-symbols-file/moz.build | 10 + 
.../frontend/data/traversal-all-vars/moz.build | 6 + .../data/traversal-all-vars/parallel/moz.build | 0 .../data/traversal-all-vars/regular/moz.build | 0 .../data/traversal-all-vars/test/moz.build | 0 .../data/traversal-outside-topsrcdir/moz.build | 5 + .../data/traversal-relative-dirs/bar/moz.build | 0 .../data/traversal-relative-dirs/foo/moz.build | 5 + .../data/traversal-relative-dirs/moz.build | 5 + .../data/traversal-repeated-dirs/bar/moz.build | 5 + .../data/traversal-repeated-dirs/foo/moz.build | 5 + .../data/traversal-repeated-dirs/moz.build | 5 + .../frontend/data/traversal-simple/bar/moz.build | 0 .../data/traversal-simple/foo/biz/moz.build | 0 .../frontend/data/traversal-simple/foo/moz.build | 2 + .../test/frontend/data/traversal-simple/moz.build | 5 + .../data/unified-sources-non-unified/bar.cxx | 0 .../frontend/data/unified-sources-non-unified/c1.c | 0 .../frontend/data/unified-sources-non-unified/c2.c | 0 .../data/unified-sources-non-unified/foo.cpp | 0 .../data/unified-sources-non-unified/moz.build | 28 + .../data/unified-sources-non-unified/objc1.mm | 0 .../data/unified-sources-non-unified/objc2.mm | 0 .../data/unified-sources-non-unified/quux.cc | 0 .../test/frontend/data/unified-sources/bar.cxx | 0 .../test/frontend/data/unified-sources/c1.c | 0 .../test/frontend/data/unified-sources/c2.c | 0 .../test/frontend/data/unified-sources/foo.cpp | 0 .../test/frontend/data/unified-sources/moz.build | 28 + .../test/frontend/data/unified-sources/objc1.mm | 0 .../test/frontend/data/unified-sources/objc2.mm | 0 .../test/frontend/data/unified-sources/quux.cc | 0 .../mozbuild/test/frontend/data/use-yasm/moz.build | 5 + .../test/frontend/data/variable-passthru/bans.S | 0 .../test/frontend/data/variable-passthru/moz.build | 25 + .../test/frontend/data/variable-passthru/test1.c | 0 .../test/frontend/data/variable-passthru/test1.cpp | 0 .../test/frontend/data/variable-passthru/test1.mm | 0 .../test/frontend/data/variable-passthru/test2.c | 0 
.../test/frontend/data/variable-passthru/test2.cpp | 0 .../test/frontend/data/variable-passthru/test2.mm | 0 .../data/xpidl-module-no-sources/moz.build | 5 + .../mozbuild/test/frontend/test_context.py | 721 + .../mozbuild/test/frontend/test_emitter.py | 1172 + .../mozbuild/test/frontend/test_namespaces.py | 207 + .../mozbuild/mozbuild/test/frontend/test_reader.py | 485 + .../mozbuild/test/frontend/test_sandbox.py | 534 + .../mozbuild/test/test_android_version_code.py | 63 + python/mozbuild/mozbuild/test/test_base.py | 410 + python/mozbuild/mozbuild/test/test_containers.py | 224 + .../mozbuild/mozbuild/test/test_dotproperties.py | 178 + python/mozbuild/mozbuild/test/test_expression.py | 82 + python/mozbuild/mozbuild/test/test_jarmaker.py | 367 + python/mozbuild/mozbuild/test/test_line_endings.py | 46 + python/mozbuild/mozbuild/test/test_makeutil.py | 165 + python/mozbuild/mozbuild/test/test_mozconfig.py | 489 + python/mozbuild/mozbuild/test/test_mozinfo.py | 278 + python/mozbuild/mozbuild/test/test_preprocessor.py | 646 + python/mozbuild/mozbuild/test/test_pythonutil.py | 23 + python/mozbuild/mozbuild/test/test_testing.py | 332 + python/mozbuild/mozbuild/test/test_util.py | 924 + python/mozbuild/mozbuild/testing.py | 535 + python/mozbuild/mozbuild/util.py | 1264 ++ python/mozbuild/mozbuild/vendor_rust.py | 86 + python/mozbuild/mozbuild/virtualenv.py | 568 + python/mozbuild/mozpack/__init__.py | 0 python/mozbuild/mozpack/archive.py | 107 + python/mozbuild/mozpack/chrome/__init__.py | 0 python/mozbuild/mozpack/chrome/flags.py | 258 + python/mozbuild/mozpack/chrome/manifest.py | 368 + python/mozbuild/mozpack/copier.py | 568 + python/mozbuild/mozpack/dmg.py | 121 + python/mozbuild/mozpack/errors.py | 139 + python/mozbuild/mozpack/executables.py | 124 + python/mozbuild/mozpack/files.py | 1106 + python/mozbuild/mozpack/hg.py | 95 + python/mozbuild/mozpack/manifests.py | 419 + python/mozbuild/mozpack/mozjar.py | 816 + python/mozbuild/mozpack/packager/__init__.py | 408 + 
python/mozbuild/mozpack/packager/formats.py | 324 + python/mozbuild/mozpack/packager/l10n.py | 259 + python/mozbuild/mozpack/packager/unpack.py | 202 + python/mozbuild/mozpack/path.py | 136 + python/mozbuild/mozpack/test/__init__.py | 0 python/mozbuild/mozpack/test/data/test_data | 1 + .../mozpack/test/support/minify_js_verify.py | 17 + python/mozbuild/mozpack/test/test_archive.py | 190 + python/mozbuild/mozpack/test/test_chrome_flags.py | 148 + .../mozbuild/mozpack/test/test_chrome_manifest.py | 149 + python/mozbuild/mozpack/test/test_copier.py | 529 + python/mozbuild/mozpack/test/test_errors.py | 93 + python/mozbuild/mozpack/test/test_files.py | 1160 + python/mozbuild/mozpack/test/test_manifests.py | 375 + python/mozbuild/mozpack/test/test_mozjar.py | 342 + python/mozbuild/mozpack/test/test_packager.py | 490 + .../mozbuild/mozpack/test/test_packager_formats.py | 428 + python/mozbuild/mozpack/test/test_packager_l10n.py | 126 + .../mozbuild/mozpack/test/test_packager_unpack.py | 65 + python/mozbuild/mozpack/test/test_path.py | 143 + python/mozbuild/mozpack/test/test_unify.py | 199 + python/mozbuild/mozpack/unify.py | 231 + python/mozbuild/setup.py | 29 + python/mozlint/mozlint/__init__.py | 7 + python/mozlint/mozlint/cli.py | 115 + python/mozlint/mozlint/errors.py | 25 + python/mozlint/mozlint/formatters/__init__.py | 25 + python/mozlint/mozlint/formatters/stylish.py | 122 + python/mozlint/mozlint/formatters/treeherder.py | 31 + python/mozlint/mozlint/parser.py | 85 + python/mozlint/mozlint/pathutils.py | 156 + python/mozlint/mozlint/result.py | 88 + python/mozlint/mozlint/roller.py | 154 + python/mozlint/mozlint/types.py | 142 + python/mozlint/mozlint/vcs.py | 62 + python/mozlint/setup.py | 26 + python/mozlint/test/__init__.py | 0 python/mozlint/test/conftest.py | 42 + python/mozlint/test/files/foobar.js | 2 + python/mozlint/test/files/foobar.py | 2 + python/mozlint/test/files/no_foobar.js | 2 + python/mozlint/test/linters/badreturncode.lint | 21 + 
python/mozlint/test/linters/explicit_path.lint | 13 + python/mozlint/test/linters/external.lint | 30 + python/mozlint/test/linters/invalid_exclude.lint | 10 + python/mozlint/test/linters/invalid_extension.lnt | 9 + python/mozlint/test/linters/invalid_include.lint | 10 + python/mozlint/test/linters/invalid_type.lint | 9 + python/mozlint/test/linters/missing_attrs.lint | 7 + .../mozlint/test/linters/missing_definition.lint | 4 + python/mozlint/test/linters/raises.lint | 19 + python/mozlint/test/linters/regex.lint | 15 + python/mozlint/test/linters/string.lint | 15 + python/mozlint/test/linters/structured.lint | 28 + python/mozlint/test/test_formatters.py | 90 + python/mozlint/test/test_parser.py | 55 + python/mozlint/test/test_roller.py | 82 + python/mozlint/test/test_types.py | 50 + .../mozversioncontrol/__init__.py | 144 + .../mozversioncontrol/repoupdate.py | 40 + python/psutil/CREDITS | 310 + python/psutil/HISTORY.rst | 1018 + python/psutil/INSTALL.rst | 116 + python/psutil/LICENSE | 27 + python/psutil/MANIFEST.in | 22 + python/psutil/Makefile | 122 + python/psutil/PKG-INFO | 434 + python/psutil/README.rst | 386 + python/psutil/TODO | 167 + python/psutil/docs/Makefile | 177 + python/psutil/docs/README | 15 + python/psutil/docs/_static/copybutton.js | 57 + python/psutil/docs/_static/favicon.ico | Bin 0 -> 15086 bytes python/psutil/docs/_static/logo.png | Bin 0 -> 4922 bytes python/psutil/docs/_static/sidebar.js | 161 + python/psutil/docs/_template/globaltoc.html | 12 + python/psutil/docs/_template/indexcontent.html | 4 + python/psutil/docs/_template/indexsidebar.html | 8 + python/psutil/docs/_template/page.html | 66 + .../docs/_themes/pydoctheme/static/pydoctheme.css | 187 + python/psutil/docs/_themes/pydoctheme/theme.conf | 23 + python/psutil/docs/conf.py | 248 + python/psutil/docs/index.rst | 1400 ++ python/psutil/docs/make.bat | 242 + python/psutil/docs/xxx | 11 + python/psutil/examples/disk_usage.py | 62 + python/psutil/examples/free.py | 41 + 
python/psutil/examples/ifconfig.py | 78 + python/psutil/examples/iotop.py | 179 + python/psutil/examples/killall.py | 32 + python/psutil/examples/meminfo.py | 68 + python/psutil/examples/netstat.py | 64 + python/psutil/examples/nettop.py | 165 + python/psutil/examples/pidof.py | 53 + python/psutil/examples/pmap.py | 57 + python/psutil/examples/process_detail.py | 167 + python/psutil/examples/ps.py | 81 + python/psutil/examples/pstree.py | 71 + python/psutil/examples/top.py | 233 + python/psutil/examples/who.py | 33 + python/psutil/make.bat | 201 + python/psutil/psutil/__init__.py | 1887 ++ python/psutil/psutil/_common.py | 246 + python/psutil/psutil/_compat.py | 189 + python/psutil/psutil/_psbsd.py | 455 + python/psutil/psutil/_pslinux.py | 1206 + python/psutil/psutil/_psosx.py | 363 + python/psutil/psutil/_psposix.py | 156 + python/psutil/psutil/_pssunos.py | 553 + python/psutil/psutil/_psutil_bsd.c | 2296 ++ python/psutil/psutil/_psutil_bsd.h | 53 + python/psutil/psutil/_psutil_common.c | 37 + python/psutil/psutil/_psutil_common.h | 10 + python/psutil/psutil/_psutil_linux.c | 689 + python/psutil/psutil/_psutil_linux.h | 21 + python/psutil/psutil/_psutil_osx.c | 1808 ++ python/psutil/psutil/_psutil_osx.h | 41 + python/psutil/psutil/_psutil_posix.c | 531 + python/psutil/psutil/_psutil_posix.h | 15 + python/psutil/psutil/_psutil_sunos.c | 1389 ++ python/psutil/psutil/_psutil_sunos.h | 28 + python/psutil/psutil/_psutil_windows.c | 3405 +++ python/psutil/psutil/_psutil_windows.h | 68 + python/psutil/psutil/_pswindows.py | 548 + python/psutil/psutil/arch/bsd/process_info.c | 265 + python/psutil/psutil/arch/bsd/process_info.h | 15 + python/psutil/psutil/arch/osx/process_info.c | 281 + python/psutil/psutil/arch/osx/process_info.h | 16 + python/psutil/psutil/arch/windows/glpi.h | 41 + python/psutil/psutil/arch/windows/inet_ntop.c | 41 + python/psutil/psutil/arch/windows/inet_ntop.h | 10 + python/psutil/psutil/arch/windows/ntextapi.h | 228 + 
.../psutil/psutil/arch/windows/process_handles.c | 533 + .../psutil/psutil/arch/windows/process_handles.h | 113 + python/psutil/psutil/arch/windows/process_info.c | 435 + python/psutil/psutil/arch/windows/process_info.h | 26 + python/psutil/psutil/arch/windows/security.c | 228 + python/psutil/psutil/arch/windows/security.h | 17 + python/psutil/setup.cfg | 5 + python/psutil/setup.py | 206 + python/psutil/test/README.rst | 21 + python/psutil/test/_bsd.py | 252 + python/psutil/test/_linux.py | 473 + python/psutil/test/_osx.py | 160 + python/psutil/test/_posix.py | 258 + python/psutil/test/_sunos.py | 48 + python/psutil/test/_windows.py | 464 + python/psutil/test/test_memory_leaks.py | 445 + python/psutil/test/test_psutil.py | 3013 +++ python/psutil/tox.ini | 32 + python/py/AUTHORS | 24 + python/py/LICENSE | 19 + python/py/MANIFEST.in | 9 + python/py/PKG-INFO | 46 + python/py/README.txt | 21 + python/py/py/__init__.py | 150 + python/py/py/__metainfo.py | 2 + python/py/py/_apipkg.py | 181 + python/py/py/_builtin.py | 248 + python/py/py/_code/__init__.py | 1 + python/py/py/_code/_assertionnew.py | 339 + python/py/py/_code/_assertionold.py | 555 + python/py/py/_code/_py2traceback.py | 79 + python/py/py/_code/assertion.py | 94 + python/py/py/_code/code.py | 787 + python/py/py/_code/source.py | 419 + python/py/py/_error.py | 88 + python/py/py/_iniconfig.py | 162 + python/py/py/_io/__init__.py | 1 + python/py/py/_io/capture.py | 371 + python/py/py/_io/saferepr.py | 71 + python/py/py/_io/terminalwriter.py | 348 + python/py/py/_log/__init__.py | 2 + python/py/py/_log/log.py | 186 + python/py/py/_log/warning.py | 76 + python/py/py/_path/__init__.py | 1 + python/py/py/_path/cacheutil.py | 114 + python/py/py/_path/common.py | 403 + python/py/py/_path/local.py | 911 + python/py/py/_path/svnurl.py | 380 + python/py/py/_path/svnwc.py | 1240 ++ python/py/py/_process/__init__.py | 1 + python/py/py/_process/cmdexec.py | 49 + python/py/py/_process/forkedfunc.py | 120 + 
python/py/py/_process/killproc.py | 23 + python/py/py/_std.py | 18 + python/py/py/_xmlgen.py | 253 + python/py/py/test.py | 10 + python/py/setup.cfg | 11 + python/py/setup.py | 38 + python/pyasn1-modules/CHANGES | 45 + python/pyasn1-modules/LICENSE | 24 + python/pyasn1-modules/MANIFEST.in | 3 + python/pyasn1-modules/PKG-INFO | 26 + python/pyasn1-modules/README | 17 + python/pyasn1-modules/pyasn1_modules/__init__.py | 2 + python/pyasn1-modules/pyasn1_modules/pem.py | 51 + python/pyasn1-modules/pyasn1_modules/pkcs12.py | 34 + python/pyasn1-modules/pyasn1_modules/rfc1155.py | 73 + python/pyasn1-modules/pyasn1_modules/rfc1157.py | 90 + python/pyasn1-modules/pyasn1_modules/rfc1901.py | 15 + python/pyasn1-modules/pyasn1_modules/rfc1902.py | 105 + python/pyasn1-modules/pyasn1_modules/rfc1905.py | 100 + python/pyasn1-modules/pyasn1_modules/rfc2251.py | 319 + python/pyasn1-modules/pyasn1_modules/rfc2314.py | 33 + python/pyasn1-modules/pyasn1_modules/rfc2315.py | 205 + python/pyasn1-modules/pyasn1_modules/rfc2437.py | 53 + python/pyasn1-modules/pyasn1_modules/rfc2459.py | 903 + python/pyasn1-modules/pyasn1_modules/rfc2511.py | 176 + python/pyasn1-modules/pyasn1_modules/rfc2560.py | 171 + python/pyasn1-modules/pyasn1_modules/rfc3412.py | 38 + python/pyasn1-modules/pyasn1_modules/rfc3414.py | 17 + python/pyasn1-modules/pyasn1_modules/rfc3447.py | 35 + python/pyasn1-modules/pyasn1_modules/rfc4210.py | 695 + python/pyasn1-modules/pyasn1_modules/rfc5208.py | 39 + python/pyasn1-modules/setup.cfg | 5 + python/pyasn1-modules/setup.py | 89 + python/pyasn1-modules/test/cmp.sh | 93 + python/pyasn1-modules/test/crl.sh | 14 + python/pyasn1-modules/test/crmf.sh | 14 + python/pyasn1-modules/test/ocspreq.sh | 7 + python/pyasn1-modules/test/ocsprsp.sh | 26 + python/pyasn1-modules/test/pkcs1.sh | 26 + python/pyasn1-modules/test/pkcs10.sh | 24 + python/pyasn1-modules/test/pkcs7.sh | 63 + python/pyasn1-modules/test/pkcs8.sh | 26 + python/pyasn1-modules/test/x509dump.sh | 23 + 
python/pyasn1-modules/tools/cmpdump.py | 28 + python/pyasn1-modules/tools/crldump.py | 38 + python/pyasn1-modules/tools/crmfdump.py | 25 + python/pyasn1-modules/tools/ocspclient.py | 145 + python/pyasn1-modules/tools/ocspreqdump.py | 27 + python/pyasn1-modules/tools/ocsprspdump.py | 27 + python/pyasn1-modules/tools/ocspserver.py | 143 + python/pyasn1-modules/tools/pkcs10dump.py | 39 + python/pyasn1-modules/tools/pkcs1dump.py | 42 + python/pyasn1-modules/tools/pkcs7dump.py | 47 + python/pyasn1-modules/tools/pkcs8dump.py | 41 + python/pyasn1-modules/tools/snmpget.py | 37 + python/pyasn1-modules/tools/x509dump.py | 40 + python/pyasn1/CHANGES | 278 + python/pyasn1/LICENSE | 24 + python/pyasn1/MANIFEST.in | 3 + python/pyasn1/PKG-INFO | 26 + python/pyasn1/README | 68 + python/pyasn1/THANKS | 4 + python/pyasn1/TODO | 36 + python/pyasn1/doc/codecs.html | 503 + python/pyasn1/doc/constraints.html | 436 + python/pyasn1/doc/constructed.html | 377 + python/pyasn1/doc/intro.html | 156 + python/pyasn1/doc/pyasn1-tutorial.html | 2405 ++ python/pyasn1/doc/scalar.html | 794 + python/pyasn1/doc/tagging.html | 233 + python/pyasn1/pyasn1/__init__.py | 8 + python/pyasn1/pyasn1/codec/__init__.py | 1 + python/pyasn1/pyasn1/codec/ber/__init__.py | 1 + python/pyasn1/pyasn1/codec/ber/decoder.py | 808 + python/pyasn1/pyasn1/codec/ber/encoder.py | 353 + python/pyasn1/pyasn1/codec/ber/eoo.py | 8 + python/pyasn1/pyasn1/codec/cer/__init__.py | 1 + python/pyasn1/pyasn1/codec/cer/decoder.py | 35 + python/pyasn1/pyasn1/codec/cer/encoder.py | 87 + python/pyasn1/pyasn1/codec/der/__init__.py | 1 + python/pyasn1/pyasn1/codec/der/decoder.py | 9 + python/pyasn1/pyasn1/codec/der/encoder.py | 28 + python/pyasn1/pyasn1/compat/__init__.py | 1 + python/pyasn1/pyasn1/compat/octets.py | 20 + python/pyasn1/pyasn1/debug.py | 65 + python/pyasn1/pyasn1/error.py | 3 + python/pyasn1/pyasn1/type/__init__.py | 1 + python/pyasn1/pyasn1/type/base.py | 249 + python/pyasn1/pyasn1/type/char.py | 61 + 
python/pyasn1/pyasn1/type/constraint.py | 200 + python/pyasn1/pyasn1/type/error.py | 3 + python/pyasn1/pyasn1/type/namedtype.py | 132 + python/pyasn1/pyasn1/type/namedval.py | 46 + python/pyasn1/pyasn1/type/tag.py | 122 + python/pyasn1/pyasn1/type/tagmap.py | 52 + python/pyasn1/pyasn1/type/univ.py | 1042 + python/pyasn1/pyasn1/type/useful.py | 12 + python/pyasn1/setup.cfg | 5 + python/pyasn1/setup.py | 115 + python/pyasn1/test/__init__.py | 1 + python/pyasn1/test/codec/__init__.py | 1 + python/pyasn1/test/codec/ber/__init__.py | 1 + python/pyasn1/test/codec/ber/suite.py | 22 + python/pyasn1/test/codec/ber/test_decoder.py | 535 + python/pyasn1/test/codec/ber/test_encoder.py | 338 + python/pyasn1/test/codec/cer/__init__.py | 1 + python/pyasn1/test/codec/cer/suite.py | 22 + python/pyasn1/test/codec/cer/test_decoder.py | 31 + python/pyasn1/test/codec/cer/test_encoder.py | 107 + python/pyasn1/test/codec/der/__init__.py | 1 + python/pyasn1/test/codec/der/suite.py | 22 + python/pyasn1/test/codec/der/test_decoder.py | 20 + python/pyasn1/test/codec/der/test_encoder.py | 44 + python/pyasn1/test/codec/suite.py | 29 + python/pyasn1/test/suite.py | 26 + python/pyasn1/test/type/__init__.py | 1 + python/pyasn1/test/type/suite.py | 20 + python/pyasn1/test/type/test_constraint.py | 280 + python/pyasn1/test/type/test_namedtype.py | 87 + python/pyasn1/test/type/test_tag.py | 107 + python/pyasn1/test/type/test_univ.py | 479 + python/pylru/pylru.py | 556 + python/pylru/test.py | 238 + python/pystache/.gitignore | 17 + python/pystache/.gitmodules | 3 + python/pystache/.travis.yml | 14 + python/pystache/HISTORY.md | 169 + python/pystache/LICENSE | 22 + python/pystache/MANIFEST.in | 13 + python/pystache/README.md | 276 + python/pystache/TODO.md | 16 + python/pystache/gh/images/logo_phillips.png | Bin 0 -> 173595 bytes python/pystache/pystache/__init__.py | 13 + python/pystache/pystache/commands/__init__.py | 4 + python/pystache/pystache/commands/render.py | 95 + 
python/pystache/pystache/commands/test.py | 18 + python/pystache/pystache/common.py | 71 + python/pystache/pystache/context.py | 342 + python/pystache/pystache/defaults.py | 65 + python/pystache/pystache/init.py | 19 + python/pystache/pystache/loader.py | 170 + python/pystache/pystache/locator.py | 171 + python/pystache/pystache/parsed.py | 50 + python/pystache/pystache/parser.py | 378 + python/pystache/pystache/renderengine.py | 181 + python/pystache/pystache/renderer.py | 460 + python/pystache/pystache/specloader.py | 90 + python/pystache/pystache/template_spec.py | 53 + python/pystache/setup.py | 413 + python/pystache/setup_description.rst | 513 + python/pystache/test_pystache.py | 30 + python/pystache/tox.ini | 36 + python/pytest/.coveragerc | 7 + python/pytest/AUTHORS | 91 + python/pytest/LICENSE | 21 + python/pytest/MANIFEST.in | 34 + python/pytest/PKG-INFO | 133 + python/pytest/README.rst | 102 + python/pytest/_pytest/__init__.py | 2 + python/pytest/_pytest/_argcomplete.py | 101 + python/pytest/_pytest/_code/__init__.py | 12 + python/pytest/_pytest/_code/_py2traceback.py | 81 + python/pytest/_pytest/_code/code.py | 805 + python/pytest/_pytest/_code/source.py | 421 + python/pytest/_pytest/_pluggy.py | 11 + python/pytest/_pytest/assertion/__init__.py | 176 + python/pytest/_pytest/assertion/reinterpret.py | 407 + python/pytest/_pytest/assertion/rewrite.py | 885 + python/pytest/_pytest/assertion/util.py | 332 + python/pytest/_pytest/cacheprovider.py | 245 + python/pytest/_pytest/capture.py | 472 + python/pytest/_pytest/config.py | 1192 + python/pytest/_pytest/doctest.py | 290 + python/pytest/_pytest/genscript.py | 132 + python/pytest/_pytest/helpconfig.py | 139 + python/pytest/_pytest/hookspec.py | 295 + python/pytest/_pytest/junitxml.py | 387 + python/pytest/_pytest/main.py | 744 + python/pytest/_pytest/mark.py | 311 + python/pytest/_pytest/monkeypatch.py | 254 + python/pytest/_pytest/nose.py | 71 + python/pytest/_pytest/pastebin.py | 92 + 
python/pytest/_pytest/pdb.py | 109 + python/pytest/_pytest/pytester.py | 1110 + python/pytest/_pytest/python.py | 2300 ++ python/pytest/_pytest/recwarn.py | 221 + python/pytest/_pytest/resultlog.py | 104 + python/pytest/_pytest/runner.py | 515 + python/pytest/_pytest/skipping.py | 361 + python/pytest/_pytest/standalonetemplate.py | 89 + python/pytest/_pytest/terminal.py | 593 + python/pytest/_pytest/tmpdir.py | 123 + python/pytest/_pytest/unittest.py | 205 + python/pytest/_pytest/vendored_packages/README.md | 13 + .../pytest/_pytest/vendored_packages/__init__.py | 0 .../pluggy-0.3.1.dist-info/DESCRIPTION.rst | 10 + .../pluggy-0.3.1.dist-info/METADATA | 39 + .../pluggy-0.3.1.dist-info/RECORD | 8 + .../vendored_packages/pluggy-0.3.1.dist-info/WHEEL | 6 + .../pluggy-0.3.1.dist-info/metadata.json | 1 + .../pluggy-0.3.1.dist-info/pbr.json | 1 + .../pluggy-0.3.1.dist-info/top_level.txt | 1 + python/pytest/_pytest/vendored_packages/pluggy.py | 777 + python/pytest/pytest.py | 28 + python/pytest/setup.cfg | 19 + python/pytest/setup.py | 122 + python/pytoml/PKG-INFO | 10 + python/pytoml/pytoml/__init__.py | 3 + python/pytoml/pytoml/core.py | 13 + python/pytoml/pytoml/parser.py | 366 + python/pytoml/pytoml/writer.py | 120 + python/pytoml/setup.cfg | 5 + python/pytoml/setup.py | 17 + python/pytoml/test/test.py | 100 + python/pyyaml/CHANGES | 147 + python/pyyaml/LICENSE | 19 + python/pyyaml/PKG-INFO | 36 + python/pyyaml/README | 35 + python/pyyaml/examples/pygments-lexer/example.yaml | 302 + python/pyyaml/examples/pygments-lexer/yaml.py | 431 + python/pyyaml/examples/yaml-highlight/yaml_hl.cfg | 115 + python/pyyaml/examples/yaml-highlight/yaml_hl.py | 114 + python/pyyaml/ext/_yaml.c | 22190 +++++++++++++++++++ python/pyyaml/ext/_yaml.h | 23 + python/pyyaml/ext/_yaml.pxd | 251 + python/pyyaml/ext/_yaml.pyx | 1527 ++ python/pyyaml/lib/yaml/__init__.py | 315 + python/pyyaml/lib/yaml/composer.py | 139 + python/pyyaml/lib/yaml/constructor.py | 675 + python/pyyaml/lib/yaml/cyaml.py | 
85 + python/pyyaml/lib/yaml/dumper.py | 62 + python/pyyaml/lib/yaml/emitter.py | 1140 + python/pyyaml/lib/yaml/error.py | 75 + python/pyyaml/lib/yaml/events.py | 86 + python/pyyaml/lib/yaml/loader.py | 40 + python/pyyaml/lib/yaml/nodes.py | 49 + python/pyyaml/lib/yaml/parser.py | 589 + python/pyyaml/lib/yaml/reader.py | 190 + python/pyyaml/lib/yaml/representer.py | 484 + python/pyyaml/lib/yaml/resolver.py | 224 + python/pyyaml/lib/yaml/scanner.py | 1457 ++ python/pyyaml/lib/yaml/serializer.py | 111 + python/pyyaml/lib/yaml/tokens.py | 104 + python/pyyaml/lib3/yaml/__init__.py | 312 + python/pyyaml/lib3/yaml/composer.py | 139 + python/pyyaml/lib3/yaml/constructor.py | 686 + python/pyyaml/lib3/yaml/cyaml.py | 85 + python/pyyaml/lib3/yaml/dumper.py | 62 + python/pyyaml/lib3/yaml/emitter.py | 1137 + python/pyyaml/lib3/yaml/error.py | 75 + python/pyyaml/lib3/yaml/events.py | 86 + python/pyyaml/lib3/yaml/loader.py | 40 + python/pyyaml/lib3/yaml/nodes.py | 49 + python/pyyaml/lib3/yaml/parser.py | 589 + python/pyyaml/lib3/yaml/reader.py | 192 + python/pyyaml/lib3/yaml/representer.py | 374 + python/pyyaml/lib3/yaml/resolver.py | 224 + python/pyyaml/lib3/yaml/scanner.py | 1448 ++ python/pyyaml/lib3/yaml/serializer.py | 111 + python/pyyaml/lib3/yaml/tokens.py | 104 + python/pyyaml/setup.cfg | 29 + python/pyyaml/setup.py | 345 + python/redo/PKG-INFO | 10 + python/redo/README | 4 + python/redo/redo/__init__.py | 240 + python/redo/redo/cmd.py | 53 + python/redo/setup.cfg | 8 + python/redo/setup.py | 18 + python/requests/HISTORY.rst | 1130 + python/requests/LICENSE | 13 + python/requests/MANIFEST.in | 1 + python/requests/NOTICE | 54 + python/requests/PKG-INFO | 1238 ++ python/requests/README.rst | 86 + python/requests/requests/__init__.py | 83 + python/requests/requests/adapters.py | 453 + python/requests/requests/api.py | 145 + python/requests/requests/auth.py | 223 + python/requests/requests/cacert.pem | 5616 +++++ python/requests/requests/certs.py | 25 + 
python/requests/requests/compat.py | 62 + python/requests/requests/cookies.py | 487 + python/requests/requests/exceptions.py | 114 + python/requests/requests/hooks.py | 34 + python/requests/requests/models.py | 851 + python/requests/requests/packages/__init__.py | 36 + .../requests/requests/packages/chardet/__init__.py | 32 + .../requests/requests/packages/chardet/big5freq.py | 925 + .../requests/packages/chardet/big5prober.py | 42 + .../requests/packages/chardet/chardetect.py | 80 + .../requests/packages/chardet/chardistribution.py | 231 + .../packages/chardet/charsetgroupprober.py | 106 + .../requests/packages/chardet/charsetprober.py | 62 + .../packages/chardet/codingstatemachine.py | 61 + .../requests/requests/packages/chardet/compat.py | 34 + .../requests/packages/chardet/constants.py | 39 + .../requests/packages/chardet/cp949prober.py | 44 + .../requests/packages/chardet/escprober.py | 86 + python/requests/requests/packages/chardet/escsm.py | 242 + .../requests/packages/chardet/eucjpprober.py | 90 + .../requests/packages/chardet/euckrfreq.py | 596 + .../requests/packages/chardet/euckrprober.py | 42 + .../requests/packages/chardet/euctwfreq.py | 428 + .../requests/packages/chardet/euctwprober.py | 41 + .../requests/packages/chardet/gb2312freq.py | 472 + .../requests/packages/chardet/gb2312prober.py | 41 + .../requests/packages/chardet/hebrewprober.py | 283 + .../requests/requests/packages/chardet/jisfreq.py | 569 + .../requests/requests/packages/chardet/jpcntx.py | 227 + .../packages/chardet/langbulgarianmodel.py | 229 + .../requests/packages/chardet/langcyrillicmodel.py | 329 + .../requests/packages/chardet/langgreekmodel.py | 225 + .../requests/packages/chardet/langhebrewmodel.py | 201 + .../packages/chardet/langhungarianmodel.py | 225 + .../requests/packages/chardet/langthaimodel.py | 200 + .../requests/packages/chardet/latin1prober.py | 139 + .../requests/packages/chardet/mbcharsetprober.py | 86 + .../requests/packages/chardet/mbcsgroupprober.py | 54 + 
.../requests/requests/packages/chardet/mbcssm.py | 572 + .../requests/packages/chardet/sbcharsetprober.py | 120 + .../requests/packages/chardet/sbcsgroupprober.py | 69 + .../requests/packages/chardet/sjisprober.py | 91 + .../requests/packages/chardet/universaldetector.py | 170 + .../requests/packages/chardet/utf8prober.py | 76 + .../requests/requests/packages/urllib3/__init__.py | 93 + .../requests/packages/urllib3/_collections.py | 324 + .../requests/packages/urllib3/connection.py | 288 + .../requests/packages/urllib3/connectionpool.py | 818 + .../requests/packages/urllib3/contrib/__init__.py | 0 .../requests/packages/urllib3/contrib/appengine.py | 223 + .../requests/packages/urllib3/contrib/ntlmpool.py | 115 + .../requests/packages/urllib3/contrib/pyopenssl.py | 310 + .../requests/packages/urllib3/exceptions.py | 201 + .../requests/requests/packages/urllib3/fields.py | 178 + .../requests/requests/packages/urllib3/filepost.py | 94 + .../requests/packages/urllib3/packages/__init__.py | 5 + .../packages/urllib3/packages/ordered_dict.py | 259 + .../requests/packages/urllib3/packages/six.py | 385 + .../packages/ssl_match_hostname/__init__.py | 13 + .../packages/ssl_match_hostname/_implementation.py | 105 + .../requests/packages/urllib3/poolmanager.py | 281 + .../requests/requests/packages/urllib3/request.py | 151 + .../requests/requests/packages/urllib3/response.py | 514 + .../requests/packages/urllib3/util/__init__.py | 44 + .../requests/packages/urllib3/util/connection.py | 101 + .../requests/packages/urllib3/util/request.py | 72 + .../requests/packages/urllib3/util/response.py | 74 + .../requests/packages/urllib3/util/retry.py | 286 + .../requests/packages/urllib3/util/ssl_.py | 317 + .../requests/packages/urllib3/util/timeout.py | 242 + .../requests/requests/packages/urllib3/util/url.py | 217 + python/requests/requests/sessions.py | 680 + python/requests/requests/status_codes.py | 90 + python/requests/requests/structures.py | 104 + 
python/requests/requests/utils.py | 721 + python/requests/requirements.txt | 6 + python/requests/setup.cfg | 8 + python/requests/setup.py | 74 + python/requests/test_requests.py | 1746 ++ python/rsa/LICENSE | 13 + python/rsa/MANIFEST.in | 5 + python/rsa/PKG-INFO | 18 + python/rsa/README.rst | 31 + python/rsa/create_timing_table.py | 29 + python/rsa/playstuff.py | 41 + python/rsa/rsa/__init__.py | 45 + python/rsa/rsa/_compat.py | 160 + python/rsa/rsa/_version133.py | 442 + python/rsa/rsa/_version200.py | 529 + python/rsa/rsa/asn1.py | 35 + python/rsa/rsa/bigfile.py | 87 + python/rsa/rsa/cli.py | 379 + python/rsa/rsa/common.py | 185 + python/rsa/rsa/core.py | 58 + python/rsa/rsa/key.py | 612 + python/rsa/rsa/parallel.py | 94 + python/rsa/rsa/pem.py | 120 + python/rsa/rsa/pkcs1.py | 391 + python/rsa/rsa/prime.py | 166 + python/rsa/rsa/randnum.py | 85 + python/rsa/rsa/transform.py | 220 + python/rsa/rsa/util.py | 81 + python/rsa/rsa/varblock.py | 155 + python/rsa/run_tests.py | 43 + python/rsa/setup.cfg | 8 + python/rsa/setup.py | 41 + python/rsa/tests/__init__.py | 0 python/rsa/tests/constants.py | 9 + python/rsa/tests/py2kconstants.py | 3 + python/rsa/tests/py3kconstants.py | 3 + python/rsa/tests/test_bigfile.py | 60 + python/rsa/tests/test_common.py | 61 + python/rsa/tests/test_compat.py | 17 + python/rsa/tests/test_integers.py | 36 + python/rsa/tests/test_load_save_keys.py | 127 + python/rsa/tests/test_pem.py | 14 + python/rsa/tests/test_pkcs1.py | 94 + python/rsa/tests/test_strings.py | 28 + python/rsa/tests/test_transform.py | 67 + python/rsa/tests/test_varblock.py | 82 + python/slugid/.gitignore | 57 + python/slugid/.travis.yml | 27 + python/slugid/LICENSE | 363 + python/slugid/README.rst | 121 + python/slugid/requirements.txt | 2 + python/slugid/setup.py | 39 + python/slugid/slugid/__init__.py | 43 + python/slugid/slugid/slugid.py | 43 + python/slugid/test.py | 167 + python/slugid/tox.ini | 26 + python/virtualenv/AUTHORS.txt | 91 + python/virtualenv/LICENSE.txt 
| 22 + python/virtualenv/MANIFEST.in | 12 + python/virtualenv/PKG-INFO | 87 + python/virtualenv/README.rst | 31 + python/virtualenv/bin/rebuild-script.py | 73 + python/virtualenv/docs/Makefile | 130 + python/virtualenv/docs/changes.rst | 985 + python/virtualenv/docs/conf.py | 153 + python/virtualenv/docs/development.rst | 61 + python/virtualenv/docs/index.rst | 137 + python/virtualenv/docs/installation.rst | 58 + python/virtualenv/docs/make.bat | 170 + python/virtualenv/docs/reference.rst | 261 + python/virtualenv/docs/userguide.rst | 258 + python/virtualenv/scripts/virtualenv | 3 + python/virtualenv/setup.cfg | 8 + python/virtualenv/setup.py | 123 + python/virtualenv/site.py | 760 + python/virtualenv/tests/__init__.py | 0 python/virtualenv/tests/test_activate.sh | 96 + .../virtualenv/tests/test_activate_output.expected | 2 + python/virtualenv/tests/test_cmdline.py | 44 + python/virtualenv/tests/test_virtualenv.py | 139 + python/virtualenv/virtualenv.py | 2329 ++ python/virtualenv/virtualenv_embedded/activate.bat | 30 + python/virtualenv/virtualenv_embedded/activate.csh | 36 + .../virtualenv/virtualenv_embedded/activate.fish | 76 + python/virtualenv/virtualenv_embedded/activate.ps1 | 150 + python/virtualenv/virtualenv_embedded/activate.sh | 78 + .../virtualenv_embedded/activate_this.py | 34 + .../virtualenv/virtualenv_embedded/deactivate.bat | 19 + .../virtualenv_embedded/distutils-init.py | 101 + .../virtualenv/virtualenv_embedded/distutils.cfg | 6 + .../virtualenv/virtualenv_embedded/python-config | 78 + python/virtualenv/virtualenv_embedded/site.py | 758 + python/virtualenv/virtualenv_support/__init__.py | 0 .../argparse-1.4.0-py2.py3-none-any.whl | Bin 0 -> 23000 bytes .../pip-8.1.2-py2.py3-none-any.whl | Bin 0 -> 1198961 bytes .../setuptools-25.2.0-py2.py3-none-any.whl | Bin 0 -> 442860 bytes .../wheel-0.29.0-py2.py3-none-any.whl | Bin 0 -> 66878 bytes python/voluptuous/COPYING | 25 + python/voluptuous/MANIFEST.in | 2 + python/voluptuous/PKG-INFO | 611 + 
python/voluptuous/README.md | 596 + python/voluptuous/README.rst | 589 + python/voluptuous/setup.cfg | 10 + python/voluptuous/setup.py | 54 + python/voluptuous/tests.md | 268 + python/voluptuous/voluptuous.py | 1954 ++ python/which/LICENSE.txt | 21 + python/which/MANIFEST.in | 3 + python/which/Makefile.win | 21 + python/which/PKG-INFO | 21 + python/which/README.txt | 229 + python/which/TODO.txt | 113 + python/which/build.py | 442 + python/which/launcher.cpp | 404 + python/which/logo.jpg | Bin 0 -> 3635 bytes python/which/setup.py | 70 + python/which/test/test_which.py | 168 + python/which/test/testsupport.py | 83 + python/which/which.py | 335 + 1830 files changed, 306691 insertions(+) create mode 100644 python/PyECC/MANIFEST.in create mode 100644 python/PyECC/README.md create mode 100644 python/PyECC/ecc/Key.py create mode 100644 python/PyECC/ecc/Rabbit.py create mode 100644 python/PyECC/ecc/SecurityViolationException.py create mode 100644 python/PyECC/ecc/__init__.py create mode 100644 python/PyECC/ecc/curves.py create mode 100644 python/PyECC/ecc/eccrypt.py create mode 100644 python/PyECC/ecc/ecdsa.py create mode 100644 python/PyECC/ecc/elliptic.py create mode 100644 python/PyECC/ecc/encoding.py create mode 100644 python/PyECC/ecc/performance.py create mode 100644 python/PyECC/ecc/primes.py create mode 100644 python/PyECC/ecc/shacrypt.py create mode 100644 python/PyECC/setup.py create mode 100644 python/README create mode 100644 python/altgraph/MANIFEST.in create mode 100644 python/altgraph/PKG-INFO create mode 100644 python/altgraph/README.txt create mode 100644 python/altgraph/altgraph/Dot.py create mode 100644 python/altgraph/altgraph/Graph.py create mode 100644 python/altgraph/altgraph/GraphAlgo.py create mode 100644 python/altgraph/altgraph/GraphStat.py create mode 100644 python/altgraph/altgraph/GraphUtil.py create mode 100644 python/altgraph/altgraph/ObjectGraph.py create mode 100644 python/altgraph/altgraph/__init__.py create mode 100644 
python/altgraph/altgraph_tests/__init__.py create mode 100644 python/altgraph/altgraph_tests/test_altgraph.py create mode 100644 python/altgraph/altgraph_tests/test_dot.py create mode 100644 python/altgraph/altgraph_tests/test_graph.py create mode 100644 python/altgraph/altgraph_tests/test_graphstat.py create mode 100644 python/altgraph/altgraph_tests/test_graphutil.py create mode 100644 python/altgraph/altgraph_tests/test_object_graph.py create mode 100644 python/altgraph/doc/Makefile create mode 100644 python/altgraph/doc/_build/doctrees/changelog.doctree create mode 100644 python/altgraph/doc/_build/doctrees/core.doctree create mode 100644 python/altgraph/doc/_build/doctrees/dot.doctree create mode 100644 python/altgraph/doc/_build/doctrees/environment.pickle create mode 100644 python/altgraph/doc/_build/doctrees/graph.doctree create mode 100644 python/altgraph/doc/_build/doctrees/graphalgo.doctree create mode 100644 python/altgraph/doc/_build/doctrees/graphstat.doctree create mode 100644 python/altgraph/doc/_build/doctrees/graphutil.doctree create mode 100644 python/altgraph/doc/_build/doctrees/index.doctree create mode 100644 python/altgraph/doc/_build/doctrees/license.doctree create mode 100644 python/altgraph/doc/_build/doctrees/objectgraph.doctree create mode 100644 python/altgraph/doc/_build/html/.buildinfo create mode 100644 python/altgraph/doc/_build/html/_sources/changelog.txt create mode 100644 python/altgraph/doc/_build/html/_sources/core.txt create mode 100644 python/altgraph/doc/_build/html/_sources/dot.txt create mode 100644 python/altgraph/doc/_build/html/_sources/graph.txt create mode 100644 python/altgraph/doc/_build/html/_sources/graphalgo.txt create mode 100644 python/altgraph/doc/_build/html/_sources/graphstat.txt create mode 100644 python/altgraph/doc/_build/html/_sources/graphutil.txt create mode 100644 python/altgraph/doc/_build/html/_sources/index.txt create mode 100644 python/altgraph/doc/_build/html/_sources/license.txt create mode 
100644 python/altgraph/doc/_build/html/_sources/objectgraph.txt create mode 100644 python/altgraph/doc/_build/html/_static/ajax-loader.gif create mode 100644 python/altgraph/doc/_build/html/_static/basic.css create mode 100644 python/altgraph/doc/_build/html/_static/comment-bright.png create mode 100644 python/altgraph/doc/_build/html/_static/comment-close.png create mode 100644 python/altgraph/doc/_build/html/_static/comment.png create mode 100644 python/altgraph/doc/_build/html/_static/doctools.js create mode 100644 python/altgraph/doc/_build/html/_static/down-pressed.png create mode 100644 python/altgraph/doc/_build/html/_static/down.png create mode 100644 python/altgraph/doc/_build/html/_static/file.png create mode 100644 python/altgraph/doc/_build/html/_static/jquery.js create mode 100644 python/altgraph/doc/_build/html/_static/minus.png create mode 100644 python/altgraph/doc/_build/html/_static/nature.css create mode 100644 python/altgraph/doc/_build/html/_static/plus.png create mode 100644 python/altgraph/doc/_build/html/_static/pygments.css create mode 100644 python/altgraph/doc/_build/html/_static/searchtools.js create mode 100644 python/altgraph/doc/_build/html/_static/underscore.js create mode 100644 python/altgraph/doc/_build/html/_static/up-pressed.png create mode 100644 python/altgraph/doc/_build/html/_static/up.png create mode 100644 python/altgraph/doc/_build/html/_static/websupport.js create mode 100644 python/altgraph/doc/_build/html/changelog.html create mode 100644 python/altgraph/doc/_build/html/core.html create mode 100644 python/altgraph/doc/_build/html/dot.html create mode 100644 python/altgraph/doc/_build/html/genindex.html create mode 100644 python/altgraph/doc/_build/html/graph.html create mode 100644 python/altgraph/doc/_build/html/graphalgo.html create mode 100644 python/altgraph/doc/_build/html/graphstat.html create mode 100644 python/altgraph/doc/_build/html/graphutil.html create mode 100644 python/altgraph/doc/_build/html/index.html 
create mode 100644 python/altgraph/doc/_build/html/license.html create mode 100644 python/altgraph/doc/_build/html/objectgraph.html create mode 100644 python/altgraph/doc/_build/html/objects.inv create mode 100644 python/altgraph/doc/_build/html/py-modindex.html create mode 100644 python/altgraph/doc/_build/html/search.html create mode 100644 python/altgraph/doc/_build/html/searchindex.js create mode 100644 python/altgraph/doc/changelog.rst create mode 100644 python/altgraph/doc/conf.py create mode 100644 python/altgraph/doc/core.rst create mode 100644 python/altgraph/doc/dot.rst create mode 100644 python/altgraph/doc/graph.rst create mode 100644 python/altgraph/doc/graphalgo.rst create mode 100644 python/altgraph/doc/graphstat.rst create mode 100644 python/altgraph/doc/graphutil.rst create mode 100644 python/altgraph/doc/index.rst create mode 100644 python/altgraph/doc/license.rst create mode 100644 python/altgraph/doc/objectgraph.rst create mode 100644 python/altgraph/setup.cfg create mode 100644 python/altgraph/setup.py create mode 100644 python/bitstring/PKG-INFO create mode 100644 python/bitstring/README.txt create mode 100644 python/bitstring/bitstring.py create mode 100644 python/bitstring/doc/bitstring_manual.pdf create mode 100644 python/bitstring/release_notes.txt create mode 100644 python/bitstring/setup.py create mode 100644 python/bitstring/test/smalltestfile create mode 100644 python/bitstring/test/test.m1v create mode 100644 python/bitstring/test/test_bitarray.py create mode 100644 python/bitstring/test/test_bits.py create mode 100644 python/bitstring/test/test_bitstore.py create mode 100644 python/bitstring/test/test_bitstream.py create mode 100644 python/bitstring/test/test_bitstring.py create mode 100644 python/bitstring/test/test_constbitstream.py create mode 100644 python/blessings/LICENSE create mode 100644 python/blessings/MANIFEST.in create mode 100644 python/blessings/PKG-INFO create mode 100644 python/blessings/README.rst create mode 100644 
python/blessings/blessings/__init__.py create mode 100644 python/blessings/blessings/tests.py create mode 100644 python/blessings/setup.cfg create mode 100644 python/blessings/setup.py create mode 100644 python/blessings/tox.ini create mode 100644 python/compare-locales/compare_locales/__init__.py create mode 100644 python/compare-locales/compare_locales/checks.py create mode 100644 python/compare-locales/compare_locales/commands.py create mode 100644 python/compare-locales/compare_locales/compare.py create mode 100644 python/compare-locales/compare_locales/parser.py create mode 100644 python/compare-locales/compare_locales/paths.py create mode 100644 python/compare-locales/compare_locales/tests/__init__.py create mode 100644 python/compare-locales/compare_locales/tests/data/bug121341.properties create mode 100644 python/compare-locales/compare_locales/tests/data/test.properties create mode 100644 python/compare-locales/compare_locales/tests/data/triple-license.dtd create mode 100644 python/compare-locales/compare_locales/tests/test_checks.py create mode 100644 python/compare-locales/compare_locales/tests/test_compare.py create mode 100644 python/compare-locales/compare_locales/tests/test_dtd.py create mode 100644 python/compare-locales/compare_locales/tests/test_ini.py create mode 100644 python/compare-locales/compare_locales/tests/test_merge.py create mode 100644 python/compare-locales/compare_locales/tests/test_properties.py create mode 100644 python/compare-locales/compare_locales/tests/test_util.py create mode 100644 python/compare-locales/compare_locales/tests/test_webapps.py create mode 100644 python/compare-locales/compare_locales/util.py create mode 100644 python/compare-locales/compare_locales/webapps.py create mode 100644 python/compare-locales/docs/glossary.rst create mode 100644 python/compare-locales/docs/index.rst create mode 100644 python/compare-locales/mach_commands.py create mode 100644 python/compare-locales/moz.build create mode 100644 
python/configobj/PKG-INFO create mode 100644 python/configobj/configobj.py create mode 100644 python/configobj/setup.py create mode 100644 python/configobj/validate.py create mode 100644 python/devtools/migrate-l10n/README.rst create mode 100644 python/devtools/migrate-l10n/migrate/__init__.py create mode 100644 python/devtools/migrate-l10n/migrate/conf/bug1294186 create mode 100644 python/devtools/migrate-l10n/migrate/conf/bug1308500_1309191 create mode 100644 python/devtools/migrate-l10n/migrate/main.py create mode 100644 python/devtools/migrate-l10n/migrate/tests/__init__.py create mode 100644 python/eme/gen-eme-voucher.py create mode 100644 python/futures/CHANGES create mode 100644 python/futures/LICENSE create mode 100644 python/futures/MANIFEST.in create mode 100644 python/futures/PKG-INFO create mode 100644 python/futures/concurrent/__init__.py create mode 100644 python/futures/concurrent/futures/__init__.py create mode 100644 python/futures/concurrent/futures/_base.py create mode 100644 python/futures/concurrent/futures/process.py create mode 100644 python/futures/concurrent/futures/thread.py create mode 100644 python/futures/crawl.py create mode 100644 python/futures/docs/Makefile create mode 100644 python/futures/docs/conf.py create mode 100644 python/futures/docs/index.rst create mode 100644 python/futures/docs/make.bat create mode 100644 python/futures/primes.py create mode 100644 python/futures/setup.cfg create mode 100755 python/futures/setup.py create mode 100644 python/futures/test_futures.py create mode 100644 python/futures/tox.ini create mode 100644 python/gdbpp/gdbpp/__init__.py create mode 100644 python/gdbpp/gdbpp/linkedlist.py create mode 100644 python/gdbpp/gdbpp/owningthread.py create mode 100644 python/gdbpp/gdbpp/smartptr.py create mode 100644 python/gdbpp/gdbpp/string.py create mode 100644 python/gdbpp/gdbpp/tarray.py create mode 100644 python/gdbpp/gdbpp/thashtable.py create mode 100644 python/jsmin/jsmin/__init__.py create mode 100644 
python/jsmin/jsmin/test.py create mode 100644 python/jsmin/setup.cfg create mode 100644 python/jsmin/setup.py create mode 100644 python/lldbutils/README.txt create mode 100644 python/lldbutils/lldbutils/__init__.py create mode 100644 python/lldbutils/lldbutils/content.py create mode 100644 python/lldbutils/lldbutils/general.py create mode 100644 python/lldbutils/lldbutils/gfx.py create mode 100644 python/lldbutils/lldbutils/layout.py create mode 100644 python/lldbutils/lldbutils/utils.py create mode 100644 python/mach/README.rst create mode 100644 python/mach/bash-completion.sh create mode 100644 python/mach/docs/commands.rst create mode 100644 python/mach/docs/driver.rst create mode 100644 python/mach/docs/index.rst create mode 100644 python/mach/docs/logging.rst create mode 100644 python/mach/docs/settings.rst create mode 100644 python/mach/mach/__init__.py create mode 100644 python/mach/mach/base.py create mode 100644 python/mach/mach/commands/__init__.py create mode 100644 python/mach/mach/commands/commandinfo.py create mode 100644 python/mach/mach/commands/settings.py create mode 100644 python/mach/mach/config.py create mode 100644 python/mach/mach/decorators.py create mode 100644 python/mach/mach/dispatcher.py create mode 100644 python/mach/mach/locale/en_US/LC_MESSAGES/alias.mo create mode 100644 python/mach/mach/locale/en_US/LC_MESSAGES/alias.po create mode 100644 python/mach/mach/logging.py create mode 100644 python/mach/mach/main.py create mode 100644 python/mach/mach/mixin/__init__.py create mode 100644 python/mach/mach/mixin/logging.py create mode 100644 python/mach/mach/mixin/process.py create mode 100644 python/mach/mach/registrar.py create mode 100644 python/mach/mach/terminal.py create mode 100644 python/mach/mach/test/__init__.py create mode 100644 python/mach/mach/test/common.py create mode 100644 python/mach/mach/test/providers/__init__.py create mode 100644 python/mach/mach/test/providers/basic.py create mode 100644 
python/mach/mach/test/providers/conditions.py create mode 100644 python/mach/mach/test/providers/conditions_invalid.py create mode 100644 python/mach/mach/test/providers/throw.py create mode 100644 python/mach/mach/test/providers/throw2.py create mode 100644 python/mach/mach/test/test_conditions.py create mode 100644 python/mach/mach/test/test_config.py create mode 100644 python/mach/mach/test/test_dispatcher.py create mode 100644 python/mach/mach/test/test_entry_point.py create mode 100644 python/mach/mach/test/test_error_output.py create mode 100644 python/mach/mach/test/test_logger.py create mode 100644 python/mach/setup.py create mode 100644 python/mach_commands.py create mode 100644 python/macholib/MANIFEST.in create mode 100644 python/macholib/PKG-INFO create mode 100644 python/macholib/README.txt create mode 100644 python/macholib/doc/MachO.rst create mode 100644 python/macholib/doc/MachoOGraph.rst create mode 100644 python/macholib/doc/MachoOStandalone.rst create mode 100644 python/macholib/doc/Makefile create mode 100644 python/macholib/doc/SymbolTable.rst create mode 100644 python/macholib/doc/_build/doctrees/MachO.doctree create mode 100644 python/macholib/doc/_build/doctrees/MachoOGraph.doctree create mode 100644 python/macholib/doc/_build/doctrees/MachoOStandalone.doctree create mode 100644 python/macholib/doc/_build/doctrees/SymbolTable.doctree create mode 100644 python/macholib/doc/_build/doctrees/changelog.doctree create mode 100644 python/macholib/doc/_build/doctrees/dyld.doctree create mode 100644 python/macholib/doc/_build/doctrees/dylib.doctree create mode 100644 python/macholib/doc/_build/doctrees/environment.pickle create mode 100644 python/macholib/doc/_build/doctrees/framework.doctree create mode 100644 python/macholib/doc/_build/doctrees/index.doctree create mode 100644 python/macholib/doc/_build/doctrees/license.doctree create mode 100644 python/macholib/doc/_build/doctrees/macho_o.doctree create mode 100644 
python/macholib/doc/_build/doctrees/ptypes.doctree create mode 100644 python/macholib/doc/_build/doctrees/scripts.doctree create mode 100644 python/macholib/doc/_build/html/.buildinfo create mode 100644 python/macholib/doc/_build/html/MachO.html create mode 100644 python/macholib/doc/_build/html/MachoOGraph.html create mode 100644 python/macholib/doc/_build/html/MachoOStandalone.html create mode 100644 python/macholib/doc/_build/html/SymbolTable.html create mode 100644 python/macholib/doc/_build/html/_sources/MachO.txt create mode 100644 python/macholib/doc/_build/html/_sources/MachoOGraph.txt create mode 100644 python/macholib/doc/_build/html/_sources/MachoOStandalone.txt create mode 100644 python/macholib/doc/_build/html/_sources/SymbolTable.txt create mode 100644 python/macholib/doc/_build/html/_sources/changelog.txt create mode 100644 python/macholib/doc/_build/html/_sources/dyld.txt create mode 100644 python/macholib/doc/_build/html/_sources/dylib.txt create mode 100644 python/macholib/doc/_build/html/_sources/framework.txt create mode 100644 python/macholib/doc/_build/html/_sources/index.txt create mode 100644 python/macholib/doc/_build/html/_sources/license.txt create mode 100644 python/macholib/doc/_build/html/_sources/macho_o.txt create mode 100644 python/macholib/doc/_build/html/_sources/ptypes.txt create mode 100644 python/macholib/doc/_build/html/_sources/scripts.txt create mode 100644 python/macholib/doc/_build/html/_static/ajax-loader.gif create mode 100644 python/macholib/doc/_build/html/_static/basic.css create mode 100644 python/macholib/doc/_build/html/_static/comment-bright.png create mode 100644 python/macholib/doc/_build/html/_static/comment-close.png create mode 100644 python/macholib/doc/_build/html/_static/comment.png create mode 100644 python/macholib/doc/_build/html/_static/doctools.js create mode 100644 python/macholib/doc/_build/html/_static/down-pressed.png create mode 100644 python/macholib/doc/_build/html/_static/down.png create mode 
100644 python/macholib/doc/_build/html/_static/file.png create mode 100644 python/macholib/doc/_build/html/_static/jquery.js create mode 100644 python/macholib/doc/_build/html/_static/minus.png create mode 100644 python/macholib/doc/_build/html/_static/nature.css create mode 100644 python/macholib/doc/_build/html/_static/plus.png create mode 100644 python/macholib/doc/_build/html/_static/pygments.css create mode 100644 python/macholib/doc/_build/html/_static/searchtools.js create mode 100644 python/macholib/doc/_build/html/_static/underscore.js create mode 100644 python/macholib/doc/_build/html/_static/up-pressed.png create mode 100644 python/macholib/doc/_build/html/_static/up.png create mode 100644 python/macholib/doc/_build/html/_static/websupport.js create mode 100644 python/macholib/doc/_build/html/changelog.html create mode 100644 python/macholib/doc/_build/html/dyld.html create mode 100644 python/macholib/doc/_build/html/dylib.html create mode 100644 python/macholib/doc/_build/html/framework.html create mode 100644 python/macholib/doc/_build/html/genindex.html create mode 100644 python/macholib/doc/_build/html/index.html create mode 100644 python/macholib/doc/_build/html/license.html create mode 100644 python/macholib/doc/_build/html/macho_o.html create mode 100644 python/macholib/doc/_build/html/objects.inv create mode 100644 python/macholib/doc/_build/html/ptypes.html create mode 100644 python/macholib/doc/_build/html/py-modindex.html create mode 100644 python/macholib/doc/_build/html/scripts.html create mode 100644 python/macholib/doc/_build/html/search.html create mode 100644 python/macholib/doc/_build/html/searchindex.js create mode 100644 python/macholib/doc/changelog.rst create mode 100644 python/macholib/doc/conf.py create mode 100644 python/macholib/doc/dyld.rst create mode 100644 python/macholib/doc/dylib.rst create mode 100644 python/macholib/doc/framework.rst create mode 100644 python/macholib/doc/index.rst create mode 100644 
python/macholib/doc/license.rst create mode 100644 python/macholib/doc/macho_o.rst create mode 100644 python/macholib/doc/ptypes.rst create mode 100644 python/macholib/doc/scripts.rst create mode 100644 python/macholib/macholib/MachO.py create mode 100644 python/macholib/macholib/MachOGraph.py create mode 100644 python/macholib/macholib/MachOStandalone.py create mode 100644 python/macholib/macholib/SymbolTable.py create mode 100644 python/macholib/macholib/__init__.py create mode 100644 python/macholib/macholib/__main__.py create mode 100644 python/macholib/macholib/_cmdline.py create mode 100644 python/macholib/macholib/dyld.py create mode 100644 python/macholib/macholib/dylib.py create mode 100644 python/macholib/macholib/framework.py create mode 100644 python/macholib/macholib/itergraphreport.py create mode 100644 python/macholib/macholib/mach_o.py create mode 100644 python/macholib/macholib/macho_dump.py create mode 100644 python/macholib/macholib/macho_find.py create mode 100644 python/macholib/macholib/macho_standalone.py create mode 100644 python/macholib/macholib/ptypes.py create mode 100644 python/macholib/macholib/util.py create mode 100644 python/macholib/macholib_tests/__init__.py create mode 100644 python/macholib/macholib_tests/binaries/src/build.py create mode 100644 python/macholib/macholib_tests/test_MachO.py create mode 100644 python/macholib/macholib_tests/test_MachOGraph.py create mode 100644 python/macholib/macholib_tests/test_MachOStandalone.py create mode 100644 python/macholib/macholib_tests/test_SymbolTable.py create mode 100644 python/macholib/macholib_tests/test_command_line.py create mode 100644 python/macholib/macholib_tests/test_dyld.py create mode 100644 python/macholib/macholib_tests/test_dylib.py create mode 100644 python/macholib/macholib_tests/test_framework.py create mode 100644 python/macholib/macholib_tests/test_itergraphreport.py create mode 100644 python/macholib/macholib_tests/test_mach_o.py create mode 100644 
python/macholib/macholib_tests/test_ptypes.py create mode 100644 python/macholib/setup.cfg create mode 100644 python/macholib/setup.py create mode 100644 python/mock-1.0.0/LICENSE.txt create mode 100644 python/mock-1.0.0/MANIFEST.in create mode 100644 python/mock-1.0.0/PKG-INFO create mode 100644 python/mock-1.0.0/README.txt create mode 100644 python/mock-1.0.0/docs/changelog.txt create mode 100644 python/mock-1.0.0/docs/compare.txt create mode 100644 python/mock-1.0.0/docs/conf.py create mode 100644 python/mock-1.0.0/docs/examples.txt create mode 100644 python/mock-1.0.0/docs/getting-started.txt create mode 100644 python/mock-1.0.0/docs/helpers.txt create mode 100644 python/mock-1.0.0/docs/index.txt create mode 100644 python/mock-1.0.0/docs/magicmock.txt create mode 100644 python/mock-1.0.0/docs/mock.txt create mode 100644 python/mock-1.0.0/docs/patch.txt create mode 100644 python/mock-1.0.0/docs/sentinel.txt create mode 100644 python/mock-1.0.0/html/.doctrees/changelog.doctree create mode 100644 python/mock-1.0.0/html/.doctrees/compare.doctree create mode 100644 python/mock-1.0.0/html/.doctrees/examples.doctree create mode 100644 python/mock-1.0.0/html/.doctrees/getting-started.doctree create mode 100644 python/mock-1.0.0/html/.doctrees/index.doctree create mode 100644 python/mock-1.0.0/html/.doctrees/magicmock.doctree create mode 100644 python/mock-1.0.0/html/.doctrees/mock.doctree create mode 100644 python/mock-1.0.0/html/.doctrees/mocksignature.doctree create mode 100644 python/mock-1.0.0/html/.doctrees/patch.doctree create mode 100644 python/mock-1.0.0/html/.doctrees/sentinel.doctree create mode 100644 python/mock-1.0.0/html/_sources/changelog.txt create mode 100644 python/mock-1.0.0/html/_sources/compare.txt create mode 100644 python/mock-1.0.0/html/_sources/examples.txt create mode 100644 python/mock-1.0.0/html/_sources/getting-started.txt create mode 100644 python/mock-1.0.0/html/_sources/index.txt create mode 100644 
python/mock-1.0.0/html/_sources/magicmock.txt create mode 100644 python/mock-1.0.0/html/_sources/mock.txt create mode 100644 python/mock-1.0.0/html/_sources/mocksignature.txt create mode 100644 python/mock-1.0.0/html/_sources/patch.txt create mode 100644 python/mock-1.0.0/html/_sources/sentinel.txt create mode 100644 python/mock-1.0.0/html/_static/adctheme.css create mode 100644 python/mock-1.0.0/html/_static/basic.css create mode 100644 python/mock-1.0.0/html/_static/breadcrumb_background.png create mode 100644 python/mock-1.0.0/html/_static/default.css create mode 100644 python/mock-1.0.0/html/_static/doctools.js create mode 100644 python/mock-1.0.0/html/_static/documentation.png create mode 100644 python/mock-1.0.0/html/_static/file.png create mode 100644 python/mock-1.0.0/html/_static/header_sm_mid.png create mode 100644 python/mock-1.0.0/html/_static/jquery.js create mode 100644 python/mock-1.0.0/html/_static/minus.png create mode 100644 python/mock-1.0.0/html/_static/mobile.css create mode 100644 python/mock-1.0.0/html/_static/plus.png create mode 100644 python/mock-1.0.0/html/_static/pygments.css create mode 100644 python/mock-1.0.0/html/_static/scrn1.png create mode 100644 python/mock-1.0.0/html/_static/scrn2.png create mode 100644 python/mock-1.0.0/html/_static/searchfield_leftcap.png create mode 100644 python/mock-1.0.0/html/_static/searchfield_repeat.png create mode 100644 python/mock-1.0.0/html/_static/searchfield_rightcap.png create mode 100644 python/mock-1.0.0/html/_static/searchtools.js create mode 100644 python/mock-1.0.0/html/_static/sidebar.js create mode 100644 python/mock-1.0.0/html/_static/title_background.png create mode 100644 python/mock-1.0.0/html/_static/toc.js create mode 100644 python/mock-1.0.0/html/_static/triangle_closed.png create mode 100644 python/mock-1.0.0/html/_static/triangle_left.png create mode 100644 python/mock-1.0.0/html/_static/triangle_open.png create mode 100644 python/mock-1.0.0/html/_static/underscore.js create mode 
100644 python/mock-1.0.0/html/changelog.html create mode 100644 python/mock-1.0.0/html/compare.html create mode 100644 python/mock-1.0.0/html/examples.html create mode 100644 python/mock-1.0.0/html/genindex.html create mode 100644 python/mock-1.0.0/html/getting-started.html create mode 100644 python/mock-1.0.0/html/index.html create mode 100644 python/mock-1.0.0/html/magicmock.html create mode 100644 python/mock-1.0.0/html/mock.html create mode 100644 python/mock-1.0.0/html/mocksignature.html create mode 100644 python/mock-1.0.0/html/objects.inv create mode 100644 python/mock-1.0.0/html/output.txt create mode 100644 python/mock-1.0.0/html/patch.html create mode 100644 python/mock-1.0.0/html/search.html create mode 100644 python/mock-1.0.0/html/searchindex.js create mode 100644 python/mock-1.0.0/html/sentinel.html create mode 100644 python/mock-1.0.0/mock.py create mode 100644 python/mock-1.0.0/setup.cfg create mode 100755 python/mock-1.0.0/setup.py create mode 100644 python/mock-1.0.0/tests/__init__.py create mode 100644 python/mock-1.0.0/tests/_testwith.py create mode 100644 python/mock-1.0.0/tests/support.py create mode 100644 python/mock-1.0.0/tests/support_with.py create mode 100644 python/mock-1.0.0/tests/testcallable.py create mode 100644 python/mock-1.0.0/tests/testhelpers.py create mode 100644 python/mock-1.0.0/tests/testmagicmethods.py create mode 100644 python/mock-1.0.0/tests/testmock.py create mode 100644 python/mock-1.0.0/tests/testpatch.py create mode 100644 python/mock-1.0.0/tests/testsentinel.py create mode 100644 python/mock-1.0.0/tests/testwith.py create mode 100644 python/mock-1.0.0/tox.ini create mode 100644 python/moz.build create mode 100644 python/mozboot/README.rst create mode 100644 python/mozboot/bin/bootstrap-msys2.vbs create mode 100755 python/mozboot/bin/bootstrap.py create mode 100644 python/mozboot/mozboot/__init__.py create mode 100644 python/mozboot/mozboot/android.py create mode 100644 python/mozboot/mozboot/archlinux.py create 
mode 100644 python/mozboot/mozboot/base.py create mode 100644 python/mozboot/mozboot/bootstrap.py create mode 100644 python/mozboot/mozboot/centosfedora.py create mode 100644 python/mozboot/mozboot/debian.py create mode 100644 python/mozboot/mozboot/freebsd.py create mode 100644 python/mozboot/mozboot/gentoo.py create mode 100644 python/mozboot/mozboot/mach_commands.py create mode 100644 python/mozboot/mozboot/mozillabuild.py create mode 100644 python/mozboot/mozboot/openbsd.py create mode 100644 python/mozboot/mozboot/osx.py create mode 100644 python/mozboot/mozboot/util.py create mode 100644 python/mozboot/mozboot/windows.py create mode 100644 python/mozboot/setup.py create mode 100755 python/mozboot/support/ConEmu.xml create mode 100644 python/mozbuild/TODO create mode 100644 python/mozbuild/dumbmake/__init__.py create mode 100644 python/mozbuild/dumbmake/dumbmake.py create mode 100644 python/mozbuild/dumbmake/test/__init__.py create mode 100644 python/mozbuild/dumbmake/test/test_dumbmake.py create mode 100644 python/mozbuild/mozbuild/__init__.py create mode 100644 python/mozbuild/mozbuild/action/__init__.py create mode 100644 python/mozbuild/mozbuild/action/buildlist.py create mode 100644 python/mozbuild/mozbuild/action/cl.py create mode 100644 python/mozbuild/mozbuild/action/dump_env.py create mode 100644 python/mozbuild/mozbuild/action/explode_aar.py create mode 100644 python/mozbuild/mozbuild/action/file_generate.py create mode 100644 python/mozbuild/mozbuild/action/generate_browsersearch.py create mode 100644 python/mozbuild/mozbuild/action/generate_searchjson.py create mode 100644 python/mozbuild/mozbuild/action/generate_suggestedsites.py create mode 100644 python/mozbuild/mozbuild/action/generate_symbols_file.py create mode 100644 python/mozbuild/mozbuild/action/jar_maker.py create mode 100644 python/mozbuild/mozbuild/action/make_dmg.py create mode 100644 python/mozbuild/mozbuild/action/output_searchplugins_list.py create mode 100644 
python/mozbuild/mozbuild/action/package_fennec_apk.py create mode 100644 python/mozbuild/mozbuild/action/preprocessor.py create mode 100644 python/mozbuild/mozbuild/action/process_define_files.py create mode 100644 python/mozbuild/mozbuild/action/process_install_manifest.py create mode 100644 python/mozbuild/mozbuild/action/test_archive.py create mode 100644 python/mozbuild/mozbuild/action/webidl.py create mode 100644 python/mozbuild/mozbuild/action/xpccheck.py create mode 100755 python/mozbuild/mozbuild/action/xpidl-process.py create mode 100644 python/mozbuild/mozbuild/action/zip.py create mode 100644 python/mozbuild/mozbuild/android_version_code.py create mode 100644 python/mozbuild/mozbuild/artifacts.py create mode 100644 python/mozbuild/mozbuild/backend/__init__.py create mode 100644 python/mozbuild/mozbuild/backend/android_eclipse.py create mode 100644 python/mozbuild/mozbuild/backend/base.py create mode 100644 python/mozbuild/mozbuild/backend/common.py create mode 100644 python/mozbuild/mozbuild/backend/configenvironment.py create mode 100644 python/mozbuild/mozbuild/backend/cpp_eclipse.py create mode 100644 python/mozbuild/mozbuild/backend/fastermake.py create mode 100644 python/mozbuild/mozbuild/backend/mach_commands.py create mode 100644 python/mozbuild/mozbuild/backend/recursivemake.py create mode 100644 python/mozbuild/mozbuild/backend/templates/android_eclipse/.classpath create mode 100644 python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/com.android.ide.eclipse.adt.ApkBuilder.launch create mode 100644 python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/com.android.ide.eclipse.adt.PreCompilerBuilder.launch create mode 100644 python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/com.android.ide.eclipse.adt.ResourceManagerBuilder.launch create mode 100644 
python/mozbuild/mozbuild/backend/templates/android_eclipse/.externalToolBuilders/org.eclipse.jdt.core.javabuilder.launch create mode 100644 python/mozbuild/mozbuild/backend/templates/android_eclipse/AndroidManifest.xml create mode 100644 python/mozbuild/mozbuild/backend/templates/android_eclipse/gen/tmp create mode 100644 python/mozbuild/mozbuild/backend/templates/android_eclipse/lint.xml create mode 100644 python/mozbuild/mozbuild/backend/templates/android_eclipse/project.properties create mode 100644 python/mozbuild/mozbuild/backend/templates/android_eclipse_empty_resource_directory/.not_an_android_resource create mode 100644 python/mozbuild/mozbuild/backend/tup.py create mode 100644 python/mozbuild/mozbuild/backend/visualstudio.py create mode 100644 python/mozbuild/mozbuild/base.py create mode 100644 python/mozbuild/mozbuild/codecoverage/__init__.py create mode 100644 python/mozbuild/mozbuild/codecoverage/chrome_map.py create mode 100644 python/mozbuild/mozbuild/codecoverage/packager.py create mode 100644 python/mozbuild/mozbuild/compilation/__init__.py create mode 100644 python/mozbuild/mozbuild/compilation/codecomplete.py create mode 100644 python/mozbuild/mozbuild/compilation/database.py create mode 100644 python/mozbuild/mozbuild/compilation/util.py create mode 100644 python/mozbuild/mozbuild/compilation/warnings.py create mode 100644 python/mozbuild/mozbuild/config_status.py create mode 100644 python/mozbuild/mozbuild/configure/__init__.py create mode 100644 python/mozbuild/mozbuild/configure/check_debug_ranges.py create mode 100644 python/mozbuild/mozbuild/configure/constants.py create mode 100644 python/mozbuild/mozbuild/configure/help.py create mode 100644 python/mozbuild/mozbuild/configure/libstdcxx.py create mode 100644 python/mozbuild/mozbuild/configure/lint.py create mode 100644 python/mozbuild/mozbuild/configure/lint_util.py create mode 100644 python/mozbuild/mozbuild/configure/options.py create mode 100644 python/mozbuild/mozbuild/configure/util.py 
create mode 100644 python/mozbuild/mozbuild/controller/__init__.py create mode 100644 python/mozbuild/mozbuild/controller/building.py create mode 100644 python/mozbuild/mozbuild/controller/clobber.py create mode 100644 python/mozbuild/mozbuild/doctor.py create mode 100644 python/mozbuild/mozbuild/dotproperties.py create mode 100644 python/mozbuild/mozbuild/frontend/__init__.py create mode 100644 python/mozbuild/mozbuild/frontend/context.py create mode 100644 python/mozbuild/mozbuild/frontend/data.py create mode 100644 python/mozbuild/mozbuild/frontend/emitter.py create mode 100644 python/mozbuild/mozbuild/frontend/gyp_reader.py create mode 100644 python/mozbuild/mozbuild/frontend/mach_commands.py create mode 100644 python/mozbuild/mozbuild/frontend/reader.py create mode 100644 python/mozbuild/mozbuild/frontend/sandbox.py create mode 100644 python/mozbuild/mozbuild/html_build_viewer.py create mode 100644 python/mozbuild/mozbuild/jar.py create mode 100644 python/mozbuild/mozbuild/locale/en-US/LC_MESSAGES/mozbuild.mo create mode 100644 python/mozbuild/mozbuild/locale/en-US/LC_MESSAGES/mozbuild.po create mode 100644 python/mozbuild/mozbuild/mach_commands.py create mode 100644 python/mozbuild/mozbuild/makeutil.py create mode 100644 python/mozbuild/mozbuild/milestone.py create mode 100644 python/mozbuild/mozbuild/mozconfig.py create mode 100755 python/mozbuild/mozbuild/mozconfig_loader create mode 100755 python/mozbuild/mozbuild/mozinfo.py create mode 100644 python/mozbuild/mozbuild/preprocessor.py create mode 100644 python/mozbuild/mozbuild/pythonutil.py create mode 100644 python/mozbuild/mozbuild/resources/html-build-viewer/index.html create mode 100644 python/mozbuild/mozbuild/shellutil.py create mode 100644 python/mozbuild/mozbuild/sphinx.py create mode 100644 python/mozbuild/mozbuild/test/__init__.py create mode 100644 python/mozbuild/mozbuild/test/action/data/invalid/region.properties create mode 100644 
python/mozbuild/mozbuild/test/action/data/package_fennec_apk/assets/asset.txt create mode 100644 python/mozbuild/mozbuild/test/action/data/package_fennec_apk/classes.dex create mode 100644 python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input1.ap_ create mode 100644 python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input1/res/res.txt create mode 100644 python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input1/resources.arsc create mode 100644 python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2.apk create mode 100644 python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/assets/asset.txt create mode 100644 python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/assets/omni.ja create mode 100644 python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/classes.dex create mode 100644 python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/lib/lib.txt create mode 100644 python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/res/res.txt create mode 100644 python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/resources.arsc create mode 100644 python/mozbuild/mozbuild/test/action/data/package_fennec_apk/input2/root_file.txt create mode 100644 python/mozbuild/mozbuild/test/action/data/package_fennec_apk/lib/lib.txt create mode 100644 python/mozbuild/mozbuild/test/action/data/package_fennec_apk/omni.ja create mode 100644 python/mozbuild/mozbuild/test/action/data/package_fennec_apk/root_file.txt create mode 100644 python/mozbuild/mozbuild/test/action/data/valid-zh-CN/region.properties create mode 100644 python/mozbuild/mozbuild/test/action/test_buildlist.py create mode 100644 python/mozbuild/mozbuild/test/action/test_generate_browsersearch.py create mode 100644 python/mozbuild/mozbuild/test/action/test_package_fennec_apk.py create mode 100644 python/mozbuild/mozbuild/test/backend/__init__.py create mode 100644 
python/mozbuild/mozbuild/test/backend/common.py create mode 100644 python/mozbuild/mozbuild/test/backend/data/android_eclipse/library1/resources/values/strings.xml create mode 100644 python/mozbuild/mozbuild/test/backend/data/android_eclipse/main1/AndroidManifest.xml create mode 100644 python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/AndroidManifest.xml create mode 100644 python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/assets/dummy.txt create mode 100644 python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/extra.jar create mode 100644 python/mozbuild/mozbuild/test/backend/data/android_eclipse/main2/res/values/strings.xml create mode 100644 python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/AndroidManifest.xml create mode 100644 python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/a/A.java create mode 100644 python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/b/B.java create mode 100644 python/mozbuild/mozbuild/test/backend/data/android_eclipse/main3/c/C.java create mode 100644 python/mozbuild/mozbuild/test/backend/data/android_eclipse/main4 create mode 100644 python/mozbuild/mozbuild/test/backend/data/android_eclipse/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/android_eclipse/subdir/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/android_eclipse/subdir/submain/AndroidManifest.xml create mode 100644 python/mozbuild/mozbuild/test/backend/data/binary-components/bar/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/binary-components/foo/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/binary-components/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/branding-files/bar.ico create mode 100644 python/mozbuild/mozbuild/test/backend/data/branding-files/foo.ico create mode 100644 python/mozbuild/mozbuild/test/backend/data/branding-files/moz.build create mode 100644 
python/mozbuild/mozbuild/test/backend/data/branding-files/sub/quux.png create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/app/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/bar.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/bar.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/bar.jsm create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/baz.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/baz.jsm create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/components.manifest create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/foo.css create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/foo.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/foo.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/foo.jsm create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/jar.mn create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/prefs.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/qux.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/qux.jsm create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/resource create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/resource2 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/subdir/bar.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/defines/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/dist-files/install.rdf create mode 100644 python/mozbuild/mozbuild/test/backend/data/dist-files/main.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/dist-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports-generated/dom1.h create mode 100644 
python/mozbuild/mozbuild/test/backend/data/exports-generated/foo.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports-generated/gfx.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports-generated/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports-generated/mozilla1.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports/dom1.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports/dom2.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports/foo.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports/gfx.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports/mozilla1.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports/mozilla2.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports/pprio.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/final_target/both/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/final_target/dist-subdir/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/final_target/final-target/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/final_target/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/final_target/xpi-name/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/generated-files/foo-data create mode 100644 python/mozbuild/mozbuild/test/backend/data/generated-files/generate-bar.py create mode 100644 python/mozbuild/mozbuild/test/backend/data/generated-files/generate-foo.py create mode 100644 python/mozbuild/mozbuild/test/backend/data/generated-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/generated_includes/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/host-defines/moz.build create mode 100644 
python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/foo.h.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/ipdl_sources/bar/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/ipdl_sources/foo/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/ipdl_sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/jar-manifests/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory create mode 100644 python/mozbuild/mozbuild/test/backend/data/local_includes/foo/dummy_file_for_nonempty_directory create mode 100644 python/mozbuild/mozbuild/test/backend/data/local_includes/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/bar.res.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/cursor.cur create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/desktop1.ttf create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/desktop2.ttf create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/extra.manifest create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/font1.ttf create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/font2.ttf create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/foo.res create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/mobile.ttf create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/test.manifest create mode 100644 python/mozbuild/mozbuild/test/backend/data/sdk-files/bar.ico create mode 100644 
python/mozbuild/mozbuild/test/backend/data/sdk-files/foo.ico create mode 100644 python/mozbuild/mozbuild/test/backend/data/sdk-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/sdk-files/sub/quux.png create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/bar.c create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/bar.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/bar.mm create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/bar.s create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/baz.S create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/foo.S create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/foo.asm create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/foo.c create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/foo.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/foo.mm create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/stub0/Makefile.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/stub0/dir1/Makefile.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/stub0/dir1/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/stub0/dir2/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/stub0/dir3/Makefile.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/stub0/dir3/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/stub0/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/substitute_config_files/Makefile.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/substitute_config_files/foo.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/substitute_config_files/moz.build create mode 100644 
python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/another-file.sjs create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/browser.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/one.txt create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/two.txt create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/test_sub.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/support-file.txt create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/test_foo.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest1.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest2.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_bar.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_foo.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/instrumentation.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/moz.build create mode 100644 
python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/not_packaged.java create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/test_bar.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/xpcshell.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-written/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test_config/file.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/test_config/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/variable_passthru/Makefile.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/variable_passthru/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.c create mode 100644 python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.mm create mode 100644 python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.c create mode 100644 python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.mm create mode 100644 python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/bar.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/foo.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/moz.build create mode 100644 
python/mozbuild/mozbuild/test/backend/data/visual-studio/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/xpidl/config/makefiles/xpidl/Makefile.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/xpidl/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/test_android_eclipse.py create mode 100644 python/mozbuild/mozbuild/test/backend/test_build.py create mode 100644 python/mozbuild/mozbuild/test/backend/test_configenvironment.py create mode 100644 python/mozbuild/mozbuild/test/backend/test_recursivemake.py create mode 100644 python/mozbuild/mozbuild/test/backend/test_visualstudio.py create mode 100644 python/mozbuild/mozbuild/test/common.py create mode 100644 python/mozbuild/mozbuild/test/compilation/__init__.py create mode 100644 python/mozbuild/mozbuild/test/compilation/test_warnings.py create mode 100644 python/mozbuild/mozbuild/test/configure/common.py create mode 100644 python/mozbuild/mozbuild/test/configure/data/decorators.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/empty_mozconfig create mode 100644 python/mozbuild/mozbuild/test/configure/data/extra.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/imply_option/imm.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/imply_option/infer.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/imply_option/infer_ko.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/imply_option/negative.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/imply_option/simple.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/imply_option/values.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/included.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/moz.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/set_config.configure create mode 100644 
python/mozbuild/mozbuild/test/configure/data/set_define.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/subprocess.configure create mode 100644 python/mozbuild/mozbuild/test/configure/lint.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_checks_configure.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_compile_checks.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_configure.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_lint.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_moz_configure.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_options.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_util.py create mode 100644 python/mozbuild/mozbuild/test/controller/__init__.py create mode 100644 python/mozbuild/mozbuild/test/controller/test_ccachestats.py create mode 100644 python/mozbuild/mozbuild/test/controller/test_clobber.py create mode 100644 python/mozbuild/mozbuild/test/data/Makefile create mode 100644 python/mozbuild/mozbuild/test/data/bad.properties create mode 100644 python/mozbuild/mozbuild/test/data/test-dir/Makefile create mode 100644 python/mozbuild/mozbuild/test/data/test-dir/with/Makefile create mode 100644 python/mozbuild/mozbuild/test/data/test-dir/with/without/with/Makefile create mode 100644 python/mozbuild/mozbuild/test/data/test-dir/without/with/Makefile create mode 100644 python/mozbuild/mozbuild/test/data/valid.properties create mode 100644 python/mozbuild/mozbuild/test/frontend/__init__.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/android-res-dirs/dir1/foo create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/android-res-dirs/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/binary-components/bar/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/binary-components/foo/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/binary-components/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/branding-files/bar.ico create mode 100644 python/mozbuild/mozbuild/test/frontend/data/branding-files/baz.png create mode 100644 python/mozbuild/mozbuild/test/frontend/data/branding-files/foo.xpm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/branding-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/branding-files/quux.icns create mode 100644 python/mozbuild/mozbuild/test/frontend/data/config-file-substitution/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/shallow/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/the/depths/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/defines/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/install.rdf create mode 100644 python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/dist-files/install.rdf create mode 100644 python/mozbuild/mozbuild/test/frontend/data/dist-files/main.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/dist-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports-generated/foo.h create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/exports-generated/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports-generated/mozilla1.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/foo.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports-missing/foo.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports-missing/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports-missing/mozilla1.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/bar.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/baz.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/dom1.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/dom2.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/dom3.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/foo.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/gfx.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/mem.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/mem2.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/mozilla1.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/mozilla2.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/pprio.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/pprthred.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/bad-assignment/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/different-matchers/moz.build create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/subcomponent/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/simple/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/static/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-info/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/module.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/reftest-stylo.list create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/reftest.list create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/test1-ref.html create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/reftests/test1.html create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/xpcshell/test_default_mod.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/default/tests/xpcshell/xpcshell.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/base.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/browser/browser.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/browser/test_mod.js create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/src/module.jsm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/src/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/test_general.html create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/simple/tests/test_specific.html create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/src/bar.jsm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/src/submodule/foo.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/test_bar.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/test_simple.html create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/test_specific.html create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-test-metadata/tagged/tests/xpcshell.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/final-target-pp-files-non-srcdir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/script.py create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/script.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/script.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/script.rb create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-no-script/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/a.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/b.cc create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/c.cxx create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/d.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/e.m create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/f.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/g.S create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/h.s create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/i.asm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated_includes/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-defines/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-sources/a.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-sources/b.cc create mode 
100644 python/mozbuild/mozbuild/test/frontend/data/host-sources/c.cxx create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-sources/d.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-sources/e.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-sources/f.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-basic/included.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-basic/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-1.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-2.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-file-stack/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-missing/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-outside-topsrcdir/relative.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child2.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/grandchild/grandchild.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/parent.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/sibling.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/bar/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/baz/moz.build create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/bar/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/foo/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/jar-manifests-multiple-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/jar-manifests/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/library-defines/liba/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/library-defines/libb/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/library-defines/libc/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/library-defines/libd/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/library-defines/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory create mode 100644 python/mozbuild/mozbuild/test/frontend/data/local_includes/foo/dummy_file_for_nonempty_directory create mode 100644 python/mozbuild/mozbuild/test/frontend/data/local_includes/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/missing-local-includes/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/Cargo.toml create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/program/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-bad-dir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-basic/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-empty-list/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-error-func/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/child.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-missing-include/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-outside-topsrcdir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-read-unknown-global/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-repeated-dir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-script-error/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-syntax/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-write-bad-value/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-write-unknown-global/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/file create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/file create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file1 create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file2 create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/file create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/dir1/dir2/dir3/file create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/file create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/file create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/file create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/moz.build create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-no-cargo-toml/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-no-profile-section/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-no-profile-section/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-non-abort-panic/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-non-abort-panic/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sdk-files/bar.ico create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sdk-files/baz.png create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sdk-files/foo.xpm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sdk-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sdk-files/quux.icns create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources-just-c/d.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources-just-c/e.m create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources-just-c/g.S create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources-just-c/h.s create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources-just-c/i.asm create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/sources-just-c/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/a.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/b.cc create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/c.cxx create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/d.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/e.m create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/f.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/g.S create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/h.s create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/i.asm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/templates/templates.mozbuild create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-harness-files-root/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-harness-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-harness-files/runtests.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-harness-files/utils.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-install-shared-lib/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/foo.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/three/moz.build create mode 
100644 python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/foo.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/absolute-support.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/foo.txt create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/test_file.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/bar.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/foo.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/test_baz.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/included-reftest.list create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest-stylo.list create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest.list create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/empty.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-inactive-ignored/test_inactive.html create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/common.ini create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/test_foo.html create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-subdir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-subdir/subdir.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-subdir/test_foo.html create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/foo.txt create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/just-support.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/dir1/bar create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/foo create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/browser.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/chrome.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/crashtest.list create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/metro.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest-stylo.list create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest.list create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_a11y.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_browser.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_chrome.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_foo.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_metro.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_mochitest.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_xpcshell.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/xpcshell.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-manifest/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/xpcshell.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/test_foo.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/support-file.txt create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/another-file.sjs create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/browser.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/one.txt create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/two.txt create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/test_sub.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/support-file.txt create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/test_foo.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/another-file.sjs create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/browser.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/one.txt create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/two.txt create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/test_sub.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/support-file.txt create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/test_foo.js create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test_foo create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-python-unit-test-missing/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir-missing-generated/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/foo.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/foo.symbols create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/parallel/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/regular/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/test/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-outside-topsrcdir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/bar/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/foo/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/bar/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/foo/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/moz.build create mode 
100644 python/mozbuild/mozbuild/test/frontend/data/traversal-simple/bar/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/biz/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-simple/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/bar.cxx create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c2.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/foo.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc1.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc2.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/quux.cc create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources/bar.cxx create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources/c1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources/c2.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources/foo.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc1.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc2.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources/quux.cc create mode 100644 python/mozbuild/mozbuild/test/frontend/data/use-yasm/moz.build create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/variable-passthru/bans.S create mode 100644 python/mozbuild/mozbuild/test/frontend/data/variable-passthru/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/xpidl-module-no-sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/test_context.py create mode 100644 python/mozbuild/mozbuild/test/frontend/test_emitter.py create mode 100644 python/mozbuild/mozbuild/test/frontend/test_namespaces.py create mode 100644 python/mozbuild/mozbuild/test/frontend/test_reader.py create mode 100644 python/mozbuild/mozbuild/test/frontend/test_sandbox.py create mode 100644 python/mozbuild/mozbuild/test/test_android_version_code.py create mode 100644 python/mozbuild/mozbuild/test/test_base.py create mode 100644 python/mozbuild/mozbuild/test/test_containers.py create mode 100644 python/mozbuild/mozbuild/test/test_dotproperties.py create mode 100644 python/mozbuild/mozbuild/test/test_expression.py create mode 100644 python/mozbuild/mozbuild/test/test_jarmaker.py create mode 100644 python/mozbuild/mozbuild/test/test_line_endings.py create mode 100644 python/mozbuild/mozbuild/test/test_makeutil.py create mode 100644 python/mozbuild/mozbuild/test/test_mozconfig.py create mode 100755 python/mozbuild/mozbuild/test/test_mozinfo.py create mode 100644 python/mozbuild/mozbuild/test/test_preprocessor.py create mode 100644 python/mozbuild/mozbuild/test/test_pythonutil.py create mode 
100644 python/mozbuild/mozbuild/test/test_testing.py create mode 100644 python/mozbuild/mozbuild/test/test_util.py create mode 100644 python/mozbuild/mozbuild/testing.py create mode 100644 python/mozbuild/mozbuild/util.py create mode 100644 python/mozbuild/mozbuild/vendor_rust.py create mode 100644 python/mozbuild/mozbuild/virtualenv.py create mode 100644 python/mozbuild/mozpack/__init__.py create mode 100644 python/mozbuild/mozpack/archive.py create mode 100644 python/mozbuild/mozpack/chrome/__init__.py create mode 100644 python/mozbuild/mozpack/chrome/flags.py create mode 100644 python/mozbuild/mozpack/chrome/manifest.py create mode 100644 python/mozbuild/mozpack/copier.py create mode 100644 python/mozbuild/mozpack/dmg.py create mode 100644 python/mozbuild/mozpack/errors.py create mode 100644 python/mozbuild/mozpack/executables.py create mode 100644 python/mozbuild/mozpack/files.py create mode 100644 python/mozbuild/mozpack/hg.py create mode 100644 python/mozbuild/mozpack/manifests.py create mode 100644 python/mozbuild/mozpack/mozjar.py create mode 100644 python/mozbuild/mozpack/packager/__init__.py create mode 100644 python/mozbuild/mozpack/packager/formats.py create mode 100644 python/mozbuild/mozpack/packager/l10n.py create mode 100644 python/mozbuild/mozpack/packager/unpack.py create mode 100644 python/mozbuild/mozpack/path.py create mode 100644 python/mozbuild/mozpack/test/__init__.py create mode 100644 python/mozbuild/mozpack/test/data/test_data create mode 100644 python/mozbuild/mozpack/test/support/minify_js_verify.py create mode 100644 python/mozbuild/mozpack/test/test_archive.py create mode 100644 python/mozbuild/mozpack/test/test_chrome_flags.py create mode 100644 python/mozbuild/mozpack/test/test_chrome_manifest.py create mode 100644 python/mozbuild/mozpack/test/test_copier.py create mode 100644 python/mozbuild/mozpack/test/test_errors.py create mode 100644 python/mozbuild/mozpack/test/test_files.py create mode 100644 
python/mozbuild/mozpack/test/test_manifests.py create mode 100644 python/mozbuild/mozpack/test/test_mozjar.py create mode 100644 python/mozbuild/mozpack/test/test_packager.py create mode 100644 python/mozbuild/mozpack/test/test_packager_formats.py create mode 100644 python/mozbuild/mozpack/test/test_packager_l10n.py create mode 100644 python/mozbuild/mozpack/test/test_packager_unpack.py create mode 100644 python/mozbuild/mozpack/test/test_path.py create mode 100644 python/mozbuild/mozpack/test/test_unify.py create mode 100644 python/mozbuild/mozpack/unify.py create mode 100644 python/mozbuild/setup.py create mode 100644 python/mozlint/mozlint/__init__.py create mode 100644 python/mozlint/mozlint/cli.py create mode 100644 python/mozlint/mozlint/errors.py create mode 100644 python/mozlint/mozlint/formatters/__init__.py create mode 100644 python/mozlint/mozlint/formatters/stylish.py create mode 100644 python/mozlint/mozlint/formatters/treeherder.py create mode 100644 python/mozlint/mozlint/parser.py create mode 100644 python/mozlint/mozlint/pathutils.py create mode 100644 python/mozlint/mozlint/result.py create mode 100644 python/mozlint/mozlint/roller.py create mode 100644 python/mozlint/mozlint/types.py create mode 100644 python/mozlint/mozlint/vcs.py create mode 100644 python/mozlint/setup.py create mode 100644 python/mozlint/test/__init__.py create mode 100644 python/mozlint/test/conftest.py create mode 100644 python/mozlint/test/files/foobar.js create mode 100644 python/mozlint/test/files/foobar.py create mode 100644 python/mozlint/test/files/no_foobar.js create mode 100644 python/mozlint/test/linters/badreturncode.lint create mode 100644 python/mozlint/test/linters/explicit_path.lint create mode 100644 python/mozlint/test/linters/external.lint create mode 100644 python/mozlint/test/linters/invalid_exclude.lint create mode 100644 python/mozlint/test/linters/invalid_extension.lnt create mode 100644 python/mozlint/test/linters/invalid_include.lint create mode 
100644 python/mozlint/test/linters/invalid_type.lint create mode 100644 python/mozlint/test/linters/missing_attrs.lint create mode 100644 python/mozlint/test/linters/missing_definition.lint create mode 100644 python/mozlint/test/linters/raises.lint create mode 100644 python/mozlint/test/linters/regex.lint create mode 100644 python/mozlint/test/linters/string.lint create mode 100644 python/mozlint/test/linters/structured.lint create mode 100644 python/mozlint/test/test_formatters.py create mode 100644 python/mozlint/test/test_parser.py create mode 100644 python/mozlint/test/test_roller.py create mode 100644 python/mozlint/test/test_types.py create mode 100644 python/mozversioncontrol/mozversioncontrol/__init__.py create mode 100644 python/mozversioncontrol/mozversioncontrol/repoupdate.py create mode 100644 python/psutil/CREDITS create mode 100644 python/psutil/HISTORY.rst create mode 100644 python/psutil/INSTALL.rst create mode 100644 python/psutil/LICENSE create mode 100644 python/psutil/MANIFEST.in create mode 100644 python/psutil/Makefile create mode 100644 python/psutil/PKG-INFO create mode 100644 python/psutil/README.rst create mode 100644 python/psutil/TODO create mode 100644 python/psutil/docs/Makefile create mode 100644 python/psutil/docs/README create mode 100644 python/psutil/docs/_static/copybutton.js create mode 100644 python/psutil/docs/_static/favicon.ico create mode 100644 python/psutil/docs/_static/logo.png create mode 100644 python/psutil/docs/_static/sidebar.js create mode 100644 python/psutil/docs/_template/globaltoc.html create mode 100644 python/psutil/docs/_template/indexcontent.html create mode 100644 python/psutil/docs/_template/indexsidebar.html create mode 100644 python/psutil/docs/_template/page.html create mode 100644 python/psutil/docs/_themes/pydoctheme/static/pydoctheme.css create mode 100644 python/psutil/docs/_themes/pydoctheme/theme.conf create mode 100644 python/psutil/docs/conf.py create mode 100644 python/psutil/docs/index.rst 
create mode 100644 python/psutil/docs/make.bat create mode 100644 python/psutil/docs/xxx create mode 100755 python/psutil/examples/disk_usage.py create mode 100755 python/psutil/examples/free.py create mode 100644 python/psutil/examples/ifconfig.py create mode 100755 python/psutil/examples/iotop.py create mode 100755 python/psutil/examples/killall.py create mode 100755 python/psutil/examples/meminfo.py create mode 100755 python/psutil/examples/netstat.py create mode 100755 python/psutil/examples/nettop.py create mode 100755 python/psutil/examples/pidof.py create mode 100755 python/psutil/examples/pmap.py create mode 100755 python/psutil/examples/process_detail.py create mode 100644 python/psutil/examples/ps.py create mode 100644 python/psutil/examples/pstree.py create mode 100755 python/psutil/examples/top.py create mode 100755 python/psutil/examples/who.py create mode 100644 python/psutil/make.bat create mode 100644 python/psutil/psutil/__init__.py create mode 100644 python/psutil/psutil/_common.py create mode 100644 python/psutil/psutil/_compat.py create mode 100644 python/psutil/psutil/_psbsd.py create mode 100644 python/psutil/psutil/_pslinux.py create mode 100644 python/psutil/psutil/_psosx.py create mode 100644 python/psutil/psutil/_psposix.py create mode 100644 python/psutil/psutil/_pssunos.py create mode 100644 python/psutil/psutil/_psutil_bsd.c create mode 100644 python/psutil/psutil/_psutil_bsd.h create mode 100644 python/psutil/psutil/_psutil_common.c create mode 100644 python/psutil/psutil/_psutil_common.h create mode 100644 python/psutil/psutil/_psutil_linux.c create mode 100644 python/psutil/psutil/_psutil_linux.h create mode 100644 python/psutil/psutil/_psutil_osx.c create mode 100644 python/psutil/psutil/_psutil_osx.h create mode 100644 python/psutil/psutil/_psutil_posix.c create mode 100644 python/psutil/psutil/_psutil_posix.h create mode 100644 python/psutil/psutil/_psutil_sunos.c create mode 100644 python/psutil/psutil/_psutil_sunos.h create mode 
100644 python/psutil/psutil/_psutil_windows.c create mode 100644 python/psutil/psutil/_psutil_windows.h create mode 100644 python/psutil/psutil/_pswindows.py create mode 100644 python/psutil/psutil/arch/bsd/process_info.c create mode 100644 python/psutil/psutil/arch/bsd/process_info.h create mode 100644 python/psutil/psutil/arch/osx/process_info.c create mode 100644 python/psutil/psutil/arch/osx/process_info.h create mode 100644 python/psutil/psutil/arch/windows/glpi.h create mode 100644 python/psutil/psutil/arch/windows/inet_ntop.c create mode 100644 python/psutil/psutil/arch/windows/inet_ntop.h create mode 100644 python/psutil/psutil/arch/windows/ntextapi.h create mode 100644 python/psutil/psutil/arch/windows/process_handles.c create mode 100644 python/psutil/psutil/arch/windows/process_handles.h create mode 100644 python/psutil/psutil/arch/windows/process_info.c create mode 100644 python/psutil/psutil/arch/windows/process_info.h create mode 100644 python/psutil/psutil/arch/windows/security.c create mode 100644 python/psutil/psutil/arch/windows/security.h create mode 100644 python/psutil/setup.cfg create mode 100644 python/psutil/setup.py create mode 100644 python/psutil/test/README.rst create mode 100644 python/psutil/test/_bsd.py create mode 100644 python/psutil/test/_linux.py create mode 100644 python/psutil/test/_osx.py create mode 100644 python/psutil/test/_posix.py create mode 100644 python/psutil/test/_sunos.py create mode 100644 python/psutil/test/_windows.py create mode 100644 python/psutil/test/test_memory_leaks.py create mode 100644 python/psutil/test/test_psutil.py create mode 100644 python/psutil/tox.ini create mode 100644 python/py/AUTHORS create mode 100644 python/py/LICENSE create mode 100644 python/py/MANIFEST.in create mode 100644 python/py/PKG-INFO create mode 100644 python/py/README.txt create mode 100644 python/py/py/__init__.py create mode 100644 python/py/py/__metainfo.py create mode 100644 python/py/py/_apipkg.py create mode 100644 
python/py/py/_builtin.py create mode 100644 python/py/py/_code/__init__.py create mode 100644 python/py/py/_code/_assertionnew.py create mode 100644 python/py/py/_code/_assertionold.py create mode 100644 python/py/py/_code/_py2traceback.py create mode 100644 python/py/py/_code/assertion.py create mode 100644 python/py/py/_code/code.py create mode 100644 python/py/py/_code/source.py create mode 100644 python/py/py/_error.py create mode 100644 python/py/py/_iniconfig.py create mode 100644 python/py/py/_io/__init__.py create mode 100644 python/py/py/_io/capture.py create mode 100644 python/py/py/_io/saferepr.py create mode 100644 python/py/py/_io/terminalwriter.py create mode 100644 python/py/py/_log/__init__.py create mode 100644 python/py/py/_log/log.py create mode 100644 python/py/py/_log/warning.py create mode 100644 python/py/py/_path/__init__.py create mode 100644 python/py/py/_path/cacheutil.py create mode 100644 python/py/py/_path/common.py create mode 100644 python/py/py/_path/local.py create mode 100644 python/py/py/_path/svnurl.py create mode 100644 python/py/py/_path/svnwc.py create mode 100644 python/py/py/_process/__init__.py create mode 100644 python/py/py/_process/cmdexec.py create mode 100644 python/py/py/_process/forkedfunc.py create mode 100644 python/py/py/_process/killproc.py create mode 100644 python/py/py/_std.py create mode 100644 python/py/py/_xmlgen.py create mode 100644 python/py/py/test.py create mode 100644 python/py/setup.cfg create mode 100644 python/py/setup.py create mode 100644 python/pyasn1-modules/CHANGES create mode 100644 python/pyasn1-modules/LICENSE create mode 100644 python/pyasn1-modules/MANIFEST.in create mode 100644 python/pyasn1-modules/PKG-INFO create mode 100644 python/pyasn1-modules/README create mode 100644 python/pyasn1-modules/pyasn1_modules/__init__.py create mode 100644 python/pyasn1-modules/pyasn1_modules/pem.py create mode 100644 python/pyasn1-modules/pyasn1_modules/pkcs12.py create mode 100644 
python/pyasn1-modules/pyasn1_modules/rfc1155.py create mode 100644 python/pyasn1-modules/pyasn1_modules/rfc1157.py create mode 100644 python/pyasn1-modules/pyasn1_modules/rfc1901.py create mode 100644 python/pyasn1-modules/pyasn1_modules/rfc1902.py create mode 100644 python/pyasn1-modules/pyasn1_modules/rfc1905.py create mode 100644 python/pyasn1-modules/pyasn1_modules/rfc2251.py create mode 100644 python/pyasn1-modules/pyasn1_modules/rfc2314.py create mode 100644 python/pyasn1-modules/pyasn1_modules/rfc2315.py create mode 100644 python/pyasn1-modules/pyasn1_modules/rfc2437.py create mode 100644 python/pyasn1-modules/pyasn1_modules/rfc2459.py create mode 100644 python/pyasn1-modules/pyasn1_modules/rfc2511.py create mode 100644 python/pyasn1-modules/pyasn1_modules/rfc2560.py create mode 100644 python/pyasn1-modules/pyasn1_modules/rfc3412.py create mode 100644 python/pyasn1-modules/pyasn1_modules/rfc3414.py create mode 100644 python/pyasn1-modules/pyasn1_modules/rfc3447.py create mode 100644 python/pyasn1-modules/pyasn1_modules/rfc4210.py create mode 100644 python/pyasn1-modules/pyasn1_modules/rfc5208.py create mode 100644 python/pyasn1-modules/setup.cfg create mode 100644 python/pyasn1-modules/setup.py create mode 100644 python/pyasn1-modules/test/cmp.sh create mode 100644 python/pyasn1-modules/test/crl.sh create mode 100644 python/pyasn1-modules/test/crmf.sh create mode 100644 python/pyasn1-modules/test/ocspreq.sh create mode 100644 python/pyasn1-modules/test/ocsprsp.sh create mode 100644 python/pyasn1-modules/test/pkcs1.sh create mode 100644 python/pyasn1-modules/test/pkcs10.sh create mode 100644 python/pyasn1-modules/test/pkcs7.sh create mode 100644 python/pyasn1-modules/test/pkcs8.sh create mode 100644 python/pyasn1-modules/test/x509dump.sh create mode 100755 python/pyasn1-modules/tools/cmpdump.py create mode 100755 python/pyasn1-modules/tools/crldump.py create mode 100755 python/pyasn1-modules/tools/crmfdump.py create mode 100755 
python/pyasn1-modules/tools/ocspclient.py create mode 100755 python/pyasn1-modules/tools/ocspreqdump.py create mode 100755 python/pyasn1-modules/tools/ocsprspdump.py create mode 100755 python/pyasn1-modules/tools/ocspserver.py create mode 100755 python/pyasn1-modules/tools/pkcs10dump.py create mode 100755 python/pyasn1-modules/tools/pkcs1dump.py create mode 100755 python/pyasn1-modules/tools/pkcs7dump.py create mode 100755 python/pyasn1-modules/tools/pkcs8dump.py create mode 100755 python/pyasn1-modules/tools/snmpget.py create mode 100755 python/pyasn1-modules/tools/x509dump.py create mode 100644 python/pyasn1/CHANGES create mode 100644 python/pyasn1/LICENSE create mode 100644 python/pyasn1/MANIFEST.in create mode 100644 python/pyasn1/PKG-INFO create mode 100644 python/pyasn1/README create mode 100644 python/pyasn1/THANKS create mode 100644 python/pyasn1/TODO create mode 100644 python/pyasn1/doc/codecs.html create mode 100644 python/pyasn1/doc/constraints.html create mode 100644 python/pyasn1/doc/constructed.html create mode 100644 python/pyasn1/doc/intro.html create mode 100644 python/pyasn1/doc/pyasn1-tutorial.html create mode 100644 python/pyasn1/doc/scalar.html create mode 100644 python/pyasn1/doc/tagging.html create mode 100644 python/pyasn1/pyasn1/__init__.py create mode 100644 python/pyasn1/pyasn1/codec/__init__.py create mode 100644 python/pyasn1/pyasn1/codec/ber/__init__.py create mode 100644 python/pyasn1/pyasn1/codec/ber/decoder.py create mode 100644 python/pyasn1/pyasn1/codec/ber/encoder.py create mode 100644 python/pyasn1/pyasn1/codec/ber/eoo.py create mode 100644 python/pyasn1/pyasn1/codec/cer/__init__.py create mode 100644 python/pyasn1/pyasn1/codec/cer/decoder.py create mode 100644 python/pyasn1/pyasn1/codec/cer/encoder.py create mode 100644 python/pyasn1/pyasn1/codec/der/__init__.py create mode 100644 python/pyasn1/pyasn1/codec/der/decoder.py create mode 100644 python/pyasn1/pyasn1/codec/der/encoder.py create mode 100644 
python/pyasn1/pyasn1/compat/__init__.py create mode 100644 python/pyasn1/pyasn1/compat/octets.py create mode 100644 python/pyasn1/pyasn1/debug.py create mode 100644 python/pyasn1/pyasn1/error.py create mode 100644 python/pyasn1/pyasn1/type/__init__.py create mode 100644 python/pyasn1/pyasn1/type/base.py create mode 100644 python/pyasn1/pyasn1/type/char.py create mode 100644 python/pyasn1/pyasn1/type/constraint.py create mode 100644 python/pyasn1/pyasn1/type/error.py create mode 100644 python/pyasn1/pyasn1/type/namedtype.py create mode 100644 python/pyasn1/pyasn1/type/namedval.py create mode 100644 python/pyasn1/pyasn1/type/tag.py create mode 100644 python/pyasn1/pyasn1/type/tagmap.py create mode 100644 python/pyasn1/pyasn1/type/univ.py create mode 100644 python/pyasn1/pyasn1/type/useful.py create mode 100644 python/pyasn1/setup.cfg create mode 100644 python/pyasn1/setup.py create mode 100644 python/pyasn1/test/__init__.py create mode 100644 python/pyasn1/test/codec/__init__.py create mode 100644 python/pyasn1/test/codec/ber/__init__.py create mode 100644 python/pyasn1/test/codec/ber/suite.py create mode 100644 python/pyasn1/test/codec/ber/test_decoder.py create mode 100644 python/pyasn1/test/codec/ber/test_encoder.py create mode 100644 python/pyasn1/test/codec/cer/__init__.py create mode 100644 python/pyasn1/test/codec/cer/suite.py create mode 100644 python/pyasn1/test/codec/cer/test_decoder.py create mode 100644 python/pyasn1/test/codec/cer/test_encoder.py create mode 100644 python/pyasn1/test/codec/der/__init__.py create mode 100644 python/pyasn1/test/codec/der/suite.py create mode 100644 python/pyasn1/test/codec/der/test_decoder.py create mode 100644 python/pyasn1/test/codec/der/test_encoder.py create mode 100644 python/pyasn1/test/codec/suite.py create mode 100644 python/pyasn1/test/suite.py create mode 100644 python/pyasn1/test/type/__init__.py create mode 100644 python/pyasn1/test/type/suite.py create mode 100644 python/pyasn1/test/type/test_constraint.py 
create mode 100644 python/pyasn1/test/type/test_namedtype.py create mode 100644 python/pyasn1/test/type/test_tag.py create mode 100644 python/pyasn1/test/type/test_univ.py create mode 100644 python/pylru/pylru.py create mode 100644 python/pylru/test.py create mode 100644 python/pystache/.gitignore create mode 100644 python/pystache/.gitmodules create mode 100644 python/pystache/.travis.yml create mode 100644 python/pystache/HISTORY.md create mode 100644 python/pystache/LICENSE create mode 100644 python/pystache/MANIFEST.in create mode 100644 python/pystache/README.md create mode 100644 python/pystache/TODO.md create mode 100644 python/pystache/gh/images/logo_phillips.png create mode 100644 python/pystache/pystache/__init__.py create mode 100644 python/pystache/pystache/commands/__init__.py create mode 100644 python/pystache/pystache/commands/render.py create mode 100644 python/pystache/pystache/commands/test.py create mode 100644 python/pystache/pystache/common.py create mode 100644 python/pystache/pystache/context.py create mode 100644 python/pystache/pystache/defaults.py create mode 100644 python/pystache/pystache/init.py create mode 100644 python/pystache/pystache/loader.py create mode 100644 python/pystache/pystache/locator.py create mode 100644 python/pystache/pystache/parsed.py create mode 100644 python/pystache/pystache/parser.py create mode 100644 python/pystache/pystache/renderengine.py create mode 100644 python/pystache/pystache/renderer.py create mode 100644 python/pystache/pystache/specloader.py create mode 100644 python/pystache/pystache/template_spec.py create mode 100644 python/pystache/setup.py create mode 100644 python/pystache/setup_description.rst create mode 100644 python/pystache/test_pystache.py create mode 100644 python/pystache/tox.ini create mode 100644 python/pytest/.coveragerc create mode 100644 python/pytest/AUTHORS create mode 100644 python/pytest/LICENSE create mode 100644 python/pytest/MANIFEST.in create mode 100644 
python/pytest/PKG-INFO create mode 100644 python/pytest/README.rst create mode 100644 python/pytest/_pytest/__init__.py create mode 100644 python/pytest/_pytest/_argcomplete.py create mode 100644 python/pytest/_pytest/_code/__init__.py create mode 100644 python/pytest/_pytest/_code/_py2traceback.py create mode 100644 python/pytest/_pytest/_code/code.py create mode 100644 python/pytest/_pytest/_code/source.py create mode 100644 python/pytest/_pytest/_pluggy.py create mode 100644 python/pytest/_pytest/assertion/__init__.py create mode 100644 python/pytest/_pytest/assertion/reinterpret.py create mode 100644 python/pytest/_pytest/assertion/rewrite.py create mode 100644 python/pytest/_pytest/assertion/util.py create mode 100755 python/pytest/_pytest/cacheprovider.py create mode 100644 python/pytest/_pytest/capture.py create mode 100644 python/pytest/_pytest/config.py create mode 100644 python/pytest/_pytest/doctest.py create mode 100755 python/pytest/_pytest/genscript.py create mode 100644 python/pytest/_pytest/helpconfig.py create mode 100644 python/pytest/_pytest/hookspec.py create mode 100644 python/pytest/_pytest/junitxml.py create mode 100644 python/pytest/_pytest/main.py create mode 100644 python/pytest/_pytest/mark.py create mode 100644 python/pytest/_pytest/monkeypatch.py create mode 100644 python/pytest/_pytest/nose.py create mode 100644 python/pytest/_pytest/pastebin.py create mode 100644 python/pytest/_pytest/pdb.py create mode 100644 python/pytest/_pytest/pytester.py create mode 100644 python/pytest/_pytest/python.py create mode 100644 python/pytest/_pytest/recwarn.py create mode 100644 python/pytest/_pytest/resultlog.py create mode 100644 python/pytest/_pytest/runner.py create mode 100644 python/pytest/_pytest/skipping.py create mode 100755 python/pytest/_pytest/standalonetemplate.py create mode 100644 python/pytest/_pytest/terminal.py create mode 100644 python/pytest/_pytest/tmpdir.py create mode 100644 python/pytest/_pytest/unittest.py create mode 100644 
python/pytest/_pytest/vendored_packages/README.md create mode 100644 python/pytest/_pytest/vendored_packages/__init__.py create mode 100644 python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/DESCRIPTION.rst create mode 100644 python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/METADATA create mode 100644 python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/RECORD create mode 100644 python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/WHEEL create mode 100644 python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/metadata.json create mode 100644 python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/pbr.json create mode 100644 python/pytest/_pytest/vendored_packages/pluggy-0.3.1.dist-info/top_level.txt create mode 100644 python/pytest/_pytest/vendored_packages/pluggy.py create mode 100644 python/pytest/pytest.py create mode 100644 python/pytest/setup.cfg create mode 100644 python/pytest/setup.py create mode 100644 python/pytoml/PKG-INFO create mode 100644 python/pytoml/pytoml/__init__.py create mode 100644 python/pytoml/pytoml/core.py create mode 100644 python/pytoml/pytoml/parser.py create mode 100644 python/pytoml/pytoml/writer.py create mode 100644 python/pytoml/setup.cfg create mode 100644 python/pytoml/setup.py create mode 100644 python/pytoml/test/test.py create mode 100644 python/pyyaml/CHANGES create mode 100644 python/pyyaml/LICENSE create mode 100644 python/pyyaml/PKG-INFO create mode 100644 python/pyyaml/README create mode 100644 python/pyyaml/examples/pygments-lexer/example.yaml create mode 100644 python/pyyaml/examples/pygments-lexer/yaml.py create mode 100644 python/pyyaml/examples/yaml-highlight/yaml_hl.cfg create mode 100755 python/pyyaml/examples/yaml-highlight/yaml_hl.py create mode 100644 python/pyyaml/ext/_yaml.c create mode 100644 python/pyyaml/ext/_yaml.h create mode 100644 python/pyyaml/ext/_yaml.pxd create mode 100644 python/pyyaml/ext/_yaml.pyx create mode 100644 
python/pyyaml/lib/yaml/__init__.py create mode 100644 python/pyyaml/lib/yaml/composer.py create mode 100644 python/pyyaml/lib/yaml/constructor.py create mode 100644 python/pyyaml/lib/yaml/cyaml.py create mode 100644 python/pyyaml/lib/yaml/dumper.py create mode 100644 python/pyyaml/lib/yaml/emitter.py create mode 100644 python/pyyaml/lib/yaml/error.py create mode 100644 python/pyyaml/lib/yaml/events.py create mode 100644 python/pyyaml/lib/yaml/loader.py create mode 100644 python/pyyaml/lib/yaml/nodes.py create mode 100644 python/pyyaml/lib/yaml/parser.py create mode 100644 python/pyyaml/lib/yaml/reader.py create mode 100644 python/pyyaml/lib/yaml/representer.py create mode 100644 python/pyyaml/lib/yaml/resolver.py create mode 100644 python/pyyaml/lib/yaml/scanner.py create mode 100644 python/pyyaml/lib/yaml/serializer.py create mode 100644 python/pyyaml/lib/yaml/tokens.py create mode 100644 python/pyyaml/lib3/yaml/__init__.py create mode 100644 python/pyyaml/lib3/yaml/composer.py create mode 100644 python/pyyaml/lib3/yaml/constructor.py create mode 100644 python/pyyaml/lib3/yaml/cyaml.py create mode 100644 python/pyyaml/lib3/yaml/dumper.py create mode 100644 python/pyyaml/lib3/yaml/emitter.py create mode 100644 python/pyyaml/lib3/yaml/error.py create mode 100644 python/pyyaml/lib3/yaml/events.py create mode 100644 python/pyyaml/lib3/yaml/loader.py create mode 100644 python/pyyaml/lib3/yaml/nodes.py create mode 100644 python/pyyaml/lib3/yaml/parser.py create mode 100644 python/pyyaml/lib3/yaml/reader.py create mode 100644 python/pyyaml/lib3/yaml/representer.py create mode 100644 python/pyyaml/lib3/yaml/resolver.py create mode 100644 python/pyyaml/lib3/yaml/scanner.py create mode 100644 python/pyyaml/lib3/yaml/serializer.py create mode 100644 python/pyyaml/lib3/yaml/tokens.py create mode 100644 python/pyyaml/setup.cfg create mode 100644 python/pyyaml/setup.py create mode 100644 python/redo/PKG-INFO create mode 100644 python/redo/README create mode 100644 
python/redo/redo/__init__.py create mode 100644 python/redo/redo/cmd.py create mode 100644 python/redo/setup.cfg create mode 100644 python/redo/setup.py create mode 100644 python/requests/HISTORY.rst create mode 100644 python/requests/LICENSE create mode 100644 python/requests/MANIFEST.in create mode 100644 python/requests/NOTICE create mode 100644 python/requests/PKG-INFO create mode 100644 python/requests/README.rst create mode 100644 python/requests/requests/__init__.py create mode 100644 python/requests/requests/adapters.py create mode 100644 python/requests/requests/api.py create mode 100644 python/requests/requests/auth.py create mode 100644 python/requests/requests/cacert.pem create mode 100644 python/requests/requests/certs.py create mode 100644 python/requests/requests/compat.py create mode 100644 python/requests/requests/cookies.py create mode 100644 python/requests/requests/exceptions.py create mode 100644 python/requests/requests/hooks.py create mode 100644 python/requests/requests/models.py create mode 100644 python/requests/requests/packages/__init__.py create mode 100644 python/requests/requests/packages/chardet/__init__.py create mode 100644 python/requests/requests/packages/chardet/big5freq.py create mode 100644 python/requests/requests/packages/chardet/big5prober.py create mode 100755 python/requests/requests/packages/chardet/chardetect.py create mode 100644 python/requests/requests/packages/chardet/chardistribution.py create mode 100644 python/requests/requests/packages/chardet/charsetgroupprober.py create mode 100644 python/requests/requests/packages/chardet/charsetprober.py create mode 100644 python/requests/requests/packages/chardet/codingstatemachine.py create mode 100644 python/requests/requests/packages/chardet/compat.py create mode 100644 python/requests/requests/packages/chardet/constants.py create mode 100644 python/requests/requests/packages/chardet/cp949prober.py create mode 100644 python/requests/requests/packages/chardet/escprober.py 
create mode 100644 python/requests/requests/packages/chardet/escsm.py create mode 100644 python/requests/requests/packages/chardet/eucjpprober.py create mode 100644 python/requests/requests/packages/chardet/euckrfreq.py create mode 100644 python/requests/requests/packages/chardet/euckrprober.py create mode 100644 python/requests/requests/packages/chardet/euctwfreq.py create mode 100644 python/requests/requests/packages/chardet/euctwprober.py create mode 100644 python/requests/requests/packages/chardet/gb2312freq.py create mode 100644 python/requests/requests/packages/chardet/gb2312prober.py create mode 100644 python/requests/requests/packages/chardet/hebrewprober.py create mode 100644 python/requests/requests/packages/chardet/jisfreq.py create mode 100644 python/requests/requests/packages/chardet/jpcntx.py create mode 100644 python/requests/requests/packages/chardet/langbulgarianmodel.py create mode 100644 python/requests/requests/packages/chardet/langcyrillicmodel.py create mode 100644 python/requests/requests/packages/chardet/langgreekmodel.py create mode 100644 python/requests/requests/packages/chardet/langhebrewmodel.py create mode 100644 python/requests/requests/packages/chardet/langhungarianmodel.py create mode 100644 python/requests/requests/packages/chardet/langthaimodel.py create mode 100644 python/requests/requests/packages/chardet/latin1prober.py create mode 100644 python/requests/requests/packages/chardet/mbcharsetprober.py create mode 100644 python/requests/requests/packages/chardet/mbcsgroupprober.py create mode 100644 python/requests/requests/packages/chardet/mbcssm.py create mode 100644 python/requests/requests/packages/chardet/sbcharsetprober.py create mode 100644 python/requests/requests/packages/chardet/sbcsgroupprober.py create mode 100644 python/requests/requests/packages/chardet/sjisprober.py create mode 100644 python/requests/requests/packages/chardet/universaldetector.py create mode 100644 
python/requests/requests/packages/chardet/utf8prober.py create mode 100644 python/requests/requests/packages/urllib3/__init__.py create mode 100644 python/requests/requests/packages/urllib3/_collections.py create mode 100644 python/requests/requests/packages/urllib3/connection.py create mode 100644 python/requests/requests/packages/urllib3/connectionpool.py create mode 100644 python/requests/requests/packages/urllib3/contrib/__init__.py create mode 100644 python/requests/requests/packages/urllib3/contrib/appengine.py create mode 100644 python/requests/requests/packages/urllib3/contrib/ntlmpool.py create mode 100644 python/requests/requests/packages/urllib3/contrib/pyopenssl.py create mode 100644 python/requests/requests/packages/urllib3/exceptions.py create mode 100644 python/requests/requests/packages/urllib3/fields.py create mode 100644 python/requests/requests/packages/urllib3/filepost.py create mode 100644 python/requests/requests/packages/urllib3/packages/__init__.py create mode 100644 python/requests/requests/packages/urllib3/packages/ordered_dict.py create mode 100644 python/requests/requests/packages/urllib3/packages/six.py create mode 100644 python/requests/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py create mode 100644 python/requests/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py create mode 100644 python/requests/requests/packages/urllib3/poolmanager.py create mode 100644 python/requests/requests/packages/urllib3/request.py create mode 100644 python/requests/requests/packages/urllib3/response.py create mode 100644 python/requests/requests/packages/urllib3/util/__init__.py create mode 100644 python/requests/requests/packages/urllib3/util/connection.py create mode 100644 python/requests/requests/packages/urllib3/util/request.py create mode 100644 python/requests/requests/packages/urllib3/util/response.py create mode 100644 python/requests/requests/packages/urllib3/util/retry.py create mode 100644 
python/requests/requests/packages/urllib3/util/ssl_.py create mode 100644 python/requests/requests/packages/urllib3/util/timeout.py create mode 100644 python/requests/requests/packages/urllib3/util/url.py create mode 100644 python/requests/requests/sessions.py create mode 100644 python/requests/requests/status_codes.py create mode 100644 python/requests/requests/structures.py create mode 100644 python/requests/requests/utils.py create mode 100644 python/requests/requirements.txt create mode 100644 python/requests/setup.cfg create mode 100755 python/requests/setup.py create mode 100755 python/requests/test_requests.py create mode 100644 python/rsa/LICENSE create mode 100644 python/rsa/MANIFEST.in create mode 100644 python/rsa/PKG-INFO create mode 100644 python/rsa/README.rst create mode 100755 python/rsa/create_timing_table.py create mode 100755 python/rsa/playstuff.py create mode 100644 python/rsa/rsa/__init__.py create mode 100644 python/rsa/rsa/_compat.py create mode 100644 python/rsa/rsa/_version133.py create mode 100644 python/rsa/rsa/_version200.py create mode 100644 python/rsa/rsa/asn1.py create mode 100644 python/rsa/rsa/bigfile.py create mode 100644 python/rsa/rsa/cli.py create mode 100644 python/rsa/rsa/common.py create mode 100644 python/rsa/rsa/core.py create mode 100644 python/rsa/rsa/key.py create mode 100644 python/rsa/rsa/parallel.py create mode 100644 python/rsa/rsa/pem.py create mode 100644 python/rsa/rsa/pkcs1.py create mode 100644 python/rsa/rsa/prime.py create mode 100644 python/rsa/rsa/randnum.py create mode 100644 python/rsa/rsa/transform.py create mode 100644 python/rsa/rsa/util.py create mode 100644 python/rsa/rsa/varblock.py create mode 100644 python/rsa/run_tests.py create mode 100644 python/rsa/setup.cfg create mode 100755 python/rsa/setup.py create mode 100644 python/rsa/tests/__init__.py create mode 100644 python/rsa/tests/constants.py create mode 100644 python/rsa/tests/py2kconstants.py create mode 100644 
python/rsa/tests/py3kconstants.py create mode 100644 python/rsa/tests/test_bigfile.py create mode 100644 python/rsa/tests/test_common.py create mode 100644 python/rsa/tests/test_compat.py create mode 100644 python/rsa/tests/test_integers.py create mode 100644 python/rsa/tests/test_load_save_keys.py create mode 100644 python/rsa/tests/test_pem.py create mode 100644 python/rsa/tests/test_pkcs1.py create mode 100644 python/rsa/tests/test_strings.py create mode 100644 python/rsa/tests/test_transform.py create mode 100644 python/rsa/tests/test_varblock.py create mode 100644 python/slugid/.gitignore create mode 100644 python/slugid/.travis.yml create mode 100644 python/slugid/LICENSE create mode 100644 python/slugid/README.rst create mode 100644 python/slugid/requirements.txt create mode 100644 python/slugid/setup.py create mode 100644 python/slugid/slugid/__init__.py create mode 100644 python/slugid/slugid/slugid.py create mode 100644 python/slugid/test.py create mode 100644 python/slugid/tox.ini create mode 100644 python/virtualenv/AUTHORS.txt create mode 100644 python/virtualenv/LICENSE.txt create mode 100644 python/virtualenv/MANIFEST.in create mode 100644 python/virtualenv/PKG-INFO create mode 100644 python/virtualenv/README.rst create mode 100755 python/virtualenv/bin/rebuild-script.py create mode 100644 python/virtualenv/docs/Makefile create mode 100644 python/virtualenv/docs/changes.rst create mode 100644 python/virtualenv/docs/conf.py create mode 100644 python/virtualenv/docs/development.rst create mode 100644 python/virtualenv/docs/index.rst create mode 100644 python/virtualenv/docs/installation.rst create mode 100644 python/virtualenv/docs/make.bat create mode 100644 python/virtualenv/docs/reference.rst create mode 100644 python/virtualenv/docs/userguide.rst create mode 100644 python/virtualenv/scripts/virtualenv create mode 100644 python/virtualenv/setup.cfg create mode 100644 python/virtualenv/setup.py create mode 100644 python/virtualenv/site.py create mode 
100644 python/virtualenv/tests/__init__.py create mode 100755 python/virtualenv/tests/test_activate.sh create mode 100644 python/virtualenv/tests/test_activate_output.expected create mode 100644 python/virtualenv/tests/test_cmdline.py create mode 100644 python/virtualenv/tests/test_virtualenv.py create mode 100755 python/virtualenv/virtualenv.py create mode 100644 python/virtualenv/virtualenv_embedded/activate.bat create mode 100644 python/virtualenv/virtualenv_embedded/activate.csh create mode 100644 python/virtualenv/virtualenv_embedded/activate.fish create mode 100644 python/virtualenv/virtualenv_embedded/activate.ps1 create mode 100644 python/virtualenv/virtualenv_embedded/activate.sh create mode 100644 python/virtualenv/virtualenv_embedded/activate_this.py create mode 100644 python/virtualenv/virtualenv_embedded/deactivate.bat create mode 100644 python/virtualenv/virtualenv_embedded/distutils-init.py create mode 100644 python/virtualenv/virtualenv_embedded/distutils.cfg create mode 100644 python/virtualenv/virtualenv_embedded/python-config create mode 100644 python/virtualenv/virtualenv_embedded/site.py create mode 100644 python/virtualenv/virtualenv_support/__init__.py create mode 100644 python/virtualenv/virtualenv_support/argparse-1.4.0-py2.py3-none-any.whl create mode 100644 python/virtualenv/virtualenv_support/pip-8.1.2-py2.py3-none-any.whl create mode 100644 python/virtualenv/virtualenv_support/setuptools-25.2.0-py2.py3-none-any.whl create mode 100644 python/virtualenv/virtualenv_support/wheel-0.29.0-py2.py3-none-any.whl create mode 100644 python/voluptuous/COPYING create mode 100644 python/voluptuous/MANIFEST.in create mode 100644 python/voluptuous/PKG-INFO create mode 100644 python/voluptuous/README.md create mode 100644 python/voluptuous/README.rst create mode 100644 python/voluptuous/setup.cfg create mode 100644 python/voluptuous/setup.py create mode 100644 python/voluptuous/tests.md create mode 100644 python/voluptuous/voluptuous.py create mode 
100644 python/which/LICENSE.txt create mode 100644 python/which/MANIFEST.in create mode 100644 python/which/Makefile.win create mode 100644 python/which/PKG-INFO create mode 100644 python/which/README.txt create mode 100644 python/which/TODO.txt create mode 100644 python/which/build.py create mode 100644 python/which/launcher.cpp create mode 100644 python/which/logo.jpg create mode 100644 python/which/setup.py create mode 100644 python/which/test/test_which.py create mode 100644 python/which/test/testsupport.py create mode 100644 python/which/which.py (limited to 'python') diff --git a/python/PyECC/MANIFEST.in b/python/PyECC/MANIFEST.in new file mode 100644 index 000000000..bb3ec5f0d --- /dev/null +++ b/python/PyECC/MANIFEST.in @@ -0,0 +1 @@ +include README.md diff --git a/python/PyECC/README.md b/python/PyECC/README.md new file mode 100644 index 000000000..be67fff04 --- /dev/null +++ b/python/PyECC/README.md @@ -0,0 +1,29 @@ +ecc +=== + +Pure Python implementation of an elliptic curve cryptosystem based on FIPS 186-3 + +License +======= + +The MIT License (MIT) + +Copyright (c) 2010-2015 Toni Mattis + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/python/PyECC/ecc/Key.py b/python/PyECC/ecc/Key.py new file mode 100644 index 000000000..8ba268576 --- /dev/null +++ b/python/PyECC/ecc/Key.py @@ -0,0 +1,320 @@ +# ==================================================================== +# +# ELLIPTIC CURVE KEY ENCAPSULATION +# Version 2011-01-26 +# +# Copyright (c) 2010 - 2011 | Toni Mattis +# +# ==================================================================== + +""" +== Elliptic Curve Key Encapsulation == + +Keypairs +-------- +Keypairs are generated using: Key.generate(bits) + +The number of bits is tied to the NIST-proposed elliptic curves +and has to be 192, 224, 256, 384 or 521 (not 512!). +The result is a Key object containing public and private key. + +private() is a method for checking whether the Key object is a +pure public key or also includes the private part. + + +Exchange +-------- +Public keys have to be exported using the export()-Method without +passing an argument. The result is a string which can be safely +transmitted. + +Using Key.decode() the receiver obtains a new +public Key object of the sender. + + +Storage +------- +For storing a key, export(True) exports both private and public +key as a string. Make sure this information is properly encrypted +when stored. + +Key.decode() obtains the full Key object from the +encoded keypair. + + +Public Keys +----------- +A public Key object can perform the following cryptographic +operations: + +* validate() Checks key integrity, i.e. after loading the + key from a file. Returns True if the key is + valid. Invalid keys should be discarded. + +* fingerprint() Returns the public key fingerprint used to + identify the key. Optional arguments: + 1. 
as_hex - True, if output should be formatted + as hexadecimal number (default: True). + 2. hashfunc - The official name of the hash + function being used (default: 'sha1') + For supported hash functions see below. + +* keyid() Returns a (mostly) unique Key ID, which is + shorter than the fingerprint. The result + is an integer of max. 64 bits. + +* verify() Verifies whether the given data (argument 1) + matches the signature (argument 2) issued + by the owner of this key. A falsification + can have multiple causes: + + - Data, public key or signature were altered + during transmission/storage. + - The siganture was not issued by the owner + of this key but may be valid with another + key. + - The signature was issued for different data. + - The signature was issued using a different + hash function. Another hash function may work. + + Optionally, the name of a hash algorithm + can be provided. For hash names see below. + +* encrypt() Encrypts a packet of data destined for the owner + of this key*. After encryption only the holder + of this Key's private part is able to decrypt + the message. + +Private Keys / Keypairs +----------------------- + +If the key object is private, then it is a keypair consisting of +a public and a private key. Therefore all Public key operations +are supported. + +Additional functions: + +* sign() Signs given data using this private key. The + result is a signature which can be passed as + argument to the verify() function in addition + to the data being verified. + + As additional argument the name of the hash + function can be provided (defaults to 'sha256'). + For hash names see below. + +* auth_encrypt() Performs authenticated encryption of data + (argument 1) for the holder of the key provided + as second argument. Only the receiver whose + public key is given is able to derypt and verify + the message. The message will be implicitly + signed using the own private key. 
* + +* decrypt() Decrypts a message which has been encrypted + using the public key of this keypair*. If + decryption yields random data, this can have + multiple causes: + - You were not the intended receiver, a different + private key may be able to decrypt it. + - The message was altered. + - Your private key is damaged. + +* auth_decrypt() Decrypts a message while verifying whether + it has been authentically issued by the holder + of the given key (argument 2). When + authentication failed, a + SecurityViolationException is thrown. Reasons + for this to happen are those mentioned with + decrypt() and verify(). * + +*) The encryption used here depends on the "eccrypt" module imported +by this module. Default implementation should use RABBIT as cipher +and do the asymmetric part using an optimized El-Gamal scheme. + + + +Hash functions +-------------- +The following hash functions can be passed at the moment: + +name | hash size | security level + | (bits, bytes, hex digits) +---------+------------------------+---------------- +'sha1' 160 / 20 / 40 medium +'sha224' 224 / 28 / 56 medium-strong +'sha256' 256 / 32 / 64 strong +'sha384' 384 / 48 / 96 very strong +'sha512' 512 / 64 / 128 very strong + +'md5' 128 / 16 / 32 weak (not recommended!) + + +Curves +------ +According to FIPS 186-3, Appendix D.1.2 there are 5 elliptic +curves recommended. All of those are strong, but those with +a higher bit number even stronger. + +192 and 224 bits are sufficient for most purposes. +256 bits offer an additional magnitude of security. + (i.e. for classified / strongly confidential data) +384 and 521 bits provide exceptionally strong security. According + to current research they most probably keep this level for + decades in the future. + +FIPS also recommends curves over polynomial fields but actually +only prime fields are implemented here. (Because 2^521-1 is a mersenne +prime having great security characteristics, 521 bits are preferred +over a constructed 512 bit field.) 
+""" + +from encoding import * +from eccrypt import * +import ecdsa +import hashlib +from SecurityViolationException import * + +class Key: + + # --- KEY SETUP ------------------------------------------------------------ + + def __init__(self, public_key, private_key = None): + '''Create a Key(pair) from numeric keys.''' + self._pub = public_key + self._priv = private_key + self._fingerprint = {} + self._id = None + + @staticmethod + def generate(bits): + '''Generate a new ECDSA keypair''' + return Key(*ecdsa.keypair(bits)) + + # --- BINARY REPRESENTATION ------------------------------------------------ + + def encode(self, include_private = False): + '''Returns a strict binary representation of this Key''' + e = Encoder().int(self.keyid(), 8) + e.int(self._pub[0], 2).point(self._pub[1], 2) + if include_private and self._priv: + e.long(self._priv[1], 2) + else: + e.long(0, 2) + return e.out() + + def compress(self): + '''Returns a compact public key representation''' + + + @staticmethod + def decode(s): + '''Constructs a new Key object from its binary representation''' + kid, ksize, pub, priv = Decoder(s).int(8).int(2).point(2).long(2).out() + k = Key((ksize, pub), (ksize, priv) if priv else None) + if kid == k.keyid(): + return k + else: + raise ValueError, "Invalid Key ID" + + # --- IDENTIFICATION AND VALIDATION ---------------------------------------- + + def private(self): + '''Checks whether Key object contains private key''' + return bool(self._priv) + + def validate(self): + '''Checks key validity''' + if ecdsa.validate_public_key(self._pub): + if self._priv: # ? 
validate and match private key + return ecdsa.validate_private_key(self._priv) and \ + ecdsa.match_keys(self._pub, self._priv) + else: + return True # : everything valid + else: + return False + + def fingerprint(self, as_hex = True, hashfunc = 'sha1'): + '''Get the public key fingerprint''' + if hashfunc in self._fingerprint: + return self._fingerprint[hashfunc] if not as_hex else \ + self._fingerprint[hashfunc].encode("hex") + else: + h = hashlib.new(hashfunc, enc_point(self._pub[1])) + d = h.digest() + self._fingerprint[hashfunc] = d + return d.encode("hex") if as_hex else d + + def keyid(self): + '''Get a short, unique identifier''' + if not self._id: + self._id = dec_long(self.fingerprint(False, 'sha1')[:8]) + return self._id + + # --- DIGITAL SIGNATURES --------------------------------------------------- + + def sign(self, data, hashfunc = 'sha256'): + '''Sign data using the specified hash function''' + if self._priv: + h = dec_long(hashlib.new(hashfunc, data).digest()) + s = ecdsa.sign(h, self._priv) + return enc_point(s) + else: + raise AttributeError, "Private key needed for signing." 
+ + def verify(self, data, sig, hashfunc = 'sha256'): + '''Verify the signature of data using the specified hash function''' + h = dec_long(hashlib.new(hashfunc, data).digest()) + s = dec_point(sig) + return ecdsa.verify(h, s, self._pub) + + # --- HYBRID ENCRYPTION ---------------------------------------------------- + + def encrypt(self, data): + '''Encrypt a message using this public key''' + ctext, mkey = encrypt(data, self._pub) + return Encoder().point(mkey).str(ctext, 4).out() + + def decrypt(self, data): + '''Decrypt an encrypted message using this private key''' + mkey, ctext = Decoder(data).point().str(4).out() + return decrypt(ctext, mkey, self._priv) + + # --- AUTHENTICATED ENCRYPTION --------------------------------------------- + + def auth_encrypt(self, data, receiver): + '''Sign and encrypt a message''' + sgn = self.sign(data) + ctext, mkey = encrypt(data, receiver._pub) + return Encoder().point(mkey).str(ctext, 4).str(sgn, 2).out() + + def auth_decrypt(self, data, source): + '''Decrypt and verify a message''' + mkey, ctext, sgn = Decoder(data).point().str(4).str(2).out() + text = decrypt(ctext, mkey, self._priv) + if source.verify(text, sgn): + return text + else: + raise SecurityViolationException, "Invalid Signature" + + +if __name__ == "__main__": + + import time + + def test_overhead(): + print "sender", "receiver", "+bytes", "+enctime", "+dectime" + for s in [192, 224, 256, 384, 521]: + sender = Key.generate(s) + for r in [192, 224, 256, 384, 521]: + receiver = Key.generate(r) + t = time.time() + e = sender.auth_encrypt("", receiver) + t1 = time.time() - t + t = time.time() + receiver.auth_decrypt(e, sender) + t2 = time.time() - t + print s, r, len(e), t1, t2 + + + + diff --git a/python/PyECC/ecc/Rabbit.py b/python/PyECC/ecc/Rabbit.py new file mode 100644 index 000000000..209f01e1e --- /dev/null +++ b/python/PyECC/ecc/Rabbit.py @@ -0,0 +1,270 @@ +# ------------------------------------------------------------------------------ +# +# R A B B I T 
Stream Cipher +# by M. Boesgaard, M. Vesterager, E. Zenner (specified in RFC 4503) +# +# +# Pure Python Implementation by Toni Mattis +# +# ------------------------------------------------------------------------------ + + +WORDSIZE = 0x100000000 + +rot08 = lambda x: ((x << 8) & 0xFFFFFFFF) | (x >> 24) +rot16 = lambda x: ((x << 16) & 0xFFFFFFFF) | (x >> 16) + +def _nsf(u, v): + '''Internal non-linear state transition''' + s = (u + v) % WORDSIZE + s = s * s + return (s ^ (s >> 32)) % WORDSIZE + +class Rabbit: + + def __init__(self, key, iv = None): + '''Initialize Rabbit cipher using a 128 bit integer/string''' + + if isinstance(key, str): + # interpret key string in big endian byte order + if len(key) < 16: + key = '\x00' * (16 - len(key)) + key + # if len(key) > 16 bytes only the first 16 will be considered + k = [ord(key[i + 1]) | (ord(key[i]) << 8) + for i in xrange(14, -1, -2)] + else: + # k[0] = least significant 16 bits + # k[7] = most significant 16 bits + k = [(key >> i) & 0xFFFF for i in xrange(0, 128, 16)] + + # State and counter initialization + x = [(k[(j + 5) % 8] << 16) | k[(j + 4) % 8] if j & 1 else + (k[(j + 1) % 8] << 16) | k[j] for j in xrange(8)] + c = [(k[j] << 16) | k[(j + 1) % 8] if j & 1 else + (k[(j + 4) % 8] << 16) | k[(j + 5) % 8] for j in xrange(8)] + + self.x = x + self.c = c + self.b = 0 + self._buf = 0 # output buffer + self._buf_bytes = 0 # fill level of buffer + + self.next() + self.next() + self.next() + self.next() + + for j in xrange(8): + c[j] ^= x[(j + 4) % 8] + + self.start_x = self.x[:] # backup initial key for IV/reset + self.start_c = self.c[:] + self.start_b = self.b + + if iv != None: + self.set_iv(iv) + + def reset(self, iv = None): + '''Reset the cipher and optionally set a new IV (int64 / string).''' + + self.c = self.start_c[:] + self.x = self.start_x[:] + self.b = self.start_b + self._buf = 0 + self._buf_bytes = 0 + if iv != None: + self.set_iv(iv) + + def set_iv(self, iv): + '''Set a new IV (64 bit integer / 
# NOTE(review): methods of class Rabbit (class header, __init__ and set_iv are
# outside this chunk).  WORDSIZE, rot08, rot16 and _nsf are module globals
# defined earlier in the file — TODO confirm against the full module.

def next(self):
    '''Advance the internal cipher state by one iteration (RFC 4503 2.5/2.6).

    Updates the counter system c with carry propagation, then the inner
    state x through the next-state function.  Returns self for chaining.'''
    c = self.c
    x = self.x

    # counter system update: fixed round constants A0..A7 with carry chain
    consts = (0x4D34D34D, 0xD34D34D3, 0x34D34D34,
              0x4D34D34D, 0xD34D34D3, 0x34D34D34,
              0x4D34D34D, 0xD34D34D3)
    carry = self.b
    for i, a in enumerate(consts):
        t = c[i] + a + carry
        c[i] = t % WORDSIZE
        carry = t // WORDSIZE

    g = [_nsf(x[j], c[j]) for j in range(8)]

    x[0] = (g[0] + rot16(g[7]) + rot16(g[6])) % WORDSIZE
    x[1] = (g[1] + rot08(g[0]) + g[7]) % WORDSIZE
    x[2] = (g[2] + rot16(g[1]) + rot16(g[0])) % WORDSIZE
    x[3] = (g[3] + rot08(g[2]) + g[1]) % WORDSIZE
    x[4] = (g[4] + rot16(g[3]) + rot16(g[2])) % WORDSIZE
    x[5] = (g[5] + rot08(g[4]) + g[3]) % WORDSIZE
    x[6] = (g[6] + rot16(g[5]) + rot16(g[4])) % WORDSIZE
    x[7] = (g[7] + rot08(g[6]) + g[5]) % WORDSIZE

    self.b = carry
    return self


def derive(self):
    '''Fold the inner state x into one 128-bit keystream integer (RFC 4503 2.7).'''
    x = self.x
    return ((x[0] & 0xFFFF) ^ (x[5] >> 16)) | \
           (((x[0] >> 16) ^ (x[3] & 0xFFFF)) << 16) | \
           (((x[2] & 0xFFFF) ^ (x[7] >> 16)) << 32) | \
           (((x[2] >> 16) ^ (x[5] & 0xFFFF)) << 48) | \
           (((x[4] & 0xFFFF) ^ (x[1] >> 16)) << 64) | \
           (((x[4] >> 16) ^ (x[7] & 0xFFFF)) << 80) | \
           (((x[6] & 0xFFFF) ^ (x[3] >> 16)) << 96) | \
           (((x[6] >> 16) ^ (x[1] & 0xFFFF)) << 112)


def keystream(self, n):
    '''Generate n keystream bytes as a byte-per-character string.'''
    out = []                      # list+join instead of quadratic str +=
    b = self._buf
    j = self._buf_bytes
    for _ in range(n):
        if not j:                 # buffer empty: fetch the next 128 bits
            j = 16
            self.next()
            b = self.derive()
        out.append(chr(b & 0xFF))
        j -= 1
        b >>= 8                   # FIX: was 'b >>= 1' — each emitted byte must
                                  # consume 8 bits of the 128-bit block
    self._buf = b
    self._buf_bytes = j
    return ''.join(out)


def encrypt(self, data):
    '''XOR data with the keystream; the same operation decrypts.'''
    out = []
    b = self._buf
    j = self._buf_bytes
    for ch in data:
        if not j:                 # buffer empty: fetch the next 128 bits
            j = 16
            self.next()
            b = self.derive()
        out.append(chr(ord(ch) ^ (b & 0xFF)))
        j -= 1
        b >>= 8                   # FIX: was 'b >>= 1' (see keystream)
    self._buf = b
    self._buf_bytes = j
    return ''.join(out)

decrypt = encrypt
# FIPS approved elliptic curves over prime fields
# (see FIPS 186-3, Appendix D.1.2).  Python-3 compatible: the former
# py2-only 'L' integer suffixes and 'raise E, msg' syntax were removed.
DOMAINS = {
    # bits : (p, order of E(GF(p)), parameter b, base point x, base point y)
    192: (0xfffffffffffffffffffffffffffffffeffffffffffffffff,
          0xffffffffffffffffffffffff99def836146bc9b1b4d22831,
          0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1,
          0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012,
          0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811),

    224: (0xffffffffffffffffffffffffffffffff000000000000000000000001,
          0xffffffffffffffffffffffffffff16a2e0b8f03e13dd29455c5c2a3d,
          0xb4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4,
          0xb70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21,
          0xbd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34),

    256: (0xffffffff00000001000000000000000000000000ffffffffffffffffffffffff,
          0xffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551,
          0x5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b,
          0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296,
          0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5),

    384: (0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffeffffffff0000000000000000ffffffff,
          0xffffffffffffffffffffffffffffffffffffffffffffffffc7634d81f4372ddf581a0db248b0a77aecec196accc52973,
          0xb3312fa7e23ee7e4988e056be3f82d19181d9c6efe8141120314088f5013875ac656398d8a2ed19d2a85c8edd3ec2aef,
          0xaa87ca22be8b05378eb1c71ef320ad746e1d3b628ba79b9859f741e082542a385502f25dbf55296c3a545e3872760ab7,
          0x3617de4a96262c6f5d9e98bf9292dc29f8f41dbd289a147ce9da3113b5f0b8c00a60b1ce1d7e819d7a431d7c90ea0e5f),

    521: (0x1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff,
          0x1fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffa51868783bf2f966b7fcc0148f709a5d03bb5c9b8899c47aebb6fb71e91386409,
          0x051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef109e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00,
          0x0c6858e06b70404e9cd9e3ecb662395b4429c648139053fb521f828af606b4d3dbaa14b5e77efe75928fe1dc127a2ffa8de3348b3c1856a429bf97e7e31c2e5bd66,
          0x11839296a789a3bc0045c8a5fb42c7d1bd998f54449579b446817afbd17273e662c97ee72995ef42640c550b9013fad0761353c7086a272c24088be94769fd16650),
}


# Additional non-standard curves for low security but high performance
# (not intended for use in signing, hence the missing group order).
DOMAINS.update({
    128: (0xffffffffffffffffffffffffffffff61,
          None,
          0xd83d3eb8266a89927d73d5fe263d5f23,
          0xa94d2d8531f7af8bde367def12b98ead,
          0x9f44e1d671beb68fd2df7f877ab13fa6),

    160: (0xffffffffffffffffffffffffffffffffffffffd1,
          None,
          0x94bfe70deef7b94742c089ca4db3ca27fbe1f754,
          0xcc6562c2969ac57524b8d0f300d1f598c908c121,
          0x952ddde80a252683dd7ba90fb5919899b5af69f5),
})

CURVE_P = 3   # global parameter p of all curves (for efficiency reasons)


def get_curve(bits):
    '''Return a known curve of the given size.

    Result is (bits, prime, order, p, q, base_point) for the curve form
    y**2 == x**3 - p*x - q; q is derived as prime - b.  Order is None for
    the weak non-signing curves.  Raises KeyError for unknown sizes.'''
    if bits not in DOMAINS:
        raise KeyError("Key size not implemented: %s" % bits)
    p, n, b, x, y = DOMAINS[bits]
    return bits, p, n, CURVE_P, p - b, (x, y)


def implemented_keys(must_sign=False):
    '''List known key sizes; only those with a known order if must_sign.'''
    return [k for k in DOMAINS if not must_sign or DOMAINS[k][1]]
#
# Elliptic Curve Digital Signature Algorithm (ECDSA)
#
# COPYRIGHT (c) 2010 by Toni Mattis
#

from elliptic import inv, mulf, mulp, muladdp, element
from curves import get_curve, implemented_keys
from os import urandom

import hashlib


def randkey(bits, n):
    '''Random integer in [1, n-1] built from bits//8 + 8 random bytes.

    The 64 extra bits follow the FIPS 186-3 recommendation to reduce
    modulo bias.'''
    material = urandom(bits // 8 + 8)      # '//': '/' floats on Python 3
    c = int.from_bytes(material, 'big')    # py2 ord()-loop breaks on py3 bytes
    return (c % (n - 1)) + 1


def keypair(bits):
    '''Generate a keypair ((bits, Q), (bits, d)); Q public, d private.

    Raises ValueError for unimplemented sizes and for curves without a
    known group order (unusable for signing).'''
    try:
        bits, cn, n, cp, cq, g = get_curve(bits)
    except KeyError:
        raise ValueError("Key size %s not implemented" % bits)
    if not n:   # FIX: 'n > 0' raises TypeError on Python 3 when n is None
        raise ValueError("Key size %s not suitable for signing" % bits)
    d = randkey(bits, n)
    q = mulp(cp, cq, cn, g, d)
    return (bits, q), (bits, d)


def supported_keys():
    '''Return a list of all key sizes implemented for signing.'''
    return implemented_keys(True)


def validate_public_key(qk):
    '''Check that qk = (bits, (x, y)) is a curve point of order n.'''
    bits, q = qk
    x, y = q
    bits, cn, n, cp, cq, g = get_curve(bits)
    return q and 0 < x < cn and 0 < y < cn and \
        element(q, cp, cq, cn) and (mulp(cp, cq, cn, q, n) is None)


def validate_private_key(dk):
    '''Check that the private scalar of dk lies in the valid range.'''
    bits, d = dk
    bits, cn, n, cp, cq, g = get_curve(bits)
    return 0 < d < cn


def match_keys(qk, dk):
    '''Check whether dk is the private key belonging to public key qk.'''
    bits, d = dk
    bitz, q = qk
    if bits != bitz:
        return False
    bits, cn, n, cp, cq, g = get_curve(bits)
    return mulp(cp, cq, cn, g, d) == q


def truncate(h, hmax):
    '''Shift hash value h right until it no longer exceeds hmax.'''
    while h > hmax:
        h >>= 1
    return h


def sign(h, dk):
    '''Sign the numeric digest h with private key dk => (r, s).'''
    bits, d = dk
    bits, cn, n, cp, cq, g = get_curve(bits)
    h = truncate(h, cn)
    r = s = 0
    while r == 0 or s == 0:
        k = randkey(bits, cn)
        kinv = inv(k, n)
        kg = mulp(cp, cq, cn, g, k)
        r = kg[0] % n
        if r == 0:
            continue
        s = (kinv * (h + r * d)) % n
    return r, s


def verify(h, sig, qk):
    '''Verify that sig = (r, s) is a valid signature of h under qk.'''
    bits, q = qk
    try:
        bits, cn, n, cp, cq, g = get_curve(bits)
    except KeyError:
        return False
    h = truncate(h, cn)
    r, s = sig
    if not (0 < r < n and 0 < s < n):
        return False
    w = inv(s, n)
    u1 = (h * w) % n
    u2 = (r * w) % n
    x, y = muladdp(cp, cq, cn, g, u1, q, u2)
    return r % n == x % n


def hash_sign(s, dk, hashfunc='sha256'):
    '''Hash s with hashfunc and sign the digest => (hashfunc, r, s).'''
    # NOTE(review): s must be bytes on Python 3 — confirm at call sites.
    h = int(hashlib.new(hashfunc, s).hexdigest(), 16)
    return (hashfunc,) + sign(h, dk)


def hash_verify(s, sig, qk):
    '''Verify a signature tuple produced by hash_sign.'''
    h = int(hashlib.new(sig[0], s).hexdigest(), 16)
    return verify(h, sig[1:], qk)
key -> must fail + assert not verify(testh1, s3, qk) # modified signature -> must fail + assert not verify(testh1, s1, qk2) # different publ. key -> must fail + + + def test_perf(bits, rounds = 50): + '''-> (key generations, signatures, verifications) / second''' + h = 0x0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF + d = get_curve(bits) + + t = time.time() + for i in xrange(rounds): + qk, dk = keypair(bits) + tgen = time.time() - t + + t = time.time() + for i in xrange(rounds): + s = sign(0, dk) + tsign = time.time() - t + + t = time.time() + for i in xrange(rounds): + verify(0, s, qk) + tver = time.time() - t + + return rounds / tgen, rounds / tsign, rounds / tver + + diff --git a/python/PyECC/ecc/elliptic.py b/python/PyECC/ecc/elliptic.py new file mode 100644 index 000000000..9191a8848 --- /dev/null +++ b/python/PyECC/ecc/elliptic.py @@ -0,0 +1,381 @@ + +# --- ELLIPTIC CURVE MATH ------------------------------------------------------ +# +# curve definition: y^2 = x^3 - p*x - q +# over finite field: Z/nZ* (prime residue classes modulo a prime number n) +# +# +# COPYRIGHT (c) 2010 by Toni Mattis +# ------------------------------------------------------------------------------ + +''' +Module for elliptic curve arithmetic over a prime field GF(n). +E(GF(n)) takes the form y**2 == x**3 - p*x - q (mod n) for a prime n. + +0. Structures used by this module + + PARAMETERS and SCALARS are non-negative (long) integers. + + A POINT (x, y), usually denoted p1, p2, ... + is a pair of (long) integers where 0 <= x < n and 0 <= y < n + + A POINT in PROJECTIVE COORDINATES, usually denoted jp1, jp2, ... + takes the form (X, Y, Z, Z**2, Z**3) where x = X / Z**2 + and y = Y / z**3. This form is called Jacobian coordinates. + + The NEUTRAL element "0" or "O" is represented by None + in both coordinate systems. + +1. Basic Functions + + euclid() Is the Extended Euclidean Algorithm. + inv() Computes the multiplicative inversion modulo n. 
# BASIC MATH -------------------------------------------------------------------

def euclid(a, b):
    '''Extended Euclidean algorithm: return (x, y, g) with x*a + y*b == g.

    Iterative (non-recursive), hence safe for very large operands.'''
    prev_x, cur_x = 1, 0
    prev_y, cur_y = 0, 1
    while b:
        quot, rem = divmod(a, b)
        a, b = b, rem
        prev_x, cur_x = cur_x, prev_x - quot * cur_x
        prev_y, cur_y = cur_y, prev_y - quot * cur_y
    return prev_x, prev_y, a


def inv(a, n):
    '''Multiplicative inverse of a modulo n; a and n should be COPRIME.'''
    # Coprimality is not checked here in favour of performance.
    coeff = euclid(a, n)[0]
    return coeff % n if coeff < 0 else coeff


def curve_q(x, y, p, n):
    '''Recover parameter q so that (x, y) lies on y**2 == x**3 - p*x - q.'''
    return (x * x * x - p * x - y * y) % n


def element(point, p, q, n):
    '''True iff point is on the curve (p, q, n); the identity always is.'''
    if not point:
        return True
    x, y = point
    return (x * x * x - p * x - q - y * y) % n == 0


def to_projective(p):
    '''Lift an affine point (x, y) into Jacobian coordinates.'''
    return (p[0], p[1], 1, 1, 1) if p else None   # None is the identity


def from_projective(jp, n):
    '''Project a Jacobian point back to affine (x, y) modulo n.'''
    if not jp:
        return None                                # identity point
    x, y, _, zs, zc = jp
    return (x * inv(zs, n)) % n, (y * inv(zc, n)) % n


def neg(p, n):
    '''Additive inverse -P; valid in both coordinate systems.'''
    if not p:
        return None
    return (p[0], (n - p[1]) % n) + p[2:]


# POINT ADDITION ---------------------------------------------------------------

def add(p, q, n, p1, p2):
    '''Add affine points p1 and p2 on the curve (p, q, n).'''
    if not p1 or not p2:
        return p1 or p2                            # identity absorbed
    x1, y1 = p1
    x2, y2 = p2
    dx = (x1 - x2) % n
    if dx:
        # distinct abscissae: chord slope
        slope = ((y1 - y2) * inv(dx, n)) % n
        x3 = (slope * slope - x1 - x2) % n
    elif (y1 + y2) % n:
        # same point: tangent slope from the curve derivative
        slope = ((3 * x1 * x1 - p) * inv(2 * y1, n)) % n
        x3 = (slope * slope - 2 * x1) % n
    else:
        return None                                # P + (-P) = identity
    return (x3, n - (y1 + slope * (x3 - x1)) % n)
def addf(p, q, n, jp1, jp2):
    '''Add two points given in Jacobian (projective) coordinates.

    Redundant coordinates avoid the expensive modular inversions of the
    affine formula.'''
    if not (jp1 and jp2):
        return jp1 or jp2                          # identity absorbed
    x1, y1, z1, z1s, z1c = jp1
    x2, y2, z2, z2s, z2c = jp2

    s1 = (y1 * z2c) % n
    s2 = (y2 * z1c) % n
    u1 = (x1 * z2s) % n
    u2 = (x2 * z1s) % n

    if not (u1 - u2) % n:
        # equal x coordinates: a doubling, or P + (-P) = identity
        if (s1 + s2) % n:
            return doublef(p, q, n, jp1)
        return None

    h = (u2 - u1) % n
    r = (s2 - s1) % n
    h2 = (h * h) % n
    h3 = (h2 * h) % n

    x3 = (r * r - h3 - 2 * u1 * h2) % n
    y3 = (r * (u1 * h2 - x3) - s1 * h3) % n
    z3 = (z1 * z2 * h) % n
    z3s = (z3 * z3) % n
    return (x3, y3, z3, z3s, (z3s * z3) % n)


def doublef(p, q, n, jp):
    '''Double a point given in Jacobian coordinates.'''
    if not jp:
        return None
    x1, y1, z1, z1s, z1c = jp

    ys = (y1 * y1) % n
    a = (4 * x1 * ys) % n
    b = (3 * x1 * x1 - p * z1c * z1) % n
    x3 = (b * b - 2 * a) % n
    y3 = (b * (a - x3) - 8 * ys * ys) % n
    z3 = (2 * y1 * z1) % n
    z3s = (z3 * z3) % n
    return x3, y3, z3, z3s, (z3s * z3) % n


# SCALAR MULTIPLICATION --------------------------------------------------------

def mul(p, q, n, p1, c):
    '''Scalar product c * p1 via double-and-add, O(log c) affine additions.'''
    acc = None
    while c > 0:
        if c & 1:
            acc = add(p, q, n, acc, p1)
        p1 = add(p, q, n, p1, p1)                  # running double of p1
        c >>= 1
    return acc


def _gbd(n):
    '''Second-greatest power-of-two divisor (helper for signed expansion).

    Lets _signed_bin() pick the digit (+1 or -1) that leaves the longer
    run of zero bits.'''
    if n <= 0:
        return 0
    div = 1
    while n % div == 0:
        div <<= 1
    return div >> 2
def _signed_bin(n):
    '''Signed binary expansion of n: digits in {-1, 0, 1}, MSB first.

    Example: 15 == 16 - 1 -> [1, 0, 0, 0, -1].  The extra zeroes reduce
    the number of point additions during scalar multiplication
    (subtraction of points is as cheap as addition).'''
    digits = []
    while n > 1:
        if n & 1:
            # choose the sign that leaves more trailing zeroes afterwards
            if _gbd(n + 1) > _gbd(n - 1):
                digits.append(-1)
                n += 1
            else:
                digits.append(+1)
                n -= 1
        else:
            digits.append(0)
        n >>= 1
    digits.append(n)
    return digits[::-1]


def mulf(p, q, n, jp1, c):
    '''Scalar product c * jp1 in Jacobian coordinates (signed-binary).'''
    res = None
    jp_neg = neg(jp1, n)                 # additive inverse, used for -1 digits
    for digit in _signed_bin(c):
        res = doublef(p, q, n, res)
        if digit > 0:
            res = addf(p, q, n, res, jp1)
        elif digit < 0:
            res = addf(p, q, n, res, jp_neg)
    return res


def mulp(p, q, n, p1, c):
    '''Scalar product c * p1 in affine coordinates using the fast path.'''
    return from_projective(mulf(p, q, n, to_projective(p1), c), n)


def muladdf(p, q, n, jp1, c1, jp2, c2):
    '''Compute c1*jp1 + c2*jp2 (Jacobian) with Shamir's trick.'''
    sb1 = _signed_bin(c1)
    sb2 = _signed_bin(c2)
    pad = len(sb2) - len(sb1)
    if pad > 0:
        sb1 = [0] * pad + sb1
    elif pad < 0:
        sb2 = [0] * -pad + sb2

    sum12 = addf(p, q, n, jp1, jp2)
    diff12 = addf(p, q, n, jp1, neg(jp2, n))
    # precomp[i][j] == i*jp1 + j*jp2 for digit pairs i, j in {-1, 0, 1}
    precomp = ((None, jp2, neg(jp2, n)),
               (jp1, sum12, diff12),
               (neg(jp1, n), neg(diff12, n), neg(sum12, n)))

    res = None
    for d1, d2 in zip(sb1, sb2):
        res = doublef(p, q, n, res)
        if d1 or d2:
            res = addf(p, q, n, res, precomp[d1][d2])
    return res


def muladdp(p, q, n, p1, c1, p2, c2):
    '''Compute c1*p1 + c2*p2 in affine (x, y)-coordinates.'''
    return from_projective(muladdf(p, q, n,
                                   to_projective(p1), c1,
                                   to_projective(p2), c2), n)


# POINT COMPRESSION ------------------------------------------------------------

def sign_bit(p1):
    '''Parity of the y-coordinate of p1; the identity yields 0.'''
    return p1[1] % 2 if p1 else 0


def _sqrt_mod(a, n):
    '''Square root of a modulo the odd prime n (Tonelli-Shanks), or None.'''
    a %= n
    if a == 0:
        return 0
    if pow(a, (n - 1) // 2, n) != 1:
        return None                          # a is not a quadratic residue
    if n % 4 == 3:
        return pow(a, (n + 1) // 4, n)       # fast path for n == 3 (mod 4)
    # write n - 1 = u * 2**s with u odd
    u, s = n - 1, 0
    while u % 2 == 0:
        u //= 2
        s += 1
    # find any quadratic non-residue z
    z = 2
    while pow(z, (n - 1) // 2, n) != n - 1:
        z += 1
    m, c, t, r = s, pow(z, u, n), pow(a, u, n), pow(a, (u + 1) // 2, n)
    while t != 1:
        # least i with t**(2**i) == 1
        i, t2 = 0, t
        while t2 != 1:
            t2 = (t2 * t2) % n
            i += 1
        b = pow(c, 1 << (m - i - 1), n)
        m, c = i, (b * b) % n
        t = (t * c) % n
        r = (r * b) % n
    return r


def y_from_x(x, p, q, n, sign):
    '''Return y with y**2 == x**3 - p*x - q (mod n) and y % 2 == sign.

    Returns None when x is not the abscissa of a curve point.  FIX: the
    original computed the right-hand side but never returned a value.'''
    a = (((x * x) % n - p) * x - q) % n       # x**3 - p*x - q, reduced
    y = _sqrt_mod(a, n)
    if y is None:
        return None
    if y and y % 2 != sign % 2:
        y = n - y                             # the other root has opposite parity
    return y
#
# Encodings and Formats for Elliptic Curve Cryptography
#

from io import StringIO   # FIX: the Python-2 'StringIO' module is gone on py3


# Big-endian encoding

def enc_long(n):
    '''Encode an arbitrarily large non-negative integer, big-endian,
    one character per byte.  Zero encodes as the empty string.'''
    s = ""
    while n > 0:
        s = chr(n & 0xFF) + s
        n >>= 8
    return s


def enc_int(n):
    '''Encode an integer into exactly 4 big-endian characters.'''
    return chr((n >> 24) & 0xFF) + chr((n >> 16) & 0xFF) + \
           chr((n >> 8) & 0xFF) + chr(n & 0xFF)


def enc_fixed_long(n, length):
    '''Encode n into exactly `length` characters (zero-padded/truncated).'''
    return enc_long(n)[:length].rjust(length, '\x00')


def dec_long(s):
    '''Decode a big-endian string back into an integer.

    With 4 characters of input this also decodes enc_int output.'''
    n = 0
    for ch in s:
        n = (n << 8) | ord(ch)
    return n


# Chunks

def enc_chunks(*args):
    '''Concatenate the given strings as 4-byte length-prefixed chunks.'''
    return ''.join(enc_int(len(a)) + a for a in args)


def dec_chunks(s):
    '''Split a string produced by enc_chunks back into its parts.'''
    parts = []
    i = 0
    while i < len(s):
        size = dec_long(s[i:i + 4])
        i += 4
        parts.append(s[i:i + size])
        i += size
    return parts


# Point and signature data

def enc_point(p):
    '''Encode a point (x, y) as two equal-length big-endian halves.'''
    x, y = p
    sx = enc_long(x)
    sy = enc_long(y)
    pad = len(sx) - len(sy)
    if pad > 0:
        sy = '\x00' * pad + sy
    elif pad < 0:
        sx = '\x00' * -pad + sx
    return sx + sy


def dec_point(s):
    '''Decode an even-length string back into a point (x, y).'''
    half = len(s) // 2   # FIX: '/' yields a float index on Python 3
    return (dec_long(s[:half]), dec_long(s[half:]))


class Encoder:
    '''Fluent builder for length-prefixed binary records.

    Every method returns self so calls can be chained; fetch the
    accumulated string with out().'''

    def __init__(self):
        self._io = StringIO()

    def int(self, n, size=4):
        '''Append n as a fixed-size field.'''
        self._io.write(enc_fixed_long(n, size))
        return self

    def long(self, n, pre=2):
        '''Append n as a length-prefixed variable-size field.'''
        enc = enc_long(n)
        self._io.write(enc_fixed_long(len(enc), pre) + enc)
        return self

    def str(self, s, pre=2):
        '''Append a length-prefixed string.'''
        self._io.write(enc_fixed_long(len(s), pre) + s)
        return self

    def point(self, p, pre=2):
        '''Append a length-prefixed encoded point.'''
        enc = enc_point(p)
        self._io.write(enc_fixed_long(len(enc), pre) + enc)
        return self

    def chunk(self, enc, pre=2):
        '''Append another Encoder's output as a length-prefixed sub-chunk.'''
        data = enc.out()
        self._io.write(enc_fixed_long(len(data), pre) + data)
        return self

    def out(self):
        '''Return everything encoded so far.'''
        return self._io.getvalue()
class Decoder:
    '''Fluent reader for records produced by Encoder.

    Decoded values accumulate in an internal list; fetch them with
    out()/only().  enter()/exit() navigate length-prefixed sub-chunks
    sharing the same underlying stream.'''

    def __init__(self, data, offset=0):
        from io import StringIO   # FIX: py2 'StringIO' module is gone on py3
        self._io = StringIO(data)
        self._io.seek(offset)
        self._res = []
        self._limit = None        # end offset when acting as a sub-decoder
        self._parent = None

    def _ret(self):
        # Hook kept for optional auto-exit behaviour; currently a no-op.
        return self

    def int(self, size=4):
        '''Read a fixed-size integer field.'''
        self._res.append(dec_long(self._io.read(size)))
        return self._ret()

    def long(self, pre=2):
        '''Read a length-prefixed variable-size integer.'''
        llen = dec_long(self._io.read(pre))
        self._res.append(dec_long(self._io.read(llen)))
        return self._ret()

    def str(self, pre=2):
        '''Read a length-prefixed string.'''
        llen = dec_long(self._io.read(pre))
        self._res.append(self._io.read(llen))
        return self._ret()

    def point(self, pre=2):
        '''Read a length-prefixed encoded point.'''
        llen = dec_long(self._io.read(pre))
        self._res.append(dec_point(self._io.read(llen)))
        return self._ret()

    def enter(self, pre=2):
        '''Descend into a length-prefixed sub-chunk (shares the stream).'''
        llen = dec_long(self._io.read(pre))
        sub = Decoder("")
        sub._io = self._io
        sub._parent = self
        sub._limit = self._io.tell() + llen
        return sub

    def chunk(self, pre=2):
        '''Read a length-prefixed sub-chunk into its own Decoder.'''
        llen = dec_long(self._io.read(pre))
        self._res.append(Decoder(self._io.read(llen)))
        return self._ret()

    def exit(self):
        '''Leave a sub-chunk: hand results to the parent and return it.'''
        if not self._parent:
            raise RuntimeError("Cannot exit top level Decoder")
        self._parent._io.seek(self._limit)
        self._parent._res.append(self._res)
        return self._parent

    def continues(self):
        '''True while there is unread data before the chunk limit.'''
        return (not self._limit) or (self._io.tell() < self._limit)

    def out(self, exit_all=False):
        '''Return (and clear) the accumulated results; optionally exit
        all sub-chunks first.'''
        if exit_all and self._parent:
            return self.exit().out()
        res = self._res
        self._res = []
        return res

    def only(self):
        '''Pop and return the single first result.'''
        if not self._res:
            # FIX: the original *returned* the exception tuple instead of raising
            raise RuntimeError("Only what? (Empty decoder stack)")
        return self._res.pop(0)
from Key import Key
import time
from collections import OrderedDict

# FIPS curve sizes exercised by every benchmark below.
KEY_SIZES = (192, 224, 256, 384, 521)


def test_generation_perf(n=100):
    '''Seconds needed to generate n keys, per key size.'''
    results = OrderedDict()
    for bits in KEY_SIZES:
        start = time.time()
        for _ in range(n):
            Key.generate(bits)
        results[bits] = time.time() - start
    return results


def test_signing_perf(n=100):
    '''Seconds needed to produce n signatures, per key size.'''
    results = OrderedDict()
    for bits in KEY_SIZES:
        k = Key.generate(bits)
        start = time.time()
        for _ in range(n):
            k.sign("random string")
        results[bits] = time.time() - start
    return results


def test_verification_perf(n=100):
    '''Seconds needed to verify n signatures, per key size.'''
    results = OrderedDict()
    for bits in KEY_SIZES:
        k = Key.generate(bits)
        s = k.sign("random string")
        start = time.time()
        for _ in range(n):
            k.verify("random string", s)
        results[bits] = time.time() - start
    return results


def print_dict(title, d):
    '''Pretty-print one benchmark result table.'''
    print(title)                      # FIX: py2 print statements
    print('-' * len(title))
    for bits, seconds in d.items():
        print(bits, '\t', seconds)
    print()


if __name__ == '__main__':
    # FIX: originally the benchmarks ran at import time, making the module
    # unusable as a library.
    n = 100
    print_dict("Key generation", test_generation_perf(n))
    print_dict("Signing", test_signing_perf(n))
    print_dict("Verifying", test_verification_perf(n))
+''' + +from random import SystemRandom +random = SystemRandom() +from os import urandom + +def exp(x, n, m): + '''Efficiently compute x ** n mod m''' + y = 1 + z = x + while n > 0: + if n & 1: + y = (y * z) % m + z = (z * z) % m + n //= 2 + return y + + +# Miller-Rabin-Test + +def prime(n, k): + '''Checks whether n is probably prime (with probability 1 - 4**(-k)''' + + if n % 2 == 0: + return False + + d = n - 1 + s = 0 + + while d % 2 == 0: + s += 1 + d /= 2 + + for i in xrange(k): + + a = long(2 + random.randint(0, n - 4)) + x = exp(a, d, n) + if (x == 1) or (x == n - 1): + continue + + for r in xrange(1, s): + x = (x * x) % n + + if x == 1: + return False + + if x == n - 1: + break + + else: + return False + return True + + +# Generate and Test Algorithms + +def get_prime(size, accuracy): + '''Generate a pseudorandom prime number with the specified size (bytes).''' + + while 1: + + # read some random data from the operating system + rstr = urandom(size - 1) + r = 128 | ord(urandom(1)) # MSB = 1 (not less than size) + for c in rstr: + r = (r << 8) | ord(c) + r |= 1 # LSB = 1 (odd) + + # test whether this results in a prime number + if prime(r, accuracy): + return r + + +def get_prime_upto(n, accuracy): + '''Find largest prime less than n''' + n |= 1 + while n > 0: + n -= 2 + if prime(n, accuracy): + return n diff --git a/python/PyECC/ecc/shacrypt.py b/python/PyECC/ecc/shacrypt.py new file mode 100644 index 000000000..69ee7b943 --- /dev/null +++ b/python/PyECC/ecc/shacrypt.py @@ -0,0 +1,38 @@ +# ------------------------------------------------------------------------------ +# +# SHA-512-BASED FEISTEL CIPHER +# by Toni Mattis +# +# Feistel Function: SHA-512(Block || Key) +# Key Size: Fully Dynamic +# Block Size: 1024 Bits +# Rounds: User-Specified +# +# ------------------------------------------------------------------------------ + +from hashlib import sha512 + +BPOS = tuple(range(64)) + +def enc_block(block, key, rounds = 16): + x = block[:64] + y = block[64:] + 
def dec_block(block, key, rounds=16):
    '''Decrypt one 128-byte Feistel block: enc_block's rounds in reverse.

    `block` must be 128 bytes and `key` arbitrary bytes (Python 3 bytes
    semantics; FIX: the original py2 chr/ord-over-str idiom fails on py3).
    The round function is SHA-512(half || key) XORed into the other half.'''
    x = block[:64]
    y = block[64:]
    for _ in range(rounds):
        h = sha512(y + key).digest()
        x = bytes(a ^ b for a, b in zip(x, h))
        h = sha512(x + key).digest()
        y = bytes(a ^ b for a, b in zip(y, h))
    return x + y
+# +# copied from https://github.com/bear/python-twitter/blob/master/setup.py +# + +'''The setup and build script for the python-twitter library.''' + +__author__ = 'niccokunzmann@aol.com' +__version__ = '0.0.1' + + +# The base package metadata to be used by both distutils and setuptools +METADATA = dict( + name = "ecc", + version = __version__, + packages = ['ecc'], + author='Toni Mattis', + author_email='solaris@live.de', + description='Pure Python implementation of an elliptic curve cryptosystem based on FIPS 186-3', + license='MIT', + url='https://github.com/niccokunzmann/ecc', + keywords='elliptic curve cryptosystem rabbit cipher', +) + +# Extra package metadata to be used only if setuptools is installed +SETUPTOOLS_METADATA = dict( + install_requires = [], + include_package_data = True, + classifiers = [ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: MIT License', + 'Topic :: Software Development :: Libraries :: Python Modules', + 'Topic :: Communications', + 'Topic :: Security :: Cryptography', + 'Topic :: Internet', + ], +## test_suite = 'distacc_test', +) + + +def Read(file): + return open(file).read() + +def BuildLongDescription(): + return '\n'.join([Read('README.md'), ]) + +def Main(): + # Build the long_description from the README and CHANGES + METADATA['long_description'] = BuildLongDescription() + + # Use setuptools if available, otherwise fallback and use distutils + try: + import setuptools + METADATA.update(SETUPTOOLS_METADATA) + setuptools.setup(**METADATA) + except ImportError: + import distutils.core + distutils.core.setup(**METADATA) + + +if __name__ == '__main__': + Main() diff --git a/python/README b/python/README new file mode 100644 index 000000000..f9e677b5e --- /dev/null +++ b/python/README @@ -0,0 +1,21 @@ +This directory contains common Python code. 
+ +The basic rule is that if Python code is cross-module (that's "module" in the +Mozilla meaning - as in "module ownership") and is MPL-compatible, it should +go here. + +What should not go here: + +* Python that is not MPL-compatible (see other-licenses/) +* Python that has good reason to remain close to its "owning" (Mozilla) + module (e.g. it is only being consumed from there). + +Historical information can be found at +https://bugzilla.mozilla.org/show_bug.cgi?id=775243 + +## pyyaml | pystache + +Used in taskcluster related mach commands to update download from github +and remove .git and tests. + +Then run tests in taskcluster/tests/ diff --git a/python/altgraph/MANIFEST.in b/python/altgraph/MANIFEST.in new file mode 100644 index 000000000..9a9b96078 --- /dev/null +++ b/python/altgraph/MANIFEST.in @@ -0,0 +1,9 @@ +include ReadMe.txt +include *.txt MANIFEST.in *.py +graft doc +graft doc/_static +graft doc/_templates +graft altgraph_tests +global-exclude .DS_Store +global-exclude *.pyc +global-exclude *.so diff --git a/python/altgraph/PKG-INFO b/python/altgraph/PKG-INFO new file mode 100644 index 000000000..87b602f52 --- /dev/null +++ b/python/altgraph/PKG-INFO @@ -0,0 +1,216 @@ +Metadata-Version: 1.1 +Name: altgraph +Version: 0.12 +Summary: Python graph (network) package +Home-page: http://packages.python.org/altgraph +Author: Ronald Oussoren +Author-email: ronaldoussoren@mac.com +License: MIT +Download-URL: http://pypi.python.org/pypi/altgraph +Description: altgraph is a fork of graphlib: a graph (network) package for constructing + graphs, BFS and DFS traversals, topological sort, shortest paths, etc. with + graphviz output. + + altgraph includes some additional usage of Python 2.6+ features and + enhancements related to modulegraph and macholib. + + + Release history + =============== + + 0.12 + ---- + + - Added ``ObjectGraph.edgeData`` to retrieve the edge data + from a specific edge. 
+ + - Added ``AltGraph.update_edge_data`` and ``ObjectGraph.updateEdgeData`` + to update the data associated with a graph edge. + + 0.11 + ---- + + - Stabilize the order of elements in dot file exports, + patch from bitbucket user 'pombredanne'. + + - Tweak setup.py file to remove dependency on distribute (but + keep the dependency on setuptools) + + + 0.10.2 + ------ + + - There where no classifiers in the package metadata due to a bug + in setup.py + + 0.10.1 + ------ + + This is a bugfix release + + Bug fixes: + + - Issue #3: The source archive contains a README.txt + while the setup file refers to ReadMe.txt. + + This is caused by a misfeature in distutils, as a + workaround I've renamed ReadMe.txt to README.txt + in the source tree and setup file. + + + 0.10 + ----- + + This is a minor feature release + + Features: + + - Do not use "2to3" to support Python 3. + + As a side effect of this altgraph now supports + Python 2.6 and later, and no longer supports + earlier releases of Python. + + - The order of attributes in the Dot output + is now always alphabetical. + + With this change the output will be consistent + between runs and Python versions. + + 0.9 + --- + + This is a minor bugfix release + + Features: + + - Added ``altgraph.ObjectGraph.ObjectGraph.nodes``, a method + yielding all nodes in an object graph. + + Bugfixes: + + - The 0.8 release didn't work with py2app when using + python 3.x. + + + 0.8 + ----- + + This is a minor feature release. The major new feature + is a extensive set of unittests, which explains almost + all other changes in this release. + + Bugfixes: + + - Installing failed with Python 2.5 due to using a distutils + class that isn't available in that version of Python + (issue #1 on the issue tracker) + + - ``altgraph.GraphStat.degree_dist`` now actually works + + - ``altgraph.Graph.add_edge(a, b, create_nodes=False)`` will + no longer create the edge when one of the nodes doesn't + exist. 
+ + - ``altgraph.Graph.forw_topo_sort`` failed for some sparse graphs. + + - ``altgraph.Graph.back_topo_sort`` was completely broken in + previous releases. + + - ``altgraph.Graph.forw_bfs_subgraph`` now actually works. + + - ``altgraph.Graph.back_bfs_subgraph`` now actually works. + + - ``altgraph.Graph.iterdfs`` now returns the correct result + when the ``forward`` argument is ``False``. + + - ``altgraph.Graph.iterdata`` now returns the correct result + when the ``forward`` argument is ``False``. + + + Features: + + - The ``altgraph.Graph`` constructor now accepts an argument + that contains 2- and 3-tuples instead of requireing that + all items have the same size. The (optional) argument can now + also be any iterator. + + - ``altgraph.Graph.Graph.add_node`` has no effect when you + add a hidden node. + + - The private method ``altgraph.Graph._bfs`` is no longer + present. + + - The private method ``altgraph.Graph._dfs`` is no longer + present. + + - ``altgraph.ObjectGraph`` now has a ``__contains__`` methods, + which means you can use the ``in`` operator to check if a + node is part of a graph. + + - ``altgraph.GraphUtil.generate_random_graph`` will raise + ``GraphError`` instead of looping forever when it is + impossible to create the requested graph. + + - ``altgraph.Dot.edge_style`` raises ``GraphError`` when + one of the nodes is not present in the graph. The method + silently added the tail in the past, but without ensuring + a consistent graph state. + + - ``altgraph.Dot.save_img`` now works when the mode is + ``"neato"``. + + 0.7.2 + ----- + + This is a minor bugfix release + + Bugfixes: + + - distutils didn't include the documentation subtree + + 0.7.1 + ----- + + This is a minor feature release + + Features: + + - Documentation is now generated using `sphinx `_ + and can be viewed at . + + - The repository has moved to bitbucket + + - ``altgraph.GraphStat.avg_hops`` is no longer present, the function had no + implementation and no specified behaviour. 
+ + - the module ``altgraph.compat`` is gone, which means altgraph will no + longer work with Python 2.3. + + + 0.7.0 + ----- + + This is a minor feature release. + + Features: + + - Support for Python 3 + + - It is now possible to run tests using 'python setup.py test' + + (The actual testsuite is still very minimal though) + +Keywords: graph +Platform: any +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Scientific/Engineering :: Mathematics +Classifier: Topic :: Scientific/Engineering :: Visualization diff --git a/python/altgraph/README.txt b/python/altgraph/README.txt new file mode 100644 index 000000000..904a14be5 --- /dev/null +++ b/python/altgraph/README.txt @@ -0,0 +1,6 @@ +altgraph is a fork of graphlib: a graph (network) package for constructing +graphs, BFS and DFS traversals, topological sort, shortest paths, etc. with +graphviz output. + +altgraph includes some additional usage of Python 2.6+ features and +enhancements related to modulegraph and macholib. diff --git a/python/altgraph/altgraph/Dot.py b/python/altgraph/altgraph/Dot.py new file mode 100644 index 000000000..49a471e4d --- /dev/null +++ b/python/altgraph/altgraph/Dot.py @@ -0,0 +1,299 @@ +''' +altgraph.Dot - Interface to the dot language +============================================ + +The :py:mod:`~altgraph.Dot` module provides a simple interface to the +file format used in the `graphviz `_ +program. The module is intended to offload the most tedious part of the process +(the **dot** file generation) while transparently exposing most of its features. 
+ +To display the graphs or to generate image files the `graphviz `_ +package needs to be installed on the system, moreover the :command:`dot` and :command:`dotty` programs must +be accesible in the program path so that they can be ran from processes spawned +within the module. + +Example usage +------------- + +Here is a typical usage:: + + from altgraph import Graph, Dot + + # create a graph + edges = [ (1,2), (1,3), (3,4), (3,5), (4,5), (5,4) ] + graph = Graph.Graph(edges) + + # create a dot representation of the graph + dot = Dot.Dot(graph) + + # display the graph + dot.display() + + # save the dot representation into the mydot.dot file + dot.save_dot(file_name='mydot.dot') + + # save dot file as gif image into the graph.gif file + dot.save_img(file_name='graph', file_type='gif') + +Directed graph and non-directed graph +------------------------------------- + +Dot class can use for both directed graph and non-directed graph +by passing ``graphtype`` parameter. + +Example:: + + # create directed graph(default) + dot = Dot.Dot(graph, graphtype="digraph") + + # create non-directed graph + dot = Dot.Dot(graph, graphtype="graph") + +Customizing the output +---------------------- + +The graph drawing process may be customized by passing +valid :command:`dot` parameters for the nodes and edges. For a list of all +parameters see the `graphviz `_ +documentation. + +Example:: + + # customizing the way the overall graph is drawn + dot.style(size='10,10', rankdir='RL', page='5, 5' , ranksep=0.75) + + # customizing node drawing + dot.node_style(1, label='BASE_NODE',shape='box', color='blue' ) + dot.node_style(2, style='filled', fillcolor='red') + + # customizing edge drawing + dot.edge_style(1, 2, style='dotted') + dot.edge_style(3, 5, arrowhead='dot', label='binds', labelangle='90') + dot.edge_style(4, 5, arrowsize=2, style='bold') + + +.. note:: + + dotty (invoked via :py:func:`~altgraph.Dot.display`) may not be able to + display all graphics styles. 
To verify the output save it to an image file + and look at it that way. + +Valid attributes +---------------- + + - dot styles, passed via the :py:meth:`Dot.style` method:: + + rankdir = 'LR' (draws the graph horizontally, left to right) + ranksep = number (rank separation in inches) + + - node attributes, passed via the :py:meth:`Dot.node_style` method:: + + style = 'filled' | 'invisible' | 'diagonals' | 'rounded' + shape = 'box' | 'ellipse' | 'circle' | 'point' | 'triangle' + + - edge attributes, passed via the :py:meth:`Dot.edge_style` method:: + + style = 'dashed' | 'dotted' | 'solid' | 'invis' | 'bold' + arrowhead = 'box' | 'crow' | 'diamond' | 'dot' | 'inv' | 'none' | 'tee' | 'vee' + weight = number (the larger the number the closer the nodes will be) + + - valid `graphviz colors `_ + + - for more details on how to control the graph drawing process see the + `graphviz reference `_. +''' +import os +import warnings + +from altgraph import GraphError + + +class Dot(object): + ''' + A class providing a **graphviz** (dot language) representation + allowing a fine grained control over how the graph is being + displayed. + + If the :command:`dot` and :command:`dotty` programs are not in the current system path + their location needs to be specified in the contructor. + ''' + + def __init__(self, graph=None, nodes=None, edgefn=None, nodevisitor=None, edgevisitor=None, name="G", dot='dot', dotty='dotty', neato='neato', graphtype="digraph"): + ''' + Initialization. 
+ ''' + self.name, self.attr = name, {} + + assert graphtype in ['graph', 'digraph'] + self.type = graphtype + + self.temp_dot = "tmp_dot.dot" + self.temp_neo = "tmp_neo.dot" + + self.dot, self.dotty, self.neato = dot, dotty, neato + + # self.nodes: node styles + # self.edges: edge styles + self.nodes, self.edges = {}, {} + + if graph is not None and nodes is None: + nodes = graph + if graph is not None and edgefn is None: + def edgefn(node, graph=graph): + return graph.out_nbrs(node) + if nodes is None: + nodes = () + + seen = set() + for node in nodes: + if nodevisitor is None: + style = {} + else: + style = nodevisitor(node) + if style is not None: + self.nodes[node] = {} + self.node_style(node, **style) + seen.add(node) + if edgefn is not None: + for head in seen: + for tail in (n for n in edgefn(head) if n in seen): + if edgevisitor is None: + edgestyle = {} + else: + edgestyle = edgevisitor(head, tail) + if edgestyle is not None: + if head not in self.edges: + self.edges[head] = {} + self.edges[head][tail] = {} + self.edge_style(head, tail, **edgestyle) + + def style(self, **attr): + ''' + Changes the overall style + ''' + self.attr = attr + + def display(self, mode='dot'): + ''' + Displays the current graph via dotty + ''' + + if mode == 'neato': + self.save_dot(self.temp_neo) + neato_cmd = "%s -o %s %s" % (self.neato, self.temp_dot, self.temp_neo) + os.system(neato_cmd) + else: + self.save_dot(self.temp_dot) + + plot_cmd = "%s %s" % (self.dotty, self.temp_dot) + os.system(plot_cmd) + + def node_style(self, node, **kwargs): + ''' + Modifies a node style to the dot representation. + ''' + if node not in self.edges: + self.edges[node] = {} + self.nodes[node] = kwargs + + def all_node_style(self, **kwargs): + ''' + Modifies all node styles + ''' + for node in self.nodes: + self.node_style(node, **kwargs) + + def edge_style(self, head, tail, **kwargs): + ''' + Modifies an edge style to the dot representation. 
+ ''' + if tail not in self.nodes: + raise GraphError("invalid node %s" % (tail,)) + + try: + if tail not in self.edges[head]: + self.edges[head][tail]= {} + self.edges[head][tail] = kwargs + except KeyError: + raise GraphError("invalid edge %s -> %s " % (head, tail) ) + + def iterdot(self): + # write graph title + if self.type == 'digraph': + yield 'digraph %s {\n' % (self.name,) + elif self.type == 'graph': + yield 'graph %s {\n' % (self.name,) + + else: + raise GraphError("unsupported graphtype %s" % (self.type,)) + + # write overall graph attributes + for attr_name, attr_value in sorted(self.attr.items()): + yield '%s="%s";' % (attr_name, attr_value) + yield '\n' + + # some reusable patterns + cpatt = '%s="%s",' # to separate attributes + epatt = '];\n' # to end attributes + + # write node attributes + for node_name, node_attr in sorted(self.nodes.items()): + yield '\t"%s" [' % (node_name,) + for attr_name, attr_value in sorted(node_attr.items()): + yield cpatt % (attr_name, attr_value) + yield epatt + + # write edge attributes + for head in sorted(self.edges): + for tail in sorted(self.edges[head]): + if self.type == 'digraph': + yield '\t"%s" -> "%s" [' % (head, tail) + else: + yield '\t"%s" -- "%s" [' % (head, tail) + for attr_name, attr_value in sorted(self.edges[head][tail].items()): + yield cpatt % (attr_name, attr_value) + yield epatt + + # finish file + yield '}\n' + + def __iter__(self): + return self.iterdot() + + def save_dot(self, file_name=None): + ''' + Saves the current graph representation into a file + ''' + + if not file_name: + warnings.warn(DeprecationWarning, "always pass a file_name") + file_name = self.temp_dot + + fp = open(file_name, "w") + try: + for chunk in self.iterdot(): + fp.write(chunk) + finally: + fp.close() + + def save_img(self, file_name=None, file_type="gif", mode='dot'): + ''' + Saves the dot file as an image file + ''' + + if not file_name: + warnings.warn(DeprecationWarning, "always pass a file_name") + file_name = "out" 
+ + if mode == 'neato': + self.save_dot(self.temp_neo) + neato_cmd = "%s -o %s %s" % (self.neato, self.temp_dot, self.temp_neo) + os.system(neato_cmd) + plot_cmd = self.dot + else: + self.save_dot(self.temp_dot) + plot_cmd = self.dot + + file_name = "%s.%s" % (file_name, file_type) + create_cmd = "%s -T%s %s -o %s" % (plot_cmd, file_type, self.temp_dot, file_name) + os.system(create_cmd) diff --git a/python/altgraph/altgraph/Graph.py b/python/altgraph/altgraph/Graph.py new file mode 100644 index 000000000..491e5c228 --- /dev/null +++ b/python/altgraph/altgraph/Graph.py @@ -0,0 +1,677 @@ +""" +altgraph.Graph - Base Graph class +================================= + +.. + #--Version 2.1 + #--Bob Ippolito October, 2004 + + #--Version 2.0 + #--Istvan Albert June, 2004 + + #--Version 1.0 + #--Nathan Denny, May 27, 1999 +""" + +from altgraph import GraphError +from collections import deque + +class Graph(object): + """ + The Graph class represents a directed graph with *N* nodes and *E* edges. + + Naming conventions: + + - the prefixes such as *out*, *inc* and *all* will refer to methods + that operate on the outgoing, incoming or all edges of that node. + + For example: :py:meth:`inc_degree` will refer to the degree of the node + computed over the incoming edges (the number of neighbours linking to + the node). + + - the prefixes such as *forw* and *back* will refer to the + orientation of the edges used in the method with respect to the node. + + For example: :py:meth:`forw_bfs` will start at the node then use the outgoing + edges to traverse the graph (goes forward). 
+ """ + + def __init__(self, edges=None): + """ + Initialization + """ + + self.next_edge = 0 + self.nodes, self.edges = {}, {} + self.hidden_edges, self.hidden_nodes = {}, {} + + if edges is not None: + for item in edges: + if len(item) == 2: + head, tail = item + self.add_edge(head, tail) + elif len(item) == 3: + head, tail, data = item + self.add_edge(head, tail, data) + else: + raise GraphError("Cannot create edge from %s"%(item,)) + + + def __repr__(self): + return '' % ( + self.number_of_nodes(), self.number_of_edges()) + + def add_node(self, node, node_data=None): + """ + Adds a new node to the graph. Arbitrary data can be attached to the + node via the node_data parameter. Adding the same node twice will be + silently ignored. + + The node must be a hashable value. + """ + # + # the nodes will contain tuples that will store incoming edges, + # outgoing edges and data + # + # index 0 -> incoming edges + # index 1 -> outgoing edges + + if node in self.hidden_nodes: + # Node is present, but hidden + return + + if node not in self.nodes: + self.nodes[node] = ([], [], node_data) + + def add_edge(self, head_id, tail_id, edge_data=1, create_nodes=True): + """ + Adds a directed edge going from head_id to tail_id. + Arbitrary data can be attached to the edge via edge_data. + It may create the nodes if adding edges between nonexisting ones. 
+ + :param head_id: head node + :param tail_id: tail node + :param edge_data: (optional) data attached to the edge + :param create_nodes: (optional) creates the head_id or tail_id node in case they did not exist + """ + # shorcut + edge = self.next_edge + + # add nodes if on automatic node creation + if create_nodes: + self.add_node(head_id) + self.add_node(tail_id) + + # update the corresponding incoming and outgoing lists in the nodes + # index 0 -> incoming edges + # index 1 -> outgoing edges + + try: + self.nodes[tail_id][0].append(edge) + self.nodes[head_id][1].append(edge) + except KeyError: + raise GraphError('Invalid nodes %s -> %s' % (head_id, tail_id)) + + # store edge information + self.edges[edge] = (head_id, tail_id, edge_data) + + + self.next_edge += 1 + + def hide_edge(self, edge): + """ + Hides an edge from the graph. The edge may be unhidden at some later + time. + """ + try: + head_id, tail_id, edge_data = self.hidden_edges[edge] = self.edges[edge] + self.nodes[tail_id][0].remove(edge) + self.nodes[head_id][1].remove(edge) + del self.edges[edge] + except KeyError: + raise GraphError('Invalid edge %s' % edge) + + def hide_node(self, node): + """ + Hides a node from the graph. The incoming and outgoing edges of the + node will also be hidden. The node may be unhidden at some later time. + """ + try: + all_edges = self.all_edges(node) + self.hidden_nodes[node] = (self.nodes[node], all_edges) + for edge in all_edges: + self.hide_edge(edge) + del self.nodes[node] + except KeyError: + raise GraphError('Invalid node %s' % node) + + def restore_node(self, node): + """ + Restores a previously hidden node back into the graph and restores + all of its incoming and outgoing edges. 
+ """ + try: + self.nodes[node], all_edges = self.hidden_nodes[node] + for edge in all_edges: + self.restore_edge(edge) + del self.hidden_nodes[node] + except KeyError: + raise GraphError('Invalid node %s' % node) + + def restore_edge(self, edge): + """ + Restores a previously hidden edge back into the graph. + """ + try: + head_id, tail_id, data = self.hidden_edges[edge] + self.nodes[tail_id][0].append(edge) + self.nodes[head_id][1].append(edge) + self.edges[edge] = head_id, tail_id, data + del self.hidden_edges[edge] + except KeyError: + raise GraphError('Invalid edge %s' % edge) + + def restore_all_edges(self): + """ + Restores all hidden edges. + """ + for edge in list(self.hidden_edges.keys()): + try: + self.restore_edge(edge) + except GraphError: + pass + + def restore_all_nodes(self): + """ + Restores all hidden nodes. + """ + for node in list(self.hidden_nodes.keys()): + self.restore_node(node) + + def __contains__(self, node): + """ + Test whether a node is in the graph + """ + return node in self.nodes + + def edge_by_id(self, edge): + """ + Returns the edge that connects the head_id and tail_id nodes + """ + try: + head, tail, data = self.edges[edge] + except KeyError: + head, tail = None, None + raise GraphError('Invalid edge %s' % edge) + + return (head, tail) + + def edge_by_node(self, head, tail): + """ + Returns the edge that connects the head_id and tail_id nodes + """ + for edge in self.out_edges(head): + if self.tail(edge) == tail: + return edge + return None + + def number_of_nodes(self): + """ + Returns the number of nodes + """ + return len(self.nodes) + + def number_of_edges(self): + """ + Returns the number of edges + """ + return len(self.edges) + + def __iter__(self): + """ + Iterates over all nodes in the graph + """ + return iter(self.nodes) + + def node_list(self): + """ + Return a list of the node ids for all visible nodes in the graph. 
+ """ + return list(self.nodes.keys()) + + def edge_list(self): + """ + Returns an iterator for all visible nodes in the graph. + """ + return list(self.edges.keys()) + + def number_of_hidden_edges(self): + """ + Returns the number of hidden edges + """ + return len(self.hidden_edges) + + def number_of_hidden_nodes(self): + """ + Returns the number of hidden nodes + """ + return len(self.hidden_nodes) + + def hidden_node_list(self): + """ + Returns the list with the hidden nodes + """ + return list(self.hidden_nodes.keys()) + + def hidden_edge_list(self): + """ + Returns a list with the hidden edges + """ + return list(self.hidden_edges.keys()) + + def describe_node(self, node): + """ + return node, node data, outgoing edges, incoming edges for node + """ + incoming, outgoing, data = self.nodes[node] + return node, data, outgoing, incoming + + def describe_edge(self, edge): + """ + return edge, edge data, head, tail for edge + """ + head, tail, data = self.edges[edge] + return edge, data, head, tail + + def node_data(self, node): + """ + Returns the data associated with a node + """ + return self.nodes[node][2] + + def edge_data(self, edge): + """ + Returns the data associated with an edge + """ + return self.edges[edge][2] + + def update_edge_data(self, edge, edge_data): + """ + Replace the edge data for a specific edge + """ + self.edges[edge] = self.edges[edge][0:2] + (edge_data,) + + def head(self, edge): + """ + Returns the node of the head of the edge. + """ + return self.edges[edge][0] + + def tail(self, edge): + """ + Returns node of the tail of the edge. 
+ """ + return self.edges[edge][1] + + def out_nbrs(self, node): + """ + List of nodes connected by outgoing edges + """ + l = [self.tail(n) for n in self.out_edges(node)] + return l + + def inc_nbrs(self, node): + """ + List of nodes connected by incoming edges + """ + l = [self.head(n) for n in self.inc_edges(node)] + return l + + def all_nbrs(self, node): + """ + List of nodes connected by incoming and outgoing edges + """ + l = dict.fromkeys( self.inc_nbrs(node) + self.out_nbrs(node) ) + return list(l) + + def out_edges(self, node): + """ + Returns a list of the outgoing edges + """ + try: + return list(self.nodes[node][1]) + except KeyError: + raise GraphError('Invalid node %s' % node) + + return None + + def inc_edges(self, node): + """ + Returns a list of the incoming edges + """ + try: + return list(self.nodes[node][0]) + except KeyError: + raise GraphError('Invalid node %s' % node) + + return None + + def all_edges(self, node): + """ + Returns a list of incoming and outging edges. + """ + return set(self.inc_edges(node) + self.out_edges(node)) + + def out_degree(self, node): + """ + Returns the number of outgoing edges + """ + return len(self.out_edges(node)) + + def inc_degree(self, node): + """ + Returns the number of incoming edges + """ + return len(self.inc_edges(node)) + + def all_degree(self, node): + """ + The total degree of a node + """ + return self.inc_degree(node) + self.out_degree(node) + + def _topo_sort(self, forward=True): + """ + Topological sort. + + Returns a list of nodes where the successors (based on outgoing and + incoming edges selected by the forward parameter) of any given node + appear in the sequence after that node. 
+ """ + topo_list = [] + queue = deque() + indeg = {} + + # select the operation that will be performed + if forward: + get_edges = self.out_edges + get_degree = self.inc_degree + get_next = self.tail + else: + get_edges = self.inc_edges + get_degree = self.out_degree + get_next = self.head + + for node in self.node_list(): + degree = get_degree(node) + if degree: + indeg[node] = degree + else: + queue.append(node) + + while queue: + curr_node = queue.popleft() + topo_list.append(curr_node) + for edge in get_edges(curr_node): + tail_id = get_next(edge) + if tail_id in indeg: + indeg[tail_id] -= 1 + if indeg[tail_id] == 0: + queue.append(tail_id) + + if len(topo_list) == len(self.node_list()): + valid = True + else: + # the graph has cycles, invalid topological sort + valid = False + + return (valid, topo_list) + + def forw_topo_sort(self): + """ + Topological sort. + + Returns a list of nodes where the successors (based on outgoing edges) + of any given node appear in the sequence after that node. + """ + return self._topo_sort(forward=True) + + def back_topo_sort(self): + """ + Reverse topological sort. + + Returns a list of nodes where the successors (based on incoming edges) + of any given node appear in the sequence after that node. + """ + return self._topo_sort(forward=False) + + def _bfs_subgraph(self, start_id, forward=True): + """ + Private method creates a subgraph in a bfs order. + + The forward parameter specifies whether it is a forward or backward + traversal. + """ + if forward: + get_bfs = self.forw_bfs + get_nbrs = self.out_nbrs + else: + get_bfs = self.back_bfs + get_nbrs = self.inc_nbrs + + g = Graph() + bfs_list = get_bfs(start_id) + for node in bfs_list: + g.add_node(node) + + for node in bfs_list: + for nbr_id in get_nbrs(node): + g.add_edge(node, nbr_id) + + return g + + def forw_bfs_subgraph(self, start_id): + """ + Creates and returns a subgraph consisting of the breadth first + reachable nodes based on their outgoing edges. 
+ """ + return self._bfs_subgraph(start_id, forward=True) + + def back_bfs_subgraph(self, start_id): + """ + Creates and returns a subgraph consisting of the breadth first + reachable nodes based on the incoming edges. + """ + return self._bfs_subgraph(start_id, forward=False) + + def iterdfs(self, start, end=None, forward=True): + """ + Collecting nodes in some depth first traversal. + + The forward parameter specifies whether it is a forward or backward + traversal. + """ + visited, stack = set([start]), deque([start]) + + if forward: + get_edges = self.out_edges + get_next = self.tail + else: + get_edges = self.inc_edges + get_next = self.head + + while stack: + curr_node = stack.pop() + yield curr_node + if curr_node == end: + break + for edge in sorted(get_edges(curr_node)): + tail = get_next(edge) + if tail not in visited: + visited.add(tail) + stack.append(tail) + + def iterdata(self, start, end=None, forward=True, condition=None): + """ + Perform a depth-first walk of the graph (as ``iterdfs``) + and yield the item data of every node where condition matches. The + condition callback is only called when node_data is not None. + """ + + visited, stack = set([start]), deque([start]) + + if forward: + get_edges = self.out_edges + get_next = self.tail + else: + get_edges = self.inc_edges + get_next = self.head + + get_data = self.node_data + + while stack: + curr_node = stack.pop() + curr_data = get_data(curr_node) + if curr_data is not None: + if condition is not None and not condition(curr_data): + continue + yield curr_data + if curr_node == end: + break + for edge in get_edges(curr_node): + tail = get_next(edge) + if tail not in visited: + visited.add(tail) + stack.append(tail) + + def _iterbfs(self, start, end=None, forward=True): + """ + The forward parameter specifies whether it is a forward or backward + traversal. Returns a list of tuples where the first value is the hop + value the second value is the node id. 
+ """ + queue, visited = deque([(start, 0)]), set([start]) + + # the direction of the bfs depends on the edges that are sampled + if forward: + get_edges = self.out_edges + get_next = self.tail + else: + get_edges = self.inc_edges + get_next = self.head + + while queue: + curr_node, curr_step = queue.popleft() + yield (curr_node, curr_step) + if curr_node == end: + break + for edge in get_edges(curr_node): + tail = get_next(edge) + if tail not in visited: + visited.add(tail) + queue.append((tail, curr_step + 1)) + + + def forw_bfs(self, start, end=None): + """ + Returns a list of nodes in some forward BFS order. + + Starting from the start node the breadth first search proceeds along + outgoing edges. + """ + return [node for node, step in self._iterbfs(start, end, forward=True)] + + def back_bfs(self, start, end=None): + """ + Returns a list of nodes in some backward BFS order. + + Starting from the start node the breadth first search proceeds along + incoming edges. + """ + return [node for node, step in self._iterbfs(start, end, forward=False)] + + def forw_dfs(self, start, end=None): + """ + Returns a list of nodes in some forward DFS order. + + Starting with the start node the depth first search proceeds along + outgoing edges. + """ + return list(self.iterdfs(start, end, forward=True)) + + def back_dfs(self, start, end=None): + """ + Returns a list of nodes in some backward DFS order. + + Starting from the start node the depth first search proceeds along + incoming edges. + """ + return list(self.iterdfs(start, end, forward=False)) + + def connected(self): + """ + Returns :py:data:`True` if the graph's every node can be reached from every + other node. + """ + node_list = self.node_list() + for node in node_list: + bfs_list = self.forw_bfs(node) + if len(bfs_list) != len(node_list): + return False + return True + + def clust_coef(self, node): + """ + Computes and returns the local clustering coefficient of node. 
The + local cluster coefficient is proportion of the actual number of edges between + neighbours of node and the maximum number of edges between those neighbours. + + See + for a formal definition. + """ + num = 0 + nbr_set = set(self.out_nbrs(node)) + + if node in nbr_set: + nbr_set.remove(node) # loop defense + + for nbr in nbr_set: + sec_set = set(self.out_nbrs(nbr)) + if nbr in sec_set: + sec_set.remove(nbr) # loop defense + num += len(nbr_set & sec_set) + + nbr_num = len(nbr_set) + if nbr_num: + clust_coef = float(num) / (nbr_num * (nbr_num - 1)) + else: + clust_coef = 0.0 + return clust_coef + + def get_hops(self, start, end=None, forward=True): + """ + Computes the hop distance to all nodes centered around a specified node. + + First order neighbours are at hop 1, their neigbours are at hop 2 etc. + Uses :py:meth:`forw_bfs` or :py:meth:`back_bfs` depending on the value of the forward + parameter. If the distance between all neighbouring nodes is 1 the hop + number corresponds to the shortest distance between the nodes. + + :param start: the starting node + :param end: ending node (optional). When not specified will search the whole graph. + :param forward: directionality parameter (optional). If C{True} (default) it uses L{forw_bfs} otherwise L{back_bfs}. + :return: returns a list of tuples where each tuple contains the node and the hop. + + Typical usage:: + + >>> print (graph.get_hops(1, 8)) + >>> [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)] + # node 1 is at 0 hops + # node 2 is at 1 hop + # ... 
+ # node 8 is at 5 hops + """ + if forward: + return list(self._iterbfs(start=start, end=end, forward=True)) + else: + return list(self._iterbfs(start=start, end=end, forward=False)) diff --git a/python/altgraph/altgraph/GraphAlgo.py b/python/altgraph/altgraph/GraphAlgo.py new file mode 100644 index 000000000..9e6fff2b1 --- /dev/null +++ b/python/altgraph/altgraph/GraphAlgo.py @@ -0,0 +1,147 @@ +''' +altgraph.GraphAlgo - Graph algorithms +===================================== +''' +from altgraph import GraphError + +def dijkstra(graph, start, end=None): + """ + Dijkstra's algorithm for shortest paths + + `David Eppstein, UC Irvine, 4 April 2002 `_ + + `Python Cookbook Recipe `_ + + Find shortest paths from the start node to all nodes nearer than or equal to the end node. + + Dijkstra's algorithm is only guaranteed to work correctly when all edge lengths are positive. + This code does not verify this property for all edges (only the edges examined until the end + vertex is reached), but will correctly compute shortest paths even for some graphs with negative + edges, and will raise an exception if it discovers that a negative edge has caused it to make a mistake. + + *Adapted to altgraph by Istvan Albert, Pennsylvania State University - June, 9 2004* + + """ + D = {} # dictionary of final distances + P = {} # dictionary of predecessors + Q = _priorityDictionary() # estimated distances of non-final vertices + Q[start] = 0 + + for v in Q: + D[v] = Q[v] + if v == end: break + + for w in graph.out_nbrs(v): + edge_id = graph.edge_by_node(v,w) + vwLength = D[v] + graph.edge_data(edge_id) + if w in D: + if vwLength < D[w]: + raise GraphError("Dijkstra: found better path to already-final vertex") + elif w not in Q or vwLength < Q[w]: + Q[w] = vwLength + P[w] = v + + return (D,P) + +def shortest_path(graph, start, end): + """ + Find a single shortest path from the given start node to the given end node. + The input has the same conventions as dijkstra(). 
The output is a list of the nodes + in order along the shortest path. + + **Note that the distances must be stored in the edge data as numeric data** + """ + + D,P = dijkstra(graph, start, end) + Path = [] + while 1: + Path.append(end) + if end == start: break + end = P[end] + Path.reverse() + return Path + +# +# Utility classes and functions +# +class _priorityDictionary(dict): + ''' + Priority dictionary using binary heaps (internal use only) + + David Eppstein, UC Irvine, 8 Mar 2002 + + Implements a data structure that acts almost like a dictionary, with two modifications: + 1. D.smallest() returns the value x minimizing D[x]. For this to work correctly, + all values D[x] stored in the dictionary must be comparable. + 2. iterating "for x in D" finds and removes the items from D in sorted order. + Each item is not removed until the next item is requested, so D[x] will still + return a useful value until the next iteration of the for-loop. + Each operation takes logarithmic amortized time. + ''' + def __init__(self): + ''' + Initialize priorityDictionary by creating binary heap of pairs (value,key). + Note that changing or removing a dict entry will not remove the old pair from the heap + until it is found by smallest() or until the heap is rebuilt. + ''' + self.__heap = [] + dict.__init__(self) + + def smallest(self): + ''' + Find smallest item after removing deleted items from front of heap. 
+ ''' + if len(self) == 0: + raise IndexError("smallest of empty priorityDictionary") + heap = self.__heap + while heap[0][1] not in self or self[heap[0][1]] != heap[0][0]: + lastItem = heap.pop() + insertionPoint = 0 + while 1: + smallChild = 2*insertionPoint+1 + if smallChild+1 < len(heap) and heap[smallChild] > heap[smallChild+1] : + smallChild += 1 + if smallChild >= len(heap) or lastItem <= heap[smallChild]: + heap[insertionPoint] = lastItem + break + heap[insertionPoint] = heap[smallChild] + insertionPoint = smallChild + return heap[0][1] + + def __iter__(self): + ''' + Create destructive sorted iterator of priorityDictionary. + ''' + def iterfn(): + while len(self) > 0: + x = self.smallest() + yield x + del self[x] + return iterfn() + + def __setitem__(self,key,val): + ''' + Change value stored in dictionary and add corresponding pair to heap. + Rebuilds the heap if the number of deleted items gets large, to avoid memory leakage. + ''' + dict.__setitem__(self,key,val) + heap = self.__heap + if len(heap) > 2 * len(self): + self.__heap = [(v,k) for k,v in self.iteritems()] + self.__heap.sort() # builtin sort probably faster than O(n)-time heapify + else: + newPair = (val,key) + insertionPoint = len(heap) + heap.append(None) + while insertionPoint > 0 and newPair < heap[(insertionPoint-1)//2]: + heap[insertionPoint] = heap[(insertionPoint-1)//2] + insertionPoint = (insertionPoint-1)//2 + heap[insertionPoint] = newPair + + def setdefault(self,key,val): + ''' + Reimplement setdefault to pass through our customized __setitem__. 
+ ''' + if key not in self: + self[key] = val + return self[key] diff --git a/python/altgraph/altgraph/GraphStat.py b/python/altgraph/altgraph/GraphStat.py new file mode 100644 index 000000000..25fc46c2d --- /dev/null +++ b/python/altgraph/altgraph/GraphStat.py @@ -0,0 +1,73 @@ +''' +altgraph.GraphStat - Functions providing various graph statistics +================================================================= +''' +import sys + +def degree_dist(graph, limits=(0,0), bin_num=10, mode='out'): + ''' + Computes the degree distribution for a graph. + + Returns a list of tuples where the first element of the tuple is the center of the bin + representing a range of degrees and the second element of the tuple are the number of nodes + with the degree falling in the range. + + Example:: + + .... + ''' + + deg = [] + if mode == 'inc': + get_deg = graph.inc_degree + else: + get_deg = graph.out_degree + + for node in graph: + deg.append( get_deg(node) ) + + if not deg: + return [] + + results = _binning(values=deg, limits=limits, bin_num=bin_num) + + return results + +_EPS = 1.0/(2.0**32) +def _binning(values, limits=(0,0), bin_num=10): + ''' + Bins data that falls between certain limits, if the limits are (0, 0) the + minimum and maximum values are used. + + Returns a list of tuples where the first element of the tuple is the center of the bin + and the second element of the tuple are the counts. 
+ ''' + if limits == (0, 0): + min_val, max_val = min(values) - _EPS, max(values) + _EPS + else: + min_val, max_val = limits + + # get bin size + bin_size = (max_val - min_val)/float(bin_num) + bins = [0] * (bin_num) + + # will ignore these outliers for now + out_points = 0 + for value in values: + try: + if (value - min_val) < 0: + out_points += 1 + else: + index = int((value - min_val)/float(bin_size)) + bins[index] += 1 + except IndexError: + out_points += 1 + + # make it ready for an x,y plot + result = [] + center = (bin_size/2) + min_val + for i, y in enumerate(bins): + x = center + bin_size * i + result.append( (x,y) ) + + return result diff --git a/python/altgraph/altgraph/GraphUtil.py b/python/altgraph/altgraph/GraphUtil.py new file mode 100644 index 000000000..d3b6acd74 --- /dev/null +++ b/python/altgraph/altgraph/GraphUtil.py @@ -0,0 +1,137 @@ +''' +altgraph.GraphUtil - Utility classes and functions +================================================== +''' + +import random +from collections import deque +from altgraph import Graph +from altgraph import GraphError + +def generate_random_graph(node_num, edge_num, self_loops=False, multi_edges=False): + ''' + Generates and returns a :py:class:`~altgraph.Graph.Graph` instance with *node_num* nodes + randomly connected by *edge_num* edges. 
+ ''' + g = Graph.Graph() + + if not multi_edges: + if self_loops: + max_edges = node_num * node_num + else: + max_edges = node_num * (node_num-1) + + if edge_num > max_edges: + raise GraphError("inconsistent arguments to 'generate_random_graph'") + + nodes = range(node_num) + + for node in nodes: + g.add_node(node) + + while 1: + head = random.choice(nodes) + tail = random.choice(nodes) + + # loop defense + if head == tail and not self_loops: + continue + + # multiple edge defense + if g.edge_by_node(head,tail) is not None and not multi_edges: + continue + + # add the edge + g.add_edge(head, tail) + if g.number_of_edges() >= edge_num: + break + + return g + +def generate_scale_free_graph(steps, growth_num, self_loops=False, multi_edges=False): + ''' + Generates and returns a :py:class:`~altgraph.Graph.Graph` instance that will have *steps* \* *growth_num* nodes + and a scale free (powerlaw) connectivity. Starting with a fully connected graph with *growth_num* nodes + at every step *growth_num* nodes are added to the graph and are connected to existing nodes with + a probability proportional to the degree of these existing nodes. + ''' + # FIXME: The code doesn't seem to do what the documentation claims. 
+ graph = Graph.Graph() + + # initialize the graph + store = [] + for i in range(growth_num): + #store += [ i ] * (growth_num - 1) + for j in range(i + 1, growth_num): + store.append(i) + store.append(j) + graph.add_edge(i,j) + + # generate + for node in range(growth_num, steps * growth_num): + graph.add_node(node) + while ( graph.out_degree(node) < growth_num ): + nbr = random.choice(store) + + # loop defense + if node == nbr and not self_loops: + continue + + # multi edge defense + if graph.edge_by_node(node, nbr) and not multi_edges: + continue + + graph.add_edge(node, nbr) + + + for nbr in graph.out_nbrs(node): + store.append(node) + store.append(nbr) + + return graph + +def filter_stack(graph, head, filters): + """ + Perform a walk in a depth-first order starting + at *head*. + + Returns (visited, removes, orphans). + + * visited: the set of visited nodes + * removes: the list of nodes where the node + data does not all *filters* + * orphans: tuples of (last_good, node), + where node is not in removes, is directly + reachable from a node in *removes* and + *last_good* is the closest upstream node that is not + in *removes*. 
+ """ + + visited, removes, orphans = set([head]), set(), set() + stack = deque([(head, head)]) + get_data = graph.node_data + get_edges = graph.out_edges + get_tail = graph.tail + + while stack: + last_good, node = stack.pop() + data = get_data(node) + if data is not None: + for filtfunc in filters: + if not filtfunc(data): + removes.add(node) + break + else: + last_good = node + for edge in get_edges(node): + tail = get_tail(edge) + if last_good is not node: + orphans.add((last_good, tail)) + if tail not in visited: + visited.add(tail) + stack.append((last_good, tail)) + + orphans = [(last_good, tail) for (last_good, tail) in orphans if tail not in removes] + #orphans.sort() + + return visited, removes, orphans diff --git a/python/altgraph/altgraph/ObjectGraph.py b/python/altgraph/altgraph/ObjectGraph.py new file mode 100644 index 000000000..d07f51b68 --- /dev/null +++ b/python/altgraph/altgraph/ObjectGraph.py @@ -0,0 +1,202 @@ +""" +altgraph.ObjectGraph - Graph of objects with an identifier +========================================================== + +A graph of objects that have a "graphident" attribute. +graphident is the key for the object in the graph +""" + +from altgraph import GraphError +from altgraph.Graph import Graph +from altgraph.GraphUtil import filter_stack + +class ObjectGraph(object): + """ + A graph of objects that have a "graphident" attribute. 
class ObjectGraph(object):
    """
    A graph of objects that expose a ``graphident`` attribute; that
    attribute is the key for the object in the underlying graph.
    """
    def __init__(self, graph=None, debug=0):
        if graph is None:
            graph = Graph()
        self.graphident = self
        self.graph = graph
        self.debug = debug
        self.indent = 0
        graph.add_node(self, None)

    def __repr__(self):
        return '<%s>' % (type(self).__name__,)

    def flatten(self, condition=None, start=None):
        """
        Iterate over the subgraph that is entirely reachable by condition,
        starting from the given start node or the ObjectGraph root.
        """
        if start is None:
            start = self
        return self.graph.iterdata(start=self.getRawIdent(start),
                                   condition=condition)

    def nodes(self):
        """Yield the data object of every node that carries one."""
        for ident in self.graph:
            data = self.graph.node_data(ident)
            if data is not None:
                yield data

    def get_edges(self, node):
        """
        Return a pair of iterators over the neighbour objects of *node*:
        (outgoing targets, incoming sources). Duplicates are suppressed.
        """
        start = self.getRawIdent(node)
        _, _, outraw, incraw = self.graph.describe_node(start)

        def iter_edges(edge_ids, pos):
            # pos picks the head (2) or tail (3) slot of the description
            seen = set()
            for edge_id in edge_ids:
                ident = self.graph.describe_edge(edge_id)[pos]
                if ident not in seen:
                    seen.add(ident)
                    yield self.findNode(ident)

        return iter_edges(outraw, 3), iter_edges(incraw, 2)

    def edgeData(self, fromNode, toNode):
        """Return the data attached to the edge fromNode -> toNode."""
        edge = self.graph.edge_by_node(self.getRawIdent(fromNode),
                                       self.getRawIdent(toNode))
        return self.graph.edge_data(edge)

    def updateEdgeData(self, fromNode, toNode, edgeData):
        """Replace the data attached to the edge fromNode -> toNode."""
        edge = self.graph.edge_by_node(self.getRawIdent(fromNode),
                                       self.getRawIdent(toNode))
        self.graph.update_edge_data(edge, edgeData)

    def filterStack(self, filters):
        """
        Filter the ObjectGraph in-place by removing all edges to nodes
        that do not match every filter in the given filter list.

        Returns a tuple with the number of
        (nodes_visited, nodes_removed, nodes_orphaned).
        """
        visited, removes, orphans = filter_stack(self.graph, self, filters)

        for last_good, tail in orphans:
            self.graph.add_edge(last_good, tail, edge_data='orphan')

        for node in removes:
            self.graph.hide_node(node)

        # subtract one: the root itself is always visited
        return len(visited) - 1, len(removes), len(orphans)

    def removeNode(self, node):
        """Remove the given node from the graph if it exists."""
        ident = self.getIdent(node)
        if ident is not None:
            self.graph.hide_node(ident)

    def removeReference(self, fromnode, tonode):
        """Remove all edges from fromnode to tonode."""
        if fromnode is None:
            fromnode = self
        fromident = self.getIdent(fromnode)
        toident = self.getIdent(tonode)
        if fromident is None or toident is None:
            return
        while True:
            edge = self.graph.edge_by_node(fromident, toident)
            if edge is None:
                break
            self.graph.hide_edge(edge)

    def getIdent(self, node):
        """Return the graph identifier for a node, or None."""
        ident = self.getRawIdent(node)
        if ident is not None:
            return ident
        found = self.findNode(node)
        if found is None:
            return None
        return found.graphident

    def getRawIdent(self, node):
        """
        Return the identifier for a node object; the root maps to itself,
        anything else to its graphident attribute (or None).
        """
        if node is self:
            return node
        return getattr(node, 'graphident', None)

    def __contains__(self, node):
        return self.findNode(node) is not None

    def findNode(self, node):
        """Find and return the node's data object on the graph, or None."""
        ident = self.getRawIdent(node)
        if ident is None:
            ident = node
        try:
            return self.graph.node_data(ident)
        except KeyError:
            return None

    def addNode(self, node):
        """Add a node to the graph referenced by the root."""
        self.msg(4, "addNode", node)

        try:
            # un-hide the node if it was hidden previously
            self.graph.restore_node(node.graphident)
        except GraphError:
            self.graph.add_node(node.graphident, node)

    def createReference(self, fromnode, tonode, edge_data=None):
        """Create a reference from fromnode to tonode."""
        if fromnode is None:
            fromnode = self
        fromident, toident = self.getIdent(fromnode), self.getIdent(tonode)
        if fromident is None or toident is None:
            return
        self.msg(4, "createReference", fromnode, tonode, edge_data)
        self.graph.add_edge(fromident, toident, edge_data=edge_data)

    def createNode(self, cls, name, *args, **kw):
        """
        Add a node of type cls to the graph if it does not already exist
        by the given name.
        """
        node = self.findNode(name)
        if node is None:
            node = cls(name, *args, **kw)
            self.addNode(node)
        return node

    def msg(self, level, s, *args):
        """Print a debug message with the given level."""
        if s and level <= self.debug:
            print("%s%s %s" % (" " * self.indent, s, ' '.join(map(repr, args))))

    def msgin(self, level, s, *args):
        """Print a debug message and indent."""
        if level <= self.debug:
            self.msg(level, s, *args)
            self.indent = self.indent + 1

    def msgout(self, level, s, *args):
        """Dedent and print a debug message."""
        if level <= self.debug:
            self.indent = self.indent - 1
            self.msg(level, s, *args)
+ +The package contains the following modules: + + - the :py:mod:`altgraph.Graph` module contains the :class:`~altgraph.Graph.Graph` class that stores the graph data + + - the :py:mod:`altgraph.GraphAlgo` module implements graph algorithms operating on graphs (:py:class:`~altgraph.Graph.Graph`} instances) + + - the :py:mod:`altgraph.GraphStat` module contains functions for computing statistical measures on graphs + + - the :py:mod:`altgraph.GraphUtil` module contains functions for generating, reading and saving graphs + + - the :py:mod:`altgraph.Dot` module contains functions for displaying graphs via `graphviz `_ + + - the :py:mod:`altgraph.ObjectGraph` module implements a graph of objects with a unique identifier + +Installation +------------ + +Download and unpack the archive then type:: + + python setup.py install + +This will install the library in the default location. For instructions on +how to customize the install procedure read the output of:: + + python setup.py --help install + +To verify that the code works run the test suite:: + + python setup.py test + +Example usage +------------- + +Lets assume that we want to analyze the graph below (links to the full picture) GRAPH_IMG. 
+Our script then might look the following way:: + + from altgraph import Graph, GraphAlgo, Dot + + # these are the edges + edges = [ (1,2), (2,4), (1,3), (2,4), (3,4), (4,5), (6,5), + (6,14), (14,15), (6, 15), (5,7), (7, 8), (7,13), (12,8), + (8,13), (11,12), (11,9), (13,11), (9,13), (13,10) ] + + # creates the graph + graph = Graph.Graph() + for head, tail in edges: + graph.add_edge(head, tail) + + # do a forward bfs from 1 at most to 20 + print(graph.forw_bfs(1)) + +This will print the nodes in some breadth first order:: + + [1, 2, 3, 4, 5, 7, 8, 13, 11, 10, 12, 9] + +If we wanted to get the hop-distance from node 1 to node 8 +we coud write:: + + print(graph.get_hops(1, 8)) + +This will print the following:: + + [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)] + +Node 1 is at 0 hops since it is the starting node, nodes 2,3 are 1 hop away ... +node 8 is 5 hops away. To find the shortest distance between two nodes you +can use:: + + print(GraphAlgo.shortest_path(graph, 1, 12)) + +It will print the nodes on one (if there are more) the shortest paths:: + + [1, 2, 4, 5, 7, 13, 11, 12] + +To display the graph we can use the GraphViz backend:: + + dot = Dot.Dot(graph) + + # display the graph on the monitor + dot.display() + + # save it in an image file + dot.save_img(file_name='graph', file_type='gif') + + + +.. + @author: U{Istvan Albert} + + @license: MIT License + + Copyright (c) 2004 Istvan Albert unless otherwise noted. + + Permission is hereby granted, free of charge, to any person obtaining a copy of this software + and associated documentation files (the "Software"), to deal in the Software without restriction, + including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do + so. 
class BasicTests(unittest.TestCase):
    """Smoke tests for Graph construction, traversal and shortest paths."""

    def setUp(self):
        # the sample graph every test case runs against
        self.edges = [
            (1,2), (2,4), (1,3), (2,4), (3,4), (4,5), (6,5), (6,14), (14,15),
            (6, 15), (5,7), (7, 8), (7,13), (12,8), (8,13), (11,12), (11,9),
            (13,11), (9,13), (13,10)
        ]

        # build the graph and remember every distinct node id
        self.store = {}
        self.g = Graph.Graph()
        for head, tail in self.edges:
            self.store[head] = None
            self.store[tail] = None
            self.g.add_edge(head, tail)

    def test_num_edges(self):
        # node and edge counts must match the input edge list
        self.assertEqual(self.g.number_of_nodes(), len(self.store))
        self.assertEqual(self.g.number_of_edges(), len(self.edges))

    def test_forw_bfs(self):
        # forward BFS order starting at node 1
        self.assertEqual(
            self.g.forw_bfs(1),
            [1, 2, 3, 4, 5, 7, 8, 13, 11, 10, 12, 9])

    def test_get_hops(self):
        # hop distances between node 1 and node 8
        self.assertEqual(
            self.g.get_hops(1, 8),
            [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)])

    def test_shortest_path(self):
        self.assertEqual(
            GraphAlgo.shortest_path(self.g, 1, 12),
            [1, 2, 4, 5, 7, 13, 11, 12])
self.assertEqual(dot.temp_neo, 'tmp_neo.dot') + self.assertEqual(dot.dot, '/usr/local/bin/dot') + self.assertEqual(dot.dotty, '/usr/local/bin/dotty') + self.assertEqual(dot.neato, '/usr/local/bin/neato') + self.assertEqual(dot.type, 'graph') + + self.assertEqual(dot.nodes, dict([(x, {'label': x}) for x in [1,2]])) + + edges = {} + for head in [1,2]: + edges[head] = {} + for tail in list(sorted(g.out_nbrs(head)))[:-1]: + if tail not in [1,2]: continue + edges[head][tail] = {'label': (head, tail) } + + self.assertEqual(dot.edges[1], edges[1]) + self.assertEqual(dot.edges, edges) + + self.assertRaises(GraphError, Dot.Dot, g, nodes=[1,2, 9]) + + def test_style(self): + g = Graph.Graph([]) + + dot = Dot.Dot(g) + + self.assertEqual(dot.attr, {}) + + dot.style(key='value') + self.assertEqual(dot.attr, {'key': 'value'}) + + dot.style(key2='value2') + self.assertEqual(dot.attr, {'key2': 'value2'}) + + def test_node_style(self): + g = Graph.Graph([ + (1,2), + (1,3), + (1,4), + (2,4), + (2,6), + (2,7), + (7,4), + (6,1), + ] + ) + + dot = Dot.Dot(g) + + self.assertEqual(dot.nodes[1], {}) + + dot.node_style(1, key='value') + self.assertEqual(dot.nodes[1], {'key': 'value'}) + + dot.node_style(1, key2='value2') + self.assertEqual(dot.nodes[1], {'key2': 'value2'}) + self.assertEqual(dot.nodes[2], {}) + + dot.all_node_style(key3='value3') + for n in g: + self.assertEqual(dot.nodes[n], {'key3': 'value3'}) + + self.assertTrue(9 not in dot.nodes) + dot.node_style(9, key='value') + self.assertEqual(dot.nodes[9], {'key': 'value'}) + + def test_edge_style(self): + g = Graph.Graph([ + (1,2), + (1,3), + (1,4), + (2,4), + (2,6), + (2,7), + (7,4), + (6,1), + ] + ) + + dot = Dot.Dot(g) + + self.assertEqual(dot.edges[1][2], {}) + dot.edge_style(1,2, foo='bar') + self.assertEqual(dot.edges[1][2], {'foo': 'bar'}) + + dot.edge_style(1,2, foo2='2bar') + self.assertEqual(dot.edges[1][2], {'foo2': '2bar'}) + + self.assertEqual(dot.edges[1][3], {}) + + self.assertFalse(6 in dot.edges[1]) + 
dot.edge_style(1,6, foo2='2bar') + self.assertEqual(dot.edges[1][6], {'foo2': '2bar'}) + + self.assertRaises(GraphError, dot.edge_style, 1, 9, a=1) + self.assertRaises(GraphError, dot.edge_style, 9, 1, a=1) + + + def test_iter(self): + g = Graph.Graph([ + (1,2), + (1,3), + (1,4), + (2,4), + (2,6), + (2,7), + (7,4), + (6,1), + ] + ) + + dot = Dot.Dot(g) + dot.style(graph="foobar") + dot.node_style(1, key='value') + dot.node_style(2, key='another', key2='world') + dot.edge_style(1,4, key1='value1', key2='value2') + dot.edge_style(2,4, key1='valueA') + + self.assertEqual(list(iter(dot)), list(dot.iterdot())) + + for item in dot.iterdot(): + self.assertTrue(isinstance(item, str)) + + first = list(dot.iterdot())[0] + self.assertEqual(first, "digraph %s {\n"%(dot.name,)) + + dot.type = 'graph' + first = list(dot.iterdot())[0] + self.assertEqual(first, "graph %s {\n"%(dot.name,)) + + dot.type = 'foo' + self.assertRaises(GraphError, list, dot.iterdot()) + dot.type = 'digraph' + + self.assertEqual(list(dot), [ + 'digraph G {\n', + 'graph="foobar";', + '\n', + + '\t"1" [', + 'key="value",', + '];\n', + + '\t"2" [', + 'key="another",', + 'key2="world",', + '];\n', + + '\t"3" [', + '];\n', + + '\t"4" [', + '];\n', + + '\t"6" [', + '];\n', + + '\t"7" [', + '];\n', + + '\t"1" -> "2" [', + '];\n', + + '\t"1" -> "3" [', + '];\n', + + '\t"1" -> "4" [', + 'key1="value1",', + 'key2="value2",', + '];\n', + + '\t"2" -> "4" [', + 'key1="valueA",', + '];\n', + + '\t"2" -> "6" [', + '];\n', + + '\t"2" -> "7" [', + '];\n', + + '\t"6" -> "1" [', + '];\n', + + '\t"7" -> "4" [', + '];\n', + '}\n']) + + + def test_save(self): + g = Graph.Graph([ + (1,2), + (1,3), + (1,4), + (2,4), + (2,6), + (2,7), + (7,4), + (6,1), + ] + ) + + dot = Dot.Dot(g) + dot.style(graph="foobar") + dot.node_style(1, key='value') + dot.node_style(2, key='another', key2='world') + dot.edge_style(1,4, key1='value1', key2='value2') + dot.edge_style(2,4, key1='valueA') + + fn = 'test_dot.dot' + self.assertTrue(not 
def test_img(self):
    """save_img/display shell out to the configured graphviz binaries."""
    graph = Graph.Graph([
        (1, 2), (1, 3), (1, 4), (2, 4),
        (2, 6), (2, 7), (7, 4), (6, 1),
    ])

    # deliberately bogus tool paths so nothing real can be executed
    dot = Dot.Dot(graph,
                  dot='/usr/local/bin/!!dot',
                  dotty='/usr/local/bin/!!dotty',
                  neato='/usr/local/bin/!!neato')
    dot.style(size='10,10', rankdir='RL', page='5, 5', ranksep=0.75)
    dot.node_style(1, label='BASE_NODE', shape='box', color='blue')
    dot.node_style(2, style='filled', fillcolor='red')
    dot.edge_style(1, 4, style='dotted')
    dot.edge_style(2, 4, arrowhead='dot', label='binds', labelangle='90')

    recorded = []

    def fake_system(cmd):
        recorded.append(cmd)
        return None

    try:
        real_system = os.system
        os.system = fake_system

        del recorded[:]
        dot.save_img('foo')
        self.assertEqual(recorded,
                         ['/usr/local/bin/!!dot -Tgif tmp_dot.dot -o foo.gif'])

        del recorded[:]
        dot.save_img('foo', file_type='jpg')
        self.assertEqual(recorded,
                         ['/usr/local/bin/!!dot -Tjpg tmp_dot.dot -o foo.jpg'])

        del recorded[:]
        dot.save_img('bar', file_type='jpg', mode='neato')
        self.assertEqual(recorded, [
            '/usr/local/bin/!!neato -o tmp_dot.dot tmp_neo.dot',
            '/usr/local/bin/!!dot -Tjpg tmp_dot.dot -o bar.jpg',
        ])

        del recorded[:]
        dot.display()
        self.assertEqual(recorded, [
            '/usr/local/bin/!!dotty tmp_dot.dot'
        ])

        del recorded[:]
        dot.display(mode='neato')
        self.assertEqual(recorded, [
            '/usr/local/bin/!!neato -o tmp_dot.dot tmp_neo.dot',
            '/usr/local/bin/!!dotty tmp_dot.dot'
        ])

    finally:
        if os.path.exists(dot.temp_dot):
            os.unlink(dot.temp_dot)
        if os.path.exists(dot.temp_neo):
            os.unlink(dot.temp_neo)
        os.system = real_system

    # integration half: only runs when real graphviz binaries are present
    if os.path.exists('/usr/local/bin/dot') and os.path.exists('/usr/local/bin/neato'):
        try:
            dot.dot = '/usr/local/bin/dot'
            dot.neato = '/usr/local/bin/neato'
            self.assertFalse(os.path.exists('foo.gif'))
            dot.save_img('foo')
            self.assertTrue(os.path.exists('foo.gif'))
            os.unlink('foo.gif')

            self.assertFalse(os.path.exists('foo.gif'))
            dot.save_img('foo', mode='neato')
            self.assertTrue(os.path.exists('foo.gif'))
            os.unlink('foo.gif')

        finally:
            if os.path.exists(dot.temp_dot):
                os.unlink(dot.temp_dot)
            if os.path.exists(dot.temp_neo):
                os.unlink(dot.temp_neo)
self.assertTrue(1 in graph) + self.assertFalse(2 in graph) + self.assertFalse(3 in graph) + + graph.restore_all_nodes() + self.assertTrue(1 in graph) + self.assertTrue(2 in graph) + self.assertTrue(3 in graph) + + self.assertEqual(list(sorted(graph.node_list())), [1, 2, 3]) + + v = graph.describe_node(1) + self.assertEqual(v, (1, o1, [], [])) + + def test_edges(self): + graph = Graph() + graph.add_node(1) + graph.add_node(2) + graph.add_node(3) + graph.add_node(4) + graph.add_node(5) + + self.assertTrue(isinstance(graph.edge_list(), list)) + + graph.add_edge(1, 2) + graph.add_edge(4, 5, 'a') + + self.assertRaises(GraphError, graph.add_edge, 'a', 'b', create_nodes=False) + + self.assertEqual(graph.number_of_hidden_edges(), 0) + self.assertEqual(graph.number_of_edges(), 2) + e = graph.edge_by_node(1, 2) + self.assertTrue(isinstance(e, int)) + graph.hide_edge(e) + self.assertEqual(graph.number_of_hidden_edges(), 1) + self.assertEqual(graph.number_of_edges(), 1) + e2 = graph.edge_by_node(1, 2) + self.assertTrue(e2 is None) + + graph.restore_edge(e) + e2 = graph.edge_by_node(1, 2) + self.assertEqual(e, e2) + self.assertEqual(graph.number_of_hidden_edges(), 0) + + self.assertEqual(graph.number_of_edges(), 2) + + e1 = graph.edge_by_node(1, 2) + e2 = graph.edge_by_node(4, 5) + graph.hide_edge(e1) + graph.hide_edge(e2) + + self.assertEqual(graph.number_of_edges(), 0) + graph.restore_all_edges() + self.assertEqual(graph.number_of_edges(), 2) + + self.assertEqual(graph.edge_by_id(e1), (1,2)) + self.assertRaises(GraphError, graph.edge_by_id, (e1+1)*(e2+1)+1) + + self.assertEqual(list(sorted(graph.edge_list())), [e1, e2]) + + self.assertEqual(graph.describe_edge(e1), (e1, 1, 1, 2)) + self.assertEqual(graph.describe_edge(e2), (e2, 'a', 4, 5)) + + self.assertEqual(graph.edge_data(e1), 1) + self.assertEqual(graph.edge_data(e2), 'a') + + self.assertEqual(graph.head(e2), 4) + self.assertEqual(graph.tail(e2), 5) + + graph.add_edge(1, 3) + graph.add_edge(1, 5) + graph.add_edge(4, 1) + 
+ self.assertEqual(list(sorted(graph.out_nbrs(1))), [2, 3, 5]) + self.assertEqual(list(sorted(graph.inc_nbrs(1))), [4]) + self.assertEqual(list(sorted(graph.inc_nbrs(5))), [1, 4]) + self.assertEqual(list(sorted(graph.all_nbrs(1))), [2, 3, 4, 5]) + + graph.add_edge(5, 1) + self.assertEqual(list(sorted(graph.all_nbrs(5))), [1, 4]) + + self.assertEqual(graph.out_degree(1), 3) + self.assertEqual(graph.inc_degree(2), 1) + self.assertEqual(graph.inc_degree(5), 2) + self.assertEqual(graph.all_degree(5), 3) + + v = graph.out_edges(4) + self.assertTrue(isinstance(v, list)) + self.assertEqual(graph.edge_by_id(v[0]), (4, 5)) + + v = graph.out_edges(1) + for e in v: + self.assertEqual(graph.edge_by_id(e)[0], 1) + + v = graph.inc_edges(1) + self.assertTrue(isinstance(v, list)) + self.assertEqual(graph.edge_by_id(v[0]), (4, 1)) + + v = graph.inc_edges(5) + for e in v: + self.assertEqual(graph.edge_by_id(e)[1], 5) + + v = graph.all_edges(5) + for e in v: + self.assertTrue(graph.edge_by_id(e)[1] == 5 or graph.edge_by_id(e)[0] == 5) + + e1 = graph.edge_by_node(1, 2) + self.assertTrue(isinstance(e1, int)) + graph.hide_node(1) + self.assertRaises(GraphError, graph.edge_by_node, 1, 2) + graph.restore_node(1) + e2 = graph.edge_by_node(1, 2) + self.assertEqual(e1, e2) + + + + def test_toposort(self): + graph = Graph() + graph.add_node(1) + graph.add_node(2) + graph.add_node(3) + graph.add_node(4) + graph.add_node(5) + + graph.add_edge(1, 2) + graph.add_edge(1, 3) + graph.add_edge(2, 4) + graph.add_edge(3, 5) + + ok, result = graph.forw_topo_sort() + self.assertTrue(ok) + for idx in range(1, 6): + self.assertTrue(idx in result) + + self.assertTrue(result.index(1) < result.index(2)) + self.assertTrue(result.index(1) < result.index(3)) + self.assertTrue(result.index(2) < result.index(4)) + self.assertTrue(result.index(3) < result.index(5)) + + ok, result = graph.back_topo_sort() + self.assertTrue(ok) + for idx in range(1, 6): + self.assertTrue(idx in result) + 
self.assertTrue(result.index(2) < result.index(1)) + self.assertTrue(result.index(3) < result.index(1)) + self.assertTrue(result.index(4) < result.index(2)) + self.assertTrue(result.index(5) < result.index(3)) + + + # Same graph as before, but with edges + # reversed, which means we should get + # the same results as before if using + # back_topo_sort rather than forw_topo_sort + # (and v.v.) + + graph = Graph() + graph.add_node(1) + graph.add_node(2) + graph.add_node(3) + graph.add_node(4) + graph.add_node(5) + + graph.add_edge(2, 1) + graph.add_edge(3, 1) + graph.add_edge(4, 2) + graph.add_edge(5, 3) + + ok, result = graph.back_topo_sort() + self.assertTrue(ok) + for idx in range(1, 6): + self.assertTrue(idx in result) + + self.assertTrue(result.index(1) < result.index(2)) + self.assertTrue(result.index(1) < result.index(3)) + self.assertTrue(result.index(2) < result.index(4)) + self.assertTrue(result.index(3) < result.index(5)) + + ok, result = graph.forw_topo_sort() + self.assertTrue(ok) + for idx in range(1, 6): + self.assertTrue(idx in result) + self.assertTrue(result.index(2) < result.index(1)) + self.assertTrue(result.index(3) < result.index(1)) + self.assertTrue(result.index(4) < result.index(2)) + self.assertTrue(result.index(5) < result.index(3)) + + + # Create a cycle + graph.add_edge(1, 5) + ok, result = graph.forw_topo_sort() + self.assertFalse(ok) + ok, result = graph.back_topo_sort() + self.assertFalse(ok) + + def test_bfs_subgraph(self): + graph = Graph() + graph.add_edge(1, 2) + graph.add_edge(1, 4) + graph.add_edge(2, 4) + graph.add_edge(4, 8) + graph.add_edge(4, 9) + graph.add_edge(4, 10) + graph.add_edge(8, 10) + + subgraph = graph.forw_bfs_subgraph(10) + self.assertTrue(isinstance(subgraph, Graph)) + self.assertEqual(subgraph.number_of_nodes(), 1) + self.assertTrue(10 in subgraph) + self.assertEqual(subgraph.number_of_edges(), 0) + + subgraph = graph.forw_bfs_subgraph(4) + self.assertTrue(isinstance(subgraph, Graph)) + 
self.assertEqual(subgraph.number_of_nodes(), 4) + self.assertTrue(4 in subgraph) + self.assertTrue(8 in subgraph) + self.assertTrue(9 in subgraph) + self.assertTrue(10 in subgraph) + self.assertEqual(subgraph.number_of_edges(), 4) + e = subgraph.edge_by_node(4, 8) + e = subgraph.edge_by_node(4, 9) + e = subgraph.edge_by_node(4, 10) + e = subgraph.edge_by_node(8, 10) + + # same graph as before, but switch around + # edges. This results in the same test results + # but now for back_bfs_subgraph rather than + # forw_bfs_subgraph + + graph = Graph() + graph.add_edge(2, 1) + graph.add_edge(4, 1) + graph.add_edge(4, 2) + graph.add_edge(8, 4) + graph.add_edge(9, 4) + graph.add_edge(10, 4) + graph.add_edge(10, 8) + + subgraph = graph.back_bfs_subgraph(10) + self.assertTrue(isinstance(subgraph, Graph)) + self.assertEqual(subgraph.number_of_nodes(), 1) + self.assertTrue(10 in subgraph) + self.assertEqual(subgraph.number_of_edges(), 0) + + subgraph = graph.back_bfs_subgraph(4) + self.assertTrue(isinstance(subgraph, Graph)) + self.assertEqual(subgraph.number_of_nodes(), 4) + self.assertTrue(4 in subgraph) + self.assertTrue(8 in subgraph) + self.assertTrue(9 in subgraph) + self.assertTrue(10 in subgraph) + self.assertEqual(subgraph.number_of_edges(), 4) + e = subgraph.edge_by_node(4, 8) + e = subgraph.edge_by_node(4, 9) + e = subgraph.edge_by_node(4, 10) + e = subgraph.edge_by_node(8, 10) + + def test_iterdfs(self): + graph = Graph() + graph.add_edge("1", "1.1") + graph.add_edge("1", "1.2") + graph.add_edge("1", "1.3") + graph.add_edge("1.1", "1.1.1") + graph.add_edge("1.1", "1.1.2") + graph.add_edge("1.2", "1.2.1") + graph.add_edge("1.2", "1.2.2") + graph.add_edge("1.2.2", "1.2.2.1") + graph.add_edge("1.2.2", "1.2.2.2") + graph.add_edge("1.2.2", "1.2.2.3") + + result = list(graph.iterdfs("1")) + self.assertEqual(result, [ + '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2', + '1.2.2.1', '1.2.1', '1.1', '1.1.2', '1.1.1' + ]) + result = list(graph.iterdfs("1", "1.2.1")) + 
self.assertEqual(result, [ + '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2', + '1.2.2.1', '1.2.1' + ]) + + result = graph.forw_dfs("1") + self.assertEqual(result, [ + '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2', + '1.2.2.1', '1.2.1', '1.1', '1.1.2', '1.1.1' + ]) + result = graph.forw_dfs("1", "1.2.1") + self.assertEqual(result, [ + '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2', + '1.2.2.1', '1.2.1' + ]) + + graph = Graph() + graph.add_edge("1.1", "1") + graph.add_edge("1.2", "1") + graph.add_edge("1.3", "1") + graph.add_edge("1.1.1", "1.1") + graph.add_edge("1.1.2", "1.1") + graph.add_edge("1.2.1", "1.2") + graph.add_edge("1.2.2", "1.2") + graph.add_edge("1.2.2.1", "1.2.2") + graph.add_edge("1.2.2.2", "1.2.2") + graph.add_edge("1.2.2.3", "1.2.2") + + result = list(graph.iterdfs("1", forward=False)) + self.assertEqual(result, [ + '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2', + '1.2.2.1', '1.2.1', '1.1', '1.1.2', '1.1.1' + ]) + result = list(graph.iterdfs("1", "1.2.1", forward=False)) + self.assertEqual(result, [ + '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2', + '1.2.2.1', '1.2.1' + ]) + result = graph.back_dfs("1") + self.assertEqual(result, [ + '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2', + '1.2.2.1', '1.2.1', '1.1', '1.1.2', '1.1.1' + ]) + result = graph.back_dfs("1", "1.2.1") + self.assertEqual(result, [ + '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2', + '1.2.2.1', '1.2.1' + ]) + + + # Introduce cyle: + graph.add_edge("1", "1.2") + result = list(graph.iterdfs("1", forward=False)) + self.assertEqual(result, [ + '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2', + '1.2.2.1', '1.2.1', '1.1', '1.1.2', '1.1.1' + ]) + + result = graph.back_dfs("1") + self.assertEqual(result, [ + '1', '1.3', '1.2', '1.2.2', '1.2.2.3', '1.2.2.2', + '1.2.2.1', '1.2.1', '1.1', '1.1.2', '1.1.1' + ]) + + + def test_iterdata(self): + graph = Graph() + graph.add_node("1", "I") + graph.add_node("1.1", "I.I") + graph.add_node("1.2", "I.II") + graph.add_node("1.3", 
"I.III") + graph.add_node("1.1.1", "I.I.I") + graph.add_node("1.1.2", "I.I.II") + graph.add_node("1.2.1", "I.II.I") + graph.add_node("1.2.2", "I.II.II") + graph.add_node("1.2.2.1", "I.II.II.I") + graph.add_node("1.2.2.2", "I.II.II.II") + graph.add_node("1.2.2.3", "I.II.II.III") + + graph.add_edge("1", "1.1") + graph.add_edge("1", "1.2") + graph.add_edge("1", "1.3") + graph.add_edge("1.1", "1.1.1") + graph.add_edge("1.1", "1.1.2") + graph.add_edge("1.2", "1.2.1") + graph.add_edge("1.2", "1.2.2") + graph.add_edge("1.2.2", "1.2.2.1") + graph.add_edge("1.2.2", "1.2.2.2") + graph.add_edge("1.2.2", "1.2.2.3") + + result = list(graph.iterdata("1", forward=True)) + self.assertEqual(result, [ + 'I', 'I.III', 'I.II', 'I.II.II', 'I.II.II.III', 'I.II.II.II', + 'I.II.II.I', 'I.II.I', 'I.I', 'I.I.II', 'I.I.I' + ]) + + result = list(graph.iterdata("1", end="1.2.1", forward=True)) + self.assertEqual(result, [ + 'I', 'I.III', 'I.II', 'I.II.II', 'I.II.II.III', 'I.II.II.II', + 'I.II.II.I', 'I.II.I' + ]) + + result = list(graph.iterdata("1", condition=lambda n: len(n) < 6, forward=True)) + self.assertEqual(result, [ + 'I', 'I.III', 'I.II', + 'I.I', 'I.I.I' + ]) + + + # And the revese option: + graph = Graph() + graph.add_node("1", "I") + graph.add_node("1.1", "I.I") + graph.add_node("1.2", "I.II") + graph.add_node("1.3", "I.III") + graph.add_node("1.1.1", "I.I.I") + graph.add_node("1.1.2", "I.I.II") + graph.add_node("1.2.1", "I.II.I") + graph.add_node("1.2.2", "I.II.II") + graph.add_node("1.2.2.1", "I.II.II.I") + graph.add_node("1.2.2.2", "I.II.II.II") + graph.add_node("1.2.2.3", "I.II.II.III") + + graph.add_edge("1.1", "1") + graph.add_edge("1.2", "1") + graph.add_edge("1.3", "1") + graph.add_edge("1.1.1", "1.1") + graph.add_edge("1.1.2", "1.1") + graph.add_edge("1.2.1", "1.2") + graph.add_edge("1.2.2", "1.2") + graph.add_edge("1.2.2.1", "1.2.2") + graph.add_edge("1.2.2.2", "1.2.2") + graph.add_edge("1.2.2.3", "1.2.2") + + result = list(graph.iterdata("1", forward=False)) + 
self.assertEqual(result, [ + 'I', 'I.III', 'I.II', 'I.II.II', 'I.II.II.III', 'I.II.II.II', + 'I.II.II.I', 'I.II.I', 'I.I', 'I.I.II', 'I.I.I' + ]) + + result = list(graph.iterdata("1", end="1.2.1", forward=False)) + self.assertEqual(result, [ + 'I', 'I.III', 'I.II', 'I.II.II', 'I.II.II.III', 'I.II.II.II', + 'I.II.II.I', 'I.II.I' + ]) + + result = list(graph.iterdata("1", condition=lambda n: len(n) < 6, forward=False)) + self.assertEqual(result, [ + 'I', 'I.III', 'I.II', + 'I.I', 'I.I.I' + ]) + + def test_bfs(self): + graph = Graph() + graph.add_edge("1", "1.1") + graph.add_edge("1.1", "1.1.1") + graph.add_edge("1.1", "1.1.2") + graph.add_edge("1.1.2", "1.1.2.1") + graph.add_edge("1.1.2", "1.1.2.2") + graph.add_edge("1", "1.2") + graph.add_edge("1", "1.3") + graph.add_edge("1.2", "1.2.1") + + self.assertEqual(graph.forw_bfs("1"), + ['1', '1.1', '1.2', '1.3', '1.1.1', '1.1.2', '1.2.1', '1.1.2.1', '1.1.2.2']) + self.assertEqual(graph.forw_bfs("1", "1.1.1"), + ['1', '1.1', '1.2', '1.3', '1.1.1']) + + + # And the "reverse" graph + graph = Graph() + graph.add_edge("1.1", "1") + graph.add_edge("1.1.1", "1.1") + graph.add_edge("1.1.2", "1.1") + graph.add_edge("1.1.2.1", "1.1.2") + graph.add_edge("1.1.2.2", "1.1.2") + graph.add_edge("1.2", "1") + graph.add_edge("1.3", "1") + graph.add_edge("1.2.1", "1.2") + + self.assertEqual(graph.back_bfs("1"), + ['1', '1.1', '1.2', '1.3', '1.1.1', '1.1.2', '1.2.1', '1.1.2.1', '1.1.2.2']) + self.assertEqual(graph.back_bfs("1", "1.1.1"), + ['1', '1.1', '1.2', '1.3', '1.1.1']) + + + + # check cycle handling + graph.add_edge("1", "1.2.1") + self.assertEqual(graph.back_bfs("1"), + ['1', '1.1', '1.2', '1.3', '1.1.1', '1.1.2', '1.2.1', '1.1.2.1', '1.1.2.2']) + + + def test_connected(self): + graph = Graph() + graph.add_node(1) + graph.add_node(2) + graph.add_node(3) + graph.add_node(4) + + self.assertFalse(graph.connected()) + + graph.add_edge(1, 2) + graph.add_edge(3, 4) + self.assertFalse(graph.connected()) + + graph.add_edge(2, 3) + 
graph.add_edge(4, 1) + self.assertTrue(graph.connected()) + + def test_edges_complex(self): + g = Graph() + g.add_edge(1, 2) + e = g.edge_by_node(1,2) + g.hide_edge(e) + g.hide_node(2) + self.assertRaises(GraphError, g.restore_edge, e) + + g.restore_all_edges() + self.assertRaises(GraphError, g.edge_by_id, e) + + def test_clust_coef(self): + g = Graph() + g.add_edge(1, 2) + g.add_edge(1, 3) + g.add_edge(1, 4) + self.assertEqual(g.clust_coef(1), 0) + + g.add_edge(2, 5) + g.add_edge(3, 5) + g.add_edge(4, 5) + self.assertEqual(g.clust_coef(1), 0) + + g.add_edge(2, 3) + self.assertEqual(g.clust_coef(1), 1./6) + g.add_edge(2, 4) + self.assertEqual(g.clust_coef(1), 2./6) + g.add_edge(4, 2) + self.assertEqual(g.clust_coef(1), 3./6) + + g.add_edge(2, 3) + g.add_edge(2, 4) + g.add_edge(3, 4) + g.add_edge(3, 2) + g.add_edge(4, 2) + g.add_edge(4, 3) + self.assertEqual(g.clust_coef(1), 1) + + + def test_get_hops(self): + graph = Graph() + graph.add_edge(1, 2) + graph.add_edge(1, 3) + graph.add_edge(2, 4) + graph.add_edge(4, 5) + graph.add_edge(5, 7) + graph.add_edge(7, 8) + + self.assertEqual(graph.get_hops(1), + [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)]) + + self.assertEqual(graph.get_hops(1, 5), + [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3)]) + + graph.add_edge(5, 1) + graph.add_edge(7, 1) + graph.add_edge(7, 4) + + self.assertEqual(graph.get_hops(1), + [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)]) + + # And the reverse graph + graph = Graph() + graph.add_edge(2, 1) + graph.add_edge(3, 1) + graph.add_edge(4, 2) + graph.add_edge(5, 4) + graph.add_edge(7, 5) + graph.add_edge(8, 7) + + self.assertEqual(graph.get_hops(1, forward=False), + [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)]) + + self.assertEqual(graph.get_hops(1, 5, forward=False), + [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3)]) + + graph.add_edge(1, 5) + graph.add_edge(1, 7) + graph.add_edge(4, 7) + + self.assertEqual(graph.get_hops(1, forward=False), + [(1, 0), (2, 1), (3, 1), (4, 2), 
(5, 3), (7, 4), (8, 5)]) + + + def test_constructor(self): + graph = Graph(iter([ + (1, 2), + (2, 3, 'a'), + (1, 3), + (3, 4), + ])) + self.assertEqual(graph.number_of_nodes(), 4) + self.assertEqual(graph.number_of_edges(), 4) + try: + graph.edge_by_node(1,2) + graph.edge_by_node(2,3) + graph.edge_by_node(1,3) + graph.edge_by_node(3,4) + except GraphError: + self.fail("Incorrect graph") + + self.assertEqual(graph.edge_data(graph.edge_by_node(2, 3)), 'a') + + self.assertRaises(GraphError, Graph, [(1,2,3,4)]) + +if __name__ == "__main__": # pragma: no cover + unittest.main() diff --git a/python/altgraph/altgraph_tests/test_graphstat.py b/python/altgraph/altgraph_tests/test_graphstat.py new file mode 100644 index 000000000..b628b6f24 --- /dev/null +++ b/python/altgraph/altgraph_tests/test_graphstat.py @@ -0,0 +1,70 @@ +import unittest + +from altgraph import GraphStat +from altgraph import Graph +import sys + +class TestDegreesDist (unittest.TestCase): + + def test_simple(self): + a = Graph.Graph() + self.assertEqual(GraphStat.degree_dist(a), []) + + a.add_node(1) + a.add_node(2) + a.add_node(3) + + self.assertEqual(GraphStat.degree_dist(a), GraphStat._binning([0, 0, 0])) + + for x in range(100): + a.add_node(x) + + for x in range(1, 100): + for y in range(1, 50): + if x % y == 0: + a.add_edge(x, y) + + counts_inc = [] + counts_out = [] + for n in a: + counts_inc.append(a.inc_degree(n)) + counts_out.append(a.out_degree(n)) + + self.assertEqual(GraphStat.degree_dist(a), GraphStat._binning(counts_out)) + self.assertEqual(GraphStat.degree_dist(a, mode='inc'), GraphStat._binning(counts_inc)) + +class TestBinning (unittest.TestCase): + def test_simple(self): + + # Binning [0, 100) into 10 bins + a = list(range(100)) + out = GraphStat._binning(a, limits=(0, 100), bin_num=10) + + self.assertEqual(out, + [ (x*1.0, 10) for x in range(5, 100, 10) ]) + + + # Check that outliers are ignored. 
+ a = list(range(100)) + out = GraphStat._binning(a, limits=(0, 90), bin_num=9) + + self.assertEqual(out, + [ (x*1.0, 10) for x in range(5, 90, 10) ]) + + + out = GraphStat._binning(a, limits=(0, 100), bin_num=15) + binSize = 100 / 15.0 + result = [0]*15 + for i in range(100): + bin = int(i/binSize) + try: + result[bin] += 1 + except IndexError: + pass + + result = [ (i * binSize + binSize/2, result[i]) for i in range(len(result))] + + self.assertEqual(result, out) + +if __name__ == "__main__": # pragma: no cover + unittest.main() diff --git a/python/altgraph/altgraph_tests/test_graphutil.py b/python/altgraph/altgraph_tests/test_graphutil.py new file mode 100644 index 000000000..c1166237c --- /dev/null +++ b/python/altgraph/altgraph_tests/test_graphutil.py @@ -0,0 +1,140 @@ +import unittest +from altgraph import GraphUtil +from altgraph import Graph, GraphError + +class TestGraphUtil (unittest.TestCase): + + def test_generate_random(self): + g = GraphUtil.generate_random_graph(10, 50) + self.assertEqual(g.number_of_nodes(), 10) + self.assertEqual(g.number_of_edges(), 50) + + seen = set() + + for e in g.edge_list(): + h, t = g.edge_by_id(e) + self.assertFalse(h == t) + self.assertTrue((h, t) not in seen) + seen.add((h, t)) + + g = GraphUtil.generate_random_graph(5, 30, multi_edges=True) + self.assertEqual(g.number_of_nodes(), 5) + self.assertEqual(g.number_of_edges(), 30) + + seen = set() + + for e in g.edge_list(): + h, t = g.edge_by_id(e) + self.assertFalse(h == t) + if (h, t) in seen: + break + seen.add((h, t)) + + else: + self.fail("no duplicates?") + + g = GraphUtil.generate_random_graph(5, 21, self_loops=True) + self.assertEqual(g.number_of_nodes(), 5) + self.assertEqual(g.number_of_edges(), 21) + + seen = set() + + for e in g.edge_list(): + h, t = g.edge_by_id(e) + self.assertFalse((h, t) in seen) + if h == t: + break + seen.add((h, t)) + + else: + self.fail("no self loops?") + + self.assertRaises(GraphError, GraphUtil.generate_random_graph, 5, 21) + g = 
GraphUtil.generate_random_graph(5, 21, True) + self.assertRaises(GraphError, GraphUtil.generate_random_graph, 5, 26, True) + + def test_generate_scale_free(self): + graph = GraphUtil.generate_scale_free_graph(50, 10) + self.assertEqual(graph.number_of_nodes(), 500) + + counts = {} + for node in graph: + degree = graph.inc_degree(node) + try: + counts[degree] += 1 + except KeyError: + counts[degree] = 1 + + total_counts = sum(counts.values()) + P = {} + for degree, count in counts.items(): + P[degree] = count * 1.0 / total_counts + + # XXX: use algoritm + # to check if P[degree] ~ degree ** G (for some G) + + #print sorted(P.items()) + + #print sorted([(count, degree) for degree, count in counts.items()]) + + #self.fail("missing tests for GraphUtil.generate_scale_free_graph") + + def test_filter_stack(self): + g = Graph.Graph() + g.add_node("1", "N.1") + g.add_node("1.1", "N.1.1") + g.add_node("1.1.1", "N.1.1.1") + g.add_node("1.1.2", "N.1.1.2") + g.add_node("1.1.3", "N.1.1.3") + g.add_node("1.1.1.1", "N.1.1.1.1") + g.add_node("1.1.1.2", "N.1.1.1.2") + g.add_node("1.1.2.1", "N.1.1.2.1") + g.add_node("1.1.2.2", "N.1.1.2.2") + g.add_node("1.1.2.3", "N.1.1.2.3") + g.add_node("2", "N.2") + + g.add_edge("1", "1.1") + g.add_edge("1.1", "1.1.1") + g.add_edge("1.1", "1.1.2") + g.add_edge("1.1", "1.1.3") + g.add_edge("1.1.1", "1.1.1.1") + g.add_edge("1.1.1", "1.1.1.2") + g.add_edge("1.1.2", "1.1.2.1") + g.add_edge("1.1.2", "1.1.2.2") + g.add_edge("1.1.2", "1.1.2.3") + + v, r, o = GraphUtil.filter_stack(g, "1", [ + lambda n: n != "N.1.1.1", lambda n: n != "N.1.1.2.3" ]) + + self.assertEqual(v, + set(["1", "1.1", "1.1.1", "1.1.2", "1.1.3", + "1.1.1.1", "1.1.1.2", "1.1.2.1", "1.1.2.2", + "1.1.2.3"])) + self.assertEqual(r, set([ + "1.1.1", "1.1.2.3"])) + + o.sort() + self.assertEqual(o, + [ + ("1.1", "1.1.1.1"), + ("1.1", "1.1.1.2") + ]) + + v, r, o = GraphUtil.filter_stack(g, "1", [ + lambda n: n != "N.1.1.1", lambda n: n != "N.1.1.1.2" ]) + + self.assertEqual(v, + set(["1", 
"1.1", "1.1.1", "1.1.2", "1.1.3", + "1.1.1.1", "1.1.1.2", "1.1.2.1", "1.1.2.2", + "1.1.2.3"])) + self.assertEqual(r, set([ + "1.1.1", "1.1.1.2"])) + + self.assertEqual(o, + [ + ("1.1", "1.1.1.1"), + ]) + + +if __name__ == "__main__": # pragma: no cover + unittest.main() diff --git a/python/altgraph/altgraph_tests/test_object_graph.py b/python/altgraph/altgraph_tests/test_object_graph.py new file mode 100644 index 000000000..9035607e7 --- /dev/null +++ b/python/altgraph/altgraph_tests/test_object_graph.py @@ -0,0 +1,349 @@ +import unittest +import sys +from altgraph.ObjectGraph import ObjectGraph +from altgraph.Graph import Graph + +try: + from StringIO import StringIO +except ImportError: + from io import StringIO + + +class Node (object): + def __init__(self, graphident): + self.graphident = graphident + +class SubNode (Node): + pass + +class ArgNode (object): + def __init__(self, graphident, *args, **kwds): + self.graphident = graphident + self.args = args + self.kwds = kwds + + def __repr__(self): + return ''%(self.graphident,) + +class TestObjectGraph (unittest.TestCase): + + def test_constructor(self): + graph = ObjectGraph() + self.assertTrue(isinstance(graph, ObjectGraph)) + + g = Graph() + graph = ObjectGraph(g) + self.assertTrue(graph.graph is g) + self.assertEqual(graph.debug, 0) + self.assertEqual(graph.indent, 0) + + graph = ObjectGraph(debug=5) + self.assertEqual(graph.debug, 5) + + def test_repr(self): + graph = ObjectGraph() + self.assertEqual(repr(graph), '') + + + def testNodes(self): + graph = ObjectGraph() + n1 = Node("n1") + n2 = Node("n2") + n3 = Node("n3") + n4 = Node("n4") + + n1b = Node("n1") + + self.assertTrue(graph.getIdent(graph) is graph) + self.assertTrue(graph.getRawIdent(graph) is graph) + + graph.addNode(n1) + graph.addNode(n2) + graph.addNode(n3) + + self.assertTrue(n1 in graph) + self.assertFalse(n4 in graph) + self.assertTrue("n1" in graph) + self.assertFalse("n4" in graph) + + self.assertTrue(graph.findNode(n1) is n1) + 
self.assertTrue(graph.findNode(n1b) is n1) + self.assertTrue(graph.findNode(n2) is n2) + self.assertTrue(graph.findNode(n4) is None) + self.assertTrue(graph.findNode("n1") is n1) + self.assertTrue(graph.findNode("n2") is n2) + self.assertTrue(graph.findNode("n4") is None) + + self.assertEqual(graph.getRawIdent(n1), "n1") + self.assertEqual(graph.getRawIdent(n1b), "n1") + self.assertEqual(graph.getRawIdent(n4), "n4") + self.assertEqual(graph.getRawIdent("n1"), None) + + self.assertEqual(graph.getIdent(n1), "n1") + self.assertEqual(graph.getIdent(n1b), "n1") + self.assertEqual(graph.getIdent(n4), "n4") + self.assertEqual(graph.getIdent("n1"), "n1") + + self.assertTrue(n3 in graph) + graph.removeNode(n3) + self.assertTrue(n3 not in graph) + graph.addNode(n3) + self.assertTrue(n3 in graph) + + n = graph.createNode(SubNode, "n1") + self.assertTrue(n is n1) + + n = graph.createNode(SubNode, "n8") + self.assertTrue(isinstance(n, SubNode)) + self.assertTrue(n in graph) + self.assertTrue(graph.findNode("n8") is n) + + n = graph.createNode(ArgNode, "args", 1, 2, 3, a='a', b='b') + self.assertTrue(isinstance(n, ArgNode)) + self.assertTrue(n in graph) + self.assertTrue(graph.findNode("args") is n) + self.assertEqual(n.args, (1, 2, 3)) + self.assertEqual(n.kwds, {'a':'a', 'b':'b'}) + + def testEdges(self): + graph = ObjectGraph() + n1 = graph.createNode(ArgNode, "n1", 1) + n2 = graph.createNode(ArgNode, "n2", 1) + n3 = graph.createNode(ArgNode, "n3", 1) + n4 = graph.createNode(ArgNode, "n4", 1) + + graph.createReference(n1, n2, "n1-n2") + graph.createReference("n1", "n3", "n1-n3") + graph.createReference("n2", n3) + + g = graph.graph + e = g.edge_by_node("n1", "n2") + self.assertTrue(e is not None) + self.assertEqual(g.edge_data(e), "n1-n2") + + e = g.edge_by_node("n1", "n3") + self.assertTrue(e is not None) + self.assertEqual(g.edge_data(e), "n1-n3") + + e = g.edge_by_node("n2", "n3") + self.assertTrue(e is not None) + self.assertEqual(g.edge_data(e), None) + + e = 
g.edge_by_node("n1", "n4") + self.assertTrue(e is None) + + graph.removeReference(n1, n2) + e = g.edge_by_node("n1", "n2") + self.assertTrue(e is None) + + graph.removeReference("n1", "n3") + e = g.edge_by_node("n1", "n3") + self.assertTrue(e is None) + + graph.createReference(n1, n2, "foo") + e = g.edge_by_node("n1", "n2") + self.assertTrue(e is not None) + self.assertEqual(g.edge_data(e), "foo") + + + def test_flatten(self): + graph = ObjectGraph() + n1 = graph.createNode(ArgNode, "n1", 1) + n2 = graph.createNode(ArgNode, "n2", 2) + n3 = graph.createNode(ArgNode, "n3", 3) + n4 = graph.createNode(ArgNode, "n4", 4) + n5 = graph.createNode(ArgNode, "n5", 5) + n6 = graph.createNode(ArgNode, "n6", 6) + n7 = graph.createNode(ArgNode, "n7", 7) + n8 = graph.createNode(ArgNode, "n8", 8) + + graph.createReference(graph, n1) + graph.createReference(graph, n7) + graph.createReference(n1, n2) + graph.createReference(n1, n4) + graph.createReference(n2, n3) + graph.createReference(n2, n5) + graph.createReference(n5, n6) + graph.createReference(n4, n6) + graph.createReference(n4, n2) + + self.assertFalse(isinstance(graph.flatten(), list)) + + fl = list(graph.flatten()) + self.assertTrue(n1 in fl) + self.assertTrue(n2 in fl) + self.assertTrue(n3 in fl) + self.assertTrue(n4 in fl) + self.assertTrue(n5 in fl) + self.assertTrue(n6 in fl) + self.assertTrue(n7 in fl) + self.assertFalse(n8 in fl) + + fl = list(graph.flatten(start=n2)) + self.assertFalse(n1 in fl) + self.assertTrue(n2 in fl) + self.assertTrue(n3 in fl) + self.assertFalse(n4 in fl) + self.assertTrue(n5 in fl) + self.assertTrue(n6 in fl) + self.assertFalse(n7 in fl) + self.assertFalse(n8 in fl) + + graph.createReference(n1, n5) + fl = list(graph.flatten(lambda n: n.args[0] % 2 != 0)) + self.assertTrue(n1 in fl) + self.assertFalse(n2 in fl) + self.assertFalse(n3 in fl) + self.assertFalse(n4 in fl) + self.assertTrue(n5 in fl) + self.assertFalse(n6 in fl) + self.assertTrue(n7 in fl) + self.assertFalse(n8 in fl) + + def 
test_iter_nodes(self): + graph = ObjectGraph() + n1 = graph.createNode(ArgNode, "n1", 1) + n2 = graph.createNode(ArgNode, "n2", 2) + n3 = graph.createNode(ArgNode, "n3", 3) + n4 = graph.createNode(ArgNode, "n4", 4) + n5 = graph.createNode(ArgNode, "n5", 5) + n6 = graph.createNode(ArgNode, "n6", 5) + + nodes = graph.nodes() + if sys.version[0] == '2': + self.assertTrue(hasattr(nodes, 'next')) + else: + self.assertTrue(hasattr(nodes, '__next__')) + self.assertTrue(hasattr(nodes, '__iter__')) + + nodes = list(nodes) + self.assertEqual(len(nodes), 6) + self.assertTrue(n1 in nodes) + self.assertTrue(n2 in nodes) + self.assertTrue(n3 in nodes) + self.assertTrue(n4 in nodes) + self.assertTrue(n5 in nodes) + self.assertTrue(n6 in nodes) + + def test_get_edges(self): + graph = ObjectGraph() + n1 = graph.createNode(ArgNode, "n1", 1) + n2 = graph.createNode(ArgNode, "n2", 2) + n3 = graph.createNode(ArgNode, "n3", 3) + n4 = graph.createNode(ArgNode, "n4", 4) + n5 = graph.createNode(ArgNode, "n5", 5) + n6 = graph.createNode(ArgNode, "n6", 5) + + graph.createReference(n1, n2) + graph.createReference(n1, n3) + graph.createReference(n3, n1) + graph.createReference(n5, n1) + graph.createReference(n2, n4) + graph.createReference(n2, n5) + graph.createReference(n6, n2) + + outs, ins = graph.get_edges(n1) + + self.assertFalse(isinstance(outs, list)) + self.assertFalse(isinstance(ins, list)) + + ins = list(ins) + outs = list(outs) + + + self.assertTrue(n1 not in outs) + self.assertTrue(n2 in outs) + self.assertTrue(n3 in outs) + self.assertTrue(n4 not in outs) + self.assertTrue(n5 not in outs) + self.assertTrue(n6 not in outs) + + self.assertTrue(n1 not in ins) + self.assertTrue(n2 not in ins) + self.assertTrue(n3 in ins) + self.assertTrue(n4 not in ins) + self.assertTrue(n5 in ins) + self.assertTrue(n6 not in ins) + + def test_filterStack(self): + graph = ObjectGraph() + n1 = graph.createNode(ArgNode, "n1", 0) + n11 = graph.createNode(ArgNode, "n1.1", 1) + n12 = 
graph.createNode(ArgNode, "n1.2", 0) + n111 = graph.createNode(ArgNode, "n1.1.1", 0) + n112 = graph.createNode(ArgNode, "n1.1.2",2) + n2 = graph.createNode(ArgNode, "n2", 0) + n3 = graph.createNode(ArgNode, "n2", 0) + + graph.createReference(None, n1) + graph.createReference(None, n2) + graph.createReference(n1, n11) + graph.createReference(n1, n12) + graph.createReference(n11, n111) + graph.createReference(n11, n112) + + self.assertTrue(n1 in graph) + self.assertTrue(n2 in graph) + self.assertTrue(n11 in graph) + self.assertTrue(n12 in graph) + self.assertTrue(n111 in graph) + self.assertTrue(n112 in graph) + self.assertTrue(n2 in graph) + self.assertTrue(n3 in graph) + + visited, removes, orphans = graph.filterStack( + [lambda n: n.args[0] != 1, lambda n: n.args[0] != 2]) + + self.assertEqual(visited, 6) + self.assertEqual(removes, 2) + self.assertEqual(orphans, 1) + + e = graph.graph.edge_by_node(n1.graphident, n111.graphident) + self.assertEqual(graph.graph.edge_data(e), "orphan") + + self.assertTrue(n1 in graph) + self.assertTrue(n2 in graph) + self.assertTrue(n11 not in graph) + self.assertTrue(n12 in graph) + self.assertTrue(n111 in graph) + self.assertTrue(n112 not in graph) + self.assertTrue(n2 in graph) + self.assertTrue(n3 in graph) + + +class TestObjectGraphIO (unittest.TestCase): + def setUp(self): + self._stdout = sys.stdout + + def tearDown(self): + sys.stdout = self._stdout + + def test_msg(self): + graph = ObjectGraph() + + sys.stdout = fp = StringIO() + graph.msg(0, "foo") + self.assertEqual(fp.getvalue(), "foo \n") + + sys.stdout = fp = StringIO() + graph.msg(5, "foo") + self.assertEqual(fp.getvalue(), "") + + sys.stdout = fp = StringIO() + graph.debug = 10 + graph.msg(5, "foo") + self.assertEqual(fp.getvalue(), "foo \n") + + sys.stdout = fp = StringIO() + graph.msg(0, "foo", 1, "a") + self.assertEqual(fp.getvalue(), "foo 1 'a'\n") + + sys.stdout = fp = StringIO() + graph.msgin(0, "hello", "world") + graph.msg(0, "test me") + graph.msgout(0, "bye 
bye") + self.assertEqual(fp.getvalue(), "hello 'world'\n test me \nbye bye \n") + + +if __name__ == "__main__": # pragma: no cover + unittest.main() diff --git a/python/altgraph/doc/Makefile b/python/altgraph/doc/Makefile new file mode 100644 index 000000000..b91ac8142 --- /dev/null +++ b/python/altgraph/doc/Makefile @@ -0,0 +1,89 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . + +.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + -rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." 
+ +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/altgraph.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/altgraph.qhc" + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ + "run these through (pdf)latex." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." 
diff --git a/python/altgraph/doc/_build/doctrees/changelog.doctree b/python/altgraph/doc/_build/doctrees/changelog.doctree new file mode 100644 index 000000000..b78e2f235 Binary files /dev/null and b/python/altgraph/doc/_build/doctrees/changelog.doctree differ diff --git a/python/altgraph/doc/_build/doctrees/core.doctree b/python/altgraph/doc/_build/doctrees/core.doctree new file mode 100644 index 000000000..367409313 Binary files /dev/null and b/python/altgraph/doc/_build/doctrees/core.doctree differ diff --git a/python/altgraph/doc/_build/doctrees/dot.doctree b/python/altgraph/doc/_build/doctrees/dot.doctree new file mode 100644 index 000000000..f2bd9ea89 Binary files /dev/null and b/python/altgraph/doc/_build/doctrees/dot.doctree differ diff --git a/python/altgraph/doc/_build/doctrees/environment.pickle b/python/altgraph/doc/_build/doctrees/environment.pickle new file mode 100644 index 000000000..4e7b4ea03 Binary files /dev/null and b/python/altgraph/doc/_build/doctrees/environment.pickle differ diff --git a/python/altgraph/doc/_build/doctrees/graph.doctree b/python/altgraph/doc/_build/doctrees/graph.doctree new file mode 100644 index 000000000..5c9aef42c Binary files /dev/null and b/python/altgraph/doc/_build/doctrees/graph.doctree differ diff --git a/python/altgraph/doc/_build/doctrees/graphalgo.doctree b/python/altgraph/doc/_build/doctrees/graphalgo.doctree new file mode 100644 index 000000000..e22ca572d Binary files /dev/null and b/python/altgraph/doc/_build/doctrees/graphalgo.doctree differ diff --git a/python/altgraph/doc/_build/doctrees/graphstat.doctree b/python/altgraph/doc/_build/doctrees/graphstat.doctree new file mode 100644 index 000000000..2e0503615 Binary files /dev/null and b/python/altgraph/doc/_build/doctrees/graphstat.doctree differ diff --git a/python/altgraph/doc/_build/doctrees/graphutil.doctree b/python/altgraph/doc/_build/doctrees/graphutil.doctree new file mode 100644 index 000000000..eb9fa2c65 Binary files /dev/null and 
b/python/altgraph/doc/_build/doctrees/graphutil.doctree differ diff --git a/python/altgraph/doc/_build/doctrees/index.doctree b/python/altgraph/doc/_build/doctrees/index.doctree new file mode 100644 index 000000000..f3241e5e3 Binary files /dev/null and b/python/altgraph/doc/_build/doctrees/index.doctree differ diff --git a/python/altgraph/doc/_build/doctrees/license.doctree b/python/altgraph/doc/_build/doctrees/license.doctree new file mode 100644 index 000000000..ba3ce378a Binary files /dev/null and b/python/altgraph/doc/_build/doctrees/license.doctree differ diff --git a/python/altgraph/doc/_build/doctrees/objectgraph.doctree b/python/altgraph/doc/_build/doctrees/objectgraph.doctree new file mode 100644 index 000000000..802396b9c Binary files /dev/null and b/python/altgraph/doc/_build/doctrees/objectgraph.doctree differ diff --git a/python/altgraph/doc/_build/html/.buildinfo b/python/altgraph/doc/_build/html/.buildinfo new file mode 100644 index 000000000..003a04976 --- /dev/null +++ b/python/altgraph/doc/_build/html/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. +config: 84aa655833f036f5ba0f6f2dbd1945fa +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/python/altgraph/doc/_build/html/_sources/changelog.txt b/python/altgraph/doc/_build/html/_sources/changelog.txt new file mode 100644 index 000000000..e491c9544 --- /dev/null +++ b/python/altgraph/doc/_build/html/_sources/changelog.txt @@ -0,0 +1,176 @@ +Release history +=============== + +0.11 +---- + +- Stabilize the order of elements in dot file exports, + patch from bitbucket user 'pombredanne'. 
+ +- Tweak setup.py file to remove dependency on distribute (but + keep the dependency on setuptools) + + +0.10.2 +------ + +- There where no classifiers in the package metadata due to a bug + in setup.py + +0.10.1 +------ + +This is a bugfix release + +Bug fixes: + +- Issue #3: The source archive contains a README.txt + while the setup file refers to ReadMe.txt. + + This is caused by a misfeature in distutils, as a + workaround I've renamed ReadMe.txt to README.txt + in the source tree and setup file. + + +0.10 +----- + +This is a minor feature release + +Features: + +- Do not use "2to3" to support Python 3. + + As a side effect of this altgraph now supports + Python 2.6 and later, and no longer supports + earlier releases of Python. + +- The order of attributes in the Dot output + is now always alphabetical. + + With this change the output will be consistent + between runs and Python versions. + +0.9 +--- + +This is a minor bugfix release + +Features: + +- Added ``altgraph.ObjectGraph.ObjectGraph.nodes``, a method + yielding all nodes in an object graph. + +Bugfixes: + +- The 0.8 release didn't work with py2app when using + python 3.x. + + +0.8 +----- + +This is a minor feature release. The major new feature +is a extensive set of unittests, which explains almost +all other changes in this release. + +Bugfixes: + +- Installing failed with Python 2.5 due to using a distutils + class that isn't available in that version of Python + (issue #1 on the issue tracker) + +- ``altgraph.GraphStat.degree_dist`` now actually works + +- ``altgraph.Graph.add_edge(a, b, create_nodes=False)`` will + no longer create the edge when one of the nodes doesn't + exist. + +- ``altgraph.Graph.forw_topo_sort`` failed for some sparse graphs. + +- ``altgraph.Graph.back_topo_sort`` was completely broken in + previous releases. + +- ``altgraph.Graph.forw_bfs_subgraph`` now actually works. + +- ``altgraph.Graph.back_bfs_subgraph`` now actually works. 
+ +- ``altgraph.Graph.iterdfs`` now returns the correct result + when the ``forward`` argument is ``False``. + +- ``altgraph.Graph.iterdata`` now returns the correct result + when the ``forward`` argument is ``False``. + + +Features: + +- The ``altgraph.Graph`` constructor now accepts an argument + that contains 2- and 3-tuples instead of requireing that + all items have the same size. The (optional) argument can now + also be any iterator. + +- ``altgraph.Graph.Graph.add_node`` has no effect when you + add a hidden node. + +- The private method ``altgraph.Graph._bfs`` is no longer + present. + +- The private method ``altgraph.Graph._dfs`` is no longer + present. + +- ``altgraph.ObjectGraph`` now has a ``__contains__`` methods, + which means you can use the ``in`` operator to check if a + node is part of a graph. + +- ``altgraph.GraphUtil.generate_random_graph`` will raise + ``GraphError`` instead of looping forever when it is + impossible to create the requested graph. + +- ``altgraph.Dot.edge_style`` raises ``GraphError`` when + one of the nodes is not present in the graph. The method + silently added the tail in the past, but without ensuring + a consistent graph state. + +- ``altgraph.Dot.save_img`` now works when the mode is + ``"neato"``. + +0.7.2 +----- + +This is a minor bugfix release + +Bugfixes: + +- distutils didn't include the documentation subtree + +0.7.1 +----- + +This is a minor feature release + +Features: + +- Documentation is now generated using `sphinx `_ + and can be viewed at . + +- The repository has moved to bitbucket + +- ``altgraph.GraphStat.avg_hops`` is no longer present, the function had no + implementation and no specified behaviour. + +- the module ``altgraph.compat`` is gone, which means altgraph will no + longer work with Python 2.3. + + +0.7.0 +----- + +This is a minor feature release. 
+ +Features: + +- Support for Python 3 + +- It is now possible to run tests using 'python setup.py test' + + (The actual testsuite is still very minimal though) diff --git a/python/altgraph/doc/_build/html/_sources/core.txt b/python/altgraph/doc/_build/html/_sources/core.txt new file mode 100644 index 000000000..8288f6a94 --- /dev/null +++ b/python/altgraph/doc/_build/html/_sources/core.txt @@ -0,0 +1,26 @@ +:mod:`altgraph` --- A Python Graph Library +================================================== + +.. module:: altgraph + :synopsis: A directional graph for python + +altgraph is a fork of `graphlib `_ tailored +to use newer Python 2.3+ features, including additional support used by the +py2app suite (modulegraph and macholib, specifically). + +altgraph is a python based graph (network) representation and manipulation package. +It has started out as an extension to the `graph_lib module `_ +written by Nathan Denny it has been significantly optimized and expanded. + +The :class:`altgraph.Graph.Graph` class is loosely modeled after the `LEDA `_ +(Library of Efficient Datatypes) representation. The library +includes methods for constructing graphs, BFS and DFS traversals, +topological sort, finding connected components, shortest paths as well as a number +graph statistics functions. The library can also visualize graphs +via `graphviz `_. + + +.. exception:: GraphError + + Exception raised when methods are called with bad values of + an inconsistent state. diff --git a/python/altgraph/doc/_build/html/_sources/dot.txt b/python/altgraph/doc/_build/html/_sources/dot.txt new file mode 100644 index 000000000..3848c488a --- /dev/null +++ b/python/altgraph/doc/_build/html/_sources/dot.txt @@ -0,0 +1,224 @@ +:mod:`altgraph.Dot` --- Interface to the dot language +===================================================== + +.. module:: altgraph.Dot + :synopsis: Interface to the dot language as used by Graphviz.. 
+ +The :py:mod:`~altgraph.Dot` module provides a simple interface to the +file format used in the `graphviz`_ program. The module is intended to +offload the most tedious part of the process (the **dot** file generation) +while transparently exposing most of its features. + +.. _`graphviz`: `_ + +To display the graphs or to generate image files the `graphviz`_ +package needs to be installed on the system, moreover the :command:`dot` and :command:`dotty` programs must +be accesible in the program path so that they can be ran from processes spawned +within the module. + +Example usage +------------- + +Here is a typical usage:: + + from altgraph import Graph, Dot + + # create a graph + edges = [ (1,2), (1,3), (3,4), (3,5), (4,5), (5,4) ] + graph = Graph.Graph(edges) + + # create a dot representation of the graph + dot = Dot.Dot(graph) + + # display the graph + dot.display() + + # save the dot representation into the mydot.dot file + dot.save_dot(file_name='mydot.dot') + + # save dot file as gif image into the graph.gif file + dot.save_img(file_name='graph', file_type='gif') + + +Directed graph and non-directed graph +------------------------------------- + +Dot class can use for both directed graph and non-directed graph +by passing *graphtype* parameter. + +Example:: + + # create directed graph(default) + dot = Dot.Dot(graph, graphtype="digraph") + + # create non-directed graph + dot = Dot.Dot(graph, graphtype="graph") + + +Customizing the output +---------------------- + +The graph drawing process may be customized by passing +valid :command:`dot` parameters for the nodes and edges. For a list of all +parameters see the `graphviz`_ documentation. 
+ +Example:: + + # customizing the way the overall graph is drawn + dot.style(size='10,10', rankdir='RL', page='5, 5' , ranksep=0.75) + + # customizing node drawing + dot.node_style(1, label='BASE_NODE',shape='box', color='blue' ) + dot.node_style(2, style='filled', fillcolor='red') + + # customizing edge drawing + dot.edge_style(1, 2, style='dotted') + dot.edge_style(3, 5, arrowhead='dot', label='binds', labelangle='90') + dot.edge_style(4, 5, arrowsize=2, style='bold') + + + .. note:: + + dotty (invoked via :py:func:`~altgraph.Dot.display`) may not be able to + display all graphics styles. To verify the output save it to an image + file and look at it that way. + +Valid attributes +---------------- + +- dot styles, passed via the :py:meth:`Dot.style` method:: + + rankdir = 'LR' (draws the graph horizontally, left to right) + ranksep = number (rank separation in inches) + +- node attributes, passed via the :py:meth:`Dot.node_style` method:: + + style = 'filled' | 'invisible' | 'diagonals' | 'rounded' + shape = 'box' | 'ellipse' | 'circle' | 'point' | 'triangle' + +- edge attributes, passed via the :py:meth:`Dot.edge_style` method:: + + style = 'dashed' | 'dotted' | 'solid' | 'invis' | 'bold' + arrowhead = 'box' | 'crow' | 'diamond' | 'dot' | 'inv' | 'none' | 'tee' | 'vee' + weight = number (the larger the number the closer the nodes will be) + +- valid `graphviz colors `_ + +- for more details on how to control the graph drawing process see the + `graphviz reference `_. + + +Class interface +--------------- + +.. class:: Dot(graph[, nodes[, edgefn[, nodevisitor[, edgevisitor[, name[, dot[, dotty[, neato[, graphtype]]]]]]]]]) + + Creates a new Dot generator based on the specified + :class:`Graph `. The Dot generator won't reference + the *graph* once it is constructed. + + If the *nodes* argument is present it is the list of nodes to include + in the graph, otherwise all nodes in *graph* are included. 
+ + If the *edgefn* argument is present it is a function that yields the + nodes connected to another node, this defaults to + :meth:`graph.out_nbr `. The constructor won't + add edges to the dot file unless both the head and tail of the edge + are in *nodes*. + + If the *name* is present it specifies the name of the graph in the resulting + dot file. The default is ``"G"``. + + The functions *nodevisitor* and *edgevisitor* return the default style + for a given edge or node (both default to functions that return an empty + style). + + The arguments *dot*, *dotty* and *neato* are used to pass the path to + the corresponding `graphviz`_ command. + + +Updating graph attributes +......................... + +.. method:: Dot.style(\**attr) + + Sets the overall style (graph attributes) to the given attributes. + + See `Valid Attributes`_ for more information about the attributes. + +.. method:: Dot.node_style(node, \**attr) + + Sets the style for *node* to the given attributes. + + This method will add *node* to the graph when it isn't already + present. + + See `Valid Attributes`_ for more information about the attributes. + +.. method:: Dot.all_node_style(\**attr) + + Replaces the current style for all nodes + + +.. method:: edge_style(head, tail, \**attr) + + Sets the style of an edge to the given attributes. The edge will + be added to the graph when it isn't already present, but *head* + and *tail* must both be valid nodes. + + See `Valid Attributes`_ for more information about the attributes. + + + +Emitting output +............... + +.. method:: Dot.display([mode]) + + Displays the current graph via dotty. + + If the *mode* is ``"neato"`` the dot file is processed with + the neato command before displaying. + + This method won't return until the dotty command exits. + +.. method:: save_dot(filename) + + Saves the current graph representation into the given file. + + .. 
note:: + + For backward compatibility reasons this method can also + be called without an argument, it will then write the graph + into a fixed filename (present in the attribute :data:`Graph.temp_dot`). + + This feature is deprecated and should not be used. + + +.. method:: save_image(file_name[, file_type[, mode]]) + + Saves the current graph representation as an image file. The output + is written into a file whose basename is *file_name* and whose suffix + is *file_type*. + + The *file_type* specifies the type of file to write, the default + is ``"gif"``. + + If the *mode* is ``"neato"`` the dot file is processed with + the neato command before displaying. + + .. note:: + + For backward compatibility reasons this method can also + be called without an argument, it will then write the graph + with a fixed basename (``"out"``). + + This feature is deprecated and should not be used. + +.. method:: iterdot() + + Yields all lines of a `graphviz`_ input file (including line endings). + +.. method:: __iter__() + + Alias for the :meth:`iterdot` method. diff --git a/python/altgraph/doc/_build/html/_sources/graph.txt b/python/altgraph/doc/_build/html/_sources/graph.txt new file mode 100644 index 000000000..72e36bbc1 --- /dev/null +++ b/python/altgraph/doc/_build/html/_sources/graph.txt @@ -0,0 +1,299 @@ +:mod:`altgraph.Graph` --- Basic directional graphs +================================================== + +.. module:: altgraph.Graph + :synopsis: Basic directional graphs. + +The module :mod:`altgraph.Graph` provides a class :class:`Graph` that +represents a directed graph with *N* nodes and *E* edges. + +.. class:: Graph([edges]) + + Constructs a new empty :class:`Graph` object. If the optional + *edges* parameter is supplied, updates the graph by adding the + specified edges. + + All of the elements in *edges* should be tuples with two or three + elements. 
The first two elements of the tuple are the source and + destination node of the edge, the optional third element is the + edge data. The source and destination nodes are added to the graph + when the aren't already present. + + +Node related methods +-------------------- + +.. method:: Graph.add_node(node[, node_data]) + + Adds a new node to the graph if it is not already present. The new + node must be a hashable object. + + Arbitrary data can be attached to the node via the optional *node_data* + argument. + + .. note:: the node also won't be added to the graph when it is + present but currently hidden. + + +.. method:: Graph.hide_node(node) + + Hides a *node* from the graph. The incoming and outgoing edges of + the node will also be hidden. + + Raises :class:`altgraph.GraphError` when the node is not (visible) + node of the graph. + + +.. method:: Graph.restore_node(node) + + Restores a previously hidden *node*. The incoming and outgoing + edges of the node are also restored. + + Raises :class:`altgraph.GraphError` when the node is not a hidden + node of the graph. + +.. method:: Graph.restore_all_nodes() + + Restores all hidden nodes. + +.. method:: Graph.number_of_nodes() + + Return the number of visible nodes in the graph. + +.. method:: Graph.number_of_hidden_nodes() + + Return the number of hidden nodes in the graph. + +.. method:: Graph.node_list() + + Return a list with all visible nodes in the graph. + +.. method:: Graph.hidden_node_list() + + Return a list with all hidden nodes in the graph. + +.. method:: node_data(node) + + Return the data associated with the *node* when it was + added. + +.. method:: Graph.describe_node(node) + + Returns *node*, the node's data and the lists of outgoing + and incoming edges for the node. + + .. note:: + + the edge lists should not be modified, doing so + can result in unpredicatable behavior. + +.. method:: Graph.__contains__(node) + + Returns True iff *node* is a node in the graph. 
This + method is accessed through the *in* operator. + +.. method:: Graph.__iter__() + + Yield all nodes in the graph. + +.. method:: Graph.out_edges(node) + + Return the list of outgoing edges for *node* + +.. method:: Graph.inc_edges(node) + + Return the list of incoming edges for *node* + +.. method:: Graph.all_edges(node) + + Return the list of incoming and outgoing edges for *node* + +.. method:: Graph.out_degree(node) + + Return the number of outgoing edges for *node*. + +.. method:: Graph.inc_degree(node) + + Return the number of incoming edges for *node*. + +.. method:: Graph.all_degree(node) + + Return the number of edges (incoming or outgoing) for *node*. + +Edge related methods +-------------------- + +.. method:: Graph.add_edge(head_id, tail_id [, edge data [, create_nodes]]) + + Adds a directed edge from *head_id* to *tail_id*. Arbitrary data can + be added via *edge_data*. When *create_nodes* is *True* (the default), + *head_id* and *tail_id* will be added to the graph when the aren't + already present. + +.. method:: Graph.hide_edge(edge) + + Hides an edge from the graph. The edge may be unhidden at some later + time. + +.. method:: Graph.restore_edge(edge) + + Restores a previously hidden *edge*. + +.. method:: Graph.restore_all_edges() + + Restore all edges that were hidden before, except for edges + referring to hidden nodes. + +.. method:: Graph.edge_by_node(head, tail) + + Return the edge ID for an edge from *head* to *tail*, + or :data:`None` when no such edge exists. + +.. method:: Graph.edge_by_id(edge) + + Return the head and tail of the *edge* + +.. method:: Graph.edge_data(edge) + + Return the data associated with the *edge*. + + +.. method:: Graph.head(edge) + + Return the head of an *edge* + +.. method:: Graph.tail(edge) + + Return the tail of an *edge* + +.. method:: Graph.describe_edge(edge) + + Return the *edge*, the associated data, its head and tail. + +.. method:: Graph.number_of_edges() + + Return the number of visible edges. 
+ +.. method:: Graph.number_of_hidden_edges() + + Return the number of hidden edges. + +.. method:: Graph.edge_list() + + Returns a list with all visible edges in the graph. + +.. method:: Graph.hidden_edge_list() + + Returns a list with all hidden edges in the graph. + +Graph traversal +--------------- + +.. method:: Graph.out_nbrs(node) + + Return a list of all nodes connected by outgoing edges. + +.. method:: Graph.inc_nbrs(node) + + Return a list of all nodes connected by incoming edges. + +.. method:: Graph.all_nbrs(node) + + Returns a list of nodes connected by an incoming or outgoing edge. + +.. method:: Graph.forw_topo_sort() + + Return a list of nodes where the successors (based on outgoing + edges) of any given node apear in the sequence after that node. + +.. method:: Graph.back_topo_sort() + + Return a list of nodes where the successors (based on incoming + edges) of any given node apear in the sequence after that node. + +.. method:: Graph.forw_bfs_subgraph(start_id) + + Return a subgraph consisting of the breadth first + reachable nodes from *start_id* based on their outgoing edges. + + +.. method:: Graph.back_bfs_subgraph(start_id) + + Return a subgraph consisting of the breadth first + reachable nodes from *start_id* based on their incoming edges. + +.. method:: Graph.iterdfs(start[, end[, forward]]) + + Yield nodes in a depth first traversal starting at the *start* + node. + + If *end* is specified traversal stops when reaching that node. + + If forward is True (the default) edges are traversed in forward + direction, otherwise they are traversed in reverse direction. + +.. method:: Graph.iterdata(start[, end[, forward[, condition]]]) + + Yield the associated data for nodes in a depth first traversal + starting at the *start* node. This method will not yield values for nodes + without associated data. + + If *end* is specified traversal stops when reaching that node. 
+ + If *condition* is specified and the condition callable returns + False for the associated data this method will not yield the + associated data and will not follow the edges for the node. + + If forward is True (the default) edges are traversed in forward + direction, otherwise they are traversed in reverse direction. + +.. method:: Graph.forw_bfs(start[, end]) + + Returns a list of nodes starting at *start* in some bread first + search order (following outgoing edges). + + When *end* is specified iteration stops at that node. + +.. method:: Graph.back_bfs(start[, end]) + + Returns a list of nodes starting at *start* in some bread first + search order (following incoming edges). + + When *end* is specified iteration stops at that node. + +.. method:: Graph.get_hops(start[, end[, forward]]) + + Computes the hop distance to all nodes centered around a specified node. + + First order neighbours are at hop 1, their neigbours are at hop 2 etc. + Uses :py:meth:`forw_bfs` or :py:meth:`back_bfs` depending on the value of + the forward parameter. + + If the distance between all neighbouring nodes is 1 the hop number + corresponds to the shortest distance between the nodes. + + Typical usage:: + + >>> print graph.get_hops(1, 8) + >>> [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)] + # node 1 is at 0 hops + # node 2 is at 1 hop + # ... + # node 8 is at 5 hops + + +Graph statistics +---------------- + +.. method:: Graph.connected() + + Returns True iff every node in the graph can be reached from + every other node. + +.. method:: Graph.clust_coef(node) + + Returns the local clustering coefficient of node. + + The local cluster coefficient is the proportion of the actual number + of edges between neighbours of node and the maximum number of + edges between those nodes. 
diff --git a/python/altgraph/doc/_build/html/_sources/graphalgo.txt b/python/altgraph/doc/_build/html/_sources/graphalgo.txt new file mode 100644 index 000000000..84d492f44 --- /dev/null +++ b/python/altgraph/doc/_build/html/_sources/graphalgo.txt @@ -0,0 +1,26 @@ +:mod:`altgraph.GraphAlgo` --- Graph algorithms +================================================== + +.. module:: altgraph.GraphAlgo + :synopsis: Basic graphs algoritms + +.. function:: dijkstra(graph, start[, end]) + + Dijkstra's algorithm for shortest paths. + + Find shortest paths from the start node to all nodes nearer + than or equal to the *end* node. The edge data is assumed to be the edge length. + + .. note:: + + Dijkstra's algorithm is only guaranteed to work correctly when all edge lengths are positive. + This code does not verify this property for all edges (only the edges examined until the end + vertex is reached), but will correctly compute shortest paths even for some graphs with negative + edges, and will raise an exception if it discovers that a negative edge has caused it to make a mistake. + + +.. function:: shortest_path(graph, start, end) + + Find a single shortest path from the given start node to the given end node. + The input has the same conventions as :func:`dijkstra`. The output is a list + of the nodes in order along the shortest path. diff --git a/python/altgraph/doc/_build/html/_sources/graphstat.txt b/python/altgraph/doc/_build/html/_sources/graphstat.txt new file mode 100644 index 000000000..0931a12dd --- /dev/null +++ b/python/altgraph/doc/_build/html/_sources/graphstat.txt @@ -0,0 +1,25 @@ +:mod:`altgraph.GraphStat` --- Functions providing various graph statistics +========================================================================== + +.. module:: altgraph.GraphStat + :synopsis: Functions providing various graph statistics + +The module :mod:`altgraph.GraphStat` provides function that calculate +graph statistics. 
Currently there is only one such function, more may +be added later. + +.. function:: degree_dist(graph[, limits[, bin_num[, mode]]]) + + Groups the number of edges per node into *bin_num* bins + and returns the list of those bins. Every item in the result + is a tuple with the center of the bin and the number of items + in that bin. + + When the *limits* argument is present it must be a tuple with + the mininum and maximum number of edges that get binned (that + is, when *limits* is ``(4, 10)`` only nodes with between 4 + and 10 edges get counted. + + The *mode* argument is used to count incoming (``'inc'``) or + outgoing (``'out'``) edges. The default is to count the outgoing + edges. diff --git a/python/altgraph/doc/_build/html/_sources/graphutil.txt b/python/altgraph/doc/_build/html/_sources/graphutil.txt new file mode 100644 index 000000000..c07836df8 --- /dev/null +++ b/python/altgraph/doc/_build/html/_sources/graphutil.txt @@ -0,0 +1,55 @@ +:mod:`altgraph.GraphUtil` --- Utility functions +================================================ + +.. module:: altgraph.GraphUtil + :synopsis: Utility functions + +The module :mod:`altgraph.GraphUtil` performs a number of more +or less useful utility functions. + +.. function:: generate_random_graph(node_num, edge_num[, self_loops[, multi_edges]) + + Generates and returns a :class:`Graph ` instance + with *node_num* nodes randomly connected by *edge_num* edges. + + When *self_loops* is present and True there can be edges that point from + a node to itself. + + When *multi_edge* is present and True there can be duplicate edges. + + This method raises :class:`GraphError `_ + +.. function:: filter_stack(graph, head, filters) + + Perform a depth-first oder walk of the graph starting at *head* and + apply all filter functions in *filters* on the node data of the nodes + found. 
+ + Returns (*visited*, *removes*, *orphans*), where + + * *visited*: the set of visited nodes + + * *removes*: the list of nodes where the node data doesn't match + all *filters*. + + * *orphans*: list of tuples (*last_good*, *node*), where + node is not in *removes* and one of the nodes that is connected + by an incoming edge is in *removes*. *Last_good* is the + closest upstream node that is not in *removes*. diff --git a/python/altgraph/doc/_build/html/_sources/index.txt b/python/altgraph/doc/_build/html/_sources/index.txt new file mode 100644 index 000000000..1e8d504ed --- /dev/null +++ b/python/altgraph/doc/_build/html/_sources/index.txt @@ -0,0 +1,41 @@ +.. altgraph documentation master file, created by + sphinx-quickstart on Tue Aug 31 11:04:49 2010. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Altgraph - A basic graph library +================================ + +altgraph is a fork of graphlib: a graph (network) package for constructing +graphs, BFS and DFS traversals, topological sort, shortest paths, etc. with +graphviz output. + +The primary users of this package are `macholib `_ and `modulegraph `_. + +.. toctree:: + :maxdepth: 1 + + changelog + license + core + graph + objectgraph + graphalgo + graphstat + graphutil + dot + +Online Resources +---------------- + +* `Sourcecode repository on bitbucket `_ + +* `The issue tracker `_ + +Indices and tables +------------------ + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff --git a/python/altgraph/doc/_build/html/_sources/license.txt b/python/altgraph/doc/_build/html/_sources/license.txt new file mode 100644 index 000000000..498e60be0 --- /dev/null +++ b/python/altgraph/doc/_build/html/_sources/license.txt @@ -0,0 +1,25 @@ +License +======= + +Copyright (c) 2004 Istvan Albert unless otherwise noted. + +Parts are copyright (c) Bob Ippolito + +Parts are copyright (c) 2010-2014 Ronald Oussoren + +MIT License +........... 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the "Software"), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do +so. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + diff --git a/python/altgraph/doc/_build/html/_sources/objectgraph.txt b/python/altgraph/doc/_build/html/_sources/objectgraph.txt new file mode 100644 index 000000000..87485255f --- /dev/null +++ b/python/altgraph/doc/_build/html/_sources/objectgraph.txt @@ -0,0 +1,134 @@ +:mod:`altgraph.ObjectGraph` --- Graphs of objecs with an identifier +=================================================================== + +.. module:: altgraph.ObjectGraph + :synopsis: A graph of objects that have a "graphident" attribute. + +.. class:: ObjectGraph([graph[, debug]]) + + A graph of objects that have a "graphident" attribute. The + value of this attribute is the key for the object in the + graph. + + The optional *graph* is a previously constructed + :class:`Graph `. + + The optional *debug* level controls the amount of debug output + (see :meth:`msg`, :meth:`msgin` and :meth:`msgout`). + + .. note:: the altgraph library does not generate output, the + debug attribute and message methods are present for use + by subclasses. + +.. data:: ObjectGraph.graph + + An :class:`Graph ` object that contains + the graph data. + + +.. 
method:: ObjectGraph.addNode(node) + + Adds a *node* to the graph. + + .. note:: re-adding a node that was previously removed + using :meth:`removeNode` will reinstate the previously + removed node. + +.. method:: ObjectGraph.createNode(self, cls, name, \*args, \**kwds) + + Creates a new node using ``cls(*args, **kwds)`` and adds that + node using :meth:`addNode`. + + Returns the newly created node. + +.. method:: ObjectGraph.removeNode(node) + + Removes a *node* from the graph when it exists. The *node* argument + is either a node object, or the graphident of a node. + +.. method:: ObjectGraph.createReference(fromnode, tonode[, edge_data]) + + Creates a reference from *fromnode* to *tonode*. The optional + *edge_data* is associated with the edge. + + *Fromnode* and *tonode* can either be node objects or the graphident + values for nodes. + +.. method:: ObjectGraph.removeReference(fromnode, tonode) + + Removes the reference from *fromnode* to *tonode* if it exists. + +.. method:: ObjectGraph.getRawIdent(node) + + Returns the *graphident* attribute of *node*, or the graph itself + when *node* is :data:`None`. + +.. method:: ObjectGraph.getIdent(node) + + Same as :meth:`getRawIdent`, but only if the node is part + of the graph. + + *Node* can either be an actual node object or the graphident of + a node. + +.. method:: ObjectGraph.findNode(node) + + Returns a given node in the graph, or :data:`None` when it cannot + be found. + + *Node* is either an object with a *graphident* attribute or + the *graphident* attribute itself. + +.. method:: ObjectGraph.__contains__(node) + + Returns True if *node* is a member of the graph. *Node* is either an + object with a *graphident* attribute or the *graphident* attribute itself. + +.. method:: ObjectGraph.flatten([condition[, start]]) + + Yield all nodes that are entirely reachable by *condition* + starting from the given *start* node or the graph root. + + ..
note:: objects are only reachable from the graph root + when there is a reference from the root to the node + (either directly or through another node) + +.. method:: ObjectGraph.nodes() + + Yield all nodes in the graph. + +.. method:: ObjectGraph.get_edges(node) + + Returns two iterators that yield the nodes reaching by + outgoing and incoming edges. + +.. method:: ObjectGraph.filterStack(filters) + + Filter the ObjectGraph in-place by removing all edges to nodes that + do not match every filter in the given filter list + + Returns a tuple containing the number of: + (*nodes_visited*, *nodes_removed*, *nodes_orphaned*) + + +Debug output +------------ + +.. data:: ObjectGraph.debug + + The current debug level. + +.. method:: ObjectGraph.msg(level, text, \*args) + + Print a debug message at the current indentation level when the current + debug level is *level* or less. + +.. method:: ObjectGraph.msgin(level, text, \*args) + + Print a debug message when the current debug level is *level* or less, + and increase the indentation level. + +.. method:: ObjectGraph.msgout(level, text, \*args) + + Decrease the indentation level and print a debug message when the + current debug level is *level* or less. diff --git a/python/altgraph/doc/_build/html/_static/ajax-loader.gif b/python/altgraph/doc/_build/html/_static/ajax-loader.gif new file mode 100644 index 000000000..61faf8cab Binary files /dev/null and b/python/altgraph/doc/_build/html/_static/ajax-loader.gif differ diff --git a/python/altgraph/doc/_build/html/_static/basic.css b/python/altgraph/doc/_build/html/_static/basic.css new file mode 100644 index 000000000..c959cf0db --- /dev/null +++ b/python/altgraph/doc/_build/html/_static/basic.css @@ -0,0 +1,537 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox input[type="text"] { + width: 170px; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + width: 30px; +} + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li div.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + 
font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable dl, table.indextable dd { + margin-top: 0; + margin-bottom: 0; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- general body styles --------------------------------------------------- */ + +a.headerlink { + visibility: hidden; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.field-list ul { + padding-left: 1em; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- 
*/ + +div.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px 7px 0 7px; + background-color: #ffe; + width: 40%; + float: right; +} + +p.sidebar-title { + font-weight: bold; +} + +/* -- topics ---------------------------------------------------------------- */ + +div.topic { + border: 1px solid #ccc; + padding: 7px 7px 0 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +div.admonition dl { + margin-bottom: 0; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + border: 0; + border-collapse: collapse; +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +table.field-list td, table.field-list th { + border: 0 !important; +} + +table.footnote td, table.footnote th { + border: 0 !important; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +dl { + margin-bottom: 15px; +} + +dd p { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; 
+} + +dt:target, .highlighted { + background-color: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.optional { + font-size: 1.3em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +td.linenos pre { + padding: 5px 0px; + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + margin-left: 0.5em; +} + +table.highlighttable td { + padding: 0 0.5em 0 0.5em; +} + +tt.descname { + background-color: transparent; + font-weight: bold; + font-size: 1.2em; +} + +tt.descclassname { + background-color: transparent; +} + +tt.xref, a tt { + background-color: transparent; + font-weight: bold; +} + +h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +/* -- printout stylesheet 
--------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/_static/comment-bright.png b/python/altgraph/doc/_build/html/_static/comment-bright.png new file mode 100644 index 000000000..551517b8c Binary files /dev/null and b/python/altgraph/doc/_build/html/_static/comment-bright.png differ diff --git a/python/altgraph/doc/_build/html/_static/comment-close.png b/python/altgraph/doc/_build/html/_static/comment-close.png new file mode 100644 index 000000000..09b54be46 Binary files /dev/null and b/python/altgraph/doc/_build/html/_static/comment-close.png differ diff --git a/python/altgraph/doc/_build/html/_static/comment.png b/python/altgraph/doc/_build/html/_static/comment.png new file mode 100644 index 000000000..92feb52b8 Binary files /dev/null and b/python/altgraph/doc/_build/html/_static/comment.png differ diff --git a/python/altgraph/doc/_build/html/_static/doctools.js b/python/altgraph/doc/_build/html/_static/doctools.js new file mode 100644 index 000000000..2036e5f5f --- /dev/null +++ b/python/altgraph/doc/_build/html/_static/doctools.js @@ -0,0 +1,238 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for all documentation. + * + * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +/** + * select a different prefix for underscore + */ +$u = _.noConflict(); + +/** + * make the code below compatible with browsers without + * an installed firebug like debugger +if (!window.console || !console.firebug) { + var names = ["log", "debug", "info", "warn", "error", "assert", "dir", + "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", + "profile", "profileEnd"]; + window.console = {}; + for (var i = 0; i < names.length; ++i) + window.console[names[i]] = function() {}; +} + */ + +/** + * small helper function to urldecode strings + */ +jQuery.urldecode = function(x) { + return decodeURIComponent(x).replace(/\+/g, ' '); +}; + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. + */ +jQuery.getQueryParameters = function(s) { + if (typeof s == 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. 
+ */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node) { + if (node.nodeType == 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) { + var span = document.createElement("span"); + span.className = className; + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this); + }); + } + } + return this.each(function() { + highlight(this); + }); +}; + +/** + * Small JavaScript module for the documentation. + */ +var Documentation = { + + init : function() { + this.fixFirefoxAnchorBug(); + this.highlightSearchWords(); + this.initIndexTable(); + }, + + /** + * i18n support + */ + TRANSLATIONS : {}, + PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; }, + LOCALE : 'unknown', + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext : function(string) { + var translated = Documentation.TRANSLATIONS[string]; + if (typeof translated == 'undefined') + return string; + return (typeof translated == 'string') ? translated : translated[0]; + }, + + ngettext : function(singular, plural, n) { + var translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated == 'undefined') + return (n == 1) ? 
singular : plural; + return translated[Documentation.PLURALEXPR(n)]; + }, + + addTranslations : function(catalog) { + for (var key in catalog.messages) + this.TRANSLATIONS[key] = catalog.messages[key]; + this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); + this.LOCALE = catalog.locale; + }, + + /** + * add context elements like header anchor links + */ + addContextElements : function() { + $('div[id] > :header:first').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', _('Permalink to this headline')). + appendTo(this); + }); + $('dt[id]').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', _('Permalink to this definition')). + appendTo(this); + }); + }, + + /** + * workaround a firefox stupidity + */ + fixFirefoxAnchorBug : function() { + if (document.location.hash && $.browser.mozilla) + window.setTimeout(function() { + document.location.href += ''; + }, 10); + }, + + /** + * highlight the search words provided in the url in the text + */ + highlightSearchWords : function() { + var params = $.getQueryParameters(); + var terms = (params.highlight) ? 
params.highlight[0].split(/\s+/) : []; + if (terms.length) { + var body = $('div.body'); + if (!body.length) { + body = $('body'); + } + window.setTimeout(function() { + $.each(terms, function() { + body.highlightText(this.toLowerCase(), 'highlighted'); + }); + }, 10); + $('') + .appendTo($('#searchbox')); + } + }, + + /** + * init the domain index toggle buttons + */ + initIndexTable : function() { + var togglers = $('img.toggler').click(function() { + var src = $(this).attr('src'); + var idnum = $(this).attr('id').substr(7); + $('tr.cg-' + idnum).toggle(); + if (src.substr(-9) == 'minus.png') + $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); + else + $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); + }).css('display', ''); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { + togglers.click(); + } + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords : function() { + $('#searchbox .highlight-link').fadeOut(300); + $('span.highlighted').removeClass('highlighted'); + }, + + /** + * make the url absolute + */ + makeURL : function(relativeURL) { + return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; + }, + + /** + * get the current relative url + */ + getCurrentURL : function() { + var path = document.location.pathname; + var parts = path.split(/\//); + $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { + if (this == '..') + parts.pop(); + }); + var url = parts.join('/'); + return path.substring(url.lastIndexOf('/') + 1, path.length - 1); + } +}; + +// quick alias for translations +_ = Documentation.gettext; + +$(document).ready(function() { + Documentation.init(); +}); diff --git a/python/altgraph/doc/_build/html/_static/down-pressed.png b/python/altgraph/doc/_build/html/_static/down-pressed.png new file mode 100644 index 000000000..6f7ad7827 Binary files /dev/null and b/python/altgraph/doc/_build/html/_static/down-pressed.png differ diff --git 
a/python/altgraph/doc/_build/html/_static/down.png b/python/altgraph/doc/_build/html/_static/down.png new file mode 100644 index 000000000..3003a8877 Binary files /dev/null and b/python/altgraph/doc/_build/html/_static/down.png differ diff --git a/python/altgraph/doc/_build/html/_static/file.png b/python/altgraph/doc/_build/html/_static/file.png new file mode 100644 index 000000000..d18082e39 Binary files /dev/null and b/python/altgraph/doc/_build/html/_static/file.png differ diff --git a/python/altgraph/doc/_build/html/_static/jquery.js b/python/altgraph/doc/_build/html/_static/jquery.js new file mode 100644 index 000000000..83589daa7 --- /dev/null +++ b/python/altgraph/doc/_build/html/_static/jquery.js @@ -0,0 +1,2 @@ +/*! jQuery v1.8.3 jquery.com | jquery.org/license */ +(function(e,t){function _(e){var t=M[e]={};return v.each(e.split(y),function(e,n){t[n]=!0}),t}function H(e,n,r){if(r===t&&e.nodeType===1){var i="data-"+n.replace(P,"-$1").toLowerCase();r=e.getAttribute(i);if(typeof r=="string"){try{r=r==="true"?!0:r==="false"?!1:r==="null"?null:+r+""===r?+r:D.test(r)?v.parseJSON(r):r}catch(s){}v.data(e,n,r)}else r=t}return r}function B(e){var t;for(t in e){if(t==="data"&&v.isEmptyObject(e[t]))continue;if(t!=="toJSON")return!1}return!0}function et(){return!1}function tt(){return!0}function ut(e){return!e||!e.parentNode||e.parentNode.nodeType===11}function at(e,t){do e=e[t];while(e&&e.nodeType!==1);return e}function ft(e,t,n){t=t||0;if(v.isFunction(t))return v.grep(e,function(e,r){var i=!!t.call(e,r,e);return i===n});if(t.nodeType)return v.grep(e,function(e,r){return e===t===n});if(typeof t=="string"){var r=v.grep(e,function(e){return e.nodeType===1});if(it.test(t))return v.filter(t,r,!n);t=v.filter(t,r)}return v.grep(e,function(e,r){return v.inArray(e,t)>=0===n})}function lt(e){var t=ct.split("|"),n=e.createDocumentFragment();if(n.createElement)while(t.length)n.createElement(t.pop());return n}function Lt(e,t){return 
e.getElementsByTagName(t)[0]||e.appendChild(e.ownerDocument.createElement(t))}function At(e,t){if(t.nodeType!==1||!v.hasData(e))return;var n,r,i,s=v._data(e),o=v._data(t,s),u=s.events;if(u){delete o.handle,o.events={};for(n in u)for(r=0,i=u[n].length;r").appendTo(i.body),n=t.css("display");t.remove();if(n==="none"||n===""){Pt=i.body.appendChild(Pt||v.extend(i.createElement("iframe"),{frameBorder:0,width:0,height:0}));if(!Ht||!Pt.createElement)Ht=(Pt.contentWindow||Pt.contentDocument).document,Ht.write(""),Ht.close();t=Ht.body.appendChild(Ht.createElement(e)),n=Dt(t,"display"),i.body.removeChild(Pt)}return Wt[e]=n,n}function fn(e,t,n,r){var i;if(v.isArray(t))v.each(t,function(t,i){n||sn.test(e)?r(e,i):fn(e+"["+(typeof i=="object"?t:"")+"]",i,n,r)});else if(!n&&v.type(t)==="object")for(i in t)fn(e+"["+i+"]",t[i],n,r);else r(e,t)}function Cn(e){return function(t,n){typeof t!="string"&&(n=t,t="*");var r,i,s,o=t.toLowerCase().split(y),u=0,a=o.length;if(v.isFunction(n))for(;u)[^>]*$|#([\w\-]*)$)/,E=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,S=/^[\],:{}\s]*$/,x=/(?:^|:|,)(?:\s*\[)+/g,T=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,N=/"[^"\\\r\n]*"|true|false|null|-?(?:\d\d*\.|)\d+(?:[eE][\-+]?\d+|)/g,C=/^-ms-/,k=/-([\da-z])/gi,L=function(e,t){return(t+"").toUpperCase()},A=function(){i.addEventListener?(i.removeEventListener("DOMContentLoaded",A,!1),v.ready()):i.readyState==="complete"&&(i.detachEvent("onreadystatechange",A),v.ready())},O={};v.fn=v.prototype={constructor:v,init:function(e,n,r){var s,o,u,a;if(!e)return this;if(e.nodeType)return this.context=this[0]=e,this.length=1,this;if(typeof e=="string"){e.charAt(0)==="<"&&e.charAt(e.length-1)===">"&&e.length>=3?s=[null,e,null]:s=w.exec(e);if(s&&(s[1]||!n)){if(s[1])return n=n instanceof v?n[0]:n,a=n&&n.nodeType?n.ownerDocument||n:i,e=v.parseHTML(s[1],a,!0),E.test(s[1])&&v.isPlainObject(n)&&this.attr.call(e,n,!0),v.merge(this,e);o=i.getElementById(s[2]);if(o&&o.parentNode){if(o.id!==s[2])return r.find(e);this.length=1,this[0]=o}return 
this.context=i,this.selector=e,this}return!n||n.jquery?(n||r).find(e):this.constructor(n).find(e)}return v.isFunction(e)?r.ready(e):(e.selector!==t&&(this.selector=e.selector,this.context=e.context),v.makeArray(e,this))},selector:"",jquery:"1.8.3",length:0,size:function(){return this.length},toArray:function(){return l.call(this)},get:function(e){return e==null?this.toArray():e<0?this[this.length+e]:this[e]},pushStack:function(e,t,n){var r=v.merge(this.constructor(),e);return r.prevObject=this,r.context=this.context,t==="find"?r.selector=this.selector+(this.selector?" ":"")+n:t&&(r.selector=this.selector+"."+t+"("+n+")"),r},each:function(e,t){return v.each(this,e,t)},ready:function(e){return v.ready.promise().done(e),this},eq:function(e){return e=+e,e===-1?this.slice(e):this.slice(e,e+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(l.apply(this,arguments),"slice",l.call(arguments).join(","))},map:function(e){return this.pushStack(v.map(this,function(t,n){return e.call(t,n,t)}))},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:[].sort,splice:[].splice},v.fn.init.prototype=v.fn,v.extend=v.fn.extend=function(){var e,n,r,i,s,o,u=arguments[0]||{},a=1,f=arguments.length,l=!1;typeof u=="boolean"&&(l=u,u=arguments[1]||{},a=2),typeof u!="object"&&!v.isFunction(u)&&(u={}),f===a&&(u=this,--a);for(;a0)return;r.resolveWith(i,[v]),v.fn.trigger&&v(i).trigger("ready").off("ready")},isFunction:function(e){return v.type(e)==="function"},isArray:Array.isArray||function(e){return v.type(e)==="array"},isWindow:function(e){return e!=null&&e==e.window},isNumeric:function(e){return!isNaN(parseFloat(e))&&isFinite(e)},type:function(e){return e==null?String(e):O[h.call(e)]||"object"},isPlainObject:function(e){if(!e||v.type(e)!=="object"||e.nodeType||v.isWindow(e))return!1;try{if(e.constructor&&!p.call(e,"constructor")&&!p.call(e.constructor.prototype,"isPrototypeOf"))return!1}catch(n){return!1}var 
r;for(r in e);return r===t||p.call(e,r)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},error:function(e){throw new Error(e)},parseHTML:function(e,t,n){var r;return!e||typeof e!="string"?null:(typeof t=="boolean"&&(n=t,t=0),t=t||i,(r=E.exec(e))?[t.createElement(r[1])]:(r=v.buildFragment([e],t,n?null:[]),v.merge([],(r.cacheable?v.clone(r.fragment):r.fragment).childNodes)))},parseJSON:function(t){if(!t||typeof t!="string")return null;t=v.trim(t);if(e.JSON&&e.JSON.parse)return e.JSON.parse(t);if(S.test(t.replace(T,"@").replace(N,"]").replace(x,"")))return(new Function("return "+t))();v.error("Invalid JSON: "+t)},parseXML:function(n){var r,i;if(!n||typeof n!="string")return null;try{e.DOMParser?(i=new DOMParser,r=i.parseFromString(n,"text/xml")):(r=new ActiveXObject("Microsoft.XMLDOM"),r.async="false",r.loadXML(n))}catch(s){r=t}return(!r||!r.documentElement||r.getElementsByTagName("parsererror").length)&&v.error("Invalid XML: "+n),r},noop:function(){},globalEval:function(t){t&&g.test(t)&&(e.execScript||function(t){e.eval.call(e,t)})(t)},camelCase:function(e){return e.replace(C,"ms-").replace(k,L)},nodeName:function(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()},each:function(e,n,r){var i,s=0,o=e.length,u=o===t||v.isFunction(e);if(r){if(u){for(i in e)if(n.apply(e[i],r)===!1)break}else for(;s0&&e[0]&&e[a-1]||a===0||v.isArray(e));if(f)for(;u-1)a.splice(n,1),i&&(n<=o&&o--,n<=u&&u--)}),this},has:function(e){return v.inArray(e,a)>-1},empty:function(){return a=[],this},disable:function(){return a=f=n=t,this},disabled:function(){return!a},lock:function(){return f=t,n||c.disable(),this},locked:function(){return!f},fireWith:function(e,t){return t=t||[],t=[e,t.slice?t.slice():t],a&&(!r||f)&&(i?f.push(t):l(t)),this},fire:function(){return c.fireWith(this,arguments),this},fired:function(){return!!r}};return c},v.extend({Deferred:function(e){var t=[["resolve","done",v.Callbacks("once memory"),"resolved"],["reject","fail",v.Callbacks("once 
memory"),"rejected"],["notify","progress",v.Callbacks("memory")]],n="pending",r={state:function(){return n},always:function(){return i.done(arguments).fail(arguments),this},then:function(){var e=arguments;return v.Deferred(function(n){v.each(t,function(t,r){var s=r[0],o=e[t];i[r[1]](v.isFunction(o)?function(){var e=o.apply(this,arguments);e&&v.isFunction(e.promise)?e.promise().done(n.resolve).fail(n.reject).progress(n.notify):n[s+"With"](this===i?n:this,[e])}:n[s])}),e=null}).promise()},promise:function(e){return e!=null?v.extend(e,r):r}},i={};return r.pipe=r.then,v.each(t,function(e,s){var o=s[2],u=s[3];r[s[1]]=o.add,u&&o.add(function(){n=u},t[e^1][2].disable,t[2][2].lock),i[s[0]]=o.fire,i[s[0]+"With"]=o.fireWith}),r.promise(i),e&&e.call(i,i),i},when:function(e){var t=0,n=l.call(arguments),r=n.length,i=r!==1||e&&v.isFunction(e.promise)?r:0,s=i===1?e:v.Deferred(),o=function(e,t,n){return function(r){t[e]=this,n[e]=arguments.length>1?l.call(arguments):r,n===u?s.notifyWith(t,n):--i||s.resolveWith(t,n)}},u,a,f;if(r>1){u=new Array(r),a=new Array(r),f=new Array(r);for(;t
a",n=p.getElementsByTagName("*"),r=p.getElementsByTagName("a")[0];if(!n||!r||!n.length)return{};s=i.createElement("select"),o=s.appendChild(i.createElement("option")),u=p.getElementsByTagName("input")[0],r.style.cssText="top:1px;float:left;opacity:.5",t={leadingWhitespace:p.firstChild.nodeType===3,tbody:!p.getElementsByTagName("tbody").length,htmlSerialize:!!p.getElementsByTagName("link").length,style:/top/.test(r.getAttribute("style")),hrefNormalized:r.getAttribute("href")==="/a",opacity:/^0.5/.test(r.style.opacity),cssFloat:!!r.style.cssFloat,checkOn:u.value==="on",optSelected:o.selected,getSetAttribute:p.className!=="t",enctype:!!i.createElement("form").enctype,html5Clone:i.createElement("nav").cloneNode(!0).outerHTML!=="<:nav>",boxModel:i.compatMode==="CSS1Compat",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0,boxSizingReliable:!0,pixelPosition:!1},u.checked=!0,t.noCloneChecked=u.cloneNode(!0).checked,s.disabled=!0,t.optDisabled=!o.disabled;try{delete p.test}catch(d){t.deleteExpando=!1}!p.addEventListener&&p.attachEvent&&p.fireEvent&&(p.attachEvent("onclick",h=function(){t.noCloneEvent=!1}),p.cloneNode(!0).fireEvent("onclick"),p.detachEvent("onclick",h)),u=i.createElement("input"),u.value="t",u.setAttribute("type","radio"),t.radioValue=u.value==="t",u.setAttribute("checked","checked"),u.setAttribute("name","t"),p.appendChild(u),a=i.createDocumentFragment(),a.appendChild(p.lastChild),t.checkClone=a.cloneNode(!0).cloneNode(!0).lastChild.checked,t.appendChecked=u.checked,a.removeChild(u),a.appendChild(p);if(p.attachEvent)for(l in{submit:!0,change:!0,focusin:!0})f="on"+l,c=f in p,c||(p.setAttribute(f,"return;"),c=typeof p[f]=="function"),t[l+"Bubbles"]=c;return v(function(){var 
n,r,s,o,u="padding:0;margin:0;border:0;display:block;overflow:hidden;",a=i.getElementsByTagName("body")[0];if(!a)return;n=i.createElement("div"),n.style.cssText="visibility:hidden;border:0;width:0;height:0;position:static;top:0;margin-top:1px",a.insertBefore(n,a.firstChild),r=i.createElement("div"),n.appendChild(r),r.innerHTML="
t
",s=r.getElementsByTagName("td"),s[0].style.cssText="padding:0;margin:0;border:0;display:none",c=s[0].offsetHeight===0,s[0].style.display="",s[1].style.display="none",t.reliableHiddenOffsets=c&&s[0].offsetHeight===0,r.innerHTML="",r.style.cssText="box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;",t.boxSizing=r.offsetWidth===4,t.doesNotIncludeMarginInBodyOffset=a.offsetTop!==1,e.getComputedStyle&&(t.pixelPosition=(e.getComputedStyle(r,null)||{}).top!=="1%",t.boxSizingReliable=(e.getComputedStyle(r,null)||{width:"4px"}).width==="4px",o=i.createElement("div"),o.style.cssText=r.style.cssText=u,o.style.marginRight=o.style.width="0",r.style.width="1px",r.appendChild(o),t.reliableMarginRight=!parseFloat((e.getComputedStyle(o,null)||{}).marginRight)),typeof r.style.zoom!="undefined"&&(r.innerHTML="",r.style.cssText=u+"width:1px;padding:1px;display:inline;zoom:1",t.inlineBlockNeedsLayout=r.offsetWidth===3,r.style.display="block",r.style.overflow="visible",r.innerHTML="
",r.firstChild.style.width="5px",t.shrinkWrapBlocks=r.offsetWidth!==3,n.style.zoom=1),a.removeChild(n),n=r=s=o=null}),a.removeChild(p),n=r=s=o=u=a=p=null,t}();var D=/(?:\{[\s\S]*\}|\[[\s\S]*\])$/,P=/([A-Z])/g;v.extend({cache:{},deletedIds:[],uuid:0,expando:"jQuery"+(v.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(e){return e=e.nodeType?v.cache[e[v.expando]]:e[v.expando],!!e&&!B(e)},data:function(e,n,r,i){if(!v.acceptData(e))return;var s,o,u=v.expando,a=typeof n=="string",f=e.nodeType,l=f?v.cache:e,c=f?e[u]:e[u]&&u;if((!c||!l[c]||!i&&!l[c].data)&&a&&r===t)return;c||(f?e[u]=c=v.deletedIds.pop()||v.guid++:c=u),l[c]||(l[c]={},f||(l[c].toJSON=v.noop));if(typeof n=="object"||typeof n=="function")i?l[c]=v.extend(l[c],n):l[c].data=v.extend(l[c].data,n);return s=l[c],i||(s.data||(s.data={}),s=s.data),r!==t&&(s[v.camelCase(n)]=r),a?(o=s[n],o==null&&(o=s[v.camelCase(n)])):o=s,o},removeData:function(e,t,n){if(!v.acceptData(e))return;var r,i,s,o=e.nodeType,u=o?v.cache:e,a=o?e[v.expando]:v.expando;if(!u[a])return;if(t){r=n?u[a]:u[a].data;if(r){v.isArray(t)||(t in r?t=[t]:(t=v.camelCase(t),t in r?t=[t]:t=t.split(" ")));for(i=0,s=t.length;i1,null,!1))},removeData:function(e){return this.each(function(){v.removeData(this,e)})}}),v.extend({queue:function(e,t,n){var r;if(e)return t=(t||"fx")+"queue",r=v._data(e,t),n&&(!r||v.isArray(n)?r=v._data(e,t,v.makeArray(n)):r.push(n)),r||[]},dequeue:function(e,t){t=t||"fx";var n=v.queue(e,t),r=n.length,i=n.shift(),s=v._queueHooks(e,t),o=function(){v.dequeue(e,t)};i==="inprogress"&&(i=n.shift(),r--),i&&(t==="fx"&&n.unshift("inprogress"),delete s.stop,i.call(e,o,s)),!r&&s&&s.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return v._data(e,n)||v._data(e,n,{empty:v.Callbacks("once memory").add(function(){v.removeData(e,t+"queue",!0),v.removeData(e,n,!0)})})}}),v.fn.extend({queue:function(e,n){var r=2;return typeof 
e!="string"&&(n=e,e="fx",r--),arguments.length1)},removeAttr:function(e){return this.each(function(){v.removeAttr(this,e)})},prop:function(e,t){return v.access(this,v.prop,e,t,arguments.length>1)},removeProp:function(e){return e=v.propFix[e]||e,this.each(function(){try{this[e]=t,delete this[e]}catch(n){}})},addClass:function(e){var t,n,r,i,s,o,u;if(v.isFunction(e))return this.each(function(t){v(this).addClass(e.call(this,t,this.className))});if(e&&typeof e=="string"){t=e.split(y);for(n=0,r=this.length;n=0)r=r.replace(" "+n[s]+" "," ");i.className=e?v.trim(r):""}}}return this},toggleClass:function(e,t){var n=typeof e,r=typeof t=="boolean";return v.isFunction(e)?this.each(function(n){v(this).toggleClass(e.call(this,n,this.className,t),t)}):this.each(function(){if(n==="string"){var i,s=0,o=v(this),u=t,a=e.split(y);while(i=a[s++])u=r?u:!o.hasClass(i),o[u?"addClass":"removeClass"](i)}else if(n==="undefined"||n==="boolean")this.className&&v._data(this,"__className__",this.className),this.className=this.className||e===!1?"":v._data(this,"__className__")||""})},hasClass:function(e){var t=" "+e+" ",n=0,r=this.length;for(;n=0)return!0;return!1},val:function(e){var n,r,i,s=this[0];if(!arguments.length){if(s)return n=v.valHooks[s.type]||v.valHooks[s.nodeName.toLowerCase()],n&&"get"in n&&(r=n.get(s,"value"))!==t?r:(r=s.value,typeof r=="string"?r.replace(R,""):r==null?"":r);return}return i=v.isFunction(e),this.each(function(r){var s,o=v(this);if(this.nodeType!==1)return;i?s=e.call(this,r,o.val()):s=e,s==null?s="":typeof s=="number"?s+="":v.isArray(s)&&(s=v.map(s,function(e){return e==null?"":e+""})),n=v.valHooks[this.type]||v.valHooks[this.nodeName.toLowerCase()];if(!n||!("set"in n)||n.set(this,s,"value")===t)this.value=s})}}),v.extend({valHooks:{option:{get:function(e){var t=e.attributes.value;return!t||t.specified?e.value:e.text}},select:{get:function(e){var 
t,n,r=e.options,i=e.selectedIndex,s=e.type==="select-one"||i<0,o=s?null:[],u=s?i+1:r.length,a=i<0?u:s?i:0;for(;a=0}),n.length||(e.selectedIndex=-1),n}}},attrFn:{},attr:function(e,n,r,i){var s,o,u,a=e.nodeType;if(!e||a===3||a===8||a===2)return;if(i&&v.isFunction(v.fn[n]))return v(e)[n](r);if(typeof e.getAttribute=="undefined")return v.prop(e,n,r);u=a!==1||!v.isXMLDoc(e),u&&(n=n.toLowerCase(),o=v.attrHooks[n]||(X.test(n)?F:j));if(r!==t){if(r===null){v.removeAttr(e,n);return}return o&&"set"in o&&u&&(s=o.set(e,r,n))!==t?s:(e.setAttribute(n,r+""),r)}return o&&"get"in o&&u&&(s=o.get(e,n))!==null?s:(s=e.getAttribute(n),s===null?t:s)},removeAttr:function(e,t){var n,r,i,s,o=0;if(t&&e.nodeType===1){r=t.split(y);for(;o=0}})});var $=/^(?:textarea|input|select)$/i,J=/^([^\.]*|)(?:\.(.+)|)$/,K=/(?:^|\s)hover(\.\S+|)\b/,Q=/^key/,G=/^(?:mouse|contextmenu)|click/,Y=/^(?:focusinfocus|focusoutblur)$/,Z=function(e){return v.event.special.hover?e:e.replace(K,"mouseenter$1 mouseleave$1")};v.event={add:function(e,n,r,i,s){var o,u,a,f,l,c,h,p,d,m,g;if(e.nodeType===3||e.nodeType===8||!n||!r||!(o=v._data(e)))return;r.handler&&(d=r,r=d.handler,s=d.selector),r.guid||(r.guid=v.guid++),a=o.events,a||(o.events=a={}),u=o.handle,u||(o.handle=u=function(e){return typeof v=="undefined"||!!e&&v.event.triggered===e.type?t:v.event.dispatch.apply(u.elem,arguments)},u.elem=e),n=v.trim(Z(n)).split(" ");for(f=0;f=0&&(y=y.slice(0,-1),a=!0),y.indexOf(".")>=0&&(b=y.split("."),y=b.shift(),b.sort());if((!s||v.event.customEvent[y])&&!v.event.global[y])return;n=typeof n=="object"?n[v.expando]?n:new v.Event(y,n):new v.Event(y),n.type=y,n.isTrigger=!0,n.exclusive=a,n.namespace=b.join("."),n.namespace_re=n.namespace?new RegExp("(^|\\.)"+b.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,h=y.indexOf(":")<0?"on"+y:"";if(!s){u=v.cache;for(f in 
u)u[f].events&&u[f].events[y]&&v.event.trigger(n,r,u[f].handle.elem,!0);return}n.result=t,n.target||(n.target=s),r=r!=null?v.makeArray(r):[],r.unshift(n),p=v.event.special[y]||{};if(p.trigger&&p.trigger.apply(s,r)===!1)return;m=[[s,p.bindType||y]];if(!o&&!p.noBubble&&!v.isWindow(s)){g=p.delegateType||y,l=Y.test(g+y)?s:s.parentNode;for(c=s;l;l=l.parentNode)m.push([l,g]),c=l;c===(s.ownerDocument||i)&&m.push([c.defaultView||c.parentWindow||e,g])}for(f=0;f=0:v.find(h,this,null,[s]).length),u[h]&&f.push(c);f.length&&w.push({elem:s,matches:f})}d.length>m&&w.push({elem:this,matches:d.slice(m)});for(r=0;r0?this.on(t,null,e,n):this.trigger(t)},Q.test(t)&&(v.event.fixHooks[t]=v.event.keyHooks),G.test(t)&&(v.event.fixHooks[t]=v.event.mouseHooks)}),function(e,t){function nt(e,t,n,r){n=n||[],t=t||g;var i,s,a,f,l=t.nodeType;if(!e||typeof e!="string")return n;if(l!==1&&l!==9)return[];a=o(t);if(!a&&!r)if(i=R.exec(e))if(f=i[1]){if(l===9){s=t.getElementById(f);if(!s||!s.parentNode)return n;if(s.id===f)return n.push(s),n}else if(t.ownerDocument&&(s=t.ownerDocument.getElementById(f))&&u(t,s)&&s.id===f)return n.push(s),n}else{if(i[2])return S.apply(n,x.call(t.getElementsByTagName(e),0)),n;if((f=i[3])&&Z&&t.getElementsByClassName)return S.apply(n,x.call(t.getElementsByClassName(f),0)),n}return vt(e.replace(j,"$1"),t,n,r,a)}function rt(e){return function(t){var n=t.nodeName.toLowerCase();return n==="input"&&t.type===e}}function it(e){return function(t){var n=t.nodeName.toLowerCase();return(n==="input"||n==="button")&&t.type===e}}function st(e){return N(function(t){return t=+t,N(function(n,r){var i,s=e([],n.length,t),o=s.length;while(o--)n[i=s[o]]&&(n[i]=!(r[i]=n[i]))})})}function ot(e,t,n){if(e===t)return n;var r=e.nextSibling;while(r){if(r===t)return-1;r=r.nextSibling}return 1}function ut(e,t){var n,r,s,o,u,a,f,l=L[d][e+" "];if(l)return t?0:l.slice(0);u=e,a=[],f=i.preFilter;while(u){if(!n||(r=F.exec(u)))r&&(u=u.slice(r[0].length)||u),a.push(s=[]);n=!1;if(r=I.exec(u))s.push(n=new 
m(r.shift())),u=u.slice(n.length),n.type=r[0].replace(j," ");for(o in i.filter)(r=J[o].exec(u))&&(!f[o]||(r=f[o](r)))&&(s.push(n=new m(r.shift())),u=u.slice(n.length),n.type=o,n.matches=r);if(!n)break}return t?u.length:u?nt.error(e):L(e,a).slice(0)}function at(e,t,r){var i=t.dir,s=r&&t.dir==="parentNode",o=w++;return t.first?function(t,n,r){while(t=t[i])if(s||t.nodeType===1)return e(t,n,r)}:function(t,r,u){if(!u){var a,f=b+" "+o+" ",l=f+n;while(t=t[i])if(s||t.nodeType===1){if((a=t[d])===l)return t.sizset;if(typeof a=="string"&&a.indexOf(f)===0){if(t.sizset)return t}else{t[d]=l;if(e(t,r,u))return t.sizset=!0,t;t.sizset=!1}}}else while(t=t[i])if(s||t.nodeType===1)if(e(t,r,u))return t}}function ft(e){return e.length>1?function(t,n,r){var i=e.length;while(i--)if(!e[i](t,n,r))return!1;return!0}:e[0]}function lt(e,t,n,r,i){var s,o=[],u=0,a=e.length,f=t!=null;for(;u-1&&(s[f]=!(o[f]=c))}}else g=lt(g===o?g.splice(d,g.length):g),i?i(null,o,g,a):S.apply(o,g)})}function ht(e){var t,n,r,s=e.length,o=i.relative[e[0].type],u=o||i.relative[" "],a=o?1:0,f=at(function(e){return e===t},u,!0),l=at(function(e){return T.call(t,e)>-1},u,!0),h=[function(e,n,r){return!o&&(r||n!==c)||((t=n).nodeType?f(e,n,r):l(e,n,r))}];for(;a1&&ft(h),a>1&&e.slice(0,a-1).join("").replace(j,"$1"),n,a0,s=e.length>0,o=function(u,a,f,l,h){var p,d,v,m=[],y=0,w="0",x=u&&[],T=h!=null,N=c,C=u||s&&i.find.TAG("*",h&&a.parentNode||a),k=b+=N==null?1:Math.E;T&&(c=a!==g&&a,n=o.el);for(;(p=C[w])!=null;w++){if(s&&p){for(d=0;v=e[d];d++)if(v(p,a,f)){l.push(p);break}T&&(b=k,n=++o.el)}r&&((p=!v&&p)&&y--,u&&x.push(p))}y+=w;if(r&&w!==y){for(d=0;v=t[d];d++)v(x,m,a,f);if(u){if(y>0)while(w--)!x[w]&&!m[w]&&(m[w]=E.call(l));m=lt(m)}S.apply(l,m),T&&!u&&m.length>0&&y+t.length>1&&nt.uniqueSort(l)}return T&&(b=k,c=N),x};return o.el=0,r?N(o):o}function dt(e,t,n){var r=0,i=t.length;for(;r2&&(f=u[0]).type==="ID"&&t.nodeType===9&&!s&&i.relative[u[1].type]){t=i.find.ID(f.matches[0].replace($,""),t,s)[0];if(!t)return 
n;e=e.slice(u.shift().length)}for(o=J.POS.test(e)?-1:u.length-1;o>=0;o--){f=u[o];if(i.relative[l=f.type])break;if(c=i.find[l])if(r=c(f.matches[0].replace($,""),z.test(u[0].type)&&t.parentNode||t,s)){u.splice(o,1),e=r.length&&u.join("");if(!e)return S.apply(n,x.call(r,0)),n;break}}}return a(e,h)(r,t,s,n,z.test(e)),n}function mt(){}var n,r,i,s,o,u,a,f,l,c,h=!0,p="undefined",d=("sizcache"+Math.random()).replace(".",""),m=String,g=e.document,y=g.documentElement,b=0,w=0,E=[].pop,S=[].push,x=[].slice,T=[].indexOf||function(e){var t=0,n=this.length;for(;ti.cacheLength&&delete e[t.shift()],e[n+" "]=r},e)},k=C(),L=C(),A=C(),O="[\\x20\\t\\r\\n\\f]",M="(?:\\\\.|[-\\w]|[^\\x00-\\xa0])+",_=M.replace("w","w#"),D="([*^$|!~]?=)",P="\\["+O+"*("+M+")"+O+"*(?:"+D+O+"*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|("+_+")|)|)"+O+"*\\]",H=":("+M+")(?:\\((?:(['\"])((?:\\\\.|[^\\\\])*?)\\2|([^()[\\]]*|(?:(?:"+P+")|[^:]|\\\\.)*|.*))\\)|)",B=":(even|odd|eq|gt|lt|nth|first|last)(?:\\("+O+"*((?:-\\d)?\\d*)"+O+"*\\)|)(?=[^-]|$)",j=new RegExp("^"+O+"+|((?:^|[^\\\\])(?:\\\\.)*)"+O+"+$","g"),F=new RegExp("^"+O+"*,"+O+"*"),I=new RegExp("^"+O+"*([\\x20\\t\\r\\n\\f>+~])"+O+"*"),q=new RegExp(H),R=/^(?:#([\w\-]+)|(\w+)|\.([\w\-]+))$/,U=/^:not/,z=/[\x20\t\r\n\f]*[+~]/,W=/:not\($/,X=/h\d/i,V=/input|select|textarea|button/i,$=/\\(?!\\)/g,J={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),NAME:new RegExp("^\\[name=['\"]?("+M+")['\"]?\\]"),TAG:new RegExp("^("+M.replace("w","w*")+")"),ATTR:new RegExp("^"+P),PSEUDO:new RegExp("^"+H),POS:new RegExp(B,"i"),CHILD:new RegExp("^:(only|nth|first|last)-child(?:\\("+O+"*(even|odd|(([+-]|)(\\d*)n|)"+O+"*(?:([+-]|)"+O+"*(\\d+)|))"+O+"*\\)|)","i"),needsContext:new RegExp("^"+O+"*[>+~]|"+B,"i")},K=function(e){var t=g.createElement("div");try{return e(t)}catch(n){return!1}finally{t=null}},Q=K(function(e){return e.appendChild(g.createComment("")),!e.getElementsByTagName("*").length}),G=K(function(e){return e.innerHTML="",e.firstChild&&typeof 
e.firstChild.getAttribute!==p&&e.firstChild.getAttribute("href")==="#"}),Y=K(function(e){e.innerHTML="";var t=typeof e.lastChild.getAttribute("multiple");return t!=="boolean"&&t!=="string"}),Z=K(function(e){return e.innerHTML="",!e.getElementsByClassName||!e.getElementsByClassName("e").length?!1:(e.lastChild.className="e",e.getElementsByClassName("e").length===2)}),et=K(function(e){e.id=d+0,e.innerHTML="
",y.insertBefore(e,y.firstChild);var t=g.getElementsByName&&g.getElementsByName(d).length===2+g.getElementsByName(d+0).length;return r=!g.getElementById(d),y.removeChild(e),t});try{x.call(y.childNodes,0)[0].nodeType}catch(tt){x=function(e){var t,n=[];for(;t=this[e];e++)n.push(t);return n}}nt.matches=function(e,t){return nt(e,null,null,t)},nt.matchesSelector=function(e,t){return nt(t,null,null,[e]).length>0},s=nt.getText=function(e){var t,n="",r=0,i=e.nodeType;if(i){if(i===1||i===9||i===11){if(typeof e.textContent=="string")return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=s(e)}else if(i===3||i===4)return e.nodeValue}else for(;t=e[r];r++)n+=s(t);return n},o=nt.isXML=function(e){var t=e&&(e.ownerDocument||e).documentElement;return t?t.nodeName!=="HTML":!1},u=nt.contains=y.contains?function(e,t){var n=e.nodeType===9?e.documentElement:e,r=t&&t.parentNode;return e===r||!!(r&&r.nodeType===1&&n.contains&&n.contains(r))}:y.compareDocumentPosition?function(e,t){return t&&!!(e.compareDocumentPosition(t)&16)}:function(e,t){while(t=t.parentNode)if(t===e)return!0;return!1},nt.attr=function(e,t){var n,r=o(e);return r||(t=t.toLowerCase()),(n=i.attrHandle[t])?n(e):r||Y?e.getAttribute(t):(n=e.getAttributeNode(t),n?typeof e[t]=="boolean"?e[t]?t:null:n.specified?n.value:null:null)},i=nt.selectors={cacheLength:50,createPseudo:N,match:J,attrHandle:G?{}:{href:function(e){return e.getAttribute("href",2)},type:function(e){return e.getAttribute("type")}},find:{ID:r?function(e,t,n){if(typeof t.getElementById!==p&&!n){var r=t.getElementById(e);return r&&r.parentNode?[r]:[]}}:function(e,n,r){if(typeof n.getElementById!==p&&!r){var i=n.getElementById(e);return i?i.id===e||typeof i.getAttributeNode!==p&&i.getAttributeNode("id").value===e?[i]:t:[]}},TAG:Q?function(e,t){if(typeof t.getElementsByTagName!==p)return t.getElementsByTagName(e)}:function(e,t){var n=t.getElementsByTagName(e);if(e==="*"){var r,i=[],s=0;for(;r=n[s];s++)r.nodeType===1&&i.push(r);return i}return 
n},NAME:et&&function(e,t){if(typeof t.getElementsByName!==p)return t.getElementsByName(name)},CLASS:Z&&function(e,t,n){if(typeof t.getElementsByClassName!==p&&!n)return t.getElementsByClassName(e)}},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace($,""),e[3]=(e[4]||e[5]||"").replace($,""),e[2]==="~="&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),e[1]==="nth"?(e[2]||nt.error(e[0]),e[3]=+(e[3]?e[4]+(e[5]||1):2*(e[2]==="even"||e[2]==="odd")),e[4]=+(e[6]+e[7]||e[2]==="odd")):e[2]&&nt.error(e[0]),e},PSEUDO:function(e){var t,n;if(J.CHILD.test(e[0]))return null;if(e[3])e[2]=e[3];else if(t=e[4])q.test(t)&&(n=ut(t,!0))&&(n=t.indexOf(")",t.length-n)-t.length)&&(t=t.slice(0,n),e[0]=e[0].slice(0,n)),e[2]=t;return e.slice(0,3)}},filter:{ID:r?function(e){return e=e.replace($,""),function(t){return t.getAttribute("id")===e}}:function(e){return e=e.replace($,""),function(t){var n=typeof t.getAttributeNode!==p&&t.getAttributeNode("id");return n&&n.value===e}},TAG:function(e){return e==="*"?function(){return!0}:(e=e.replace($,"").toLowerCase(),function(t){return t.nodeName&&t.nodeName.toLowerCase()===e})},CLASS:function(e){var t=k[d][e+" "];return t||(t=new RegExp("(^|"+O+")"+e+"("+O+"|$)"))&&k(e,function(e){return t.test(e.className||typeof e.getAttribute!==p&&e.getAttribute("class")||"")})},ATTR:function(e,t,n){return function(r,i){var s=nt.attr(r,e);return s==null?t==="!=":t?(s+="",t==="="?s===n:t==="!="?s!==n:t==="^="?n&&s.indexOf(n)===0:t==="*="?n&&s.indexOf(n)>-1:t==="$="?n&&s.substr(s.length-n.length)===n:t==="~="?(" "+s+" ").indexOf(n)>-1:t==="|="?s===n||s.substr(0,n.length+1)===n+"-":!1):!0}},CHILD:function(e,t,n,r){return e==="nth"?function(e){var t,i,s=e.parentNode;if(n===1&&r===0)return!0;if(s){i=0;for(t=s.firstChild;t;t=t.nextSibling)if(t.nodeType===1){i++;if(e===t)break}}return 
i-=r,i===n||i%n===0&&i/n>=0}:function(t){var n=t;switch(e){case"only":case"first":while(n=n.previousSibling)if(n.nodeType===1)return!1;if(e==="first")return!0;n=t;case"last":while(n=n.nextSibling)if(n.nodeType===1)return!1;return!0}}},PSEUDO:function(e,t){var n,r=i.pseudos[e]||i.setFilters[e.toLowerCase()]||nt.error("unsupported pseudo: "+e);return r[d]?r(t):r.length>1?(n=[e,e,"",t],i.setFilters.hasOwnProperty(e.toLowerCase())?N(function(e,n){var i,s=r(e,t),o=s.length;while(o--)i=T.call(e,s[o]),e[i]=!(n[i]=s[o])}):function(e){return r(e,0,n)}):r}},pseudos:{not:N(function(e){var t=[],n=[],r=a(e.replace(j,"$1"));return r[d]?N(function(e,t,n,i){var s,o=r(e,null,i,[]),u=e.length;while(u--)if(s=o[u])e[u]=!(t[u]=s)}):function(e,i,s){return t[0]=e,r(t,null,s,n),!n.pop()}}),has:N(function(e){return function(t){return nt(e,t).length>0}}),contains:N(function(e){return function(t){return(t.textContent||t.innerText||s(t)).indexOf(e)>-1}}),enabled:function(e){return e.disabled===!1},disabled:function(e){return e.disabled===!0},checked:function(e){var t=e.nodeName.toLowerCase();return t==="input"&&!!e.checked||t==="option"&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,e.selected===!0},parent:function(e){return!i.pseudos.empty(e)},empty:function(e){var t;e=e.firstChild;while(e){if(e.nodeName>"@"||(t=e.nodeType)===3||t===4)return!1;e=e.nextSibling}return!0},header:function(e){return X.test(e.nodeName)},text:function(e){var t,n;return e.nodeName.toLowerCase()==="input"&&(t=e.type)==="text"&&((n=e.getAttribute("type"))==null||n.toLowerCase()===t)},radio:rt("radio"),checkbox:rt("checkbox"),file:rt("file"),password:rt("password"),image:rt("image"),submit:it("submit"),reset:it("reset"),button:function(e){var t=e.nodeName.toLowerCase();return t==="input"&&e.type==="button"||t==="button"},input:function(e){return V.test(e.nodeName)},focus:function(e){var t=e.ownerDocument;return 
e===t.activeElement&&(!t.hasFocus||t.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},active:function(e){return e===e.ownerDocument.activeElement},first:st(function(){return[0]}),last:st(function(e,t){return[t-1]}),eq:st(function(e,t,n){return[n<0?n+t:n]}),even:st(function(e,t){for(var n=0;n=0;)e.push(r);return e}),gt:st(function(e,t,n){for(var r=n<0?n+t:n;++r",e.querySelectorAll("[selected]").length||i.push("\\["+O+"*(?:checked|disabled|ismap|multiple|readonly|selected|value)"),e.querySelectorAll(":checked").length||i.push(":checked")}),K(function(e){e.innerHTML="

",e.querySelectorAll("[test^='']").length&&i.push("[*^$]="+O+"*(?:\"\"|'')"),e.innerHTML="",e.querySelectorAll(":enabled").length||i.push(":enabled",":disabled")}),i=new RegExp(i.join("|")),vt=function(e,r,s,o,u){if(!o&&!u&&!i.test(e)){var a,f,l=!0,c=d,h=r,p=r.nodeType===9&&e;if(r.nodeType===1&&r.nodeName.toLowerCase()!=="object"){a=ut(e),(l=r.getAttribute("id"))?c=l.replace(n,"\\$&"):r.setAttribute("id",c),c="[id='"+c+"'] ",f=a.length;while(f--)a[f]=c+a[f].join("");h=z.test(e)&&r.parentNode||r,p=a.join(",")}if(p)try{return S.apply(s,x.call(h.querySelectorAll(p),0)),s}catch(v){}finally{l||r.removeAttribute("id")}}return t(e,r,s,o,u)},u&&(K(function(t){e=u.call(t,"div");try{u.call(t,"[test!='']:sizzle"),s.push("!=",H)}catch(n){}}),s=new RegExp(s.join("|")),nt.matchesSelector=function(t,n){n=n.replace(r,"='$1']");if(!o(t)&&!s.test(n)&&!i.test(n))try{var a=u.call(t,n);if(a||e||t.document&&t.document.nodeType!==11)return a}catch(f){}return nt(n,null,null,[t]).length>0})}(),i.pseudos.nth=i.pseudos.eq,i.filters=mt.prototype=i.pseudos,i.setFilters=new mt,nt.attr=v.attr,v.find=nt,v.expr=nt.selectors,v.expr[":"]=v.expr.pseudos,v.unique=nt.uniqueSort,v.text=nt.getText,v.isXMLDoc=nt.isXML,v.contains=nt.contains}(e);var nt=/Until$/,rt=/^(?:parents|prev(?:Until|All))/,it=/^.[^:#\[\.,]*$/,st=v.expr.match.needsContext,ot={children:!0,contents:!0,next:!0,prev:!0};v.fn.extend({find:function(e){var t,n,r,i,s,o,u=this;if(typeof e!="string")return v(e).filter(function(){for(t=0,n=u.length;t0)for(i=r;i=0:v.filter(e,this).length>0:this.filter(e).length>0)},closest:function(e,t){var n,r=0,i=this.length,s=[],o=st.test(e)||typeof e!="string"?v(e,t||this.context):0;for(;r-1:v.find.matchesSelector(n,e)){s.push(n);break}n=n.parentNode}}return s=s.length>1?v.unique(s):s,this.pushStack(s,"closest",e)},index:function(e){return e?typeof e=="string"?v.inArray(this[0],v(e)):v.inArray(e.jquery?e[0]:e,this):this[0]&&this[0].parentNode?this.prevAll().length:-1},add:function(e,t){var n=typeof 
e=="string"?v(e,t):v.makeArray(e&&e.nodeType?[e]:e),r=v.merge(this.get(),n);return this.pushStack(ut(n[0])||ut(r[0])?r:v.unique(r))},addBack:function(e){return this.add(e==null?this.prevObject:this.prevObject.filter(e))}}),v.fn.andSelf=v.fn.addBack,v.each({parent:function(e){var t=e.parentNode;return t&&t.nodeType!==11?t:null},parents:function(e){return v.dir(e,"parentNode")},parentsUntil:function(e,t,n){return v.dir(e,"parentNode",n)},next:function(e){return at(e,"nextSibling")},prev:function(e){return at(e,"previousSibling")},nextAll:function(e){return v.dir(e,"nextSibling")},prevAll:function(e){return v.dir(e,"previousSibling")},nextUntil:function(e,t,n){return v.dir(e,"nextSibling",n)},prevUntil:function(e,t,n){return v.dir(e,"previousSibling",n)},siblings:function(e){return v.sibling((e.parentNode||{}).firstChild,e)},children:function(e){return v.sibling(e.firstChild)},contents:function(e){return v.nodeName(e,"iframe")?e.contentDocument||e.contentWindow.document:v.merge([],e.childNodes)}},function(e,t){v.fn[e]=function(n,r){var i=v.map(this,t,n);return nt.test(e)||(r=n),r&&typeof r=="string"&&(i=v.filter(r,i)),i=this.length>1&&!ot[e]?v.unique(i):i,this.length>1&&rt.test(e)&&(i=i.reverse()),this.pushStack(i,e,l.call(arguments).join(","))}}),v.extend({filter:function(e,t,n){return n&&(e=":not("+e+")"),t.length===1?v.find.matchesSelector(t[0],e)?[t[0]]:[]:v.find.matches(e,t)},dir:function(e,n,r){var i=[],s=e[n];while(s&&s.nodeType!==9&&(r===t||s.nodeType!==1||!v(s).is(r)))s.nodeType===1&&i.push(s),s=s[n];return i},sibling:function(e,t){var n=[];for(;e;e=e.nextSibling)e.nodeType===1&&e!==t&&n.push(e);return n}});var ct="abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",ht=/ 
jQuery\d+="(?:null|\d+)"/g,pt=/^\s+/,dt=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,vt=/<([\w:]+)/,mt=/]","i"),Et=/^(?:checkbox|radio)$/,St=/checked\s*(?:[^=]|=\s*.checked.)/i,xt=/\/(java|ecma)script/i,Tt=/^\s*\s*$/g,Nt={option:[1,""],legend:[1,"
","
"],thead:[1,"","
"],tr:[2,"","
"],td:[3,"","
"],col:[2,"","
"],area:[1,"",""],_default:[0,"",""]},Ct=lt(i),kt=Ct.appendChild(i.createElement("div"));Nt.optgroup=Nt.option,Nt.tbody=Nt.tfoot=Nt.colgroup=Nt.caption=Nt.thead,Nt.th=Nt.td,v.support.htmlSerialize||(Nt._default=[1,"X
","
"]),v.fn.extend({text:function(e){return v.access(this,function(e){return e===t?v.text(this):this.empty().append((this[0]&&this[0].ownerDocument||i).createTextNode(e))},null,e,arguments.length)},wrapAll:function(e){if(v.isFunction(e))return this.each(function(t){v(this).wrapAll(e.call(this,t))});if(this[0]){var t=v(e,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstChild&&e.firstChild.nodeType===1)e=e.firstChild;return e}).append(this)}return this},wrapInner:function(e){return v.isFunction(e)?this.each(function(t){v(this).wrapInner(e.call(this,t))}):this.each(function(){var t=v(this),n=t.contents();n.length?n.wrapAll(e):t.append(e)})},wrap:function(e){var t=v.isFunction(e);return this.each(function(n){v(this).wrapAll(t?e.call(this,n):e)})},unwrap:function(){return this.parent().each(function(){v.nodeName(this,"body")||v(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(e){(this.nodeType===1||this.nodeType===11)&&this.appendChild(e)})},prepend:function(){return this.domManip(arguments,!0,function(e){(this.nodeType===1||this.nodeType===11)&&this.insertBefore(e,this.firstChild)})},before:function(){if(!ut(this[0]))return this.domManip(arguments,!1,function(e){this.parentNode.insertBefore(e,this)});if(arguments.length){var e=v.clean(arguments);return this.pushStack(v.merge(e,this),"before",this.selector)}},after:function(){if(!ut(this[0]))return this.domManip(arguments,!1,function(e){this.parentNode.insertBefore(e,this.nextSibling)});if(arguments.length){var e=v.clean(arguments);return this.pushStack(v.merge(this,e),"after",this.selector)}},remove:function(e,t){var n,r=0;for(;(n=this[r])!=null;r++)if(!e||v.filter(e,[n]).length)!t&&n.nodeType===1&&(v.cleanData(n.getElementsByTagName("*")),v.cleanData([n])),n.parentNode&&n.parentNode.removeChild(n);return this},empty:function(){var 
e,t=0;for(;(e=this[t])!=null;t++){e.nodeType===1&&v.cleanData(e.getElementsByTagName("*"));while(e.firstChild)e.removeChild(e.firstChild)}return this},clone:function(e,t){return e=e==null?!1:e,t=t==null?e:t,this.map(function(){return v.clone(this,e,t)})},html:function(e){return v.access(this,function(e){var n=this[0]||{},r=0,i=this.length;if(e===t)return n.nodeType===1?n.innerHTML.replace(ht,""):t;if(typeof e=="string"&&!yt.test(e)&&(v.support.htmlSerialize||!wt.test(e))&&(v.support.leadingWhitespace||!pt.test(e))&&!Nt[(vt.exec(e)||["",""])[1].toLowerCase()]){e=e.replace(dt,"<$1>");try{for(;r1&&typeof f=="string"&&St.test(f))return this.each(function(){v(this).domManip(e,n,r)});if(v.isFunction(f))return this.each(function(i){var s=v(this);e[0]=f.call(this,i,n?s.html():t),s.domManip(e,n,r)});if(this[0]){i=v.buildFragment(e,this,l),o=i.fragment,s=o.firstChild,o.childNodes.length===1&&(o=s);if(s){n=n&&v.nodeName(s,"tr");for(u=i.cacheable||c-1;a0?this.clone(!0):this).get(),v(o[i])[t](r),s=s.concat(r);return this.pushStack(s,e,o.selector)}}),v.extend({clone:function(e,t,n){var r,i,s,o;v.support.html5Clone||v.isXMLDoc(e)||!wt.test("<"+e.nodeName+">")?o=e.cloneNode(!0):(kt.innerHTML=e.outerHTML,kt.removeChild(o=kt.firstChild));if((!v.support.noCloneEvent||!v.support.noCloneChecked)&&(e.nodeType===1||e.nodeType===11)&&!v.isXMLDoc(e)){Ot(e,o),r=Mt(e),i=Mt(o);for(s=0;r[s];++s)i[s]&&Ot(r[s],i[s])}if(t){At(e,o);if(n){r=Mt(e),i=Mt(o);for(s=0;r[s];++s)At(r[s],i[s])}}return r=i=null,o},clean:function(e,t,n,r){var s,o,u,a,f,l,c,h,p,d,m,g,y=t===i&&Ct,b=[];if(!t||typeof t.createDocumentFragment=="undefined")t=i;for(s=0;(u=e[s])!=null;s++){typeof u=="number"&&(u+="");if(!u)continue;if(typeof 
u=="string")if(!gt.test(u))u=t.createTextNode(u);else{y=y||lt(t),c=t.createElement("div"),y.appendChild(c),u=u.replace(dt,"<$1>"),a=(vt.exec(u)||["",""])[1].toLowerCase(),f=Nt[a]||Nt._default,l=f[0],c.innerHTML=f[1]+u+f[2];while(l--)c=c.lastChild;if(!v.support.tbody){h=mt.test(u),p=a==="table"&&!h?c.firstChild&&c.firstChild.childNodes:f[1]===""&&!h?c.childNodes:[];for(o=p.length-1;o>=0;--o)v.nodeName(p[o],"tbody")&&!p[o].childNodes.length&&p[o].parentNode.removeChild(p[o])}!v.support.leadingWhitespace&&pt.test(u)&&c.insertBefore(t.createTextNode(pt.exec(u)[0]),c.firstChild),u=c.childNodes,c.parentNode.removeChild(c)}u.nodeType?b.push(u):v.merge(b,u)}c&&(u=c=y=null);if(!v.support.appendChecked)for(s=0;(u=b[s])!=null;s++)v.nodeName(u,"input")?_t(u):typeof u.getElementsByTagName!="undefined"&&v.grep(u.getElementsByTagName("input"),_t);if(n){m=function(e){if(!e.type||xt.test(e.type))return r?r.push(e.parentNode?e.parentNode.removeChild(e):e):n.appendChild(e)};for(s=0;(u=b[s])!=null;s++)if(!v.nodeName(u,"script")||!m(u))n.appendChild(u),typeof u.getElementsByTagName!="undefined"&&(g=v.grep(v.merge([],u.getElementsByTagName("script")),m),b.splice.apply(b,[s+1,0].concat(g)),s+=g.length)}return b},cleanData:function(e,t){var n,r,i,s,o=0,u=v.expando,a=v.cache,f=v.support.deleteExpando,l=v.event.special;for(;(i=e[o])!=null;o++)if(t||v.acceptData(i)){r=i[u],n=r&&a[r];if(n){if(n.events)for(s in n.events)l[s]?v.event.remove(i,s):v.removeEvent(i,s,n.handle);a[r]&&(delete a[r],f?delete i[u]:i.removeAttribute?i.removeAttribute(u):i[u]=null,v.deletedIds.push(r))}}}}),function(){var e,t;v.uaMatch=function(e){e=e.toLowerCase();var t=/(chrome)[ \/]([\w.]+)/.exec(e)||/(webkit)[ \/]([\w.]+)/.exec(e)||/(opera)(?:.*version|)[ \/]([\w.]+)/.exec(e)||/(msie) ([\w.]+)/.exec(e)||e.indexOf("compatible")<0&&/(mozilla)(?:.*? 
rv:([\w.]+)|)/.exec(e)||[];return{browser:t[1]||"",version:t[2]||"0"}},e=v.uaMatch(o.userAgent),t={},e.browser&&(t[e.browser]=!0,t.version=e.version),t.chrome?t.webkit=!0:t.webkit&&(t.safari=!0),v.browser=t,v.sub=function(){function e(t,n){return new e.fn.init(t,n)}v.extend(!0,e,this),e.superclass=this,e.fn=e.prototype=this(),e.fn.constructor=e,e.sub=this.sub,e.fn.init=function(r,i){return i&&i instanceof v&&!(i instanceof e)&&(i=e(i)),v.fn.init.call(this,r,i,t)},e.fn.init.prototype=e.fn;var t=e(i);return e}}();var Dt,Pt,Ht,Bt=/alpha\([^)]*\)/i,jt=/opacity=([^)]*)/,Ft=/^(top|right|bottom|left)$/,It=/^(none|table(?!-c[ea]).+)/,qt=/^margin/,Rt=new RegExp("^("+m+")(.*)$","i"),Ut=new RegExp("^("+m+")(?!px)[a-z%]+$","i"),zt=new RegExp("^([-+])=("+m+")","i"),Wt={BODY:"block"},Xt={position:"absolute",visibility:"hidden",display:"block"},Vt={letterSpacing:0,fontWeight:400},$t=["Top","Right","Bottom","Left"],Jt=["Webkit","O","Moz","ms"],Kt=v.fn.toggle;v.fn.extend({css:function(e,n){return v.access(this,function(e,n,r){return r!==t?v.style(e,n,r):v.css(e,n)},e,n,arguments.length>1)},show:function(){return Yt(this,!0)},hide:function(){return Yt(this)},toggle:function(e,t){var n=typeof e=="boolean";return v.isFunction(e)&&v.isFunction(t)?Kt.apply(this,arguments):this.each(function(){(n?e:Gt(this))?v(this).show():v(this).hide()})}}),v.extend({cssHooks:{opacity:{get:function(e,t){if(t){var n=Dt(e,"opacity");return n===""?"1":n}}}},cssNumber:{fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":v.support.cssFloat?"cssFloat":"styleFloat"},style:function(e,n,r,i){if(!e||e.nodeType===3||e.nodeType===8||!e.style)return;var s,o,u,a=v.camelCase(n),f=e.style;n=v.cssProps[a]||(v.cssProps[a]=Qt(f,a)),u=v.cssHooks[n]||v.cssHooks[a];if(r===t)return u&&"get"in u&&(s=u.get(e,!1,i))!==t?s:f[n];o=typeof 
r,o==="string"&&(s=zt.exec(r))&&(r=(s[1]+1)*s[2]+parseFloat(v.css(e,n)),o="number");if(r==null||o==="number"&&isNaN(r))return;o==="number"&&!v.cssNumber[a]&&(r+="px");if(!u||!("set"in u)||(r=u.set(e,r,i))!==t)try{f[n]=r}catch(l){}},css:function(e,n,r,i){var s,o,u,a=v.camelCase(n);return n=v.cssProps[a]||(v.cssProps[a]=Qt(e.style,a)),u=v.cssHooks[n]||v.cssHooks[a],u&&"get"in u&&(s=u.get(e,!0,i)),s===t&&(s=Dt(e,n)),s==="normal"&&n in Vt&&(s=Vt[n]),r||i!==t?(o=parseFloat(s),r||v.isNumeric(o)?o||0:s):s},swap:function(e,t,n){var r,i,s={};for(i in t)s[i]=e.style[i],e.style[i]=t[i];r=n.call(e);for(i in t)e.style[i]=s[i];return r}}),e.getComputedStyle?Dt=function(t,n){var r,i,s,o,u=e.getComputedStyle(t,null),a=t.style;return u&&(r=u.getPropertyValue(n)||u[n],r===""&&!v.contains(t.ownerDocument,t)&&(r=v.style(t,n)),Ut.test(r)&&qt.test(n)&&(i=a.width,s=a.minWidth,o=a.maxWidth,a.minWidth=a.maxWidth=a.width=r,r=u.width,a.width=i,a.minWidth=s,a.maxWidth=o)),r}:i.documentElement.currentStyle&&(Dt=function(e,t){var n,r,i=e.currentStyle&&e.currentStyle[t],s=e.style;return i==null&&s&&s[t]&&(i=s[t]),Ut.test(i)&&!Ft.test(t)&&(n=s.left,r=e.runtimeStyle&&e.runtimeStyle.left,r&&(e.runtimeStyle.left=e.currentStyle.left),s.left=t==="fontSize"?"1em":i,i=s.pixelLeft+"px",s.left=n,r&&(e.runtimeStyle.left=r)),i===""?"auto":i}),v.each(["height","width"],function(e,t){v.cssHooks[t]={get:function(e,n,r){if(n)return e.offsetWidth===0&&It.test(Dt(e,"display"))?v.swap(e,Xt,function(){return tn(e,t,r)}):tn(e,t,r)},set:function(e,n,r){return Zt(e,n,r?en(e,t,r,v.support.boxSizing&&v.css(e,"boxSizing")==="border-box"):0)}}}),v.support.opacity||(v.cssHooks.opacity={get:function(e,t){return jt.test((t&&e.currentStyle?e.currentStyle.filter:e.style.filter)||"")?.01*parseFloat(RegExp.$1)+"":t?"1":""},set:function(e,t){var 
n=e.style,r=e.currentStyle,i=v.isNumeric(t)?"alpha(opacity="+t*100+")":"",s=r&&r.filter||n.filter||"";n.zoom=1;if(t>=1&&v.trim(s.replace(Bt,""))===""&&n.removeAttribute){n.removeAttribute("filter");if(r&&!r.filter)return}n.filter=Bt.test(s)?s.replace(Bt,i):s+" "+i}}),v(function(){v.support.reliableMarginRight||(v.cssHooks.marginRight={get:function(e,t){return v.swap(e,{display:"inline-block"},function(){if(t)return Dt(e,"marginRight")})}}),!v.support.pixelPosition&&v.fn.position&&v.each(["top","left"],function(e,t){v.cssHooks[t]={get:function(e,n){if(n){var r=Dt(e,t);return Ut.test(r)?v(e).position()[t]+"px":r}}}})}),v.expr&&v.expr.filters&&(v.expr.filters.hidden=function(e){return e.offsetWidth===0&&e.offsetHeight===0||!v.support.reliableHiddenOffsets&&(e.style&&e.style.display||Dt(e,"display"))==="none"},v.expr.filters.visible=function(e){return!v.expr.filters.hidden(e)}),v.each({margin:"",padding:"",border:"Width"},function(e,t){v.cssHooks[e+t]={expand:function(n){var r,i=typeof n=="string"?n.split(" "):[n],s={};for(r=0;r<4;r++)s[e+$t[r]+t]=i[r]||i[r-2]||i[0];return s}},qt.test(e)||(v.cssHooks[e+t].set=Zt)});var rn=/%20/g,sn=/\[\]$/,on=/\r?\n/g,un=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,an=/^(?:select|textarea)/i;v.fn.extend({serialize:function(){return v.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?v.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||an.test(this.nodeName)||un.test(this.type))}).map(function(e,t){var n=v(this).val();return n==null?null:v.isArray(n)?v.map(n,function(e,n){return{name:t.name,value:e.replace(on,"\r\n")}}):{name:t.name,value:n.replace(on,"\r\n")}}).get()}}),v.param=function(e,n){var 
r,i=[],s=function(e,t){t=v.isFunction(t)?t():t==null?"":t,i[i.length]=encodeURIComponent(e)+"="+encodeURIComponent(t)};n===t&&(n=v.ajaxSettings&&v.ajaxSettings.traditional);if(v.isArray(e)||e.jquery&&!v.isPlainObject(e))v.each(e,function(){s(this.name,this.value)});else for(r in e)fn(r,e[r],n,s);return i.join("&").replace(rn,"+")};var ln,cn,hn=/#.*$/,pn=/^(.*?):[ \t]*([^\r\n]*)\r?$/mg,dn=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,vn=/^(?:GET|HEAD)$/,mn=/^\/\//,gn=/\?/,yn=/)<[^<]*)*<\/script>/gi,bn=/([?&])_=[^&]*/,wn=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+)|)|)/,En=v.fn.load,Sn={},xn={},Tn=["*/"]+["*"];try{cn=s.href}catch(Nn){cn=i.createElement("a"),cn.href="",cn=cn.href}ln=wn.exec(cn.toLowerCase())||[],v.fn.load=function(e,n,r){if(typeof e!="string"&&En)return En.apply(this,arguments);if(!this.length)return this;var i,s,o,u=this,a=e.indexOf(" ");return a>=0&&(i=e.slice(a,e.length),e=e.slice(0,a)),v.isFunction(n)?(r=n,n=t):n&&typeof n=="object"&&(s="POST"),v.ajax({url:e,type:s,dataType:"html",data:n,complete:function(e,t){r&&u.each(r,o||[e.responseText,t,e])}}).done(function(e){o=arguments,u.html(i?v("
").append(e.replace(yn,"")).find(i):e)}),this},v.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(e,t){v.fn[t]=function(e){return this.on(t,e)}}),v.each(["get","post"],function(e,n){v[n]=function(e,r,i,s){return v.isFunction(r)&&(s=s||i,i=r,r=t),v.ajax({type:n,url:e,data:r,success:i,dataType:s})}}),v.extend({getScript:function(e,n){return v.get(e,t,n,"script")},getJSON:function(e,t,n){return v.get(e,t,n,"json")},ajaxSetup:function(e,t){return t?Ln(e,v.ajaxSettings):(t=e,e=v.ajaxSettings),Ln(e,t),e},ajaxSettings:{url:cn,isLocal:dn.test(ln[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded; charset=UTF-8",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":Tn},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":e.String,"text html":!0,"text json":v.parseJSON,"text xml":v.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:Cn(Sn),ajaxTransport:Cn(xn),ajax:function(e,n){function T(e,n,s,a){var l,y,b,w,S,T=n;if(E===2)return;E=2,u&&clearTimeout(u),o=t,i=a||"",x.readyState=e>0?4:0,s&&(w=An(c,x,s));if(e>=200&&e<300||e===304)c.ifModified&&(S=x.getResponseHeader("Last-Modified"),S&&(v.lastModified[r]=S),S=x.getResponseHeader("Etag"),S&&(v.etag[r]=S)),e===304?(T="notmodified",l=!0):(l=On(c,w),T=l.state,y=l.data,b=l.error,l=!b);else{b=T;if(!T||e)T="error",e<0&&(e=0)}x.status=e,x.statusText=(n||T)+"",l?d.resolveWith(h,[y,T,x]):d.rejectWith(h,[x,T,b]),x.statusCode(g),g=t,f&&p.trigger("ajax"+(l?"Success":"Error"),[x,c,l?y:b]),m.fireWith(h,[x,T]),f&&(p.trigger("ajaxComplete",[x,c]),--v.active||v.event.trigger("ajaxStop"))}typeof e=="object"&&(n=e,e=t),n=n||{};var r,i,s,o,u,a,f,l,c=v.ajaxSetup({},n),h=c.context||c,p=h!==c&&(h.nodeType||h instanceof v)?v(h):v.event,d=v.Deferred(),m=v.Callbacks("once 
memory"),g=c.statusCode||{},b={},w={},E=0,S="canceled",x={readyState:0,setRequestHeader:function(e,t){if(!E){var n=e.toLowerCase();e=w[n]=w[n]||e,b[e]=t}return this},getAllResponseHeaders:function(){return E===2?i:null},getResponseHeader:function(e){var n;if(E===2){if(!s){s={};while(n=pn.exec(i))s[n[1].toLowerCase()]=n[2]}n=s[e.toLowerCase()]}return n===t?null:n},overrideMimeType:function(e){return E||(c.mimeType=e),this},abort:function(e){return e=e||S,o&&o.abort(e),T(0,e),this}};d.promise(x),x.success=x.done,x.error=x.fail,x.complete=m.add,x.statusCode=function(e){if(e){var t;if(E<2)for(t in e)g[t]=[g[t],e[t]];else t=e[x.status],x.always(t)}return this},c.url=((e||c.url)+"").replace(hn,"").replace(mn,ln[1]+"//"),c.dataTypes=v.trim(c.dataType||"*").toLowerCase().split(y),c.crossDomain==null&&(a=wn.exec(c.url.toLowerCase()),c.crossDomain=!(!a||a[1]===ln[1]&&a[2]===ln[2]&&(a[3]||(a[1]==="http:"?80:443))==(ln[3]||(ln[1]==="http:"?80:443)))),c.data&&c.processData&&typeof c.data!="string"&&(c.data=v.param(c.data,c.traditional)),kn(Sn,c,n,x);if(E===2)return x;f=c.global,c.type=c.type.toUpperCase(),c.hasContent=!vn.test(c.type),f&&v.active++===0&&v.event.trigger("ajaxStart");if(!c.hasContent){c.data&&(c.url+=(gn.test(c.url)?"&":"?")+c.data,delete c.data),r=c.url;if(c.cache===!1){var N=v.now(),C=c.url.replace(bn,"$1_="+N);c.url=C+(C===c.url?(gn.test(c.url)?"&":"?")+"_="+N:"")}}(c.data&&c.hasContent&&c.contentType!==!1||n.contentType)&&x.setRequestHeader("Content-Type",c.contentType),c.ifModified&&(r=r||c.url,v.lastModified[r]&&x.setRequestHeader("If-Modified-Since",v.lastModified[r]),v.etag[r]&&x.setRequestHeader("If-None-Match",v.etag[r])),x.setRequestHeader("Accept",c.dataTypes[0]&&c.accepts[c.dataTypes[0]]?c.accepts[c.dataTypes[0]]+(c.dataTypes[0]!=="*"?", "+Tn+"; q=0.01":""):c.accepts["*"]);for(l in c.headers)x.setRequestHeader(l,c.headers[l]);if(!c.beforeSend||c.beforeSend.call(h,x,c)!==!1&&E!==2){S="abort";for(l 
in{success:1,error:1,complete:1})x[l](c[l]);o=kn(xn,c,n,x);if(!o)T(-1,"No Transport");else{x.readyState=1,f&&p.trigger("ajaxSend",[x,c]),c.async&&c.timeout>0&&(u=setTimeout(function(){x.abort("timeout")},c.timeout));try{E=1,o.send(b,T)}catch(k){if(!(E<2))throw k;T(-1,k)}}return x}return x.abort()},active:0,lastModified:{},etag:{}});var Mn=[],_n=/\?/,Dn=/(=)\?(?=&|$)|\?\?/,Pn=v.now();v.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Mn.pop()||v.expando+"_"+Pn++;return this[e]=!0,e}}),v.ajaxPrefilter("json jsonp",function(n,r,i){var s,o,u,a=n.data,f=n.url,l=n.jsonp!==!1,c=l&&Dn.test(f),h=l&&!c&&typeof a=="string"&&!(n.contentType||"").indexOf("application/x-www-form-urlencoded")&&Dn.test(a);if(n.dataTypes[0]==="jsonp"||c||h)return s=n.jsonpCallback=v.isFunction(n.jsonpCallback)?n.jsonpCallback():n.jsonpCallback,o=e[s],c?n.url=f.replace(Dn,"$1"+s):h?n.data=a.replace(Dn,"$1"+s):l&&(n.url+=(_n.test(f)?"&":"?")+n.jsonp+"="+s),n.converters["script json"]=function(){return u||v.error(s+" was not called"),u[0]},n.dataTypes[0]="json",e[s]=function(){u=arguments},i.always(function(){e[s]=o,n[s]&&(n.jsonpCallback=r.jsonpCallback,Mn.push(s)),u&&v.isFunction(o)&&o(u[0]),u=o=t}),"script"}),v.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(e){return v.globalEval(e),e}}}),v.ajaxPrefilter("script",function(e){e.cache===t&&(e.cache=!1),e.crossDomain&&(e.type="GET",e.global=!1)}),v.ajaxTransport("script",function(e){if(e.crossDomain){var 
n,r=i.head||i.getElementsByTagName("head")[0]||i.documentElement;return{send:function(s,o){n=i.createElement("script"),n.async="async",e.scriptCharset&&(n.charset=e.scriptCharset),n.src=e.url,n.onload=n.onreadystatechange=function(e,i){if(i||!n.readyState||/loaded|complete/.test(n.readyState))n.onload=n.onreadystatechange=null,r&&n.parentNode&&r.removeChild(n),n=t,i||o(200,"success")},r.insertBefore(n,r.firstChild)},abort:function(){n&&n.onload(0,1)}}}});var Hn,Bn=e.ActiveXObject?function(){for(var e in Hn)Hn[e](0,1)}:!1,jn=0;v.ajaxSettings.xhr=e.ActiveXObject?function(){return!this.isLocal&&Fn()||In()}:Fn,function(e){v.extend(v.support,{ajax:!!e,cors:!!e&&"withCredentials"in e})}(v.ajaxSettings.xhr()),v.support.ajax&&v.ajaxTransport(function(n){if(!n.crossDomain||v.support.cors){var r;return{send:function(i,s){var o,u,a=n.xhr();n.username?a.open(n.type,n.url,n.async,n.username,n.password):a.open(n.type,n.url,n.async);if(n.xhrFields)for(u in n.xhrFields)a[u]=n.xhrFields[u];n.mimeType&&a.overrideMimeType&&a.overrideMimeType(n.mimeType),!n.crossDomain&&!i["X-Requested-With"]&&(i["X-Requested-With"]="XMLHttpRequest");try{for(u in i)a.setRequestHeader(u,i[u])}catch(f){}a.send(n.hasContent&&n.data||null),r=function(e,i){var u,f,l,c,h;try{if(r&&(i||a.readyState===4)){r=t,o&&(a.onreadystatechange=v.noop,Bn&&delete Hn[o]);if(i)a.readyState!==4&&a.abort();else{u=a.status,l=a.getAllResponseHeaders(),c={},h=a.responseXML,h&&h.documentElement&&(c.xml=h);try{c.text=a.responseText}catch(p){}try{f=a.statusText}catch(p){f=""}!u&&n.isLocal&&!n.crossDomain?u=c.text?200:404:u===1223&&(u=204)}}}catch(d){i||s(-1,d)}c&&s(u,f,c,l)},n.async?a.readyState===4?setTimeout(r,0):(o=++jn,Bn&&(Hn||(Hn={},v(e).unload(Bn)),Hn[o]=r),a.onreadystatechange=r):r()},abort:function(){r&&r(0,1)}}}});var qn,Rn,Un=/^(?:toggle|show|hide)$/,zn=new RegExp("^(?:([-+])=|)("+m+")([a-z%]*)$","i"),Wn=/queueHooks$/,Xn=[Gn],Vn={"*":[function(e,t){var 
n,r,i=this.createTween(e,t),s=zn.exec(t),o=i.cur(),u=+o||0,a=1,f=20;if(s){n=+s[2],r=s[3]||(v.cssNumber[e]?"":"px");if(r!=="px"&&u){u=v.css(i.elem,e,!0)||n||1;do a=a||".5",u/=a,v.style(i.elem,e,u+r);while(a!==(a=i.cur()/o)&&a!==1&&--f)}i.unit=r,i.start=u,i.end=s[1]?u+(s[1]+1)*n:n}return i}]};v.Animation=v.extend(Kn,{tweener:function(e,t){v.isFunction(e)?(t=e,e=["*"]):e=e.split(" ");var n,r=0,i=e.length;for(;r-1,f={},l={},c,h;a?(l=i.position(),c=l.top,h=l.left):(c=parseFloat(o)||0,h=parseFloat(u)||0),v.isFunction(t)&&(t=t.call(e,n,s)),t.top!=null&&(f.top=t.top-s.top+c),t.left!=null&&(f.left=t.left-s.left+h),"using"in t?t.using.call(e,f):i.css(f)}},v.fn.extend({position:function(){if(!this[0])return;var e=this[0],t=this.offsetParent(),n=this.offset(),r=er.test(t[0].nodeName)?{top:0,left:0}:t.offset();return n.top-=parseFloat(v.css(e,"marginTop"))||0,n.left-=parseFloat(v.css(e,"marginLeft"))||0,r.top+=parseFloat(v.css(t[0],"borderTopWidth"))||0,r.left+=parseFloat(v.css(t[0],"borderLeftWidth"))||0,{top:n.top-r.top,left:n.left-r.left}},offsetParent:function(){return this.map(function(){var e=this.offsetParent||i.body;while(e&&!er.test(e.nodeName)&&v.css(e,"position")==="static")e=e.offsetParent;return e||i.body})}}),v.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(e,n){var r=/Y/.test(n);v.fn[e]=function(i){return v.access(this,function(e,i,s){var o=tr(e);if(s===t)return o?n in o?o[n]:o.document.documentElement[i]:e[i];o?o.scrollTo(r?v(o).scrollLeft():s,r?s:v(o).scrollTop()):e[i]=s},e,i,arguments.length,null)}}),v.each({Height:"height",Width:"width"},function(e,n){v.each({padding:"inner"+e,content:n,"":"outer"+e},function(r,i){v.fn[i]=function(i,s){var o=arguments.length&&(r||typeof i!="boolean"),u=r||(i===!0||s===!0?"margin":"border");return v.access(this,function(n,r,i){var s;return 
v.isWindow(n)?n.document.documentElement["client"+e]:n.nodeType===9?(s=n.documentElement,Math.max(n.body["scroll"+e],s["scroll"+e],n.body["offset"+e],s["offset"+e],s["client"+e])):i===t?v.css(n,r,i,u):v.style(n,r,i,u)},n,o?i:t,o,null)}})}),e.jQuery=e.$=v,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return v})})(window); \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/_static/minus.png b/python/altgraph/doc/_build/html/_static/minus.png new file mode 100644 index 000000000..da1c5620d Binary files /dev/null and b/python/altgraph/doc/_build/html/_static/minus.png differ diff --git a/python/altgraph/doc/_build/html/_static/nature.css b/python/altgraph/doc/_build/html/_static/nature.css new file mode 100644 index 000000000..f46081870 --- /dev/null +++ b/python/altgraph/doc/_build/html/_static/nature.css @@ -0,0 +1,245 @@ +/* + * nature.css_t + * ~~~~~~~~~~~~ + * + * Sphinx stylesheet -- nature theme. + * + * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +@import url("basic.css"); + +/* -- page layout ----------------------------------------------------------- */ + +body { + font-family: Arial, sans-serif; + font-size: 100%; + background-color: #111; + color: #555; + margin: 0; + padding: 0; +} + +div.documentwrapper { + float: left; + width: 100%; +} + +div.bodywrapper { + margin: 0 0 0 230px; +} + +hr { + border: 1px solid #B1B4B6; +} + +div.document { + background-color: #eee; +} + +div.body { + background-color: #ffffff; + color: #3E4349; + padding: 0 30px 30px 30px; + font-size: 0.9em; +} + +div.footer { + color: #555; + width: 100%; + padding: 13px 0; + text-align: center; + font-size: 75%; +} + +div.footer a { + color: #444; + text-decoration: underline; +} + +div.related { + background-color: #6BA81E; + line-height: 32px; + color: #fff; + text-shadow: 0px 1px 0 #444; + font-size: 0.9em; +} + +div.related a { + color: #E2F3CC; +} + +div.sphinxsidebar { + font-size: 0.75em; + line-height: 1.5em; +} + +div.sphinxsidebarwrapper{ + padding: 20px 0; +} + +div.sphinxsidebar h3, +div.sphinxsidebar h4 { + font-family: Arial, sans-serif; + color: #222; + font-size: 1.2em; + font-weight: normal; + margin: 0; + padding: 5px 10px; + background-color: #ddd; + text-shadow: 1px 1px 0 white +} + +div.sphinxsidebar h4{ + font-size: 1.1em; +} + +div.sphinxsidebar h3 a { + color: #444; +} + + +div.sphinxsidebar p { + color: #888; + padding: 5px 20px; +} + +div.sphinxsidebar p.topless { +} + +div.sphinxsidebar ul { + margin: 10px 20px; + padding: 0; + color: #000; +} + +div.sphinxsidebar a { + color: #444; +} + +div.sphinxsidebar input { + border: 1px solid #ccc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar input[type=text]{ + margin-left: 20px; +} + +/* -- body styles ----------------------------------------------------------- */ + +a { + color: #005B81; + text-decoration: none; +} + +a:hover { + color: #E32E00; + text-decoration: underline; +} + +div.body h1, +div.body h2, +div.body h3, 
+div.body h4, +div.body h5, +div.body h6 { + font-family: Arial, sans-serif; + background-color: #BED4EB; + font-weight: normal; + color: #212224; + margin: 30px 0px 10px 0px; + padding: 5px 0 5px 10px; + text-shadow: 0px 1px 0 white +} + +div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; } +div.body h2 { font-size: 150%; background-color: #C8D5E3; } +div.body h3 { font-size: 120%; background-color: #D8DEE3; } +div.body h4 { font-size: 110%; background-color: #D8DEE3; } +div.body h5 { font-size: 100%; background-color: #D8DEE3; } +div.body h6 { font-size: 100%; background-color: #D8DEE3; } + +a.headerlink { + color: #c60f0f; + font-size: 0.8em; + padding: 0 4px 0 4px; + text-decoration: none; +} + +a.headerlink:hover { + background-color: #c60f0f; + color: white; +} + +div.body p, div.body dd, div.body li { + line-height: 1.5em; +} + +div.admonition p.admonition-title + p { + display: inline; +} + +div.highlight{ + background-color: white; +} + +div.note { + background-color: #eee; + border: 1px solid #ccc; +} + +div.seealso { + background-color: #ffc; + border: 1px solid #ff6; +} + +div.topic { + background-color: #eee; +} + +div.warning { + background-color: #ffe4e4; + border: 1px solid #f66; +} + +p.admonition-title { + display: inline; +} + +p.admonition-title:after { + content: ":"; +} + +pre { + padding: 10px; + background-color: White; + color: #222; + line-height: 1.2em; + border: 1px solid #C6C9CB; + font-size: 1.1em; + margin: 1.5em 0 1.5em 0; + -webkit-box-shadow: 1px 1px 1px #d8d8d8; + -moz-box-shadow: 1px 1px 1px #d8d8d8; +} + +tt { + background-color: #ecf0f3; + color: #222; + /* padding: 1px 2px; */ + font-size: 1.1em; + font-family: monospace; +} + +.viewcode-back { + font-family: Arial, sans-serif; +} + +div.viewcode-block:target { + background-color: #f4debf; + border-top: 1px solid #ac9; + border-bottom: 1px solid #ac9; +} \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/_static/plus.png 
b/python/altgraph/doc/_build/html/_static/plus.png new file mode 100644 index 000000000..b3cb37425 Binary files /dev/null and b/python/altgraph/doc/_build/html/_static/plus.png differ diff --git a/python/altgraph/doc/_build/html/_static/pygments.css b/python/altgraph/doc/_build/html/_static/pygments.css new file mode 100644 index 000000000..d79caa151 --- /dev/null +++ b/python/altgraph/doc/_build/html/_static/pygments.css @@ -0,0 +1,62 @@ +.highlight .hll { background-color: #ffffcc } +.highlight { background: #eeffcc; } +.highlight .c { color: #408090; font-style: italic } /* Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: #007020; font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #007020 } /* Comment.Preproc */ +.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .gr { color: #FF0000 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #00A000 } /* Generic.Inserted */ +.highlight .go { color: #333333 } /* Generic.Output */ +.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #0044DD } /* Generic.Traceback */ +.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #007020 } /* Keyword.Pseudo */ 
+.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #902000 } /* Keyword.Type */ +.highlight .m { color: #208050 } /* Literal.Number */ +.highlight .s { color: #4070a0 } /* Literal.String */ +.highlight .na { color: #4070a0 } /* Name.Attribute */ +.highlight .nb { color: #007020 } /* Name.Builtin */ +.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ +.highlight .no { color: #60add5 } /* Name.Constant */ +.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ +.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ +.highlight .ne { color: #007020 } /* Name.Exception */ +.highlight .nf { color: #06287e } /* Name.Function */ +.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ +.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #bb60d5 } /* Name.Variable */ +.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mf { color: #208050 } /* Literal.Number.Float */ +.highlight .mh { color: #208050 } /* Literal.Number.Hex */ +.highlight .mi { color: #208050 } /* Literal.Number.Integer */ +.highlight .mo { color: #208050 } /* Literal.Number.Oct */ +.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ +.highlight .sc { color: #4070a0 } /* Literal.String.Char */ +.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #4070a0 } /* Literal.String.Double */ +.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ +.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ +.highlight .sx { color: #c65d09 } /* Literal.String.Other */ +.highlight .sr { color: #235388 } /* 
Literal.String.Regex */ +.highlight .s1 { color: #4070a0 } /* Literal.String.Single */ +.highlight .ss { color: #517918 } /* Literal.String.Symbol */ +.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ +.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ +.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ +.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ +.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/_static/searchtools.js b/python/altgraph/doc/_build/html/_static/searchtools.js new file mode 100644 index 000000000..f5c7e5fee --- /dev/null +++ b/python/altgraph/doc/_build/html/_static/searchtools.js @@ -0,0 +1,622 @@ +/* + * searchtools.js_t + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilties for the full-text search. + * + * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + + +/** + * Porter Stemmer + */ +var Stemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence + var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" 
+ v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if 
(re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + + + +/** + * Simple result scoring code. + */ +var Scorer = { + // Implement the following function to further tweak the score for each result + // The function takes a result array [filename, title, anchor, descr, score] + // and returns the new score. + /* + score: function(result) { + return result[4]; + }, + */ + + // query matches the full name of an object + objNameMatch: 11, + // or matches in the last dotted part of the object name + objPartialMatch: 6, + // Additive scores depending on the priority of the object + objPrio: {0: 15, // used to be importantResults + 1: 5, // used to be objectResults + 2: -5}, // used to be unimportantResults + // Used when the priority is not in the mapping. 
+ objPrioDefault: 0, + + // query found in title + title: 15, + // query found in terms + term: 5 +}; + + +/** + * Search Module + */ +var Search = { + + _index : null, + _queued_query : null, + _pulse_status : -1, + + init : function() { + var params = $.getQueryParameters(); + if (params.q) { + var query = params.q[0]; + $('input[name="q"]')[0].value = query; + this.performSearch(query); + } + }, + + loadIndex : function(url) { + $.ajax({type: "GET", url: url, data: null, + dataType: "script", cache: true, + complete: function(jqxhr, textstatus) { + if (textstatus != "success") { + document.getElementById("searchindexloader").src = url; + } + }}); + }, + + setIndex : function(index) { + var q; + this._index = index; + if ((q = this._queued_query) !== null) { + this._queued_query = null; + Search.query(q); + } + }, + + hasIndex : function() { + return this._index !== null; + }, + + deferQuery : function(query) { + this._queued_query = query; + }, + + stopPulse : function() { + this._pulse_status = 0; + }, + + startPulse : function() { + if (this._pulse_status >= 0) + return; + function pulse() { + var i; + Search._pulse_status = (Search._pulse_status + 1) % 4; + var dotString = ''; + for (i = 0; i < Search._pulse_status; i++) + dotString += '.'; + Search.dots.text(dotString); + if (Search._pulse_status > -1) + window.setTimeout(pulse, 500); + } + pulse(); + }, + + /** + * perform a search for something (or wait until index is loaded) + */ + performSearch : function(query) { + // create the required interface elements + this.out = $('#search-results'); + this.title = $('

' + _('Searching') + '

').appendTo(this.out); + this.dots = $('').appendTo(this.title); + this.status = $('

').appendTo(this.out); + this.output = $('
'); + } + // Prettify the comment rating. + comment.pretty_rating = comment.rating + ' point' + + (comment.rating == 1 ? '' : 's'); + // Make a class (for displaying not yet moderated comments differently) + comment.css_class = comment.displayed ? '' : ' moderate'; + // Create a div for this comment. + var context = $.extend({}, opts, comment); + var div = $(renderTemplate(commentTemplate, context)); + + // If the user has voted on this comment, highlight the correct arrow. + if (comment.vote) { + var direction = (comment.vote == 1) ? 'u' : 'd'; + div.find('#' + direction + 'v' + comment.id).hide(); + div.find('#' + direction + 'u' + comment.id).show(); + } + + if (opts.moderator || comment.text != '[deleted]') { + div.find('a.reply').show(); + if (comment.proposal_diff) + div.find('#sp' + comment.id).show(); + if (opts.moderator && !comment.displayed) + div.find('#cm' + comment.id).show(); + if (opts.moderator || (opts.username == comment.username)) + div.find('#dc' + comment.id).show(); + } + return div; + } + + /** + * A simple template renderer. Placeholders such as <%id%> are replaced + * by context['id'] with items being escaped. Placeholders such as <#id#> + * are not escaped. + */ + function renderTemplate(template, context) { + var esc = $(document.createElement('div')); + + function handle(ph, escape) { + var cur = context; + $.each(ph.split('.'), function() { + cur = cur[this]; + }); + return escape ? esc.text(cur || "").html() : cur; + } + + return template.replace(/<([%#])([\w\.]*)\1>/g, function() { + return handle(arguments[2], arguments[1] == '%' ? true : false); + }); + } + + /** Flash an error message briefly. */ + function showError(message) { + $(document.createElement('div')).attr({'class': 'popup-error'}) + .append($(document.createElement('div')) + .attr({'class': 'error-message'}).text(message)) + .appendTo('body') + .fadeIn("slow") + .delay(2000) + .fadeOut("slow"); + } + + /** Add a link the user uses to open the comments popup. 
*/ + $.fn.comment = function() { + return this.each(function() { + var id = $(this).attr('id').substring(1); + var count = COMMENT_METADATA[id]; + var title = count + ' comment' + (count == 1 ? '' : 's'); + var image = count > 0 ? opts.commentBrightImage : opts.commentImage; + var addcls = count == 0 ? ' nocomment' : ''; + $(this) + .append( + $(document.createElement('a')).attr({ + href: '#', + 'class': 'sphinx-comment-open' + addcls, + id: 'ao' + id + }) + .append($(document.createElement('img')).attr({ + src: image, + alt: 'comment', + title: title + })) + .click(function(event) { + event.preventDefault(); + show($(this).attr('id').substring(2)); + }) + ) + .append( + $(document.createElement('a')).attr({ + href: '#', + 'class': 'sphinx-comment-close hidden', + id: 'ah' + id + }) + .append($(document.createElement('img')).attr({ + src: opts.closeCommentImage, + alt: 'close', + title: 'close' + })) + .click(function(event) { + event.preventDefault(); + hide($(this).attr('id').substring(2)); + }) + ); + }); + }; + + var opts = { + processVoteURL: '/_process_vote', + addCommentURL: '/_add_comment', + getCommentsURL: '/_get_comments', + acceptCommentURL: '/_accept_comment', + deleteCommentURL: '/_delete_comment', + commentImage: '/static/_static/comment.png', + closeCommentImage: '/static/_static/comment-close.png', + loadingImage: '/static/_static/ajax-loader.gif', + commentBrightImage: '/static/_static/comment-bright.png', + upArrow: '/static/_static/up.png', + downArrow: '/static/_static/down.png', + upArrowPressed: '/static/_static/up-pressed.png', + downArrowPressed: '/static/_static/down-pressed.png', + voting: false, + moderator: false + }; + + if (typeof COMMENT_OPTIONS != "undefined") { + opts = jQuery.extend(opts, COMMENT_OPTIONS); + } + + var popupTemplate = '\ +
\ +

\ + Sort by:\ + best rated\ + newest\ + oldest\ +

\ +
Comments
\ +
\ + loading comments...
\ +
    \ +
    \ +

    Add a comment\ + (markup):

    \ +
    \ + reStructured text markup: *emph*, **strong**, \ + ``code``, \ + code blocks: :: and an indented block after blank line
    \ +
    \ + \ +

    \ + \ + Propose a change ▹\ + \ + \ + Propose a change ▿\ + \ +

    \ + \ + \ + \ + \ + \ +
    \ +
    '; + + var commentTemplate = '\ +
    \ +
    \ +
    \ + \ + \ + \ + \ + \ + \ +
    \ +
    \ + \ + \ + \ + \ + \ + \ +
    \ +
    \ +
    \ +

    \ + <%username%>\ + <%pretty_rating%>\ + <%time.delta%>\ +

    \ +
    <#text#>
    \ +

    \ + \ + reply ▿\ + proposal ▹\ + proposal ▿\ + \ + \ +

    \ +
    \
    +<#proposal_diff#>\
    +        
    \ +
      \ +
      \ +
      \ +
      \ + '; + + var replyTemplate = '\ +
    • \ +
      \ +
      \ + \ + \ + \ + \ + \ + \ +
      \ +
    • '; + + $(document).ready(function() { + init(); + }); +})(jQuery); + +$(document).ready(function() { + // add comment anchors for all paragraphs that are commentable + $('.sphinx-has-comment').comment(); + + // highlight search words in search results + $("div.context").each(function() { + var params = $.getQueryParameters(); + var terms = (params.q) ? params.q[0].split(/\s+/) : []; + var result = $(this); + $.each(terms, function() { + result.highlightText(this.toLowerCase(), 'highlighted'); + }); + }); + + // directly open comment window if requested + var anchor = document.location.hash; + if (anchor.substring(0, 9) == '#comment-') { + $('#ao' + anchor.substring(9)).click(); + document.location.hash = '#s' + anchor.substring(9); + } +}); diff --git a/python/altgraph/doc/_build/html/changelog.html b/python/altgraph/doc/_build/html/changelog.html new file mode 100644 index 000000000..8f0f459ea --- /dev/null +++ b/python/altgraph/doc/_build/html/changelog.html @@ -0,0 +1,271 @@ + + + + + + + + Release history — altgraph 0.11 documentation + + + + + + + + + + + + + + + +
      +
      +
      +
      + +
      +

      Release history¶

      +
      +

      0.11¶

      +
        +
      • Stabilize the order of elements in dot file exports, +patch from bitbucket user ‘pombredanne’.
      • +
      • Tweak setup.py file to remove dependency on distribute (but +keep the dependency on setuptools)
      • +
      +
      +
      +

      0.10.2¶

      +
        +
      • There where no classifiers in the package metadata due to a bug +in setup.py
      • +
      +
      +
      +

      0.10.1¶

      +

      This is a bugfix release

      +

      Bug fixes:

      +
        +
      • Issue #3: The source archive contains a README.txt +while the setup file refers to ReadMe.txt.

        +

        This is caused by a misfeature in distutils, as a +workaround I’ve renamed ReadMe.txt to README.txt +in the source tree and setup file.

        +
      • +
      +
      +
      +

      0.10¶

      +

      This is a minor feature release

      +

      Features:

      +
        +
      • Do not use “2to3” to support Python 3.

        +

        As a side effect of this altgraph now supports +Python 2.6 and later, and no longer supports +earlier releases of Python.

        +
      • +
      • The order of attributes in the Dot output +is now always alphabetical.

        +

        With this change the output will be consistent +between runs and Python versions.

        +
      • +
      +
      +
      +

      0.9¶

      +

      This is a minor bugfix release

      +

      Features:

      +
        +
      • Added altgraph.ObjectGraph.ObjectGraph.nodes, a method +yielding all nodes in an object graph.
      • +
      +

      Bugfixes:

      +
        +
      • The 0.8 release didn’t work with py2app when using +python 3.x.
      • +
      +
      +
      +

      0.8¶

      +

      This is a minor feature release. The major new feature +is a extensive set of unittests, which explains almost +all other changes in this release.

      +

      Bugfixes:

      +
        +
      • Installing failed with Python 2.5 due to using a distutils +class that isn’t available in that version of Python +(issue #1 on the issue tracker)
      • +
      • altgraph.GraphStat.degree_dist now actually works
      • +
      • altgraph.Graph.add_edge(a, b, create_nodes=False) will +no longer create the edge when one of the nodes doesn’t +exist.
      • +
      • altgraph.Graph.forw_topo_sort failed for some sparse graphs.
      • +
      • altgraph.Graph.back_topo_sort was completely broken in +previous releases.
      • +
      • altgraph.Graph.forw_bfs_subgraph now actually works.
      • +
      • altgraph.Graph.back_bfs_subgraph now actually works.
      • +
      • altgraph.Graph.iterdfs now returns the correct result +when the forward argument is False.
      • +
      • altgraph.Graph.iterdata now returns the correct result +when the forward argument is False.
      • +
      +

      Features:

      +
        +
      • The altgraph.Graph constructor now accepts an argument +that contains 2- and 3-tuples instead of requireing that +all items have the same size. The (optional) argument can now +also be any iterator.
      • +
      • altgraph.Graph.Graph.add_node has no effect when you +add a hidden node.
      • +
      • The private method altgraph.Graph._bfs is no longer +present.
      • +
      • The private method altgraph.Graph._dfs is no longer +present.
      • +
      • altgraph.ObjectGraph now has a __contains__ methods, +which means you can use the in operator to check if a +node is part of a graph.
      • +
      • altgraph.GraphUtil.generate_random_graph will raise +GraphError instead of looping forever when it is +impossible to create the requested graph.
      • +
      • altgraph.Dot.edge_style raises GraphError when +one of the nodes is not present in the graph. The method +silently added the tail in the past, but without ensuring +a consistent graph state.
      • +
      • altgraph.Dot.save_img now works when the mode is +"neato".
      • +
      +
      +
      +

      0.7.2¶

      +

      This is a minor bugfix release

      +

      Bugfixes:

      +
        +
      • distutils didn’t include the documentation subtree
      • +
      +
      +
      +

      0.7.1¶

      +

      This is a minor feature release

      +

      Features:

      +
        +
      • Documentation is now generated using sphinx +and can be viewed at <http://packages.python.org/altgraph>.
      • +
      • The repository has moved to bitbucket
      • +
      • altgraph.GraphStat.avg_hops is no longer present, the function had no +implementation and no specified behaviour.
      • +
      • the module altgraph.compat is gone, which means altgraph will no +longer work with Python 2.3.
      • +
      +
      +
      +

      0.7.0¶

      +

      This is a minor feature release.

      +

      Features:

      +
        +
      • Support for Python 3

        +
      • +
      • It is now possible to run tests using ‘python setup.py test’

        +

        (The actual testsuite is still very minimal though)

        +
      • +
      +
      +
      + + +
      +
      +
      +
      +
      +

      Table Of Contents

      + + +

      Previous topic

      +

      Altgraph - A basic graph library

      +

      Next topic

      +

      License

      + + +
      +
      +
      +
      + + + + \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/core.html b/python/altgraph/doc/_build/html/core.html new file mode 100644 index 000000000..3d2ee1737 --- /dev/null +++ b/python/altgraph/doc/_build/html/core.html @@ -0,0 +1,130 @@ + + + + + + + + altgraph — A Python Graph Library — altgraph 0.11 documentation + + + + + + + + + + + + + + + +
      +
      +
      +
      + +
      +

      altgraph — A Python Graph Library¶

      +

      altgraph is a fork of graphlib tailored +to use newer Python 2.3+ features, including additional support used by the +py2app suite (modulegraph and macholib, specifically).

      +

      altgraph is a python based graph (network) representation and manipulation package. +It has started out as an extension to the graph_lib module +written by Nathan Denny it has been significantly optimized and expanded.

      +

      The altgraph.Graph.Graph class is loosely modeled after the LEDA +(Library of Efficient Datatypes) representation. The library +includes methods for constructing graphs, BFS and DFS traversals, +topological sort, finding connected components, shortest paths as well as a number +graph statistics functions. The library can also visualize graphs +via graphviz.

      +
      +
      +exception altgraph.GraphError¶
      +

      Exception raised when methods are called with bad values of +an inconsistent state.

      +
      + +
      + + +
      +
      +
      +
      +
      +

      Previous topic

      +

      License

      +

      Next topic

      +

      altgraph.Graph — Basic directional graphs

      + + +
      +
      +
      +
      + + + + \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/dot.html b/python/altgraph/doc/_build/html/dot.html new file mode 100644 index 000000000..87fabc077 --- /dev/null +++ b/python/altgraph/doc/_build/html/dot.html @@ -0,0 +1,332 @@ + + + + + + + + altgraph.Dot — Interface to the dot language — altgraph 0.11 documentation + + + + + + + + + + + + + + +
      +
      +
      +
      + +
      +

      altgraph.Dot — Interface to the dot language¶

      +

      The Dot module provides a simple interface to the +file format used in the graphviz program. The module is intended to +offload the most tedious part of the process (the dot file generation) +while transparently exposing most of its features.

      +

      To display the graphs or to generate image files the graphviz +package needs to be installed on the system, moreover the dot and dotty programs must +be accesible in the program path so that they can be ran from processes spawned +within the module.

      +
      +

      Example usage¶

      +

      Here is a typical usage:

      +
      from altgraph import Graph, Dot
      +
      +# create a graph
      +edges = [ (1,2), (1,3), (3,4), (3,5), (4,5), (5,4) ]
      +graph = Graph.Graph(edges)
      +
      +# create a dot representation of the graph
      +dot = Dot.Dot(graph)
      +
      +# display the graph
      +dot.display()
      +
      +# save the dot representation into the mydot.dot file
      +dot.save_dot(file_name='mydot.dot')
      +
      +# save dot file as gif image into the graph.gif file
      +dot.save_img(file_name='graph', file_type='gif')
      +
      +
      +
      +
      +

      Directed graph and non-directed graph¶

      +

      Dot class can use for both directed graph and non-directed graph +by passing graphtype parameter.

      +

      Example:

      +
      # create directed graph(default)
      +dot = Dot.Dot(graph, graphtype="digraph")
      +
      +# create non-directed graph
      +dot = Dot.Dot(graph, graphtype="graph")
      +
      +
      +
      +
      +

      Customizing the output¶

      +

      The graph drawing process may be customized by passing +valid dot parameters for the nodes and edges. For a list of all +parameters see the graphviz documentation.

      +

      Example:

      +
      # customizing the way the overall graph is drawn
      +dot.style(size='10,10', rankdir='RL', page='5, 5' , ranksep=0.75)
      +
      +# customizing node drawing
      +dot.node_style(1, label='BASE_NODE',shape='box', color='blue' )
      +dot.node_style(2, style='filled', fillcolor='red')
      +
      +# customizing edge drawing
      +dot.edge_style(1, 2, style='dotted')
      +dot.edge_style(3, 5, arrowhead='dot', label='binds', labelangle='90')
      +dot.edge_style(4, 5, arrowsize=2, style='bold')
      +
      +
      +.. note::
      +
      +   dotty (invoked via :py:func:`~altgraph.Dot.display`) may not be able to
      +   display all graphics styles. To verify the output save it to an image
      +   file and look at it that way.
      +
      +
      +
      +
      +

      Valid attributes¶

      +
        +
      • dot styles, passed via the Dot.style() method:

        +
        rankdir = 'LR'   (draws the graph horizontally, left to right)
        +ranksep = number (rank separation in inches)
        +
        +
        +
      • +
      • node attributes, passed via the Dot.node_style() method:

        +
        style = 'filled' | 'invisible' | 'diagonals' | 'rounded'
        +shape = 'box' | 'ellipse' | 'circle' | 'point' | 'triangle'
        +
        +
        +
      • +
      • edge attributes, passed via the Dot.edge_style() method:

        +
        style     = 'dashed' | 'dotted' | 'solid' | 'invis' | 'bold'
        +arrowhead = 'box' | 'crow' | 'diamond' | 'dot' | 'inv' | 'none' | 'tee' | 'vee'
        +weight    = number (the larger the number the closer the nodes will be)
        +
        +
        +
      • +
      • valid graphviz colors

        +
      • +
      • for more details on how to control the graph drawing process see the +graphviz reference.

        +
      • +
      +
      +
      +

      Class interface¶

      +
      +
      +class altgraph.Dot.Dot(graph[, nodes[, edgefn[, nodevisitor[, edgevisitor[, name[, dot[, dotty[, neato[, graphtype]]]]]]]]])¶
      +

      Creates a new Dot generator based on the specified +Graph. The Dot generator won’t reference +the graph once it is constructed.

      +

      If the nodes argument is present it is the list of nodes to include +in the graph, otherwise all nodes in graph are included.

      +

      If the edgefn argument is present it is a function that yields the +nodes connected to another node, this defaults to +graph.out_nbr. The constructor won’t +add edges to the dot file unless both the head and tail of the edge +are in nodes.

      +

      If the name is present it specifies the name of the graph in the resulting +dot file. The default is "G".

      +

      The functions nodevisitor and edgevisitor return the default style +for a given edge or node (both default to functions that return an empty +style).

      +

      The arguments dot, dotty and neato are used to pass the path to +the corresponding graphviz command.

      +
      + +
      +

      Updating graph attributes¶

      +
      +
      +Dot.style(**attr)¶
      +

      Sets the overall style (graph attributes) to the given attributes.

      +

      See Valid Attributes for more information about the attributes.

      +
      + +
      +
      +Dot.node_style(node, **attr)¶
      +

      Sets the style for node to the given attributes.

      +

      This method will add node to the graph when it isn’t already +present.

      +

      See Valid Attributes for more information about the attributes.

      +
      + +
      +
      +Dot.all_node_style(**attr)¶
      +

      Replaces the current style for all nodes

      +
      + +
      +
      +altgraph.Dot.edge_style(head, tail, **attr)¶
      +

      Sets the style of an edge to the given attributes. The edge will +be added to the graph when it isn’t already present, but head +and tail must both be valid nodes.

      +

      See Valid Attributes for more information about the attributes.

      +
      + +
      +
      +

      Emitting output¶

      +
      +
      +Dot.display([mode])¶
      +

      Displays the current graph via dotty.

      +

      If the mode is "neato" the dot file is processed with +the neato command before displaying.

      +

      This method won’t return until the dotty command exits.

      +
      + +
      +
      +altgraph.Dot.save_dot(filename)¶
      +

      Saves the current graph representation into the given file.

      +
      +

      Note

      +

      For backward compatibility reasons this method can also +be called without an argument, it will then write the graph +into a fixed filename (present in the attribute Graph.temp_dot).

      +

      This feature is deprecated and should not be used.

      +
      +
      + +
      +
      +altgraph.Dot.save_image(file_name[, file_type[, mode]])¶
      +

      Saves the current graph representation as an image file. The output +is written into a file whose basename is file_name and whose suffix +is file_type.

      +

      The file_type specifies the type of file to write, the default +is "gif".

      +

      If the mode is "neato" the dot file is processed with +the neato command before displaying.

      +
      +

      Note

      +

      For backward compatibility reasons this method can also +be called without an argument, it will then write the graph +with a fixed basename ("out").

      +

      This feature is deprecated and should not be used.

      +
      +
      + +
      +
      +altgraph.Dot.iterdot()¶
      +

      Yields all lines of a graphviz input file (including line endings).

      +
      + +
      +
      +altgraph.Dot.__iter__()¶
      +

      Alias for the iterdot() method.

      +
      + +
      +
      +
      + + +
      +
      +
      +
      +
      +

      Table Of Contents

      + + +

      Previous topic

      +

      altgraph.GraphUtil — Utility functions

      + + +
      +
      +
      +
      + + + + \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/genindex.html b/python/altgraph/doc/_build/html/genindex.html new file mode 100644 index 000000000..16977c712 --- /dev/null +++ b/python/altgraph/doc/_build/html/genindex.html @@ -0,0 +1,604 @@ + + + + + + + + + Index — altgraph 0.11 documentation + + + + + + + + + + + + + +
      +
      +
      +
      + + +

      Index

      + +
      + _ + | A + | B + | C + | D + | E + | F + | G + | H + | I + | M + | N + | O + | R + | S + | T + +
      +

      _

      +
      + + +
      + +
      __contains__() (altgraph.Graph.Graph method) +
      + +
      + +
      (altgraph.ObjectGraph.ObjectGraph method) +
      + +
      +
      + +
      __iter__() (altgraph.Graph.Graph method) +
      + +
      + +
      (in module altgraph.Dot) +
      + +
      +
      + +

      A

      + + + +
      + +
      add_edge() (altgraph.Graph.Graph method) +
      + + +
      add_node() (altgraph.Graph.Graph method) +
      + + +
      addNode() (altgraph.ObjectGraph.ObjectGraph method) +
      + + +
      all_degree() (altgraph.Graph.Graph method) +
      + + +
      all_edges() (altgraph.Graph.Graph method) +
      + + +
      all_nbrs() (altgraph.Graph.Graph method) +
      + + +
      all_node_style() (altgraph.Dot.Dot method) +
      + +
      + +
      altgraph (module) +
      + + +
      altgraph.Dot (module) +
      + + +
      altgraph.Graph (module) +
      + + +
      altgraph.GraphAlgo (module) +
      + + +
      altgraph.GraphStat (module) +
      + + +
      altgraph.GraphUtil (module) +
      + + +
      altgraph.ObjectGraph (module) +
      + +
      + +

      B

      + + + +
      + +
      back_bfs() (altgraph.Graph.Graph method) +
      + + +
      back_bfs_subgraph() (altgraph.Graph.Graph method) +
      + +
      + +
      back_topo_sort() (altgraph.Graph.Graph method) +
      + +
      + +

      C

      + + + +
      + +
      clust_coef() (altgraph.Graph.Graph method) +
      + + +
      connected() (altgraph.Graph.Graph method) +
      + +
      + +
      createNode() (altgraph.ObjectGraph.ObjectGraph method) +
      + + +
      createReferences() (altgraph.ObjectGraph.ObjectGraph method) +
      + +
      + +

      D

      + + + +
      + +
      degree_dist() (in module altgraph.GraphStat) +
      + + +
      describe_edge() (altgraph.Graph.Graph method) +
      + + +
      describe_node() (altgraph.Graph.Graph method) +
      + +
      + +
      dijkstra() (in module altgraph.GraphAlgo) +
      + + +
      display() (altgraph.Dot.Dot method) +
      + + +
      Dot (class in altgraph.Dot) +
      + +
      + +

      E

      + + + +
      + +
      edge_by_id() (altgraph.Graph.Graph method) +
      + + +
      edge_by_node() (altgraph.Graph.Graph method) +
      + + +
      edge_data() (altgraph.Graph.Graph method) +
      + +
      + +
      edge_list() (altgraph.Graph.Graph method) +
      + + +
      edge_style() (in module altgraph.Dot) +
      + +
      + +

      F

      + + + +
      + +
      filter_stack() (in module altgraph.GraphUtil) +
      + + +
      filterStack() (altgraph.ObjectGraph.ObjectGraph method) +
      + + +
      findNode() (altgraph.ObjectGraph.ObjectGraph method) +
      + + +
      flatten() (altgraph.ObjectGraph.ObjectGraph method) +
      + +
      + +
      forw_bfs() (altgraph.Graph.Graph method) +
      + + +
      forw_bfs_subgraph() (altgraph.Graph.Graph method) +
      + + +
      forw_topo_sort() (altgraph.Graph.Graph method) +
      + +
      + +

      G

      + + + +
      + +
      generate_random_graph() (in module altgraph.GraphUtil) +
      + + +
      generate_scale_free_graph() (in module altgraph.GraphUtil) +
      + + +
      get_edges() (altgraph.ObjectGraph.ObjectGraph method) +
      + + +
      get_hops() (altgraph.Graph.Graph method) +
      + +
      + +
      getIdent() (in module altgraph.ObjectGraph) +
      + + +
      getRawIdent() (altgraph.ObjectGraph.ObjectGraph method) +
      + + +
      Graph (class in altgraph.Graph) +
      + + +
      GraphError +
      + +
      + +

      H

      + + + +
      + +
      head() (altgraph.Graph.Graph method) +
      + + +
      hidden_edge_list() (altgraph.Graph.Graph method) +
      + + +
      hidden_node_list() (altgraph.Graph.Graph method) +
      + +
      + +
      hide_edge() (altgraph.Graph.Graph method) +
      + + +
      hide_node() (altgraph.Graph.Graph method) +
      + +
      + +

      I

      + + + +
      + +
      inc_degree() (altgraph.Graph.Graph method) +
      + + +
      inc_edges() (altgraph.Graph.Graph method) +
      + + +
      inc_nbrs() (altgraph.Graph.Graph method) +
      + +
      + +
      iterdata() (altgraph.Graph.Graph method) +
      + + +
      iterdfs() (altgraph.Graph.Graph method) +
      + + +
      iterdot() (in module altgraph.Dot) +
      + +
      + +

      M

      + + + +
      + +
      msg() (altgraph.ObjectGraph.ObjectGraph method) +
      + + +
      msgin() (altgraph.ObjectGraph.ObjectGraph method) +
      + +
      + +
      msgout() (altgraph.ObjectGraph.ObjectGraph method) +
      + +
      + +

      N

      + + + +
      + +
      node_data() (in module altgraph.Graph) +
      + + +
      node_list() (altgraph.Graph.Graph method) +
      + + +
      node_style() (altgraph.Dot.Dot method) +
      + + +
      nodes() (altgraph.ObjectGraph.ObjectGraph method) +
      + +
      + +
      number_of_edges() (altgraph.Graph.Graph method) +
      + + +
      number_of_hidden_edges() (altgraph.Graph.Graph method) +
      + + +
      number_of_hidden_nodes() (altgraph.Graph.Graph method) +
      + + +
      number_of_nodes() (altgraph.Graph.Graph method) +
      + +
      + +

      O

      + + + +
      + +
      ObjectGraph (class in altgraph.ObjectGraph) +
      + + +
      ObjectGraph.debug (in module altgraph.ObjectGraph) +
      + + +
      ObjectGraph.graph (in module altgraph.ObjectGraph) +
      + +
      + +
      out_degree() (altgraph.Graph.Graph method) +
      + + +
      out_edges() (altgraph.Graph.Graph method) +
      + + +
      out_nbrs() (altgraph.Graph.Graph method) +
      + +
      + +

      R

      + + + +
      + +
      removeNode() (altgraph.ObjectGraph.ObjectGraph method) +
      + + +
      removeReference() (in module altgraph.ObjectGraph) +
      + + +
      restore_all_edges() (altgraph.Graph.Graph method) +
      + +
      + +
      restore_all_nodes() (altgraph.Graph.Graph method) +
      + + +
      restore_edge() (altgraph.Graph.Graph method) +
      + + +
      restore_node() (altgraph.Graph.Graph method) +
      + +
      + +

      S

      + + + +
      + +
      save_dot() (in module altgraph.Dot) +
      + + +
      save_image() (in module altgraph.Dot) +
      + +
      + +
      shortest_path() (in module altgraph.GraphAlgo) +
      + + +
      style() (altgraph.Dot.Dot method) +
      + +
      + +

      T

      + + +
      + +
      tail() (altgraph.Graph.Graph method) +
      + +
      + + + + + + +
      +
      + + + + + +
      +
      +
      + + + + + \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/graph.html b/python/altgraph/doc/_build/html/graph.html new file mode 100644 index 000000000..72dd1cdef --- /dev/null +++ b/python/altgraph/doc/_build/html/graph.html @@ -0,0 +1,491 @@ + + + + + + + + altgraph.Graph — Basic directional graphs — altgraph 0.11 documentation + + + + + + + + + + + + + + + +
      +
      +
      +
      + +
      +

      altgraph.Graph — Basic directional graphs¶

      +

      The module altgraph.Graph provides a class Graph that +represents a directed graph with N nodes and E edges.

      +
      +
      +class altgraph.Graph.Graph([edges])¶
      +

      Constructs a new empty Graph object. If the optional +edges parameter is supplied, updates the graph by adding the +specified edges.

      +

      All of the elements in edges should be tuples with two or three +elements. The first two elements of the tuple are the source and +destination node of the edge, the optional third element is the +edge data. The source and destination nodes are added to the graph +when the aren’t already present.

      +
      + + + +
      +

      Graph traversal¶

      +
      +
      +Graph.out_nbrs(node)¶
      +

      Return a list of all nodes connected by outgoing edges.

      +
      + +
      +
      +Graph.inc_nbrs(node)¶
      +

      Return a list of all nodes connected by incoming edges.

      +
      + +
      +
      +Graph.all_nbrs(node)¶
      +

      Returns a list of nodes connected by an incoming or outgoing edge.

      +
      + +
      +
      +Graph.forw_topo_sort()¶
      +

      Return a list of nodes where the successors (based on outgoing +edges) of any given node apear in the sequence after that node.

      +
      + +
      +
      +Graph.back_topo_sort()¶
      +

      Return a list of nodes where the successors (based on incoming +edges) of any given node apear in the sequence after that node.

      +
      + +
      +
      +Graph.forw_bfs_subgraph(start_id)¶
      +

      Return a subgraph consisting of the breadth first +reachable nodes from start_id based on their outgoing edges.

      +
      + +
      +
      +Graph.back_bfs_subgraph(start_id)¶
      +

      Return a subgraph consisting of the breadth first +reachable nodes from start_id based on their incoming edges.

      +
      + +
      +
      +Graph.iterdfs(start[, end[, forward]])¶
      +

      Yield nodes in a depth first traversal starting at the start +node.

      +

      If end is specified traversal stops when reaching that node.

      +

      If forward is True (the default) edges are traversed in forward +direction, otherwise they are traversed in reverse direction.

      +
      + +
      +
      +Graph.iterdata(start[, end[, forward[, condition]]])¶
      +

      Yield the associated data for nodes in a depth first traversal +starting at the start node. This method will not yield values for nodes +without associated data.

      +

      If end is specified traversal stops when reaching that node.

      +

      If condition is specified and the condition callable returns +False for the associated data this method will not yield the +associated data and will not follow the edges for the node.

      +

      If forward is True (the default) edges are traversed in forward +direction, otherwise they are traversed in reverse direction.

      +
      + +
      +
      +Graph.forw_bfs(start[, end])¶
      +

      Returns a list of nodes starting at start in some bread first +search order (following outgoing edges).

      +

      When end is specified iteration stops at that node.

      +
      + +
      +
      +Graph.back_bfs(start[, end])¶
      +

      Returns a list of nodes starting at start in some bread first +search order (following incoming edges).

      +

      When end is specified iteration stops at that node.

      +
      + +
      +
      +Graph.get_hops(start[, end[, forward]])¶
      +

      Computes the hop distance to all nodes centered around a specified node.

      +

      First order neighbours are at hop 1, their neigbours are at hop 2 etc. +Uses forw_bfs() or back_bfs() depending on the value of +the forward parameter.

      +

      If the distance between all neighbouring nodes is 1 the hop number +corresponds to the shortest distance between the nodes.

      +

      Typical usage:

      +
      >>> print graph.get_hops(1, 8)
      +>>> [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)]
      +# node 1 is at 0 hops
      +# node 2 is at 1 hop
      +# ...
      +# node 8 is at 5 hops
      +
      +
      +
      + +
      +
      +

      Graph statistics¶

      +
      +
      +Graph.connected()¶
      +

      Returns True iff every node in the graph can be reached from +every other node.

      +
      + +
      +
      +Graph.clust_coef(node)¶
      +

      Returns the local clustering coefficient of node.

      +

      The local cluster coefficient is the proportion of the actual number +of edges between neighbours of node and the maximum number of +edges between those nodes.

      +
      + +
      +
      + + +
      +
      +
      +
      +
      +

      Table Of Contents

      + + +

      Previous topic

      +

      altgraph — A Python Graph Library

      +

      Next topic

      +

      altgraph.ObjectGraph — Graphs of objecs with an identifier

      + + +
      +
      +
      +
      + + + + \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/graphalgo.html b/python/altgraph/doc/_build/html/graphalgo.html new file mode 100644 index 000000000..07800d977 --- /dev/null +++ b/python/altgraph/doc/_build/html/graphalgo.html @@ -0,0 +1,134 @@ + + + + + + + + altgraph.GraphAlgo — Graph algorithms — altgraph 0.11 documentation + + + + + + + + + + + + + + + +
      +
      +
      +
      + +
      +

      altgraph.GraphAlgo — Graph algorithms¶

      +
      +
      +altgraph.GraphAlgo.dijkstra(graph, start[, end])¶
      +

      Dijkstra’s algorithm for shortest paths.

      +

      Find shortest paths from the start node to all nodes nearer +than or equal to the end node. The edge data is assumed to be the edge length.

      +
      +

      Note

      +

      Dijkstra’s algorithm is only guaranteed to work correctly when all edge lengths are positive. +This code does not verify this property for all edges (only the edges examined until the end +vertex is reached), but will correctly compute shortest paths even for some graphs with negative +edges, and will raise an exception if it discovers that a negative edge has caused it to make a mistake.

      +
      +
      + +
      +
      +altgraph.GraphAlgo.shortest_path(graph, start, end)¶
      +

      Find a single shortest path from the given start node to the given end node. +The input has the same conventions as dijkstra(). The output is a list +of the nodes in order along the shortest path.

      +
      + +
      + + +
      +
      +
      +
      +
      +

      Previous topic

      +

      altgraph.ObjectGraph — Graphs of objecs with an identifier

      +

      Next topic

      +

      altgraph.GraphStat — Functions providing various graph statistics

      + + +
      +
      +
      +
      + + + + \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/graphstat.html b/python/altgraph/doc/_build/html/graphstat.html new file mode 100644 index 000000000..49a89bf51 --- /dev/null +++ b/python/altgraph/doc/_build/html/graphstat.html @@ -0,0 +1,130 @@ + + + + + + + + altgraph.GraphStat — Functions providing various graph statistics — altgraph 0.11 documentation + + + + + + + + + + + + + + + +
      +
      +
      +
      + +
      +

      altgraph.GraphStat — Functions providing various graph statistics¶

      +

      The module altgraph.GraphStat provides function that calculate +graph statistics. Currently there is only one such function, more may +be added later.

      +
      +
      +altgraph.GraphStat.degree_dist(graph[, limits[, bin_num[, mode]]])¶
      +

      Groups the number of edges per node into bin_num bins +and returns the list of those bins. Every item in the result +is a tuple with the center of the bin and the number of items +in that bin.

      +

      When the limits argument is present it must be a tuple with +the mininum and maximum number of edges that get binned (that +is, when limits is (4, 10) only nodes with between 4 +and 10 edges get counted.

      +

      The mode argument is used to count incoming ('inc') or +outgoing ('out') edges. The default is to count the outgoing +edges.

      +
      + +
      + + +
      +
      +
      +
      +
      +

      Previous topic

      +

      altgraph.GraphAlgo — Graph algorithms

      +

      Next topic

      +

      altgraph.GraphUtil — Utility functions

      + + +
      +
      +
      +
      + + + + \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/graphutil.html b/python/altgraph/doc/_build/html/graphutil.html new file mode 100644 index 000000000..774efcfdf --- /dev/null +++ b/python/altgraph/doc/_build/html/graphutil.html @@ -0,0 +1,162 @@ + + + + + + + + altgraph.GraphUtil — Utility functions — altgraph 0.11 documentation + + + + + + + + + + + + + + + +
      +
      +
      +
      + +
      +

      altgraph.GraphUtil — Utility functions¶

      +

      The module altgraph.GraphUtil performs a number of more +or less useful utility functions.

      +
      +
      +altgraph.GraphUtil.generate_random_graph(node_num, edge_num[, self_loops[, multi_edges])¶
      +

      Generates and returns a Graph instance +with node_num nodes randomly connected by edge_num edges.

      +

      When self_loops is present and True there can be edges that point from +a node to itself.

      +

      When multi_edge is present and True there can be duplicate edges.

      +

      This method raises GraphError <altgraph.GraphError when +a graph with the requested configuration cannot be created.

      +
      + +
      +
      +altgraph.GraphUtil.generate_scale_free_graph(steps, growth_num[, self_loops[, multi_edges]])¶
      +

      Generates and returns a Graph instance that +will have steps*growth_n um nodes and a scale free (powerlaw) +connectivity.

      +

      Starting with a fully connected graph with growth_num nodes +at every step growth_num nodes are added to the graph and are connected +to existing nodes with a probability proportional to the degree of these +existing nodes.

      +
      +

      Warning

      +

      The current implementation is basically untested, although +code inspection seems to indicate an implementation that is consistent +with the description at +Wolfram MathWorld

      +
      +
      + +
      +
      +altgraph.GraphUtil.filter_stack(graph, head, filters)¶
      +

      Perform a depth-first order walk of the graph starting at head and +apply all filter functions in filters on the node data of the nodes +found.

      +

      Returns (visited, removes, orphans), where

      +
        +
      • visited: the set of visited nodes
      • +
      • removes: the list of nodes where the node data doesn’t match +all filters.
      • +
      • orphans: list of tuples (last_good, node), where +node is not in removes and one of the nodes that is connected +by an incoming edge is in removes. Last_good is the +closest upstream node that is not in removes.
      • +
      +
      + +
      + + +
      +
      +
      +
      +
      +

      Previous topic

      +

      altgraph.GraphStat — Functions providing various graph statistics

      +

      Next topic

      +

      altgraph.Dot — Interface to the dot language

      + + +
      +
      +
      +
      + + + + \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/index.html b/python/altgraph/doc/_build/html/index.html new file mode 100644 index 000000000..83c8c9a25 --- /dev/null +++ b/python/altgraph/doc/_build/html/index.html @@ -0,0 +1,142 @@ + + + + + + + + Altgraph - A basic graph library — altgraph 0.11 documentation + + + + + + + + + + + + + + +
      +
      +
      +
      + +
      +

      Altgraph - A basic graph library¶

      +

      altgraph is a fork of graphlib: a graph (network) package for constructing +graphs, BFS and DFS traversals, topological sort, shortest paths, etc. with +graphviz output.

      +

      The primary users of this package are macholib and modulegraph.

      + + +
      +

      Indices and tables¶

      + +
      +
      + + +
      +
      +
      +
      +
      +

      Table Of Contents

      + + +

      Next topic

      +

      Release history

      + + +
      +
      +
      +
      + + + + \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/license.html b/python/altgraph/doc/_build/html/license.html new file mode 100644 index 000000000..546f6e66a --- /dev/null +++ b/python/altgraph/doc/_build/html/license.html @@ -0,0 +1,136 @@ + + + + + + + + License — altgraph 0.11 documentation + + + + + + + + + + + + + + + +
      +
      +
      +
      + +
      +

      License¶

      +

      Copyright (c) 2004 Istvan Albert unless otherwise noted.

      +

      Parts are copyright (c) Bob Ippolito

      +

      Parts are copyright (c) 2010-2014 Ronald Oussoren

      +
      +

      MIT License¶

      +

      Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the “Software”), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do +so.

      +

      THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE.

      +
      +
      + + +
      +
      +
      +
      +
      +

      Table Of Contents

      + + +

      Previous topic

      +

      Release history

      +

      Next topic

      +

      altgraph — A Python Graph Library

      + + +
      +
      +
      +
      + + + + \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/objectgraph.html b/python/altgraph/doc/_build/html/objectgraph.html new file mode 100644 index 000000000..c9879f656 --- /dev/null +++ b/python/altgraph/doc/_build/html/objectgraph.html @@ -0,0 +1,283 @@ + + + + + + + + altgraph.ObjectGraph — Graphs of objecs with an identifier — altgraph 0.11 documentation + + + + + + + + + + + + + + + +
      +
      +
      +
      + +
      +

      altgraph.ObjectGraph — Graphs of objects with an identifier¶

      +
      +
      +class altgraph.ObjectGraph.ObjectGraph([graph[, debug]])¶
      +

      A graph of objects that have a “graphident” attribute. The +value of this attribute is the key for the object in the +graph.

      +

      The optional graph is a previously constructed +Graph.

      +

      The optional debug level controls the amount of debug output +(see msg(), msgin() and msgout()).

      +
      +

      Note

      +

      the altgraph library does not generate output, the +debug attribute and message methods are present for use +by subclasses.

      +
      +
      + +
      +
      +ObjectGraph.graph¶
      +

      An Graph object that contains +the graph data.

      +
      + +
      +
      +ObjectGraph.addNode(node)¶
      +

      Adds a node to the graph.

      +
      +

      Note

      +

      re-adding a node that was previously removed +using removeNode() will reinstate the previously +removed node.

      +
      +
      + +
      +
      +ObjectGraph.createNode(self, cls, name, *args, **kwds)¶
      +

      Creates a new node using cls(*args, **kwds) and adds that +node using addNode().

      +

      Returns the newly created node.

      +
      + +
      +
      +ObjectGraph.removeNode(node)¶
      +

      Removes a node from the graph when it exists. The node argument +is either a node object, or the graphident of a node.

      +
      + +
      +
      +ObjectGraph.createReferences(fromnode, tonode[, edge_data])¶
      +

      Creates a reference from fromnode to tonode. The optional +edge_data is associated with the edge.

      +

      Fromnode and tonode can either be node objects or the graphident +values for nodes.

      +
      + +
      +
      +altgraph.ObjectGraph.removeReference(fromnode, tonode)¶
      +

      Removes the reference from fromnode to tonode if it exists.

      +
      + +
      +
      +ObjectGraph.getRawIdent(node)¶
      +

      Returns the graphident attribute of node, or the graph itself +when node is None.

      +
      + +
      +
      +altgraph.ObjectGraph.getIdent(node)¶
      +

      Same as getRawIdent(), but only if the node is part +of the graph.

      +

      Node can either be an actual node object or the graphident of +a node.

      +
      + +
      +
      +ObjectGraph.findNode(node)¶
      +

      Returns a given node in the graph, or None when it cannot +be found.

      +

      Node is either an object with a graphident attribute or +the graphident attribute itself.

      +
      + +
      +
      +ObjectGraph.__contains__(node)¶
      +

      Returns True if node is a member of the graph. Node is either an +object with a graphident attribute or the graphident attribute itself.

      +
      + +
      +
      +ObjectGraph.flatten([condition[, start]])¶
      +

      Yield all nodes that are entirely reachable by condition +starting from the given start node or the graph root.

      +
      +

      Note

      +

      objects are only reachable from the graph root +when there is a reference from the root to the node +(either directly or through another node)

      +
      +
      + +
      +
      +ObjectGraph.nodes()¶
      +

      Yield all nodes in the graph.

      +
      + +
      +
      +ObjectGraph.get_edges(node)¶
      +

      Returns two iterators that yield the nodes reaching by +outgoing and incoming edges.

      +
      + +
      +
      +ObjectGraph.filterStack(filters)¶
      +

      Filter the ObjectGraph in-place by removing all edges to nodes that +do not match every filter in the given filter list

      +

      Returns a tuple containing the number of: +(nodes_visited, nodes_removed, nodes_orphaned)

      +
      + +
      +

      Debug output¶

      +
      +
      +ObjectGraph.debug¶
      +

      The current debug level.

      +
      + +
      +
      +ObjectGraph.msg(level, text, *args)¶
      +

      Print a debug message at the current indentation level when the current +debug level is level or less.

      +
      + +
      +
      +ObjectGraph.msgin(level, text, *args)¶
      +

      Print a debug message when the current debug level is level or less, +and increase the indentation level.

      +
      + +
      +
      +ObjectGraph.msgout(level, text, *args)¶
      +

      Decrease the indentation level and print a debug message when the +current debug level is level or less.

      +
      + +
      +
      + + +
      +
      +
      +
      +
      +

      Table Of Contents

      + + +

      Previous topic

      +

      altgraph.Graph — Basic directional graphs

      +

      Next topic

      +

      altgraph.GraphAlgo — Graph algorithms

      + + +
      +
      +
      +
      + + + + \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/objects.inv b/python/altgraph/doc/_build/html/objects.inv new file mode 100644 index 000000000..7c8a150fb Binary files /dev/null and b/python/altgraph/doc/_build/html/objects.inv differ diff --git a/python/altgraph/doc/_build/html/py-modindex.html b/python/altgraph/doc/_build/html/py-modindex.html new file mode 100644 index 000000000..dd60a9b2c --- /dev/null +++ b/python/altgraph/doc/_build/html/py-modindex.html @@ -0,0 +1,139 @@ + + + + + + + + Python Module Index — altgraph 0.11 documentation + + + + + + + + + + + + + + + + +
      +
      +
      +
      + + +

      Python Module Index

      + +
      + a +
      + + + + + + + + + + + + + + + + + + + + + + + + + +
       
      + a
      + altgraph + A directional graph for python
          + altgraph.Dot + Interface to the dot language as used by Graphviz..
          + altgraph.Graph + Basic directional graphs.
          + altgraph.GraphAlgo + Basic graph algorithms
          + altgraph.GraphStat + Functions providing various graph statistics
          + altgraph.GraphUtil + Utility functions
          + altgraph.ObjectGraph + A graph of objects that have a "graphident" attribute.
      + + +
      +
      +
      +
      +
      + + +
      +
      +
      +
      + + + + \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/search.html b/python/altgraph/doc/_build/html/search.html new file mode 100644 index 000000000..93acf8dc5 --- /dev/null +++ b/python/altgraph/doc/_build/html/search.html @@ -0,0 +1,105 @@ + + + + + + + + Search — altgraph 0.11 documentation + + + + + + + + + + + + + + + + + + + +
      +
      +
      +
      + +

      Search

      +
      + +

      + Please activate JavaScript to enable the search + functionality. +

      +
      +

      + From here you can search these documents. Enter your search + words into the box below and click "search". Note that the search + function will automatically search for all of the words. Pages + containing fewer words won't appear in the result list. +

      +
      + + + +
      + +
      + +
      + +
      +
      +
      +
      +
      +
      +
      +
      +
      + + + + \ No newline at end of file diff --git a/python/altgraph/doc/_build/html/searchindex.js b/python/altgraph/doc/_build/html/searchindex.js new file mode 100644 index 000000000..972760abe --- /dev/null +++ b/python/altgraph/doc/_build/html/searchindex.js @@ -0,0 +1 @@ +Search.setIndex({envversion:42,terms:{represent:[0,9],all:[2,3,5,6,8,9],code:[6,5],edg:[9,2],breadth:3,ellips:9,per:4,follow:3,degree_dist:[4,2],whose:9,privat:2,depend:[3,2],graph:2,graph_lib:0,tail:[3,9,2],program:9,present:[2,3,4,6,8,9],aris:1,hidden_node_list:3,merchant:1,sourc:[3,2],everi:[3,4,8,6],fals:[3,2],objec:7,upstream:6,veri:2,edgefn:9,level:8,py2app:[0,2],iter:[3,8,2],item:[4,2],describe_nod:3,round:9,self_loop:6,impli:1,crow:9,tee:9,past:2,pass:9,blue:9,index:7,hide:3,neg:5,abl:9,invok:9,current:[3,4,8,9,6],version:2,"new":[3,8,9,2],growth_num:6,oussoren:1,method:[0,9,2],restore_all_edg:3,metadata:2,subtre:2,gener:[6,8,9,2],even:5,coeffici:3,here:9,behaviour:2,same:[8,5,2],path:[0,5,9,7],along:5,modifi:[3,1],valu:[0,8,3],invis:9,search:[3,7],nodes_remov:8,larger:9,reason:9,amount:8,edge_data:[3,8],permit:1,action:1,implement:[6,2],getrawid:8,via:[0,9,3],repositori:[7,2],tweak:2,modul:[2,0,7,3,4,6,9],nodes_orphan:8,"while":[9,2],clust_coef:3,iterdot:9,filenam:9,visibl:3,instal:[9,2],txt:2,from:[1,2,3,5,6,8,9],etc:[3,7],visit:6,two:[3,8],suit:0,vee:9,call:[0,9],msg:8,type:9,until:[5,9],minor:2,more:[6,4,9],reachabl:[3,8],diamond:9,minim:2,altgraph:2,visual:0,indic:6,examin:5,particular:1,sourcecod:7,herebi:1,must:[3,4,9],none:[3,8,9],graphic:9,graphid:8,restor:3,alia:9,setup:2,work:[5,2],archiv:2,can:[0,2,3,6,8,9],purpos:1,root:8,control:[8,9],claim:1,process:9,graphstat:2,accept:2,all_nbr:3,alwai:2,end:[3,5,9],anoth:[8,9],"__iter__":[3,9],classifi:2,write:9,how:9,hop:3,instead:2,simpl:9,wolfram:6,head_id:3,after:[0,3],ronald:1,befor:[3,9],forw_bf:3,mai:[3,4,9],generate_scale_free_graph:6,associ:[3,8,1],stabil:2,third:3,bind:9,author:1,correspond:[3,9],shortest_path:5,caus:[5,2
],inform:9,tailor:0,callabl:3,untest:6,order:[3,5,2],furnish:1,edge_list:3,move:2,save_imag:9,graphviz:[0,9,7],dijkstra:5,through:[3,8],add_edg:[3,2],misfeatur:2,out_nbr:[3,9],still:2,paramet:[3,9],style:9,group:4,fit:1,fix:[9,2],tort:1,restore_nod:3,requir:2,hidden:[3,2],unpredicat:3,"return":[2,3,4,6,8,9],thei:[3,9],python:2,overal:9,createrefer:8,avg_hop:2,generate_random_graph:[6,2],fillcolor:9,verifi:[5,9],all_node_styl:9,now:2,bread:3,diagon:9,document:[9,1,2],name:[8,9],didn:2,arrows:9,iterdf:[3,2],leda:0,iff:3,mode:[4,9,2],found:[6,8],nodes_visit:8,side:2,mean:2,weight:9,list:[3,4,5,6,8,9],ensur:2,connect:[0,6,9,1,3],event:1,out:[0,4,9,1],all_degre:3,network:[0,7],newli:8,publish:1,neigbour:3,graphutil:2,invi:9,print:[3,8],correct:2,red:9,file_nam:9,edgevisitor:9,earlier:2,out_degre:3,manipul:0,free:[6,1],number_of_hidden_nod:3,base:[0,9,3],org:2,shortest:[0,5,7,3],modulegraph:[0,7],indent:8,convent:5,where:[3,6,2],keep:2,filter:[6,8],length:5,place:8,isn:[9,2],imposs:2,first:[3,6],oper:[3,2],softwar:1,major:2,back_bf:3,suffix:9,directli:8,save_dot:9,forw_topo_sort:[3,2],onc:9,number:[0,3,4,6,8,9],rank:9,restrict:1,fromt:8,alreadi:[3,9],messag:8,primari:7,size:[9,2],given:[8,3,5,9],silent:2,workaround:2,gif:9,data:[8,3,5,6],licens:7,system:9,construct:[8,0,7,9,3],attach:3,circl:9,testsuit:2,graphtyp:9,option:[3,8,2],copi:1,albert:1,travers:0,specifi:[3,9,2],mathworld:6,part:[8,9,1,2],holder:1,than:5,kind:1,nathan:0,provid:[3,9],remov:[6,8,2],tree:2,horizont:9,were:3,posit:5,randomli:6,fork:[0,7],macholib:[0,7],comput:[3,5],ran:9,well:0,ani:[3,1,2],dash:9,packag:[0,7,9,2],have:[6,8,2],need:9,seem:6,element:[3,2],sell:1,issu:[7,2],moreov:9,note:[9,1],also:[0,3,9,2],which:2,tupl:[8,3,4,6,2],singl:5,compat:[9,2],offload:9,distribut:[1,2],though:2,setuptool:2,previou:2,reach:[8,3,5],discov:5,most:9,sublicens:1,describe_edg:3,"class":[0,2],charg:1,renam:2,mininum:4,later:[3,4,2],request:[6,2],doe:[8,5],left:9,dot:2,base_nod:9,text:8,restore_edg:3,permiss:1,"_bf":2
,find:[0,5],nearer:5,onli:[8,4,5],get_hop:3,copyright:1,explain:2,configur:6,forev:2,should:[3,9],add_nod:[3,2],neato:[9,2],local:3,gone:2,get:4,express:1,stop:3,getid:8,filter_stack:6,acces:9,cannot:[6,8],edge_styl:[9,2],drawn:9,increas:8,liabl:1,createnod:8,hide_edg:3,yield:[3,8,9,2],patch:2,"default":[3,4,9],bad:0,statist:0,contain:[8,2],attribut:2,grapherror:[0,6,3,2],view:2,set:[6,9,2],all_edg:3,orphan:6,iterdata:[3,2],displai:9,datatyp:0,see:[8,9],result:[3,4,9,2],arg:8,fail:2,closer:9,correctli:5,label:9,written:[0,9],won:[3,9],between:[3,4,2],"import":9,msgout:8,spars:2,kei:8,reinstat:8,entir:8,addit:0,both:9,findnod:8,extens:[0,2],hashabl:3,equal:5,contract:1,get_edg:8,multi_edg:6,instanc:6,edge_num:6,node_data:3,kwd:8,point:[6,9],color:9,unittest:2,walk:6,suppli:3,mistak:5,assum:5,duplic:6,liabil:1,creat:[6,8,9,2],oder:6,due:2,been:0,compon:0,edge_by_nod:3,box:9,imag:9,argument:[3,4,8,9,2],neighbour:3,func:9,ranksep:9,those:[3,4],number_of_nod:3,save:9,look:9,solid:9,removenod:8,properti:5,save_img:[9,2],node_styl:9,back_bfs_subgraph:[3,2],calcul:4,behavior:3,exist:[8,3,6,2],loos:0,loop:2,spawn:9,almost:2,readm:2,destin:3,cluster:3,itself:[6,8],incom:[3,4,8,6],tediou:9,rankdir:9,grant:1,perform:6,alphabet:2,make:5,format:9,back_topo_sort:[3,2],node_list:3,member:8,inconsist:0,inch:9,nodevisitor:9,temp_dot:9,inc:4,complet:2,http:2,closest:6,optim:0,effect:2,rais:[6,0,5,3,2],user:[7,2],distutil:2,typic:[3,9],expand:0,noninfring:1,center:[3,4],hidden_edge_list:3,fromnod:8,edge_by_id:3,person:1,without:[3,9,1,2],command:9,thi:[7,1,2,3,5,6,8,9],model:0,self:8,tonod:8,distanc:3,identifi:7,less:[6,8],when:[0,2,3,4,5,6,8,9],obtain:1,out_edg:3,shape:9,previous:[3,8],msgin:8,expos:9,tail_id:3,had:2,except:[0,5,3],add:[3,8,9,2],appli:6,input:[5,9],successor:3,match:[6,8],bin:4,around:3,transpar:9,start_id:3,removerefer:8,objectgraph:[7,2],specif:0,deprec:9,arbitrari:3,either:8,fill:9,output:2,page:[7,9],revers:3,deal:1,some:[3,5,2],"_df":2,"export":2,flatten:8,number
_of_hidden_edg:3,file_typ:9,separ:9,scale:6,"__contains__":[3,8,2],pombredann:2,shall:1,subclass:8,tracker:[7,2],exit:9,condit:[3,8],damag:1,topolog:[0,7],refer:[3,8,9,2],ippolito:1,object:[3,8,2],run:2,bold:9,inspect:6,broken:2,step:6,powerlaw:6,although:6,fulli:6,about:9,actual:[3,8,2],dotti:9,restore_all_nod:3,degre:6,outgo:[3,4,8],constructor:[9,2],denni:0,effici:0,forw_bfs_subgraph:[3,2],digraph:9,basenam:9,within:9,three:3,warranti:1,right:[9,1],empti:[3,9],inc_nbr:3,chang:2,merg:1,triangl:9,wai:9,whom:1,aren:3,addnod:8,support:[0,2],avail:2,start:[8,0,5,6,3],includ:[0,9,1,2],replac:9,forward:[3,2],"function":[0,9,2],head:[3,9,6],subgraph:3,last_good:6,bin_num:4,state:[0,2],newer:0,mydot:9,inc_edg:3,line:9,"true":[3,8,6],bug:2,count:4,attr:9,consist:[3,6,2],possibl:2,whether:1,bugfix:2,access:3,maximum:[3,4],limit:[4,1],otherwis:[3,9,1],significantli:0,sort:[0,7],featur:[0,9,2],growth_n:6,inv:9,doesn:[6,2],repres:3,decreas:8,file:[9,1,2],guarante:5,bob:1,proport:[3,6],check:2,probabl:6,arrowhead:9,unhidden:3,"2to3":2,detail:9,hide_nod:3,other:[3,1,2],node_num:6,test:2,you:2,labelangl:9,node:[9,2],draw:9,intend:9,inc_degre:3,sequenc:3,vertex:5,istvan:1,create_nod:[3,2],graphlib:[0,7],unless:[9,1],bitbucket:[7,2],sphinx:2,longer:2,filterstack:8,number_of_edg:3,descript:6,apear:3,depth:[3,6],time:3,backward:9},objtypes:{"0":"py:module","1":"py:method","2":"py:class","3":"py:function","4":"py:data","5":"py:exception"},objnames:{"0":["py","module","Python module"],"1":["py","method","Python method"],"2":["py","class","Python class"],"3":["py","function","Python function"],"4":["py","data","Python data"],"5":["py","exception","Python exception"]},filenames:["core","license","changelog","graph","graphstat","graphalgo","graphutil","index","objectgraph","dot"],titles:["altgraph — A Python Graph Library","License","Release history","altgraph.Graph — Basic directional graphs","altgraph.GraphStat — Functions providing various graph statistics","altgraph.GraphAlgo — Graph 
algorithms","altgraph.GraphUtil — Utility functions","Altgraph - A basic graph library","altgraph.ObjectGraph — Graphs of objecs with an identifier","altgraph.Dot — Interface to the dot language"],objects:{"":{altgraph:[0,0,0,"-"]},"altgraph.Graph":{Graph:[3,2,1,""],node_data:[3,1,1,""]},"altgraph.Dot.Dot":{node_style:[9,1,1,""],style:[9,1,1,""],all_node_style:[9,1,1,""],display:[9,1,1,""]},"altgraph.Graph.Graph":{number_of_hidden_nodes:[3,1,1,""],out_edges:[3,1,1,""],inc_degree:[3,1,1,""],forw_bfs_subgraph:[3,1,1,""],edge_list:[3,1,1,""],add_node:[3,1,1,""],hidden_node_list:[3,1,1,""],iterdata:[3,1,1,""],clust_coef:[3,1,1,""],all_edges:[3,1,1,""],describe_edge:[3,1,1,""],all_degree:[3,1,1,""],get_hops:[3,1,1,""],hide_node:[3,1,1,""],out_degree:[3,1,1,""],edge_by_node:[3,1,1,""],"__contains__":[3,1,1,""],restore_edge:[3,1,1,""],describe_node:[3,1,1,""],back_topo_sort:[3,1,1,""],node_list:[3,1,1,""],forw_topo_sort:[3,1,1,""],tail:[3,1,1,""],restore_all_nodes:[3,1,1,""],all_nbrs:[3,1,1,""],restore_all_edges:[3,1,1,""],restore_node:[3,1,1,""],head:[3,1,1,""],number_of_nodes:[3,1,1,""],iterdfs:[3,1,1,""],"__iter__":[3,1,1,""],connected:[3,1,1,""],forw_bfs:[3,1,1,""],add_edge:[3,1,1,""],number_of_hidden_edges:[3,1,1,""],inc_nbrs:[3,1,1,""],out_nbrs:[3,1,1,""],hidden_edge_list:[3,1,1,""],inc_edges:[3,1,1,""],number_of_edges:[3,1,1,""],back_bfs_subgraph:[3,1,1,""],edge_data:[3,1,1,""],hide_edge:[3,1,1,""],edge_by_id:[3,1,1,""],back_bfs:[3,1,1,""]},"altgraph.GraphAlgo":{dijkstra:[5,3,1,""],shortest_path:[5,3,1,""]},altgraph:{Graph:[3,0,0,"-"],GraphStat:[4,0,0,"-"],GraphAlgo:[5,0,0,"-"],GraphUtil:[6,0,0,"-"],GraphError:[0,5,1,""],ObjectGraph:[8,0,0,"-"],Dot:[9,0,0,"-"]},"altgraph.Dot":{save_image:[9,1,1,""],save_dot:[9,1,1,""],iterdot:[9,1,1,""],"__iter__":[9,1,1,""],edge_style:[9,1,1,""],Dot:[9,2,1,""]},"altgraph.GraphStat":{degree_dist:[4,3,1,""]},"altgraph.ObjectGraph":{removeReference:[8,1,1,""],getIdent:[8,1,1,""],ObjectGraph:[8,2,1,""]},"altgraph.GraphUtil":{generate_r
andom_graph:[6,3,1,""],generate_scale_free_graph:[6,3,1,""],filter_stack:[6,3,1,""]},"altgraph.ObjectGraph.ObjectGraph":{findNode:[8,1,1,""],createNode:[8,1,1,""],msgin:[8,1,1,""],removeNode:[8,1,1,""],addNode:[8,1,1,""],"__contains__":[8,1,1,""],graph:[8,4,1,""],createReferences:[8,1,1,""],filterStack:[8,1,1,""],msg:[8,1,1,""],flatten:[8,1,1,""],debug:[8,4,1,""],get_edges:[8,1,1,""],getRawIdent:[8,1,1,""],nodes:[8,1,1,""],msgout:[8,1,1,""]}},titleterms:{node:3,identifi:8,edg:3,graphstat:4,direct:[3,9],indic:7,tabl:7,onlin:7,histori:2,librari:[0,7],graph:[0,7,3,4,5,8,9],licens:1,custom:9,graphutil:6,valid:9,interfac:9,languag:9,basic:[3,7],method:3,updat:9,"function":[6,4],non:9,altgraph:[0,7,3,4,5,6,8,9],resourc:7,variou:4,python:0,relat:3,usag:9,util:6,objec:8,releas:2,objectgraph:8,"class":9,travers:3,algorithm:5,provid:4,graphalgo:5,mit:1,exampl:9,statist:[3,4],debug:8,output:[8,9],attribut:9,emit:9,dot:9}}) \ No newline at end of file diff --git a/python/altgraph/doc/changelog.rst b/python/altgraph/doc/changelog.rst new file mode 100644 index 000000000..02fd412d6 --- /dev/null +++ b/python/altgraph/doc/changelog.rst @@ -0,0 +1,185 @@ +Release history +=============== + +0.12 +---- + +- Added ``ObjectGraph.edgeData`` to retrieve the edge data + from a specific edge. + +- Added ``AltGraph.update_edge_data`` and ``ObjectGraph.updateEdgeData`` + to update the data associated with a graph edge. + +0.11 +---- + +- Stabilize the order of elements in dot file exports, + patch from bitbucket user 'pombredanne'. + +- Tweak setup.py file to remove dependency on distribute (but + keep the dependency on setuptools) + + +0.10.2 +------ + +- There where no classifiers in the package metadata due to a bug + in setup.py + +0.10.1 +------ + +This is a bugfix release + +Bug fixes: + +- Issue #3: The source archive contains a README.txt + while the setup file refers to ReadMe.txt. 
+ + This is caused by a misfeature in distutils, as a + workaround I've renamed ReadMe.txt to README.txt + in the source tree and setup file. + + +0.10 +----- + +This is a minor feature release + +Features: + +- Do not use "2to3" to support Python 3. + + As a side effect of this altgraph now supports + Python 2.6 and later, and no longer supports + earlier releases of Python. + +- The order of attributes in the Dot output + is now always alphabetical. + + With this change the output will be consistent + between runs and Python versions. + +0.9 +--- + +This is a minor bugfix release + +Features: + +- Added ``altgraph.ObjectGraph.ObjectGraph.nodes``, a method + yielding all nodes in an object graph. + +Bugfixes: + +- The 0.8 release didn't work with py2app when using + python 3.x. + + +0.8 +----- + +This is a minor feature release. The major new feature +is a extensive set of unittests, which explains almost +all other changes in this release. + +Bugfixes: + +- Installing failed with Python 2.5 due to using a distutils + class that isn't available in that version of Python + (issue #1 on the issue tracker) + +- ``altgraph.GraphStat.degree_dist`` now actually works + +- ``altgraph.Graph.add_edge(a, b, create_nodes=False)`` will + no longer create the edge when one of the nodes doesn't + exist. + +- ``altgraph.Graph.forw_topo_sort`` failed for some sparse graphs. + +- ``altgraph.Graph.back_topo_sort`` was completely broken in + previous releases. + +- ``altgraph.Graph.forw_bfs_subgraph`` now actually works. + +- ``altgraph.Graph.back_bfs_subgraph`` now actually works. + +- ``altgraph.Graph.iterdfs`` now returns the correct result + when the ``forward`` argument is ``False``. + +- ``altgraph.Graph.iterdata`` now returns the correct result + when the ``forward`` argument is ``False``. + + +Features: + +- The ``altgraph.Graph`` constructor now accepts an argument + that contains 2- and 3-tuples instead of requireing that + all items have the same size. 
The (optional) argument can now + also be any iterator. + +- ``altgraph.Graph.Graph.add_node`` has no effect when you + add a hidden node. + +- The private method ``altgraph.Graph._bfs`` is no longer + present. + +- The private method ``altgraph.Graph._dfs`` is no longer + present. + +- ``altgraph.ObjectGraph`` now has a ``__contains__`` methods, + which means you can use the ``in`` operator to check if a + node is part of a graph. + +- ``altgraph.GraphUtil.generate_random_graph`` will raise + ``GraphError`` instead of looping forever when it is + impossible to create the requested graph. + +- ``altgraph.Dot.edge_style`` raises ``GraphError`` when + one of the nodes is not present in the graph. The method + silently added the tail in the past, but without ensuring + a consistent graph state. + +- ``altgraph.Dot.save_img`` now works when the mode is + ``"neato"``. + +0.7.2 +----- + +This is a minor bugfix release + +Bugfixes: + +- distutils didn't include the documentation subtree + +0.7.1 +----- + +This is a minor feature release + +Features: + +- Documentation is now generated using `sphinx `_ + and can be viewed at . + +- The repository has moved to bitbucket + +- ``altgraph.GraphStat.avg_hops`` is no longer present, the function had no + implementation and no specified behaviour. + +- the module ``altgraph.compat`` is gone, which means altgraph will no + longer work with Python 2.3. + + +0.7.0 +----- + +This is a minor feature release. + +Features: + +- Support for Python 3 + +- It is now possible to run tests using 'python setup.py test' + + (The actual testsuite is still very minimal though) diff --git a/python/altgraph/doc/conf.py b/python/altgraph/doc/conf.py new file mode 100644 index 000000000..cd3fd9912 --- /dev/null +++ b/python/altgraph/doc/conf.py @@ -0,0 +1,209 @@ +# -*- coding: utf-8 -*- +# +# altgraph documentation build configuration file, created by +# sphinx-quickstart on Tue Aug 31 11:04:49 2010. 
+# +# This file is execfile()d with the current directory set to its containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys, os + +def get_version(): + fn = os.path.join( + os.path.dirname(os.path.dirname(os.path.abspath(__file__))), + 'setup.cfg') + for ln in open(fn): + if ln.startswith('version'): + version = ln.split('=')[-1].strip() + return version + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +#sys.path.append(os.path.abspath('.')) +sys.path.insert(0, + os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +# -- General configuration ----------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be extensions +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = ['sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.autodoc' ] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'altgraph' +copyright = u'2010-2011, Ronald Oussoren, Bob Ippolito, 2004 Istvan Albert' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = get_version() +# The full version, including alpha/beta/rc tags. 
+release = version + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of documents that shouldn't be included in the build. +#unused_docs = [] + +# List of directories, relative to source directory, that shouldn't be searched +# for source files. +exclude_trees = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + + +# -- Options for HTML output --------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. Major themes that come with +# Sphinx are currently 'default' and 'sphinxdoc'. +html_theme = 'nature' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. 
+#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_use_modindex = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +html_show_sourcelink = False + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = '' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'altgraphdoc' + + +# -- Options for LaTeX output -------------------------------------------------- + +# The paper size ('letter' or 'a4'). 
+#latex_paper_size = 'letter' + +# The font size ('10pt', '11pt' or '12pt'). +#latex_font_size = '10pt' + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass [howto/manual]). +latex_documents = [ + ('index', 'altgraph.tex', u'altgraph Documentation', + u'Ronald Oussoren', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# Additional stuff for the LaTeX preamble. +#latex_preamble = '' + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_use_modindex = True + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = {'python': ('http://docs.python.org/', None) } diff --git a/python/altgraph/doc/core.rst b/python/altgraph/doc/core.rst new file mode 100644 index 000000000..8288f6a94 --- /dev/null +++ b/python/altgraph/doc/core.rst @@ -0,0 +1,26 @@ +:mod:`altgraph` --- A Python Graph Library +================================================== + +.. module:: altgraph + :synopsis: A directional graph for python + +altgraph is a fork of `graphlib `_ tailored +to use newer Python 2.3+ features, including additional support used by the +py2app suite (modulegraph and macholib, specifically). + +altgraph is a python based graph (network) representation and manipulation package. +It has started out as an extension to the `graph_lib module `_ +written by Nathan Denny it has been significantly optimized and expanded. + +The :class:`altgraph.Graph.Graph` class is loosely modeled after the `LEDA `_ +(Library of Efficient Datatypes) representation. 
The library +includes methods for constructing graphs, BFS and DFS traversals, +topological sort, finding connected components, shortest paths as well as a number of +graph statistics functions. The library can also visualize graphs +via `graphviz `_. + + +.. exception:: GraphError + + Exception raised when methods are called with bad values or + an inconsistent state. diff --git a/python/altgraph/doc/dot.rst b/python/altgraph/doc/dot.rst new file mode 100644 index 000000000..3848c488a --- /dev/null +++ b/python/altgraph/doc/dot.rst @@ -0,0 +1,224 @@ +:mod:`altgraph.Dot` --- Interface to the dot language +===================================================== + +.. module:: altgraph.Dot + :synopsis: Interface to the dot language as used by Graphviz. + +The :py:mod:`~altgraph.Dot` module provides a simple interface to the +file format used in the `graphviz`_ program. The module is intended to +offload the most tedious part of the process (the **dot** file generation) +while transparently exposing most of its features. + +.. _`graphviz`: `_ + +To display the graphs or to generate image files the `graphviz`_ +package needs to be installed on the system, moreover the :command:`dot` and :command:`dotty` programs must +be accessible in the program path so that they can be run from processes spawned +within the module.
+ +Example usage +------------- + +Here is a typical usage:: + + from altgraph import Graph, Dot + + # create a graph + edges = [ (1,2), (1,3), (3,4), (3,5), (4,5), (5,4) ] + graph = Graph.Graph(edges) + + # create a dot representation of the graph + dot = Dot.Dot(graph) + + # display the graph + dot.display() + + # save the dot representation into the mydot.dot file + dot.save_dot(file_name='mydot.dot') + + # save dot file as gif image into the graph.gif file + dot.save_img(file_name='graph', file_type='gif') + + +Directed graph and non-directed graph +------------------------------------- + +Dot class can use for both directed graph and non-directed graph +by passing *graphtype* parameter. + +Example:: + + # create directed graph(default) + dot = Dot.Dot(graph, graphtype="digraph") + + # create non-directed graph + dot = Dot.Dot(graph, graphtype="graph") + + +Customizing the output +---------------------- + +The graph drawing process may be customized by passing +valid :command:`dot` parameters for the nodes and edges. For a list of all +parameters see the `graphviz`_ documentation. + +Example:: + + # customizing the way the overall graph is drawn + dot.style(size='10,10', rankdir='RL', page='5, 5' , ranksep=0.75) + + # customizing node drawing + dot.node_style(1, label='BASE_NODE',shape='box', color='blue' ) + dot.node_style(2, style='filled', fillcolor='red') + + # customizing edge drawing + dot.edge_style(1, 2, style='dotted') + dot.edge_style(3, 5, arrowhead='dot', label='binds', labelangle='90') + dot.edge_style(4, 5, arrowsize=2, style='bold') + + + .. note:: + + dotty (invoked via :py:func:`~altgraph.Dot.display`) may not be able to + display all graphics styles. To verify the output save it to an image + file and look at it that way. 
+ +Valid attributes +---------------- + +- dot styles, passed via the :py:meth:`Dot.style` method:: + + rankdir = 'LR' (draws the graph horizontally, left to right) + ranksep = number (rank separation in inches) + +- node attributes, passed via the :py:meth:`Dot.node_style` method:: + + style = 'filled' | 'invisible' | 'diagonals' | 'rounded' + shape = 'box' | 'ellipse' | 'circle' | 'point' | 'triangle' + +- edge attributes, passed via the :py:meth:`Dot.edge_style` method:: + + style = 'dashed' | 'dotted' | 'solid' | 'invis' | 'bold' + arrowhead = 'box' | 'crow' | 'diamond' | 'dot' | 'inv' | 'none' | 'tee' | 'vee' + weight = number (the larger the number the closer the nodes will be) + +- valid `graphviz colors `_ + +- for more details on how to control the graph drawing process see the + `graphviz reference `_. + + +Class interface +--------------- + +.. class:: Dot(graph[, nodes[, edgefn[, nodevisitor[, edgevisitor[, name[, dot[, dotty[, neato[, graphtype]]]]]]]]]) + + Creates a new Dot generator based on the specified + :class:`Graph `. The Dot generator won't reference + the *graph* once it is constructed. + + If the *nodes* argument is present it is the list of nodes to include + in the graph, otherwise all nodes in *graph* are included. + + If the *edgefn* argument is present it is a function that yields the + nodes connected to another node, this defaults to + :meth:`graph.out_nbr `. The constructor won't + add edges to the dot file unless both the head and tail of the edge + are in *nodes*. + + If the *name* is present it specifies the name of the graph in the resulting + dot file. The default is ``"G"``. + + The functions *nodevisitor* and *edgevisitor* return the default style + for a given edge or node (both default to functions that return an empty + style). + + The arguments *dot*, *dotty* and *neato* are used to pass the path to + the corresponding `graphviz`_ command. + + +Updating graph attributes +......................... + +.. 
method:: Dot.style(\**attr) + + Sets the overall style (graph attributes) to the given attributes. + + See `Valid Attributes`_ for more information about the attributes. + +.. method:: Dot.node_style(node, \**attr) + + Sets the style for *node* to the given attributes. + + This method will add *node* to the graph when it isn't already + present. + + See `Valid Attributes`_ for more information about the attributes. + +.. method:: Dot.all_node_style(\**attr) + + Replaces the current style for all nodes + + +.. method:: edge_style(head, tail, \**attr) + + Sets the style of an edge to the given attributes. The edge will + be added to the graph when it isn't already present, but *head* + and *tail* must both be valid nodes. + + See `Valid Attributes`_ for more information about the attributes. + + + +Emitting output +............... + +.. method:: Dot.display([mode]) + + Displays the current graph via dotty. + + If the *mode* is ``"neato"`` the dot file is processed with + the neato command before displaying. + + This method won't return until the dotty command exits. + +.. method:: save_dot(filename) + + Saves the current graph representation into the given file. + + .. note:: + + For backward compatibility reasons this method can also + be called without an argument, it will then write the graph + into a fixed filename (present in the attribute :data:`Graph.temp_dot`). + + This feature is deprecated and should not be used. + + +.. method:: save_image(file_name[, file_type[, mode]]) + + Saves the current graph representation as an image file. The output + is written into a file whose basename is *file_name* and whose suffix + is *file_type*. + + The *file_type* specifies the type of file to write, the default + is ``"gif"``. + + If the *mode* is ``"neato"`` the dot file is processed with + the neato command before displaying. + + .. 
note:: + + For backward compatibility reasons this method can also + be called without an argument, it will then write the graph + with a fixed basename (``"out"``). + + This feature is deprecated and should not be used. + +.. method:: iterdot() + + Yields all lines of a `graphviz`_ input file (including line endings). + +.. method:: __iter__() + + Alias for the :meth:`iterdot` method. diff --git a/python/altgraph/doc/graph.rst b/python/altgraph/doc/graph.rst new file mode 100644 index 000000000..502a2186a --- /dev/null +++ b/python/altgraph/doc/graph.rst @@ -0,0 +1,305 @@ +:mod:`altgraph.Graph` --- Basic directional graphs +================================================== + +.. module:: altgraph.Graph + :synopsis: Basic directional graphs. + +The module :mod:`altgraph.Graph` provides a class :class:`Graph` that +represents a directed graph with *N* nodes and *E* edges. + +.. class:: Graph([edges]) + + Constructs a new empty :class:`Graph` object. If the optional + *edges* parameter is supplied, updates the graph by adding the + specified edges. + + All of the elements in *edges* should be tuples with two or three + elements. The first two elements of the tuple are the source and + destination node of the edge, the optional third element is the + edge data. The source and destination nodes are added to the graph + when the aren't already present. + + +Node related methods +-------------------- + +.. method:: Graph.add_node(node[, node_data]) + + Adds a new node to the graph if it is not already present. The new + node must be a hashable object. + + Arbitrary data can be attached to the node via the optional *node_data* + argument. + + .. note:: the node also won't be added to the graph when it is + present but currently hidden. + + +.. method:: Graph.hide_node(node) + + Hides a *node* from the graph. The incoming and outgoing edges of + the node will also be hidden. + + Raises :class:`altgraph.GraphError` when the node is not (visible) + node of the graph. 
+ + +.. method:: Graph.restore_node(node) + + Restores a previously hidden *node*. The incoming and outgoing + edges of the node are also restored. + + Raises :class:`altgraph.GraphError` when the node is not a hidden + node of the graph. + +.. method:: Graph.restore_all_nodes() + + Restores all hidden nodes. + +.. method:: Graph.number_of_nodes() + + Return the number of visible nodes in the graph. + +.. method:: Graph.number_of_hidden_nodes() + + Return the number of hidden nodes in the graph. + +.. method:: Graph.node_list() + + Return a list with all visible nodes in the graph. + +.. method:: Graph.hidden_node_list() + + Return a list with all hidden nodes in the graph. + +.. method:: node_data(node) + + Return the data associated with the *node* when it was + added. + +.. method:: Graph.describe_node(node) + + Returns *node*, the node's data and the lists of outgoing + and incoming edges for the node. + + .. note:: + + the edge lists should not be modified, doing so + can result in unpredicatable behavior. + +.. method:: Graph.__contains__(node) + + Returns True iff *node* is a node in the graph. This + method is accessed through the *in* operator. + +.. method:: Graph.__iter__() + + Yield all nodes in the graph. + +.. method:: Graph.out_edges(node) + + Return the list of outgoing edges for *node* + +.. method:: Graph.inc_edges(node) + + Return the list of incoming edges for *node* + +.. method:: Graph.all_edges(node) + + Return the list of incoming and outgoing edges for *node* + +.. method:: Graph.out_degree(node) + + Return the number of outgoing edges for *node*. + +.. method:: Graph.inc_degree(node) + + Return the number of incoming edges for *node*. + +.. method:: Graph.all_degree(node) + + Return the number of edges (incoming or outgoing) for *node*. + +Edge related methods +-------------------- + +.. method:: Graph.add_edge(head_id, tail_id [, edge data [, create_nodes]]) + + Adds a directed edge from *head_id* to *tail_id*. 
Arbitrary data can + be added via *edge_data*. When *create_nodes* is *True* (the default), + *head_id* and *tail_id* will be added to the graph when the aren't + already present. + +.. method:: Graph.hide_edge(edge) + + Hides an edge from the graph. The edge may be unhidden at some later + time. + +.. method:: Graph.restore_edge(edge) + + Restores a previously hidden *edge*. + +.. method:: Graph.restore_all_edges() + + Restore all edges that were hidden before, except for edges + referring to hidden nodes. + +.. method:: Graph.edge_by_node(head, tail) + + Return the edge ID for an edge from *head* to *tail*, + or :data:`None` when no such edge exists. + +.. method:: Graph.edge_by_id(edge) + + Return the head and tail of the *edge* + +.. method:: Graph.edge_data(edge) + + Return the data associated with the *edge*. + +.. method:: Graph.update_edge_data(edge, data) + + Replace the edge data for *edge* by *data*. Raises + :exc:`KeyError` when the edge does not exist. + + .. versionadded:: 0.12 + +.. method:: Graph.head(edge) + + Return the head of an *edge* + +.. method:: Graph.tail(edge) + + Return the tail of an *edge* + +.. method:: Graph.describe_edge(edge) + + Return the *edge*, the associated data, its head and tail. + +.. method:: Graph.number_of_edges() + + Return the number of visible edges. + +.. method:: Graph.number_of_hidden_edges() + + Return the number of hidden edges. + +.. method:: Graph.edge_list() + + Returns a list with all visible edges in the graph. + +.. method:: Graph.hidden_edge_list() + + Returns a list with all hidden edges in the graph. + +Graph traversal +--------------- + +.. method:: Graph.out_nbrs(node) + + Return a list of all nodes connected by outgoing edges. + +.. method:: Graph.inc_nbrs(node) + + Return a list of all nodes connected by incoming edges. + +.. method:: Graph.all_nbrs(node) + + Returns a list of nodes connected by an incoming or outgoing edge. + +.. 
method:: Graph.forw_topo_sort() + + Return a list of nodes where the successors (based on outgoing + edges) of any given node appear in the sequence after that node. + +.. method:: Graph.back_topo_sort() + + Return a list of nodes where the successors (based on incoming + edges) of any given node appear in the sequence after that node. + +.. method:: Graph.forw_bfs_subgraph(start_id) + + Return a subgraph consisting of the breadth first + reachable nodes from *start_id* based on their outgoing edges. + + +.. method:: Graph.back_bfs_subgraph(start_id) + + Return a subgraph consisting of the breadth first + reachable nodes from *start_id* based on their incoming edges. + +.. method:: Graph.iterdfs(start[, end[, forward]]) + + Yield nodes in a depth first traversal starting at the *start* + node. + + If *end* is specified traversal stops when reaching that node. + + If forward is True (the default) edges are traversed in forward + direction, otherwise they are traversed in reverse direction. + +.. method:: Graph.iterdata(start[, end[, forward[, condition]]]) + + Yield the associated data for nodes in a depth first traversal + starting at the *start* node. This method will not yield values for nodes + without associated data. + + If *end* is specified traversal stops when reaching that node. + + If *condition* is specified and the condition callable returns + False for the associated data this method will not yield the + associated data and will not follow the edges for the node. + + If forward is True (the default) edges are traversed in forward + direction, otherwise they are traversed in reverse direction. + +.. method:: Graph.forw_bfs(start[, end]) + + Returns a list of nodes starting at *start* in some breadth first + search order (following outgoing edges). + + When *end* is specified iteration stops at that node. + +.. method:: Graph.back_bfs(start[, end]) + + Returns a list of nodes starting at *start* in some breadth first + search order (following incoming edges).
+ + When *end* is specified iteration stops at that node. + +.. method:: Graph.get_hops(start[, end[, forward]]) + + Computes the hop distance to all nodes centered around a specified node. + + First order neighbours are at hop 1, their neigbours are at hop 2 etc. + Uses :py:meth:`forw_bfs` or :py:meth:`back_bfs` depending on the value of + the forward parameter. + + If the distance between all neighbouring nodes is 1 the hop number + corresponds to the shortest distance between the nodes. + + Typical usage:: + + >>> print graph.get_hops(1, 8) + >>> [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)] + # node 1 is at 0 hops + # node 2 is at 1 hop + # ... + # node 8 is at 5 hops + + +Graph statistics +---------------- + +.. method:: Graph.connected() + + Returns True iff every node in the graph can be reached from + every other node. + +.. method:: Graph.clust_coef(node) + + Returns the local clustering coefficient of node. + + The local cluster coefficient is the proportion of the actual number + of edges between neighbours of node and the maximum number of + edges between those nodes. diff --git a/python/altgraph/doc/graphalgo.rst b/python/altgraph/doc/graphalgo.rst new file mode 100644 index 000000000..84d492f44 --- /dev/null +++ b/python/altgraph/doc/graphalgo.rst @@ -0,0 +1,26 @@ +:mod:`altgraph.GraphAlgo` --- Graph algorithms +================================================== + +.. module:: altgraph.GraphAlgo + :synopsis: Basic graphs algoritms + +.. function:: dijkstra(graph, start[, end]) + + Dijkstra's algorithm for shortest paths. + + Find shortest paths from the start node to all nodes nearer + than or equal to the *end* node. The edge data is assumed to be the edge length. + + .. note:: + + Dijkstra's algorithm is only guaranteed to work correctly when all edge lengths are positive. 
+ This code does not verify this property for all edges (only the edges examined until the end + vertex is reached), but will correctly compute shortest paths even for some graphs with negative + edges, and will raise an exception if it discovers that a negative edge has caused it to make a mistake. + + +.. function:: shortest_path(graph, start, end) + + Find a single shortest path from the given start node to the given end node. + The input has the same conventions as :func:`dijkstra`. The output is a list + of the nodes in order along the shortest path. diff --git a/python/altgraph/doc/graphstat.rst b/python/altgraph/doc/graphstat.rst new file mode 100644 index 000000000..0931a12dd --- /dev/null +++ b/python/altgraph/doc/graphstat.rst @@ -0,0 +1,25 @@ +:mod:`altgraph.GraphStat` --- Functions providing various graph statistics +========================================================================== + +.. module:: altgraph.GraphStat + :synopsis: Functions providing various graph statistics + +The module :mod:`altgraph.GraphStat` provides functions that calculate +graph statistics. Currently there is only one such function; more may +be added later. + +.. function:: degree_dist(graph[, limits[, bin_num[, mode]]]) + + Groups the number of edges per node into *bin_num* bins + and returns the list of those bins. Every item in the result + is a tuple with the center of the bin and the number of items + in that bin. + + When the *limits* argument is present it must be a tuple with + the minimum and maximum number of edges that get binned (that + is, when *limits* is ``(4, 10)`` only nodes with between 4 + and 10 edges get counted). + + The *mode* argument is used to count incoming (``'inc'``) or + outgoing (``'out'``) edges. The default is to count the outgoing + edges.
diff --git a/python/altgraph/doc/graphutil.rst b/python/altgraph/doc/graphutil.rst new file mode 100644 index 000000000..c07836df8 --- /dev/null +++ b/python/altgraph/doc/graphutil.rst @@ -0,0 +1,55 @@ +:mod:`altgraph.GraphUtil` --- Utility functions +================================================ + +.. module:: altgraph.GraphUtil + :synopsis: Utility functions + +The module :mod:`altgraph.GraphUtil` performs a number of more +or less useful utility functions. + +.. function:: generate_random_graph(node_num, edge_num[, self_loops[, multi_edges]) + + Generates and returns a :class:`Graph ` instance + with *node_num* nodes randomly connected by *edge_num* edges. + + When *self_loops* is present and True there can be edges that point from + a node to itself. + + When *multi_edge* is present and True there can be duplicate edges. + + This method raises :class:`GraphError `_ + +.. function:: filter_stack(graph, head, filters) + + Perform a depth-first oder walk of the graph starting at *head* and + apply all filter functions in *filters* on the node data of the nodes + found. + + Returns (*visited*, *removes*, *orphans*), where + + * *visited*: the set of visited nodes + + * *removes*: the list of nodes where the node data doesn't match + all *filters*. + + * *orphans*: list of tuples (*last_good*, *node*), where + node is not in *removes* and one of the nodes that is connected + by an incoming edge is in *removes*. *Last_good* is the + closest upstream node that is not in *removes*. diff --git a/python/altgraph/doc/index.rst b/python/altgraph/doc/index.rst new file mode 100644 index 000000000..1e8d504ed --- /dev/null +++ b/python/altgraph/doc/index.rst @@ -0,0 +1,41 @@ +.. altgraph documentation master file, created by + sphinx-quickstart on Tue Aug 31 11:04:49 2010. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. 
+ +Altgraph - A basic graph library +================================ + +altgraph is a fork of graphlib: a graph (network) package for constructing +graphs, BFS and DFS traversals, topological sort, shortest paths, etc. with +graphviz output. + +The primary users of this package are `macholib `_ and `modulegraph `_. + +.. toctree:: + :maxdepth: 1 + + changelog + license + core + graph + objectgraph + graphalgo + graphstat + graphutil + dot + +Online Resources +---------------- + +* `Sourcecode repository on bitbucket `_ + +* `The issue tracker `_ + +Indices and tables +------------------ + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff --git a/python/altgraph/doc/license.rst b/python/altgraph/doc/license.rst new file mode 100644 index 000000000..498e60be0 --- /dev/null +++ b/python/altgraph/doc/license.rst @@ -0,0 +1,25 @@ +License +======= + +Copyright (c) 2004 Istvan Albert unless otherwise noted. + +Parts are copyright (c) Bob Ippolito + +Parts are copyright (c) 2010-2014 Ronald Oussoren + +MIT License +........... + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the "Software"), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do +so. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ diff --git a/python/altgraph/doc/objectgraph.rst b/python/altgraph/doc/objectgraph.rst new file mode 100644 index 000000000..e3df396b1 --- /dev/null +++ b/python/altgraph/doc/objectgraph.rst @@ -0,0 +1,146 @@ +:mod:`altgraph.ObjectGraph` --- Graphs of objecs with an identifier +=================================================================== + +.. module:: altgraph.ObjectGraph + :synopsis: A graph of objects that have a "graphident" attribute. + +.. class:: ObjectGraph([graph[, debug]]) + + A graph of objects that have a "graphident" attribute. The + value of this attribute is the key for the object in the + graph. + + The optional *graph* is a previously constructed + :class:`Graph `. + + The optional *debug* level controls the amount of debug output + (see :meth:`msg`, :meth:`msgin` and :meth:`msgout`). + + .. note:: the altgraph library does not generate output, the + debug attribute and message methods are present for use + by subclasses. + +.. data:: ObjectGraph.graph + + An :class:`Graph ` object that contains + the graph data. + + +.. method:: ObjectGraph.addNode(node) + + Adds a *node* to the graph. + + .. note:: re-adding a node that was previously removed + using :meth:`removeNode` will reinstate the previously + removed node. + +.. method:: ObjectGraph.createNode(self, cls, name, \*args, \**kwds) + + Creates a new node using ``cls(*args, **kwds)`` and adds that + node using :meth:`addNode`. + + Returns the newly created node. + +.. method:: ObjectGraph.removeNode(node) + + Removes a *node* from the graph when it exists. The *node* argument + is either a node object, or the graphident of a node. + +.. method:: ObjectGraph.createReferences(fromnode, tonode[, edge_data]) + + Creates a reference from *fromnode* to *tonode*. The optional + *edge_data* is associated with the edge. + + *Fromnode* and *tonode* can either be node objects or the graphident + values for nodes. + +.. 
method:: removeReference(fromnode, tonode) + + Removes the reference from *fromnode* to *tonode* if it exists. + +.. method:: ObjectGraph.getRawIdent(node) + + Returns the *graphident* attribute of *node*, or the graph itself + when *node* is :data:`None`. + +.. method:: getIdent(node) + + Same as :meth:`getRawIdent`, but only if the node is part + of the graph. + + *Node* can either be an actual node object or the graphident of + a node. + +.. method:: ObjectGraph.findNode(node) + + Returns a given node in the graph, or :data:`Node` when it cannot + be found. + + *Node* is either an object with a *graphident* attribute or + the *graphident* attribute itself. + +.. method:: ObjectGraph.__contains__(node) + + Returns True if *node* is a member of the graph. *Node* is either an + object with a *graphident* attribute or the *graphident* attribute itself. + +.. method:: ObjectGraph.flatten([condition[, start]]) + + Yield all nodes that are entirely reachable by *condition* + starting fromt he given *start* node or the graph root. + + .. note:: objects are only reachable from the graph root + when there is a reference from the root to the node + (either directly or through another node) + +.. method:: ObjectGraph.nodes() + + Yield all nodes in the graph. + +.. method:: ObjectGraph.get_edges(node) + + Returns two iterators that yield the nodes reaching by + outgoing and incoming edges. + +.. method:: ObjectGraph.filterStack(filters) + + Filter the ObjectGraph in-place by removing all edges to nodes that + do not match every filter in the given filter list + + Returns a tuple containing the number of: + (*nodes_visited*, *nodes_removed*, *nodes_orphaned*) + +.. method:: ObjectGraph.edgeData(fromNode, toNode): + Return the edge data associated with the edge from *fromNode* + to *toNode*. Raises :exc:`KeyError` when no such edge exists. + + .. versionadded: 0.12 + +.. 
method:: ObjectGraph.updateEdgeData(fromNode, toNode, edgeData) + + Replace the data associated with the edge from *fromNode* to + *toNode* by *edgeData*. + + Raises :exc:`KeyError` when the edge does not exist. + +Debug output +------------ + +.. data:: ObjectGraph.debug + + The current debug level. + +.. method:: ObjectGraph.msg(level, text, \*args) + + Print a debug message at the current indentation level when the current + debug level is *level* or less. + +.. method:: ObjectGraph.msgin(level, text, \*args) + + Print a debug message when the current debug level is *level* or less, + and increase the indentation level. + +.. method:: ObjectGraph.msgout(level, text, \*args) + + Decrease the indentation level and print a debug message when the + current debug level is *level* or less. diff --git a/python/altgraph/setup.cfg b/python/altgraph/setup.cfg new file mode 100644 index 000000000..9c6880e3c --- /dev/null +++ b/python/altgraph/setup.cfg @@ -0,0 +1,36 @@ +[metadata] +name = altgraph +version = 0.12 +description = Python graph (network) package +long_description_file = + README.txt + doc/changelog.rst +author = Ronald Oussoren +author_email = ronaldoussoren@mac.com +maintainer = Ronald Oussoren +maintainer_email = ronaldoussoren@mac.com +url = http://packages.python.org/altgraph +download_url = http://pypi.python.org/pypi/altgraph +license = MIT +classifiers = + Intended Audience :: Developers + License :: OSI Approved :: MIT License + Programming Language :: Python + Programming Language :: Python :: 2 + Programming Language :: Python :: 2.7 + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.3 + Programming Language :: Python :: 3.4 + Topic :: Software Development :: Libraries :: Python Modules + Topic :: Scientific/Engineering :: Mathematics + Topic :: Scientific/Engineering :: Visualization +keywords = graph +platforms = any +packages = altgraph +zip-safe = 1 + +[egg_info] +tag_build = +tag_date = 0 +tag_svn_revision = 0 + diff 
--git a/python/altgraph/setup.py b/python/altgraph/setup.py new file mode 100644 index 000000000..a1a4cb6eb --- /dev/null +++ b/python/altgraph/setup.py @@ -0,0 +1,867 @@ +""" +Shared setup file for simple python packages. Uses a setup.cfg that +is the same as the distutils2 project, unless noted otherwise. + +It exists for two reasons: +1) This makes it easier to reuse setup.py code between my own + projects + +2) Easier migration to distutils2 when that catches on. + +Additional functionality: + +* Section metadata: + requires-test: Same as 'tests_require' option for setuptools. + +""" + +import sys +import os +import re +import platform +from fnmatch import fnmatch +import os +import sys +import time +import tempfile +import tarfile +try: + import urllib.request as urllib +except ImportError: + import urllib +from distutils import log +try: + from hashlib import md5 + +except ImportError: + from md5 import md5 + +if sys.version_info[0] == 2: + from ConfigParser import RawConfigParser, NoOptionError, NoSectionError +else: + from configparser import RawConfigParser, NoOptionError, NoSectionError + +ROOTDIR = os.path.dirname(os.path.abspath(__file__)) + + +# +# +# +# Parsing the setup.cfg and converting it to something that can be +# used by setuptools.setup() +# +# +# + +def eval_marker(value): + """ + Evaluate an distutils2 environment marker. + + This code is unsafe when used with hostile setup.cfg files, + but that's not a problem for our own files. 
+ """ + value = value.strip() + + class M: + def __init__(self, **kwds): + for k, v in kwds.items(): + setattr(self, k, v) + + variables = { + 'python_version': '%d.%d'%(sys.version_info[0], sys.version_info[1]), + 'python_full_version': sys.version.split()[0], + 'os': M( + name=os.name, + ), + 'sys': M( + platform=sys.platform, + ), + 'platform': M( + version=platform.version(), + machine=platform.machine(), + ), + } + + return bool(eval(value, variables, variables)) + + + return True + +def _opt_value(cfg, into, section, key, transform = None): + try: + v = cfg.get(section, key) + if transform != _as_lines and ';' in v: + v, marker = v.rsplit(';', 1) + if not eval_marker(marker): + return + + v = v.strip() + + if v: + if transform: + into[key] = transform(v.strip()) + else: + into[key] = v.strip() + + except (NoOptionError, NoSectionError): + pass + +def _as_bool(value): + if value.lower() in ('y', 'yes', 'on'): + return True + elif value.lower() in ('n', 'no', 'off'): + return False + elif value.isdigit(): + return bool(int(value)) + else: + raise ValueError(value) + +def _as_list(value): + return value.split() + +def _as_lines(value): + result = [] + for v in value.splitlines(): + if ';' in v: + v, marker = v.rsplit(';', 1) + if not eval_marker(marker): + continue + + v = v.strip() + if v: + result.append(v) + else: + result.append(v) + return result + +def _map_requirement(value): + m = re.search(r'(\S+)\s*(?:\((.*)\))?', value) + name = m.group(1) + version = m.group(2) + + if version is None: + return name + + else: + mapped = [] + for v in version.split(','): + v = v.strip() + if v[0].isdigit(): + # Checks for a specific version prefix + m = v.rsplit('.', 1) + mapped.append('>=%s,<%s.%s'%( + v, m[0], int(m[1])+1)) + + else: + mapped.append(v) + return '%s %s'%(name, ','.join(mapped),) + +def _as_requires(value): + requires = [] + for req in value.splitlines(): + if ';' in req: + req, marker = v.rsplit(';', 1) + if not eval_marker(marker): + continue + req = 
req.strip() + + if not req: + continue + requires.append(_map_requirement(req)) + return requires + +def parse_setup_cfg(): + cfg = RawConfigParser() + r = cfg.read([os.path.join(ROOTDIR, 'setup.cfg')]) + if len(r) != 1: + print("Cannot read 'setup.cfg'") + sys.exit(1) + + metadata = dict( + name = cfg.get('metadata', 'name'), + version = cfg.get('metadata', 'version'), + description = cfg.get('metadata', 'description'), + ) + + _opt_value(cfg, metadata, 'metadata', 'license') + _opt_value(cfg, metadata, 'metadata', 'maintainer') + _opt_value(cfg, metadata, 'metadata', 'maintainer_email') + _opt_value(cfg, metadata, 'metadata', 'author') + _opt_value(cfg, metadata, 'metadata', 'author_email') + _opt_value(cfg, metadata, 'metadata', 'url') + _opt_value(cfg, metadata, 'metadata', 'download_url') + _opt_value(cfg, metadata, 'metadata', 'classifiers', _as_lines) + _opt_value(cfg, metadata, 'metadata', 'platforms', _as_list) + _opt_value(cfg, metadata, 'metadata', 'packages', _as_list) + _opt_value(cfg, metadata, 'metadata', 'keywords', _as_list) + + try: + v = cfg.get('metadata', 'requires-dist') + + except (NoOptionError, NoSectionError): + pass + + else: + requires = _as_requires(v) + if requires: + metadata['install_requires'] = requires + + try: + v = cfg.get('metadata', 'requires-test') + + except (NoOptionError, NoSectionError): + pass + + else: + requires = _as_requires(v) + if requires: + metadata['tests_require'] = requires + + + try: + v = cfg.get('metadata', 'long_description_file') + except (NoOptionError, NoSectionError): + pass + + else: + parts = [] + for nm in v.split(): + fp = open(nm, 'rU') + parts.append(fp.read()) + fp.close() + + metadata['long_description'] = '\n\n'.join(parts) + + + try: + v = cfg.get('metadata', 'zip-safe') + except (NoOptionError, NoSectionError): + pass + + else: + metadata['zip_safe'] = _as_bool(v) + + try: + v = cfg.get('metadata', 'console_scripts') + except (NoOptionError, NoSectionError): + pass + + else: + if 
'entry_points' not in metadata: + metadata['entry_points'] = {} + + metadata['entry_points']['console_scripts'] = v.splitlines() + + if sys.version_info[:2] <= (2,6): + try: + metadata['tests_require'] += ", unittest2" + except KeyError: + metadata['tests_require'] = "unittest2" + + return metadata + + +# +# +# +# Bootstrapping setuptools/distribute, based on +# a heavily modified version of distribute_setup.py +# +# +# + + +SETUPTOOLS_PACKAGE='setuptools' + + +try: + import subprocess + + def _python_cmd(*args): + args = (sys.executable,) + args + return subprocess.call(args) == 0 + +except ImportError: + def _python_cmd(*args): + args = (sys.executable,) + args + new_args = [] + for a in args: + new_args.append(a.replace("'", "'\"'\"'")) + os.system(' '.join(new_args)) == 0 + + +try: + import json + + def get_pypi_src_download(package): + url = 'https://pypi.python.org/pypi/%s/json'%(package,) + fp = urllib.urlopen(url) + try: + try: + data = fp.read() + + finally: + fp.close() + except urllib.error: + raise RuntimeError("Cannot determine download link for %s"%(package,)) + + pkgdata = json.loads(data.decode('utf-8')) + if 'urls' not in pkgdata: + raise RuntimeError("Cannot determine download link for %s"%(package,)) + + for info in pkgdata['urls']: + if info['packagetype'] == 'sdist' and info['url'].endswith('tar.gz'): + return (info.get('md5_digest'), info['url']) + + raise RuntimeError("Cannot determine downlink link for %s"%(package,)) + +except ImportError: + # Python 2.5 compatibility, no JSON in stdlib but luckily JSON syntax is + # simular enough to Python's syntax to be able to abuse the Python compiler + + import _ast as ast + + def get_pypi_src_download(package): + url = 'https://pypi.python.org/pypi/%s/json'%(package,) + fp = urllib.urlopen(url) + try: + try: + data = fp.read() + + finally: + fp.close() + except urllib.error: + raise RuntimeError("Cannot determine download link for %s"%(package,)) + + + a = compile(data, '-', 'eval', 
ast.PyCF_ONLY_AST) + if not isinstance(a, ast.Expression): + raise RuntimeError("Cannot determine download link for %s"%(package,)) + + a = a.body + if not isinstance(a, ast.Dict): + raise RuntimeError("Cannot determine download link for %s"%(package,)) + + for k, v in zip(a.keys, a.values): + if not isinstance(k, ast.Str): + raise RuntimeError("Cannot determine download link for %s"%(package,)) + + k = k.s + if k == 'urls': + a = v + break + else: + raise RuntimeError("PyPI JSON for %s doesn't contain URLs section"%(package,)) + + if not isinstance(a, ast.List): + raise RuntimeError("Cannot determine download link for %s"%(package,)) + + for info in v.elts: + if not isinstance(info, ast.Dict): + raise RuntimeError("Cannot determine download link for %s"%(package,)) + url = None + packagetype = None + chksum = None + + for k, v in zip(info.keys, info.values): + if not isinstance(k, ast.Str): + raise RuntimeError("Cannot determine download link for %s"%(package,)) + + if k.s == 'url': + if not isinstance(v, ast.Str): + raise RuntimeError("Cannot determine download link for %s"%(package,)) + url = v.s + + elif k.s == 'packagetype': + if not isinstance(v, ast.Str): + raise RuntimeError("Cannot determine download link for %s"%(package,)) + packagetype = v.s + + elif k.s == 'md5_digest': + if not isinstance(v, ast.Str): + raise RuntimeError("Cannot determine download link for %s"%(package,)) + chksum = v.s + + if url is not None and packagetype == 'sdist' and url.endswith('.tar.gz'): + return (chksum, url) + + raise RuntimeError("Cannot determine download link for %s"%(package,)) + +def _build_egg(egg, tarball, to_dir): + # extracting the tarball + tmpdir = tempfile.mkdtemp() + log.warn('Extracting in %s', tmpdir) + old_wd = os.getcwd() + try: + os.chdir(tmpdir) + tar = tarfile.open(tarball) + _extractall(tar) + tar.close() + + # going in the directory + subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) + os.chdir(subdir) + log.warn('Now working in %s', subdir) + + # 
building an egg + log.warn('Building a %s egg in %s', egg, to_dir) + _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) + + finally: + os.chdir(old_wd) + # returning the result + log.warn(egg) + if not os.path.exists(egg): + raise IOError('Could not build the egg.') + + +def _do_download(to_dir, packagename=SETUPTOOLS_PACKAGE): + tarball = download_setuptools(packagename, to_dir) + version = tarball.split('-')[-1][:-7] + egg = os.path.join(to_dir, '%s-%s-py%d.%d.egg' + % (packagename, version, sys.version_info[0], sys.version_info[1])) + if not os.path.exists(egg): + _build_egg(egg, tarball, to_dir) + sys.path.insert(0, egg) + import setuptools + setuptools.bootstrap_install_from = egg + + +def use_setuptools(): + # making sure we use the absolute path + return _do_download(os.path.abspath(os.curdir)) + +def download_setuptools(packagename, to_dir): + # making sure we use the absolute path + to_dir = os.path.abspath(to_dir) + try: + from urllib.request import urlopen + except ImportError: + from urllib2 import urlopen + + chksum, url = get_pypi_src_download(packagename) + tgz_name = os.path.basename(url) + saveto = os.path.join(to_dir, tgz_name) + + src = dst = None + if not os.path.exists(saveto): # Avoid repeated downloads + try: + log.warn("Downloading %s", url) + src = urlopen(url) + # Read/write all in one block, so we don't create a corrupt file + # if the download is interrupted. + data = src.read() + + if chksum is not None: + data_sum = md5(data).hexdigest() + if data_sum != chksum: + raise RuntimeError("Downloading %s failed: corrupt checksum"%(url,)) + + + dst = open(saveto, "wb") + dst.write(data) + finally: + if src: + src.close() + if dst: + dst.close() + return os.path.realpath(saveto) + + + +def _extractall(self, path=".", members=None): + """Extract all members from the archive to the current working + directory and set owner, modification time and permissions on + directories afterwards. 
`path' specifies a different directory + to extract to. `members' is optional and must be a subset of the + list returned by getmembers(). + """ + import copy + import operator + from tarfile import ExtractError + directories = [] + + if members is None: + members = self + + for tarinfo in members: + if tarinfo.isdir(): + # Extract directories with a safe mode. + directories.append(tarinfo) + tarinfo = copy.copy(tarinfo) + tarinfo.mode = 448 # decimal for oct 0700 + self.extract(tarinfo, path) + + # Reverse sort directories. + if sys.version_info < (2, 4): + def sorter(dir1, dir2): + return cmp(dir1.name, dir2.name) + directories.sort(sorter) + directories.reverse() + else: + directories.sort(key=operator.attrgetter('name'), reverse=True) + + # Set correct owner, mtime and filemode on directories. + for tarinfo in directories: + dirpath = os.path.join(path, tarinfo.name) + try: + self.chown(tarinfo, dirpath) + self.utime(tarinfo, dirpath) + self.chmod(tarinfo, dirpath) + except ExtractError: + e = sys.exc_info()[1] + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + +# +# +# +# Definitions of custom commands +# +# +# + +try: + import setuptools + +except ImportError: + use_setuptools() + +from setuptools import setup + +try: + from distutils.core import PyPIRCCommand +except ImportError: + PyPIRCCommand = None # Ancient python version + +from distutils.core import Command +from distutils.errors import DistutilsError +from distutils import log + +if PyPIRCCommand is None: + class upload_docs (Command): + description = "upload sphinx documentation" + user_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + raise DistutilsError("not supported on this version of python") + +else: + class upload_docs (PyPIRCCommand): + description = "upload sphinx documentation" + user_options = PyPIRCCommand.user_options + + def initialize_options(self): + PyPIRCCommand.initialize_options(self) + 
self.username = '' + self.password = '' + + + def finalize_options(self): + PyPIRCCommand.finalize_options(self) + config = self._read_pypirc() + if config != {}: + self.username = config['username'] + self.password = config['password'] + + + def run(self): + import subprocess + import shutil + import zipfile + import os + import urllib + import StringIO + from base64 import standard_b64encode + import httplib + import urlparse + + # Extract the package name from distutils metadata + meta = self.distribution.metadata + name = meta.get_name() + + # Run sphinx + if os.path.exists('doc/_build'): + shutil.rmtree('doc/_build') + os.mkdir('doc/_build') + + p = subprocess.Popen(['make', 'html'], + cwd='doc') + exit = p.wait() + if exit != 0: + raise DistutilsError("sphinx-build failed") + + # Collect sphinx output + if not os.path.exists('dist'): + os.mkdir('dist') + zf = zipfile.ZipFile('dist/%s-docs.zip'%(name,), 'w', + compression=zipfile.ZIP_DEFLATED) + + for toplevel, dirs, files in os.walk('doc/_build/html'): + for fn in files: + fullname = os.path.join(toplevel, fn) + relname = os.path.relpath(fullname, 'doc/_build/html') + + print ("%s -> %s"%(fullname, relname)) + + zf.write(fullname, relname) + + zf.close() + + # Upload the results, this code is based on the distutils + # 'upload' command. 
+ content = open('dist/%s-docs.zip'%(name,), 'rb').read() + + data = { + ':action': 'doc_upload', + 'name': name, + 'content': ('%s-docs.zip'%(name,), content), + } + auth = "Basic " + standard_b64encode(self.username + ":" + + self.password) + + + boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' + sep_boundary = '\n--' + boundary + end_boundary = sep_boundary + '--' + body = StringIO.StringIO() + for key, value in data.items(): + if not isinstance(value, list): + value = [value] + + for value in value: + if isinstance(value, tuple): + fn = ';filename="%s"'%(value[0]) + value = value[1] + else: + fn = '' + + body.write(sep_boundary) + body.write('\nContent-Disposition: form-data; name="%s"'%key) + body.write(fn) + body.write("\n\n") + body.write(value) + + body.write(end_boundary) + body.write('\n') + body = body.getvalue() + + self.announce("Uploading documentation to %s"%(self.repository,), log.INFO) + + schema, netloc, url, params, query, fragments = \ + urlparse.urlparse(self.repository) + + + if schema == 'http': + http = httplib.HTTPConnection(netloc) + elif schema == 'https': + http = httplib.HTTPSConnection(netloc) + else: + raise AssertionError("unsupported schema "+schema) + + data = '' + loglevel = log.INFO + try: + http.connect() + http.putrequest("POST", url) + http.putheader('Content-type', + 'multipart/form-data; boundary=%s'%boundary) + http.putheader('Content-length', str(len(body))) + http.putheader('Authorization', auth) + http.endheaders() + http.send(body) + except socket.error: + e = socket.exc_info()[1] + self.announce(str(e), log.ERROR) + return + + r = http.getresponse() + if r.status in (200, 301): + self.announce('Upload succeeded (%s): %s' % (r.status, r.reason), + log.INFO) + else: + self.announce('Upload failed (%s): %s' % (r.status, r.reason), + log.ERROR) + + print ('-'*75) + print (r.read()) + print ('-'*75) + + +def recursiveGlob(root, pathPattern): + """ + Recursively look for files matching 'pathPattern'. 
Return a list + of matching files/directories. + """ + result = [] + + for rootpath, dirnames, filenames in os.walk(root): + for fn in filenames: + if fnmatch(fn, pathPattern): + result.append(os.path.join(rootpath, fn)) + return result + + +def importExternalTestCases(unittest, + pathPattern="test_*.py", root=".", package=None): + """ + Import all unittests in the PyObjC tree starting at 'root' + """ + + testFiles = recursiveGlob(root, pathPattern) + testModules = map(lambda x:x[len(root)+1:-3].replace('/', '.'), testFiles) + if package is not None: + testModules = [(package + '.' + m) for m in testModules] + + suites = [] + + for modName in testModules: + try: + module = __import__(modName) + except ImportError: + print("SKIP %s: %s"%(modName, sys.exc_info()[1])) + continue + + if '.' in modName: + for elem in modName.split('.')[1:]: + module = getattr(module, elem) + + s = unittest.defaultTestLoader.loadTestsFromModule(module) + suites.append(s) + + return unittest.TestSuite(suites) + + + +class test (Command): + description = "run test suite" + user_options = [ + ('verbosity=', None, "print what tests are run"), + ] + + def initialize_options(self): + self.verbosity='1' + + def finalize_options(self): + if isinstance(self.verbosity, str): + self.verbosity = int(self.verbosity) + + + def cleanup_environment(self): + ei_cmd = self.get_finalized_command('egg_info') + egg_name = ei_cmd.egg_name.replace('-', '_') + + to_remove = [] + for dirname in sys.path: + bn = os.path.basename(dirname) + if bn.startswith(egg_name + "-"): + to_remove.append(dirname) + + for dirname in to_remove: + log.info("removing installed %r from sys.path before testing"%( + dirname,)) + sys.path.remove(dirname) + + def add_project_to_sys_path(self): + from pkg_resources import normalize_path, add_activation_listener + from pkg_resources import working_set, require + + self.reinitialize_command('egg_info') + self.run_command('egg_info') + self.reinitialize_command('build_ext', inplace=1) + 
self.run_command('build_ext') + + + # Check if this distribution is already on sys.path + # and remove that version, this ensures that the right + # copy of the package gets tested. + + self.__old_path = sys.path[:] + self.__old_modules = sys.modules.copy() + + + ei_cmd = self.get_finalized_command('egg_info') + sys.path.insert(0, normalize_path(ei_cmd.egg_base)) + sys.path.insert(1, os.path.dirname(__file__)) + + # Strip the namespace packages defined in this distribution + # from sys.modules, needed to reset the search path for + # those modules. + + nspkgs = getattr(self.distribution, 'namespace_packages') + if nspkgs is not None: + for nm in nspkgs: + del sys.modules[nm] + + # Reset pkg_resources state: + add_activation_listener(lambda dist: dist.activate()) + working_set.__init__() + require('%s==%s'%(ei_cmd.egg_name, ei_cmd.egg_version)) + + def remove_from_sys_path(self): + from pkg_resources import working_set + sys.path[:] = self.__old_path + sys.modules.clear() + sys.modules.update(self.__old_modules) + working_set.__init__() + + + def run(self): + import unittest + + # Ensure that build directory is on sys.path (py3k) + + self.cleanup_environment() + self.add_project_to_sys_path() + + try: + meta = self.distribution.metadata + name = meta.get_name() + test_pkg = name + "_tests" + suite = importExternalTestCases(unittest, + "test_*.py", test_pkg, test_pkg) + + runner = unittest.TextTestRunner(verbosity=self.verbosity) + result = runner.run(suite) + + # Print out summary. This is a structured format that + # should make it easy to use this information in scripts. + summary = dict( + count=result.testsRun, + fails=len(result.failures), + errors=len(result.errors), + xfails=len(getattr(result, 'expectedFailures', [])), + xpass=len(getattr(result, 'expectedSuccesses', [])), + skip=len(getattr(result, 'skipped', [])), + ) + print("SUMMARY: %s"%(summary,)) + + finally: + self.remove_from_sys_path() + +# +# +# +# And finally run the setuptools main entry point. 
+# +# +# + +metadata = parse_setup_cfg() + +setup( + cmdclass=dict( + upload_docs=upload_docs, + test=test, + ), + **metadata +) diff --git a/python/bitstring/PKG-INFO b/python/bitstring/PKG-INFO new file mode 100644 index 000000000..1036c45d7 --- /dev/null +++ b/python/bitstring/PKG-INFO @@ -0,0 +1,122 @@ +Metadata-Version: 1.1 +Name: bitstring +Version: 3.1.3 +Summary: Simple construction, analysis and modification of binary data. +Home-page: http://python-bitstring.googlecode.com +Author: Scott Griffiths +Author-email: scott@griffiths.name +License: The MIT License: http://www.opensource.org/licenses/mit-license.php +Download-URL: http://python-bitstring.googlecode.com +Description: ================ + bitstring module + ================ + + **bitstring** is a pure Python module designed to help make + the creation and analysis of binary data as simple and natural as possible. + + Bitstrings can be constructed from integers (big and little endian), hex, + octal, binary, strings or files. They can be sliced, joined, reversed, + inserted into, overwritten, etc. with simple functions or slice notation. + They can also be read from, searched and replaced, and navigated in, + similar to a file or stream. + + bitstring is open source software, and has been released under the MIT + licence. + + This version supports Python 2.6 and later (including Python 3). + For Python 2.4 and 2.5 you should instead download version 1.0. + + Documentation + ------------- + The manual for the bitstring module is available here + . It contains a walk-through of all + the features and a complete reference section. + + It is also available as a PDF as part of the source download. + + Installation + ------------ + If you have downloaded and unzipped the package then you need to run the + ``setup.py`` script with the 'install' argument:: + + python setup.py install + + You may need to run this with root privileges on Unix-like systems. 
+ + + If you haven't yet downloaded the package then you can just try:: + + easy_install bitstring + + or :: + + pip install bitstring + + + Simple Examples + --------------- + Creation:: + + >>> a = BitArray(bin='00101') + >>> b = Bits(a_file_object) + >>> c = BitArray('0xff, 0b101, 0o65, uint:6=22') + >>> d = pack('intle:16, hex=a, 0b1', 100, a='0x34f') + >>> e = pack('<16h', *range(16)) + + Different interpretations, slicing and concatenation:: + + >>> a = BitArray('0x1af') + >>> a.hex, a.bin, a.uint + ('1af', '000110101111', 431) + >>> a[10:3:-1].bin + '1110101' + >>> 3*a + '0b100' + BitArray('0o0657056705674') + + Reading data sequentially:: + + >>> b = BitStream('0x160120f') + >>> b.read(12).hex + '160' + >>> b.pos = 0 + >>> b.read('uint:12') + 352 + >>> b.readlist('uint:12, bin:3') + [288, '111'] + + Searching, inserting and deleting:: + + >>> c = BitArray('0b00010010010010001111') # c.hex == '0x1248f' + >>> c.find('0x48') + (8,) + >>> c.replace('0b001', '0xabc') + >>> c.insert('0b0000') + >>> del c[12:16] + + Unit Tests + ---------- + + The 400+ unit tests should all pass for Python 2.6 and later. + + ---- + + The bitstring module has been released as open source under the MIT License. 
+ Copyright (c) 2014 Scott Griffiths + + For more information see the project's homepage on Google Code: + + + +Platform: all +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Operating System :: OS Independent +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.0 +Classifier: Programming Language :: Python :: 3.1 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/python/bitstring/README.txt b/python/bitstring/README.txt new file mode 100644 index 000000000..491c2f8cf --- /dev/null +++ b/python/bitstring/README.txt @@ -0,0 +1,99 @@ +================ +bitstring module +================ + +**bitstring** is a pure Python module designed to help make +the creation and analysis of binary data as simple and natural as possible. + +Bitstrings can be constructed from integers (big and little endian), hex, +octal, binary, strings or files. They can be sliced, joined, reversed, +inserted into, overwritten, etc. with simple functions or slice notation. +They can also be read from, searched and replaced, and navigated in, +similar to a file or stream. + +bitstring is open source software, and has been released under the MIT +licence. + +This version supports Python 2.6 and later (including Python 3). +For Python 2.4 and 2.5 you should instead download version 1.0. + +Documentation +------------- +The manual for the bitstring module is available here +. It contains a walk-through of all +the features and a complete reference section. + +It is also available as a PDF as part of the source download. 
+ +Installation +------------ +If you have downloaded and unzipped the package then you need to run the +``setup.py`` script with the 'install' argument:: + + python setup.py install + +You may need to run this with root privileges on Unix-like systems. + + +If you haven't yet downloaded the package then you can just try:: + + easy_install bitstring + +or :: + + pip install bitstring + + +Simple Examples +--------------- +Creation:: + + >>> a = BitArray(bin='00101') + >>> b = Bits(a_file_object) + >>> c = BitArray('0xff, 0b101, 0o65, uint:6=22') + >>> d = pack('intle:16, hex=a, 0b1', 100, a='0x34f') + >>> e = pack('<16h', *range(16)) + +Different interpretations, slicing and concatenation:: + + >>> a = BitArray('0x1af') + >>> a.hex, a.bin, a.uint + ('1af', '000110101111', 431) + >>> a[10:3:-1].bin + '1110101' + >>> 3*a + '0b100' + BitArray('0o0657056705674') + +Reading data sequentially:: + + >>> b = BitStream('0x160120f') + >>> b.read(12).hex + '160' + >>> b.pos = 0 + >>> b.read('uint:12') + 352 + >>> b.readlist('uint:12, bin:3') + [288, '111'] + +Searching, inserting and deleting:: + + >>> c = BitArray('0b00010010010010001111') # c.hex == '0x1248f' + >>> c.find('0x48') + (8,) + >>> c.replace('0b001', '0xabc') + >>> c.insert('0b0000') + >>> del c[12:16] + +Unit Tests +---------- + +The 400+ unit tests should all pass for Python 2.6 and later. + +---- + +The bitstring module has been released as open source under the MIT License. +Copyright (c) 2014 Scott Griffiths + +For more information see the project's homepage on Google Code: + + diff --git a/python/bitstring/bitstring.py b/python/bitstring/bitstring.py new file mode 100644 index 000000000..86f969c7f --- /dev/null +++ b/python/bitstring/bitstring.py @@ -0,0 +1,4234 @@ +#!/usr/bin/env python +# cython: profile=True +""" +This package defines classes that simplify bit-wise creation, manipulation and +interpretation of data. + +Classes: + +Bits -- An immutable container for binary data. 
+BitArray -- A mutable container for binary data. +ConstBitStream -- An immutable container with streaming methods. +BitStream -- A mutable container with streaming methods. + + Bits (base class) + / \ + + mutating methods / \ + streaming methods + / \ + BitArray ConstBitStream + \ / + \ / + \ / + BitStream + +Functions: + +pack -- Create a BitStream from a format string. + +Exceptions: + +Error -- Module exception base class. +CreationError -- Error during creation. +InterpretError -- Inappropriate interpretation of binary data. +ByteAlignError -- Whole byte position or length needed. +ReadError -- Reading or peeking past the end of a bitstring. + +http://python-bitstring.googlecode.com +""" + +__licence__ = """ +The MIT License + +Copyright (c) 2006-2014 Scott Griffiths (scott@griffiths.name) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+""" + +__version__ = "3.1.3" + +__author__ = "Scott Griffiths" + +import numbers +import copy +import sys +import re +import binascii +import mmap +import os +import struct +import operator +import collections + +byteorder = sys.byteorder + +bytealigned = False +"""Determines whether a number of methods default to working only on byte boundaries.""" + +# Maximum number of digits to use in __str__ and __repr__. +MAX_CHARS = 250 + +# Maximum size of caches used for speed optimisations. +CACHE_SIZE = 1000 + +class Error(Exception): + """Base class for errors in the bitstring module.""" + + def __init__(self, *params): + self.msg = params[0] if params else '' + self.params = params[1:] + + def __str__(self): + if self.params: + return self.msg.format(*self.params) + return self.msg + + +class ReadError(Error, IndexError): + """Reading or peeking past the end of a bitstring.""" + + def __init__(self, *params): + Error.__init__(self, *params) + + +class InterpretError(Error, ValueError): + """Inappropriate interpretation of binary data.""" + + def __init__(self, *params): + Error.__init__(self, *params) + + +class ByteAlignError(Error): + """Whole-byte position or length needed.""" + + def __init__(self, *params): + Error.__init__(self, *params) + + +class CreationError(Error, ValueError): + """Inappropriate argument during bitstring creation.""" + + def __init__(self, *params): + Error.__init__(self, *params) + + +class ConstByteStore(object): + """Stores raw bytes together with a bit offset and length. + + Used internally - not part of public interface. 
+ """ + + __slots__ = ('offset', '_rawarray', 'bitlength') + + def __init__(self, data, bitlength=None, offset=None): + """data is either a bytearray or a MmapByteArray""" + self._rawarray = data + if offset is None: + offset = 0 + if bitlength is None: + bitlength = 8 * len(data) - offset + self.offset = offset + self.bitlength = bitlength + + def getbit(self, pos): + assert 0 <= pos < self.bitlength + byte, bit = divmod(self.offset + pos, 8) + return bool(self._rawarray[byte] & (128 >> bit)) + + def getbyte(self, pos): + """Direct access to byte data.""" + return self._rawarray[pos] + + def getbyteslice(self, start, end): + """Direct access to byte data.""" + c = self._rawarray[start:end] + return c + + @property + def bytelength(self): + if not self.bitlength: + return 0 + sb = self.offset // 8 + eb = (self.offset + self.bitlength - 1) // 8 + return eb - sb + 1 + + def __copy__(self): + return ByteStore(self._rawarray[:], self.bitlength, self.offset) + + def _appendstore(self, store): + """Join another store on to the end of this one.""" + if not store.bitlength: + return + # Set new array offset to the number of bits in the final byte of current array. + store = offsetcopy(store, (self.offset + self.bitlength) % 8) + if store.offset: + # first do the byte with the join. + joinval = (self._rawarray.pop() & (255 ^ (255 >> store.offset)) | + (store.getbyte(0) & (255 >> store.offset))) + self._rawarray.append(joinval) + self._rawarray.extend(store._rawarray[1:]) + else: + self._rawarray.extend(store._rawarray) + self.bitlength += store.bitlength + + def _prependstore(self, store): + """Join another store on to the start of this one.""" + if not store.bitlength: + return + # Set the offset of copy of store so that it's final byte + # ends in a position that matches the offset of self, + # then join self on to the end of it. 
        # Shift the incoming store so its final byte lines up with our own
        # bit offset, then splice its bytes in front of ours.
        store = offsetcopy(store, (self.offset - store.bitlength) % 8)
        assert (store.offset + store.bitlength) % 8 == self.offset % 8
        bit_offset = self.offset % 8
        if bit_offset:
            # first do the byte with the join.
            # Keep the top bit_offset bits of store's last byte and take the
            # remaining low bits from our first byte.
            store.setbyte(-1, (store.getbyte(-1) & (255 ^ (255 >> bit_offset)) |
                               (self._rawarray[self.byteoffset] & (255 >> bit_offset))))
            store._rawarray.extend(self._rawarray[self.byteoffset + 1: self.byteoffset + self.bytelength])
        else:
            store._rawarray.extend(self._rawarray[self.byteoffset: self.byteoffset + self.bytelength])
        # Adopt the combined buffer; bitlength grows by the prepended amount.
        self._rawarray = store._rawarray
        self.offset = store.offset
        self.bitlength += store.bitlength

    @property
    def byteoffset(self):
        # Index of the first byte of the buffer that holds any of our bits.
        return self.offset // 8

    @property
    def rawbytes(self):
        # The underlying buffer (bytearray or MmapByteArray); shared, not a copy.
        return self._rawarray


class ByteStore(ConstByteStore):
    """Adding mutating methods to ConstByteStore

    Used internally - not part of public interface.
    """
    __slots__ = ()

    def setbit(self, pos):
        """Set the bit at position pos (relative to offset) to 1."""
        assert 0 <= pos < self.bitlength
        byte, bit = divmod(self.offset + pos, 8)
        self._rawarray[byte] |= (128 >> bit)

    def unsetbit(self, pos):
        """Set the bit at position pos (relative to offset) to 0."""
        assert 0 <= pos < self.bitlength
        byte, bit = divmod(self.offset + pos, 8)
        self._rawarray[byte] &= ~(128 >> bit)

    def invertbit(self, pos):
        """Flip the bit at position pos (relative to offset)."""
        assert 0 <= pos < self.bitlength
        byte, bit = divmod(self.offset + pos, 8)
        self._rawarray[byte] ^= (128 >> bit)

    def setbyte(self, pos, value):
        # pos indexes the raw buffer directly (negative indices allowed).
        self._rawarray[pos] = value

    def setbyteslice(self, start, end, value):
        self._rawarray[start:end] = value


def offsetcopy(s, newoffset):
    """Return a copy of a ByteStore with the newoffset.

    Not part of public interface.
    """
    assert 0 <= newoffset < 8
    if not s.bitlength:
        return copy.copy(s)
    else:
        if newoffset == s.offset % 8:
            # Offset already correct: just copy the relevant byte slice.
            return ByteStore(s.getbyteslice(s.byteoffset, s.byteoffset + s.bytelength), s.bitlength, newoffset)
        newdata = []
        d = s._rawarray
        assert newoffset != s.offset % 8
        if newoffset < s.offset % 8:
            # We need to shift everything left
            shiftleft = s.offset % 8 - newoffset
            # First deal with everything except for the final byte
            for x in range(s.byteoffset, s.byteoffset + s.bytelength - 1):
                newdata.append(((d[x] << shiftleft) & 0xff) +
                               (d[x + 1] >> (8 - shiftleft)))
            bits_in_last_byte = (s.offset + s.bitlength) % 8
            if not bits_in_last_byte:
                bits_in_last_byte = 8
            # Only emit a final byte if some of its bits survive the shift.
            if bits_in_last_byte > shiftleft:
                newdata.append((d[s.byteoffset + s.bytelength - 1] << shiftleft) & 0xff)
        else:  # newoffset > s._offset % 8
            shiftright = newoffset - s.offset % 8
            newdata.append(s.getbyte(0) >> shiftright)
            for x in range(s.byteoffset + 1, s.byteoffset + s.bytelength):
                newdata.append(((d[x - 1] << (8 - shiftright)) & 0xff) +
                               (d[x] >> shiftright))
            bits_in_last_byte = (s.offset + s.bitlength) % 8
            if not bits_in_last_byte:
                bits_in_last_byte = 8
            # An extra byte is needed when the shift pushes bits past a byte edge.
            if bits_in_last_byte + shiftright > 8:
                newdata.append((d[s.byteoffset + s.bytelength - 1] << (8 - shiftright)) & 0xff)
        new_s = ByteStore(bytearray(newdata), s.bitlength, newoffset)
        assert new_s.offset == newoffset
        return new_s


def equal(a, b):
    """Return True if ByteStores a == b.

    Not part of public interface.
    """
    # We want to return False for inequality as soon as possible, which
    # means we get lots of special cases.
    # First the easy one - compare lengths:
    a_bitlength = a.bitlength
    b_bitlength = b.bitlength
    if a_bitlength != b_bitlength:
        return False
    if not a_bitlength:
        assert b_bitlength == 0
        return True
    # Make 'a' the one with the smaller offset
    if (a.offset % 8) > (b.offset % 8):
        a, b = b, a
    # and create some aliases
    a_bitoff = a.offset % 8
    b_bitoff = b.offset % 8
    a_byteoffset = a.byteoffset
    b_byteoffset = b.byteoffset
    a_bytelength = a.bytelength
    b_bytelength = b.bytelength
    da = a._rawarray
    db = b._rawarray

    # If they are pointing to the same data, they must be equal
    if da is db and a.offset == b.offset:
        return True

    if a_bitoff == b_bitoff:
        bits_spare_in_last_byte = 8 - (a_bitoff + a_bitlength) % 8
        if bits_spare_in_last_byte == 8:
            bits_spare_in_last_byte = 0
        # Special case for a, b contained in a single byte
        if a_bytelength == 1:
            a_val = ((da[a_byteoffset] << a_bitoff) & 0xff) >> (8 - a_bitlength)
            b_val = ((db[b_byteoffset] << b_bitoff) & 0xff) >> (8 - b_bitlength)
            return a_val == b_val
        # Otherwise check first byte
        if da[a_byteoffset] & (0xff >> a_bitoff) != db[b_byteoffset] & (0xff >> b_bitoff):
            return False
        # then everything up to the last
        b_a_offset = b_byteoffset - a_byteoffset
        for x in range(1 + a_byteoffset, a_byteoffset + a_bytelength - 1):
            if da[x] != db[b_a_offset + x]:
                return False
        # and finally the last byte
        return (da[a_byteoffset + a_bytelength - 1] >> bits_spare_in_last_byte ==
                db[b_byteoffset + b_bytelength - 1] >> bits_spare_in_last_byte)

    assert a_bitoff != b_bitoff
    # This is how much we need to shift a to the right to compare with b:
    shift = b_bitoff - a_bitoff
    # Special case for b only one byte long
    if b_bytelength == 1:
        assert a_bytelength == 1
        a_val = ((da[a_byteoffset] << a_bitoff) & 0xff) >> (8 - a_bitlength)
        b_val = ((db[b_byteoffset] << b_bitoff) & 0xff) >> (8 - b_bitlength)
        return a_val == b_val
    # Special case for a only one byte long
    if a_bytelength == 1:
        assert b_bytelength == 2
        a_val = ((da[a_byteoffset] << a_bitoff) & 0xff) >> (8 - a_bitlength)
        b_val = ((db[b_byteoffset] << 8) + db[b_byteoffset + 1]) << b_bitoff
        b_val &= 0xffff
        b_val >>= 16 - b_bitlength
        return a_val == b_val

    # Compare first byte of b with bits from first byte of a
    if (da[a_byteoffset] & (0xff >> a_bitoff)) >> shift != db[b_byteoffset] & (0xff >> b_bitoff):
        return False
    # Now compare every full byte of b with bits from 2 bytes of a
    for x in range(1, b_bytelength - 1):
        # Construct byte from 2 bytes in a to compare to byte in b
        b_val = db[b_byteoffset + x]
        a_val = ((da[a_byteoffset + x - 1] << 8) + da[a_byteoffset + x]) >> shift
        a_val &= 0xff
        if a_val != b_val:
            return False

    # Now check bits in final byte of b
    final_b_bits = (b.offset + b_bitlength) % 8
    if not final_b_bits:
        final_b_bits = 8
    b_val = db[b_byteoffset + b_bytelength - 1] >> (8 - final_b_bits)
    final_a_bits = (a.offset + a_bitlength) % 8
    if not final_a_bits:
        final_a_bits = 8
    if b.bytelength > a_bytelength:
        assert b_bytelength == a_bytelength + 1
        a_val = da[a_byteoffset + a_bytelength - 1] >> (8 - final_a_bits)
        a_val &= 0xff >> (8 - final_b_bits)
        return a_val == b_val
    assert a_bytelength == b_bytelength
    # a's final bits straddle two bytes; combine them before masking.
    a_val = da[a_byteoffset + a_bytelength - 2] << 8
    a_val += da[a_byteoffset + a_bytelength - 1]
    a_val >>= (8 - final_a_bits)
    a_val &= 0xff >> (8 - final_b_bits)
    return a_val == b_val


class MmapByteArray(object):
    """Looks like a bytearray, but from an mmap.

    Not part of public interface.
    """

    __slots__ = ('filemap', 'filelength', 'source', 'byteoffset', 'bytelength')

    def __init__(self, source, bytelength=None, byteoffset=None):
        # source is an open binary file object; the whole file is mapped
        # read-only and byteoffset/bytelength select a window into it.
        self.source = source
        source.seek(0, os.SEEK_END)
        self.filelength = source.tell()
        if byteoffset is None:
            byteoffset = 0
        if bytelength is None:
            bytelength = self.filelength - byteoffset
        self.byteoffset = byteoffset
        self.bytelength = bytelength
        self.filemap = mmap.mmap(source.fileno(), 0, access=mmap.ACCESS_READ)

    def __getitem__(self, key):
        # Accepts either an int index or a slice (step not supported).
        try:
            start = key.start
            stop = key.stop
        except AttributeError:
            try:
                assert 0 <= key < self.bytelength
                return ord(self.filemap[key + self.byteoffset])
            except TypeError:
                # for Python 3
                return self.filemap[key + self.byteoffset]
        else:
            if start is None:
                start = 0
            if stop is None:
                stop = self.bytelength
            assert key.step is None
            assert 0 <= start < self.bytelength
            assert 0 <= stop <= self.bytelength
            s = slice(start + self.byteoffset, stop + self.byteoffset)
            return bytearray(self.filemap.__getitem__(s))

    def __len__(self):
        return self.bytelength


# This creates a dictionary for every possible byte with the value being
# the key with its bits reversed.
BYTE_REVERSAL_DICT = dict()

# For Python 2.x/ 3.x coexistence
# Yes this is very very hacky.
try:
    # Python 2.x: xrange exists, and bytes of BYTE_REVERSAL_DICT are 1-char strs.
    xrange
    for i in range(256):
        BYTE_REVERSAL_DICT[i] = chr(int("{0:08b}".format(i)[::-1], 2))
except NameError:
    # Python 3.x: build single-byte bytes objects and alias the 2.x names.
    for i in range(256):
        BYTE_REVERSAL_DICT[i] = bytes([int("{0:08b}".format(i)[::-1], 2)])
    from io import IOBase as file
    xrange = range
    basestring = str

# Python 2.x octals start with '0', in Python 3 it's '0o'
LEADING_OCT_CHARS = len(oct(1)) - 1

def tidy_input_string(s):
    """Return string made lowercase and with all whitespace removed."""
    s = ''.join(s.split()).lower()
    return s

INIT_NAMES = ('uint', 'int', 'ue', 'se', 'sie', 'uie', 'hex', 'oct', 'bin', 'bits',
              'uintbe', 'intbe', 'uintle', 'intle', 'uintne', 'intne',
              'float', 'floatbe', 'floatle', 'floatne', 'bytes', 'bool', 'pad')

# NOTE(review): the named groups below ('name', 'len', 'value', 'factor',
# 'token', 'endian', 'fmt') had been stripped from the patterns (bare '(?P'
# is not valid regex syntax and would fail to compile). They are restored
# here to match the m.group('name') / m.group('len') / m.group('value') /
# m.group('factor') / m.group('endian') / m.group('fmt') calls made by
# tokenparser() and structparser().
TOKEN_RE = re.compile(r'(?P<name>' + '|'.join(INIT_NAMES) +
                      r')((:(?P<len>[^=]+)))?(=(?P<value>.*))?$', re.IGNORECASE)
DEFAULT_UINT = re.compile(r'(?P<len>[^=]+)?(=(?P<value>.*))?$', re.IGNORECASE)

MULTIPLICATIVE_RE = re.compile(r'(?P<factor>.*)\*(?P<token>.+)')

# Hex, oct or binary literals
LITERAL_RE = re.compile(r'(?P<name>0(x|o|b))(?P<value>.+)', re.IGNORECASE)

# An endianness indicator followed by one or more struct.pack codes
STRUCT_PACK_RE = re.compile(r'(?P<endian><|>|@)?(?P<fmt>(?:\d*[bBhHlLqQfd])+)$')

# A number followed by a single character struct.pack code
STRUCT_SPLIT_RE = re.compile(r'\d*[bBhHlLqQfd]')

# These replicate the struct.pack codes
# Big-endian
REPLACEMENTS_BE = {'b': 'intbe:8', 'B': 'uintbe:8',
                   'h': 'intbe:16', 'H': 'uintbe:16',
                   'l': 'intbe:32', 'L': 'uintbe:32',
                   'q': 'intbe:64', 'Q': 'uintbe:64',
                   'f': 'floatbe:32', 'd': 'floatbe:64'}
# Little-endian
REPLACEMENTS_LE = {'b': 'intle:8', 'B': 'uintle:8',
                   'h': 'intle:16', 'H': 'uintle:16',
                   'l': 'intle:32', 'L': 'uintle:32',
                   'q': 'intle:64', 'Q': 'uintle:64',
                   'f': 'floatle:32', 'd': 'floatle:64'}

# Size in bytes of all the pack codes.
PACK_CODE_SIZE = {'b': 1, 'B': 1, 'h': 2, 'H': 2, 'l': 4, 'L': 4,
                  'q': 8, 'Q': 8, 'f': 4, 'd': 8}

_tokenname_to_initialiser = {'hex': 'hex', '0x': 'hex', '0X': 'hex', 'oct': 'oct',
                             '0o': 'oct', '0O': 'oct', 'bin': 'bin', '0b': 'bin',
                             '0B': 'bin', 'bits': 'auto', 'bytes': 'bytes', 'pad': 'pad'}

def structparser(token):
    """Parse struct-like format string token into sub-token list."""
    m = STRUCT_PACK_RE.match(token)
    if not m:
        return [token]
    else:
        endian = m.group('endian')
        if endian is None:
            return [token]
        # Split the format string into a list of 'q', '4h' etc.
        formatlist = re.findall(STRUCT_SPLIT_RE, m.group('fmt'))
        # Now deal with multiplicative factors, 4h -> hhhh etc.
        fmt = ''.join([f[-1] * int(f[:-1]) if len(f) != 1 else
                       f for f in formatlist])
        if endian == '@':
            # Native endianness
            if byteorder == 'little':
                endian = '<'
            else:
                assert byteorder == 'big'
                endian = '>'
        if endian == '<':
            tokens = [REPLACEMENTS_LE[c] for c in fmt]
        else:
            assert endian == '>'
            tokens = [REPLACEMENTS_BE[c] for c in fmt]
        return tokens

def tokenparser(fmt, keys=None, token_cache={}):
    """Divide the format string into tokens and parse them.

    Return stretchy token and list of [initialiser, length, value]
    initialiser is one of: hex, oct, bin, uint, int, se, ue, 0x, 0o, 0b etc.
    length is None if not known, as is value.

    If the token is in the keyword dictionary (keys) then it counts as a
    special case and isn't messed with.

    tokens must be of the form: [factor*][initialiser][:][length][=value]

    """
    # token_cache is a deliberate mutable-default memoisation cache, shared
    # across calls and bounded below by CACHE_SIZE (defined at module level).
    try:
        return token_cache[(fmt, keys)]
    except KeyError:
        token_key = (fmt, keys)
        # Very inefficient expanding of brackets.
        fmt = expand_brackets(fmt)
        # Split tokens by ',' and remove whitespace
        # The meta_tokens can either be ordinary single tokens or multiple
        # struct-format token strings.
        meta_tokens = (''.join(f.split()) for f in fmt.split(','))
        return_values = []
        stretchy_token = False
        for meta_token in meta_tokens:
            # See if it has a multiplicative factor
            m = MULTIPLICATIVE_RE.match(meta_token)
            if not m:
                factor = 1
            else:
                factor = int(m.group('factor'))
                meta_token = m.group('token')
            # See if it's a struct-like format
            tokens = structparser(meta_token)
            ret_vals = []
            for token in tokens:
                if keys and token in keys:
                    # Don't bother parsing it, it's a keyword argument
                    ret_vals.append([token, None, None])
                    continue
                value = length = None
                if token == '':
                    continue
                # Match literal tokens of the form 0x... 0o... and 0b...
                m = LITERAL_RE.match(token)
                if m:
                    name = m.group('name')
                    value = m.group('value')
                    ret_vals.append([name, length, value])
                    continue
                # Match everything else:
                m1 = TOKEN_RE.match(token)
                if not m1:
                    # and if you don't specify a 'name' then the default is 'uint':
                    m2 = DEFAULT_UINT.match(token)
                    if not m2:
                        raise ValueError("Don't understand token '{0}'.".format(token))
                if m1:
                    name = m1.group('name')
                    length = m1.group('len')
                    if m1.group('value'):
                        value = m1.group('value')
                else:
                    assert m2
                    name = 'uint'
                    length = m2.group('len')
                    if m2.group('value'):
                        value = m2.group('value')
                if name == 'bool':
                    if length is not None:
                        raise ValueError("You can't specify a length with bool tokens - they are always one bit.")
                    length = 1
                if length is None and name not in ('se', 'ue', 'sie', 'uie'):
                    stretchy_token = True
                if length is not None:
                    # Try converting length to int, otherwise check it's a key.
                    try:
                        length = int(length)
                        if length < 0:
                            # Error is the module's base exception class,
                            # defined earlier in this file.
                            raise Error
                        # For the 'bytes' token convert length to bits.
                        if name == 'bytes':
                            length *= 8
                    except Error:
                        raise ValueError("Can't read a token with a negative length.")
                    except ValueError:
                        if not keys or length not in keys:
                            raise ValueError("Don't understand length '{0}' of token.".format(length))
                ret_vals.append([name, length, value])
            # This multiplies by the multiplicative factor, but this means that
            # we can't allow keyword values as multipliers (e.g. n*uint:8).
            # The only way to do this would be to return the factor in some fashion
            # (we can't use the key's value here as it would mean that we couldn't
            # sensibly continue to cache the function's results. (TODO).
            return_values.extend(ret_vals * factor)
        return_values = [tuple(x) for x in return_values]
        if len(token_cache) < CACHE_SIZE:
            token_cache[token_key] = stretchy_token, return_values
        return stretchy_token, return_values

# Looks for first number*(
# NOTE(review): the named group 'factor' had been stripped from this pattern
# ('(?P\d+)' is invalid regex); restored to match the m.group('factor') and
# m.start('factor') calls in expand_brackets() below.
BRACKET_RE = re.compile(r'(?P<factor>\d+)\*\(')

def expand_brackets(s):
    """Remove whitespace and expand all brackets."""
    s = ''.join(s.split())
    while True:
        start = s.find('(')
        if start == -1:
            break
        count = 1  # Number of hanging open brackets
        p = start + 1
        while p < len(s):
            if s[p] == '(':
                count += 1
            if s[p] == ')':
                count -= 1
            if not count:
                break
            p += 1
        if count:
            raise ValueError("Unbalanced parenthesis in '{0}'.".format(s))
        if start == 0 or s[start - 1] != '*':
            # Plain grouping brackets: just remove them.
            s = s[0:start] + s[start + 1:p] + s[p + 1:]
        else:
            # factor*( ... ): repeat the bracketed contents factor times.
            m = BRACKET_RE.search(s)
            if m:
                factor = int(m.group('factor'))
                matchstart = m.start('factor')
                s = s[0:matchstart] + (factor - 1) * (s[start + 1:p] + ',') + s[start + 1:p] + s[p + 1:]
            else:
                raise ValueError("Failed to parse '{0}'.".format(s))
    return s


# This converts a single octal digit to 3 bits.
+OCT_TO_BITS = ['{0:03b}'.format(i) for i in xrange(8)] + +# A dictionary of number of 1 bits contained in binary representation of any byte +BIT_COUNT = dict(zip(xrange(256), [bin(i).count('1') for i in xrange(256)])) + + +class Bits(object): + """A container holding an immutable sequence of bits. + + For a mutable container use the BitArray class instead. + + Methods: + + all() -- Check if all specified bits are set to 1 or 0. + any() -- Check if any of specified bits are set to 1 or 0. + count() -- Count the number of bits set to 1 or 0. + cut() -- Create generator of constant sized chunks. + endswith() -- Return whether the bitstring ends with a sub-string. + find() -- Find a sub-bitstring in the current bitstring. + findall() -- Find all occurrences of a sub-bitstring in the current bitstring. + join() -- Join bitstrings together using current bitstring. + rfind() -- Seek backwards to find a sub-bitstring. + split() -- Create generator of chunks split by a delimiter. + startswith() -- Return whether the bitstring starts with a sub-bitstring. + tobytes() -- Return bitstring as bytes, padding if needed. + tofile() -- Write bitstring to file, padding if needed. + unpack() -- Interpret bits using format string. + + Special methods: + + Also available are the operators [], ==, !=, +, *, ~, <<, >>, &, |, ^. + + Properties: + + bin -- The bitstring as a binary string. + bool -- For single bit bitstrings, interpret as True or False. + bytes -- The bitstring as a bytes object. + float -- Interpret as a floating point number. + floatbe -- Interpret as a big-endian floating point number. + floatle -- Interpret as a little-endian floating point number. + floatne -- Interpret as a native-endian floating point number. + hex -- The bitstring as a hexadecimal string. + int -- Interpret as a two's complement signed integer. + intbe -- Interpret as a big-endian signed integer. + intle -- Interpret as a little-endian signed integer. 
+ intne -- Interpret as a native-endian signed integer. + len -- Length of the bitstring in bits. + oct -- The bitstring as an octal string. + se -- Interpret as a signed exponential-Golomb code. + ue -- Interpret as an unsigned exponential-Golomb code. + sie -- Interpret as a signed interleaved exponential-Golomb code. + uie -- Interpret as an unsigned interleaved exponential-Golomb code. + uint -- Interpret as a two's complement unsigned integer. + uintbe -- Interpret as a big-endian unsigned integer. + uintle -- Interpret as a little-endian unsigned integer. + uintne -- Interpret as a native-endian unsigned integer. + + """ + + __slots__ = ('_datastore') + + def __init__(self, auto=None, length=None, offset=None, **kwargs): + """Either specify an 'auto' initialiser: + auto -- a string of comma separated tokens, an integer, a file object, + a bytearray, a boolean iterable or another bitstring. + + Or initialise via **kwargs with one (and only one) of: + bytes -- raw data as a string, for example read from a binary file. + bin -- binary string representation, e.g. '0b001010'. + hex -- hexadecimal string representation, e.g. '0x2ef' + oct -- octal string representation, e.g. '0o777'. + uint -- an unsigned integer. + int -- a signed integer. + float -- a floating point number. + uintbe -- an unsigned big-endian whole byte integer. + intbe -- a signed big-endian whole byte integer. + floatbe - a big-endian floating point number. + uintle -- an unsigned little-endian whole byte integer. + intle -- a signed little-endian whole byte integer. + floatle -- a little-endian floating point number. + uintne -- an unsigned native-endian whole byte integer. + intne -- a signed native-endian whole byte integer. + floatne -- a native-endian floating point number. + se -- a signed exponential-Golomb code. + ue -- an unsigned exponential-Golomb code. + sie -- a signed interleaved exponential-Golomb code. + uie -- an unsigned interleaved exponential-Golomb code. 
+ bool -- a boolean (True or False). + filename -- a file which will be opened in binary read-only mode. + + Other keyword arguments: + length -- length of the bitstring in bits, if needed and appropriate. + It must be supplied for all integer and float initialisers. + offset -- bit offset to the data. These offset bits are + ignored and this is mainly intended for use when + initialising using 'bytes' or 'filename'. + + """ + pass + + def __new__(cls, auto=None, length=None, offset=None, _cache={}, **kwargs): + # For instances auto-initialised with a string we intern the + # instance for re-use. + try: + if isinstance(auto, basestring): + try: + return _cache[auto] + except KeyError: + x = object.__new__(Bits) + try: + _, tokens = tokenparser(auto) + except ValueError as e: + raise CreationError(*e.args) + x._datastore = ConstByteStore(bytearray(0), 0, 0) + for token in tokens: + x._datastore._appendstore(Bits._init_with_token(*token)._datastore) + assert x._assertsanity() + if len(_cache) < CACHE_SIZE: + _cache[auto] = x + return x + if isinstance(auto, Bits): + return auto + except TypeError: + pass + x = super(Bits, cls).__new__(cls) + x._initialise(auto, length, offset, **kwargs) + return x + + def _initialise(self, auto, length, offset, **kwargs): + if length is not None and length < 0: + raise CreationError("bitstring length cannot be negative.") + if offset is not None and offset < 0: + raise CreationError("offset must be >= 0.") + if auto is not None: + self._initialise_from_auto(auto, length, offset) + return + if not kwargs: + # No initialisers, so initialise with nothing or zero bits + if length is not None and length != 0: + data = bytearray((length + 7) // 8) + self._setbytes_unsafe(data, length, 0) + return + self._setbytes_unsafe(bytearray(0), 0, 0) + return + k, v = kwargs.popitem() + try: + init_without_length_or_offset[k](self, v) + if length is not None or offset is not None: + raise CreationError("Cannot use length or offset with this 
initialiser.") + except KeyError: + try: + init_with_length_only[k](self, v, length) + if offset is not None: + raise CreationError("Cannot use offset with this initialiser.") + except KeyError: + if offset is None: + offset = 0 + try: + init_with_length_and_offset[k](self, v, length, offset) + except KeyError: + raise CreationError("Unrecognised keyword '{0}' used to initialise.", k) + + def _initialise_from_auto(self, auto, length, offset): + if offset is None: + offset = 0 + self._setauto(auto, length, offset) + return + + def __copy__(self): + """Return a new copy of the Bits for the copy module.""" + # Note that if you want a new copy (different ID), use _copy instead. + # The copy can return self as it's immutable. + return self + + def __lt__(self, other): + raise TypeError("unorderable type: {0}".format(type(self).__name__)) + + def __gt__(self, other): + raise TypeError("unorderable type: {0}".format(type(self).__name__)) + + def __le__(self, other): + raise TypeError("unorderable type: {0}".format(type(self).__name__)) + + def __ge__(self, other): + raise TypeError("unorderable type: {0}".format(type(self).__name__)) + + def __add__(self, bs): + """Concatenate bitstrings and return new bitstring. + + bs -- the bitstring to append. + + """ + bs = Bits(bs) + if bs.len <= self.len: + s = self._copy() + s._append(bs) + else: + s = bs._copy() + s = self.__class__(s) + s._prepend(self) + return s + + def __radd__(self, bs): + """Append current bitstring to bs and return new bitstring. + + bs -- the string for the 'auto' initialiser that will be appended to. + + """ + bs = self._converttobitstring(bs) + return bs.__add__(self) + + def __getitem__(self, key): + """Return a new bitstring representing a slice of the current bitstring. + + Indices are in units of the step parameter (default 1 bit). + Stepping is used to specify the number of bits in each item. 
+ + >>> print BitArray('0b00110')[1:4] + '0b011' + >>> print BitArray('0x00112233')[1:3:8] + '0x1122' + + """ + length = self.len + try: + step = key.step if key.step is not None else 1 + except AttributeError: + # single element + if key < 0: + key += length + if not 0 <= key < length: + raise IndexError("Slice index out of range.") + # Single bit, return True or False + return self._datastore.getbit(key) + else: + if step != 1: + # convert to binary string and use string slicing + bs = self.__class__() + bs._setbin_unsafe(self._getbin().__getitem__(key)) + return bs + start, stop = 0, length + if key.start is not None: + start = key.start + if key.start < 0: + start += stop + if key.stop is not None: + stop = key.stop + if key.stop < 0: + stop += length + start = max(start, 0) + stop = min(stop, length) + if start < stop: + return self._slice(start, stop) + else: + return self.__class__() + + def __len__(self): + """Return the length of the bitstring in bits.""" + return self._getlength() + + def __str__(self): + """Return approximate string representation of bitstring for printing. + + Short strings will be given wholly in hexadecimal or binary. Longer + strings may be part hexadecimal and part binary. Very long strings will + be truncated with '...'. + + """ + length = self.len + if not length: + return '' + if length > MAX_CHARS * 4: + # Too long for hex. Truncate... 
+ return ''.join(('0x', self._readhex(MAX_CHARS * 4, 0), '...')) + # If it's quite short and we can't do hex then use bin + if length < 32 and length % 4 != 0: + return '0b' + self.bin + # If we can use hex then do so + if not length % 4: + return '0x' + self.hex + # Otherwise first we do as much as we can in hex + # then add on 1, 2 or 3 bits on at the end + bits_at_end = length % 4 + return ''.join(('0x', self._readhex(length - bits_at_end, 0), + ', ', '0b', + self._readbin(bits_at_end, length - bits_at_end))) + + def __repr__(self): + """Return representation that could be used to recreate the bitstring. + + If the returned string is too long it will be truncated. See __str__(). + + """ + length = self.len + if isinstance(self._datastore._rawarray, MmapByteArray): + offsetstring = '' + if self._datastore.byteoffset or self._offset: + offsetstring = ", offset=%d" % (self._datastore._rawarray.byteoffset * 8 + self._offset) + lengthstring = ", length=%d" % length + return "{0}(filename='{1}'{2}{3})".format(self.__class__.__name__, + self._datastore._rawarray.source.name, lengthstring, offsetstring) + else: + s = self.__str__() + lengthstring = '' + if s.endswith('...'): + lengthstring = " # length={0}".format(length) + return "{0}('{1}'){2}".format(self.__class__.__name__, s, lengthstring) + + def __eq__(self, bs): + """Return True if two bitstrings have the same binary representation. + + >>> BitArray('0b1110') == '0xe' + True + + """ + try: + bs = Bits(bs) + except TypeError: + return False + return equal(self._datastore, bs._datastore) + + def __ne__(self, bs): + """Return False if two bitstrings have the same binary representation. + + >>> BitArray('0b111') == '0x7' + False + + """ + return not self.__eq__(bs) + + def __invert__(self): + """Return bitstring with every bit inverted. + + Raises Error if the bitstring is empty. 
+ + """ + if not self.len: + raise Error("Cannot invert empty bitstring.") + s = self._copy() + s._invert_all() + return s + + def __lshift__(self, n): + """Return bitstring with bits shifted by n to the left. + + n -- the number of bits to shift. Must be >= 0. + + """ + if n < 0: + raise ValueError("Cannot shift by a negative amount.") + if not self.len: + raise ValueError("Cannot shift an empty bitstring.") + n = min(n, self.len) + s = self._slice(n, self.len) + s._append(Bits(n)) + return s + + def __rshift__(self, n): + """Return bitstring with bits shifted by n to the right. + + n -- the number of bits to shift. Must be >= 0. + + """ + if n < 0: + raise ValueError("Cannot shift by a negative amount.") + if not self.len: + raise ValueError("Cannot shift an empty bitstring.") + if not n: + return self._copy() + s = self.__class__(length=min(n, self.len)) + s._append(self[:-n]) + return s + + def __mul__(self, n): + """Return bitstring consisting of n concatenations of self. + + Called for expression of the form 'a = b*3'. + n -- The number of concatenations. Must be >= 0. + + """ + if n < 0: + raise ValueError("Cannot multiply by a negative integer.") + if not n: + return self.__class__() + s = self._copy() + s._imul(n) + return s + + def __rmul__(self, n): + """Return bitstring consisting of n concatenations of self. + + Called for expressions of the form 'a = 3*b'. + n -- The number of concatenations. Must be >= 0. + + """ + return self.__mul__(n) + + def __and__(self, bs): + """Bit-wise 'and' between two bitstrings. Returns new bitstring. + + bs -- The bitstring to '&' with. + + Raises ValueError if the two bitstrings have differing lengths. + + """ + bs = Bits(bs) + if self.len != bs.len: + raise ValueError("Bitstrings must have the same length " + "for & operator.") + s = self._copy() + s._iand(bs) + return s + + def __rand__(self, bs): + """Bit-wise 'and' between two bitstrings. Returns new bitstring. + + bs -- the bitstring to '&' with. 
+ + Raises ValueError if the two bitstrings have differing lengths. + + """ + return self.__and__(bs) + + def __or__(self, bs): + """Bit-wise 'or' between two bitstrings. Returns new bitstring. + + bs -- The bitstring to '|' with. + + Raises ValueError if the two bitstrings have differing lengths. + + """ + bs = Bits(bs) + if self.len != bs.len: + raise ValueError("Bitstrings must have the same length " + "for | operator.") + s = self._copy() + s._ior(bs) + return s + + def __ror__(self, bs): + """Bit-wise 'or' between two bitstrings. Returns new bitstring. + + bs -- The bitstring to '|' with. + + Raises ValueError if the two bitstrings have differing lengths. + + """ + return self.__or__(bs) + + def __xor__(self, bs): + """Bit-wise 'xor' between two bitstrings. Returns new bitstring. + + bs -- The bitstring to '^' with. + + Raises ValueError if the two bitstrings have differing lengths. + + """ + bs = Bits(bs) + if self.len != bs.len: + raise ValueError("Bitstrings must have the same length " + "for ^ operator.") + s = self._copy() + s._ixor(bs) + return s + + def __rxor__(self, bs): + """Bit-wise 'xor' between two bitstrings. Returns new bitstring. + + bs -- The bitstring to '^' with. + + Raises ValueError if the two bitstrings have differing lengths. + + """ + return self.__xor__(bs) + + def __contains__(self, bs): + """Return whether bs is contained in the current bitstring. + + bs -- The bitstring to search for. + + """ + # Don't want to change pos + try: + pos = self._pos + except AttributeError: + pass + found = Bits.find(self, bs, bytealigned=False) + try: + self._pos = pos + except AttributeError: + pass + return bool(found) + + def __hash__(self): + """Return an integer hash of the object.""" + # We can't in general hash the whole bitstring (it could take hours!) + # So instead take some bits from the start and end. + if self.len <= 160: + # Use the whole bitstring. 
+ shorter = self + else: + # Take 10 bytes from start and end + shorter = self[:80] + self[-80:] + h = 0 + for byte in shorter.tobytes(): + try: + h = (h << 4) + ord(byte) + except TypeError: + # Python 3 + h = (h << 4) + byte + g = h & 0xf0000000 + if g & (1 << 31): + h ^= (g >> 24) + h ^= g + return h % 1442968193 + + # This is only used in Python 2.x... + def __nonzero__(self): + """Return True if any bits are set to 1, otherwise return False.""" + return self.any(True) + + # ...whereas this is used in Python 3.x + __bool__ = __nonzero__ + + def _assertsanity(self): + """Check internal self consistency as a debugging aid.""" + assert self.len >= 0 + assert 0 <= self._offset, "offset={0}".format(self._offset) + assert (self.len + self._offset + 7) // 8 == self._datastore.bytelength + self._datastore.byteoffset + return True + + @classmethod + def _init_with_token(cls, name, token_length, value): + if token_length is not None: + token_length = int(token_length) + if token_length == 0: + return cls() + # For pad token just return the length in zero bits + if name == 'pad': + return cls(token_length) + + if value is None: + if token_length is None: + error = "Token has no value ({0}=???).".format(name) + else: + error = "Token has no value ({0}:{1}=???).".format(name, token_length) + raise ValueError(error) + try: + b = cls(**{_tokenname_to_initialiser[name]: value}) + except KeyError: + if name in ('se', 'ue', 'sie', 'uie'): + b = cls(**{name: int(value)}) + elif name in ('uint', 'int', 'uintbe', 'intbe', 'uintle', 'intle', 'uintne', 'intne'): + b = cls(**{name: int(value), 'length': token_length}) + elif name in ('float', 'floatbe', 'floatle', 'floatne'): + b = cls(**{name: float(value), 'length': token_length}) + elif name == 'bool': + if value in (1, 'True', '1'): + b = cls(bool=True) + elif value in (0, 'False', '0'): + b = cls(bool=False) + else: + raise CreationError("bool token can only be 'True' or 'False'.") + else: + raise CreationError("Can't parse token 
name {0}.", name) + if token_length is not None and b.len != token_length: + msg = "Token with length {0} packed with value of length {1} ({2}:{3}={4})." + raise CreationError(msg, token_length, b.len, name, token_length, value) + return b + + def _clear(self): + """Reset the bitstring to an empty state.""" + self._datastore = ByteStore(bytearray(0)) + + def _setauto(self, s, length, offset): + """Set bitstring from a bitstring, file, bool, integer, iterable or string.""" + # As s can be so many different things it's important to do the checks + # in the correct order, as some types are also other allowed types. + # So basestring must be checked before Iterable + # and bytes/bytearray before Iterable but after basestring! + if isinstance(s, Bits): + if length is None: + length = s.len - offset + self._setbytes_unsafe(s._datastore.rawbytes, length, s._offset + offset) + return + if isinstance(s, file): + if offset is None: + offset = 0 + if length is None: + length = os.path.getsize(s.name) * 8 - offset + byteoffset, offset = divmod(offset, 8) + bytelength = (length + byteoffset * 8 + offset + 7) // 8 - byteoffset + m = MmapByteArray(s, bytelength, byteoffset) + if length + byteoffset * 8 + offset > m.filelength * 8: + raise CreationError("File is not long enough for specified " + "length and offset.") + self._datastore = ConstByteStore(m, length, offset) + return + if length is not None: + raise CreationError("The length keyword isn't applicable to this initialiser.") + if offset: + raise CreationError("The offset keyword isn't applicable to this initialiser.") + if isinstance(s, basestring): + bs = self._converttobitstring(s) + assert bs._offset == 0 + self._setbytes_unsafe(bs._datastore.rawbytes, bs.length, 0) + return + if isinstance(s, (bytes, bytearray)): + self._setbytes_unsafe(bytearray(s), len(s) * 8, 0) + return + if isinstance(s, numbers.Integral): + # Initialise with s zero bits. + if s < 0: + msg = "Can't create bitstring of negative length {0}." 
+ raise CreationError(msg, s) + data = bytearray((s + 7) // 8) + self._datastore = ByteStore(data, s, 0) + return + if isinstance(s, collections.Iterable): + # Evaluate each item as True or False and set bits to 1 or 0. + self._setbin_unsafe(''.join(str(int(bool(x))) for x in s)) + return + raise TypeError("Cannot initialise bitstring from {0}.".format(type(s))) + + def _setfile(self, filename, length, offset): + """Use file as source of bits.""" + source = open(filename, 'rb') + if offset is None: + offset = 0 + if length is None: + length = os.path.getsize(source.name) * 8 - offset + byteoffset, offset = divmod(offset, 8) + bytelength = (length + byteoffset * 8 + offset + 7) // 8 - byteoffset + m = MmapByteArray(source, bytelength, byteoffset) + if length + byteoffset * 8 + offset > m.filelength * 8: + raise CreationError("File is not long enough for specified " + "length and offset.") + self._datastore = ConstByteStore(m, length, offset) + + def _setbytes_safe(self, data, length=None, offset=0): + """Set the data from a string.""" + data = bytearray(data) + if length is None: + # Use to the end of the data + length = len(data)*8 - offset + self._datastore = ByteStore(data, length, offset) + else: + if length + offset > len(data) * 8: + msg = "Not enough data present. Need {0} bits, have {1}." + raise CreationError(msg, length + offset, len(data) * 8) + if length == 0: + self._datastore = ByteStore(bytearray(0)) + else: + self._datastore = ByteStore(data, length, offset) + + def _setbytes_unsafe(self, data, length, offset): + """Unchecked version of _setbytes_safe.""" + self._datastore = ByteStore(data[:], length, offset) + assert self._assertsanity() + + def _readbytes(self, length, start): + """Read bytes and return them. 
Note that length is in bits.""" + assert length % 8 == 0 + assert start + length <= self.len + if not (start + self._offset) % 8: + return bytes(self._datastore.getbyteslice((start + self._offset) // 8, + (start + self._offset + length) // 8)) + return self._slice(start, start + length).tobytes() + + def _getbytes(self): + """Return the data as an ordinary string.""" + if self.len % 8: + raise InterpretError("Cannot interpret as bytes unambiguously - " + "not multiple of 8 bits.") + return self._readbytes(self.len, 0) + + def _setuint(self, uint, length=None): + """Reset the bitstring to have given unsigned int interpretation.""" + try: + if length is None: + # Use the whole length. Deliberately not using .len here. + length = self._datastore.bitlength + except AttributeError: + # bitstring doesn't have a _datastore as it hasn't been created! + pass + # TODO: All this checking code should be hoisted out of here! + if length is None or length == 0: + raise CreationError("A non-zero length must be specified with a " + "uint initialiser.") + if uint >= (1 << length): + msg = "{0} is too large an unsigned integer for a bitstring of length {1}. "\ + "The allowed range is [0, {2}]." + raise CreationError(msg, uint, length, (1 << length) - 1) + if uint < 0: + raise CreationError("uint cannot be initialsed by a negative number.") + s = hex(uint)[2:] + s = s.rstrip('L') + if len(s) & 1: + s = '0' + s + try: + data = bytes.fromhex(s) + except AttributeError: + # the Python 2.x way + data = binascii.unhexlify(s) + # Now add bytes as needed to get the right length. 
+ extrabytes = ((length + 7) // 8) - len(data) + if extrabytes > 0: + data = b'\x00' * extrabytes + data + offset = 8 - (length % 8) + if offset == 8: + offset = 0 + self._setbytes_unsafe(bytearray(data), length, offset) + + def _readuint(self, length, start): + """Read bits and interpret as an unsigned int.""" + if not length: + raise InterpretError("Cannot interpret a zero length bitstring " + "as an integer.") + offset = self._offset + startbyte = (start + offset) // 8 + endbyte = (start + offset + length - 1) // 8 + + b = binascii.hexlify(bytes(self._datastore.getbyteslice(startbyte, endbyte + 1))) + assert b + i = int(b, 16) + final_bits = 8 - ((start + offset + length) % 8) + if final_bits != 8: + i >>= final_bits + i &= (1 << length) - 1 + return i + + def _getuint(self): + """Return data as an unsigned int.""" + return self._readuint(self.len, 0) + + def _setint(self, int_, length=None): + """Reset the bitstring to have given signed int interpretation.""" + # If no length given, and we've previously been given a length, use it. + if length is None and hasattr(self, 'len') and self.len != 0: + length = self.len + if length is None or length == 0: + raise CreationError("A non-zero length must be specified with an int initialiser.") + if int_ >= (1 << (length - 1)) or int_ < -(1 << (length - 1)): + raise CreationError("{0} is too large a signed integer for a bitstring of length {1}. " + "The allowed range is [{2}, {3}].", int_, length, -(1 << (length - 1)), + (1 << (length - 1)) - 1) + if int_ >= 0: + self._setuint(int_, length) + return + # TODO: We should decide whether to just use the _setuint, or to do the bit flipping, + # based upon which will be quicker. If the -ive number is less than half the maximum + # possible then it's probably quicker to do the bit flipping... + + # Do the 2's complement thing. Add one, set to minus number, then flip bits. 
+ int_ += 1 + self._setuint(-int_, length) + self._invert_all() + + def _readint(self, length, start): + """Read bits and interpret as a signed int""" + ui = self._readuint(length, start) + if not ui >> (length - 1): + # Top bit not set, number is positive + return ui + # Top bit is set, so number is negative + tmp = (~(ui - 1)) & ((1 << length) - 1) + return -tmp + + def _getint(self): + """Return data as a two's complement signed int.""" + return self._readint(self.len, 0) + + def _setuintbe(self, uintbe, length=None): + """Set the bitstring to a big-endian unsigned int interpretation.""" + if length is not None and length % 8 != 0: + raise CreationError("Big-endian integers must be whole-byte. " + "Length = {0} bits.", length) + self._setuint(uintbe, length) + + def _readuintbe(self, length, start): + """Read bits and interpret as a big-endian unsigned int.""" + if length % 8: + raise InterpretError("Big-endian integers must be whole-byte. " + "Length = {0} bits.", length) + return self._readuint(length, start) + + def _getuintbe(self): + """Return data as a big-endian two's complement unsigned int.""" + return self._readuintbe(self.len, 0) + + def _setintbe(self, intbe, length=None): + """Set bitstring to a big-endian signed int interpretation.""" + if length is not None and length % 8 != 0: + raise CreationError("Big-endian integers must be whole-byte. " + "Length = {0} bits.", length) + self._setint(intbe, length) + + def _readintbe(self, length, start): + """Read bits and interpret as a big-endian signed int.""" + if length % 8: + raise InterpretError("Big-endian integers must be whole-byte. " + "Length = {0} bits.", length) + return self._readint(length, start) + + def _getintbe(self): + """Return data as a big-endian two's complement signed int.""" + return self._readintbe(self.len, 0) + + def _setuintle(self, uintle, length=None): + if length is not None and length % 8 != 0: + raise CreationError("Little-endian integers must be whole-byte. 
" + "Length = {0} bits.", length) + self._setuint(uintle, length) + self._reversebytes(0, self.len) + + def _readuintle(self, length, start): + """Read bits and interpret as a little-endian unsigned int.""" + if length % 8: + raise InterpretError("Little-endian integers must be whole-byte. " + "Length = {0} bits.", length) + assert start + length <= self.len + absolute_pos = start + self._offset + startbyte, offset = divmod(absolute_pos, 8) + val = 0 + if not offset: + endbyte = (absolute_pos + length - 1) // 8 + chunksize = 4 # for 'L' format + while endbyte - chunksize + 1 >= startbyte: + val <<= 8 * chunksize + val += struct.unpack('> (length - 1): + # Top bit not set, number is positive + return ui + # Top bit is set, so number is negative + tmp = (~(ui - 1)) & ((1 << length) - 1) + return -tmp + + def _getintle(self): + return self._readintle(self.len, 0) + + def _setfloat(self, f, length=None): + # If no length given, and we've previously been given a length, use it. + if length is None and hasattr(self, 'len') and self.len != 0: + length = self.len + if length is None or length == 0: + raise CreationError("A non-zero length must be specified with a " + "float initialiser.") + if length == 32: + b = struct.pack('>f', f) + elif length == 64: + b = struct.pack('>d', f) + else: + raise CreationError("floats can only be 32 or 64 bits long, " + "not {0} bits", length) + self._setbytes_unsafe(bytearray(b), length, 0) + + def _readfloat(self, length, start): + """Read bits and interpret as a float.""" + if not (start + self._offset) % 8: + startbyte = (start + self._offset) // 8 + if length == 32: + f, = struct.unpack('>f', bytes(self._datastore.getbyteslice(startbyte, startbyte + 4))) + elif length == 64: + f, = struct.unpack('>d', bytes(self._datastore.getbyteslice(startbyte, startbyte + 8))) + else: + if length == 32: + f, = struct.unpack('>f', self._readbytes(32, start)) + elif length == 64: + f, = struct.unpack('>d', self._readbytes(64, start)) + try: + return 
f + except NameError: + raise InterpretError("floats can only be 32 or 64 bits long, not {0} bits", length) + + def _getfloat(self): + """Interpret the whole bitstring as a float.""" + return self._readfloat(self.len, 0) + + def _setfloatle(self, f, length=None): + # If no length given, and we've previously been given a length, use it. + if length is None and hasattr(self, 'len') and self.len != 0: + length = self.len + if length is None or length == 0: + raise CreationError("A non-zero length must be specified with a " + "float initialiser.") + if length == 32: + b = struct.pack(' 0: + tmp >>= 1 + leadingzeros += 1 + remainingpart = i + 1 - (1 << leadingzeros) + binstring = '0' * leadingzeros + '1' + Bits(uint=remainingpart, + length=leadingzeros).bin + self._setbin_unsafe(binstring) + + def _readue(self, pos): + """Return interpretation of next bits as unsigned exponential-Golomb code. + + Raises ReadError if the end of the bitstring is encountered while + reading the code. + + """ + oldpos = pos + try: + while not self[pos]: + pos += 1 + except IndexError: + raise ReadError("Read off end of bitstring trying to read code.") + leadingzeros = pos - oldpos + codenum = (1 << leadingzeros) - 1 + if leadingzeros > 0: + if pos + leadingzeros + 1 > self.len: + raise ReadError("Read off end of bitstring trying to read code.") + codenum += self._readuint(leadingzeros, pos + 1) + pos += leadingzeros + 1 + else: + assert codenum == 0 + pos += 1 + return codenum, pos + + def _getue(self): + """Return data as unsigned exponential-Golomb code. + + Raises InterpretError if bitstring is not a single exponential-Golomb code. 
+ + """ + try: + value, newpos = self._readue(0) + if value is None or newpos != self.len: + raise ReadError + except ReadError: + raise InterpretError("Bitstring is not a single exponential-Golomb code.") + return value + + def _setse(self, i): + """Initialise bitstring with signed exponential-Golomb code for integer i.""" + if i > 0: + u = (i * 2) - 1 + else: + u = -2 * i + self._setue(u) + + def _getse(self): + """Return data as signed exponential-Golomb code. + + Raises InterpretError if bitstring is not a single exponential-Golomb code. + + """ + try: + value, newpos = self._readse(0) + if value is None or newpos != self.len: + raise ReadError + except ReadError: + raise InterpretError("Bitstring is not a single exponential-Golomb code.") + return value + + def _readse(self, pos): + """Return interpretation of next bits as a signed exponential-Golomb code. + + Advances position to after the read code. + + Raises ReadError if the end of the bitstring is encountered while + reading the code. + + """ + codenum, pos = self._readue(pos) + m = (codenum + 1) // 2 + if not codenum % 2: + return -m, pos + else: + return m, pos + + def _setuie(self, i): + """Initialise bitstring with unsigned interleaved exponential-Golomb code for integer i. + + Raises CreationError if i < 0. + + """ + if i < 0: + raise CreationError("Cannot use negative initialiser for unsigned " + "interleaved exponential-Golomb.") + self._setbin_unsafe('1' if i == 0 else '0' + '0'.join(bin(i + 1)[3:]) + '1') + + def _readuie(self, pos): + """Return interpretation of next bits as unsigned interleaved exponential-Golomb code. + + Raises ReadError if the end of the bitstring is encountered while + reading the code. 
+ + """ + try: + codenum = 1 + while not self[pos]: + pos += 1 + codenum <<= 1 + codenum += self[pos] + pos += 1 + pos += 1 + except IndexError: + raise ReadError("Read off end of bitstring trying to read code.") + codenum -= 1 + return codenum, pos + + def _getuie(self): + """Return data as unsigned interleaved exponential-Golomb code. + + Raises InterpretError if bitstring is not a single exponential-Golomb code. + + """ + try: + value, newpos = self._readuie(0) + if value is None or newpos != self.len: + raise ReadError + except ReadError: + raise InterpretError("Bitstring is not a single interleaved exponential-Golomb code.") + return value + + def _setsie(self, i): + """Initialise bitstring with signed interleaved exponential-Golomb code for integer i.""" + if not i: + self._setbin_unsafe('1') + else: + self._setuie(abs(i)) + self._append(Bits([i < 0])) + + def _getsie(self): + """Return data as signed interleaved exponential-Golomb code. + + Raises InterpretError if bitstring is not a single exponential-Golomb code. + + """ + try: + value, newpos = self._readsie(0) + if value is None or newpos != self.len: + raise ReadError + except ReadError: + raise InterpretError("Bitstring is not a single interleaved exponential-Golomb code.") + return value + + def _readsie(self, pos): + """Return interpretation of next bits as a signed interleaved exponential-Golomb code. + + Advances position to after the read code. + + Raises ReadError if the end of the bitstring is encountered while + reading the code. + + """ + codenum, pos = self._readuie(pos) + if not codenum: + return 0, pos + try: + if self[pos]: + return -codenum, pos + 1 + else: + return codenum, pos + 1 + except IndexError: + raise ReadError("Read off end of bitstring trying to read code.") + + def _setbool(self, value): + # We deliberately don't want to have implicit conversions to bool here. + # If we did then it would be difficult to deal with the 'False' string. 
+ if value in (1, 'True'): + self._setbytes_unsafe(bytearray(b'\x80'), 1, 0) + elif value in (0, 'False'): + self._setbytes_unsafe(bytearray(b'\x00'), 1, 0) + else: + raise CreationError('Cannot initialise boolean with {0}.', value) + + def _getbool(self): + if self.length != 1: + msg = "For a bool interpretation a bitstring must be 1 bit long, not {0} bits." + raise InterpretError(msg, self.length) + return self[0] + + def _readbool(self, pos): + return self[pos], pos + 1 + + def _setbin_safe(self, binstring): + """Reset the bitstring to the value given in binstring.""" + binstring = tidy_input_string(binstring) + # remove any 0b if present + binstring = binstring.replace('0b', '') + self._setbin_unsafe(binstring) + + def _setbin_unsafe(self, binstring): + """Same as _setbin_safe, but input isn't sanity checked. binstring mustn't start with '0b'.""" + length = len(binstring) + # pad with zeros up to byte boundary if needed + boundary = ((length + 7) // 8) * 8 + padded_binstring = binstring + '0' * (boundary - length)\ + if len(binstring) < boundary else binstring + try: + bytelist = [int(padded_binstring[x:x + 8], 2) + for x in xrange(0, len(padded_binstring), 8)] + except ValueError: + raise CreationError("Invalid character in bin initialiser {0}.", binstring) + self._setbytes_unsafe(bytearray(bytelist), length, 0) + + def _readbin(self, length, start): + """Read bits and interpret as a binary string.""" + if not length: + return '' + # Get the byte slice containing our bit slice + startbyte, startoffset = divmod(start + self._offset, 8) + endbyte = (start + self._offset + length - 1) // 8 + b = self._datastore.getbyteslice(startbyte, endbyte + 1) + # Convert to a string of '0' and '1's (via a hex string an and int!) + try: + c = "{:0{}b}".format(int(binascii.hexlify(b), 16), 8*len(b)) + except TypeError: + # Hack to get Python 2.6 working + c = "{0:0{1}b}".format(int(binascii.hexlify(str(b)), 16), 8*len(b)) + # Finally chop off any extra bits. 
+ return c[startoffset:startoffset + length] + + def _getbin(self): + """Return interpretation as a binary string.""" + return self._readbin(self.len, 0) + + def _setoct(self, octstring): + """Reset the bitstring to have the value given in octstring.""" + octstring = tidy_input_string(octstring) + # remove any 0o if present + octstring = octstring.replace('0o', '') + binlist = [] + for i in octstring: + try: + if not 0 <= int(i) < 8: + raise ValueError + binlist.append(OCT_TO_BITS[int(i)]) + except ValueError: + raise CreationError("Invalid symbol '{0}' in oct initialiser.", i) + self._setbin_unsafe(''.join(binlist)) + + def _readoct(self, length, start): + """Read bits and interpret as an octal string.""" + if length % 3: + raise InterpretError("Cannot convert to octal unambiguously - " + "not multiple of 3 bits.") + if not length: + return '' + # Get main octal bit by converting from int. + # Strip starting 0 or 0o depending on Python version. + end = oct(self._readuint(length, start))[LEADING_OCT_CHARS:] + if end.endswith('L'): + end = end[:-1] + middle = '0' * (length // 3 - len(end)) + return middle + end + + def _getoct(self): + """Return interpretation as an octal string.""" + return self._readoct(self.len, 0) + + def _sethex(self, hexstring): + """Reset the bitstring to have the value given in hexstring.""" + hexstring = tidy_input_string(hexstring) + # remove any 0x if present + hexstring = hexstring.replace('0x', '') + length = len(hexstring) + if length % 2: + hexstring += '0' + try: + try: + data = bytearray.fromhex(hexstring) + except TypeError: + # Python 2.6 needs a unicode string (a bug). 2.7 and 3.x work fine. 
+ data = bytearray.fromhex(unicode(hexstring)) + except ValueError: + raise CreationError("Invalid symbol in hex initialiser.") + self._setbytes_unsafe(data, length * 4, 0) + + def _readhex(self, length, start): + """Read bits and interpret as a hex string.""" + if length % 4: + raise InterpretError("Cannot convert to hex unambiguously - " + "not multiple of 4 bits.") + if not length: + return '' + # This monstrosity is the only thing I could get to work for both 2.6 and 3.1. + # TODO: Is utf-8 really what we mean here? + s = str(binascii.hexlify(self._slice(start, start + length).tobytes()).decode('utf-8')) + # If there's one nibble too many then cut it off + return s[:-1] if (length // 4) % 2 else s + + def _gethex(self): + """Return the hexadecimal representation as a string prefixed with '0x'. + + Raises an InterpretError if the bitstring's length is not a multiple of 4. + + """ + return self._readhex(self.len, 0) + + def _getoffset(self): + return self._datastore.offset + + def _getlength(self): + """Return the length of the bitstring in bits.""" + return self._datastore.bitlength + + def _ensureinmemory(self): + """Ensure the data is held in memory, not in a file.""" + self._setbytes_unsafe(self._datastore.getbyteslice(0, self._datastore.bytelength), + self.len, self._offset) + + @classmethod + def _converttobitstring(cls, bs, offset=0, cache={}): + """Convert bs to a bitstring and return it. + + offset gives the suggested bit offset of first significant + bit, to optimise append etc. 
+ + """ + if isinstance(bs, Bits): + return bs + try: + return cache[(bs, offset)] + except KeyError: + if isinstance(bs, basestring): + b = cls() + try: + _, tokens = tokenparser(bs) + except ValueError as e: + raise CreationError(*e.args) + if tokens: + b._append(Bits._init_with_token(*tokens[0])) + b._datastore = offsetcopy(b._datastore, offset) + for token in tokens[1:]: + b._append(Bits._init_with_token(*token)) + assert b._assertsanity() + assert b.len == 0 or b._offset == offset + if len(cache) < CACHE_SIZE: + cache[(bs, offset)] = b + return b + except TypeError: + # Unhashable type + pass + return cls(bs) + + def _copy(self): + """Create and return a new copy of the Bits (always in memory).""" + s_copy = self.__class__() + s_copy._setbytes_unsafe(self._datastore.getbyteslice(0, self._datastore.bytelength), + self.len, self._offset) + return s_copy + + def _slice(self, start, end): + """Used internally to get a slice, without error checking.""" + if end == start: + return self.__class__() + offset = self._offset + startbyte, newoffset = divmod(start + offset, 8) + endbyte = (end + offset - 1) // 8 + bs = self.__class__() + bs._setbytes_unsafe(self._datastore.getbyteslice(startbyte, endbyte + 1), end - start, newoffset) + return bs + + def _readtoken(self, name, pos, length): + """Reads a token from the bitstring and returns the result.""" + if length is not None and int(length) > self.length - pos: + raise ReadError("Reading off the end of the data. " + "Tried to read {0} bits when only {1} available.".format(int(length), self.length - pos)) + try: + val = name_to_read[name](self, length, pos) + return val, pos + length + except KeyError: + if name == 'pad': + return None, pos + length + raise ValueError("Can't parse token {0}:{1}".format(name, length)) + except TypeError: + # This is for the 'ue', 'se' and 'bool' tokens. They will also return the new pos. 
+ return name_to_read[name](self, pos) + + def _append(self, bs): + """Append a bitstring to the current bitstring.""" + self._datastore._appendstore(bs._datastore) + + def _prepend(self, bs): + """Prepend a bitstring to the current bitstring.""" + self._datastore._prependstore(bs._datastore) + + def _reverse(self): + """Reverse all bits in-place.""" + # Reverse the contents of each byte + n = [BYTE_REVERSAL_DICT[b] for b in self._datastore.rawbytes] + # Then reverse the order of the bytes + n.reverse() + # The new offset is the number of bits that were unused at the end. + newoffset = 8 - (self._offset + self.len) % 8 + if newoffset == 8: + newoffset = 0 + self._setbytes_unsafe(bytearray().join(n), self.length, newoffset) + + def _truncatestart(self, bits): + """Truncate bits from the start of the bitstring.""" + assert 0 <= bits <= self.len + if not bits: + return + if bits == self.len: + self._clear() + return + bytepos, offset = divmod(self._offset + bits, 8) + self._setbytes_unsafe(self._datastore.getbyteslice(bytepos, self._datastore.bytelength), self.len - bits, + offset) + assert self._assertsanity() + + def _truncateend(self, bits): + """Truncate bits from the end of the bitstring.""" + assert 0 <= bits <= self.len + if not bits: + return + if bits == self.len: + self._clear() + return + newlength_in_bytes = (self._offset + self.len - bits + 7) // 8 + self._setbytes_unsafe(self._datastore.getbyteslice(0, newlength_in_bytes), self.len - bits, + self._offset) + assert self._assertsanity() + + def _insert(self, bs, pos): + """Insert bs at pos.""" + assert 0 <= pos <= self.len + if pos > self.len // 2: + # Inserting nearer end, so cut off end. + end = self._slice(pos, self.len) + self._truncateend(self.len - pos) + self._append(bs) + self._append(end) + else: + # Inserting nearer start, so cut off start. 
+ start = self._slice(0, pos) + self._truncatestart(pos) + self._prepend(bs) + self._prepend(start) + try: + self._pos = pos + bs.len + except AttributeError: + pass + assert self._assertsanity() + + def _overwrite(self, bs, pos): + """Overwrite with bs at pos.""" + assert 0 <= pos < self.len + if bs is self: + # Just overwriting with self, so do nothing. + assert pos == 0 + return + firstbytepos = (self._offset + pos) // 8 + lastbytepos = (self._offset + pos + bs.len - 1) // 8 + bytepos, bitoffset = divmod(self._offset + pos, 8) + if firstbytepos == lastbytepos: + mask = ((1 << bs.len) - 1) << (8 - bs.len - bitoffset) + self._datastore.setbyte(bytepos, self._datastore.getbyte(bytepos) & (~mask)) + d = offsetcopy(bs._datastore, bitoffset) + self._datastore.setbyte(bytepos, self._datastore.getbyte(bytepos) | (d.getbyte(0) & mask)) + else: + # Do first byte + mask = (1 << (8 - bitoffset)) - 1 + self._datastore.setbyte(bytepos, self._datastore.getbyte(bytepos) & (~mask)) + d = offsetcopy(bs._datastore, bitoffset) + self._datastore.setbyte(bytepos, self._datastore.getbyte(bytepos) | (d.getbyte(0) & mask)) + # Now do all the full bytes + self._datastore.setbyteslice(firstbytepos + 1, lastbytepos, d.getbyteslice(1, lastbytepos - firstbytepos)) + # and finally the last byte + bitsleft = (self._offset + pos + bs.len) % 8 + if not bitsleft: + bitsleft = 8 + mask = (1 << (8 - bitsleft)) - 1 + self._datastore.setbyte(lastbytepos, self._datastore.getbyte(lastbytepos) & mask) + self._datastore.setbyte(lastbytepos, + self._datastore.getbyte(lastbytepos) | (d.getbyte(d.bytelength - 1) & ~mask)) + assert self._assertsanity() + + def _delete(self, bits, pos): + """Delete bits at pos.""" + assert 0 <= pos <= self.len + assert pos + bits <= self.len + if not pos: + # Cutting bits off at the start. + self._truncatestart(bits) + return + if pos + bits == self.len: + # Cutting bits off at the end. 
+ self._truncateend(bits) + return + if pos > self.len - pos - bits: + # More bits before cut point than after it, so do bit shifting + # on the final bits. + end = self._slice(pos + bits, self.len) + assert self.len - pos > 0 + self._truncateend(self.len - pos) + self._append(end) + return + # More bits after the cut point than before it. + start = self._slice(0, pos) + self._truncatestart(pos + bits) + self._prepend(start) + return + + def _reversebytes(self, start, end): + """Reverse bytes in-place.""" + # Make the start occur on a byte boundary + # TODO: We could be cleverer here to avoid changing the offset. + newoffset = 8 - (start % 8) + if newoffset == 8: + newoffset = 0 + self._datastore = offsetcopy(self._datastore, newoffset) + # Now just reverse the byte data + toreverse = bytearray(self._datastore.getbyteslice((newoffset + start) // 8, (newoffset + end) // 8)) + toreverse.reverse() + self._datastore.setbyteslice((newoffset + start) // 8, (newoffset + end) // 8, toreverse) + + def _set(self, pos): + """Set bit at pos to 1.""" + assert 0 <= pos < self.len + self._datastore.setbit(pos) + + def _unset(self, pos): + """Set bit at pos to 0.""" + assert 0 <= pos < self.len + self._datastore.unsetbit(pos) + + def _invert(self, pos): + """Flip bit at pos 1<->0.""" + assert 0 <= pos < self.len + self._datastore.invertbit(pos) + + def _invert_all(self): + """Invert every bit.""" + set = self._datastore.setbyte + get = self._datastore.getbyte + for p in xrange(self._datastore.byteoffset, self._datastore.byteoffset + self._datastore.bytelength): + set(p, 256 + ~get(p)) + + def _ilshift(self, n): + """Shift bits by n to the left in place. Return self.""" + assert 0 < n <= self.len + self._append(Bits(n)) + self._truncatestart(n) + return self + + def _irshift(self, n): + """Shift bits by n to the right in place. 
Return self.""" + assert 0 < n <= self.len + self._prepend(Bits(n)) + self._truncateend(n) + return self + + def _imul(self, n): + """Concatenate n copies of self in place. Return self.""" + assert n >= 0 + if not n: + self._clear() + return self + m = 1 + old_len = self.len + while m * 2 < n: + self._append(self) + m *= 2 + self._append(self[0:(n - m) * old_len]) + return self + + def _inplace_logical_helper(self, bs, f): + """Helper function containing most of the __ior__, __iand__, __ixor__ code.""" + # Give the two bitstrings the same offset (modulo 8) + self_byteoffset, self_bitoffset = divmod(self._offset, 8) + bs_byteoffset, bs_bitoffset = divmod(bs._offset, 8) + if bs_bitoffset != self_bitoffset: + if not self_bitoffset: + bs._datastore = offsetcopy(bs._datastore, 0) + else: + self._datastore = offsetcopy(self._datastore, bs_bitoffset) + a = self._datastore.rawbytes + b = bs._datastore.rawbytes + for i in xrange(len(a)): + a[i] = f(a[i + self_byteoffset], b[i + bs_byteoffset]) + return self + + def _ior(self, bs): + return self._inplace_logical_helper(bs, operator.ior) + + def _iand(self, bs): + return self._inplace_logical_helper(bs, operator.iand) + + def _ixor(self, bs): + return self._inplace_logical_helper(bs, operator.xor) + + def _readbits(self, length, start): + """Read some bits from the bitstring and return newly constructed bitstring.""" + return self._slice(start, start + length) + + def _validate_slice(self, start, end): + """Validate start and end and return them as positive bit positions.""" + if start is None: + start = 0 + elif start < 0: + start += self.len + if end is None: + end = self.len + elif end < 0: + end += self.len + if not 0 <= end <= self.len: + raise ValueError("end is not a valid position in the bitstring.") + if not 0 <= start <= self.len: + raise ValueError("start is not a valid position in the bitstring.") + if end < start: + raise ValueError("end must not be less than start.") + return start, end + + def unpack(self, fmt, 
**kwargs): + """Interpret the whole bitstring using fmt and return list. + + fmt -- A single string or a list of strings with comma separated tokens + describing how to interpret the bits in the bitstring. Items + can also be integers, for reading new bitstring of the given length. + kwargs -- A dictionary or keyword-value pairs - the keywords used in the + format string will be replaced with their given value. + + Raises ValueError if the format is not understood. If not enough bits + are available then all bits to the end of the bitstring will be used. + + See the docstring for 'read' for token examples. + + """ + return self._readlist(fmt, 0, **kwargs)[0] + + def _readlist(self, fmt, pos, **kwargs): + tokens = [] + stretchy_token = None + if isinstance(fmt, basestring): + fmt = [fmt] + # Not very optimal this, but replace integers with 'bits' tokens + # TODO: optimise + for i, f in enumerate(fmt): + if isinstance(f, numbers.Integral): + fmt[i] = "bits:{0}".format(f) + for f_item in fmt: + stretchy, tkns = tokenparser(f_item, tuple(sorted(kwargs.keys()))) + if stretchy: + if stretchy_token: + raise Error("It's not possible to have more than one 'filler' token.") + stretchy_token = stretchy + tokens.extend(tkns) + if not stretchy_token: + lst = [] + for name, length, _ in tokens: + if length in kwargs: + length = kwargs[length] + if name == 'bytes': + length *= 8 + if name in kwargs and length is None: + # Using default 'uint' - the name is really the length. + value, pos = self._readtoken('uint', pos, kwargs[name]) + lst.append(value) + continue + value, pos = self._readtoken(name, pos, length) + if value is not None: # Don't append pad tokens + lst.append(value) + return lst, pos + stretchy_token = False + bits_after_stretchy_token = 0 + for token in tokens: + name, length, _ = token + if length in kwargs: + length = kwargs[length] + if name == 'bytes': + length *= 8 + if name in kwargs and length is None: + # Default 'uint'. 
+ length = kwargs[name] + if stretchy_token: + if name in ('se', 'ue', 'sie', 'uie'): + raise Error("It's not possible to parse a variable" + "length token after a 'filler' token.") + else: + if length is None: + raise Error("It's not possible to have more than " + "one 'filler' token.") + bits_after_stretchy_token += length + if length is None and name not in ('se', 'ue', 'sie', 'uie'): + assert not stretchy_token + stretchy_token = token + bits_left = self.len - pos + return_values = [] + for token in tokens: + name, length, _ = token + if token is stretchy_token: + # Set length to the remaining bits + length = max(bits_left - bits_after_stretchy_token, 0) + if length in kwargs: + length = kwargs[length] + if name == 'bytes': + length *= 8 + if name in kwargs and length is None: + # Default 'uint' + length = kwargs[name] + if length is not None: + bits_left -= length + value, pos = self._readtoken(name, pos, length) + if value is not None: + return_values.append(value) + return return_values, pos + + def _findbytes(self, bytes_, start, end, bytealigned): + """Quicker version of find when everything's whole byte + and byte aligned. + + """ + assert self._datastore.offset == 0 + assert bytealigned is True + # Extract data bytes from bitstring to be found. + bytepos = (start + 7) // 8 + found = False + p = bytepos + finalpos = end // 8 + increment = max(1024, len(bytes_) * 10) + buffersize = increment + len(bytes_) + while p < finalpos: + # Read in file or from memory in overlapping chunks and search the chunks. + buf = bytearray(self._datastore.getbyteslice(p, min(p + buffersize, finalpos))) + pos = buf.find(bytes_) + if pos != -1: + found = True + p += pos + break + p += increment + if not found: + return () + return (p * 8,) + + def _findregex(self, reg_ex, start, end, bytealigned): + """Find first occurrence of a compiled regular expression. + + Note that this doesn't support arbitrary regexes, in particular they + must match a known length. 
+ + """ + p = start + length = len(reg_ex.pattern) + # We grab overlapping chunks of the binary representation and + # do an ordinary string search within that. + increment = max(4096, length * 10) + buffersize = increment + length + while p < end: + buf = self._readbin(min(buffersize, end - p), p) + # Test using regular expressions... + m = reg_ex.search(buf) + if m: + pos = m.start() + # pos = buf.find(targetbin) + # if pos != -1: + # if bytealigned then we only accept byte aligned positions. + if not bytealigned or (p + pos) % 8 == 0: + return (p + pos,) + if bytealigned: + # Advance to just beyond the non-byte-aligned match and try again... + p += pos + 1 + continue + p += increment + # Not found, return empty tuple + return () + + def find(self, bs, start=None, end=None, bytealigned=None): + """Find first occurrence of substring bs. + + Returns a single item tuple with the bit position if found, or an + empty tuple if not found. The bit position (pos property) will + also be set to the start of the substring if it is found. + + bs -- The bitstring to find. + start -- The bit position to start the search. Defaults to 0. + end -- The bit position one past the last bit to search. + Defaults to self.len. + bytealigned -- If True the bitstring will only be + found on byte boundaries. + + Raises ValueError if bs is empty, if start < 0, if end > self.len or + if end < start. 
+ + >>> BitArray('0xc3e').find('0b1111') + (6,) + + """ + bs = Bits(bs) + if not bs.len: + raise ValueError("Cannot find an empty bitstring.") + start, end = self._validate_slice(start, end) + if bytealigned is None: + bytealigned = globals()['bytealigned'] + if bytealigned and not bs.len % 8 and not self._datastore.offset: + p = self._findbytes(bs.bytes, start, end, bytealigned) + else: + p = self._findregex(re.compile(bs._getbin()), start, end, bytealigned) + # If called from a class that has a pos, set it + try: + self._pos = p[0] + except (AttributeError, IndexError): + pass + return p + + def findall(self, bs, start=None, end=None, count=None, bytealigned=None): + """Find all occurrences of bs. Return generator of bit positions. + + bs -- The bitstring to find. + start -- The bit position to start the search. Defaults to 0. + end -- The bit position one past the last bit to search. + Defaults to self.len. + count -- The maximum number of occurrences to find. + bytealigned -- If True the bitstring will only be found on + byte boundaries. + + Raises ValueError if bs is empty, if start < 0, if end > self.len or + if end < start. + + Note that all occurrences of bs are found, even if they overlap. 
+ + """ + if count is not None and count < 0: + raise ValueError("In findall, count must be >= 0.") + bs = Bits(bs) + start, end = self._validate_slice(start, end) + if bytealigned is None: + bytealigned = globals()['bytealigned'] + c = 0 + if bytealigned and not bs.len % 8 and not self._datastore.offset: + # Use the quick find method + f = self._findbytes + x = bs._getbytes() + else: + f = self._findregex + x = re.compile(bs._getbin()) + while True: + + p = f(x, start, end, bytealigned) + if not p: + break + if count is not None and c >= count: + return + c += 1 + try: + self._pos = p[0] + except AttributeError: + pass + yield p[0] + if bytealigned: + start = p[0] + 8 + else: + start = p[0] + 1 + if start >= end: + break + return + + def rfind(self, bs, start=None, end=None, bytealigned=None): + """Find final occurrence of substring bs. + + Returns a single item tuple with the bit position if found, or an + empty tuple if not found. The bit position (pos property) will + also be set to the start of the substring if it is found. + + bs -- The bitstring to find. + start -- The bit position to end the reverse search. Defaults to 0. + end -- The bit position one past the first bit to reverse search. + Defaults to self.len. + bytealigned -- If True the bitstring will only be found on byte + boundaries. + + Raises ValueError if bs is empty, if start < 0, if end > self.len or + if end < start. + + """ + bs = Bits(bs) + start, end = self._validate_slice(start, end) + if bytealigned is None: + bytealigned = globals()['bytealigned'] + if not bs.len: + raise ValueError("Cannot find an empty bitstring.") + # Search chunks starting near the end and then moving back + # until we find bs. 
+ increment = max(8192, bs.len * 80) + buffersize = min(increment + bs.len, end - start) + pos = max(start, end - buffersize) + while True: + found = list(self.findall(bs, start=pos, end=pos + buffersize, + bytealigned=bytealigned)) + if not found: + if pos == start: + return () + pos = max(start, pos - increment) + continue + return (found[-1],) + + def cut(self, bits, start=None, end=None, count=None): + """Return bitstring generator by cutting into bits sized chunks. + + bits -- The size in bits of the bitstring chunks to generate. + start -- The bit position to start the first cut. Defaults to 0. + end -- The bit position one past the last bit to use in the cut. + Defaults to self.len. + count -- If specified then at most count items are generated. + Default is to cut as many times as possible. + + """ + start, end = self._validate_slice(start, end) + if count is not None and count < 0: + raise ValueError("Cannot cut - count must be >= 0.") + if bits <= 0: + raise ValueError("Cannot cut - bits must be >= 0.") + c = 0 + while count is None or c < count: + c += 1 + nextchunk = self._slice(start, min(start + bits, end)) + if nextchunk.len != bits: + return + assert nextchunk._assertsanity() + yield nextchunk + start += bits + return + + def split(self, delimiter, start=None, end=None, count=None, + bytealigned=None): + """Return bitstring generator by splittling using a delimiter. + + The first item returned is the initial bitstring before the delimiter, + which may be an empty bitstring. + + delimiter -- The bitstring used as the divider. + start -- The bit position to start the split. Defaults to 0. + end -- The bit position one past the last bit to use in the split. + Defaults to self.len. + count -- If specified then at most count items are generated. + Default is to split as many times as possible. + bytealigned -- If True splits will only occur on byte boundaries. + + Raises ValueError if the delimiter is empty. 
+ + """ + delimiter = Bits(delimiter) + if not delimiter.len: + raise ValueError("split delimiter cannot be empty.") + start, end = self._validate_slice(start, end) + if bytealigned is None: + bytealigned = globals()['bytealigned'] + if count is not None and count < 0: + raise ValueError("Cannot split - count must be >= 0.") + if count == 0: + return + if bytealigned and not delimiter.len % 8 and not self._datastore.offset: + # Use the quick find method + f = self._findbytes + x = delimiter._getbytes() + else: + f = self._findregex + x = re.compile(delimiter._getbin()) + found = f(x, start, end, bytealigned) + if not found: + # Initial bits are the whole bitstring being searched + yield self._slice(start, end) + return + # yield the bytes before the first occurrence of the delimiter, even if empty + yield self._slice(start, found[0]) + startpos = pos = found[0] + c = 1 + while count is None or c < count: + pos += delimiter.len + found = f(x, pos, end, bytealigned) + if not found: + # No more occurrences, so return the rest of the bitstring + yield self._slice(startpos, end) + return + c += 1 + yield self._slice(startpos, found[0]) + startpos = pos = found[0] + # Have generated count bitstrings, so time to quit. + return + + def join(self, sequence): + """Return concatenation of bitstrings joined by self. + + sequence -- A sequence of bitstrings. + + """ + s = self.__class__() + i = iter(sequence) + try: + s._append(Bits(next(i))) + while True: + n = next(i) + s._append(self) + s._append(Bits(n)) + except StopIteration: + pass + return s + + def tobytes(self): + """Return the bitstring as bytes, padding with zero bits if needed. + + Up to seven zero bits will be added at the end to byte align. 
+ + """ + d = offsetcopy(self._datastore, 0).rawbytes + # Need to ensure that unused bits at end are set to zero + unusedbits = 8 - self.len % 8 + if unusedbits != 8: + d[-1] &= (0xff << unusedbits) + return bytes(d) + + def tofile(self, f): + """Write the bitstring to a file object, padding with zero bits if needed. + + Up to seven zero bits will be added at the end to byte align. + + """ + # If the bitstring is file based then we don't want to read it all + # in to memory. + chunksize = 1024 * 1024 # 1 MB chunks + if not self._offset: + a = 0 + bytelen = self._datastore.bytelength + p = self._datastore.getbyteslice(a, min(a + chunksize, bytelen - 1)) + while len(p) == chunksize: + f.write(p) + a += chunksize + p = self._datastore.getbyteslice(a, min(a + chunksize, bytelen - 1)) + f.write(p) + # Now the final byte, ensuring that unused bits at end are set to 0. + bits_in_final_byte = self.len % 8 + if not bits_in_final_byte: + bits_in_final_byte = 8 + f.write(self[-bits_in_final_byte:].tobytes()) + else: + # Really quite inefficient... + a = 0 + b = a + chunksize * 8 + while b <= self.len: + f.write(self._slice(a, b)._getbytes()) + a += chunksize * 8 + b += chunksize * 8 + if a != self.len: + f.write(self._slice(a, self.len).tobytes()) + + def startswith(self, prefix, start=None, end=None): + """Return whether the current bitstring starts with prefix. + + prefix -- The bitstring to search for. + start -- The bit position to start from. Defaults to 0. + end -- The bit position to end at. Defaults to self.len. + + """ + prefix = Bits(prefix) + start, end = self._validate_slice(start, end) + if end < start + prefix.len: + return False + end = start + prefix.len + return self._slice(start, end) == prefix + + def endswith(self, suffix, start=None, end=None): + """Return whether the current bitstring ends with suffix. + + suffix -- The bitstring to search for. + start -- The bit position to start from. Defaults to 0. + end -- The bit position to end at. 
Defaults to self.len. + + """ + suffix = Bits(suffix) + start, end = self._validate_slice(start, end) + if start + suffix.len > end: + return False + start = end - suffix.len + return self._slice(start, end) == suffix + + def all(self, value, pos=None): + """Return True if one or many bits are all set to value. + + value -- If value is True then checks for bits set to 1, otherwise + checks for bits set to 0. + pos -- An iterable of bit positions. Negative numbers are treated in + the same way as slice indices. Defaults to the whole bitstring. + + """ + value = bool(value) + length = self.len + if pos is None: + pos = xrange(self.len) + for p in pos: + if p < 0: + p += length + if not 0 <= p < length: + raise IndexError("Bit position {0} out of range.".format(p)) + if not self._datastore.getbit(p) is value: + return False + return True + + def any(self, value, pos=None): + """Return True if any of one or many bits are set to value. + + value -- If value is True then checks for bits set to 1, otherwise + checks for bits set to 0. + pos -- An iterable of bit positions. Negative numbers are treated in + the same way as slice indices. Defaults to the whole bitstring. + + """ + value = bool(value) + length = self.len + if pos is None: + pos = xrange(self.len) + for p in pos: + if p < 0: + p += length + if not 0 <= p < length: + raise IndexError("Bit position {0} out of range.".format(p)) + if self._datastore.getbit(p) is value: + return True + return False + + def count(self, value): + """Return count of total number of either zero or one bits. + + value -- If True then bits set to 1 are counted, otherwise bits set + to 0 are counted. + + >>> Bits('0xef').count(1) + 7 + + """ + if not self.len: + return 0 + # count the number of 1s (from which it's easy to work out the 0s). + # Don't count the final byte yet. 
+ count = sum(BIT_COUNT[self._datastore.getbyte(i)] for i in xrange(self._datastore.bytelength - 1)) + # adjust for bits at start that aren't part of the bitstring + if self._offset: + count -= BIT_COUNT[self._datastore.getbyte(0) >> (8 - self._offset)] + # and count the last 1 - 8 bits at the end. + endbits = self._datastore.bytelength * 8 - (self._offset + self.len) + count += BIT_COUNT[self._datastore.getbyte(self._datastore.bytelength - 1) >> endbits] + return count if value else self.len - count + + # Create native-endian functions as aliases depending on the byteorder + if byteorder == 'little': + _setfloatne = _setfloatle + _readfloatne = _readfloatle + _getfloatne = _getfloatle + _setuintne = _setuintle + _readuintne = _readuintle + _getuintne = _getuintle + _setintne = _setintle + _readintne = _readintle + _getintne = _getintle + else: + _setfloatne = _setfloat + _readfloatne = _readfloat + _getfloatne = _getfloat + _setuintne = _setuintbe + _readuintne = _readuintbe + _getuintne = _getuintbe + _setintne = _setintbe + _readintne = _readintbe + _getintne = _getintbe + + _offset = property(_getoffset) + + len = property(_getlength, + doc="""The length of the bitstring in bits. Read only. + """) + length = property(_getlength, + doc="""The length of the bitstring in bits. Read only. + """) + bool = property(_getbool, + doc="""The bitstring as a bool (True or False). Read only. + """) + hex = property(_gethex, + doc="""The bitstring as a hexadecimal string. Read only. + """) + bin = property(_getbin, + doc="""The bitstring as a binary string. Read only. + """) + oct = property(_getoct, + doc="""The bitstring as an octal string. Read only. + """) + bytes = property(_getbytes, + doc="""The bitstring as a bytes object. Read only. + """) + int = property(_getint, + doc="""The bitstring as a two's complement signed int. Read only. + """) + uint = property(_getuint, + doc="""The bitstring as a two's complement unsigned int. Read only. 
+ """) + float = property(_getfloat, + doc="""The bitstring as a floating point number. Read only. + """) + intbe = property(_getintbe, + doc="""The bitstring as a two's complement big-endian signed int. Read only. + """) + uintbe = property(_getuintbe, + doc="""The bitstring as a two's complement big-endian unsigned int. Read only. + """) + floatbe = property(_getfloat, + doc="""The bitstring as a big-endian floating point number. Read only. + """) + intle = property(_getintle, + doc="""The bitstring as a two's complement little-endian signed int. Read only. + """) + uintle = property(_getuintle, + doc="""The bitstring as a two's complement little-endian unsigned int. Read only. + """) + floatle = property(_getfloatle, + doc="""The bitstring as a little-endian floating point number. Read only. + """) + intne = property(_getintne, + doc="""The bitstring as a two's complement native-endian signed int. Read only. + """) + uintne = property(_getuintne, + doc="""The bitstring as a two's complement native-endian unsigned int. Read only. + """) + floatne = property(_getfloatne, + doc="""The bitstring as a native-endian floating point number. Read only. + """) + ue = property(_getue, + doc="""The bitstring as an unsigned exponential-Golomb code. Read only. + """) + se = property(_getse, + doc="""The bitstring as a signed exponential-Golomb code. Read only. + """) + uie = property(_getuie, + doc="""The bitstring as an unsigned interleaved exponential-Golomb code. Read only. + """) + sie = property(_getsie, + doc="""The bitstring as a signed interleaved exponential-Golomb code. Read only. + """) + + +# Dictionary that maps token names to the function that reads them. 
+name_to_read = {'uint': Bits._readuint, + 'uintle': Bits._readuintle, + 'uintbe': Bits._readuintbe, + 'uintne': Bits._readuintne, + 'int': Bits._readint, + 'intle': Bits._readintle, + 'intbe': Bits._readintbe, + 'intne': Bits._readintne, + 'float': Bits._readfloat, + 'floatbe': Bits._readfloat, # floatbe is a synonym for float + 'floatle': Bits._readfloatle, + 'floatne': Bits._readfloatne, + 'hex': Bits._readhex, + 'oct': Bits._readoct, + 'bin': Bits._readbin, + 'bits': Bits._readbits, + 'bytes': Bits._readbytes, + 'ue': Bits._readue, + 'se': Bits._readse, + 'uie': Bits._readuie, + 'sie': Bits._readsie, + 'bool': Bits._readbool, + } + +# Dictionaries for mapping init keywords with init functions. +init_with_length_and_offset = {'bytes': Bits._setbytes_safe, + 'filename': Bits._setfile, + } + +init_with_length_only = {'uint': Bits._setuint, + 'int': Bits._setint, + 'float': Bits._setfloat, + 'uintbe': Bits._setuintbe, + 'intbe': Bits._setintbe, + 'floatbe': Bits._setfloat, + 'uintle': Bits._setuintle, + 'intle': Bits._setintle, + 'floatle': Bits._setfloatle, + 'uintne': Bits._setuintne, + 'intne': Bits._setintne, + 'floatne': Bits._setfloatne, + } + +init_without_length_or_offset = {'bin': Bits._setbin_safe, + 'hex': Bits._sethex, + 'oct': Bits._setoct, + 'ue': Bits._setue, + 'se': Bits._setse, + 'uie': Bits._setuie, + 'sie': Bits._setsie, + 'bool': Bits._setbool, + } + + +class BitArray(Bits): + """A container holding a mutable sequence of bits. + + Subclass of the immutable Bits class. Inherits all of its + methods (except __hash__) and adds mutating methods. + + Mutating methods: + + append() -- Append a bitstring. + byteswap() -- Change byte endianness in-place. + insert() -- Insert a bitstring. + invert() -- Flip bit(s) between one and zero. + overwrite() -- Overwrite a section with a new bitstring. + prepend() -- Prepend a bitstring. + replace() -- Replace occurrences of one bitstring with another. + reverse() -- Reverse bits in-place. 
+ rol() -- Rotate bits to the left. + ror() -- Rotate bits to the right. + set() -- Set bit(s) to 1 or 0. + + Methods inherited from Bits: + + all() -- Check if all specified bits are set to 1 or 0. + any() -- Check if any of specified bits are set to 1 or 0. + count() -- Count the number of bits set to 1 or 0. + cut() -- Create generator of constant sized chunks. + endswith() -- Return whether the bitstring ends with a sub-string. + find() -- Find a sub-bitstring in the current bitstring. + findall() -- Find all occurrences of a sub-bitstring in the current bitstring. + join() -- Join bitstrings together using current bitstring. + rfind() -- Seek backwards to find a sub-bitstring. + split() -- Create generator of chunks split by a delimiter. + startswith() -- Return whether the bitstring starts with a sub-bitstring. + tobytes() -- Return bitstring as bytes, padding if needed. + tofile() -- Write bitstring to file, padding if needed. + unpack() -- Interpret bits using format string. + + Special methods: + + Mutating operators are available: [], <<=, >>=, +=, *=, &=, |= and ^= + in addition to the inherited [], ==, !=, +, *, ~, <<, >>, &, | and ^. + + Properties: + + bin -- The bitstring as a binary string. + bool -- For single bit bitstrings, interpret as True or False. + bytepos -- The current byte position in the bitstring. + bytes -- The bitstring as a bytes object. + float -- Interpret as a floating point number. + floatbe -- Interpret as a big-endian floating point number. + floatle -- Interpret as a little-endian floating point number. + floatne -- Interpret as a native-endian floating point number. + hex -- The bitstring as a hexadecimal string. + int -- Interpret as a two's complement signed integer. + intbe -- Interpret as a big-endian signed integer. + intle -- Interpret as a little-endian signed integer. + intne -- Interpret as a native-endian signed integer. + len -- Length of the bitstring in bits. + oct -- The bitstring as an octal string. 
+ pos -- The current bit position in the bitstring. + se -- Interpret as a signed exponential-Golomb code. + ue -- Interpret as an unsigned exponential-Golomb code. + sie -- Interpret as a signed interleaved exponential-Golomb code. + uie -- Interpret as an unsigned interleaved exponential-Golomb code. + uint -- Interpret as a two's complement unsigned integer. + uintbe -- Interpret as a big-endian unsigned integer. + uintle -- Interpret as a little-endian unsigned integer. + uintne -- Interpret as a native-endian unsigned integer. + + """ + + __slots__ = () + + # As BitArray objects are mutable, we shouldn't allow them to be hashed. + __hash__ = None + + def __init__(self, auto=None, length=None, offset=None, **kwargs): + """Either specify an 'auto' initialiser: + auto -- a string of comma separated tokens, an integer, a file object, + a bytearray, a boolean iterable or another bitstring. + + Or initialise via **kwargs with one (and only one) of: + bytes -- raw data as a string, for example read from a binary file. + bin -- binary string representation, e.g. '0b001010'. + hex -- hexadecimal string representation, e.g. '0x2ef' + oct -- octal string representation, e.g. '0o777'. + uint -- an unsigned integer. + int -- a signed integer. + float -- a floating point number. + uintbe -- an unsigned big-endian whole byte integer. + intbe -- a signed big-endian whole byte integer. + floatbe - a big-endian floating point number. + uintle -- an unsigned little-endian whole byte integer. + intle -- a signed little-endian whole byte integer. + floatle -- a little-endian floating point number. + uintne -- an unsigned native-endian whole byte integer. + intne -- a signed native-endian whole byte integer. + floatne -- a native-endian floating point number. + se -- a signed exponential-Golomb code. + ue -- an unsigned exponential-Golomb code. + sie -- a signed interleaved exponential-Golomb code. + uie -- an unsigned interleaved exponential-Golomb code. 
+ bool -- a boolean (True or False). + filename -- a file which will be opened in binary read-only mode. + + Other keyword arguments: + length -- length of the bitstring in bits, if needed and appropriate. + It must be supplied for all integer and float initialisers. + offset -- bit offset to the data. These offset bits are + ignored and this is intended for use when + initialising using 'bytes' or 'filename'. + + """ + # For mutable BitArrays we always read in files to memory: + if not isinstance(self._datastore, ByteStore): + self._ensureinmemory() + + def __new__(cls, auto=None, length=None, offset=None, **kwargs): + x = super(BitArray, cls).__new__(cls) + y = Bits.__new__(BitArray, auto, length, offset, **kwargs) + x._datastore = y._datastore + return x + + def __iadd__(self, bs): + """Append bs to current bitstring. Return self. + + bs -- the bitstring to append. + + """ + self.append(bs) + return self + + def __copy__(self): + """Return a new copy of the BitArray.""" + s_copy = BitArray() + if not isinstance(self._datastore, ByteStore): + # Let them both point to the same (invariant) array. + # If either gets modified then at that point they'll be read into memory. + s_copy._datastore = self._datastore + else: + s_copy._datastore = copy.copy(self._datastore) + return s_copy + + def __setitem__(self, key, value): + """Set item or range to new value. + + Indices are in units of the step parameter (default 1 bit). + Stepping is used to specify the number of bits in each item. + + If the length of the bitstring is changed then pos will be moved + to after the inserted section, otherwise it will remain unchanged. 
+ + >>> s = BitArray('0xff') + >>> s[0:1:4] = '0xe' + >>> print s + '0xef' + >>> s[4:4] = '0x00' + >>> print s + '0xe00f' + + """ + try: + # A slice + start, step = 0, 1 + if key.step is not None: + step = key.step + except AttributeError: + # single element + if key < 0: + key += self.len + if not 0 <= key < self.len: + raise IndexError("Slice index out of range.") + if isinstance(value, numbers.Integral): + if not value: + self._unset(key) + return + if value in (1, -1): + self._set(key) + return + raise ValueError("Cannot set a single bit with integer {0}.".format(value)) + value = Bits(value) + if value.len == 1: + # TODO: this can't be optimal + if value[0]: + self._set(key) + else: + self._unset(key) + else: + self._delete(1, key) + self._insert(value, key) + return + else: + if step != 1: + # convert to binary string and use string slicing + # TODO: Horribly inefficent + temp = list(self._getbin()) + v = list(Bits(value)._getbin()) + temp.__setitem__(key, v) + self._setbin_unsafe(''.join(temp)) + return + + # If value is an integer then we want to set the slice to that + # value rather than initialise a new bitstring of that length. + if not isinstance(value, numbers.Integral): + try: + # TODO: Better way than calling constructor here? + value = Bits(value) + except TypeError: + raise TypeError("Bitstring, integer or string expected. " + "Got {0}.".format(type(value))) + if key.start is not None: + start = key.start + if key.start < 0: + start += self.len + if start < 0: + start = 0 + stop = self.len + if key.stop is not None: + stop = key.stop + if key.stop < 0: + stop += self.len + if start > stop: + # The standard behaviour for lists is to just insert at the + # start position if stop < start and step == 1. 
+ stop = start + if isinstance(value, numbers.Integral): + if value >= 0: + value = self.__class__(uint=value, length=stop - start) + else: + value = self.__class__(int=value, length=stop - start) + stop = min(stop, self.len) + start = max(start, 0) + start = min(start, stop) + if (stop - start) == value.len: + if not value.len: + return + if step >= 0: + self._overwrite(value, start) + else: + self._overwrite(value.__getitem__(slice(None, None, 1)), start) + else: + # TODO: A delete then insert is wasteful - it could do unneeded shifts. + # Could be either overwrite + insert or overwrite + delete. + self._delete(stop - start, start) + if step >= 0: + self._insert(value, start) + else: + self._insert(value.__getitem__(slice(None, None, 1)), start) + # pos is now after the inserted piece. + return + + def __delitem__(self, key): + """Delete item or range. + + Indices are in units of the step parameter (default 1 bit). + Stepping is used to specify the number of bits in each item. + + >>> a = BitArray('0x001122') + >>> del a[1:2:8] + >>> print a + 0x0022 + + """ + try: + # A slice + start = 0 + step = key.step if key.step is not None else 1 + except AttributeError: + # single element + if key < 0: + key += self.len + if not 0 <= key < self.len: + raise IndexError("Slice index out of range.") + self._delete(1, key) + return + else: + if step != 1: + # convert to binary string and use string slicing + # TODO: Horribly inefficent + temp = list(self._getbin()) + temp.__delitem__(key) + self._setbin_unsafe(''.join(temp)) + return + stop = key.stop + if key.start is not None: + start = key.start + if key.start < 0 and stop is None: + start += self.len + if start < 0: + start = 0 + if stop is None: + stop = self.len + if start > stop: + return + stop = min(stop, self.len) + start = max(start, 0) + start = min(start, stop) + self._delete(stop - start, start) + return + + def __ilshift__(self, n): + """Shift bits by n to the left in place. Return self. 
+ + n -- the number of bits to shift. Must be >= 0. + + """ + if n < 0: + raise ValueError("Cannot shift by a negative amount.") + if not self.len: + raise ValueError("Cannot shift an empty bitstring.") + if not n: + return self + n = min(n, self.len) + return self._ilshift(n) + + def __irshift__(self, n): + """Shift bits by n to the right in place. Return self. + + n -- the number of bits to shift. Must be >= 0. + + """ + if n < 0: + raise ValueError("Cannot shift by a negative amount.") + if not self.len: + raise ValueError("Cannot shift an empty bitstring.") + if not n: + return self + n = min(n, self.len) + return self._irshift(n) + + def __imul__(self, n): + """Concatenate n copies of self in place. Return self. + + Called for expressions of the form 'a *= 3'. + n -- The number of concatenations. Must be >= 0. + + """ + if n < 0: + raise ValueError("Cannot multiply by a negative integer.") + return self._imul(n) + + def __ior__(self, bs): + bs = Bits(bs) + if self.len != bs.len: + raise ValueError("Bitstrings must have the same length " + "for |= operator.") + return self._ior(bs) + + def __iand__(self, bs): + bs = Bits(bs) + if self.len != bs.len: + raise ValueError("Bitstrings must have the same length " + "for &= operator.") + return self._iand(bs) + + def __ixor__(self, bs): + bs = Bits(bs) + if self.len != bs.len: + raise ValueError("Bitstrings must have the same length " + "for ^= operator.") + return self._ixor(bs) + + def replace(self, old, new, start=None, end=None, count=None, + bytealigned=None): + """Replace all occurrences of old with new in place. + + Returns number of replacements made. + + old -- The bitstring to replace. + new -- The replacement bitstring. + start -- Any occurrences that start before this will not be replaced. + Defaults to 0. + end -- Any occurrences that finish after this will not be replaced. + Defaults to self.len. + count -- The maximum number of replacements to make. Defaults to + replace all occurrences. 
+ bytealigned -- If True replacements will only be made on byte + boundaries. + + Raises ValueError if old is empty or if start or end are + out of range. + + """ + old = Bits(old) + new = Bits(new) + if not old.len: + raise ValueError("Empty bitstring cannot be replaced.") + start, end = self._validate_slice(start, end) + if bytealigned is None: + bytealigned = globals()['bytealigned'] + # Adjust count for use in split() + if count is not None: + count += 1 + sections = self.split(old, start, end, count, bytealigned) + lengths = [s.len for s in sections] + if len(lengths) == 1: + # Didn't find anything to replace. + return 0 # no replacements done + if new is self: + # Prevent self assignment woes + new = copy.copy(self) + positions = [lengths[0] + start] + for l in lengths[1:-1]: + # Next position is the previous one plus the length of the next section. + positions.append(positions[-1] + l) + # We have all the positions that need replacements. We do them + # in reverse order so that they won't move around as we replace. + positions.reverse() + try: + # Need to calculate new pos, if this is a bitstream + newpos = self._pos + for p in positions: + self[p:p + old.len] = new + if old.len != new.len: + diff = new.len - old.len + for p in positions: + if p >= newpos: + continue + if p + old.len <= newpos: + newpos += diff + else: + newpos = p + self._pos = newpos + except AttributeError: + for p in positions: + self[p:p + old.len] = new + assert self._assertsanity() + return len(lengths) - 1 + + def insert(self, bs, pos=None): + """Insert bs at bit position pos. + + bs -- The bitstring to insert. + pos -- The bit position to insert at. + + Raises ValueError if pos < 0 or pos > self.len. 
+ + """ + bs = Bits(bs) + if not bs.len: + return self + if bs is self: + bs = self.__copy__() + if pos is None: + try: + pos = self._pos + except AttributeError: + raise TypeError("insert require a bit position for this type.") + if pos < 0: + pos += self.len + if not 0 <= pos <= self.len: + raise ValueError("Invalid insert position.") + self._insert(bs, pos) + + def overwrite(self, bs, pos=None): + """Overwrite with bs at bit position pos. + + bs -- The bitstring to overwrite with. + pos -- The bit position to begin overwriting from. + + Raises ValueError if pos < 0 or pos + bs.len > self.len + + """ + bs = Bits(bs) + if not bs.len: + return + if pos is None: + try: + pos = self._pos + except AttributeError: + raise TypeError("overwrite require a bit position for this type.") + if pos < 0: + pos += self.len + if pos < 0 or pos + bs.len > self.len: + raise ValueError("Overwrite exceeds boundary of bitstring.") + self._overwrite(bs, pos) + try: + self._pos = pos + bs.len + except AttributeError: + pass + + def append(self, bs): + """Append a bitstring to the current bitstring. + + bs -- The bitstring to append. + + """ + # The offset is a hint to make bs easily appendable. + bs = self._converttobitstring(bs, offset=(self.len + self._offset) % 8) + self._append(bs) + + def prepend(self, bs): + """Prepend a bitstring to the current bitstring. + + bs -- The bitstring to prepend. + + """ + bs = Bits(bs) + self._prepend(bs) + + def reverse(self, start=None, end=None): + """Reverse bits in-place. + + start -- Position of first bit to reverse. Defaults to 0. + end -- One past the position of the last bit to reverse. + Defaults to self.len. + + Using on an empty bitstring will have no effect. + + Raises ValueError if start < 0, end > self.len or end < start. 
+ + """ + start, end = self._validate_slice(start, end) + if start == 0 and end == self.len: + self._reverse() + return + s = self._slice(start, end) + s._reverse() + self[start:end] = s + + def set(self, value, pos=None): + """Set one or many bits to 1 or 0. + + value -- If True bits are set to 1, otherwise they are set to 0. + pos -- Either a single bit position or an iterable of bit positions. + Negative numbers are treated in the same way as slice indices. + Defaults to the entire bitstring. + + Raises IndexError if pos < -self.len or pos >= self.len. + + """ + f = self._set if value else self._unset + if pos is None: + pos = xrange(self.len) + try: + length = self.len + for p in pos: + if p < 0: + p += length + if not 0 <= p < length: + raise IndexError("Bit position {0} out of range.".format(p)) + f(p) + except TypeError: + # Single pos + if pos < 0: + pos += self.len + if not 0 <= pos < length: + raise IndexError("Bit position {0} out of range.".format(pos)) + f(pos) + + def invert(self, pos=None): + """Invert one or many bits from 0 to 1 or vice versa. + + pos -- Either a single bit position or an iterable of bit positions. + Negative numbers are treated in the same way as slice indices. + + Raises IndexError if pos < -self.len or pos >= self.len. + + """ + if pos is None: + self._invert_all() + return + if not isinstance(pos, collections.Iterable): + pos = (pos,) + length = self.len + + for p in pos: + if p < 0: + p += length + if not 0 <= p < length: + raise IndexError("Bit position {0} out of range.".format(p)) + self._invert(p) + + def ror(self, bits, start=None, end=None): + """Rotate bits to the right in-place. + + bits -- The number of bits to rotate by. + start -- Start of slice to rotate. Defaults to 0. + end -- End of slice to rotate. Defaults to self.len. + + Raises ValueError if bits < 0. 
+ + """ + if not self.len: + raise Error("Cannot rotate an empty bitstring.") + if bits < 0: + raise ValueError("Cannot rotate right by negative amount.") + start, end = self._validate_slice(start, end) + bits %= (end - start) + if not bits: + return + rhs = self._slice(end - bits, end) + self._delete(bits, end - bits) + self._insert(rhs, start) + + def rol(self, bits, start=None, end=None): + """Rotate bits to the left in-place. + + bits -- The number of bits to rotate by. + start -- Start of slice to rotate. Defaults to 0. + end -- End of slice to rotate. Defaults to self.len. + + Raises ValueError if bits < 0. + + """ + if not self.len: + raise Error("Cannot rotate an empty bitstring.") + if bits < 0: + raise ValueError("Cannot rotate left by negative amount.") + start, end = self._validate_slice(start, end) + bits %= (end - start) + if not bits: + return + lhs = self._slice(start, start + bits) + self._delete(bits, start) + self._insert(lhs, end - bits) + + def byteswap(self, fmt=None, start=None, end=None, repeat=True): + """Change the endianness in-place. Return number of repeats of fmt done. + + fmt -- A compact structure string, an integer number of bytes or + an iterable of integers. Defaults to 0, which byte reverses the + whole bitstring. + start -- Start bit position, defaults to 0. + end -- End bit position, defaults to self.len. + repeat -- If True (the default) the byte swapping pattern is repeated + as much as possible. + + """ + start, end = self._validate_slice(start, end) + if fmt is None or fmt == 0: + # reverse all of the whole bytes. + bytesizes = [(end - start) // 8] + elif isinstance(fmt, numbers.Integral): + if fmt < 0: + raise ValueError("Improper byte length {0}.".format(fmt)) + bytesizes = [fmt] + elif isinstance(fmt, basestring): + m = STRUCT_PACK_RE.match(fmt) + if not m: + raise ValueError("Cannot parse format string {0}.".format(fmt)) + # Split the format string into a list of 'q', '4h' etc. 
+ formatlist = re.findall(STRUCT_SPLIT_RE, m.group('fmt')) + # Now deal with multiplicative factors, 4h -> hhhh etc. + bytesizes = [] + for f in formatlist: + if len(f) == 1: + bytesizes.append(PACK_CODE_SIZE[f]) + else: + bytesizes.extend([PACK_CODE_SIZE[f[-1]]] * int(f[:-1])) + elif isinstance(fmt, collections.Iterable): + bytesizes = fmt + for bytesize in bytesizes: + if not isinstance(bytesize, numbers.Integral) or bytesize < 0: + raise ValueError("Improper byte length {0}.".format(bytesize)) + else: + raise TypeError("Format must be an integer, string or iterable.") + + repeats = 0 + totalbitsize = 8 * sum(bytesizes) + if not totalbitsize: + return 0 + if repeat: + # Try to repeat up to the end of the bitstring. + finalbit = end + else: + # Just try one (set of) byteswap(s). + finalbit = start + totalbitsize + for patternend in xrange(start + totalbitsize, finalbit + 1, totalbitsize): + bytestart = patternend - totalbitsize + for bytesize in bytesizes: + byteend = bytestart + bytesize * 8 + self._reversebytes(bytestart, byteend) + bytestart += bytesize * 8 + repeats += 1 + return repeats + + def clear(self): + """Remove all bits, reset to zero length.""" + self._clear() + + def copy(self): + """Return a copy of the bitstring.""" + return self._copy() + + int = property(Bits._getint, Bits._setint, + doc="""The bitstring as a two's complement signed int. Read and write. + """) + uint = property(Bits._getuint, Bits._setuint, + doc="""The bitstring as a two's complement unsigned int. Read and write. + """) + float = property(Bits._getfloat, Bits._setfloat, + doc="""The bitstring as a floating point number. Read and write. + """) + intbe = property(Bits._getintbe, Bits._setintbe, + doc="""The bitstring as a two's complement big-endian signed int. Read and write. + """) + uintbe = property(Bits._getuintbe, Bits._setuintbe, + doc="""The bitstring as a two's complement big-endian unsigned int. Read and write. 
+ """) + floatbe = property(Bits._getfloat, Bits._setfloat, + doc="""The bitstring as a big-endian floating point number. Read and write. + """) + intle = property(Bits._getintle, Bits._setintle, + doc="""The bitstring as a two's complement little-endian signed int. Read and write. + """) + uintle = property(Bits._getuintle, Bits._setuintle, + doc="""The bitstring as a two's complement little-endian unsigned int. Read and write. + """) + floatle = property(Bits._getfloatle, Bits._setfloatle, + doc="""The bitstring as a little-endian floating point number. Read and write. + """) + intne = property(Bits._getintne, Bits._setintne, + doc="""The bitstring as a two's complement native-endian signed int. Read and write. + """) + uintne = property(Bits._getuintne, Bits._setuintne, + doc="""The bitstring as a two's complement native-endian unsigned int. Read and write. + """) + floatne = property(Bits._getfloatne, Bits._setfloatne, + doc="""The bitstring as a native-endian floating point number. Read and write. + """) + ue = property(Bits._getue, Bits._setue, + doc="""The bitstring as an unsigned exponential-Golomb code. Read and write. + """) + se = property(Bits._getse, Bits._setse, + doc="""The bitstring as a signed exponential-Golomb code. Read and write. + """) + uie = property(Bits._getuie, Bits._setuie, + doc="""The bitstring as an unsigned interleaved exponential-Golomb code. Read and write. + """) + sie = property(Bits._getsie, Bits._setsie, + doc="""The bitstring as a signed interleaved exponential-Golomb code. Read and write. + """) + hex = property(Bits._gethex, Bits._sethex, + doc="""The bitstring as a hexadecimal string. Read and write. + """) + bin = property(Bits._getbin, Bits._setbin_safe, + doc="""The bitstring as a binary string. Read and write. + """) + oct = property(Bits._getoct, Bits._setoct, + doc="""The bitstring as an octal string. Read and write. 
+ """) + bool = property(Bits._getbool, Bits._setbool, + doc="""The bitstring as a bool (True or False). Read and write. + """) + bytes = property(Bits._getbytes, Bits._setbytes_safe, + doc="""The bitstring as a ordinary string. Read and write. + """) + + + +class ConstBitStream(Bits): + """A container or stream holding an immutable sequence of bits. + + For a mutable container use the BitStream class instead. + + Methods inherited from Bits: + + all() -- Check if all specified bits are set to 1 or 0. + any() -- Check if any of specified bits are set to 1 or 0. + count() -- Count the number of bits set to 1 or 0. + cut() -- Create generator of constant sized chunks. + endswith() -- Return whether the bitstring ends with a sub-string. + find() -- Find a sub-bitstring in the current bitstring. + findall() -- Find all occurrences of a sub-bitstring in the current bitstring. + join() -- Join bitstrings together using current bitstring. + rfind() -- Seek backwards to find a sub-bitstring. + split() -- Create generator of chunks split by a delimiter. + startswith() -- Return whether the bitstring starts with a sub-bitstring. + tobytes() -- Return bitstring as bytes, padding if needed. + tofile() -- Write bitstring to file, padding if needed. + unpack() -- Interpret bits using format string. + + Other methods: + + bytealign() -- Align to next byte boundary. + peek() -- Peek at and interpret next bits as a single item. + peeklist() -- Peek at and interpret next bits as a list of items. + read() -- Read and interpret next bits as a single item. + readlist() -- Read and interpret next bits as a list of items. + + Special methods: + + Also available are the operators [], ==, !=, +, *, ~, <<, >>, &, |, ^. + + Properties: + + bin -- The bitstring as a binary string. + bool -- For single bit bitstrings, interpret as True or False. + bytepos -- The current byte position in the bitstring. + bytes -- The bitstring as a bytes object. + float -- Interpret as a floating point number. 
+ floatbe -- Interpret as a big-endian floating point number. + floatle -- Interpret as a little-endian floating point number. + floatne -- Interpret as a native-endian floating point number. + hex -- The bitstring as a hexadecimal string. + int -- Interpret as a two's complement signed integer. + intbe -- Interpret as a big-endian signed integer. + intle -- Interpret as a little-endian signed integer. + intne -- Interpret as a native-endian signed integer. + len -- Length of the bitstring in bits. + oct -- The bitstring as an octal string. + pos -- The current bit position in the bitstring. + se -- Interpret as a signed exponential-Golomb code. + ue -- Interpret as an unsigned exponential-Golomb code. + sie -- Interpret as a signed interleaved exponential-Golomb code. + uie -- Interpret as an unsigned interleaved exponential-Golomb code. + uint -- Interpret as a two's complement unsigned integer. + uintbe -- Interpret as a big-endian unsigned integer. + uintle -- Interpret as a little-endian unsigned integer. + uintne -- Interpret as a native-endian unsigned integer. + + """ + + __slots__ = ('_pos') + + def __init__(self, auto=None, length=None, offset=None, **kwargs): + """Either specify an 'auto' initialiser: + auto -- a string of comma separated tokens, an integer, a file object, + a bytearray, a boolean iterable or another bitstring. + + Or initialise via **kwargs with one (and only one) of: + bytes -- raw data as a string, for example read from a binary file. + bin -- binary string representation, e.g. '0b001010'. + hex -- hexadecimal string representation, e.g. '0x2ef' + oct -- octal string representation, e.g. '0o777'. + uint -- an unsigned integer. + int -- a signed integer. + float -- a floating point number. + uintbe -- an unsigned big-endian whole byte integer. + intbe -- a signed big-endian whole byte integer. + floatbe - a big-endian floating point number. + uintle -- an unsigned little-endian whole byte integer. 
+ intle -- a signed little-endian whole byte integer. + floatle -- a little-endian floating point number. + uintne -- an unsigned native-endian whole byte integer. + intne -- a signed native-endian whole byte integer. + floatne -- a native-endian floating point number. + se -- a signed exponential-Golomb code. + ue -- an unsigned exponential-Golomb code. + sie -- a signed interleaved exponential-Golomb code. + uie -- an unsigned interleaved exponential-Golomb code. + bool -- a boolean (True or False). + filename -- a file which will be opened in binary read-only mode. + + Other keyword arguments: + length -- length of the bitstring in bits, if needed and appropriate. + It must be supplied for all integer and float initialisers. + offset -- bit offset to the data. These offset bits are + ignored and this is intended for use when + initialising using 'bytes' or 'filename'. + + """ + self._pos = 0 + + def __new__(cls, auto=None, length=None, offset=None, **kwargs): + x = super(ConstBitStream, cls).__new__(cls) + x._initialise(auto, length, offset, **kwargs) + return x + + def _setbytepos(self, bytepos): + """Move to absolute byte-aligned position in stream.""" + self._setbitpos(bytepos * 8) + + def _getbytepos(self): + """Return the current position in the stream in bytes. Must be byte aligned.""" + if self._pos % 8: + raise ByteAlignError("Not byte aligned in _getbytepos().") + return self._pos // 8 + + def _setbitpos(self, pos): + """Move to absolute postion bit in bitstream.""" + if pos < 0: + raise ValueError("Bit position cannot be negative.") + if pos > self.len: + raise ValueError("Cannot seek past the end of the data.") + self._pos = pos + + def _getbitpos(self): + """Return the current position in the stream in bits.""" + return self._pos + + def _clear(self): + Bits._clear(self) + self._pos = 0 + + def __copy__(self): + """Return a new copy of the ConstBitStream for the copy module.""" + # Note that if you want a new copy (different ID), use _copy instead. 
+ # The copy can use the same datastore as it's immutable. + s = ConstBitStream() + s._datastore = self._datastore + # Reset the bit position, don't copy it. + s._pos = 0 + return s + + def __add__(self, bs): + """Concatenate bitstrings and return new bitstring. + + bs -- the bitstring to append. + + """ + s = Bits.__add__(self, bs) + s._pos = 0 + return s + + def read(self, fmt): + """Interpret next bits according to the format string and return result. + + fmt -- Token string describing how to interpret the next bits. + + Token examples: 'int:12' : 12 bits as a signed integer + 'uint:8' : 8 bits as an unsigned integer + 'float:64' : 8 bytes as a big-endian float + 'intbe:16' : 2 bytes as a big-endian signed integer + 'uintbe:16' : 2 bytes as a big-endian unsigned integer + 'intle:32' : 4 bytes as a little-endian signed integer + 'uintle:32' : 4 bytes as a little-endian unsigned integer + 'floatle:64': 8 bytes as a little-endian float + 'intne:24' : 3 bytes as a native-endian signed integer + 'uintne:24' : 3 bytes as a native-endian unsigned integer + 'floatne:32': 4 bytes as a native-endian float + 'hex:80' : 80 bits as a hex string + 'oct:9' : 9 bits as an octal string + 'bin:1' : single bit binary string + 'ue' : next bits as unsigned exp-Golomb code + 'se' : next bits as signed exp-Golomb code + 'uie' : next bits as unsigned interleaved exp-Golomb code + 'sie' : next bits as signed interleaved exp-Golomb code + 'bits:5' : 5 bits as a bitstring + 'bytes:10' : 10 bytes as a bytes object + 'bool' : 1 bit as a bool + 'pad:3' : 3 bits of padding to ignore - returns None + + fmt may also be an integer, which will be treated like the 'bits' token. + + The position in the bitstring is advanced to after the read items. + + Raises ReadError if not enough bits are available. + Raises ValueError if the format is not understood. 
+ + """ + if isinstance(fmt, numbers.Integral): + if fmt < 0: + raise ValueError("Cannot read negative amount.") + if fmt > self.len - self._pos: + raise ReadError("Cannot read {0} bits, only {1} available.", + fmt, self.len - self._pos) + bs = self._slice(self._pos, self._pos + fmt) + self._pos += fmt + return bs + p = self._pos + _, token = tokenparser(fmt) + if len(token) != 1: + self._pos = p + raise ValueError("Format string should be a single token, not {0} " + "tokens - use readlist() instead.".format(len(token))) + name, length, _ = token[0] + if length is None: + length = self.len - self._pos + value, self._pos = self._readtoken(name, self._pos, length) + return value + + def readlist(self, fmt, **kwargs): + """Interpret next bits according to format string(s) and return list. + + fmt -- A single string or list of strings with comma separated tokens + describing how to interpret the next bits in the bitstring. Items + can also be integers, for reading new bitstring of the given length. + kwargs -- A dictionary or keyword-value pairs - the keywords used in the + format string will be replaced with their given value. + + The position in the bitstring is advanced to after the read items. + + Raises ReadError is not enough bits are available. + Raises ValueError if the format is not understood. + + See the docstring for 'read' for token examples. 'pad' tokens are skipped + and not added to the returned list. + + >>> h, b1, b2 = s.readlist('hex:20, bin:5, bin:3') + >>> i, bs1, bs2 = s.readlist(['uint:12', 10, 10]) + + """ + value, self._pos = self._readlist(fmt, self._pos, **kwargs) + return value + + def readto(self, bs, bytealigned=None): + """Read up to and including next occurrence of bs and return result. + + bs -- The bitstring to find. An integer is not permitted. + bytealigned -- If True the bitstring will only be + found on byte boundaries. + + Raises ValueError if bs is empty. + Raises ReadError if bs is not found. 
+ + """ + if isinstance(bs, numbers.Integral): + raise ValueError("Integers cannot be searched for") + bs = Bits(bs) + oldpos = self._pos + p = self.find(bs, self._pos, bytealigned=bytealigned) + if not p: + raise ReadError("Substring not found") + self._pos += bs.len + return self._slice(oldpos, self._pos) + + def peek(self, fmt): + """Interpret next bits according to format string and return result. + + fmt -- Token string describing how to interpret the next bits. + + The position in the bitstring is not changed. If not enough bits are + available then all bits to the end of the bitstring will be used. + + Raises ReadError if not enough bits are available. + Raises ValueError if the format is not understood. + + See the docstring for 'read' for token examples. + + """ + pos_before = self._pos + value = self.read(fmt) + self._pos = pos_before + return value + + def peeklist(self, fmt, **kwargs): + """Interpret next bits according to format string(s) and return list. + + fmt -- One or more strings with comma separated tokens describing + how to interpret the next bits in the bitstring. + kwargs -- A dictionary or keyword-value pairs - the keywords used in the + format string will be replaced with their given value. + + The position in the bitstring is not changed. If not enough bits are + available then all bits to the end of the bitstring will be used. + + Raises ReadError if not enough bits are available. + Raises ValueError if the format is not understood. + + See the docstring for 'read' for token examples. + + """ + pos = self._pos + return_values = self.readlist(fmt, **kwargs) + self._pos = pos + return return_values + + def bytealign(self): + """Align to next byte and return number of skipped bits. + + Raises ValueError if the end of the bitstring is reached before + aligning to the next byte. 
+ + """ + skipped = (8 - (self._pos % 8)) % 8 + self.pos += self._offset + skipped + assert self._assertsanity() + return skipped + + pos = property(_getbitpos, _setbitpos, + doc="""The position in the bitstring in bits. Read and write. + """) + bitpos = property(_getbitpos, _setbitpos, + doc="""The position in the bitstring in bits. Read and write. + """) + bytepos = property(_getbytepos, _setbytepos, + doc="""The position in the bitstring in bytes. Read and write. + """) + + + + + +class BitStream(ConstBitStream, BitArray): + """A container or stream holding a mutable sequence of bits + + Subclass of the ConstBitStream and BitArray classes. Inherits all of + their methods. + + Methods: + + all() -- Check if all specified bits are set to 1 or 0. + any() -- Check if any of specified bits are set to 1 or 0. + append() -- Append a bitstring. + bytealign() -- Align to next byte boundary. + byteswap() -- Change byte endianness in-place. + count() -- Count the number of bits set to 1 or 0. + cut() -- Create generator of constant sized chunks. + endswith() -- Return whether the bitstring ends with a sub-string. + find() -- Find a sub-bitstring in the current bitstring. + findall() -- Find all occurrences of a sub-bitstring in the current bitstring. + insert() -- Insert a bitstring. + invert() -- Flip bit(s) between one and zero. + join() -- Join bitstrings together using current bitstring. + overwrite() -- Overwrite a section with a new bitstring. + peek() -- Peek at and interpret next bits as a single item. + peeklist() -- Peek at and interpret next bits as a list of items. + prepend() -- Prepend a bitstring. + read() -- Read and interpret next bits as a single item. + readlist() -- Read and interpret next bits as a list of items. + replace() -- Replace occurrences of one bitstring with another. + reverse() -- Reverse bits in-place. + rfind() -- Seek backwards to find a sub-bitstring. + rol() -- Rotate bits to the left. + ror() -- Rotate bits to the right. 
+ set() -- Set bit(s) to 1 or 0. + split() -- Create generator of chunks split by a delimiter. + startswith() -- Return whether the bitstring starts with a sub-bitstring. + tobytes() -- Return bitstring as bytes, padding if needed. + tofile() -- Write bitstring to file, padding if needed. + unpack() -- Interpret bits using format string. + + Special methods: + + Mutating operators are available: [], <<=, >>=, +=, *=, &=, |= and ^= + in addition to [], ==, !=, +, *, ~, <<, >>, &, | and ^. + + Properties: + + bin -- The bitstring as a binary string. + bool -- For single bit bitstrings, interpret as True or False. + bytepos -- The current byte position in the bitstring. + bytes -- The bitstring as a bytes object. + float -- Interpret as a floating point number. + floatbe -- Interpret as a big-endian floating point number. + floatle -- Interpret as a little-endian floating point number. + floatne -- Interpret as a native-endian floating point number. + hex -- The bitstring as a hexadecimal string. + int -- Interpret as a two's complement signed integer. + intbe -- Interpret as a big-endian signed integer. + intle -- Interpret as a little-endian signed integer. + intne -- Interpret as a native-endian signed integer. + len -- Length of the bitstring in bits. + oct -- The bitstring as an octal string. + pos -- The current bit position in the bitstring. + se -- Interpret as a signed exponential-Golomb code. + ue -- Interpret as an unsigned exponential-Golomb code. + sie -- Interpret as a signed interleaved exponential-Golomb code. + uie -- Interpret as an unsigned interleaved exponential-Golomb code. + uint -- Interpret as a two's complement unsigned integer. + uintbe -- Interpret as a big-endian unsigned integer. + uintle -- Interpret as a little-endian unsigned integer. + uintne -- Interpret as a native-endian unsigned integer. + + """ + + __slots__ = () + + # As BitStream objects are mutable, we shouldn't allow them to be hashed. 
+ __hash__ = None + + def __init__(self, auto=None, length=None, offset=None, **kwargs): + """Either specify an 'auto' initialiser: + auto -- a string of comma separated tokens, an integer, a file object, + a bytearray, a boolean iterable or another bitstring. + + Or initialise via **kwargs with one (and only one) of: + bytes -- raw data as a string, for example read from a binary file. + bin -- binary string representation, e.g. '0b001010'. + hex -- hexadecimal string representation, e.g. '0x2ef' + oct -- octal string representation, e.g. '0o777'. + uint -- an unsigned integer. + int -- a signed integer. + float -- a floating point number. + uintbe -- an unsigned big-endian whole byte integer. + intbe -- a signed big-endian whole byte integer. + floatbe - a big-endian floating point number. + uintle -- an unsigned little-endian whole byte integer. + intle -- a signed little-endian whole byte integer. + floatle -- a little-endian floating point number. + uintne -- an unsigned native-endian whole byte integer. + intne -- a signed native-endian whole byte integer. + floatne -- a native-endian floating point number. + se -- a signed exponential-Golomb code. + ue -- an unsigned exponential-Golomb code. + sie -- a signed interleaved exponential-Golomb code. + uie -- an unsigned interleaved exponential-Golomb code. + bool -- a boolean (True or False). + filename -- a file which will be opened in binary read-only mode. + + Other keyword arguments: + length -- length of the bitstring in bits, if needed and appropriate. + It must be supplied for all integer and float initialisers. + offset -- bit offset to the data. These offset bits are + ignored and this is intended for use when + initialising using 'bytes' or 'filename'. 
+ + """ + self._pos = 0 + # For mutable BitStreams we always read in files to memory: + if not isinstance(self._datastore, ByteStore): + self._ensureinmemory() + + def __new__(cls, auto=None, length=None, offset=None, **kwargs): + x = super(BitStream, cls).__new__(cls) + x._initialise(auto, length, offset, **kwargs) + return x + + def __copy__(self): + """Return a new copy of the BitStream.""" + s_copy = BitStream() + s_copy._pos = 0 + if not isinstance(self._datastore, ByteStore): + # Let them both point to the same (invariant) array. + # If either gets modified then at that point they'll be read into memory. + s_copy._datastore = self._datastore + else: + s_copy._datastore = ByteStore(self._datastore._rawarray[:], + self._datastore.bitlength, + self._datastore.offset) + return s_copy + + def prepend(self, bs): + """Prepend a bitstring to the current bitstring. + + bs -- The bitstring to prepend. + + """ + bs = self._converttobitstring(bs) + self._prepend(bs) + self._pos += bs.len + + +def pack(fmt, *values, **kwargs): + """Pack the values according to the format string and return a new BitStream. + + fmt -- A single string or a list of strings with comma separated tokens + describing how to create the BitStream. + values -- Zero or more values to pack according to the format. + kwargs -- A dictionary or keyword-value pairs - the keywords used in the + format string will be replaced with their given value. 
+ + Token examples: 'int:12' : 12 bits as a signed integer + 'uint:8' : 8 bits as an unsigned integer + 'float:64' : 8 bytes as a big-endian float + 'intbe:16' : 2 bytes as a big-endian signed integer + 'uintbe:16' : 2 bytes as a big-endian unsigned integer + 'intle:32' : 4 bytes as a little-endian signed integer + 'uintle:32' : 4 bytes as a little-endian unsigned integer + 'floatle:64': 8 bytes as a little-endian float + 'intne:24' : 3 bytes as a native-endian signed integer + 'uintne:24' : 3 bytes as a native-endian unsigned integer + 'floatne:32': 4 bytes as a native-endian float + 'hex:80' : 80 bits as a hex string + 'oct:9' : 9 bits as an octal string + 'bin:1' : single bit binary string + 'ue' / 'uie': next bits as unsigned exp-Golomb code + 'se' / 'sie': next bits as signed exp-Golomb code + 'bits:5' : 5 bits as a bitstring object + 'bytes:10' : 10 bytes as a bytes object + 'bool' : 1 bit as a bool + 'pad:3' : 3 zero bits as padding + + >>> s = pack('uint:12, bits', 100, '0xffe') + >>> t = pack(['bits', 'bin:3'], s, '111') + >>> u = pack('uint:8=a, uint:8=b, uint:55=a', a=6, b=44) + + """ + tokens = [] + if isinstance(fmt, basestring): + fmt = [fmt] + try: + for f_item in fmt: + _, tkns = tokenparser(f_item, tuple(sorted(kwargs.keys()))) + tokens.extend(tkns) + except ValueError as e: + raise CreationError(*e.args) + value_iter = iter(values) + s = BitStream() + try: + for name, length, value in tokens: + # If the value is in the kwd dictionary then it takes precedence. + if value in kwargs: + value = kwargs[value] + # If the length is in the kwd dictionary then use that too. 
+ if length in kwargs: + length = kwargs[length] + # Also if we just have a dictionary name then we want to use it + if name in kwargs and length is None and value is None: + s.append(kwargs[name]) + continue + if length is not None: + length = int(length) + if value is None and name != 'pad': + # Take the next value from the ones provided + value = next(value_iter) + s._append(BitStream._init_with_token(name, length, value)) + except StopIteration: + raise CreationError("Not enough parameters present to pack according to the " + "format. {0} values are needed.", len(tokens)) + try: + next(value_iter) + except StopIteration: + # Good, we've used up all the *values. + return s + raise CreationError("Too many parameters present to pack according to the format.") + + +# Aliases for backward compatibility +ConstBitArray = Bits +BitString = BitStream + +__all__ = ['ConstBitArray', 'ConstBitStream', 'BitStream', 'BitArray', + 'Bits', 'BitString', 'pack', 'Error', 'ReadError', + 'InterpretError', 'ByteAlignError', 'CreationError', 'bytealigned'] diff --git a/python/bitstring/doc/bitstring_manual.pdf b/python/bitstring/doc/bitstring_manual.pdf new file mode 100644 index 000000000..dc17385b7 Binary files /dev/null and b/python/bitstring/doc/bitstring_manual.pdf differ diff --git a/python/bitstring/release_notes.txt b/python/bitstring/release_notes.txt new file mode 100644 index 000000000..8cad4ca9d --- /dev/null +++ b/python/bitstring/release_notes.txt @@ -0,0 +1,1523 @@ +-------------------------------- +bitstring module version history +-------------------------------- + +--------------------------------------- +March 4th 2014: version 3.1.3 released +--------------------------------------- +This is another bug fix release. + +* Fix for problem with prepend for bitstrings with byte offsets in their data store. + +--------------------------------------- +April 18th 2013: version 3.1.2 released +--------------------------------------- +This is another bug fix release. 
+ +* Fix for problem where unpacking bytes would by eight times too long + +--------------------------------------- +March 21st 2013: version 3.1.1 released +--------------------------------------- +This is a bug fix release. + +* Fix for problem where concatenating bitstrings sometimes modified method's arguments + +------------------------------------------ +February 26th 2013: version 3.1.0 released +------------------------------------------ +This is a minor release with a couple of new features and some bug fixes. + +New 'pad' token +--------------- + +This token can be used in reads and when packing/unpacking to indicate that +you don't care about the contents of these bits. Any padding bits will just +be skipped over when reading/unpacking or zero-filled when packing. + + >>> a, b = s.readlist('pad:5, uint:3, pad:1, uint:3') + +Here only two items are returned in the list - the padding bits are ignored. + +New clear and copy convenience methods +-------------------------------------- + +These methods have been introduced in Python 3.3 for lists and bytearrays, +as more obvious ways of clearing and copying, and we mirror that change here. + +t = s.copy() is equivalent to t = s[:], and s.clear() is equivalent to del s[:]. + +Other changes +------------- + +* Some bug fixes. + +----------------------------------------- +February 7th 2012: version 3.0.2 released +----------------------------------------- +This is a minor update that fixes a few bugs. + +* Fix for subclasses of bitstring classes behaving strangely (Issue 121). +* Fix for excessive memory usage in rare cases (Issue 120). +* Fixes for slicing edge cases. + +There has also been a reorganisation of the code to return it to a single +'bitstring.py' file rather than the package that has been used for the past +several releases. This change shouldn't affect users directly. 
+ +------------------------------------------ +November 21st 2011: version 3.0.1 released +------------------------------------------ +This release fixed a small but very visible bug in bitstring printing. + +------------------------------------------ +November 21st 2011: version 3.0.0 released +------------------------------------------ +This is a major release which breaks backward compatibility in a few places. + +Backwardly incompatible changes +=============================== + +Hex, oct and bin properties don't have leading 0x, 0o and 0b +------------------------------------------------------------ + +If you ask for the hex, octal or binary representations of a bitstring then +they will no longer be prefixed with '0x', 0o' or '0b'. This was done as it +was noticed that the first thing a lot of user code does after getting these +representations was to cut off the first two characters before further +processing. + + >>> a = BitArray('0x123') + >>> a.hex, a.oct, a.bin + ('123', '0443', '000100100011') + +Previously this would have returned ('0x123', '0o0443', '0b000100100011') + +This change might require some recoding, but it should all be simplifications. + +ConstBitArray renamed to Bits +----------------------------- + +Previously Bits was an alias for ConstBitStream (for backward compatibility). +This has now changed so that Bits and BitArray loosely correspond to the +built-in types bytes and bytearray. + +If you were using streaming/reading methods on a Bits object then you will +have to change it to a ConstBitStream. + +The ConstBitArray name is kept as an alias for Bits. + +Stepping in slices has conventional meaning +------------------------------------------- + +The step parameter in __getitem__, __setitem__ and __delitem__ used to act +as a multiplier for the start and stop parameters. No one seemed to use it +though and so it has now reverted to the convential meaning for containers. 
+ +If you are using step then recoding is simple: s[a:b:c] becomes s[a*c:b*c]. + +Some examples of the new usage: + + >>> s = BitArray('0x0000') + s[::4] = [1, 1, 1, 1] + >>> s.hex + '8888' + >>> del s[8::2] + >>> s.hex + '880' + + +New features +============ + +New readto method +----------------- + +This method is a mix between a find and a read - it searches for a bitstring +and then reads up to and including it. For example: + + >>> s = ConstBitStream('0x47000102034704050647') + >>> s.readto('0x47', bytealigned=True) + BitStream('0x47') + >>> s.readto('0x47', bytealigned=True) + BitStream('0x0001020347') + >>> s.readto('0x47', bytealigned=True) + BitStream('0x04050647') + +pack function accepts an iterable as its format +----------------------------------------------- + +Previously only a string was accepted as the format in the pack function. +This was an oversight as it broke the symmetry between pack and unpack. +Now you can use formats like this: + + fmt = ['hex:8', 'bin:3'] + a = pack(fmt, '47', '001') + a.unpack(fmt) + + +-------------------------------------- +June 18th 2011: version 2.2.0 released +-------------------------------------- +This is a minor upgrade with a couple of new features. + +New interleaved exponential-Golomb interpretations +-------------------------------------------------- + +New bit interpretations for interleaved exponential-Golomb (as used in the +Dirac video codec) are supplied via 'uie' and 'sie': + + >>> s = BitArray(uie=41) + >>> s.uie + 41 + >>> s.bin + '0b00010001001' + +These are pretty similar to the non-interleaved versions - see the manual +for more details. Credit goes to Paul Sargent for the patch. + +New package-level bytealigned variable +-------------------------------------- + +A number of methods take a 'bytealigned' parameter to indicate that they +should only work on byte boundaries (e.g. find, replace, split). Previously +this parameter defaulted to 'False'. 
Instead it now defaults to +'bitstring.bytealigned', which itself defaults to 'False', but can be changed +to modify the default behaviour of the methods. For example: + + >>> a = BitArray('0x00 ff 0f ff') + >>> a.find('0x0f') + (4,) # found first not on a byte boundary + >>> a.find('0x0f', bytealigned=True) + (16,) # forced looking only on byte boundaries + >>> bitstring.bytealigned = True # Change default behaviour + >>> a.find('0x0f') + (16,) + >>> a.find('0x0f', bytealigned=False) + (4,) + +If you're only working with bytes then this can help avoid some errors and +save some typing! + +Other changes +------------- + +* Fix for Python 3.2, correcting for a change to the binascii module. +* Fix for bool initialisation from 0 or 1. +* Efficiency improvements, including interning strategy. + +------------------------------------------ +February 23rd 2011: version 2.1.1 released +------------------------------------------ +This is a release to fix a couple of bugs that were introduced in 2.1.0. + +* Bug fix: Reading using the 'bytes' token had been broken (Issue 102). +* Fixed problem using some methods on ConstBitArrays. +* Better exception handling for tokens missing values. +* Some performance improvements. + +----------------------------------------- +January 23rd 2011: version 2.1.0 released +----------------------------------------- + +New class hierarchy introduced with simpler classes +--------------------------------------------------- +Previously there were just two classes, the immutable Bits which was the base +class for the mutable BitString class. Both of these classes have the concept +of a bit position, from which reads etc. take place so that the bitstring could +be treated as if it were a file or stream. + +Two simpler classes have now been added which are purely bit containers and +don't have a bit position. These are called ConstBitArray and BitArray. As you +can guess the former is an immutable version of the latter. 
+ +The other classes have also been renamed to better reflect their capabilities. +Instead of BitString you can use BitStream, and instead of Bits you can use +ConstBitStream. The old names are kept as aliases for backward compatibility. + +The class hierarchy is: + + ConstBitArray + / \ + / \ + BitArray ConstBitStream (formerly Bits) + \ / + \ / + BitStream (formerly BitString) + + +Other changes +------------- +A lot of internal reorganisation has taken place since the previous version, +most of which won't be noticed by the end user. Some things you might see are: + +* New package structure. Previous versions have been a single file for the + module and another for the unit tests. The module is now split into many + more files so it can't be used just by copying bitstring.py any more. +* To run the unit tests there is now a script called runtests.py in the test + directory. +* File based bitstrings are now implemented in terms of an mmap. This should + be just an implementation detail, but unfortunately for 32-bit versions of + Python this creates a limit of 4GB on the files that can be used. The work + around is either to get a 64-bit Python, or just stick with version 2.0. +* The ConstBitArray and ConstBitStream classes no longer copy byte data when + a slice or a read takes place, they just take a reference. This is mostly + a very nice optimisation, but there are occasions where it could have an + adverse effect. For example if a very large bitstring is created, a small + slice taken and the original deleted. The byte data from the large + bitstring would still be retained in memory. +* Optimisations. Once again this version should be faster than the last. + The module is still pure Python but some of the reorganisation was to make + it more feasible to put some of the code into Cython or similar, so + hopefully more speed will be on the way. 
+ +-------------------------------------- +July 26th 2010: version 2.0.3 released +-------------------------------------- +* Bug fix: Using peek and read for a single bit now returns a new bitstring + as was intended, rather than the old behaviour of returning a bool. +* Removed HTML docs from source archive - better to use the online version. + +-------------------------------------- +July 25th 2010: version 2.0.2 released +-------------------------------------- +This is a major release, with a number of backwardly incompatible changes. +The main change is the removal of many methods, all of which have simple +alternatives. Other changes are quite minor but may need some recoding. + +There are a few new features, most of which have been made to help the +stream-lining of the API. As always there are performance improvements and +some API changes were made purely with future performance in mind. + +The backwardly incompatible changes are: +----------------------------------------- +* Methods removed. + +About half of the class methods have been removed from the API. They all have +simple alternatives, so what remains is more powerful and easier to remember. 
+The removed methods are listed here on the left, with their equivalent +replacements on the right: + +s.advancebit() -> s.pos += 1 +s.advancebits(bits) -> s.pos += bits +s.advancebyte() -> s.pos += 8 +s.advancebytes(bytes) -> s.pos += 8*bytes +s.allunset([a, b]) -> s.all(False, [a, b]) +s.anyunset([a, b]) -> s.any(False, [a, b]) +s.delete(bits, pos) -> del s[pos:pos+bits] +s.peekbit() -> s.peek(1) +s.peekbitlist(a, b) -> s.peeklist([a, b]) +s.peekbits(bits) -> s.peek(bits) +s.peekbyte() -> s.peek(8) +s.peekbytelist(a, b) -> s.peeklist([8*a, 8*b]) +s.peekbytes(bytes) -> s.peek(8*bytes) +s.readbit() -> s.read(1) +s.readbitlist(a, b) -> s.readlist([a, b]) +s.readbits(bits) -> s.read(bits) +s.readbyte() -> s.read(8) +s.readbytelist(a, b) -> s.readlist([8*a, 8*b]) +s.readbytes(bytes) -> s.read(8*bytes) +s.retreatbit() -> s.pos -= 1 +s.retreatbits(bits) -> s.pos -= bits +s.retreatbyte() -> s.pos -= 8 +s.retreatbytes(bytes) -> s.pos -= 8*bytes +s.reversebytes(start, end) -> s.byteswap(0, start, end) +s.seek(pos) -> s.pos = pos +s.seekbyte(bytepos) -> s.bytepos = bytepos +s.slice(start, end, step) -> s[start:end:step] +s.tell() -> s.pos +s.tellbyte() -> s.bytepos +s.truncateend(bits) -> del s[-bits:] +s.truncatestart(bits) -> del s[:bits] +s.unset([a, b]) -> s.set(False, [a, b]) + +Many of these methods have been deprecated for the last few releases, but +there are some new removals too. Any recoding needed should be quite +straightforward, so while I apologise for the hassle, I had to take the +opportunity to streamline and rationalise what was becoming a bit of an +overblown API. + +* set / unset methods combined. 
+ +The set/unset methods have been combined in a single method, which now +takes a boolean as its first argument: + +s.set([a, b]) -> s.set(1, [a, b]) +s.unset([a, b]) -> s.set(0, [a, b]) +s.allset([a, b]) -> s.all(1, [a, b]) +s.allunset([a, b]) -> s.all(0, [a, b]) +s.anyset([a, b]) -> s.any(1, [a, b]) +s.anyunset([a, b]) -> s.any(0, [a, b]) + +* all / any only accept iterables. + +The all and any methods (previously called allset, allunset, anyset and +anyunset) no longer accept a single bit position. The recommended way of +testing a single bit is just to index it, for example instead of: + +>>> if s.all(True, i): + +just use + +>>> if s[i]: + +If you really want to you can of course use an iterable with a single +element, such as 's.any(False, [i])', but it's clearer just to write +'not s[i]'. + +* Exception raised on reading off end of bitstring. + +If a read or peek goes beyond the end of the bitstring then a ReadError +will be raised. The previous behaviour was that the rest of the bitstring +would be returned and no exception raised. + +* BitStringError renamed to Error. + +The base class for errors in the bitstring module is now just Error, so +it will likely appear in your code as bitstring.Error instead of +the rather repetitive bitstring.BitStringError. + +* Single bit slices and reads return a bool. + +A single index slice (such as s[5]) will now return a bool (i.e. True or +False) rather than a single bit bitstring. This is partly to reflect the +style of the bytearray type, which returns an integer for single items, but +mostly to avoid common errors like: + +>>> if s[0]: +... do_something() + +While the intent of this code snippet is quite clear (i.e. do_something if +the first bit of s is set) under the old rules s[0] would be true as long +as s wasn't empty. That's because any one-bit bitstring was true as it was a +non-empty container. Under the new rule s[0] is True if s starts with a '1' +bit and False if s starts with a '0' bit. 
+ +The change does not affect reads and peeks, so s.peek(1) will still return +a single bit bitstring, which leads on to the next item... + +* Empty bitstrings or bitstrings with only zero bits are considered False. + +Previously a bitstring was False if it had no elements, otherwise it was True. +This is standard behaviour for containers, but wasn't very useful for a container +of just 0s and 1s. The new behaviour means that the bitstring is False if it +has no 1 bits. This means that code like this: + +>>> if s.peek(1): +... do_something() + +should work as you'd expect. It also means that Bits(1000), Bits(0x00) and +Bits('uint:12=0') are all also False. If you need to check for the emptiness of +a bitstring then instead check the len property: + +if s -> if s.len +if not s -> if not s.len + +* Length and offset disallowed for some initialisers. + +Previously you could create bitstring using expressions like: + +>>> s = Bits(hex='0xabcde', offset=4, length=13) + +This has now been disallowed, and the offset and length parameters may only +be used when initialising with bytes or a file. To replace the old behaviour +you could instead use + +>>> s = Bits(hex='0xabcde')[4:17] + +* Renamed 'format' parameter 'fmt'. + +Methods with a 'format' parameter have had it renamed to 'fmt', to prevent +hiding the built-in 'format'. Affects methods unpack, read, peek, readlist, +peeklist and byteswap and the pack function. + +* Iterables instead of *format accepted for some methods. + +This means that for the affected methods (unpack, readlist and peeklist) you +will need to use an iterable to specify multiple items. This is easier to +show than to describe, so instead of + +>>> a, b, c, d = s.readlist('uint:12', 'hex:4', 'bin:7') + +you would instead write + +>>> a, b, c, d = s.readlist(['uint:12', 'hex:4', 'bin:7']) + +Note that you could still use the single string 'uint:12, hex:4, bin:7' if +you preferred. + +* Bool auto-initialisation removed. 
+ +You can no longer use True and False to initialise single bit bitstrings. +The reasoning behind this is that as bool is a subclass of int, it really is +bad practice to have Bits(False) be different to Bits(0) and to have Bits(True) +different to Bits(1). + +If you have used bool auto-initialisation then you will have to be careful to +replace it as the bools will now be interpreted as ints, so Bits(False) will +be empty (a bitstring of length 0), and Bits(True) will be a single zero bit +(a bitstring of length 1). Sorry for the confusion, but I think this will +prevent bigger problems in the future. + +There are a few alternatives for creating a single bit bitstring. My favourite +is to use a list with a single item: + +Bits(False) -> Bits([0]) +Bits(True) -> Bits([1]) + +* New creation from file strategy + +Previously if you created a bitstring from a file, either by auto-initialising +with a file object or using the filename parameter, the file would not be read +into memory unless you tried to modify it, at which point the whole file would +be read. + +The new behaviour depends on whether you create a Bits or a BitString from the +file. If you create a Bits (which is immutable) then the file will never be +read into memory. This allows very large files to be opened for examination +even if they could never fit in memory. + +If however you create a BitString, the whole of the referenced file will be read +to store in memory. If the file is very big this could take a long time, or fail, +but the idea is that in saying you want the mutable BitString you are implicitly +saying that you want to make changes and so (for now) we need to load it into +memory. + +The new strategy is a bit more predictable in terms of performance than the old. +The main point to remember is that if you want to open a file and don't plan to +alter the bitstring then use the Bits class rather than BitString. 
+ +Just to be clear, in neither case will the contents of the file ever be changed - +if you want to output the modified BitString then use the tofile method, for +example. + +* find and rfind return a tuple instead of a bool. + +If a find is unsuccessful then an empty tuple is returned (which is False in a +boolean sense) otherwise a single item tuple with the bit position is returned +(which is True in a boolean sense). You shouldn't need to recode unless you +explicitly compared the result of a find to True or False, for example this +snippet doesn't need to be altered: + +>>> if s.find('0x23'): +... print(s.bitpos) + +but you could now instead use + +>>> found = s.find('0x23') +>>> if found: +... print(found[0]) + +The reason for returning the bit position in a tuple is so that finding at +position zero can still be True - it's the tuple (0,) - whereas not found can +be False - the empty tuple (). + +The new features in this release are: +------------------------------------- +* New count method. + +This method just counts the number of 1 or 0 bits in the bitstring. + +>>> s = Bits('0x31fff4') +>>> s.count(1) +16 + +* read and peek methods accept integers. + +The read, readlist, peek and peeklist methods now accept integers as parameters +to mean "read this many bits and return a bitstring". This has allowed a number +of methods to be removed from this release, so for example instead of: + +>>> a, b, c = s.readbits(5, 6, 7) +>>> if s.peekbit(): +... do_something() + +you should write: + +>>> a, b, c = s.readlist([5, 6, 7]) +>>> if s.peek(1): +... do_something() + +* byteswap used to reverse all bytes. + +The byteswap method now allows a format specifier of 0 (the default) to signify +that all of the whole bytes should be reversed. This means that calling just +byteswap() is almost equivalent to the now removed bytereverse() method (a small +difference is that byteswap won't raise an exception if the bitstring isn't a +whole number of bytes long). 
+ +* Auto initialise with bytearray or (for Python 3 only) bytes. + +So rather than writing: + +>>> a = Bits(bytes=some_bytearray) + +you can just write + +>>> a = Bits(some_bytearray) + +This also works for the bytes type, but only if you're using Python 3. +For Python 2 it's not possible to distinguish between a bytes object and a +str. For this reason this method should be used with some caution as it will +make your code behave differently with the different major Python versions. + +>>> b = Bits(b'abcd\x23\x00') # Only Python 3! + +* set, invert, all and any default to whole bitstring. + +This means that you can for example write: + +>>> a = BitString(100) # 100 zero bits +>>> a.set(1) # set all bits to 1 +>>> a.all(1) # are all bits set to 1? +True +>>> a.any(0) # are any set to 0? +False +>>> a.invert() # invert every bit + +* New exception types. + +As well as renaming BitStringError to just Error +there are also new exceptions which use Error as a base class. + +These can be caught in preference to Error if you need finer control. +The new exceptions sometimes also derive from built-in exceptions: + +ByteAlignError(Error) - whole byte position or length needed. + +ReadError(Error, IndexError) - reading or peeking off the end of +the bitstring. + +CreationError(Error, ValueError) - inappropriate argument during +bitstring creation. + +InterpretError(Error, ValueError) - inappropriate interpretation of +binary data. + + +-------------------------------------------------------------- +March 18th 2010: version 1.3.0 for Python 2.6 and 3.x released +-------------------------------------------------------------- +New features: + +* byteswap method for changing endianness. + +Changes the endianness in-place according to a format string or +integer(s) giving the byte pattern. See the manual for details. 
+ +>>> s = BitString('0x00112233445566') +>>> s.byteswap(2) +3 +>>> s +BitString('0x11003322554466') +>>> s.byteswap('h') +3 +>>> s +BitString('0x00112233445566') +>>> s.byteswap([2, 5]) +1 +>>> s +BitString('0x11006655443322') + +* Multiplicative factors in bitstring creation and reading. + +For example: + +>>> s = Bits('100*0x123') + +* Token grouping using parenthesis. + +For example: + +>>> s = Bits('3*(uint:6=3, 0b1)') + +* Negative slice indices allowed. + +The start and end parameters of many methods may now be negative, with the +same meaning as for negative slice indices. Affects all methods with these +parameters. + +* Sequence ABCs used. + +The Bits class now derives from collections.Sequence, while the BitString +class derives from collections.MutableSequence. + +* Keywords allowed in readlist, peeklist and unpack. + +Keywords for token lengths are now permitted when reading. So for example, +you can write + +>>> s = bitstring.pack('4*(uint:n)', 2, 3, 4, 5, n=7) +>>> s.unpack('4*(uint:n)', n=7) +[2, 3, 4, 5] + +* start and end parameters added to rol and ror. + +* join function accepts other iterables. + +Also its parameter has changed from 'bitstringlist' to 'sequence'. This is +technically a backward incompatibility in the unlikely event that you are +referring to the parameter by name. + +* __init__ method accepts keywords. + +Rather than a long list of initialisers the __init__ methods now use a +**kwargs dictionary for all initialisers except 'auto'. This should have no +effect, except that this is a small backward incompatibility if you use +positional arguments when initialising with anything other than auto +(which would be rather unusual). + +* More optimisations. + +* Bug fixed in replace method (it could fail if start != 0). 
+ +---------------------------------------------------------------- +January 19th 2010: version 1.2.0 for Python 2.6 and 3.x released +---------------------------------------------------------------- + +* New 'Bits' class. + +Introducing a brand new class, Bits, representing an immutable sequence of +bits. + +The Bits class is the base class for the mutable BitString. The differences +between Bits and BitStrings are: + +1) Bits are immutable, so once they have been created their value cannot change. +This of course means that mutating methods (append, replace, del etc.) are not +available for Bits. + +2) Bits are hashable, so they can be used in sets and as keys in dictionaries. + +3) Bits are potentially more efficient than BitStrings, both in terms of +computation and memory. The current implementation is only marginally +more efficient though - this should improve in future versions. + +You can switch from Bits to a BitString or vice versa by constructing a new +object from the old. + +>>> s = Bits('0xabcd') +>>> t = BitString(s) +>>> t.append('0xe') +>>> u = Bits(t) + +The relationship between Bits and BitString is supposed to loosely mirror that +between bytes and bytearray in Python 3. + +* Deprecation messages turned on. + +A number of methods have been flagged for removal in version 2. Deprecation +warnings will now be given, which include an alternative way to do the same +thing. All of the deprecated methods have simpler equivalent alternatives. + +>>> t = s.slice(0, 2) +__main__:1: DeprecationWarning: Call to deprecated function slice. +Instead of 's.slice(a, b, c)' use 's[a:b:c]'. + +The deprecated methods are: advancebit, advancebits, advancebyte, advancebytes, +retreatbit, retreatbits, retreatbyte, retreatbytes, tell, seek, slice, delete, +tellbyte, seekbyte, truncatestart and truncateend. + +* Initialise from bool. + +Booleans have been added to the list of types that can 'auto' +initialise a bitstring. 
+ +>>> zerobit = BitString(False) +>>> onebit = BitString(True) + +* Improved efficiency. + +More methods have been speeded up, in particular some deletions and insertions. + +* Bug fixes. + +A rare problem with truncating the start of bitstrings was fixed. + +A possible problem outputting the final byte in tofile() was fixed. + +----------------------------------------------------------------- +December 22nd 2009: version 1.1.3 for Python 2.6 and 3.x released +----------------------------------------------------------------- + +This version hopefully fixes an installation problem for platforms with +case-sensitive file systems. There are no new features or other bug fixes. + +----------------------------------------------------------------- +December 18th 2009: version 1.1.2 for Python 2.6 and 3.x released +----------------------------------------------------------------- + +This is a minor update with (almost) no new features. + +* Improved efficiency. + +The speed of many typical operations has been increased, some substantially. + +* Initialise from integer. + +A BitString of '0' bits can be created using just an integer to give the length +in bits. So instead of + +>>> s = BitString(length=100) + +you can write just + +>>> s = BitString(100) + +This matches the behaviour of bytearrays and (in Python 3) bytes. + +* A defect related to using the set / unset functions on BitStrings initialised +from a file has been fixed. + +----------------------------------------------------------------- +November 24th 2009: version 1.1.0 for Python 2.6 and 3.x released +----------------------------------------------------------------- +Note that this version will not work for Python 2.4 or 2.5. There may be an +update for these Python versions some time next year, but it's not a priority +quite yet. Also note that only one version is now provided, which works for +Python 2.6 and 3.x (done with the minimum of hackery!) + +* Improved efficiency. 
+ +A fair number of functions have improved efficiency, some quite dramatically. + +* New bit setting and checking functions. + +Although these functions don't do anything that couldn't be done before, they +do make some common use cases much more efficient. If you need to set or check +single bits then these are the functions you need. + +set / unset : Set bit(s) to 1 or 0 respectively. +allset / allunset : Check if all bits are 1 or all 0. +anyset / anyunset : Check if any bits are 1 or any 0. + +>>> s = BitString(length=1000) +>>> s.set((10, 100, 44, 12, 1)) +>>> s.allunset((2, 22, 222)) +True +>>> s.anyset(range(7, 77)) +True + +* New rotate functions. + +ror / rol : Rotate bits to the right or left respectively. + +>>> s = BitString('0b100000000') +>>> s.ror(2) +>>> s.bin +'0b001000000' +>>> s.rol(5) +>>> s.bin +'0b000000100' + +* Floating point interpretations. + +New float initialisations and interpretations are available. These only work +for BitStrings of length 32 or 64 bits. + +>>> s = BitString(float=0.2, length=64) +>>> s.float +0.200000000000000001 +>>> t = bitstring.pack('<3f', -0.4, 1e34, 17.0) +>>> t.hex +'0xcdccccbedf84f67700008841' + +* 'bytes' token reintroduced. + +This token returns a bytes object (equivalent to a str in Python 2.6). + +>>> s = BitString('0x010203') +>>> s.unpack('bytes:2, bytes:1') +['\x01\x02', '\x03'] + +* 'uint' is now the default token type. + +So for example these are equivalent: + +a, b = s.readlist('uint:12, uint:12') +a, b = s.readlist('12, 12') + +-------------------------------------------------------- +October 10th 2009: version 1.0.1 for Python 3.x released +-------------------------------------------------------- +This is a straight port of version 1.0.0 to Python 3. + +For changes since the last Python 3 release read all the way down in this +document to version 0.4.3. + +This version will also work for Python 2.6, but there's no advantage to using +it over the 1.0.0 release. 
It won't work for anything before 2.6. + +------------------------------------------------------- +October 9th 2009: version 1.0.0 for Python 2.x released +------------------------------------------------------- +Version 1 is here! + +This is the first release not to carry the 'beta' tag. It contains a couple of +minor new features but is principally a release to fix the API. If you've been +using an older version then you almost certainly will have to recode a bit. If +you're not ready to do that then you may wish to delay updating. + +So the bad news is that there are lots of small changes to the API. The good +news is that all the changes are pretty trivial, the new API is cleaner and +more 'Pythonic', and that by making it version 1.0 I'm promising not to +tweak it again for some time. + +** API Changes ** + +* New read / peek functions for returning multiple items. + +The functions read, readbits, readbytes, peek, peekbits and peekbytes now only +ever return a single item, never a list. + +The new functions readlist, readbitlist, readbytelist, peeklist, peekbitlist +and peekbytelist can be used to read multiple items and will always return a +list. + +So a line like: + +>>> a, b = s.read('uint:12, hex:32') + +becomes + +>>> a, b = s.readlist('uint:12, hex:32') + +* Renaming / removing functions. + +Functions have been renamed as follows: + +seekbit -> seek +tellbit -> tell +reversebits -> reverse +deletebits -> delete +tostring -> tobytes + +and a couple have been removed altogether: + +deletebytes - use delete instead. +empty - use 'not s' rather than 's.empty()'. + +* Renaming parameters. + +The parameters 'startbit' and 'endbit' have been renamed 'start' and 'end'. +This affects the functions slice, find, findall, rfind, reverse, cut and split. + +The parameter 'bitpos' has been renamed to 'pos'. This affects the functions +seek, tell, insert, overwrite and delete. + +* Mutating methods return None rather than self. 
+ +This means that you can't chain functions together so + +>>> s.append('0x00').prepend('0xff') +>>> t = s.reverse() + +Needs to be rewritten + +>>> s.append('0x00') +>>> s.prepend('0xff') +>>> s.reverse() +>>> t = s + +Affects truncatestart, truncateend, insert, overwrite, delete, append, +prepend, reverse and reversebytes. + +* Properties renamed. + +The 'data' property has been renamed to 'bytes'. Also if the BitString is not a +whole number of bytes then a ValueError exception will be raised when using +'bytes' as a 'getter'. + +Properties 'len' and 'pos' have been added to replace 'length' and 'bitpos', +although the longer names have not been removed so you can continue to use them +if you prefer. + +* Other changes. + +The unpack function now always returns a list, never a single item. + +BitStrings are now 'unhashable', so calling hash on one or making a set will +fail. + +The colon separating the token name from its length is now mandatory. So for +example BitString('uint12=100') becomes BitString('uint:12=100'). + +Removed support for the 'bytes' token in format strings. Instead of +s.read('bytes:4') use s.read('bits:32'). + +** New features ** + +* Added endswith and startswith functions. + +These do much as you'd expect; they return True or False depending on whether +the BitString starts or ends with the parameter. + +>>> BitString('0xef342').startswith('0b11101') +True + +---------------------------------------------------------- +September 11th 2009: version 0.5.2 for Python 2.x released +---------------------------------------------------------- +Finally some tools for dealing with endianness! + +* New interpretations are now available for whole-byte BitStrings that treat +them as big, little, or native-endian. + +>>> big = BitString(intbe=1, length=16) # or BitString('intbe:16=1') if you prefer. 
+>>> little = BitString(intle=1, length=16) +>>> print big.hex, little.hex +0x0001 0x0100 +>>> print big.intbe, little.intle +1 1 + +* 'Struct'-like compact format codes + +To save some typing when using pack, unpack, read and peek, compact format +codes based on those used in the struct and array modules have been added. +These must start with a character indicating the endianness (>, < or @ for +big, little and native-endian), followed by characters giving the format: + +b 1-byte signed int +B 1-byte unsigned int +h 2-byte signed int +H 2-byte unsigned int +l 4-byte signed int +L 4-byte unsigned int +q 8-byte signed int +Q 8-byte unsigned int + +For example: + +>>> s = bitstring.pack('<4h', 0, 1, 2, 3) + +creates a BitString with four little-endian 2-byte integers. While + +>>> x, y, z = s.read('>hhl') + +reads them back as two big-endian two-byte integers and one four-byte big +endian integer. + +Of course you can combine this new format with the old ones however you like: + +>>> s.unpack('<h, intle:24, uint:5, bin') + +* New pack() function. + +A module-level pack() function has been added which creates a BitString from a +format string and values: + +>>> from bitstring import BitString, pack +>>> a = pack('0b11, 0xff, 0o77, int:5=-1, se=33') + +You can also leave placeholders in the format, which will be filled in by +the values provided. + +>>> b = pack('uint:10, hex:4', 33, 'f') + +Finally you can use a dictionary or keywords. + +>>> c = pack('bin=a, hex=b, bin=a', a='010', b='ef') + +The unpack function is similar to the read function except that it always +unpacks from the start of the BitString. + +>>> x, y = b.unpack('uint:10, hex') + +If a token is given without a length (as above) then it will expand to fill the +remaining bits in the BitString. This also now works with read() and peek(). + +* New tostring() and tofile() functions. + +The tostring() function just returns the data as a string, with up to seven +zero bits appended to byte align. The tofile() function does the same except +writes to a file object. + +>>> f = open('myfile', 'wb') +>>> BitString('0x1234ff').tofile(f) + +* Other changes. 
+ +The use of '=' is now mandatory in 'auto' initialisers. Tokens like 'uint12 100' will +no longer work. Also the use of a ':' before the length is encouraged, but not yet +mandated. So the previous example should be written as 'uint:12=100'. + +The 'auto' initialiser will now take a file object. + +>>> f = open('myfile', 'rb') +>>> s = BitString(f) + +----------------------------------------------------- +July 19th 2009: version 0.5.0 for Python 2.x released +----------------------------------------------------- + +This update breaks backward compatibility in a couple of areas. The only one +you probably need to be concerned about is the change to the default for +bytealigned in find, replace, split, etc. + +See the user manual for more details on each of these items. + +* Expanded abilities of 'auto' initialiser. + +More types can be initialised through the 'auto' initialiser. For example +instead of + +>>> a = BitString(uint=44, length=16) + +you can write + +>>> a = BitString('uint16=44') + +Also, different comma-separated tokens will be joined together, e.g. + +>>> b = BitString('0xff') + 'int8=-5' + +can be written + +>>> b = BitString('0xff, int8=-5') + +* New formatted read() and peek() functions. + +These take a format string similar to that used in the auto initialiser. +If only one token is provided then a single value is returned, otherwise a +list of values is returned. + +>>> start_code, width, height = s.read('hex32, uint12, uint12') + +is equivalent to + +>>> start_code = s.readbits(32).hex +>>> width = s.readbits(12).uint +>>> height = s.readbits(12).uint + +The tokens are: + + int n : n bits as a signed integer. + uint n : n bits as an unsigned integer. + hex n : n bits as a hexadecimal string. + oct n : n bits as an octal string. + bin n : n bits as a binary string. + ue : next bits as an unsigned exp-Golomb. + se : next bits as a signed exp-Golomb. + bits n : n bits as a new BitString. + bytes n : n bytes as a new BitString. 
+ +See the user manual for more details. + +* hex() and oct() functions removed. + +The special functions for hex() and oct() have been removed. Please use the +hex and oct properties instead. + +>>> hex(s) + +becomes + +>>> s.hex + +* join made a member function. + +The join function must now be called on a BitString object, which will be +used to join the list together. You may need to recode slightly: + +>>> s = bitstring.join('0x34', '0b1001', '0b1') + +becomes + +>>> s = BitString().join('0x34', '0b1001', '0b1') + +* More than one value allowed in readbits, readbytes, peekbits and peekbytes + +If you specify more than one bit or byte length then a list of BitStrings will +be returned. + +>>> a, b, c = s.readbits(10, 5, 5) + +is equivalent to + +>>> a = s.readbits(10) +>>> b = s.readbits(5) +>>> c = s.readbits(5) + +* bytealigned defaults to False, and is at the end of the parameter list + +Functions that have a bytealigned parameter have changed so that it now +defaults to False rather than True. Also its position in the parameter list +has changed to be at the end. You may need to recode slightly (sorry!) + +* readue and readse functions have been removed + +Instead you should use the new read function with a 'ue' or 'se' token: + +>>> i = s.readue() + +becomes + +>>> i = s.read('ue') + +This is more flexible as you can read multiple items in one go, plus you can +now also use the peek function with ue and se. + +* Minor bugs fixed. + +See the issue tracker for more details. + +----------------------------------------------------- +June 15th 2009: version 0.4.3 for Python 2.x released +----------------------------------------------------- + +This is a minor update. This release is the first to bundle the bitstring +manual. This is a PDF and you can find it in the docs directory. + +Changes in version 0.4.3 + +* New 'cut' function + +This function returns a generator for constant sized chunks of a BitString. + +>>> for byte in s.cut(8): +... 
do_something_with(byte) + +You can also specify a startbit and endbit, as well as a count, which limits +the number of items generated: + +>>> first100TSPackets = list(s.cut(188*8, count=100)) + +* 'slice' function now equivalent to __getitem__. + +This means that a step can also be given to the slice function so that the +following are now the same thing, and it's just a personal preference which +to use: + +>>> s1 = s[a:b:c] +>>> s2 = s.slice(a, b, c) + +* findall gets a 'count' parameter. + +So now + +>>> list(a.findall(s, count=n)) + +is equivalent to + +>>> list(a.findall(s))[:n] + +except that it won't need to generate the whole list and so is much more +efficient. + +* Changes to 'split'. + +The split function now has a 'count' parameter rather than 'maxsplit'. This +makes the interface closer to that for cut, replace and findall. The final item +generated is now no longer the whole of the rest of the BitString. + +* A couple of minor bugs were fixed. See the issue tracker for details. + +---------------------------------------------------- +May 25th 2009: version 0.4.2 for Python 2.x released +---------------------------------------------------- + +This is a minor update, and almost doesn't break compatibility with version +0.4.0, but with the slight exception of findall() returning a generator, +detailed below. + +Changes in version 0.4.2 + +* Stepping in slices + +The use of the step parameter (also known as the stride) in slices has been +added. Its use is a little non-standard as it effectively gives a multiplicative +factor to apply to the start and stop parameters, rather than skipping over +bits. + +For example this makes it much more convenient if you want to give slices in +terms of bytes instead of bits. Instead of writing s[a*8:b*8] you can use +s[a:b:8]. + +When using a step the BitString is effectively truncated to a multiple of the +step, so s[::8] is equal to s if s is an integer number of bytes, otherwise it +is truncated by up to 7 bits. 
So the final seven complete 16-bit words could be +written as s[-7::16] + +Negative slices are also allowed, and should do what you'd expect. So for +example s[::-1] returns a bit-reversed copy of s (which is similar to +s.reversebits(), which does the same operation on s in-place). As another +example, to get the first 10 bytes in reverse byte order you could use +s_bytereversed = s[0:10:-8]. + +* Removed restrictions on offset + +You can now specify an offset of greater than 7 bits when creating a BitString, +and the use of offset is also now permitted when using the filename initialiser. +This is useful when you want to create a BitString from the middle of a file +without having to read the file into memory. + +>>> f = BitString(filename='reallybigfile', offset=8000000, length=32) + +* Integers can be assigned to slices + +You can now assign an integer to a slice of a BitString. If the integer doesn't +fit in the size of slice given then a ValueError exception is raised. So this +is now allowed and works as expected: + +>>> s[8:16] = 106 + +and is equivalent to + +>>> s[8:16] = BitString(uint=106, length=8) + +* Less exceptions raised + +Some changes have been made to slicing so that less exceptions are raised, +bringing the interface closer to that for lists. So for example trying to delete +past the end of the BitString will now just delete to the end, rather than +raising a ValueError. + +* Initialisation from lists and tuples + +A new option for the auto initialiser is to pass it a list or tuple. The items +in the list or tuple are evaluated as booleans and the bits in the BitString are +set to 1 for True items and 0 for False items. This can be used anywhere the +auto initialiser can currently be used. For example: + +>>> a = BitString([True, 7, False, 0, ()]) # 0b11000 +>>> b = a + ['Yes', ''] # Adds '0b10' +>>> (True, True, False) in a +True + +* Miscellany + +reversebits() now has optional startbit and endbit parameters. 
+ +As an optimisation findall() will return a generator, rather than a list. If you +still want the whole list then of course you can just call list() on the +generator. + +Improved efficiency of rfind(). + +A couple of minor bugs were fixed. See the issue tracker for details. + +----------------------------------------------------- +April 23rd 2009: Python 3 only version 0.4.1 released +----------------------------------------------------- + +This version is just a port of version 0.4.0 to Python 3. All the unit tests +pass, but beyond that only limited ad hoc testing has been done and so it +should be considered an experimental release. That said, the unit test +coverage is very good - I'm just not sure if anyone even wants a Python 3 +version! + +--------------------------------------- +April 11th 2009: version 0.4.0 released +--------------------------------------- +Changes in version 0.4.0 + +* New functions + +Added rfind(), findall(), replace(). These do pretty much what you'd expect - +see the docstrings or the wiki for more information. + +* More special functions + +Some missing functions were added: __repr__, __contains__, __rand__, +__ror__, _rxor__ and __delitem__. + +* Miscellany + +A couple of small bugs were fixed (see the issue tracker). + +---- + +There are some small backward incompatibilities relative to version 0.3.2: + +* Combined find() and findbytealigned() + +findbytealigned() has been removed, and becomes part of find(). The default +start position has changed on both find() and split() to be the start of the +BitString. You may need to recode: + +>>> s1.find(bs) +>>> s2.findbytealigned(bs) +>>> s2.split(bs) + +becomes + +>>> s1.find(bs, bytealigned=False, startbit=s1.bitpos) +>>> s2.find(bs, startbit=s1.bitpos) # bytealigned defaults to True +>>> s2.split(bs, startbit=s2.bitpos) + +* Reading off end of BitString no longer raises exception. 
+ +Previously a read or peek function that encountered the end of the BitString +would raise a ValueError. It will now instead return the remainder of the +BitString, which could be an empty BitString. This is closer to the file +object interface. + +* Removed visibility of offset. + +The offset property was previously read-only, and has now been removed from +public view altogether. As it is used internally for efficiency reasons you +shouldn't really have needed to use it. If you do then use the _offset parameter +instead (with caution). + +--------------------------------------- +March 11th 2009: version 0.3.2 released +--------------------------------------- +Changes in version 0.3.2 + +* Better performance + +A number of functions (especially find() and findbytealigned()) have been sped +up considerably. + +* Bit-wise operations + +Added support for bit-wise AND (&), OR (|) and XOR (^). For example: + +>>> a = BitString('0b00111') +>>> print a & '0b10101' +0b00101 + +* Miscellany + +Added seekbit() and seekbyte() functions. These complement the 'advance' and +'retreat' functions, although you can still just use bitpos and bytepos +properties directly. + +>>> a.seekbit(100) # Equivalent to a.bitpos = 100 + +Allowed comparisons between BitString objects and strings. For example this +will now work: + +>>> a = BitString('0b00001111') +>>> a == '0x0f' +True + +------------------------------------------ +February 26th 2009: version 0.3.1 released +------------------------------------------ +Changes in version 0.3.1 + +This version only adds features and fixes bugs relative to 0.3.0, and doesn't +break backwards compatibility. + +* Octal interpretation and initialisation + +The oct property now joins bin and hex. Just prefix octal numbers with '0o'. + +>>> a = BitString('0o755') +>>> print a.bin +0b111101101 + +* Simpler copying + +Rather than using b = copy.copy(a) to create a copy of a BitString, now you +can just use b = BitString(a). 
+ +* More special methods + +Lots of new special methods added, for example bit-shifting via << and >>, +equality testing via == and !=, bit inversion (~) and concatenation using *. + +Also __setitem__ is now supported so BitString objects can be modified using +standard index notation. + +* Proper installer + +Finally got round to writing the distutils script. To install just +python setup.py install. + +------------------------------------------ +February 15th 2009: version 0.3.0 released +------------------------------------------ +Changes in version 0.3.0 + +* Simpler initialisation from binary and hexadecimal + +The first argument in the BitString constructor is now called auto and will +attempt to interpret the type of a string. Prefix binary numbers with '0b' +and hexadecimals with '0x'. + +>>> a = BitString('0b0') # single zero bit +>>> b = BitString('0xffff') # two bytes + +Previously the first argument was data, so if you relied on this then you +will need to recode: + +>>> a = BitString('\x00\x00\x01\xb3') # Don't do this any more! + +becomes + +>>> a = BitString(data='\x00\x00\x01\xb3') + +or just + +>>> a = BitString('0x000001b3') + +This new notation can also be used in functions that take a BitString as an +argument. For example: + +>>> a = BitString('0x0011') + '0xff' +>>> a.insert('0b001', 6) +>>> a.find('0b1111') + +* BitString made more mutable + +The functions append, deletebits, insert, overwrite, truncatestart and +truncateend now modify the BitString that they act upon. This allows for +cleaner and more efficient code, but you may need to rewrite slightly if you +depended upon the old behaviour: + +>>> a = BitString(hex='0xffff') +>>> a = a.append(BitString(hex='0x00')) +>>> b = a.deletebits(10, 10) + +becomes: + +>>> a = BitString('0xffff') +>>> a.append('0x00') +>>> b = copy.copy(a) +>>> b.deletebits(10, 10) + +Thanks to Frank Aune for suggestions in this and other areas. 
+ +* Changes to printing + +The binary interpretation of a BitString is now prepended with '0b'. This is +in keeping with the Python 2.6 (and 3.0) bin function. The prefix is optional +when initialising using 'bin='. + +Also, if you just print a BitString with no interpretation it will pick +something appropriate - hex if it is an integer number of bytes, otherwise +binary. If the BitString representation is very long it will be truncated +by '...' so it is only an approximate interpretation. + +>>> a = BitString('0b0011111') +>>> print a +0b0011111 +>>> a += '0b0' +>>> print a +0x3e + +* More convenience functions + +Some missing functions such as advancebit and deletebytes have been added. Also +a number of peek functions make an appearance as have prepend and reversebits. +See the Tutorial for more details. + +----------------------------------------- +January 13th 2009: version 0.2.0 released +----------------------------------------- +Some fairly minor updates, not really deserving of a whole version point update. +------------------------------------------ +December 29th 2008: version 0.1.0 released +------------------------------------------ +First release! diff --git a/python/bitstring/setup.py b/python/bitstring/setup.py new file mode 100644 index 000000000..9f088dda9 --- /dev/null +++ b/python/bitstring/setup.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +from distutils.core import setup +# from distutils.extension import Extension +# from Cython.Distutils import build_ext +import sys + +kwds = {'long_description': open('README.txt').read()} + +if sys.version_info[:2] < (2, 6): + raise Exception('This version of bitstring needs Python 2.6 or later. 
' + 'For Python 2.4 / 2.5 please use bitstring version 1.0 instead.') + +# macros = [('PYREX_WITHOUT_ASSERTIONS', None)] +# ext_modules = [Extension('bitstring', ["bitstring.pyx"], define_macros=macros)] + +setup(name='bitstring', + version='3.1.3', + description='Simple construction, analysis and modification of binary data.', + author='Scott Griffiths', + author_email='scott@griffiths.name', + url='http://python-bitstring.googlecode.com', + download_url='http://python-bitstring.googlecode.com', + license='The MIT License: http://www.opensource.org/licenses/mit-license.php', + # cmdclass = {'build_ext': build_ext}, + # ext_modules = ext_modules, + py_modules=['bitstring'], + platforms='all', + classifiers = [ + 'Development Status :: 5 - Production/Stable', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'License :: OSI Approved :: MIT License', + 'Programming Language :: Python :: 2.6', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.0', + 'Programming Language :: Python :: 3.1', + 'Programming Language :: Python :: 3.2', + 'Programming Language :: Python :: 3.3', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + **kwds + ) + diff --git a/python/bitstring/test/smalltestfile b/python/bitstring/test/smalltestfile new file mode 100644 index 000000000..be687ec35 --- /dev/null +++ b/python/bitstring/test/smalltestfile @@ -0,0 +1 @@ +#Eg‰«Íï \ No newline at end of file diff --git a/python/bitstring/test/test.m1v b/python/bitstring/test/test.m1v new file mode 100644 index 000000000..2da3ece11 Binary files /dev/null and b/python/bitstring/test/test.m1v differ diff --git a/python/bitstring/test/test_bitarray.py b/python/bitstring/test/test_bitarray.py new file mode 100644 index 000000000..b80f90617 --- /dev/null +++ b/python/bitstring/test/test_bitarray.py @@ -0,0 +1,310 @@ +#!/usr/bin/env python +""" +Unit tests for the bitarray module. 
+""" + +import unittest +import sys + +sys.path.insert(0, '..') +import bitstring +from bitstring import BitArray + +class All(unittest.TestCase): + def testCreationFromUint(self): + s = BitArray(uint=15, length=6) + self.assertEqual(s.bin, '001111') + s = BitArray(uint=0, length=1) + self.assertEqual(s.bin, '0') + s.uint = 1 + self.assertEqual(s.uint, 1) + s = BitArray(length=8) + s.uint = 0 + self.assertEqual(s.uint, 0) + s.uint = 255 + self.assertEqual(s.uint, 255) + self.assertEqual(s.len, 8) + self.assertRaises(bitstring.CreationError, s._setuint, 256) + + def testCreationFromOct(self): + s = BitArray(oct='7') + self.assertEqual(s.oct, '7') + self.assertEqual(s.bin, '111') + s.append('0o1') + self.assertEqual(s.bin, '111001') + s.oct = '12345670' + self.assertEqual(s.length, 24) + self.assertEqual(s.bin, '001010011100101110111000') + s = BitArray('0o123') + self.assertEqual(s.oct, '123') + + +class NoPosAttribute(unittest.TestCase): + def testReplace(self): + s = BitArray('0b01') + s.replace('0b1', '0b11') + self.assertEqual(s, '0b011') + + def testDelete(self): + s = BitArray('0b000000001') + del s[-1:] + self.assertEqual(s, '0b00000000') + + def testInsert(self): + s = BitArray('0b00') + s.insert('0xf', 1) + self.assertEqual(s, '0b011110') + + def testInsertParameters(self): + s = BitArray('0b111') + self.assertRaises(TypeError, s.insert, '0x4') + + def testOverwrite(self): + s = BitArray('0b01110') + s.overwrite('0b000', 1) + self.assertEqual(s, '0b00000') + + def testOverwriteParameters(self): + s = BitArray('0b0000') + self.assertRaises(TypeError, s.overwrite, '0b111') + + def testPrepend(self): + s = BitArray('0b0') + s.prepend([1]) + self.assertEqual(s, [1, 0]) + + def testRol(self): + s = BitArray('0b0001') + s.rol(1) + self.assertEqual(s, '0b0010') + + def testRor(self): + s = BitArray('0b1000') + s.ror(1) + self.assertEqual(s, '0b0100') + + def testSetItem(self): + s = BitArray('0b000100') + s[4:5] = '0xf' + self.assertEqual(s, '0b000111110') + 
s[0:1] = [1] + self.assertEqual(s, '0b100111110') + + +class Bugs(unittest.TestCase): + def testAddingNonsense(self): + a = BitArray([0]) + a += '0' # a uint of length 0 - so nothing gets added. + self.assertEqual(a, [0]) + self.assertRaises(ValueError, a.__iadd__, '3') + self.assertRaises(ValueError, a.__iadd__, 'se') + self.assertRaises(ValueError, a.__iadd__, 'float:32') + + def testPrependAfterCreationFromDataWithOffset(self): + s1 = BitArray(bytes=b'\x00\x00\x07\xff\xf0\x00', offset=21, length=15) + self.assertFalse(s1.any(0)) + s1.prepend('0b0') + self.assertEqual(s1.bin, '0111111111111111') + s1.prepend('0b0') + self.assertEqual(s1.bin, '00111111111111111') + + +class ByteAligned(unittest.TestCase): + def testDefault(self, defaultbytealigned=bitstring.bytealigned): + self.assertFalse(defaultbytealigned) + + def testChangingIt(self): + bitstring.bytealigned = True + self.assertTrue(bitstring.bytealigned) + bitstring.bytealigned = False + + def testNotByteAligned(self): + bitstring.bytealigned = False + a = BitArray('0x00 ff 0f f') + l = list(a.findall('0xff')) + self.assertEqual(l, [8, 20]) + p = a.find('0x0f')[0] + self.assertEqual(p, 4) + p = a.rfind('0xff')[0] + self.assertEqual(p, 20) + s = list(a.split('0xff')) + self.assertEqual(s, ['0x00', '0xff0', '0xff']) + a.replace('0xff', '') + self.assertEqual(a, '0x000') + + def testByteAligned(self): + bitstring.bytealigned = True + a = BitArray('0x00 ff 0f f') + l = list(a.findall('0xff')) + self.assertEqual(l, [8]) + p = a.find('0x0f')[0] + self.assertEqual(p, 16) + p = a.rfind('0xff')[0] + self.assertEqual(p, 8) + s = list(a.split('0xff')) + self.assertEqual(s, ['0x00', '0xff0ff']) + a.replace('0xff', '') + self.assertEqual(a, '0x000ff') + + +class SliceAssignment(unittest.TestCase): + + def testSliceAssignmentSingleBit(self): + a = BitArray('0b000') + a[2] = '0b1' + self.assertEqual(a.bin, '001') + a[0] = BitArray(bin='1') + self.assertEqual(a.bin, '101') + a[-1] = '0b0' + self.assertEqual(a.bin, '100') + 
a[-3] = '0b0' + self.assertEqual(a.bin, '000') + + def testSliceAssignmentSingleBitErrors(self): + a = BitArray('0b000') + self.assertRaises(IndexError, a.__setitem__, -4, '0b1') + self.assertRaises(IndexError, a.__setitem__, 3, '0b1') + self.assertRaises(TypeError, a.__setitem__, 1, 1.3) + + def testSliceAssignmentMulipleBits(self): + a = BitArray('0b0') + a[0] = '0b110' + self.assertEqual(a.bin, '110') + a[0] = '0b000' + self.assertEqual(a.bin, '00010') + a[0:3] = '0b111' + self.assertEqual(a.bin, '11110') + a[-2:] = '0b011' + self.assertEqual(a.bin, '111011') + a[:] = '0x12345' + self.assertEqual(a.hex, '12345') + a[:] = '' + self.assertFalse(a) + + def testSliceAssignmentMultipleBitsErrors(self): + a = BitArray() + self.assertRaises(IndexError, a.__setitem__, 0, '0b00') + a += '0b1' + a[0:2] = '0b11' + self.assertEqual(a, '0b11') + + def testDelSliceStep(self): + a = BitArray(bin='100111101001001110110100101') + del a[::2] + self.assertEqual(a.bin, '0110010101100') + del a[3:9:3] + self.assertEqual(a.bin, '01101101100') + del a[2:7:1] + self.assertEqual(a.bin, '011100') + del a[::99] + self.assertEqual(a.bin, '11100') + del a[::1] + self.assertEqual(a.bin, '') + + def testDelSliceNegativeStep(self): + a = BitArray('0b0001011101101100100110000001') + del a[5:23:-3] + self.assertEqual(a.bin, '0001011101101100100110000001') + del a[25:3:-3] + self.assertEqual(a.bin, '00011101010000100001') + del a[:6:-7] + self.assertEqual(a.bin, '000111010100010000') + del a[15::-2] + self.assertEqual(a.bin, '0010000000') + del a[::-1] + self.assertEqual(a.bin, '') + + def testDelSliceErrors(self): + a = BitArray(10) + del a[5:3] + self.assertEqual(a, 10) + del a[3:5:-1] + self.assertEqual(a, 10) + + def testDelSingleElement(self): + a = BitArray('0b0010011') + del a[-1] + self.assertEqual(a.bin, '001001') + del a[2] + self.assertEqual(a.bin, '00001') + try: + del a[5] + self.assertTrue(False) + except IndexError: + pass + + def testSetSliceStep(self): + a = 
BitArray(bin='0000000000') + a[::2] = '0b11111' + self.assertEqual(a.bin, '1010101010') + a[4:9:3] = [0, 0] + self.assertEqual(a.bin, '1010001010') + a[7:3:-1] = [1, 1, 1, 0] + self.assertEqual(a.bin, '1010011110') + a[7:1:-2] = [0, 0, 1] + self.assertEqual(a.bin, '1011001010') + a[::-5] = [1, 1] + self.assertEqual(a.bin, '1011101011') + a[::-1] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 1] + self.assertEqual(a.bin, '1000000000') + + def testSetSliceErrors(self): + a = BitArray(8) + try: + a[::3] = [1] + self.assertTrue(False) + except ValueError: + pass + class A(object): pass + try: + a[1:2] = A() + self.assertTrue(False) + except TypeError: + pass + try: + a[1:4:-1] = [1, 2] + self.assertTrue(False) + except ValueError: + pass + + +class Subclassing(unittest.TestCase): + + def testIsInstance(self): + class SubBits(BitArray): pass + a = SubBits() + self.assertTrue(isinstance(a, SubBits)) + + def testClassType(self): + class SubBits(BitArray): pass + self.assertEqual(SubBits().__class__, SubBits) + + +class Clear(unittest.TestCase): + + def testClear(self): + s = BitArray('0xfff') + s.clear() + self.assertEqual(s.len, 0) + + +class Copy(unittest.TestCase): + + def testCopyMethod(self): + s = BitArray(9) + t = s.copy() + self.assertEqual(s, t) + t[0] = True + self.assertEqual(t.bin, '100000000') + self.assertEqual(s.bin, '000000000') + + +class ModifiedByAddingBug(unittest.TestCase): + + def testAdding(self): + a = BitArray('0b0') + b = BitArray('0b11') + c = a + b + self.assertEqual(c, '0b011') + self.assertEqual(a, '0b0') + self.assertEqual(b, '0b11') \ No newline at end of file diff --git a/python/bitstring/test/test_bits.py b/python/bitstring/test/test_bits.py new file mode 100644 index 000000000..402c03899 --- /dev/null +++ b/python/bitstring/test/test_bits.py @@ -0,0 +1,378 @@ +#!/usr/bin/env python + +import unittest +import sys + +sys.path.insert(0, '..') +import bitstring +from bitstring import MmapByteArray +from bitstring import Bits, BitArray, ConstByteStore, 
ByteStore + +class Creation(unittest.TestCase): + def testCreationFromBytes(self): + s = Bits(bytes=b'\xa0\xff') + self.assertEqual((s.len, s.hex), (16, 'a0ff')) + s = Bits(bytes=b'abc', length=0) + self.assertEqual(s, '') + + def testCreationFromBytesErrors(self): + self.assertRaises(bitstring.CreationError, Bits, bytes=b'abc', length=25) + + def testCreationFromDataWithOffset(self): + s1 = Bits(bytes=b'\x0b\x1c\x2f', offset=0, length=20) + s2 = Bits(bytes=b'\xa0\xb1\xC2', offset=4) + self.assertEqual((s2.len, s2.hex), (20, '0b1c2')) + self.assertEqual((s1.len, s1.hex), (20, '0b1c2')) + self.assertTrue(s1 == s2) + + def testCreationFromHex(self): + s = Bits(hex='0xA0ff') + self.assertEqual((s.len, s.hex), (16, 'a0ff')) + s = Bits(hex='0x0x0X') + self.assertEqual((s.length, s.hex), (0, '')) + + def testCreationFromHexWithWhitespace(self): + s = Bits(hex=' \n0 X a 4e \r3 \n') + self.assertEqual(s.hex, 'a4e3') + + def testCreationFromHexErrors(self): + self.assertRaises(bitstring.CreationError, Bits, hex='0xx0') + self.assertRaises(bitstring.CreationError, Bits, hex='0xX0') + self.assertRaises(bitstring.CreationError, Bits, hex='0Xx0') + self.assertRaises(bitstring.CreationError, Bits, hex='-2e') + # These really should fail, but it's awkward and not a big deal... 
+# self.assertRaises(bitstring.CreationError, Bits, '0x2', length=2) +# self.assertRaises(bitstring.CreationError, Bits, '0x3', offset=1) + + def testCreationFromBin(self): + s = Bits(bin='1010000011111111') + self.assertEqual((s.length, s.hex), (16, 'a0ff')) + s = Bits(bin='00')[:1] + self.assertEqual(s.bin, '0') + s = Bits(bin=' 0000 \n 0001\r ') + self.assertEqual(s.bin, '00000001') + + def testCreationFromBinWithWhitespace(self): + s = Bits(bin=' \r\r\n0 B 00 1 1 \t0 ') + self.assertEqual(s.bin, '00110') + + def testCreationFromOctErrors(self): + s = Bits('0b00011') + self.assertRaises(bitstring.InterpretError, s._getoct) + self.assertRaises(bitstring.CreationError, s._setoct, '8') + + def testCreationFromUintWithOffset(self): + self.assertRaises(bitstring.Error, Bits, uint=12, length=8, offset=1) + + def testCreationFromUintErrors(self): + self.assertRaises(bitstring.CreationError, Bits, uint=-1, length=10) + self.assertRaises(bitstring.CreationError, Bits, uint=12) + self.assertRaises(bitstring.CreationError, Bits, uint=4, length=2) + self.assertRaises(bitstring.CreationError, Bits, uint=0, length=0) + self.assertRaises(bitstring.CreationError, Bits, uint=12, length=-12) + + def testCreationFromInt(self): + s = Bits(int=0, length=4) + self.assertEqual(s.bin, '0000') + s = Bits(int=1, length=2) + self.assertEqual(s.bin, '01') + s = Bits(int=-1, length=11) + self.assertEqual(s.bin, '11111111111') + s = Bits(int=12, length=7) + self.assertEqual(s.int, 12) + s = Bits(int=-243, length=108) + self.assertEqual((s.int, s.length), (-243, 108)) + for length in range(6, 10): + for value in range(-17, 17): + s = Bits(int=value, length=length) + self.assertEqual((s.int, s.length), (value, length)) + s = Bits(int=10, length=8) + + def testCreationFromIntErrors(self): + self.assertRaises(bitstring.CreationError, Bits, int=-1, length=0) + self.assertRaises(bitstring.CreationError, Bits, int=12) + self.assertRaises(bitstring.CreationError, Bits, int=4, length=3) + 
self.assertRaises(bitstring.CreationError, Bits, int=-5, length=3) + + def testCreationFromSe(self): + for i in range(-100, 10): + s = Bits(se=i) + self.assertEqual(s.se, i) + + def testCreationFromSeWithOffset(self): + self.assertRaises(bitstring.CreationError, Bits, se=-13, offset=1) + + def testCreationFromSeErrors(self): + self.assertRaises(bitstring.CreationError, Bits, se=-5, length=33) + s = Bits(bin='001000') + self.assertRaises(bitstring.InterpretError, s._getse) + + def testCreationFromUe(self): + [self.assertEqual(Bits(ue=i).ue, i) for i in range(0, 20)] + + def testCreationFromUeWithOffset(self): + self.assertRaises(bitstring.CreationError, Bits, ue=104, offset=2) + + def testCreationFromUeErrors(self): + self.assertRaises(bitstring.CreationError, Bits, ue=-1) + self.assertRaises(bitstring.CreationError, Bits, ue=1, length=12) + s = Bits(bin='10') + self.assertRaises(bitstring.InterpretError, s._getue) + + def testCreationFromBool(self): + a = Bits('bool=1') + self.assertEqual(a, 'bool=1') + b = Bits('bool=0') + self.assertEqual(b, [0]) + c = bitstring.pack('2*bool', 0, 1) + self.assertEqual(c, '0b01') + + def testCreationKeywordError(self): + self.assertRaises(bitstring.CreationError, Bits, squirrel=5) + + def testDataStoreType(self): + a = Bits('0xf') + self.assertEqual(type(a._datastore), bitstring.ConstByteStore) + + +class Initialisation(unittest.TestCase): + def testEmptyInit(self): + a = Bits() + self.assertEqual(a, '') + + def testNoPos(self): + a = Bits('0xabcdef') + try: + a.pos + except AttributeError: + pass + else: + assert False + + def testFind(self): + a = Bits('0xabcd') + r = a.find('0xbc') + self.assertEqual(r[0], 4) + r = a.find('0x23462346246', bytealigned=True) + self.assertFalse(r) + + def testRfind(self): + a = Bits('0b11101010010010') + b = a.rfind('0b010') + self.assertEqual(b[0], 11) + + def testFindAll(self): + a = Bits('0b0010011') + b = list(a.findall([1])) + self.assertEqual(b, [2, 5, 6]) + + +class Cut(unittest.TestCase): 
+ def testCut(self): + s = Bits(30) + for t in s.cut(3): + self.assertEqual(t, [0] * 3) + + +class InterleavedExpGolomb(unittest.TestCase): + def testCreation(self): + s1 = Bits(uie=0) + s2 = Bits(uie=1) + self.assertEqual(s1, [1]) + self.assertEqual(s2, [0, 0, 1]) + s1 = Bits(sie=0) + s2 = Bits(sie=-1) + s3 = Bits(sie=1) + self.assertEqual(s1, [1]) + self.assertEqual(s2, [0, 0, 1, 1]) + self.assertEqual(s3, [0, 0, 1, 0]) + + def testCreationFromProperty(self): + s = BitArray() + s.uie = 45 + self.assertEqual(s.uie, 45) + s.sie = -45 + self.assertEqual(s.sie, -45) + + def testInterpretation(self): + for x in range(101): + self.assertEqual(Bits(uie=x).uie, x) + for x in range(-100, 100): + self.assertEqual(Bits(sie=x).sie, x) + + def testErrors(self): + for f in ['sie=100, 0b1001', '0b00', 'uie=100, 0b1001']: + s = Bits(f) + self.assertRaises(bitstring.InterpretError, s._getsie) + self.assertRaises(bitstring.InterpretError, s._getuie) + self.assertRaises(ValueError, Bits, 'uie=-10') + + +class FileBased(unittest.TestCase): + def setUp(self): + self.a = Bits(filename='smalltestfile') + self.b = Bits(filename='smalltestfile', offset=16) + self.c = Bits(filename='smalltestfile', offset=20, length=16) + self.d = Bits(filename='smalltestfile', offset=20, length=4) + + def testCreationWithOffset(self): + self.assertEqual(self.a, '0x0123456789abcdef') + self.assertEqual(self.b, '0x456789abcdef') + self.assertEqual(self.c, '0x5678') + + def testBitOperators(self): + x = self.b[4:20] + self.assertEqual(x, '0x5678') + self.assertEqual((x & self.c).hex, self.c.hex) + self.assertEqual(self.c ^ self.b[4:20], 16) + self.assertEqual(self.a[23:36] | self.c[3:], self.c[3:]) + + def testAddition(self): + h = self.d + '0x1' + x = self.a[20:24] + self.c[-4:] + self.c[8:12] + self.assertEqual(x, '0x587') + x = self.b + x + self.assertEqual(x.hex, '456789abcdef587') + x = BitArray(x) + del x[12:24] + self.assertEqual(x, '0x456abcdef587') + +class Mmap(unittest.TestCase): + def 
setUp(self): + self.f = open('smalltestfile', 'rb') + + def tearDown(self): + self.f.close() + + def testByteArrayEquivalence(self): + a = MmapByteArray(self.f) + self.assertEqual(a.bytelength, 8) + self.assertEqual(len(a), 8) + self.assertEqual(a[0], 0x01) + self.assertEqual(a[1], 0x23) + self.assertEqual(a[7], 0xef) + self.assertEqual(a[0:1], bytearray([1])) + self.assertEqual(a[:], bytearray([0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef])) + self.assertEqual(a[2:4], bytearray([0x45, 0x67])) + + def testWithLength(self): + a = MmapByteArray(self.f, 3) + self.assertEqual(a[0], 0x01) + self.assertEqual(len(a), 3) + + def testWithOffset(self): + a = MmapByteArray(self.f, None, 5) + self.assertEqual(len(a), 3) + self.assertEqual(a[0], 0xab) + + def testWithLengthAndOffset(self): + a = MmapByteArray(self.f, 3, 3) + self.assertEqual(len(a), 3) + self.assertEqual(a[0], 0x67) + self.assertEqual(a[:], bytearray([0x67, 0x89, 0xab])) + + +class Comparisons(unittest.TestCase): + def testUnorderable(self): + a = Bits(5) + b = Bits(5) + self.assertRaises(TypeError, a.__lt__, b) + self.assertRaises(TypeError, a.__gt__, b) + self.assertRaises(TypeError, a.__le__, b) + self.assertRaises(TypeError, a.__ge__, b) + + +class Subclassing(unittest.TestCase): + + def testIsInstance(self): + class SubBits(bitstring.Bits): pass + a = SubBits() + self.assertTrue(isinstance(a, SubBits)) + + def testClassType(self): + class SubBits(bitstring.Bits): pass + self.assertEqual(SubBits().__class__, SubBits) + + +class LongBoolConversion(unittest.TestCase): + + def testLongBool(self): + a = Bits(1000) + b = bool(a) + self.assertTrue(b is False) + + +# Some basic tests for the private ByteStore classes + +class ConstByteStoreCreation(unittest.TestCase): + + def testProperties(self): + a = ConstByteStore(bytearray(b'abc')) + self.assertEqual(a.bytelength, 3) + self.assertEqual(a.offset, 0) + self.assertEqual(a.bitlength, 24) + self.assertEqual(a._rawarray, b'abc') + + def testGetBit(self): + a = 
ConstByteStore(bytearray([0x0f])) + self.assertEqual(a.getbit(0), False) + self.assertEqual(a.getbit(3), False) + self.assertEqual(a.getbit(4), True) + self.assertEqual(a.getbit(7), True) + + b = ConstByteStore(bytearray([0x0f]), 7, 1) + self.assertEqual(b.getbit(2), False) + self.assertEqual(b.getbit(3), True) + + def testGetByte(self): + a = ConstByteStore(bytearray(b'abcde'), 1, 13) + self.assertEqual(a.getbyte(0), 97) + self.assertEqual(a.getbyte(1), 98) + self.assertEqual(a.getbyte(4), 101) + + +class PadToken(unittest.TestCase): + + def testCreation(self): + a = Bits('pad:10') + self.assertEqual(a, Bits(10)) + b = Bits('pad:0') + self.assertEqual(b, Bits()) + c = Bits('0b11, pad:1, 0b111') + self.assertEqual(c, Bits('0b110111')) + + def testPack(self): + s = bitstring.pack('0b11, pad:3=5, 0b1') + self.assertEqual(s.bin, '110001') + d = bitstring.pack('pad:c', c=12) + self.assertEqual(d, Bits(12)) + e = bitstring.pack('0xf, uint:12, pad:1, bin, pad:4, 0b10', 0, '111') + self.assertEqual(e.bin, '11110000000000000111000010') + + def testUnpack(self): + s = Bits('0b111000111') + x, y = s.unpack('3, pad:3, 3') + self.assertEqual((x, y), (7, 7)) + x, y = s.unpack('2, pad:2, bin') + self.assertEqual((x, y), (3, '00111')) + x = s.unpack('pad:1, pad:2, pad:3') + self.assertEqual(x, []) + + +class ModifiedByAddingBug(unittest.TestCase): + + def testAdding(self): + a = Bits('0b0') + b = Bits('0b11') + c = a + b + self.assertEqual(c, '0b011') + self.assertEqual(a, '0b0') + self.assertEqual(b, '0b11') + + def testAdding2(self): + a = Bits(100) + b = Bits(101) + c = a + b + self.assertEqual(a, 100) + self.assertEqual(b, 101) + self.assertEqual(c, 201) diff --git a/python/bitstring/test/test_bitstore.py b/python/bitstring/test/test_bitstore.py new file mode 100644 index 000000000..9f5c9036e --- /dev/null +++ b/python/bitstring/test/test_bitstore.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python + +import unittest +import sys +sys.path.insert(0, '..') +from bitstring import 
ByteStore, ConstByteStore, equal, offsetcopy + + +class OffsetCopy(unittest.TestCase): + def testStraightCopy(self): + s = ByteStore(bytearray([10, 5, 1]), 24, 0) + t = offsetcopy(s, 0) + self.assertEqual(t._rawarray, bytearray([10, 5, 1])) + + def testOffsetIncrease(self): + s = ByteStore(bytearray([1, 1, 1]), 24, 0) + t = offsetcopy(s, 4) + self.assertEqual(t.bitlength, 24) + self.assertEqual(t.offset, 4) + self.assertEqual(t._rawarray, bytearray([0, 16, 16, 16])) + + +class Equals(unittest.TestCase): + + def testBothSingleByte(self): + s = ByteStore(bytearray([128]), 3, 0) + t = ByteStore(bytearray([64]), 3, 1) + u = ByteStore(bytearray([32]), 3, 2) + self.assertTrue(equal(s, t)) + self.assertTrue(equal(s, u)) + self.assertTrue(equal(u, t)) + + def testOneSingleByte(self): + s = ByteStore(bytearray([1, 0]), 2, 7) + t = ByteStore(bytearray([64]), 2, 1) + self.assertTrue(equal(s, t)) + self.assertTrue(equal(t, s)) \ No newline at end of file diff --git a/python/bitstring/test/test_bitstream.py b/python/bitstring/test/test_bitstream.py new file mode 100644 index 000000000..f94193d32 --- /dev/null +++ b/python/bitstring/test/test_bitstream.py @@ -0,0 +1,3940 @@ +#!/usr/bin/env python + +import unittest +import sys +sys.path.insert(0, '..') +import bitstring +import copy +import os +import collections +from bitstring import BitStream, ConstBitStream, pack +from bitstring import ByteStore, offsetcopy + + +class FlexibleInitialisation(unittest.TestCase): + def testFlexibleInitialisation(self): + a = BitStream('uint:8=12') + c = BitStream(' uint : 8 = 12') + self.assertTrue(a == c == BitStream(uint=12, length=8)) + self.assertEqual(a.uint, 12) + a = BitStream(' int:2= -1') + b = BitStream('int :2 = -1') + c = BitStream(' int: 2 =-1 ') + self.assertTrue(a == b == c == BitStream(int=-1, length=2)) + + def testFlexibleInitialisation2(self): + h = BitStream('hex=12') + o = BitStream('oct=33') + b = BitStream('bin=10') + self.assertEqual(h, '0x12') + self.assertEqual(o, 
'0o33') + self.assertEqual(b, '0b10') + + def testFlexibleInitialisation3(self): + for s in ['se=-1', ' se = -1 ', 'se = -1']: + a = BitStream(s) + self.assertEqual(a.se, -1) + for s in ['ue=23', 'ue =23', 'ue = 23']: + a = BitStream(s) + self.assertEqual(a.ue, 23) + + def testMultipleStringInitialisation(self): + a = BitStream('0b1 , 0x1') + self.assertEqual(a, '0b10001') + a = BitStream('ue=5, ue=1, se=-2') + self.assertEqual(a.read('ue'), 5) + self.assertEqual(a.read('ue'), 1) + self.assertEqual(a.read('se'), -2) + b = BitStream('uint:32 = 12, 0b11') + 'int:100=-100, 0o44' + self.assertEqual(b.read(32).uint, 12) + self.assertEqual(b.read(2).bin, '11') + self.assertEqual(b.read(100).int, -100) + + +class Reading(unittest.TestCase): + def testReadBits(self): + s = BitStream(bytes=b'\x4d\x55') + self.assertEqual(s.read(4).hex, '4') + self.assertEqual(s.read(8).hex, 'd5') + self.assertEqual(s.read(1), [0]) + self.assertEqual(s.read(3).bin, '101') + self.assertFalse(s.read(0)) + + def testReadByte(self): + s = BitStream(hex='4d55') + self.assertEqual(s.read(8).hex, '4d') + self.assertEqual(s.read(8).hex, '55') + + def testReadBytes(self): + s = BitStream(hex='0x112233448811') + self.assertEqual(s.read(3 * 8).hex, '112233') + self.assertRaises(ValueError, s.read, -2 * 8) + s.bitpos += 1 + self.assertEqual(s.read(2 * 8).bin, '1000100100010000') + + def testReadUE(self): + self.assertRaises(bitstring.InterpretError, BitStream('')._getue) + # The numbers 0 to 8 as unsigned Exponential-Golomb codes + s = BitStream(bin='1 010 011 00100 00101 00110 00111 0001000 0001001') + self.assertEqual(s.pos, 0) + for i in range(9): + self.assertEqual(s.read('ue'), i) + self.assertRaises(bitstring.ReadError, s.read, 'ue') + + def testReadSE(self): + s = BitStream(bin='010 00110 0001010 0001000 00111') + self.assertEqual(s.read('se'), 1) + self.assertEqual(s.read('se'), 3) + self.assertEqual(s.readlist(3 * ['se']), [5, 4, -3]) + + +class Find(unittest.TestCase): + def testFind1(self): + 
s = ConstBitStream(bin='0b0000110110000') + self.assertTrue(s.find(BitStream(bin='11011'), False)) + self.assertEqual(s.bitpos, 4) + self.assertEqual(s.read(5).bin, '11011') + s.bitpos = 0 + self.assertFalse(s.find('0b11001', False)) + + def testFind2(self): + s = BitStream(bin='0') + self.assertTrue(s.find(s, False)) + self.assertEqual(s.pos, 0) + self.assertFalse(s.find('0b00', False)) + self.assertRaises(ValueError, s.find, BitStream(), False) + + def testFindWithOffset(self): + s = BitStream(hex='0x112233')[4:] + self.assertTrue(s.find('0x23', False)) + self.assertEqual(s.pos, 8) + + def testFindCornerCases(self): + s = BitStream(bin='000111000111') + self.assertTrue(s.find('0b000')) + self.assertEqual(s.pos, 0) + self.assertTrue(s.find('0b000')) + self.assertEqual(s.pos, 0) + self.assertTrue(s.find('0b0111000111')) + self.assertEqual(s.pos, 2) + self.assertTrue(s.find('0b000', start=2)) + self.assertEqual(s.pos, 6) + self.assertTrue(s.find('0b111', start=6)) + self.assertEqual(s.pos, 9) + s.pos += 2 + self.assertTrue(s.find('0b1', start=s.pos)) + + def testFindBytes(self): + s = BitStream('0x010203040102ff') + self.assertFalse(s.find('0x05', bytealigned=True)) + self.assertTrue(s.find('0x02', bytealigned=True)) + self.assertEqual(s.read(16).hex, '0203') + self.assertTrue(s.find('0x02', start=s.bitpos, bytealigned=True)) + s.read(1) + self.assertFalse(s.find('0x02', start=s.bitpos, bytealigned=True)) + + def testFindBytesAlignedCornerCases(self): + s = BitStream('0xff') + self.assertTrue(s.find(s)) + self.assertFalse(s.find(BitStream(hex='0x12'))) + self.assertFalse(s.find(BitStream(hex='0xffff'))) + + def testFindBytesBitpos(self): + s = BitStream(hex='0x1122334455') + s.pos = 2 + s.find('0x66', bytealigned=True) + self.assertEqual(s.pos, 2) + s.pos = 38 + s.find('0x66', bytealigned=True) + self.assertEqual(s.pos, 38) + + def testFindByteAligned(self): + s = BitStream(hex='0x12345678') + self.assertTrue(s.find(BitStream(hex='0x56'), bytealigned=True)) + 
self.assertEqual(s.bytepos, 2) + s.pos = 0 + self.assertFalse(s.find(BitStream(hex='0x45'), bytealigned=True)) + s = BitStream('0x1234') + s.find('0x1234') + self.assertTrue(s.find('0x1234')) + s += '0b111' + s.pos = 3 + s.find('0b1', start=17, bytealigned=True) + self.assertFalse(s.find('0b1', start=17, bytealigned=True)) + self.assertEqual(s.pos, 3) + + def testFindByteAlignedWithOffset(self): + s = BitStream(hex='0x112233')[4:] + self.assertTrue(s.find(BitStream(hex='0x23'))) + + def testFindByteAlignedErrors(self): + s = BitStream(hex='0xffff') + self.assertRaises(ValueError, s.find, '') + self.assertRaises(ValueError, s.find, BitStream()) + + +class Rfind(unittest.TestCase): + def testRfind(self): + a = BitStream('0b001001001') + b = a.rfind('0b001') + self.assertEqual(b, (6,)) + self.assertEqual(a.pos, 6) + big = BitStream(length=100000) + '0x12' + BitStream(length=10000) + found = big.rfind('0x12', bytealigned=True) + self.assertEqual(found, (100000,)) + self.assertEqual(big.pos, 100000) + + def testRfindByteAligned(self): + a = BitStream('0x8888') + b = a.rfind('0b1', bytealigned=True) + self.assertEqual(b, (8,)) + self.assertEqual(a.pos, 8) + + def testRfindStartbit(self): + a = BitStream('0x0000ffffff') + b = a.rfind('0x0000', start=1, bytealigned=True) + self.assertEqual(b, ()) + self.assertEqual(a.pos, 0) + b = a.rfind('0x00', start=1, bytealigned=True) + self.assertEqual(b, (8,)) + self.assertEqual(a.pos, 8) + + def testRfindEndbit(self): + a = BitStream('0x000fff') + b = a.rfind('0b011', bytealigned=False, start=0, end=14) + self.assertEqual(bool(b), True) + b = a.rfind('0b011', False, 0, 13) + self.assertEqual(b, ()) + + def testRfindErrors(self): + a = BitStream('0x43234234') + self.assertRaises(ValueError, a.rfind, '', bytealigned=True) + self.assertRaises(ValueError, a.rfind, '0b1', start=-99, bytealigned=True) + self.assertRaises(ValueError, a.rfind, '0b1', end=33, bytealigned=True) + self.assertRaises(ValueError, a.rfind, '0b1', start=10, end=9, 
bytealigned=True) + + +class Shift(unittest.TestCase): + def testShiftLeft(self): + s = BitStream('0b1010') + t = s << 1 + self.assertEqual(s.bin, '1010') + self.assertEqual(t.bin, '0100') + t = t << 0 + self.assertEqual(t, '0b0100') + t = t << 100 + self.assertEqual(t.bin, '0000') + + def testShiftLeftErrors(self): + s = BitStream() + self.assertRaises(ValueError, s.__lshift__, 1) + s = BitStream('0xf') + self.assertRaises(ValueError, s.__lshift__, -1) + + def testShiftRight(self): + s = BitStream('0b1010') + t = s >> 1 + self.assertEqual(s.bin, '1010') + self.assertEqual(t.bin, '0101') + q = s >> 0 + self.assertEqual(q, '0b1010') + q.replace('0b1010', '') + s = s >> 100 + self.assertEqual(s.bin, '0000') + + def testShiftRightErrors(self): + s = BitStream() + self.assertRaises(ValueError, s.__rshift__, 1) + s = BitStream('0xf') + self.assertRaises(ValueError, s.__rshift__, -1) + + def testShiftRightInPlace(self): + s = BitStream('0xffff')[4:12] + s >>= 1 + self.assertEqual(s, '0b01111111') + s = BitStream('0b11011') + s >>= 2 + self.assertEqual(s.bin, '00110') + s >>= 100000000000000 + self.assertEqual(s.bin, '00000') + s = BitStream('0xff') + s >>= 1 + self.assertEqual(s, '0x7f') + s >>= 0 + self.assertEqual(s, '0x7f') + + def testShiftRightInPlaceErrors(self): + s = BitStream() + self.assertRaises(ValueError, s.__irshift__, 1) + s += '0b11' + self.assertRaises(ValueError, s.__irshift__, -1) + + def testShiftLeftInPlace(self): + s = BitStream('0xffff') + t = s[4:12] + t <<= 2 + self.assertEqual(t, '0b11111100') + s = BitStream('0b11011') + s <<= 2 + self.assertEqual(s.bin, '01100') + s <<= 100000000000000000000 + self.assertEqual(s.bin, '00000') + s = BitStream('0xff') + s <<= 1 + self.assertEqual(s, '0xfe') + s <<= 0 + self.assertEqual(s, '0xfe') + + def testShiftLeftInPlaceErrors(self): + s = BitStream() + self.assertRaises(ValueError, s.__ilshift__, 1) + s += '0b11' + self.assertRaises(ValueError, s.__ilshift__, -1) + + +class Replace(unittest.TestCase): + def 
testReplace1(self): + a = BitStream('0b1') + n = a.replace('0b1', '0b0', bytealigned=True) + self.assertEqual(a.bin, '0') + self.assertEqual(n, 1) + n = a.replace('0b1', '0b0', bytealigned=True) + self.assertEqual(n, 0) + + def testReplace2(self): + a = BitStream('0b00001111111') + n = a.replace('0b1', '0b0', bytealigned=True) + self.assertEqual(a.bin, '00001111011') + self.assertEqual(n, 1) + n = a.replace('0b1', '0b0', bytealigned=False) + self.assertEqual(a.bin, '00000000000') + self.assertEqual(n, 6) + + def testReplace3(self): + a = BitStream('0b0') + n = a.replace('0b0', '0b110011111', bytealigned=True) + self.assertEqual(n, 1) + self.assertEqual(a.bin, '110011111') + n = a.replace('0b11', '', bytealigned=False) + self.assertEqual(n, 3) + self.assertEqual(a.bin, '001') + + def testReplace4(self): + a = BitStream('0x00114723ef4732344700') + n = a.replace('0x47', '0x00', bytealigned=True) + self.assertEqual(n, 3) + self.assertEqual(a.hex, '00110023ef0032340000') + a.replace('0x00', '', bytealigned=True) + self.assertEqual(a.hex, '1123ef3234') + a.replace('0x11', '', start=1, bytealigned=True) + self.assertEqual(a.hex, '1123ef3234') + a.replace('0x11', '0xfff', end=7, bytealigned=True) + self.assertEqual(a.hex, '1123ef3234') + a.replace('0x11', '0xfff', end=8, bytealigned=True) + self.assertEqual(a.hex, 'fff23ef3234') + + def testReplace5(self): + a = BitStream('0xab') + b = BitStream('0xcd') + c = BitStream('0xabef') + c.replace(a, b) + self.assertEqual(c, '0xcdef') + self.assertEqual(a, '0xab') + self.assertEqual(b, '0xcd') + a = BitStream('0x0011223344') + a.pos = 12 + a.replace('0x11', '0xfff', bytealigned=True) + self.assertEqual(a.pos, 8) + self.assertEqual(a, '0x00fff223344') + + def testReplaceWithSelf(self): + a = BitStream('0b11') + a.replace('0b1', a) + self.assertEqual(a, '0xf') + a.replace(a, a) + self.assertEqual(a, '0xf') + + def testReplaceCount(self): + a = BitStream('0x223344223344223344') + n = a.replace('0x2', '0x0', count=0, 
bytealigned=True) + self.assertEqual(n, 0) + self.assertEqual(a.hex, '223344223344223344') + n = a.replace('0x2', '0x0', count=1, bytealigned=True) + self.assertEqual(n, 1) + self.assertEqual(a.hex, '023344223344223344') + n = a.replace('0x33', '', count=2, bytealigned=True) + self.assertEqual(n, 2) + self.assertEqual(a.hex, '02442244223344') + n = a.replace('0x44', '0x4444', count=1435, bytealigned=True) + self.assertEqual(n, 3) + self.assertEqual(a.hex, '02444422444422334444') + + def testReplaceBitpos(self): + a = BitStream('0xff') + a.bitpos = 8 + a.replace('0xff', '', bytealigned=True) + self.assertEqual(a.bitpos, 0) + a = BitStream('0b0011110001') + a.bitpos = 4 + a.replace('0b1', '0b000') + self.assertEqual(a.bitpos, 8) + a = BitStream('0b1') + a.bitpos = 1 + a.replace('0b1', '0b11111', bytealigned=True) + self.assertEqual(a.bitpos, 5) + a.replace('0b11', '0b0', False) + self.assertEqual(a.bitpos, 3) + a.append('0b00') + a.replace('0b00', '0xffff') + self.assertEqual(a.bitpos, 17) + + def testReplaceErrors(self): + a = BitStream('0o123415') + self.assertRaises(ValueError, a.replace, '', '0o7', bytealigned=True) + self.assertRaises(ValueError, a.replace, '0b1', '0b1', start=-100, bytealigned=True) + self.assertRaises(ValueError, a.replace, '0b1', '0b1', end=19, bytealigned=True) + + +class SliceAssignment(unittest.TestCase): + + # TODO: Move this to another class + def testSetSlice(self): + a = BitStream() + a[0:0] = '0xabcdef' + self.assertEqual(a.bytepos, 3) + a[4:16] = '' + self.assertEqual(a, '0xaef') + self.assertEqual(a.bitpos, 4) + a[8:] = '0x00' + self.assertEqual(a, '0xae00') + self.assertEqual(a.bytepos, 2) + a += '0xf' + a[8:] = '0xe' + self.assertEqual(a, '0xaee') + self.assertEqual(a.bitpos, 12) + b = BitStream() + b[0:800] = '0xffee' + self.assertEqual(b, '0xffee') + b[4:48] = '0xeed123' + self.assertEqual(b, '0xfeed123') + b[-800:8] = '0x0000' + self.assertEqual(b, '0x0000ed123') + a = BitStream('0xabcde') + self.assertEqual(a[-100:-90], '') + 
self.assertEqual(a[-100:-16], '0xa') + a[-100:-16] = '0x0' + self.assertEqual(a, '0x0bcde') + + def testInsertingUsingSetItem(self): + a = BitStream() + a[0:0] = '0xdeadbeef' + self.assertEqual(a, '0xdeadbeef') + self.assertEqual(a.bytepos, 4) + a[16:16] = '0xfeed' + self.assertEqual(a, '0xdeadfeedbeef') + self.assertEqual(a.bytepos, 4) + a[0:0] = '0xa' + self.assertEqual(a, '0xadeadfeedbeef') + self.assertEqual(a.bitpos, 4) + a.bytepos = 6 + a[0:0] = '0xff' + self.assertEqual(a.bytepos, 1) + a[8:0] = '0x00000' + self.assertTrue(a.startswith('0xff00000adead')) + + def testSliceAssignmentBitPos(self): + a = BitStream('int:64=-1') + a.pos = 64 + a[0:8] = '' + self.assertEqual(a.pos, 0) + a.pos = 52 + a[48:56] = '0x0000' + self.assertEqual(a.pos, 64) + a[10:10] = '0x0' + self.assertEqual(a.pos, 14) + a[56:68] = '0x000' + self.assertEqual(a.pos, 14) + + +class Pack(unittest.TestCase): + def testPack1(self): + s = bitstring.pack('uint:6, bin, hex, int:6, se, ue, oct', 10, '0b110', 'ff', -1, -6, 6, '54') + t = BitStream('uint:6=10, 0b110, 0xff, int:6=-1, se=-6, ue=6, oct=54') + self.assertEqual(s, t) + self.assertRaises(bitstring.CreationError, pack, 'tomato', '0') + self.assertRaises(bitstring.CreationError, pack, 'uint', 12) + self.assertRaises(bitstring.CreationError, pack, 'hex', 'penguin') + self.assertRaises(bitstring.CreationError, pack, 'hex12', '0x12') + + def testPackWithLiterals(self): + s = bitstring.pack('0xf') + self.assertEqual(s, '0xf') + self.assertTrue(type(s), BitStream) + s = pack('0b1') + self.assertEqual(s, '0b1') + s = pack('0o7') + self.assertEqual(s, '0o7') + s = pack('int:10=-1') + self.assertEqual(s, '0b1111111111') + s = pack('uint:10=1') + self.assertEqual(s, '0b0000000001') + s = pack('ue=12') + self.assertEqual(s.ue, 12) + s = pack('se=-12') + self.assertEqual(s.se, -12) + s = pack('bin=01') + self.assertEqual(s.bin, '01') + s = pack('hex=01') + self.assertEqual(s.hex, '01') + s = pack('oct=01') + self.assertEqual(s.oct, '01') + + def 
testPackWithDict(self): + a = pack('uint:6=width, se=height', height=100, width=12) + w, h = a.unpack('uint:6, se') + self.assertEqual(w, 12) + self.assertEqual(h, 100) + d = {} + d['w'] = '0xf' + d['300'] = 423 + d['e'] = '0b1101' + a = pack('int:100=300, bin=e, uint:12=300', **d) + x, y, z = a.unpack('int:100, bin, uint:12') + self.assertEqual(x, 423) + self.assertEqual(y, '1101') + self.assertEqual(z, 423) + + def testPackWithDict2(self): + a = pack('int:5, bin:3=b, 0x3, bin=c, se=12', 10, b='0b111', c='0b1') + b = BitStream('int:5=10, 0b111, 0x3, 0b1, se=12') + self.assertEqual(a, b) + a = pack('bits:3=b', b=BitStream('0b101')) + self.assertEqual(a, '0b101') + a = pack('bits:24=b', b=BitStream('0x001122')) + self.assertEqual(a, '0x001122') + + def testPackWithDict3(self): + s = pack('hex:4=e, hex:4=0xe, hex:4=e', e='f') + self.assertEqual(s, '0xfef') + s = pack('sep', sep='0b00') + self.assertEqual(s, '0b00') + + def testPackWithDict4(self): + s = pack('hello', hello='0xf') + self.assertEqual(s, '0xf') + s = pack('x, y, x, y, x', x='0b10', y='uint:12=100') + t = BitStream('0b10, uint:12=100, 0b10, uint:12=100, 0b10') + self.assertEqual(s, t) + a = [1, 2, 3, 4, 5] + s = pack('int:8, div,' * 5, *a, **{'div': '0b1'}) + t = BitStream('int:8=1, 0b1, int:8=2, 0b1, int:8=3, 0b1, int:8=4, 0b1, int:8=5, 0b1') + self.assertEqual(s, t) + + def testPackWithLocals(self): + width = 352 + height = 288 + s = pack('uint:12=width, uint:12=height', **locals()) + self.assertEqual(s, '0x160120') + + def testPackWithLengthRestriction(self): + s = pack('bin:3', '0b000') + self.assertRaises(bitstring.CreationError, pack, 'bin:3', '0b0011') + self.assertRaises(bitstring.CreationError, pack, 'bin:3', '0b11') + self.assertRaises(bitstring.CreationError, pack, 'bin:3=0b0011') + self.assertRaises(bitstring.CreationError, pack, 'bin:3=0b11') + + s = pack('hex:4', '0xf') + self.assertRaises(bitstring.CreationError, pack, 'hex:4', '0b111') + self.assertRaises(bitstring.CreationError, pack, 
'hex:4', '0b11111') + self.assertRaises(bitstring.CreationError, pack, 'hex:8=0xf') + + s = pack('oct:6', '0o77') + self.assertRaises(bitstring.CreationError, pack, 'oct:6', '0o1') + self.assertRaises(bitstring.CreationError, pack, 'oct:6', '0o111') + self.assertRaises(bitstring.CreationError, pack, 'oct:3', '0b1') + self.assertRaises(bitstring.CreationError, pack, 'oct:3=hello', hello='0o12') + + s = pack('bits:3', BitStream('0b111')) + self.assertRaises(bitstring.CreationError, pack, 'bits:3', BitStream('0b11')) + self.assertRaises(bitstring.CreationError, pack, 'bits:3', BitStream('0b1111')) + self.assertRaises(bitstring.CreationError, pack, 'bits:12=b', b=BitStream('0b11')) + + def testPackNull(self): + s = pack('') + self.assertFalse(s) + s = pack(',') + self.assertFalse(s) + s = pack(',,,,,0b1,,,,,,,,,,,,,0b1,,,,,,,,,,') + self.assertEqual(s, '0b11') + s = pack(',,uint:12,,bin:3,', 100, '100') + a, b = s.unpack(',,,uint:12,,,,bin:3,,,') + self.assertEqual(a, 100) + self.assertEqual(b, '100') + + def testPackDefaultUint(self): + s = pack('10, 5', 1, 2) + a, b = s.unpack('10, 5') + self.assertEqual((a, b), (1, 2)) + s = pack('10=150, 12=qee', qee=3) + self.assertEqual(s, 'uint:10=150, uint:12=3') + t = BitStream('100=5') + self.assertEqual(t, 'uint:100=5') + + def testPackDefualtUintErrors(self): + self.assertRaises(bitstring.CreationError, BitStream, '5=-1') + + def testPackingLongKeywordBitstring(self): + s = pack('bits=b', b=BitStream(128000)) + self.assertEqual(s, BitStream(128000)) + + def testPackingWithListFormat(self): + f = ['bin', 'hex', 'uint:10'] + a = pack(','.join(f), '00', '234', 100) + b = pack(f, '00', '234', 100) + self.assertEqual(a, b) + + +class Unpack(unittest.TestCase): + def testUnpack1(self): + s = BitStream('uint:13=23, hex=e, bin=010, int:41=-554, 0o44332, se=-12, ue=4') + s.pos = 11 + a, b, c, d, e, f, g = s.unpack('uint:13, hex:4, bin:3, int:41, oct:15, se, ue') + self.assertEqual(a, 23) + self.assertEqual(b, 'e') + 
self.assertEqual(c, '010') + self.assertEqual(d, -554) + self.assertEqual(e, '44332') + self.assertEqual(f, -12) + self.assertEqual(g, 4) + self.assertEqual(s.pos, 11) + + def testUnpack2(self): + s = BitStream('0xff, 0b000, uint:12=100') + a, b, c = s.unpack('bits:8, bits, uint:12') + self.assertEqual(type(s), BitStream) + self.assertEqual(a, '0xff') + self.assertEqual(type(s), BitStream) + self.assertEqual(b, '0b000') + self.assertEqual(c, 100) + a, b = s.unpack(['bits:11', 'uint']) + self.assertEqual(a, '0xff, 0b000') + self.assertEqual(b, 100) + + def testUnpackNull(self): + s = pack('0b1, , , 0xf,') + a, b = s.unpack('bin:1,,,hex:4,') + self.assertEqual(a, '1') + self.assertEqual(b, 'f') + + +class FromFile(unittest.TestCase): + def testCreationFromFileOperations(self): + s = BitStream(filename='smalltestfile') + s.append('0xff') + self.assertEqual(s.hex, '0123456789abcdefff') + + s = ConstBitStream(filename='smalltestfile') + t = BitStream('0xff') + s + self.assertEqual(t.hex, 'ff0123456789abcdef') + + s = BitStream(filename='smalltestfile') + del s[:1] + self.assertEqual((BitStream('0b0') + s).hex, '0123456789abcdef') + + s = BitStream(filename='smalltestfile') + del s[:7 * 8] + self.assertEqual(s.hex, 'ef') + + s = BitStream(filename='smalltestfile') + s.insert('0xc', 4) + self.assertEqual(s.hex, '0c123456789abcdef') + + s = BitStream(filename='smalltestfile') + s.prepend('0xf') + self.assertEqual(s.hex, 'f0123456789abcdef') + + s = BitStream(filename='smalltestfile') + s.overwrite('0xaaa', 12) + self.assertEqual(s.hex, '012aaa6789abcdef') + + s = BitStream(filename='smalltestfile') + s.reverse() + self.assertEqual(s.hex, 'f7b3d591e6a2c480') + + s = BitStream(filename='smalltestfile') + del s[-60:] + self.assertEqual(s.hex, '0') + + s = BitStream(filename='smalltestfile') + del s[:60] + self.assertEqual(s.hex, 'f') + + def testFileProperties(self): + s = ConstBitStream(filename='smalltestfile') + self.assertEqual(s.hex, '0123456789abcdef') + 
self.assertEqual(s.uint, 81985529216486895) + self.assertEqual(s.int, 81985529216486895) + self.assertEqual(s.bin, '0000000100100011010001010110011110001001101010111100110111101111') + self.assertEqual(s[:-1].oct, '002215053170465363367') + s.bitpos = 0 + self.assertEqual(s.read('se'), -72) + s.bitpos = 0 + self.assertEqual(s.read('ue'), 144) + self.assertEqual(s.bytes, b'\x01\x23\x45\x67\x89\xab\xcd\xef') + self.assertEqual(s.tobytes(), b'\x01\x23\x45\x67\x89\xab\xcd\xef') + + def testCreationFromFileWithLength(self): + s = ConstBitStream(filename='test.m1v', length=32) + self.assertEqual(s.length, 32) + self.assertEqual(s.hex, '000001b3') + s = ConstBitStream(filename='test.m1v', length=0) + self.assertFalse(s) + self.assertRaises(bitstring.CreationError, BitStream, filename='smalltestfile', length=65) + self.assertRaises(bitstring.CreationError, ConstBitStream, filename='smalltestfile', length=64, offset=1) + # self.assertRaises(bitstring.CreationError, ConstBitStream, filename='smalltestfile', offset=65) + f = open('smalltestfile', 'rb') + # self.assertRaises(bitstring.CreationError, ConstBitStream, auto=f, offset=65) + self.assertRaises(bitstring.CreationError, ConstBitStream, auto=f, length=65) + self.assertRaises(bitstring.CreationError, ConstBitStream, auto=f, offset=60, length=5) + + def testCreationFromFileWithOffset(self): + a = BitStream(filename='test.m1v', offset=4) + self.assertEqual(a.peek(4 * 8).hex, '00001b31') + b = BitStream(filename='test.m1v', offset=28) + self.assertEqual(b.peek(8).hex, '31') + + def testFileSlices(self): + s = BitStream(filename='smalltestfile') + self.assertEqual(s[-16:].hex, 'cdef') + + def testCreataionFromFileErrors(self): + self.assertRaises(IOError, BitStream, filename='Idonotexist') + + def testFindInFile(self): + s = BitStream(filename='test.m1v') + self.assertTrue(s.find('0x160120')) + self.assertEqual(s.bytepos, 4) + s3 = s.read(3 * 8) + self.assertEqual(s3.hex, '160120') + s.bytepos = 0 + self.assertTrue(s._pos == 
0) + self.assertTrue(s.find('0x0001b2')) + self.assertEqual(s.bytepos, 13) + + def testHexFromFile(self): + s = BitStream(filename='test.m1v') + self.assertEqual(s[0:32].hex, '000001b3') + self.assertEqual(s[-32:].hex, '000001b7') + s.hex = '0x11' + self.assertEqual(s.hex, '11') + + def testFileOperations(self): + s1 = BitStream(filename='test.m1v') + s2 = BitStream(filename='test.m1v') + self.assertEqual(s1.read(32).hex, '000001b3') + self.assertEqual(s2.read(32).hex, '000001b3') + s1.bytepos += 4 + self.assertEqual(s1.read(8).hex, '02') + self.assertEqual(s2.read(5 * 8).hex, '1601208302') + s1.pos = s1.len + try: + s1.pos += 1 + self.assertTrue(False) + except ValueError: + pass + + def testFileBitGetting(self): + s = ConstBitStream(filename='smalltestfile', offset=16, length=8) # 0x45 + b = s[1] + self.assertTrue(b) + b = s.any(0, [-1, -2, -3]) + self.assertTrue(b) + b = s.all(0, [0, 1, 2]) + self.assertFalse(b) + + def testVeryLargeFiles(self): + # This uses an 11GB file which isn't distributed for obvious reasons + # and so this test won't work for anyone except me! 
+ try: + s = ConstBitStream(filename='11GB.mkv') + except IOError: + return + self.assertEqual(s.len, 11743020505 * 8) + self.assertEqual(s[1000000000:1000000100].hex, 'bdef7335d4545f680d669ce24') + self.assertEqual(s[-4::8].hex, 'bbebf7a1') + + +class CreationErrors(unittest.TestCase): + def testIncorrectBinAssignment(self): + s = BitStream() + self.assertRaises(bitstring.CreationError, s._setbin_safe, '0010020') + + def testIncorrectHexAssignment(self): + s = BitStream() + self.assertRaises(bitstring.CreationError, s._sethex, '0xabcdefg') + + +class Length(unittest.TestCase): + def testLengthZero(self): + self.assertEqual(BitStream('').len, 0) + + def testLength(self): + self.assertEqual(BitStream('0x80').len, 8) + + def testLengthErrors(self): + #TODO: Lots of new checks, for various inits which now disallow length and offset + pass + #self.assertRaises(ValueError, BitStream, bin='111', length=-1) + #self.assertRaises(ValueError, BitStream, bin='111', length=4) + + def testOffsetLengthError(self): + self.assertRaises(bitstring.CreationError, BitStream, hex='0xffff', offset=-1) + + +class SimpleConversions(unittest.TestCase): + def testConvertToUint(self): + self.assertEqual(BitStream('0x10').uint, 16) + self.assertEqual(BitStream('0b000111').uint, 7) + + def testConvertToInt(self): + self.assertEqual(BitStream('0x10').int, 16) + self.assertEqual(BitStream('0b11110').int, -2) + + def testConvertToHex(self): + self.assertEqual(BitStream(bytes=b'\x00\x12\x23\xff').hex, '001223ff') + s = BitStream('0b11111') + self.assertRaises(bitstring.InterpretError, s._gethex) + + +class Empty(unittest.TestCase): + def testEmptyBitstring(self): + s = BitStream() + self.assertRaises(bitstring.ReadError, s.read, 1) + self.assertEqual(s.bin, '') + self.assertEqual(s.hex, '') + self.assertRaises(bitstring.InterpretError, s._getint) + self.assertRaises(bitstring.InterpretError, s._getuint) + self.assertFalse(s) + + def testNonEmptyBitStream(self): + s = BitStream(bin='0') + 
self.assertFalse(not s.len) + + +class Position(unittest.TestCase): + def testBitPosition(self): + s = BitStream(bytes=b'\x00\x00\x00') + self.assertEqual(s.bitpos, 0) + s.read(5) + self.assertEqual(s.pos, 5) + s.pos = s.len + self.assertRaises(bitstring.ReadError, s.read, 1) + + def testBytePosition(self): + s = BitStream(bytes=b'\x00\x00\x00') + self.assertEqual(s.bytepos, 0) + s.read(10) + self.assertRaises(bitstring.ByteAlignError, s._getbytepos) + s.read(6) + self.assertEqual(s.bytepos, 2) + + def testSeekToBit(self): + s = BitStream(bytes=b'\x00\x00\x00\x00\x00\x00') + s.bitpos = 0 + self.assertEqual(s.bitpos, 0) + self.assertRaises(ValueError, s._setbitpos, -1) + self.assertRaises(ValueError, s._setbitpos, 6 * 8 + 1) + s.bitpos = 6 * 8 + self.assertEqual(s.bitpos, 6 * 8) + + def testSeekToByte(self): + s = BitStream(bytes=b'\x00\x00\x00\x00\x00\xab') + s.bytepos = 5 + self.assertEqual(s.read(8).hex, 'ab') + + def testAdvanceBitsAndBytes(self): + s = BitStream(bytes=b'\x00\x00\x00\x00\x00\x00\x00\x00') + s.pos += 5 + self.assertEqual(s.pos, 5) + s.bitpos += 16 + self.assertEqual(s.pos, 2 * 8 + 5) + s.pos -= 8 + self.assertEqual(s.pos, 8 + 5) + + def testRetreatBitsAndBytes(self): + a = BitStream(length=100) + a.pos = 80 + a.bytepos -= 5 + self.assertEqual(a.bytepos, 5) + a.pos -= 5 + self.assertEqual(a.pos, 35) + + +class Offset(unittest.TestCase): + def testOffset1(self): + s = BitStream(bytes=b'\x00\x1b\x3f', offset=4) + self.assertEqual(s.read(8).bin, '00000001') + self.assertEqual(s.length, 20) + + def testOffset2(self): + s1 = BitStream(bytes=b'\xf1\x02\x04') + s2 = BitStream(bytes=b'\xf1\x02\x04', length=23) + for i in [1, 2, 3, 4, 5, 6, 7, 6, 5, 4, 3, 2, 1, 0, 7, 3, 5, 1, 4]: + s1._datastore = offsetcopy(s1._datastore, i) + self.assertEqual(s1.hex, 'f10204') + s2._datastore = offsetcopy(s2._datastore, i) + self.assertEqual(s2.bin, '11110001000000100000010') + + +class Append(unittest.TestCase): + def testAppend(self): + s1 = BitStream('0b00000') + 
s1.append(BitStream(bool=True)) + self.assertEqual(s1.bin, '000001') + self.assertEqual((BitStream('0x0102') + BitStream('0x0304')).hex, '01020304') + + def testAppendSameBitstring(self): + s1 = BitStream('0xf0')[:6] + s1.append(s1) + self.assertEqual(s1.bin, '111100111100') + + def testAppendWithOffset(self): + s = BitStream(bytes=b'\x28\x28', offset=1) + s.append('0b0') + self.assertEqual(s.hex, '5050') + + +class ByteAlign(unittest.TestCase): + def testByteAlign(self): + s = BitStream(hex='0001ff23') + s.bytealign() + self.assertEqual(s.bytepos, 0) + s.pos += 11 + s.bytealign() + self.assertEqual(s.bytepos, 2) + s.pos -= 10 + s.bytealign() + self.assertEqual(s.bytepos, 1) + + def testByteAlignWithOffset(self): + s = BitStream(hex='0112233') + s._datastore = offsetcopy(s._datastore, 3) + bitstoalign = s.bytealign() + self.assertEqual(bitstoalign, 0) + self.assertEqual(s.read(5).bin, '00001') + + def testInsertByteAligned(self): + s = BitStream('0x0011') + s.insert(BitStream('0x22'), 8) + self.assertEqual(s.hex, '002211') + s = BitStream(0) + s.insert(BitStream(bin='101'), 0) + self.assertEqual(s.bin, '101') + + +class Truncate(unittest.TestCase): + def testTruncateStart(self): + s = BitStream('0b1') + del s[:1] + self.assertFalse(s) + s = BitStream(hex='1234') + self.assertEqual(s.hex, '1234') + del s[:4] + self.assertEqual(s.hex, '234') + del s[:9] + self.assertEqual(s.bin, '100') + del s[:2] + self.assertEqual(s.bin, '0') + self.assertEqual(s.len, 1) + del s[:1] + self.assertFalse(s) + + def testTruncateEnd(self): + s = BitStream('0b1') + del s[-1:] + self.assertFalse(s) + s = BitStream(bytes=b'\x12\x34') + self.assertEqual(s.hex, '1234') + del s[-4:] + self.assertEqual(s.hex, '123') + del s[-9:] + self.assertEqual(s.bin, '000') + del s[-3:] + self.assertFalse(s) + s = BitStream('0b001') + del s[:2] + del s[-1:] + self.assertFalse(s) + + +class Slice(unittest.TestCase): + def testByteAlignedSlice(self): + s = BitStream(hex='0x123456') + 
self.assertEqual(s[8:16].hex, '34') + s = s[8:24] + self.assertEqual(s.len, 16) + self.assertEqual(s.hex, '3456') + s = s[0:8] + self.assertEqual(s.hex, '34') + s.hex = '0x123456' + self.assertEqual(s[8:24][0:8].hex, '34') + + def testSlice(self): + s = BitStream(bin='000001111100000') + s1 = s[0:5] + s2 = s[5:10] + s3 = s[10:15] + self.assertEqual(s1.bin, '00000') + self.assertEqual(s2.bin, '11111') + self.assertEqual(s3.bin, '00000') + + +class Insert(unittest.TestCase): + def testInsert(self): + s1 = BitStream(hex='0x123456') + s2 = BitStream(hex='0xff') + s1.bytepos = 1 + s1.insert(s2) + self.assertEqual(s1.bytepos, 2) + self.assertEqual(s1.hex, '12ff3456') + s1.insert('0xee', 24) + self.assertEqual(s1.hex, '12ff34ee56') + self.assertEqual(s1.bitpos, 32) + self.assertRaises(ValueError, s1.insert, '0b1', -1000) + self.assertRaises(ValueError, s1.insert, '0b1', 1000) + + def testInsertNull(self): + s = BitStream(hex='0x123').insert(BitStream(), 3) + self.assertEqual(s.hex, '123') + + def testInsertBits(self): + one = BitStream(bin='1') + zero = BitStream(bin='0') + s = BitStream(bin='00') + s.insert(one, 0) + self.assertEqual(s.bin, '100') + s.insert(zero, 0) + self.assertEqual(s.bin, '0100') + s.insert(one, s.len) + self.assertEqual(s.bin, '01001') + s.insert(s, 2) + self.assertEqual(s.bin, '0101001001') + + +class Resetting(unittest.TestCase): + def testSetHex(self): + s = BitStream() + s.hex = '0' + self.assertEqual(s.hex, '0') + s.hex = '0x010203045' + self.assertEqual(s.hex, '010203045') + self.assertRaises(bitstring.CreationError, s._sethex, '0x002g') + + def testSetBin(self): + s = BitStream(bin="000101101") + self.assertEqual(s.bin, '000101101') + self.assertEqual(s.len, 9) + s.bin = '0' + self.assertEqual(s.bin, '0') + self.assertEqual(s.len, 1) + + def testSetEmptyBin(self): + s = BitStream(hex='0x000001b3') + s.bin = '' + self.assertEqual(s.len, 0) + self.assertEqual(s.bin, '') + + def testSetInvalidBin(self): + s = BitStream() + 
self.assertRaises(bitstring.CreationError, s._setbin_safe, '00102') + + +class Overwriting(unittest.TestCase): + def testOverwriteBit(self): + s = BitStream(bin='0') + s.overwrite(BitStream(bin='1'), 0) + self.assertEqual(s.bin, '1') + + def testOverwriteLimits(self): + s = BitStream(bin='0b11111') + s.overwrite(BitStream(bin='000'), 0) + self.assertEqual(s.bin, '00011') + s.overwrite('0b000', 2) + self.assertEqual(s.bin, '00000') + + def testOverwriteNull(self): + s = BitStream(hex='342563fedec') + s2 = BitStream(s) + s.overwrite(BitStream(bin=''), 23) + self.assertEqual(s.bin, s2.bin) + + def testOverwritePosition(self): + s1 = BitStream(hex='0123456') + s2 = BitStream(hex='ff') + s1.bytepos = 1 + s1.overwrite(s2) + self.assertEqual((s1.hex, s1.bytepos), ('01ff456', 2)) + s1.overwrite('0xff', 0) + self.assertEqual((s1.hex, s1.bytepos), ('ffff456', 1)) + + def testOverwriteWithSelf(self): + s = BitStream('0x123') + s.overwrite(s) + self.assertEqual(s, '0x123') + + +class Split(unittest.TestCase): + def testSplitByteAlignedCornerCases(self): + s = BitStream() + bsl = s.split(BitStream(hex='0xff')) + self.assertEqual(next(bsl).hex, '') + self.assertRaises(StopIteration, next, bsl) + s = BitStream(hex='aabbcceeddff') + delimiter = BitStream() + bsl = s.split(delimiter) + self.assertRaises(ValueError, next, bsl) + delimiter = BitStream(hex='11') + bsl = s.split(delimiter) + self.assertEqual(next(bsl).hex, s.hex) + + def testSplitByteAligned(self): + s = BitStream(hex='0x1234aa1234bbcc1234ffff') + delimiter = BitStream(hex='1234') + bsl = s.split(delimiter) + self.assertEqual([b.hex for b in bsl], ['', '1234aa', '1234bbcc', '1234ffff']) + self.assertEqual(s.pos, 0) + + def testSplitByteAlignedWithIntialBytes(self): + s = BitStream(hex='aa471234fedc43 47112233 47 4723 472314') + delimiter = BitStream(hex='47') + s.find(delimiter) + self.assertEqual(s.bytepos, 1) + bsl = s.split(delimiter, start=0) + self.assertEqual([b.hex for b in bsl], ['aa', '471234fedc43', 
'47112233', + '47', '4723', '472314']) + self.assertEqual(s.bytepos, 1) + + def testSplitByteAlignedWithOverlappingDelimiter(self): + s = BitStream(hex='aaffaaffaaffaaffaaff') + bsl = s.split(BitStream(hex='aaffaa')) + self.assertEqual([b.hex for b in bsl], ['', 'aaffaaff', 'aaffaaffaaff']) + + +class Adding(unittest.TestCase): + def testAdding(self): + s1 = BitStream(hex='0x0102') + s2 = BitStream(hex='0x0304') + s3 = s1 + s2 + self.assertEqual(s1.hex, '0102') + self.assertEqual(s2.hex, '0304') + self.assertEqual(s3.hex, '01020304') + s3 += s1 + self.assertEqual(s3.hex, '010203040102') + self.assertEqual(s2[9:16].bin, '0000100') + self.assertEqual(s1[0:9].bin, '000000010') + s4 = BitStream(bin='000000010') +\ + BitStream(bin='0000100') + self.assertEqual(s4.bin, '0000000100000100') + s2p = s2[9:16] + s1p = s1[0:9] + s5p = s1p + s2p + s5 = s1[0:9] + s2[9:16] + self.assertEqual(s5.bin, '0000000100000100') + + def testMoreAdding(self): + s = BitStream(bin='00') + BitStream(bin='') + BitStream(bin='11') + self.assertEqual(s.bin, '0011') + s = '0b01' + s += BitStream('0b11') + self.assertEqual(s.bin, '0111') + s = BitStream('0x00') + t = BitStream('0x11') + s += t + self.assertEqual(s.hex, '0011') + self.assertEqual(t.hex, '11') + s += s + self.assertEqual(s.hex, '00110011') + + def testRadd(self): + s = '0xff' + BitStream('0xee') + self.assertEqual(s.hex, 'ffee') + + + def testTruncateAsserts(self): + s = BitStream('0x001122') + s.bytepos = 2 + del s[-s.len:] + self.assertEqual(s.bytepos, 0) + s.append('0x00') + s.append('0x1122') + s.bytepos = 2 + del s[:s.len] + self.assertEqual(s.bytepos, 0) + s.append('0x00') + + def testOverwriteErrors(self): + s = BitStream(bin='11111') + self.assertRaises(ValueError, s.overwrite, BitStream(bin='1'), -10) + self.assertRaises(ValueError, s.overwrite, BitStream(bin='1'), 6) + self.assertRaises(ValueError, s.overwrite, BitStream(bin='11111'), 1) + + def testDeleteBits(self): + s = BitStream(bin='000111100000') + s.bitpos = 4 + del 
s[4:8] + self.assertEqual(s.bin, '00010000') + del s[4:1004] + self.assertTrue(s.bin, '0001') + + def testDeleteBitsWithPosition(self): + s = BitStream(bin='000111100000') + del s[4:8] + self.assertEqual(s.bin, '00010000') + + def testDeleteBytes(self): + s = BitStream('0x00112233') + del s[8:8] + self.assertEqual(s.hex, '00112233') + self.assertEqual(s.pos, 0) + del s[8:16] + self.assertEqual(s.hex, '002233') + self.assertEqual(s.bytepos, 0) + del s[:24] + self.assertFalse(s) + self.assertEqual(s.pos, 0) + + def testGetItemWithPositivePosition(self): + s = BitStream(bin='0b1011') + self.assertEqual(s[0], True) + self.assertEqual(s[1], False) + self.assertEqual(s[2], True) + self.assertEqual(s[3], True) + self.assertRaises(IndexError, s.__getitem__, 4) + + def testGetItemWithNegativePosition(self): + s = BitStream(bin='1011') + self.assertEqual(s[-1], True) + self.assertEqual(s[-2], True) + self.assertEqual(s[-3], False) + self.assertEqual(s[-4], True) + self.assertRaises(IndexError, s.__getitem__, -5) + + def testSlicing(self): + s = ConstBitStream(hex='0123456789') + self.assertEqual(s[0:8].hex, '01') + self.assertFalse(s[0:0]) + self.assertFalse(s[23:20]) + self.assertEqual(s[8:12].bin, '0010') + self.assertEqual(s[32:80], '0x89') + + def testNegativeSlicing(self): + s = ConstBitStream(hex='012345678') + self.assertEqual(s[:-8].hex, '0123456') + self.assertEqual(s[-16:-8].hex, '56') + self.assertEqual(s[-24:].hex, '345678') + self.assertEqual(s[-1000:-24], '0x012') + + def testLen(self): + s = BitStream() + self.assertEqual(len(s), 0) + s.append(BitStream(bin='001')) + self.assertEqual(len(s), 3) + + def testJoin(self): + s1 = BitStream(bin='0') + s2 = BitStream(bin='1') + s3 = BitStream(bin='000') + s4 = BitStream(bin='111') + strings = [s1, s2, s1, s3, s4] + s = BitStream().join(strings) + self.assertEqual(s.bin, '010000111') + + def testJoin2(self): + s1 = BitStream(hex='00112233445566778899aabbccddeeff') + s2 = BitStream(bin='0b000011') + bsl = [s1[0:32], 
s1[4:12], s2, s2, s2, s2] + s = ConstBitStream().join(bsl) + self.assertEqual(s.hex, '00112233010c30c3') + + bsl = [BitStream(uint=j, length=12) for j in range(10) for i in range(10)] + s = BitStream().join(bsl) + self.assertEqual(s.length, 1200) + + + def testPos(self): + s = BitStream(bin='1') + self.assertEqual(s.bitpos, 0) + s.read(1) + self.assertEqual(s.bitpos, 1) + + def testWritingData(self): + strings = [BitStream(bin=x) for x in ['0', '001', '0011010010', '010010', '1011']] + s = BitStream().join(strings) + s2 = BitStream(bytes=s.bytes) + self.assertEqual(s2.bin, '000100110100100100101011') + s2.append(BitStream(bin='1')) + s3 = BitStream(bytes=s2.tobytes()) + self.assertEqual(s3.bin, '00010011010010010010101110000000') + + def testWritingDataWithOffsets(self): + s1 = BitStream(bytes=b'\x10') + s2 = BitStream(bytes=b'\x08\x00', length=8, offset=1) + s3 = BitStream(bytes=b'\x04\x00', length=8, offset=2) + self.assertTrue(s1 == s2) + self.assertTrue(s2 == s3) + self.assertTrue(s1.bytes == s2.bytes) + self.assertTrue(s2.bytes == s3.bytes) + + def testVariousThings1(self): + hexes = ['12345678', '87654321', 'ffffffffff', 'ed', '12ec'] + bins = ['001010', '1101011', '0010000100101110110110', '11', '011'] + bsl = [] + for (hex, bin) in list(zip(hexes, bins)) * 5: + bsl.append(BitStream(hex=hex)) + bsl.append(BitStream(bin=bin)) + s = BitStream().join(bsl) + for (hex, bin) in list(zip(hexes, bins)) * 5: + h = s.read(4 * len(hex)) + b = s.read(len(bin)) + self.assertEqual(h.hex, hex) + self.assertEqual(b.bin, bin) + + def testVariousThings2(self): + s1 = BitStream(hex="0x1f08")[:13] + self.assertEqual(s1.bin, '0001111100001') + s2 = BitStream(bin='0101') + self.assertEqual(s2.bin, '0101') + s1.append(s2) + self.assertEqual(s1.length, 17) + self.assertEqual(s1.bin, '00011111000010101') + s1 = s1[3:8] + self.assertEqual(s1.bin, '11111') + + def testVariousThings3(self): + s1 = BitStream(hex='0x012480ff')[2:27] + s2 = s1 + s1 + self.assertEqual(s2.length, 50) + s3 = 
s2[0:25] + s4 = s2[25:50] + self.assertEqual(s3.bin, s4.bin) + + def testPeekBit(self): + s = BitStream(bin='01') + self.assertEqual(s.peek(1), [0]) + self.assertEqual(s.peek(1), [0]) + self.assertEqual(s.read(1), [0]) + self.assertEqual(s.peek(1), [1]) + self.assertEqual(s.peek(1), [1]) + + s = BitStream(bytes=b'\x1f', offset=3) + self.assertEqual(s.len, 5) + self.assertEqual(s.peek(5).bin, '11111') + self.assertEqual(s.peek(5).bin, '11111') + s.pos += 1 + self.assertRaises(bitstring.ReadError, s.peek, 5) + + s = BitStream(hex='001122334455') + self.assertEqual(s.peek(8).hex, '00') + self.assertEqual(s.read(8).hex, '00') + s.pos += 33 + self.assertRaises(bitstring.ReadError, s.peek, 8) + + s = BitStream(hex='001122334455') + self.assertEqual(s.peek(8 * 2).hex, '0011') + self.assertEqual(s.read(8 * 3).hex, '001122') + self.assertEqual(s.peek(8 * 3).hex, '334455') + self.assertRaises(bitstring.ReadError, s.peek, 25) + + def testAdvanceBit(self): + s = BitStream(hex='0xff') + s.bitpos = 6 + s.pos += 1 + self.assertEqual(s.bitpos, 7) + s.bitpos += 1 + try: + s.pos += 1 + self.assertTrue(False) + except ValueError: + pass + + def testAdvanceByte(self): + s = BitStream(hex='0x010203') + s.bytepos += 1 + self.assertEqual(s.bytepos, 1) + s.bytepos += 1 + self.assertEqual(s.bytepos, 2) + s.bytepos += 1 + try: + s.bytepos += 1 + self.assertTrue(False) + except ValueError: + pass + + def testRetreatBit(self): + s = BitStream(hex='0xff') + try: + s.pos -= 1 + self.assertTrue(False) + except ValueError: + pass + s.pos = 5 + s.pos -= 1 + self.assertEqual(s.pos, 4) + + def testRetreatByte(self): + s = BitStream(hex='0x010203') + try: + s.bytepos -= 1 + self.assertTrue(False) + except ValueError: + pass + s.bytepos = 3 + s.bytepos -= 1 + self.assertEqual(s.bytepos, 2) + self.assertEqual(s.read(8).hex, '03') + + def testCreationByAuto(self): + s = BitStream('0xff') + self.assertEqual(s.hex, 'ff') + s = BitStream('0b00011') + self.assertEqual(s.bin, '00011') + 
self.assertRaises(bitstring.CreationError, BitStream, 'hello') + s1 = BitStream(bytes=b'\xf5', length=3, offset=5) + s2 = BitStream(s1, length=1, offset=1) + self.assertEqual(s2, '0b0') + s = BitStream(bytes=b'\xff', offset=2) + t = BitStream(s, offset=2) + self.assertEqual(t, '0b1111') + self.assertRaises(TypeError, BitStream, auto=1.2) + + def testCreationByAuto2(self): + s = BitStream('bin=001') + self.assertEqual(s.bin, '001') + s = BitStream('oct=0o007') + self.assertEqual(s.oct, '007') + s = BitStream('hex=123abc') + self.assertEqual(s, '0x123abc') + + s = BitStream('bin:2=01') + self.assertEqual(s, '0b01') + for s in ['bin:1=01', 'bits:4=0b1', 'oct:3=000', 'hex:4=0x1234']: + self.assertRaises(bitstring.CreationError, BitStream, s) + + def testInsertUsingAuto(self): + s = BitStream('0xff') + s.insert('0x00', 4) + self.assertEqual(s.hex, 'f00f') + self.assertRaises(ValueError, s.insert, 'ff') + + def testOverwriteUsingAuto(self): + s = BitStream('0x0110') + s.overwrite('0b1') + self.assertEqual(s.hex, '8110') + s.overwrite('') + self.assertEqual(s.hex, '8110') + self.assertRaises(ValueError, s.overwrite, '0bf') + + def testFindUsingAuto(self): + s = BitStream('0b000000010100011000') + self.assertTrue(s.find('0b101')) + self.assertEqual(s.pos, 7) + + def testFindbytealignedUsingAuto(self): + s = BitStream('0x00004700') + self.assertTrue(s.find('0b01000111', bytealigned=True)) + self.assertEqual(s.bytepos, 2) + + def testAppendUsingAuto(self): + s = BitStream('0b000') + s.append('0b111') + self.assertEqual(s.bin, '000111') + s.append('0b0') + self.assertEqual(s.bin, '0001110') + + def testSplitByteAlignedUsingAuto(self): + s = BitStream('0x000143563200015533000123') + sections = s.split('0x0001') + self.assertEqual(next(sections).hex, '') + self.assertEqual(next(sections).hex, '0001435632') + self.assertEqual(next(sections).hex, '00015533') + self.assertEqual(next(sections).hex, '000123') + self.assertRaises(StopIteration, next, sections) + + def 
testSplitByteAlignedWithSelf(self): + s = BitStream('0x1234') + sections = s.split(s) + self.assertEqual(next(sections).hex, '') + self.assertEqual(next(sections).hex, '1234') + self.assertRaises(StopIteration, next, sections) + + def testPrepend(self): + s = BitStream('0b000') + s.prepend('0b11') + self.assertEqual(s.bin, '11000') + s.prepend(s) + self.assertEqual(s.bin, '1100011000') + s.prepend('') + self.assertEqual(s.bin, '1100011000') + + def testNullSlice(self): + s = BitStream('0x111') + t = s[1:1] + self.assertEqual(t._datastore.bytelength, 0) + + def testMultipleAutos(self): + s = BitStream('0xa') + s.prepend('0xf') + s.append('0xb') + self.assertEqual(s, '0xfab') + s.prepend(s) + s.append('0x100') + s.overwrite('0x5', 4) + self.assertEqual(s, '0xf5bfab100') + + def testReverse(self): + s = BitStream('0b0011') + s.reverse() + self.assertEqual(s.bin, '1100') + s = BitStream('0b10') + s.reverse() + self.assertEqual(s.bin, '01') + s = BitStream() + s.reverse() + self.assertEqual(s.bin, '') + + def testInitWithConcatenatedStrings(self): + s = BitStream('0xff 0Xee 0xd 0xcc') + self.assertEqual(s.hex, 'ffeedcc') + s = BitStream('0b0 0B111 0b001') + self.assertEqual(s.bin, '0111001') + s += '0b1' + '0B1' + self.assertEqual(s.bin, '011100111') + s = BitStream(hex='ff0xee') + self.assertEqual(s.hex, 'ffee') + s = BitStream(bin='000b0b11') + self.assertEqual(s.bin, '0011') + s = BitStream(' 0o123 0O 7 0 o1') + self.assertEqual(s.oct, '12371') + s += ' 0 o 332' + self.assertEqual(s.oct, '12371332') + + def testEquals(self): + s1 = BitStream('0b01010101') + s2 = BitStream('0b01010101') + self.assertTrue(s1 == s2) + s3 = BitStream() + s4 = BitStream() + self.assertTrue(s3 == s4) + self.assertFalse(s3 != s4) + s5 = BitStream(bytes=b'\xff', offset=2, length=3) + s6 = BitStream('0b111') + self.assertTrue(s5 == s6) + class A(object): + pass + self.assertFalse(s5 == A()) + + def testLargeEquals(self): + s1 = BitStream(1000000) + s2 = BitStream(1000000) + s1.set(True, [-1, 
55, 53214, 534211, 999999]) + s2.set(True, [-1, 55, 53214, 534211, 999999]) + self.assertEqual(s1, s2) + s1.set(True, 800000) + self.assertNotEqual(s1, s2) + + def testNotEquals(self): + s1 = BitStream('0b0') + s2 = BitStream('0b1') + self.assertTrue(s1 != s2) + self.assertFalse(s1 != BitStream('0b0')) + + def testEqualityWithAutoInitialised(self): + a = BitStream('0b00110111') + self.assertTrue(a == '0b00110111') + self.assertTrue(a == '0x37') + self.assertTrue('0b0011 0111' == a) + self.assertTrue('0x3 0x7' == a) + self.assertFalse(a == '0b11001000') + self.assertFalse('0x3737' == a) + + def testInvertSpecialMethod(self): + s = BitStream('0b00011001') + self.assertEqual((~s).bin, '11100110') + self.assertEqual((~BitStream('0b0')).bin, '1') + self.assertEqual((~BitStream('0b1')).bin, '0') + self.assertTrue(~~s == s) + + def testInvertBitPosition(self): + s = ConstBitStream('0xefef') + s.pos = 8 + t = ~s + self.assertEqual(s.pos, 8) + self.assertEqual(t.pos, 0) + + def testInvertSpecialMethodErrors(self): + s = BitStream() + self.assertRaises(bitstring.Error, s.__invert__) + + def testJoinWithAuto(self): + s = BitStream().join(['0xf', '0b00', BitStream(bin='11')]) + self.assertEqual(s, '0b11110011') + + def testAutoBitStringCopy(self): + s = BitStream('0xabcdef') + t = BitStream(s) + self.assertEqual(t.hex, 'abcdef') + del s[-8:] + self.assertEqual(t.hex, 'abcdef') + +class Multiplication(unittest.TestCase): + + def testMultiplication(self): + a = BitStream('0xff') + b = a * 8 + self.assertEqual(b, '0xffffffffffffffff') + b = 4 * a + self.assertEqual(b, '0xffffffff') + self.assertTrue(1 * a == a * 1 == a) + c = a * 0 + self.assertFalse(c) + a *= 3 + self.assertEqual(a, '0xffffff') + a *= 0 + self.assertFalse(a) + one = BitStream('0b1') + zero = BitStream('0b0') + mix = one * 2 + 3 * zero + 2 * one * 2 + self.assertEqual(mix, '0b110001111') + q = BitStream() + q *= 143 + self.assertFalse(q) + q += [True, True, False] + q.pos += 2 + q *= 0 + self.assertFalse(q) + 
self.assertEqual(q.bitpos, 0) + + def testMultiplicationWithFiles(self): + a = BitStream(filename='test.m1v') + b = a.len + a *= 3 + self.assertEqual(a.len, 3 * b) + + def testMultiplicationErrors(self): + a = BitStream('0b1') + b = BitStream('0b0') + self.assertRaises(ValueError, a.__mul__, -1) + self.assertRaises(ValueError, a.__imul__, -1) + self.assertRaises(ValueError, a.__rmul__, -1) + self.assertRaises(TypeError, a.__mul__, 1.2) + self.assertRaises(TypeError, a.__rmul__, b) + self.assertRaises(TypeError, a.__imul__, b) + + def testFileAndMemEquivalence(self): + a = ConstBitStream(filename='smalltestfile') + b = BitStream(filename='smalltestfile') + self.assertTrue(isinstance(a._datastore._rawarray, bitstring.MmapByteArray)) + self.assertTrue(isinstance(b._datastore._rawarray, bytearray)) + self.assertEqual(a._datastore.getbyte(0), b._datastore.getbyte(0)) + self.assertEqual(a._datastore.getbyteslice(1, 5), bytearray(b._datastore.getbyteslice(1, 5))) + + +class BitWise(unittest.TestCase): + + def testBitwiseAnd(self): + a = BitStream('0b01101') + b = BitStream('0b00110') + self.assertEqual((a & b).bin, '00100') + self.assertEqual((a & '0b11111'), a) + self.assertRaises(ValueError, a.__and__, '0b1') + self.assertRaises(ValueError, b.__and__, '0b110111111') + c = BitStream('0b0011011') + c.pos = 4 + d = c & '0b1111000' + self.assertEqual(d.pos, 0) + self.assertEqual(d.bin, '0011000') + d = '0b1111000' & c + self.assertEqual(d.bin, '0011000') + + def testBitwiseOr(self): + a = BitStream('0b111001001') + b = BitStream('0b011100011') + self.assertEqual((a | b).bin, '111101011') + self.assertEqual((a | '0b000000000'), a) + self.assertRaises(ValueError, a.__or__, '0b0000') + self.assertRaises(ValueError, b.__or__, a + '0b1') + a = '0xff00' | BitStream('0x00f0') + self.assertEqual(a.hex, 'fff0') + + def testBitwiseXor(self): + a = BitStream('0b111001001') + b = BitStream('0b011100011') + self.assertEqual((a ^ b).bin, '100101010') + self.assertEqual((a ^ 
'0b111100000').bin, '000101001') + self.assertRaises(ValueError, a.__xor__, '0b0000') + self.assertRaises(ValueError, b.__xor__, a + '0b1') + a = '0o707' ^ BitStream('0o777') + self.assertEqual(a.oct, '070') + +class Split(unittest.TestCase): + + def testSplit(self): + a = BitStream('0b0 010100111 010100 0101 010') + a.pos = 20 + subs = [i.bin for i in a.split('0b010')] + self.assertEqual(subs, ['0', '010100111', '010100', '0101', '010']) + self.assertEqual(a.pos, 20) + + def testSplitCornerCases(self): + a = BitStream('0b000000') + bsl = a.split('0b1', False) + self.assertEqual(next(bsl), a) + self.assertRaises(StopIteration, next, bsl) + b = BitStream() + bsl = b.split('0b001', False) + self.assertFalse(next(bsl)) + self.assertRaises(StopIteration, next, bsl) + + def testSplitErrors(self): + a = BitStream('0b0') + b = a.split('', False) + self.assertRaises(ValueError, next, b) + + def testSliceWithOffset(self): + a = BitStream(bytes=b'\x00\xff\x00', offset=7) + b = a[7:12] + self.assertEqual(b.bin, '11000') + + def testSplitWithMaxsplit(self): + a = BitStream('0xaabbccbbccddbbccddee') + self.assertEqual(len(list(a.split('0xbb', bytealigned=True))), 4) + bsl = list(a.split('0xbb', count=1, bytealigned=True)) + self.assertEqual((len(bsl), bsl[0]), (1, '0xaa')) + bsl = list(a.split('0xbb', count=2, bytealigned=True)) + self.assertEqual(len(bsl), 2) + self.assertEqual(bsl[0], '0xaa') + self.assertEqual(bsl[1], '0xbbcc') + + def testSplitMore(self): + s = BitStream('0b1100011001110110') + for i in range(10): + a = list(s.split('0b11', False, count=i)) + b = list(s.split('0b11', False))[:i] + self.assertEqual(a, b) + b = s.split('0b11', count=-1) + self.assertRaises(ValueError, next, b) + + def testSplitStartbit(self): + a = BitStream('0b0010101001000000001111') + bsl = a.split('0b001', bytealigned=False, start=1) + self.assertEqual([x.bin for x in bsl], ['010101', '001000000', '001111']) + b = a.split('0b001', start=-100) + self.assertRaises(ValueError, next, b) + b = 
a.split('0b001', start=23) + self.assertRaises(ValueError, next, b) + b = a.split('0b1', start=10, end=9) + self.assertRaises(ValueError, next, b) + + def testSplitStartbitByteAligned(self): + a = BitStream('0x00ffffee') + bsl = list(a.split('0b111', start=9, bytealigned=True)) + self.assertEqual([x.bin for x in bsl], ['1111111', '11111111', '11101110']) + + def testSplitEndbit(self): + a = BitStream('0b000010001001011') + bsl = list(a.split('0b1', bytealigned=False, end=14)) + self.assertEqual([x.bin for x in bsl], ['0000', '1000', '100', '10', '1']) + self.assertEqual(list(a[4:12].split('0b0', False)), list(a.split('0b0', start=4, end=12))) + # Shouldn't raise ValueError + bsl = list(a.split('0xffee', end=15)) + # Whereas this one will when we call next() + bsl = a.split('0xffee', end=16) + self.assertRaises(ValueError, next, bsl) + + def testSplitEndbitByteAligned(self): + a = BitStream('0xff00ff')[:22] + bsl = list(a.split('0b 0000 0000 111', end=19)) + self.assertEqual([x.bin for x in bsl], ['11111111', '00000000111']) + bsl = list(a.split('0b 0000 0000 111', end=18)) + self.assertEqual([x.bin for x in bsl], ['111111110000000011']) + + def testSplitMaxSplit(self): + a = BitStream('0b1' * 20) + for i in range(10): + bsl = list(a.split('0b1', count=i)) + self.assertEqual(len(bsl), i) + + ####################### + + def testPositionInSlice(self): + a = BitStream('0x00ffff00') + a.bytepos = 2 + b = a[8:24] + self.assertEqual(b.bytepos, 0) + + def testFindByteAlignedWithBits(self): + a = BitStream('0x00112233445566778899') + a.find('0b0001', bytealigned=True) + self.assertEqual(a.bitpos, 8) + + def testFindStartbitNotByteAligned(self): + a = BitStream('0b0010000100') + found = a.find('0b1', start=4) + self.assertEqual((found, a.bitpos), ((7,), 7)) + found = a.find('0b1', start=2) + self.assertEqual((found, a.bitpos), ((2,), 2)) + found = a.find('0b1', bytealigned=False, start=8) + self.assertEqual((found, a.bitpos), ((), 2)) + + def 
testFindEndbitNotByteAligned(self): + a = BitStream('0b0010010000') + found = a.find('0b1', bytealigned=False, end=2) + self.assertEqual((found, a.bitpos), ((), 0)) + found = a.find('0b1', end=3) + self.assertEqual((found, a.bitpos), ((2,), 2)) + found = a.find('0b1', bytealigned=False, start=3, end=5) + self.assertEqual((found, a.bitpos), ((), 2)) + found = a.find('0b1', start=3, end=6) + self.assertEqual((found[0], a.bitpos), (5, 5)) + + def testFindStartbitByteAligned(self): + a = BitStream('0xff001122ff0011ff') + a.pos = 40 + found = a.find('0x22', start=23, bytealigned=True) + self.assertEqual((found, a.bytepos), ((24,), 3)) + a.bytepos = 4 + found = a.find('0x22', start=24, bytealigned=True) + self.assertEqual((found, a.bytepos), ((24,), 3)) + found = a.find('0x22', start=25, bytealigned=True) + self.assertEqual((found, a.pos), ((), 24)) + found = a.find('0b111', start=40, bytealigned=True) + self.assertEqual((found, a.pos), ((56,), 56)) + + def testFindEndbitByteAligned(self): + a = BitStream('0xff001122ff0011ff') + found = a.find('0x22', end=31, bytealigned=True) + self.assertFalse(found) + self.assertEqual(a.pos, 0) + found = a.find('0x22', end=32, bytealigned=True) + self.assertTrue(found) + self.assertEqual(a.pos, 24) + self.assertEqual(found[0], 24) + + def testFindStartEndbitErrors(self): + a = BitStream('0b00100') + self.assertRaises(ValueError, a.find, '0b1', bytealigned=False, start=-100) + self.assertRaises(ValueError, a.find, '0b1', end=6) + self.assertRaises(ValueError, a.find, '0b1', start=4, end=3) + b = BitStream('0x0011223344') + self.assertRaises(ValueError, a.find, '0x22', bytealigned=True, start=-100) + self.assertRaises(ValueError, a.find, '0x22', end=41, bytealigned=True) + + def testPrependAndAppendAgain(self): + c = BitStream('0x1122334455667788') + c.bitpos = 40 + c.prepend('0b1') + self.assertEqual(c.bitpos, 41) + c = BitStream() + c.prepend('0x1234') + self.assertEqual(c.bytepos, 2) + c = BitStream() + c.append('0x1234') + 
self.assertEqual(c.bytepos, 0) + s = BitStream(bytes=b'\xff\xff', offset=2) + self.assertEqual(s.length, 14) + t = BitStream(bytes=b'\x80', offset=1, length=2) + s.prepend(t) + self.assertEqual(s, '0x3fff') + + def testFindAll(self): + a = BitStream('0b11111') + p = a.findall('0b1') + self.assertEqual(list(p), [0, 1, 2, 3, 4]) + p = a.findall('0b11') + self.assertEqual(list(p), [0, 1, 2, 3]) + p = a.findall('0b10') + self.assertEqual(list(p), []) + a = BitStream('0x4733eeff66554747335832434547') + p = a.findall('0x47', bytealigned=True) + self.assertEqual(list(p), [0, 6 * 8, 7 * 8, 13 * 8]) + p = a.findall('0x4733', bytealigned=True) + self.assertEqual(list(p), [0, 7 * 8]) + a = BitStream('0b1001001001001001001') + p = a.findall('0b1001', bytealigned=False) + self.assertEqual(list(p), [0, 3, 6, 9, 12, 15]) + self.assertEqual(a.pos, 15) + + def testFindAllGenerator(self): + a = BitStream('0xff1234512345ff1234ff12ff') + p = a.findall('0xff', bytealigned=True) + self.assertEqual(next(p), 0) + self.assertEqual(next(p), 6 * 8) + self.assertEqual(next(p), 9 * 8) + self.assertEqual(next(p), 11 * 8) + self.assertRaises(StopIteration, next, p) + + def testFindAllCount(self): + s = BitStream('0b1') * 100 + for i in [0, 1, 23]: + self.assertEqual(len(list(s.findall('0b1', count=i))), i) + b = s.findall('0b1', bytealigned=True, count=-1) + self.assertRaises(ValueError, next, b) + + def testContains(self): + a = BitStream('0b1') + '0x0001dead0001' + self.assertTrue('0xdead' in a) + self.assertEqual(a.pos, 0) + self.assertFalse('0xfeed' in a) + + def testRepr(self): + max = bitstring.MAX_CHARS + bls = ['', '0b1', '0o5', '0x43412424f41', '0b00101001010101'] + for bs in bls: + a = BitStream(bs) + b = eval(a.__repr__()) + self.assertTrue(a == b) + for f in [ConstBitStream(filename='test.m1v'), + ConstBitStream(filename='test.m1v', length=17), + ConstBitStream(filename='test.m1v', length=23, offset=23102)]: + f2 = eval(f.__repr__()) + 
self.assertEqual(f._datastore._rawarray.source.name, f2._datastore._rawarray.source.name) + self.assertTrue(f2.tobytes() == f.tobytes()) + a = BitStream('0b1') + self.assertEqual(repr(a), "BitStream('0b1')") + a += '0b11' + self.assertEqual(repr(a), "BitStream('0b111')") + a += '0b1' + self.assertEqual(repr(a), "BitStream('0xf')") + a *= max + self.assertEqual(repr(a), "BitStream('0x" + "f" * max + "')") + a += '0xf' + self.assertEqual(repr(a), "BitStream('0x" + "f" * max + "...') # length=%d" % (max * 4 + 4)) + + def testPrint(self): + s = BitStream(hex='0x00') + self.assertEqual('0x' + s.hex, s.__str__()) + s = BitStream(filename='test.m1v') + self.assertEqual('0x' + s[0:bitstring.MAX_CHARS * 4].hex + '...', s.__str__()) + self.assertEqual(BitStream().__str__(), '') + s = BitStream('0b11010') + self.assertEqual('0b' + s.bin, s.__str__()) + s = BitStream('0x12345678901234567890,0b1') + self.assertEqual('0x12345678901234567890, 0b1', s.__str__()) + + def testIter(self): + a = BitStream('0b001010') + b = BitStream() + for bit in a: + b.append(ConstBitStream(bool=bit)) + self.assertEqual(a, b) + + def testDelitem(self): + a = BitStream('0xffee') + del a[0:8] + self.assertEqual(a.hex, 'ee') + del a[0:8] + self.assertFalse(a) + del a[10:12] + self.assertFalse(a) + + def testNonZeroBitsAtStart(self): + a = BitStream(bytes=b'\xff', offset=2) + b = BitStream('0b00') + b += a + self.assertTrue(b == '0b0011 1111') + #self.assertEqual(a._datastore.rawbytes, b'\xff') + self.assertEqual(a.tobytes(), b'\xfc') + + def testNonZeroBitsAtEnd(self): + a = BitStream(bytes=b'\xff', length=5) + #self.assertEqual(a._datastore.rawbytes, b'\xff') + b = BitStream('0b00') + a += b + self.assertTrue(a == '0b1111100') + self.assertEqual(a.tobytes(), b'\xf8') + self.assertRaises(ValueError, a._getbytes) + + def testNewOffsetErrors(self): + self.assertRaises(bitstring.CreationError, BitStream, hex='ff', offset=-1) + self.assertRaises(bitstring.CreationError, BitStream, '0xffffffff', offset=33) 
+ + def testSliceStep(self): + a = BitStream('0x3') + b = a[::1] + self.assertEqual(a, b) + self.assertEqual(a[2:4:1], '0b11') + self.assertEqual(a[0:2:1], '0b00') + self.assertEqual(a[:3], '0o1') + + a = BitStream('0x0011223344556677') + self.assertEqual(a[-8:], '0x77') + self.assertEqual(a[:-24], '0x0011223344') + self.assertEqual(a[-1000:-24], '0x0011223344') + + def testInterestingSliceStep(self): + a = BitStream('0b0011000111') + self.assertEqual(a[7:3:-1], '0b1000') + self.assertEqual(a[9:2:-1], '0b1110001') + self.assertEqual(a[8:2:-2], '0b100') + self.assertEqual(a[100:-20:-3], '0b1010') + self.assertEqual(a[100:-20:-1], '0b1110001100') + self.assertEqual(a[10:2:-1], '0b1110001') + self.assertEqual(a[100:2:-1], '0b1110001') + + def testInsertionOrderAndBitpos(self): + b = BitStream() + b[0:0] = '0b0' + b[0:0] = '0b1' + self.assertEqual(b, '0b10') + self.assertEqual(b.bitpos, 1) + a = BitStream() + a.insert('0b0') + a.insert('0b1') + self.assertEqual(a, '0b01') + self.assertEqual(a.bitpos, 2) + + def testOverwriteOrderAndBitpos(self): + a = BitStream('0xff') + a.overwrite('0xa') + self.assertEqual(a, '0xaf') + self.assertEqual(a.bitpos, 4) + a.overwrite('0xb') + self.assertEqual(a, '0xab') + self.assertEqual(a.bitpos, 8) + self.assertRaises(ValueError, a.overwrite, '0b1') + a.overwrite('0xa', 4) + self.assertEqual(a, '0xaa') + self.assertEqual(a.bitpos, 8) + a.overwrite(a, 0) + self.assertEqual(a, '0xaa') + + def testInitSliceWithInt(self): + a = BitStream(length=8) + a[:] = 100 + self.assertEqual(a.uint, 100) + a[0] = 1 + self.assertEqual(a.bin, '11100100') + a[1] = 0 + self.assertEqual(a.bin, '10100100') + a[-1] = -1 + self.assertEqual(a.bin, '10100101') + a[-3:] = -2 + self.assertEqual(a.bin, '10100110') + + def testInitSliceWithIntErrors(self): + a = BitStream('0b0000') + self.assertRaises(ValueError, a.__setitem__, slice(0, 4), 16) + self.assertRaises(ValueError, a.__setitem__, slice(0, 4), -9) + self.assertRaises(ValueError, a.__setitem__, 0, 2) + 
self.assertRaises(ValueError, a.__setitem__, 0, -2) + + def testReverseWithSlice(self): + a = BitStream('0x0012ff') + a.reverse() + self.assertEqual(a, '0xff4800') + a.reverse(8, 16) + self.assertEqual(a, '0xff1200') + b = a[8:16] + b.reverse() + a[8:16] = b + self.assertEqual(a, '0xff4800') + + def testReverseWithSliceErrors(self): + a = BitStream('0x123') + self.assertRaises(ValueError, a.reverse, -1, 4) + self.assertRaises(ValueError, a.reverse, 10, 9) + self.assertRaises(ValueError, a.reverse, 1, 10000) + + def testInitialiseFromList(self): + a = BitStream([]) + self.assertFalse(a) + a = BitStream([True, False, [], [0], 'hello']) + self.assertEqual(a, '0b10011') + a += [] + self.assertEqual(a, '0b10011') + a += [True, False, True] + self.assertEqual(a, '0b10011101') + a.find([12, 23]) + self.assertEqual(a.pos, 3) + self.assertEqual([1, 0, False, True], BitStream('0b1001')) + a = [True] + BitStream('0b1') + self.assertEqual(a, '0b11') + + def testInitialiseFromTuple(self): + a = BitStream(()) + self.assertFalse(a) + a = BitStream((0, 1, '0', '1')) + self.assertEqual('0b0111', a) + a.replace((True, True), []) + self.assertEqual(a, (False, True)) + + def testCut(self): + a = BitStream('0x00112233445') + b = list(a.cut(8)) + self.assertEqual(b, ['0x00', '0x11', '0x22', '0x33', '0x44']) + b = list(a.cut(4, 8, 16)) + self.assertEqual(b, ['0x1', '0x1']) + b = list(a.cut(4, 0, 44, 4)) + self.assertEqual(b, ['0x0', '0x0', '0x1', '0x1']) + a = BitStream() + b = list(a.cut(10)) + self.assertTrue(not b) + + def testCutErrors(self): + a = BitStream('0b1') + b = a.cut(1, 1, 2) + self.assertRaises(ValueError, next, b) + b = a.cut(1, -2, 1) + self.assertRaises(ValueError, next, b) + b = a.cut(0) + self.assertRaises(ValueError, next, b) + b = a.cut(1, count=-1) + self.assertRaises(ValueError, next, b) + + def testCutProblem(self): + s = BitStream('0x1234') + for n in list(s.cut(4)): + s.prepend(n) + self.assertEqual(s, '0x43211234') + + def testJoinFunctions(self): + a = 
BitStream().join(['0xa', '0xb', '0b1111']) + self.assertEqual(a, '0xabf') + a = BitStream('0b1').join(['0b0' for i in range(10)]) + self.assertEqual(a, '0b0101010101010101010') + a = BitStream('0xff').join([]) + self.assertFalse(a) + + def testAddingBitpos(self): + a = BitStream('0xff') + b = BitStream('0x00') + a.bitpos = b.bitpos = 8 + c = a + b + self.assertEqual(c.bitpos, 0) + + def testIntelligentRead1(self): + a = BitStream(uint=123, length=23) + u = a.read('uint:23') + self.assertEqual(u, 123) + self.assertEqual(a.pos, a.len) + b = BitStream(int=-12, length=44) + i = b.read('int:44') + self.assertEqual(i, -12) + self.assertEqual(b.pos, b.len) + u2, i2 = (a + b).readlist('uint:23, int:44') + self.assertEqual((u2, i2), (123, -12)) + + def testIntelligentRead2(self): + a = BitStream(ue=822) + u = a.read('ue') + self.assertEqual(u, 822) + self.assertEqual(a.pos, a.len) + b = BitStream(se=-1001) + s = b.read('se') + self.assertEqual(s, -1001) + self.assertEqual(b.pos, b.len) + s, u1, u2 = (b + 2 * a).readlist('se, ue, ue') + self.assertEqual((s, u1, u2), (-1001, 822, 822)) + + def testIntelligentRead3(self): + a = BitStream('0x123') + '0b11101' + h = a.read('hex:12') + self.assertEqual(h, '123') + b = a.read('bin: 5') + self.assertEqual(b, '11101') + c = '0b' + b + a + b, h = c.readlist('bin:5, hex:12') + self.assertEqual((b, h), ('11101', '123')) + + def testIntelligentRead4(self): + a = BitStream('0o007') + o = a.read('oct:9') + self.assertEqual(o, '007') + self.assertEqual(a.pos, a.len) + + def testIntelligentRead5(self): + a = BitStream('0x00112233') + c0, c1, c2 = a.readlist('bits:8, bits:8, bits:16') + self.assertEqual((c0, c1, c2), (BitStream('0x00'), BitStream('0x11'), BitStream('0x2233'))) + a.pos = 0 + c = a.read('bits:16') + self.assertEqual(c, BitStream('0x0011')) + + def testIntelligentRead6(self): + a = BitStream('0b000111000') + b1, b2, b3 = a.readlist('bin :3, int: 3, int:3') + self.assertEqual(b1, '000') + self.assertEqual(b2, -1) + 
self.assertEqual(b3, 0) + + def testIntelligentRead7(self): + a = BitStream('0x1234') + a1, a2, a3, a4 = a.readlist('bin:0, oct:0, hex:0, bits:0') + self.assertTrue(a1 == a2 == a3 == '') + self.assertFalse(a4) + self.assertRaises(ValueError, a.read, 'int:0') + self.assertRaises(ValueError, a.read, 'uint:0') + self.assertEqual(a.pos, 0) + + def testIntelligentRead8(self): + a = BitStream('0x123456') + for t in ['hex:1', 'oct:1', 'hex4', '-5', 'fred', 'bin:-2', + 'uint:p', 'uint:-2', 'int:u', 'int:-3', 'ses', 'uee', '-14']: + self.assertRaises(ValueError, a.read, t) + + def testIntelligentRead9(self): + a = BitStream('0xff') + self.assertEqual(a.read('intle'), -1) + + def testFillerReads1(self): + s = BitStream('0x012345') + t = s.read('bits') + self.assertEqual(s, t) + s.pos = 0 + a, b = s.readlist('hex:8, hex') + self.assertEqual(a, '01') + self.assertEqual(b, '2345') + self.assertTrue(isinstance(b, str)) + s.bytepos = 0 + a, b = s.readlist('bin, hex:20') + self.assertEqual(a, '0000') + self.assertEqual(b, '12345') + self.assertTrue(isinstance(a, str)) + + def testFillerReads2(self): + s = BitStream('0xabcdef') + self.assertRaises(bitstring.Error, s.readlist, 'bits, se') + self.assertRaises(bitstring.Error, s.readlist, 'hex:4, bits, ue, bin:4') + s.pos = 0 + self.assertRaises(bitstring.Error, s.readlist, 'bin, bin') + + def testIntelligentPeek(self): + a = BitStream('0b01, 0x43, 0o4, uint:23=2, se=5, ue=3') + b, c, e = a.peeklist('bin:2, hex:8, oct:3') + self.assertEqual((b, c, e), ('01', '43', '4')) + self.assertEqual(a.pos, 0) + a.pos = 13 + f, g, h = a.peeklist('uint:23, se, ue') + self.assertEqual((f, g, h), (2, 5, 3)) + self.assertEqual(a.pos, 13) + + def testReadMultipleBits(self): + s = BitStream('0x123456789abcdef') + a, b = s.readlist([4, 4]) + self.assertEqual(a, '0x1') + self.assertEqual(b, '0x2') + c, d, e = s.readlist([8, 16, 8]) + self.assertEqual(c, '0x34') + self.assertEqual(d, '0x5678') + self.assertEqual(e, '0x9a') + + def 
testPeekMultipleBits(self): + s = BitStream('0b1101, 0o721, 0x2234567') + a, b, c, d = s.peeklist([2, 1, 1, 9]) + self.assertEqual(a, '0b11') + self.assertEqual(bool(b), False) + self.assertEqual(bool(c), True) + self.assertEqual(d, '0o721') + self.assertEqual(s.pos, 0) + a, b = s.peeklist([4, 9]) + self.assertEqual(a, '0b1101') + self.assertEqual(b, '0o721') + s.pos = 13 + a, b = s.peeklist([16, 8]) + self.assertEqual(a, '0x2234') + self.assertEqual(b, '0x56') + self.assertEqual(s.pos, 13) + + def testDifficultPrepends(self): + a = BitStream('0b1101011') + b = BitStream() + for i in range(10): + b.prepend(a) + self.assertEqual(b, a * 10) + + def testPackingWrongNumberOfThings(self): + self.assertRaises(bitstring.CreationError, pack, 'bin:1') + self.assertRaises(bitstring.CreationError, pack, '', 100) + + def testPackWithVariousKeys(self): + a = pack('uint10', uint10='0b1') + self.assertEqual(a, '0b1') + b = pack('0b110', **{'0b110': '0xfff'}) + self.assertEqual(b, '0xfff') + + def testPackWithVariableLength(self): + for i in range(1, 11): + a = pack('uint:n', 0, n=i) + self.assertEqual(a.bin, '0' * i) + + def testToBytes(self): + a = BitStream(bytes=b'\xab\x00') + b = a.tobytes() + self.assertEqual(a.bytes, b) + for i in range(7): + del a[-1:] + self.assertEqual(a.tobytes(), b'\xab\x00') + del a[-1:] + self.assertEqual(a.tobytes(), b'\xab') + + def testToFile(self): + a = BitStream('0x0000ff')[:17] + f = open('temp_bitstring_unit_testing_file', 'wb') + a.tofile(f) + f.close() + b = BitStream(filename='temp_bitstring_unit_testing_file') + self.assertEqual(b, '0x000080') + + a = BitStream('0x911111') + del a[:1] + self.assertEqual(a + '0b0', '0x222222') + f = open('temp_bitstring_unit_testing_file', 'wb') + a.tofile(f) + f.close() + b = BitStream(filename='temp_bitstring_unit_testing_file') + self.assertEqual(b, '0x222222') + os.remove('temp_bitstring_unit_testing_file') + + #def testToFileWithLargerFile(self): + # a = BitStream(length=16000000) + # a[1] = '0b1' + # 
a[-2] = '0b1' + # f = open('temp_bitstring_unit_testing_file' ,'wb') + # a.tofile(f) + # f.close() + # b = BitStream(filename='temp_bitstring_unit_testing_file') + # self.assertEqual(b.len, 16000000) + # self.assertEqual(b[1], True) + # + # f = open('temp_bitstring_unit_testing_file' ,'wb') + # a[1:].tofile(f) + # f.close() + # b = BitStream(filename='temp_bitstring_unit_testing_file') + # self.assertEqual(b.len, 16000000) + # self.assertEqual(b[0], True) + # os.remove('temp_bitstring_unit_testing_file') + + def testTokenParser(self): + tp = bitstring.tokenparser + self.assertEqual(tp('hex'), (True, [('hex', None, None)])) + self.assertEqual(tp('hex=14'), (True, [('hex', None, '14')])) + self.assertEqual(tp('se'), (False, [('se', None, None)])) + self.assertEqual(tp('ue=12'), (False, [('ue', None, '12')])) + self.assertEqual(tp('0xef'), (False, [('0x', None, 'ef')])) + self.assertEqual(tp('uint:12'), (False, [('uint', 12, None)])) + self.assertEqual(tp('int:30=-1'), (False, [('int', 30, '-1')])) + self.assertEqual(tp('bits:10'), (False, [('bits', 10, None)])) + self.assertEqual(tp('bits:10'), (False, [('bits', 10, None)])) + self.assertEqual(tp('123'), (False, [('uint', 123, None)])) + self.assertEqual(tp('123'), (False, [('uint', 123, None)])) + self.assertRaises(ValueError, tp, 'hex12') + self.assertEqual(tp('hex12', ('hex12',)), (False, [('hex12', None, None)])) + self.assertEqual(tp('2*bits:6'), (False, [('bits', 6, None), ('bits', 6, None)])) + + def testAutoFromFileObject(self): + with open('test.m1v', 'rb') as f: + s = ConstBitStream(f, offset=32, length=12) + self.assertEqual(s.uint, 352) + t = ConstBitStream('0xf') + f + self.assertTrue(t.startswith('0xf000001b3160')) + s2 = ConstBitStream(f) + t2 = BitStream('0xc') + t2.prepend(s2) + self.assertTrue(t2.startswith('0x000001b3')) + self.assertTrue(t2.endswith('0xc')) + with open('test.m1v', 'rb') as b: + u = BitStream(bytes=b.read()) + # TODO: u == s2 is much slower than u.bytes == s2.bytes + 
self.assertEqual(u.bytes, s2.bytes) + + def testFileBasedCopy(self): + with open('smalltestfile', 'rb') as f: + s = BitStream(f) + t = BitStream(s) + s.prepend('0b1') + self.assertEqual(s[1:], t) + s = BitStream(f) + t = copy.copy(s) + t.append('0b1') + self.assertEqual(s, t[:-1]) + + def testBigEndianSynonyms(self): + s = BitStream('0x12318276ef') + self.assertEqual(s.int, s.intbe) + self.assertEqual(s.uint, s.uintbe) + s = BitStream(intbe=-100, length=16) + self.assertEqual(s, 'int:16=-100') + s = BitStream(uintbe=13, length=24) + self.assertEqual(s, 'int:24=13') + s = BitStream('uintbe:32=1000') + self.assertEqual(s, 'uint:32=1000') + s = BitStream('intbe:8=2') + self.assertEqual(s, 'int:8=2') + self.assertEqual(s.read('intbe'), 2) + s.pos = 0 + self.assertEqual(s.read('uintbe'), 2) + + def testBigEndianSynonymErrors(self): + self.assertRaises(bitstring.CreationError, BitStream, uintbe=100, length=15) + self.assertRaises(bitstring.CreationError, BitStream, intbe=100, length=15) + self.assertRaises(bitstring.CreationError, BitStream, 'uintbe:17=100') + self.assertRaises(bitstring.CreationError, BitStream, 'intbe:7=2') + s = BitStream('0b1') + self.assertRaises(bitstring.InterpretError, s._getintbe) + self.assertRaises(bitstring.InterpretError, s._getuintbe) + self.assertRaises(ValueError, s.read, 'uintbe') + self.assertRaises(ValueError, s.read, 'intbe') + + def testLittleEndianUint(self): + s = BitStream(uint=100, length=16) + self.assertEqual(s.uintle, 25600) + s = BitStream(uintle=100, length=16) + self.assertEqual(s.uint, 25600) + self.assertEqual(s.uintle, 100) + s.uintle += 5 + self.assertEqual(s.uintle, 105) + s = BitStream('uintle:32=999') + self.assertEqual(s.uintle, 999) + s.byteswap() + self.assertEqual(s.uint, 999) + s = pack('uintle:24', 1001) + self.assertEqual(s.uintle, 1001) + self.assertEqual(s.length, 24) + self.assertEqual(s.read('uintle'), 1001) + + def testLittleEndianInt(self): + s = BitStream(int=100, length=16) + self.assertEqual(s.intle, 
25600) + s = BitStream(intle=100, length=16) + self.assertEqual(s.int, 25600) + self.assertEqual(s.intle, 100) + s.intle += 5 + self.assertEqual(s.intle, 105) + s = BitStream('intle:32=999') + self.assertEqual(s.intle, 999) + s.byteswap() + self.assertEqual(s.int, 999) + s = pack('intle:24', 1001) + self.assertEqual(s.intle, 1001) + self.assertEqual(s.length, 24) + self.assertEqual(s.read('intle'), 1001) + + def testLittleEndianErrors(self): + self.assertRaises(bitstring.CreationError, BitStream, 'uintle:15=10') + self.assertRaises(bitstring.CreationError, BitStream, 'intle:31=-999') + self.assertRaises(bitstring.CreationError, BitStream, uintle=100, length=15) + self.assertRaises(bitstring.CreationError, BitStream, intle=100, length=15) + s = BitStream('0xfff') + self.assertRaises(bitstring.InterpretError, s._getintle) + self.assertRaises(bitstring.InterpretError, s._getuintle) + self.assertRaises(ValueError, s.read, 'uintle') + self.assertRaises(ValueError, s.read, 'intle') + + def testStructTokens1(self): + self.assertEqual(pack('b', 23), BitStream('intbe:8=23')) + self.assertEqual(pack('>B', 23), BitStream('uintbe:8=23')) + self.assertEqual(pack('>h', 23), BitStream('intbe:16=23')) + self.assertEqual(pack('>H', 23), BitStream('uintbe:16=23')) + self.assertEqual(pack('>l', 23), BitStream('intbe:32=23')) + self.assertEqual(pack('>L', 23), BitStream('uintbe:32=23')) + self.assertEqual(pack('>q', 23), BitStream('intbe:64=23')) + self.assertEqual(pack('>Q', 23), BitStream('uintbe:64=23')) + self.assertRaises(bitstring.CreationError, pack, '2L', 40, 40)) + + def testStructTokens2(self): + s = pack('>hhl', 1, 2, 3) + a, b, c = s.unpack('>hhl') + self.assertEqual((a, b, c), (1, 2, 3)) + s = pack('Q \tL', 1001, 43, 21, 9999) + self.assertEqual(s.unpack('QL'), [1001, 43, 21, 9999]) + + def testStructTokensMultiplicativeFactors(self): + s = pack('<2h', 1, 2) + a, b = s.unpack('<2h') + self.assertEqual((a, b), (1, 2)) + s = pack('<100q', *range(100)) + 
self.assertEqual(s.len, 100 * 64) + self.assertEqual(s[44*64:45*64].uintle, 44) + s = pack('@L0B2h', 5, 5, 5) + self.assertEqual(s.unpack('@Lhh'), [5, 5, 5]) + + def testStructTokensErrors(self): + for f in ['>>q', '<>q', 'q>', '2q', 'q', '>-2q', '@a', '>int:8', '>q2']: + self.assertRaises(bitstring.CreationError, pack, f, 100) + + def testImmutableBitStreams(self): + a = ConstBitStream('0x012345') + self.assertEqual(a, '0x012345') + b = BitStream('0xf') + a + self.assertEqual(b, '0xf012345') + try: + a.append(b) + self.assertTrue(False) + except AttributeError: + pass + try: + a.prepend(b) + self.assertTrue(False) + except AttributeError: + pass + try: + a[0] = '0b1' + self.assertTrue(False) + except TypeError: + pass + try: + del a[5] + self.assertTrue(False) + except TypeError: + pass + try: + a.replace('0b1', '0b0') + self.assertTrue(False) + except AttributeError: + pass + try: + a.insert('0b11', 4) + self.assertTrue(False) + except AttributeError: + pass + try: + a.reverse() + self.assertTrue(False) + except AttributeError: + pass + try: + a.reversebytes() + self.assertTrue(False) + except AttributeError: + pass + self.assertEqual(a, '0x012345') + self.assertTrue(isinstance(a, ConstBitStream)) + + def testReverseBytes(self): + a = BitStream('0x123456') + a.byteswap() + self.assertEqual(a, '0x563412') + b = a + '0b1' + b.byteswap() + self.assertEqual('0x123456, 0b1', b) + a = BitStream('0x54') + a.byteswap() + self.assertEqual(a, '0x54') + a = BitStream() + a.byteswap() + self.assertFalse(a) + + def testReverseBytes2(self): + a = BitStream() + a.byteswap() + self.assertFalse(a) + a = BitStream('0x00112233') + a.byteswap(0, 0, 16) + self.assertEqual(a, '0x11002233') + a.byteswap(0, 4, 28) + self.assertEqual(a, '0x12302103') + a.byteswap(start=0, end=18) + self.assertEqual(a, '0x30122103') + self.assertRaises(ValueError, a.byteswap, 0, 10, 2) + self.assertRaises(ValueError, a.byteswap, 0, -4, 4) + self.assertRaises(ValueError, a.byteswap, 0, 24, 48) + 
a.byteswap(0, 24) + self.assertEqual(a, '0x30122103') + a.byteswap(0, 11, 11) + self.assertEqual(a, '0x30122103') + + def testCapitalsInPack(self): + a = pack('A', A='0b1') + self.assertEqual(a, '0b1') + format = 'bits:4=BL_OFFT, uint:12=width, uint:12=height' + d = {'BL_OFFT': '0b1011', 'width': 352, 'height': 288} + s = bitstring.pack(format, **d) + self.assertEqual(s, '0b1011, uint:12=352, uint:12=288') + a = pack('0X0, uint:8, hex', 45, '0XABcD') + self.assertEqual(a, '0x0, uint:8=45, 0xabCD') + + def testOtherCapitals(self): + a = ConstBitStream('0XABC, 0O0, 0B11') + self.assertEqual(a, 'hex=0Xabc, oct=0, bin=0B11') + + def testEfficientOverwrite(self): + a = BitStream(1000000000) + a.overwrite([1], 123456) + self.assertEqual(a[123456], True) + a.overwrite('0xff', 1) + self.assertEqual(a[0:32:1], '0x7f800000') + b = BitStream('0xffff') + b.overwrite('0x0000') + self.assertEqual(b, '0x0000') + self.assertEqual(b.pos, 16) + c = BitStream(length=1000) + c.overwrite('0xaaaaaaaaaaaa', 81) + self.assertEqual(c[81:81 + 6 * 8], '0xaaaaaaaaaaaa') + self.assertEqual(len(list(c.findall('0b1'))), 24) + s = BitStream(length=1000) + s = s[5:] + s.overwrite('0xffffff', 500) + s.pos = 500 + self.assertEqual(s.read(4 * 8), '0xffffff00') + s.overwrite('0xff', 502) + self.assertEqual(s[502:518], '0xffff') + + def testPeekAndReadListErrors(self): + a = BitStream('0x123456') + self.assertRaises(ValueError, a.read, 'hex:8, hex:8') + self.assertRaises(ValueError, a.peek, 'hex:8, hex:8') + self.assertRaises(TypeError, a.read, 10, 12) + self.assertRaises(TypeError, a.peek, 12, 14) + self.assertRaises(TypeError, a.read, 8, 8) + self.assertRaises(TypeError, a.peek, 80, 80) + + def testStartswith(self): + a = BitStream() + self.assertTrue(a.startswith(BitStream())) + self.assertFalse(a.startswith('0b0')) + a = BitStream('0x12ff') + self.assertTrue(a.startswith('0x1')) + self.assertTrue(a.startswith('0b0001001')) + self.assertTrue(a.startswith('0x12ff')) + 
self.assertFalse(a.startswith('0x12ff, 0b1')) + self.assertFalse(a.startswith('0x2')) + + def testStartswithStartEnd(self): + s = BitStream('0x123456') + self.assertTrue(s.startswith('0x234', 4)) + self.assertFalse(s.startswith('0x123', end=11)) + self.assertTrue(s.startswith('0x123', end=12)) + self.assertTrue(s.startswith('0x34', 8, 16)) + self.assertFalse(s.startswith('0x34', 7, 16)) + self.assertFalse(s.startswith('0x34', 9, 16)) + self.assertFalse(s.startswith('0x34', 8, 15)) + + def testEndswith(self): + a = BitStream() + self.assertTrue(a.endswith('')) + self.assertFalse(a.endswith(BitStream('0b1'))) + a = BitStream('0xf2341') + self.assertTrue(a.endswith('0x41')) + self.assertTrue(a.endswith('0b001')) + self.assertTrue(a.endswith('0xf2341')) + self.assertFalse(a.endswith('0x1f2341')) + self.assertFalse(a.endswith('0o34')) + + def testEndswithStartEnd(self): + s = BitStream('0x123456') + self.assertTrue(s.endswith('0x234', end=16)) + self.assertFalse(s.endswith('0x456', start=13)) + self.assertTrue(s.endswith('0x456', start=12)) + self.assertTrue(s.endswith('0x34', 8, 16)) + self.assertTrue(s.endswith('0x34', 7, 16)) + self.assertFalse(s.endswith('0x34', 9, 16)) + self.assertFalse(s.endswith('0x34', 8, 15)) + + def testUnhashability(self): + s = BitStream('0xf') + self.assertRaises(TypeError, set, [s]) + self.assertRaises(TypeError, hash, [s]) + + def testConstBitStreamSetCreation(self): + sl = [ConstBitStream(uint=i, length=7) for i in range(15)] + s = set(sl) + self.assertEqual(len(s), 15) + s.add(ConstBitStream('0b0000011')) + self.assertEqual(len(s), 15) + self.assertRaises(TypeError, s.add, BitStream('0b0000011')) + + def testConstBitStreamFunctions(self): + s = ConstBitStream('0xf, 0b1') + self.assertEqual(type(s), ConstBitStream) + t = copy.copy(s) + self.assertEqual(type(t), ConstBitStream) + a = s + '0o3' + self.assertEqual(type(a), ConstBitStream) + b = a[0:4] + self.assertEqual(type(b), ConstBitStream) + b = a[4:3] + self.assertEqual(type(b), 
ConstBitStream) + b = a[5:2:-1] + self.assertEqual(type(b), ConstBitStream) + b = ~a + self.assertEqual(type(b), ConstBitStream) + b = a << 2 + self.assertEqual(type(b), ConstBitStream) + b = a >> 2 + self.assertEqual(type(b), ConstBitStream) + b = a * 2 + self.assertEqual(type(b), ConstBitStream) + b = a * 0 + self.assertEqual(type(b), ConstBitStream) + b = a & ~a + self.assertEqual(type(b), ConstBitStream) + b = a | ~a + self.assertEqual(type(b), ConstBitStream) + b = a ^ ~a + self.assertEqual(type(b), ConstBitStream) + b = a._slice(4, 4) + self.assertEqual(type(b), ConstBitStream) + b = a.read(4) + self.assertEqual(type(b), ConstBitStream) + + def testConstBitStreamProperties(self): + a = ConstBitStream('0x123123') + try: + a.hex = '0x234' + self.assertTrue(False) + except AttributeError: + pass + try: + a.oct = '0o234' + self.assertTrue(False) + except AttributeError: + pass + try: + a.bin = '0b101' + self.assertTrue(False) + except AttributeError: + pass + try: + a.ue = 3453 + self.assertTrue(False) + except AttributeError: + pass + try: + a.se = -123 + self.assertTrue(False) + except AttributeError: + pass + try: + a.int = 432 + self.assertTrue(False) + except AttributeError: + pass + try: + a.uint = 4412 + self.assertTrue(False) + except AttributeError: + pass + try: + a.intle = 123 + self.assertTrue(False) + except AttributeError: + pass + try: + a.uintle = 4412 + self.assertTrue(False) + except AttributeError: + pass + try: + a.intbe = 123 + self.assertTrue(False) + except AttributeError: + pass + try: + a.uintbe = 4412 + self.assertTrue(False) + except AttributeError: + pass + try: + a.intne = 123 + self.assertTrue(False) + except AttributeError: + pass + try: + a.uintne = 4412 + self.assertTrue(False) + except AttributeError: + pass + try: + a.bytes = b'hello' + self.assertTrue(False) + except AttributeError: + pass + + def testConstBitStreamMisc(self): + a = ConstBitStream('0xf') + b = a + a += '0xe' + self.assertEqual(b, '0xf') + self.assertEqual(a, 
'0xfe') + c = BitStream(a) + self.assertEqual(a, c) + a = ConstBitStream('0b1') + a._append(a) + self.assertEqual(a, '0b11') + self.assertEqual(type(a), ConstBitStream) + a._prepend(a) + self.assertEqual(a, '0b1111') + self.assertEqual(type(a), ConstBitStream) + + def testConstBitStreamHashibility(self): + a = ConstBitStream('0x1') + b = ConstBitStream('0x2') + c = ConstBitStream('0x1') + c.pos = 3 + s = set((a, b, c)) + self.assertEqual(len(s), 2) + self.assertEqual(hash(a), hash(c)) + + def testConstBitStreamCopy(self): + a = ConstBitStream('0xabc') + a.pos = 11 + b = copy.copy(a) + b.pos = 4 + self.assertEqual(id(a._datastore), id(b._datastore)) + self.assertEqual(a.pos, 11) + self.assertEqual(b.pos, 4) + + def testPython26stuff(self): + s = BitStream('0xff') + self.assertTrue(isinstance(s.tobytes(), bytes)) + self.assertTrue(isinstance(s.bytes, bytes)) + + def testReadFromBits(self): + a = ConstBitStream('0xaabbccdd') + b = a.read(8) + self.assertEqual(b, '0xaa') + self.assertEqual(a[0:8], '0xaa') + self.assertEqual(a[-1], True) + a.pos = 0 + self.assertEqual(a.read(4).uint, 10) + + +class Set(unittest.TestCase): + def testSet(self): + a = BitStream(length=16) + a.set(True, 0) + self.assertEqual(a, '0b10000000 00000000') + a.set(1, 15) + self.assertEqual(a, '0b10000000 00000001') + b = a[4:12] + b.set(True, 1) + self.assertEqual(b, '0b01000000') + b.set(True, -1) + self.assertEqual(b, '0b01000001') + b.set(1, -8) + self.assertEqual(b, '0b11000001') + self.assertRaises(IndexError, b.set, True, -9) + self.assertRaises(IndexError, b.set, True, 8) + + def testSetNegativeIndex(self): + a = BitStream(10) + a.set(1, -1) + self.assertEqual(a.bin, '0000000001') + a.set(1, [-1, -10]) + self.assertEqual(a.bin, '1000000001') + self.assertRaises(IndexError, a.set, 1, [-11]) + + def testFileBasedSetUnset(self): + a = BitStream(filename='test.m1v') + a.set(True, (0, 1, 2, 3, 4)) + self.assertEqual(a[0:32], '0xf80001b3') + a = BitStream(filename='test.m1v') + a.set(False, (28, 
29, 30, 31)) + self.assertTrue(a.startswith('0x000001b0')) + + def testSetList(self): + a = BitStream(length=18) + a.set(True, range(18)) + self.assertEqual(a.int, -1) + a.set(False, range(18)) + self.assertEqual(a.int, 0) + + def testUnset(self): + a = BitStream(length=16, int=-1) + a.set(False, 0) + self.assertEqual(~a, '0b10000000 00000000') + a.set(0, 15) + self.assertEqual(~a, '0b10000000 00000001') + b = a[4:12] + b.set(False, 1) + self.assertEqual(~b, '0b01000000') + b.set(False, -1) + self.assertEqual(~b, '0b01000001') + b.set(False, -8) + self.assertEqual(~b, '0b11000001') + self.assertRaises(IndexError, b.set, False, -9) + self.assertRaises(IndexError, b.set, False, 8) + + def testSetWholeBitStream(self): + a = BitStream(14) + a.set(1) + self.assertTrue(a.all(1)) + a.set(0) + self.assertTrue(a.all(0)) + + +class Invert(unittest.TestCase): + def testInvertBits(self): + a = BitStream('0b111000') + a.invert(range(a.len)) + self.assertEqual(a, '0b000111') + a.invert([0, 1, -1]) + self.assertEqual(a, '0b110110') + + def testInvertWholeBitStream(self): + a = BitStream('0b11011') + a.invert() + self.assertEqual(a, '0b00100') + + def testInvertSingleBit(self): + a = BitStream('0b000001') + a.invert(0) + self.assertEqual(a.bin, '100001') + a.invert(-1) + self.assertEqual(a.bin, '100000') + + def testInvertErrors(self): + a = BitStream(10) + self.assertRaises(IndexError, a.invert, 10) + self.assertRaises(IndexError, a.invert, -11) + self.assertRaises(IndexError, a.invert, [1, 2, 10]) + + + ####################### + + def testIor(self): + a = BitStream('0b1101001') + a |= '0b1110000' + self.assertEqual(a, '0b1111001') + b = a[2:] + c = a[1:-1] + b |= c + self.assertEqual(c, '0b11100') + self.assertEqual(b, '0b11101') + + def testIand(self): + a = BitStream('0b0101010101000') + a &= '0b1111110000000' + self.assertEqual(a, '0b0101010000000') + s = BitStream(filename='test.m1v', offset=26, length=24) + s &= '0xff00ff' + self.assertEqual(s, '0xcc0004') + + def 
testIxor(self): + a = BitStream('0b11001100110011') + a ^= '0b11111100000010' + self.assertEqual(a, '0b00110000110001') + + def testLogicalInplaceErrors(self): + a = BitStream(4) + self.assertRaises(ValueError, a.__ior__, '0b111') + self.assertRaises(ValueError, a.__iand__, '0b111') + self.assertRaises(ValueError, a.__ixor__, '0b111') + + +class AllAndAny(unittest.TestCase): + def testAll(self): + a = BitStream('0b0111') + self.assertTrue(a.all(True, (1, 3))) + self.assertFalse(a.all(True, (0, 1, 2))) + self.assertTrue(a.all(True, [-1])) + self.assertFalse(a.all(True, [0])) + + def testFileBasedAll(self): + a = BitStream(filename='test.m1v') + self.assertTrue(a.all(True, [31])) + a = BitStream(filename='test.m1v') + self.assertTrue(a.all(False, (0, 1, 2, 3, 4))) + + def testFileBasedAny(self): + a = BitStream(filename='test.m1v') + self.assertTrue(a.any(True, (31, 12))) + a = BitStream(filename='test.m1v') + self.assertTrue(a.any(False, (0, 1, 2, 3, 4))) + + def testAny(self): + a = BitStream('0b10011011') + self.assertTrue(a.any(True, (1, 2, 3, 5))) + self.assertFalse(a.any(True, (1, 2, 5))) + self.assertTrue(a.any(True, (-1,))) + self.assertFalse(a.any(True, (1,))) + + def testAllFalse(self): + a = BitStream('0b0010011101') + self.assertTrue(a.all(False, (0, 1, 3, 4))) + self.assertFalse(a.all(False, (0, 1, 2, 3, 4))) + + def testAnyFalse(self): + a = BitStream('0b01001110110111111111111111111') + self.assertTrue(a.any(False, (4, 5, 6, 2))) + self.assertFalse(a.any(False, (1, 15, 20))) + + def testAnyEmptyBitstring(self): + a = ConstBitStream() + self.assertFalse(a.any(True)) + self.assertFalse(a.any(False)) + + def testAllEmptyBitStream(self): + a = ConstBitStream() + self.assertTrue(a.all(True)) + self.assertTrue(a.all(False)) + + def testAnyWholeBitstring(self): + a = ConstBitStream('0xfff') + self.assertTrue(a.any(True)) + self.assertFalse(a.any(False)) + + def testAllWholeBitstring(self): + a = ConstBitStream('0xfff') + self.assertTrue(a.all(True)) + 
self.assertFalse(a.all(False)) + + def testErrors(self): + a = BitStream('0xf') + self.assertRaises(IndexError, a.all, True, [5]) + self.assertRaises(IndexError, a.all, True, [-5]) + self.assertRaises(IndexError, a.any, True, [5]) + self.assertRaises(IndexError, a.any, True, [-5]) + + ################### + + def testFloatInitialisation(self): + for f in (0.0000001, -1.0, 1.0, 0.2, -3.1415265, 1.331e32): + a = BitStream(float=f, length=64) + a.pos = 6 + self.assertEqual(a.float, f) + a = BitStream('float:64=%s' % str(f)) + a.pos = 6 + self.assertEqual(a.float, f) + a = BitStream('floatbe:64=%s' % str(f)) + a.pos = 6 + self.assertEqual(a.floatbe, f) + a = BitStream('floatle:64=%s' % str(f)) + a.pos = 6 + self.assertEqual(a.floatle, f) + a = BitStream('floatne:64=%s' % str(f)) + a.pos = 6 + self.assertEqual(a.floatne, f) + b = BitStream(float=f, length=32) + b.pos = 6 + self.assertAlmostEqual(b.float / f, 1.0) + b = BitStream('float:32=%s' % str(f)) + b.pos = 6 + self.assertAlmostEqual(b.float / f, 1.0) + b = BitStream('floatbe:32=%s' % str(f)) + b.pos = 6 + self.assertAlmostEqual(b.floatbe / f, 1.0) + b = BitStream('floatle:32=%s' % str(f)) + b.pos = 6 + self.assertAlmostEqual(b.floatle / f, 1.0) + b = BitStream('floatne:32=%s' % str(f)) + b.pos = 6 + self.assertAlmostEqual(b.floatne / f, 1.0) + a = BitStream('0x12345678') + a.pos = 6 + a.float = 23 + self.assertEqual(a.float, 23.0) + + def testFloatInitStrings(self): + for s in ('5', '+0.0001', '-1e101', '4.', '.2', '-.65', '43.21E+32'): + a = BitStream('float:64=%s' % s) + self.assertEqual(a.float, float(s)) + + def testFloatPacking(self): + a = pack('>d', 0.01) + self.assertEqual(a.float, 0.01) + self.assertEqual(a.floatbe, 0.01) + a.byteswap() + self.assertEqual(a.floatle, 0.01) + b = pack('>f', 1e10) + self.assertAlmostEqual(b.float / 1e10, 1.0) + c = pack('5d', 10.0, 5.0, 2.5, 1.25, 0.1) + self.assertEqual(d.unpack('>5d'), [10.0, 5.0, 2.5, 1.25, 0.1]) + + def testFloatReading(self): + a = 
BitStream('floatle:64=12, floatbe:64=-0.01, floatne:64=3e33') + x, y, z = a.readlist('floatle:64, floatbe:64, floatne:64') + self.assertEqual(x, 12.0) + self.assertEqual(y, -0.01) + self.assertEqual(z, 3e33) + a = BitStream('floatle:32=12, floatbe:32=-0.01, floatne:32=3e33') + x, y, z = a.readlist('floatle:32, floatbe:32, floatne:32') + self.assertAlmostEqual(x / 12.0, 1.0) + self.assertAlmostEqual(y / -0.01, 1.0) + self.assertAlmostEqual(z / 3e33, 1.0) + a = BitStream('0b11, floatle:64=12, 0xfffff') + a.pos = 2 + self.assertEqual(a.read('floatle:64'), 12.0) + b = BitStream(floatle=20, length=32) + b.floatle = 10.0 + b = [0] + b + self.assertEqual(b[1:].floatle, 10.0) + + def testNonAlignedFloatReading(self): + s = BitStream('0b1, float:32 = 10.0') + x, y = s.readlist('1, float:32') + self.assertEqual(y, 10.0) + s[1:] = 'floatle:32=20.0' + x, y = s.unpack('1, floatle:32') + self.assertEqual(y, 20.0) + + def testFloatErrors(self): + a = BitStream('0x3') + self.assertRaises(bitstring.InterpretError, a._getfloat) + self.assertRaises(bitstring.CreationError, a._setfloat, -0.2) + for l in (8, 10, 12, 16, 30, 128, 200): + self.assertRaises(ValueError, BitStream, float=1.0, length=l) + self.assertRaises(bitstring.CreationError, BitStream, floatle=0.3, length=0) + self.assertRaises(bitstring.CreationError, BitStream, floatle=0.3, length=1) + self.assertRaises(bitstring.CreationError, BitStream, float=2) + self.assertRaises(bitstring.InterpretError, a.read, 'floatle:2') + + def testReadErrorChangesPos(self): + a = BitStream('0x123123') + try: + a.read('10, 5') + except ValueError: + pass + self.assertEqual(a.pos, 0) + + def testRor(self): + a = BitStream('0b11001') + a.ror(0) + self.assertEqual(a, '0b11001') + a.ror(1) + self.assertEqual(a, '0b11100') + a.ror(5) + self.assertEqual(a, '0b11100') + a.ror(101) + self.assertEqual(a, '0b01110') + a = BitStream('0b1') + a.ror(1000000) + self.assertEqual(a, '0b1') + + def testRorErrors(self): + a = BitStream() + 
self.assertRaises(bitstring.Error, a.ror, 0) + a += '0b001' + self.assertRaises(ValueError, a.ror, -1) + + def testRol(self): + a = BitStream('0b11001') + a.rol(0) + self.assertEqual(a, '0b11001') + a.rol(1) + self.assertEqual(a, '0b10011') + a.rol(5) + self.assertEqual(a, '0b10011') + a.rol(101) + self.assertEqual(a, '0b00111') + a = BitStream('0b1') + a.rol(1000000) + self.assertEqual(a, '0b1') + + def testRolFromFile(self): + a = BitStream(filename='test.m1v') + l = a.len + a.rol(1) + self.assertTrue(a.startswith('0x000003')) + self.assertEqual(a.len, l) + self.assertTrue(a.endswith('0x0036e')) + + def testRorFromFile(self): + a = BitStream(filename='test.m1v') + l = a.len + a.ror(1) + self.assertTrue(a.startswith('0x800000')) + self.assertEqual(a.len, l) + self.assertTrue(a.endswith('0x000db')) + + def testRolErrors(self): + a = BitStream() + self.assertRaises(bitstring.Error, a.rol, 0) + a += '0b001' + self.assertRaises(ValueError, a.rol, -1) + + def testBytesToken(self): + a = BitStream('0x010203') + b = a.read('bytes:1') + self.assertTrue(isinstance(b, bytes)) + self.assertEqual(b, b'\x01') + x, y, z = a.unpack('4, bytes:2, uint') + self.assertEqual(x, 0) + self.assertEqual(y, b'\x10\x20') + self.assertEqual(z, 3) + s = pack('bytes:4', b'abcd') + self.assertEqual(s.bytes, b'abcd') + + def testBytesTokenMoreThoroughly(self): + a = BitStream('0x0123456789abcdef') + a.pos += 16 + self.assertEqual(a.read('bytes:1'), b'\x45') + self.assertEqual(a.read('bytes:3'), b'\x67\x89\xab') + x, y, z = a.unpack('bits:28, bytes, bits:12') + self.assertEqual(y, b'\x78\x9a\xbc') + + def testDedicatedReadFunctions(self): + a = BitStream('0b11, uint:43=98798798172, 0b11111') + x = a._readuint(43, 2) + self.assertEqual(x, 98798798172) + self.assertEqual(a.pos, 0) + x = a._readint(43, 2) + self.assertEqual(x, 98798798172) + self.assertEqual(a.pos, 0) + + a = BitStream('0b11, uintbe:48=98798798172, 0b11111') + x = a._readuintbe(48, 2) + self.assertEqual(x, 98798798172) + 
self.assertEqual(a.pos, 0) + x = a._readintbe(48, 2) + self.assertEqual(x, 98798798172) + self.assertEqual(a.pos, 0) + + a = BitStream('0b111, uintle:40=123516, 0b111') + self.assertEqual(a._readuintle(40, 3), 123516) + b = BitStream('0xff, uintle:800=999, 0xffff') + self.assertEqual(b._readuintle(800, 8), 999) + + a = BitStream('0b111, intle:48=999999999, 0b111111111111') + self.assertEqual(a._readintle(48, 3), 999999999) + b = BitStream('0xff, intle:200=918019283740918263512351235, 0xfffffff') + self.assertEqual(b._readintle(200, 8), 918019283740918263512351235) + + a = BitStream('0b111, floatbe:64=-5.32, 0xffffffff') + self.assertEqual(a._readfloat(64, 3), -5.32) + + a = BitStream('0b111, floatle:64=9.9998, 0b111') + self.assertEqual(a._readfloatle(64, 3), 9.9998) + + def testAutoInitWithInt(self): + a = BitStream(0) + self.assertFalse(a) + a = BitStream(1) + self.assertEqual(a, '0b0') + a = BitStream(1007) + self.assertEqual(a, BitStream(length=1007)) + self.assertRaises(bitstring.CreationError, BitStream, -1) + + a = 6 + ConstBitStream('0b1') + 3 + self.assertEqual(a, '0b0000001000') + a += 1 + self.assertEqual(a, '0b00000010000') + self.assertEqual(ConstBitStream(13), 13) + + def testReadingProblems(self): + a = BitStream('0x000001') + b = a.read('uint:24') + self.assertEqual(b, 1) + a.pos = 0 + self.assertRaises(bitstring.ReadError, a.read, 'bytes:4') + + def testAddVersesInPlaceAdd(self): + a1 = ConstBitStream('0xabc') + b1 = a1 + a1 += '0xdef' + self.assertEqual(a1, '0xabcdef') + self.assertEqual(b1, '0xabc') + + a2 = BitStream('0xabc') + b2 = a2 + c2 = a2 + '0x0' + a2 += '0xdef' + self.assertEqual(a2, '0xabcdef') + self.assertEqual(b2, '0xabcdef') + self.assertEqual(c2, '0xabc0') + + def testAndVersesInPlaceAnd(self): + a1 = ConstBitStream('0xabc') + b1 = a1 + a1 &= '0xf0f' + self.assertEqual(a1, '0xa0c') + self.assertEqual(b1, '0xabc') + + a2 = BitStream('0xabc') + b2 = a2 + c2 = a2 & '0x00f' + a2 &= '0xf0f' + self.assertEqual(a2, '0xa0c') + 
self.assertEqual(b2, '0xa0c') + self.assertEqual(c2, '0x00c') + + def testOrVersesInPlaceOr(self): + a1 = ConstBitStream('0xabc') + b1 = a1 + a1 |= '0xf0f' + self.assertEqual(a1, '0xfbf') + self.assertEqual(b1, '0xabc') + + a2 = BitStream('0xabc') + b2 = a2 + c2 = a2 | '0x00f' + a2 |= '0xf0f' + self.assertEqual(a2, '0xfbf') + self.assertEqual(b2, '0xfbf') + self.assertEqual(c2, '0xabf') + + def testXorVersesInPlaceXor(self): + a1 = ConstBitStream('0xabc') + b1 = a1 + a1 ^= '0xf0f' + self.assertEqual(a1, '0x5b3') + self.assertEqual(b1, '0xabc') + + a2 = BitStream('0xabc') + b2 = a2 + c2 = a2 ^ '0x00f' + a2 ^= '0xf0f' + self.assertEqual(a2, '0x5b3') + self.assertEqual(b2, '0x5b3') + self.assertEqual(c2, '0xab3') + + def testMulVersesInPlaceMul(self): + a1 = ConstBitStream('0xabc') + b1 = a1 + a1 *= 3 + self.assertEqual(a1, '0xabcabcabc') + self.assertEqual(b1, '0xabc') + + a2 = BitStream('0xabc') + b2 = a2 + c2 = a2 * 2 + a2 *= 3 + self.assertEqual(a2, '0xabcabcabc') + self.assertEqual(b2, '0xabcabcabc') + self.assertEqual(c2, '0xabcabc') + + def testLshiftVersesInPlaceLshift(self): + a1 = ConstBitStream('0xabc') + b1 = a1 + a1 <<= 4 + self.assertEqual(a1, '0xbc0') + self.assertEqual(b1, '0xabc') + + a2 = BitStream('0xabc') + b2 = a2 + c2 = a2 << 8 + a2 <<= 4 + self.assertEqual(a2, '0xbc0') + self.assertEqual(b2, '0xbc0') + self.assertEqual(c2, '0xc00') + + def testRshiftVersesInPlaceRshift(self): + a1 = ConstBitStream('0xabc') + b1 = a1 + a1 >>= 4 + self.assertEqual(a1, '0x0ab') + self.assertEqual(b1, '0xabc') + + a2 = BitStream('0xabc') + b2 = a2 + c2 = a2 >> 8 + a2 >>= 4 + self.assertEqual(a2, '0x0ab') + self.assertEqual(b2, '0x0ab') + self.assertEqual(c2, '0x00a') + + def testAutoFromBool(self): + a = ConstBitStream() + True + False + True + self.assertEqual(a, '0b00') + # self.assertEqual(a, '0b101') + # b = ConstBitStream(False) + # self.assertEqual(b, '0b0') + # c = ConstBitStream(True) + # self.assertEqual(c, '0b1') + # self.assertEqual(b, False) + # 
self.assertEqual(c, True) + # self.assertEqual(b & True, False) + + +class Bugs(unittest.TestCase): + def testBugInReplace(self): + s = BitStream('0x00112233') + l = list(s.split('0x22', start=8, bytealigned=True)) + self.assertEqual(l, ['0x11', '0x2233']) + s = BitStream('0x00112233') + s.replace('0x22', '0xffff', start=8, bytealigned=True) + self.assertEqual(s, '0x0011ffff33') + s = BitStream('0x0123412341234') + s.replace('0x23', '0xf', start=9, bytealigned=True) + self.assertEqual(s, '0x012341f41f4') + + def testTruncateStartBug(self): + a = BitStream('0b000000111')[2:] + a._truncatestart(6) + self.assertEqual(a, '0b1') + + def testNullBits(self): + s = ConstBitStream(bin='') + t = ConstBitStream(oct='') + u = ConstBitStream(hex='') + v = ConstBitStream(bytes=b'') + self.assertFalse(s) + self.assertFalse(t) + self.assertFalse(u) + self.assertFalse(v) + + def testMultiplicativeFactorsCreation(self): + s = BitStream('1*0b1') + self.assertEqual(s, '0b1') + s = BitStream('4*0xc') + self.assertEqual(s, '0xcccc') + s = BitStream('0b1, 0*0b0') + self.assertEqual(s, '0b1') + s = BitStream('0b1, 3*uint:8=34, 2*0o755') + self.assertEqual(s, '0b1, uint:8=34, uint:8=34, uint:8=34, 0o755755') + s = BitStream('0*0b1001010') + self.assertFalse(s) + + def testMultiplicativeFactorsReading(self): + s = BitStream('0xc') * 5 + a, b, c, d, e = s.readlist('5*4') + self.assertTrue(a == b == c == d == e == 12) + s = ConstBitStream('2*0b101, 4*uint:7=3') + a, b, c, d, e = s.readlist('2*bin:3, 3*uint:7') + self.assertTrue(a == b == '101') + self.assertTrue(c == d == e == 3) + + def testMultiplicativeFactorsPacking(self): + s = pack('3*bin', '1', '001', '101') + self.assertEqual(s, '0b1001101') + s = pack('hex, 2*se=-56, 3*uint:37', '34', 1, 2, 3) + a, b, c, d, e, f = s.unpack('hex:8, 2*se, 3*uint:37') + self.assertEqual(a, '34') + self.assertEqual(b, -56) + self.assertEqual(c, -56) + self.assertEqual((d, e, f), (1, 2, 3)) + # This isn't allowed yet. See comment in tokenparser. 
+ #s = pack('fluffy*uint:8', *range(3), fluffy=3) + #a, b, c = s.readlist('2*uint:8, 1*uint:8, 0*uint:8') + #self.assertEqual((a, b, c), (0, 1, 2)) + + def testMultiplicativeFactorsUnpacking(self): + s = ConstBitStream('0b10111') + a, b, c, d = s.unpack('3*bool, bin') + self.assertEqual((a, b, c), (True, False, True)) + self.assertEqual(d, '11') + + + def testPackingDefaultIntWithKeyword(self): + s = pack('12', 100) + self.assertEqual(s.unpack('12')[0], 100) + s = pack('oh_no_not_the_eyes=33', oh_no_not_the_eyes=17) + self.assertEqual(s.uint, 33) + self.assertEqual(s.len, 17) + + def testInitFromIterable(self): + self.assertTrue(isinstance(range(10), collections.Iterable)) + s = ConstBitStream(range(12)) + self.assertEqual(s, '0x7ff') + + def testFunctionNegativeIndices(self): + # insert + s = BitStream('0b0111') + s.insert('0b0', -1) + self.assertEqual(s, '0b01101') + self.assertRaises(ValueError, s.insert, '0b0', -1000) + + # reverse + s.reverse(-2) + self.assertEqual(s, '0b01110') + t = BitStream('0x778899abcdef') + t.reverse(-12, -4) + self.assertEqual(t, '0x778899abc7bf') + + # reversebytes + t.byteswap(0, -40, -16) + self.assertEqual(t, '0x77ab9988c7bf') + + # overwrite + t.overwrite('0x666', -20) + self.assertEqual(t, '0x77ab998666bf') + + # find + found = t.find('0x998', bytealigned=True, start=-31) + self.assertFalse(found) + found = t.find('0x998', bytealigned=True, start=-32) + self.assertTrue(found) + self.assertEqual(t.pos, 16) + t.pos = 0 + found = t.find('0x988', bytealigned=True, end=-21) + self.assertFalse(found) + found = t.find('0x998', bytealigned=True, end=-20) + self.assertTrue(found) + self.assertEqual(t.pos, 16) + + #findall + s = BitStream('0x1234151f') + l = list(s.findall('0x1', bytealigned=True, start=-15)) + self.assertEqual(l, [24]) + l = list(s.findall('0x1', bytealigned=True, start=-16)) + self.assertEqual(l, [16, 24]) + l = list(s.findall('0x1', bytealigned=True, end=-5)) + self.assertEqual(l, [0, 16]) + l = list(s.findall('0x1', 
bytealigned=True, end=-4)) + self.assertEqual(l, [0, 16, 24]) + + # rfind + found = s.rfind('0x1f', end=-1) + self.assertFalse(found) + found = s.rfind('0x12', start=-31) + self.assertFalse(found) + + # cut + s = BitStream('0x12345') + l = list(s.cut(4, start=-12, end=-4)) + self.assertEqual(l, ['0x3', '0x4']) + + # split + s = BitStream('0xfe0012fe1200fe') + l = list(s.split('0xfe', bytealigned=True, end=-1)) + self.assertEqual(l, ['', '0xfe0012', '0xfe1200f, 0b111']) + l = list(s.split('0xfe', bytealigned=True, start=-8)) + self.assertEqual(l, ['', '0xfe']) + + # startswith + self.assertTrue(s.startswith('0x00f', start=-16)) + self.assertTrue(s.startswith('0xfe00', end=-40)) + self.assertFalse(s.startswith('0xfe00', end=-41)) + + # endswith + self.assertTrue(s.endswith('0x00fe', start=-16)) + self.assertFalse(s.endswith('0x00fe', start=-15)) + self.assertFalse(s.endswith('0x00fe', end=-1)) + self.assertTrue(s.endswith('0x00f', end=-4)) + + # replace + s.replace('0xfe', '', end=-1) + self.assertEqual(s, '0x00121200fe') + s.replace('0x00', '', start=-24) + self.assertEqual(s, '0x001212fe') + + def testRotateStartAndEnd(self): + a = BitStream('0b110100001') + a.rol(1, 3, 6) + self.assertEqual(a, '0b110001001') + a.ror(1, start=-4) + self.assertEqual(a, '0b110001100') + a.rol(202, end=-5) + self.assertEqual(a, '0b001101100') + a.ror(3, end=4) + self.assertEqual(a, '0b011001100') + self.assertRaises(ValueError, a.rol, 5, start=-4, end=-6) + + def testByteSwapInt(self): + s = pack('5*uintle:16', *range(10, 15)) + self.assertEqual(list(range(10, 15)), s.unpack('5*uintle:16')) + swaps = s.byteswap(2) + self.assertEqual(list(range(10, 15)), s.unpack('5*uintbe:16')) + self.assertEqual(swaps, 5) + s = BitStream('0xf234567f') + swaps = s.byteswap(1, start=4) + self.assertEqual(swaps, 3) + self.assertEqual(s, '0xf234567f') + s.byteswap(2, start=4) + self.assertEqual(s, '0xf452367f') + s.byteswap(2, start=4, end=-4) + self.assertEqual(s, '0xf234567f') + s.byteswap(3) + 
self.assertEqual(s, '0x5634f27f') + s.byteswap(2, repeat=False) + self.assertEqual(s, '0x3456f27f') + swaps = s.byteswap(5) + self.assertEqual(swaps, 0) + swaps = s.byteswap(4, repeat=False) + self.assertEqual(swaps, 1) + self.assertEqual(s, '0x7ff25634') + + def testByteSwapPackCode(self): + s = BitStream('0x0011223344556677') + swaps = s.byteswap('b') + self.assertEqual(s, '0x0011223344556677') + self.assertEqual(swaps, 8) + swaps = s.byteswap('>3h', repeat=False) + self.assertEqual(s, '0x1100332255446677') + self.assertEqual(swaps, 1) + + def testByteSwapIterable(self): + s = BitStream('0x0011223344556677') + swaps = s.byteswap(range(1, 4), repeat=False) + self.assertEqual(swaps, 1) + self.assertEqual(s, '0x0022115544336677') + swaps = s.byteswap([2], start=8) + self.assertEqual(s, '0x0011224455663377') + self.assertEqual(3, swaps) + swaps = s.byteswap([2, 3], start=4) + self.assertEqual(swaps, 1) + self.assertEqual(s, '0x0120156452463377') + + def testByteSwapErrors(self): + s = BitStream('0x0011223344556677') + self.assertRaises(ValueError, s.byteswap, 'z') + self.assertRaises(ValueError, s.byteswap, -1) + self.assertRaises(ValueError, s.byteswap, [-1]) + self.assertRaises(ValueError, s.byteswap, [1, 'e']) + self.assertRaises(ValueError, s.byteswap, '!h') + self.assertRaises(ValueError, s.byteswap, 2, start=-1000) + self.assertRaises(TypeError, s.byteswap, 5.4) + + def testByteSwapFromFile(self): + s = BitStream(filename='smalltestfile') + swaps = s.byteswap('2bh') + self.assertEqual(s, '0x0123674589abefcd') + self.assertEqual(swaps, 2) + + def testBracketExpander(self): + be = bitstring.expand_brackets + self.assertEqual(be('hello'), 'hello') + self.assertEqual(be('(hello)'), 'hello') + self.assertEqual(be('1*(hello)'), 'hello') + self.assertEqual(be('2*(hello)'), 'hello,hello') + self.assertEqual(be('1*(a, b)'), 'a,b') + self.assertEqual(be('2*(a, b)'), 'a,b,a,b') + self.assertEqual(be('2*(a), 3*(b)'), 'a,a,b,b,b') + self.assertEqual(be('2*(a, b, 3*(c, d), 
e)'), 'a,b,c,d,c,d,c,d,e,a,b,c,d,c,d,c,d,e') + + def testBracketTokens(self): + s = BitStream('3*(0x0, 0b1)') + self.assertEqual(s, '0x0, 0b1, 0x0, 0b1, 0x0, 0b1') + s = pack('2*(uint:12, 3*(7, 6))', *range(3, 17)) + a = s.unpack('12, 7, 6, 7, 6, 7, 6, 12, 7, 6, 7, 6, 7, 6') + self.assertEqual(a, list(range(3, 17))) + b = s.unpack('2*(12,3*(7,6))') + self.assertEqual(a, b) + + def testPackCodeDicts(self): + self.assertEqual(sorted(bitstring.REPLACEMENTS_BE.keys()), + sorted(bitstring.REPLACEMENTS_LE.keys())) + self.assertEqual(sorted(bitstring.REPLACEMENTS_BE.keys()), + sorted(bitstring.PACK_CODE_SIZE.keys())) + for key in bitstring.PACK_CODE_SIZE: + be = pack(bitstring.REPLACEMENTS_BE[key], 0) + le = pack(bitstring.REPLACEMENTS_LE[key], 0) + self.assertEqual(be.len, bitstring.PACK_CODE_SIZE[key] * 8) + self.assertEqual(le.len, be.len) + + # These tests don't compile for Python 3, so they're commented out to save me stress. + #def testUnicode(self): + #a = ConstBitStream(u'uint:12=34') + #self.assertEqual(a.uint, 34) + #a += u'0xfe' + #self.assertEqual(a[12:], '0xfe') + #a = BitStream('0x1122') + #c = a.byteswap(u'h') + #self.assertEqual(c, 1) + #self.assertEqual(a, u'0x2211') + + #def testLongInt(self): + #a = BitStream(4L) + #self.assertEqual(a, '0b0000') + #a[1:3] = -1L + #self.assertEqual(a, '0b0110') + #a[0] = 1L + #self.assertEqual(a, '0b1110') + #a *= 4L + #self.assertEqual(a, '0xeeee') + #c = a.byteswap(2L) + #self.assertEqual(c, 1) + #a = BitStream('0x11223344') + #a.byteswap([1, 2L]) + #self.assertEqual(a, '0x11332244') + #b = a*2L + #self.assertEqual(b, '0x1133224411332244') + #s = pack('uint:12', 46L) + #self.assertEqual(s.uint, 46) + + +class UnpackWithDict(unittest.TestCase): + def testLengthKeywords(self): + a = ConstBitStream('2*13=100, 0b111') + x, y, z = a.unpack('n, uint:m, bin:q', n=13, m=13, q=3) + self.assertEqual(x, 100) + self.assertEqual(y, 100) + self.assertEqual(z, '111') + + def testLengthKeywordsWithStretch(self): + a = 
ConstBitStream('0xff, 0b000, 0xf') + x, y, z = a.unpack('hex:a, bin, hex:b', a=8, b=4) + self.assertEqual(y, '000') + + def testUnusedKeyword(self): + a = ConstBitStream('0b110') + x, = a.unpack('bin:3', notused=33) + self.assertEqual(x, '110') + + def testLengthKeywordErrors(self): + a = pack('uint:p=33', p=12) + self.assertRaises(ValueError, a.unpack, 'uint:p') + self.assertRaises(ValueError, a.unpack, 'uint:p', p='a_string') + + +class ReadWithDict(unittest.TestCase): + def testLengthKeywords(self): + s = BitStream('0x0102') + x, y = s.readlist('a, hex:b', a=8, b=4) + self.assertEqual((x, y), (1, '0')) + self.assertEqual(s.pos, 12) + + def testBytesKeywordProblem(self): + s = BitStream('0x01') + x, = s.unpack('bytes:a', a=1) + self.assertEqual(x, b'\x01') + + s = BitStream('0x000ff00a') + x, y, z = s.unpack('12, bytes:x, bits', x=2) + self.assertEqual((x, y, z), (0, b'\xff\x00', '0xa')) + + + +class PeekWithDict(unittest.TestCase): + def testLengthKeywords(self): + s = BitStream('0x0102') + x, y = s.peeklist('a, hex:b', a=8, b=4) + self.assertEqual((x, y), (1, '0')) + self.assertEqual(s.pos, 0) + +##class Miscellany(unittest.TestCase): +## +## def testNumpyInt(self): +## try: +## import numpy +## a = ConstBitStream(uint=numpy.uint8(5), length=3) +## self.assertEqual(a.uint, 5) +## except ImportError: +## # Not to worry +## pass + +class BoolToken(unittest.TestCase): + def testInterpretation(self): + a = ConstBitStream('0b1') + self.assertEqual(a.bool, True) + self.assertEqual(a.read('bool'), True) + self.assertEqual(a.unpack('bool')[0], True) + b = ConstBitStream('0b0') + self.assertEqual(b.bool, False) + self.assertEqual(b.peek('bool'), False) + self.assertEqual(b.unpack('bool')[0], False) + + def testPack(self): + a = pack('bool=True') + b = pack('bool=False') + self.assertEqual(a.bool, True) + self.assertEqual(b.bool, False) + c = pack('4*bool', False, True, 'False', 'True') + self.assertEqual(c, '0b0101') + + def testAssignment(self): + a = BitStream() + 
a.bool = True + self.assertEqual(a.bool, True) + a.hex = 'ee' + a.bool = False + self.assertEqual(a.bool, False) + a.bool = 'False' + self.assertEqual(a.bool, False) + a.bool = 'True' + self.assertEqual(a.bool, True) + a.bool = 0 + self.assertEqual(a.bool, False) + a.bool = 1 + self.assertEqual(a.bool, True) + + def testErrors(self): + self.assertRaises(bitstring.CreationError, pack, 'bool', 'hello') + self.assertRaises(bitstring.CreationError, pack, 'bool=true') + self.assertRaises(bitstring.CreationError, pack, 'True') + self.assertRaises(bitstring.CreationError, pack, 'bool', 2) + a = BitStream('0b11') + self.assertRaises(bitstring.InterpretError, a._getbool) + b = BitStream() + self.assertRaises(bitstring.InterpretError, a._getbool) + self.assertRaises(bitstring.CreationError, a._setbool, 'false') + + def testLengthWithBoolRead(self): + a = ConstBitStream('0xf') + self.assertRaises(ValueError, a.read, 'bool:0') + self.assertRaises(ValueError, a.read, 'bool:1') + self.assertRaises(ValueError, a.read, 'bool:2') + + +class ReadWithIntegers(unittest.TestCase): + def testReadInt(self): + a = ConstBitStream('0xffeedd') + b = a.read(8) + self.assertEqual(b.hex, 'ff') + self.assertEqual(a.pos, 8) + b = a.peek(8) + self.assertEqual(b.hex, 'ee') + self.assertEqual(a.pos, 8) + b = a.peek(1) + self.assertEqual(b, '0b1') + b = a.read(1) + self.assertEqual(b, '0b1') + + def testReadIntList(self): + a = ConstBitStream('0xab, 0b110') + b, c = a.readlist([8, 3]) + self.assertEqual(b.hex, 'ab') + self.assertEqual(c.bin, '110') + + +class FileReadingStrategy(unittest.TestCase): + def testBitStreamIsAlwaysRead(self): + a = BitStream(filename='smalltestfile') + self.assertTrue(isinstance(a._datastore, bitstring.ByteStore)) + f = open('smalltestfile', 'rb') + b = BitStream(f) + self.assertTrue(isinstance(b._datastore, bitstring.ByteStore)) + + def testBitsIsNeverRead(self): + a = ConstBitStream(filename='smalltestfile') + self.assertTrue(isinstance(a._datastore._rawarray, 
bitstring.MmapByteArray)) + f = open('smalltestfile', 'rb') + b = ConstBitStream(f) + self.assertTrue(isinstance(b._datastore._rawarray, bitstring.MmapByteArray)) + + +class Count(unittest.TestCase): + def testCount(self): + a = ConstBitStream('0xf0f') + self.assertEqual(a.count(True), 8) + self.assertEqual(a.count(False), 4) + + b = BitStream() + self.assertEqual(b.count(True), 0) + self.assertEqual(b.count(False), 0) + + def testCountWithOffsetData(self): + a = ConstBitStream('0xff0120ff') + b = a[1:-1] + self.assertEqual(b.count(1), 16) + self.assertEqual(b.count(0), 14) + + +class ZeroBitReads(unittest.TestCase): + def testInteger(self): + a = ConstBitStream('0x123456') + self.assertRaises(bitstring.InterpretError, a.read, 'uint:0') + self.assertRaises(bitstring.InterpretError, a.read, 'float:0') + +#class EfficientBitsCopies(unittest.TestCase): +# +# def testBitsCopy(self): +# a = ConstBitStream('0xff') +# b = ConstBitStream(a) +# c = a[:] +# d = copy.copy(a) +# self.assertTrue(a._datastore is b._datastore) +# self.assertTrue(a._datastore is c._datastore) +# self.assertTrue(a._datastore is d._datastore) + +class InitialiseFromBytes(unittest.TestCase): + def testBytesBehaviour(self): + a = ConstBitStream(b'uint:5=2') + b = ConstBitStream(b'') + c = ConstBitStream(bytes=b'uint:5=2') + if b'' == '': + # Python 2 + self.assertEqual(a, 'uint:5=2') + self.assertFalse(b) + self.assertEqual(c.bytes, b'uint:5=2') + else: + self.assertEqual(a.bytes, b'uint:5=2') + self.assertFalse(b) + self.assertEqual(c, b'uint:5=2') + + def testBytearrayBehaviour(self): + a = ConstBitStream(bytearray(b'uint:5=2')) + b = ConstBitStream(bytearray(4)) + c = ConstBitStream(bytes=bytearray(b'uint:5=2')) + self.assertEqual(a.bytes, b'uint:5=2') + self.assertEqual(b, '0x00000000') + self.assertEqual(c.bytes, b'uint:5=2') + + +class CoverageCompletionTests(unittest.TestCase): + def testUeReadError(self): + s = ConstBitStream('0b000000001') + self.assertRaises(bitstring.ReadError, s.read, 
'ue') + + def testOverwriteWithSelf(self): + s = BitStream('0b1101') + s.overwrite(s) + self.assertEqual(s, '0b1101') + + +class Subclassing(unittest.TestCase): + + def testIsInstance(self): + class SubBits(BitStream): pass + a = SubBits() + self.assertTrue(isinstance(a, SubBits)) + + def testClassType(self): + class SubBits(BitStream): pass + self.assertEqual(SubBits().__class__, SubBits) + + +class BytesProblems(unittest.TestCase): + + def testOffsetButNoLength(self): + b = BitStream(bytes=b'\x00\xaa', offset=8) + self.assertEqual(b.hex, 'aa') + b = BitStream(bytes=b'\x00\xaa', offset=4) + self.assertEqual(b.hex, '0aa') + + def testInvert(self): + b = BitStream(bytes=b'\x00\xaa', offset=8, length=8) + self.assertEqual(b.hex, 'aa') + b.invert() + self.assertEqual(b.hex, '55') + + def testPrepend(self): + b = BitStream(bytes=b'\xaa\xbb', offset=8, length=4) + self.assertEqual(b.hex, 'b') + b.prepend('0xe') + self.assertEqual(b.hex, 'eb') + b = BitStream(bytes=b'\x00\xaa', offset=8, length=8) + b.prepend('0xee') + self.assertEqual(b.hex, 'eeaa') + + def testByteSwap(self): + b = BitStream(bytes=b'\x01\x02\x03\x04', offset=8) + b.byteswap() + self.assertEqual(b, '0x040302') + + def testBinProperty(self): + b = BitStream(bytes=b'\x00\xaa', offset=8, length=4) + self.assertEqual(b.bin, '1010') \ No newline at end of file diff --git a/python/bitstring/test/test_bitstring.py b/python/bitstring/test/test_bitstring.py new file mode 100644 index 000000000..1b52b7b80 --- /dev/null +++ b/python/bitstring/test/test_bitstring.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python +""" +Module-level unit tests. 
+""" + +import unittest +import sys +sys.path.insert(0, '..') +import bitstring +import copy + + +class ModuleData(unittest.TestCase): + def testVersion(self): + self.assertEqual(bitstring.__version__, '3.1.3') + + def testAll(self): + exported = ['ConstBitArray', 'ConstBitStream', 'BitStream', 'BitArray', + 'Bits', 'BitString', 'pack', 'Error', 'ReadError', + 'InterpretError', 'ByteAlignError', 'CreationError', 'bytealigned'] + self.assertEqual(set(bitstring.__all__), set(exported)) + + def testReverseDict(self): + d = bitstring.BYTE_REVERSAL_DICT + for i in range(256): + a = bitstring.Bits(uint=i, length=8) + b = d[i] + self.assertEqual(a.bin[::-1], bitstring.Bits(bytes=b).bin) + + def testAliases(self): + self.assertTrue(bitstring.Bits is bitstring.ConstBitArray) + self.assertTrue(bitstring.BitStream is bitstring.BitString) + + +class MemoryUsage(unittest.TestCase): + def testBaselineMemory(self): + try: + import pympler.asizeof.asizeof as size + except ImportError: + return + # These values might be platform dependent, so don't fret too much. 
+ self.assertEqual(size(bitstring.ConstBitStream([0])), 64) + self.assertEqual(size(bitstring.Bits([0])), 64) + self.assertEqual(size(bitstring.BitStream([0])), 64) + self.assertEqual(size(bitstring.BitArray([0])), 64) + from bitstring.bitstore import ByteStore + self.assertEqual(size(ByteStore(bytearray())), 100) + + +class Copy(unittest.TestCase): + def testConstBitArrayCopy(self): + import copy + cba = bitstring.Bits(100) + cba_copy = copy.copy(cba) + self.assertTrue(cba is cba_copy) + + def testBitArrayCopy(self): + ba = bitstring.BitArray(100) + ba_copy = copy.copy(ba) + self.assertFalse(ba is ba_copy) + self.assertFalse(ba._datastore is ba_copy._datastore) + self.assertTrue(ba == ba_copy) + + def testConstBitStreamCopy(self): + cbs = bitstring.ConstBitStream(100) + cbs.pos = 50 + cbs_copy = copy.copy(cbs) + self.assertEqual(cbs_copy.pos, 0) + self.assertTrue(cbs._datastore is cbs_copy._datastore) + self.assertTrue(cbs == cbs_copy) + + def testBitStreamCopy(self): + bs = bitstring.BitStream(100) + bs.pos = 50 + bs_copy = copy.copy(bs) + self.assertEqual(bs_copy.pos, 0) + self.assertFalse(bs._datastore is bs_copy._datastore) + self.assertTrue(bs == bs_copy) + + +class Interning(unittest.TestCase): + def testBits(self): + a = bitstring.Bits('0xf') + b = bitstring.Bits('0xf') + self.assertTrue(a is b) + c = bitstring.Bits('0b1111') + self.assertFalse(a is c) + + def testCBS(self): + a = bitstring.ConstBitStream('0b11000') + b = bitstring.ConstBitStream('0b11000') + self.assertFalse(a is b) + # self.assertTrue(a._datastore is b._datastore) + + + + \ No newline at end of file diff --git a/python/bitstring/test/test_constbitstream.py b/python/bitstring/test/test_constbitstream.py new file mode 100644 index 000000000..a1bef743f --- /dev/null +++ b/python/bitstring/test/test_constbitstream.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python + +import unittest +import sys +sys.path.insert(0, '..') +import bitstring +from bitstring import ConstBitStream as CBS + +class 
All(unittest.TestCase): + def testFromFile(self): + s = CBS(filename='test.m1v') + self.assertEqual(s[0:32].hex, '000001b3') + self.assertEqual(s.read(8 * 4).hex, '000001b3') + width = s.read(12).uint + height = s.read(12).uint + self.assertEqual((width, height), (352, 288)) + + +class InterleavedExpGolomb(unittest.TestCase): + def testReading(self): + s = CBS(uie=333) + a = s.read('uie') + self.assertEqual(a, 333) + s = CBS('uie=12, sie=-9, sie=9, uie=1000000') + u = s.unpack('uie, 2*sie, uie') + self.assertEqual(u, [12, -9, 9, 1000000]) + + def testReadingErrors(self): + s = CBS(10) + self.assertRaises(bitstring.ReadError, s.read, 'uie') + self.assertEqual(s.pos, 0) + self.assertRaises(bitstring.ReadError, s.read, 'sie') + self.assertEqual(s.pos, 0) + + +class ReadTo(unittest.TestCase): + def testByteAligned(self): + a = CBS('0xaabb00aa00bb') + b = a.readto('0x00', bytealigned=True) + self.assertEqual(b, '0xaabb00') + self.assertEqual(a.bytepos, 3) + b = a.readto('0xaa', bytealigned=True) + self.assertEqual(b, '0xaa') + self.assertRaises(bitstring.ReadError, a.readto, '0xcc', bytealigned=True) + + def testNotAligned(self): + a = CBS('0b00111001001010011011') + a.pos = 1 + self.assertEqual(a.readto('0b00'), '0b011100') + self.assertEqual(a.readto('0b110'), '0b10010100110') + self.assertRaises(ValueError, a.readto, '') + + def testDisallowIntegers(self): + a = CBS('0x0f') + self.assertRaises(ValueError, a.readto, 4) + + def testReadingLines(self): + s = b"This is a test\nof reading lines\nof text\n" + b = CBS(bytes=s) + n = bitstring.Bits(bytes=b'\n') + self.assertEqual(b.readto(n).bytes, b'This is a test\n') + self.assertEqual(b.readto(n).bytes, b'of reading lines\n') + self.assertEqual(b.readto(n).bytes, b'of text\n') + + +class Subclassing(unittest.TestCase): + + def testIsInstance(self): + class SubBits(CBS): pass + a = SubBits() + self.assertTrue(isinstance(a, SubBits)) + + def testClassType(self): + class SubBits(CBS): pass + 
self.assertEqual(SubBits().__class__, SubBits) + + +class PadToken(unittest.TestCase): + + def testRead(self): + s = CBS('0b100011110001') + a = s.read('pad:1') + self.assertEqual(a, None) + self.assertEqual(s.pos, 1) + a = s.read(3) + self.assertEqual(a, CBS('0b000')) + a = s.read('pad:0') + self.assertEqual(a, None) + self.assertEqual(s.pos, 4) + + def testReadList(self): + s = CBS('0b10001111001') + t = s.readlist('pad:1, uint:3, pad:4, uint:3') + self.assertEqual(t, [0, 1]) + s.pos = 0 + t = s.readlist('pad:1, pad:5') + self.assertEqual(t, []) + self.assertEqual(s.pos, 6) + s.pos = 0 + t = s.readlist('pad:1, bin, pad:4, uint:3') + self.assertEqual(t, ['000', 1]) + s.pos = 0 + t = s.readlist('pad, bin:3, pad:4, uint:3') + self.assertEqual(t, ['000', 1]) + +class ReadingBytes(unittest.TestCase): + + def testUnpackingBytes(self): + s = CBS(80) + t = s.unpack('bytes:1') + self.assertEqual(t[0], b'\x00') + a, b, c = s.unpack('bytes:1, bytes, bytes:2') + self.assertEqual(a, b'\x00') + self.assertEqual(b, b'\x00'*7) + self.assertEqual(c, b'\x00'*2) + + def testUnpackingBytesWithKeywords(self): + s = CBS('0x55'*10) + t = s.unpack('pad:a, bytes:b, bytes, pad:a', a=4, b=6) + self.assertEqual(t, [b'\x55'*6, b'\x55'*3]) + diff --git a/python/blessings/LICENSE b/python/blessings/LICENSE new file mode 100644 index 000000000..3d3a44e65 --- /dev/null +++ b/python/blessings/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2011 Erik Rose + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of 
the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/python/blessings/MANIFEST.in b/python/blessings/MANIFEST.in new file mode 100644 index 000000000..3f4fbd708 --- /dev/null +++ b/python/blessings/MANIFEST.in @@ -0,0 +1,3 @@ +include README.rst +include LICENSE +include tox.ini diff --git a/python/blessings/PKG-INFO b/python/blessings/PKG-INFO new file mode 100644 index 000000000..c52ca3cf9 --- /dev/null +++ b/python/blessings/PKG-INFO @@ -0,0 +1,426 @@ +Metadata-Version: 1.0 +Name: blessings +Version: 1.3 +Summary: A thin, practical wrapper around terminal formatting, positioning, and more +Home-page: https://github.com/erikrose/blessings +Author: Erik Rose +Author-email: erikrose@grinchcentral.com +License: MIT +Description: ========= + Blessings + ========= + + Coding with Blessings looks like this... :: + + from blessings import Terminal + + t = Terminal() + + print t.bold('Hi there!') + print t.bold_red_on_bright_green('It hurts my eyes!') + + with t.location(0, t.height - 1): + print 'This is at the bottom.' + + Or, for byte-level control, you can drop down and play with raw terminal + capabilities:: + + print '{t.bold}All your {t.red}bold and red base{t.normal}'.format(t=t) + print t.wingo(2) + + The Pitch + ========= + + Blessings lifts several of curses_' limiting assumptions, and it makes your + code pretty, too: + + * Use styles, color, and maybe a little positioning without clearing the whole + screen first. 
+ * Leave more than one screenful of scrollback in the buffer after your program + exits, like a well-behaved command-line app should. + * Get rid of all those noisy, C-like calls to ``tigetstr`` and ``tparm``, so + your code doesn't get crowded out by terminal bookkeeping. + * Act intelligently when somebody redirects your output to a file, omitting the + terminal control codes the user doesn't want to see (optional). + + .. _curses: http://docs.python.org/library/curses.html + + Before And After + ---------------- + + Without Blessings, this is how you'd print some underlined text at the bottom + of the screen:: + + from curses import tigetstr, setupterm, tparm + from fcntl import ioctl + from os import isatty + import struct + import sys + from termios import TIOCGWINSZ + + # If we want to tolerate having our output piped to other commands or + # files without crashing, we need to do all this branching: + if hasattr(sys.stdout, 'fileno') and isatty(sys.stdout.fileno()): + setupterm() + sc = tigetstr('sc') + cup = tigetstr('cup') + rc = tigetstr('rc') + underline = tigetstr('smul') + normal = tigetstr('sgr0') + else: + sc = cup = rc = underline = normal = '' + print sc # Save cursor position. + if cup: + # tigetnum('lines') doesn't always update promptly, hence this: + height = struct.unpack('hhhh', ioctl(0, TIOCGWINSZ, '\000' * 8))[0] + print tparm(cup, height - 1, 0) # Move cursor to bottom. + print 'This is {under}underlined{normal}!'.format(under=underline, + normal=normal) + print rc # Restore cursor position. + + Phew! That was long and full of incomprehensible trash! Let's try it again, + this time with Blessings:: + + from blessings import Terminal + + term = Terminal() + with term.location(0, term.height - 1): + print 'This is', term.underline('pretty!') + + Much better. + + What It Provides + ================ + + Blessings provides just one top-level object: ``Terminal``. 
Instantiating a + ``Terminal`` figures out whether you're on a terminal at all and, if so, does + any necessary terminal setup. After that, you can proceed to ask it all sorts + of things about the terminal. Terminal terminal terminal. + + Simple Formatting + ----------------- + + Lots of handy formatting codes ("capabilities" in low-level parlance) are + available as attributes on a ``Terminal``. For example:: + + from blessings import Terminal + + term = Terminal() + print 'I am ' + term.bold + 'bold' + term.normal + '!' + + You can also use them as wrappers so you don't have to say ``normal`` + afterward:: + + print 'I am', term.bold('bold') + '!' + + Or, if you want fine-grained control while maintaining some semblance of + brevity, you can combine it with Python's string formatting, which makes + attributes easy to access:: + + print 'All your {t.red}base {t.underline}are belong to us{t.normal}'.format(t=term) + + Simple capabilities of interest include... + + * ``bold`` + * ``reverse`` + * ``underline`` + * ``no_underline`` (which turns off underlining) + * ``blink`` + * ``normal`` (which turns off everything, even colors) + * ``clear_eol`` (clear to the end of the line) + * ``clear_bol`` (clear to beginning of line) + * ``clear_eos`` (clear to end of screen) + + Here are a few more which are less likely to work on all terminals: + + * ``dim`` + * ``italic`` and ``no_italic`` + * ``shadow`` and ``no_shadow`` + * ``standout`` and ``no_standout`` + * ``subscript`` and ``no_subscript`` + * ``superscript`` and ``no_superscript`` + * ``flash`` (which flashes the screen once) + + Note that, while the inverse of ``underline`` is ``no_underline``, the only way + to turn off ``bold`` or ``reverse`` is ``normal``, which also cancels any + custom colors. This is because there's no way to tell the terminal to undo + certain pieces of formatting, even at the lowest level. 
+ + You might notice that the above aren't the typical incomprehensible terminfo + capability names; we alias a few of the harder-to-remember ones for + readability. However, you aren't limited to these: you can reference any + string-returning capability listed on the `terminfo man page`_ by the name + under the "Cap-name" column: for example, ``term.rum``. + + .. _`terminfo man page`: http://www.manpagez.com/man/5/terminfo/ + + Color + ----- + + 16 colors, both foreground and background, are available as easy-to-remember + attributes:: + + from blessings import Terminal + + term = Terminal() + print term.red + term.on_green + 'Red on green? Ick!' + term.normal + print term.bright_red + term.on_bright_blue + 'This is even worse!' + term.normal + + You can also call them as wrappers, which sets everything back to normal at the + end:: + + print term.red_on_green('Red on green? Ick!') + print term.yellow('I can barely see it.') + + The available colors are... + + * ``black`` + * ``red`` + * ``green`` + * ``yellow`` + * ``blue`` + * ``magenta`` + * ``cyan`` + * ``white`` + + You can set the background color instead of the foreground by prepending + ``on_``, as in ``on_blue``. There is also a ``bright`` version of each color: + for example, ``on_bright_blue``. + + There is also a numerical interface to colors, which takes an integer from + 0-15:: + + term.color(5) + 'Hello' + term.normal + term.on_color(3) + 'Hello' + term.normal + + term.color(5)('Hello') + term.on_color(3)('Hello') + + If some color is unsupported (for instance, if only the normal colors are + available, not the bright ones), trying to use it will, on most terminals, have + no effect: the foreground and background colors will stay as they were. You can + get fancy and do different things depending on the supported colors by checking + `number_of_colors`_. + + .. 
_`number_of_colors`: http://packages.python.org/blessings/#blessings.Terminal.number_of_colors + + Compound Formatting + ------------------- + + If you want to do lots of crazy formatting all at once, you can just mash it + all together:: + + from blessings import Terminal + + term = Terminal() + print term.bold_underline_green_on_yellow + 'Woo' + term.normal + + Or you can use your newly coined attribute as a wrapper, which implicitly sets + everything back to normal afterward:: + + print term.bold_underline_green_on_yellow('Woo') + + This compound notation comes in handy if you want to allow users to customize + the formatting of your app: just have them pass in a format specifier like + "bold_green" on the command line, and do a quick ``getattr(term, + that_option)('Your text')`` when you do your formatting. + + I'd be remiss if I didn't credit couleur_, where I probably got the idea for + all this mashing. + + .. _couleur: http://pypi.python.org/pypi/couleur + + Parametrized Capabilities + ------------------------- + + Some capabilities take parameters. Rather than making you dig up ``tparm()`` + all the time, we simply make such capabilities into callable strings. You can + pass the parameters right in:: + + from blessings import Terminal + + term = Terminal() + print term.move(10, 1) + + Here are some of interest: + + ``move`` + Position the cursor elsewhere. Parameters are y coordinate, then x + coordinate. + ``move_x`` + Move the cursor to the given column. + ``move_y`` + Move the cursor to the given row. + + You can also reference any other string-returning capability listed on the + `terminfo man page`_ by its name under the "Cap-name" column. + + .. 
_`terminfo man page`: http://www.manpagez.com/man/5/terminfo/ + + Height and Width + ---------------- + + It's simple to get the height and width of the terminal, in characters:: + + from blessings import Terminal + + term = Terminal() + height = term.height + width = term.width + + These are newly updated each time you ask for them, so they're safe to use from + SIGWINCH handlers. + + Temporary Repositioning + ----------------------- + + Sometimes you need to flit to a certain location, print something, and then + return: for example, when updating a progress bar at the bottom of the screen. + ``Terminal`` provides a context manager for doing this concisely:: + + from blessings import Terminal + + term = Terminal() + with term.location(0, term.height - 1): + print 'Here is the bottom.' + print 'This is back where I came from.' + + Parameters to ``location()`` are ``x`` and then ``y``, but you can also pass + just one of them, leaving the other alone. For example... :: + + with term.location(y=10): + print 'We changed just the row.' + + If you want to reposition permanently, see ``move``, in an example above. + + Pipe Savvy + ---------- + + If your program isn't attached to a terminal, like if it's being piped to + another command or redirected to a file, all the capability attributes on + ``Terminal`` will return empty strings. You'll get a nice-looking file without + any formatting codes gumming up the works. + + If you want to override this--like if you anticipate your program being piped + through ``less -r``, which handles terminal escapes just fine--pass + ``force_styling=True`` to the ``Terminal`` constructor. + + In any case, there is an ``is_a_tty`` attribute on ``Terminal`` that lets you + see whether the attached stream seems to be a terminal. 
If it's false, you + might refrain from drawing progress bars and other frippery, since you're + apparently headed into a pipe:: + + from blessings import Terminal + + term = Terminal() + if term.is_a_tty: + with term.location(0, term.height - 1): + print 'Progress: [=======> ]' + print term.bold('Important stuff') + + Shopping List + ============= + + There are decades of legacy tied up in terminal interaction, so attention to + detail and behavior in edge cases make a difference. Here are some ways + Blessings has your back: + + * Uses the terminfo database so it works with any terminal type + * Provides up-to-the-moment terminal height and width, so you can respond to + terminal size changes (SIGWINCH signals). (Most other libraries query the + ``COLUMNS`` and ``LINES`` environment variables or the ``cols`` or ``lines`` + terminal capabilities, which don't update promptly, if at all.) + * Avoids making a mess if the output gets piped to a non-terminal + * Works great with standard Python string templating + * Provides convenient access to all terminal capabilities, not just a sugared + few + * Outputs to any file-like object, not just stdout + * Keeps a minimum of internal state, so you can feel free to mix and match with + calls to curses or whatever other terminal libraries you like + + Blessings does not provide... + + * Native color support on the Windows command prompt. However, it should work + when used in concert with colorama_. + + .. _colorama: http://pypi.python.org/pypi/colorama/0.2.4 + + Bugs + ==== + + Bugs or suggestions? Visit the `issue tracker`_. + + .. _`issue tracker`: https://github.com/erikrose/blessings/issues/new + + License + ======= + + Blessings is under the MIT License. See the LICENSE file. + + Version History + =============== + + 1.3 + * Add ``number_of_colors``, which tells you how many colors the terminal + supports. + * Made ``color(n)`` and ``on_color(n)`` callable to wrap a string, like the + named colors can. 
Also, make them both fall back to the ``setf`` and + ``setb`` capabilities (like the named colors do) if the ANSI ``setaf`` and + ``setab`` aren't available. + * Allow ``color`` attr to act as an unparametrized string, not just a + callable. + * Make ``height`` and ``width`` examine any passed-in stream before falling + back to stdout. (This rarely if ever affects actual behavior; it's mostly + philosophical.) + * Make caching simpler and slightly more efficient. + * Get rid of a reference cycle between Terminals and FormattingStrings. + * Update docs to reflect that terminal addressing (as in ``location()``) is + 0-based. + + 1.2 + * Added support for Python 3! We need 3.2.3 or greater, because the curses + library couldn't decide whether to accept strs or bytes before that + (http://bugs.python.org/issue10570). + * Everything that comes out of the library is now unicode. This lets us + support Python 3 without making a mess of the code, and Python 2 should + continue to work unless you were testing types (and badly). Please file a + bug if this causes trouble for you. + * Changed to the MIT License for better world domination. + * Added Sphinx docs. + + 1.1 + * Added nicely named attributes for colors. + * Introduced compound formatting. + * Added wrapper behavior for styling and colors. + * Let you force capabilities to be non-empty, even if the output stream is + not a terminal. + * Added the ``is_a_tty`` attribute for telling whether the output stream is a + terminal. + * Sugared the remaining interesting string capabilities. + * Let ``location()`` operate on just an x *or* y coordinate. + + 1.0 + * Extracted Blessings from nose-progressive, my `progress-bar-having, + traceback-shortcutting, rootin', tootin' testrunner`_. It provided the + tootin' functionality. + + .. 
_`progress-bar-having, traceback-shortcutting, rootin', tootin' testrunner`: http://pypi.python.org/pypi/nose-progressive/ + +Keywords: terminal,tty,curses,ncurses,formatting,style,color,console +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Environment :: Console :: Curses +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.5 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development :: User Interfaces +Classifier: Topic :: Terminals diff --git a/python/blessings/README.rst b/python/blessings/README.rst new file mode 100644 index 000000000..59983de86 --- /dev/null +++ b/python/blessings/README.rst @@ -0,0 +1,399 @@ +========= +Blessings +========= + +Coding with Blessings looks like this... :: + + from blessings import Terminal + + t = Terminal() + + print t.bold('Hi there!') + print t.bold_red_on_bright_green('It hurts my eyes!') + + with t.location(0, t.height - 1): + print 'This is at the bottom.' + +Or, for byte-level control, you can drop down and play with raw terminal +capabilities:: + + print '{t.bold}All your {t.red}bold and red base{t.normal}'.format(t=t) + print t.wingo(2) + +The Pitch +========= + +Blessings lifts several of curses_' limiting assumptions, and it makes your +code pretty, too: + +* Use styles, color, and maybe a little positioning without clearing the whole + screen first. +* Leave more than one screenful of scrollback in the buffer after your program + exits, like a well-behaved command-line app should. 
+* Get rid of all those noisy, C-like calls to ``tigetstr`` and ``tparm``, so + your code doesn't get crowded out by terminal bookkeeping. +* Act intelligently when somebody redirects your output to a file, omitting the + terminal control codes the user doesn't want to see (optional). + +.. _curses: http://docs.python.org/library/curses.html + +Before And After +---------------- + +Without Blessings, this is how you'd print some underlined text at the bottom +of the screen:: + + from curses import tigetstr, setupterm, tparm + from fcntl import ioctl + from os import isatty + import struct + import sys + from termios import TIOCGWINSZ + + # If we want to tolerate having our output piped to other commands or + # files without crashing, we need to do all this branching: + if hasattr(sys.stdout, 'fileno') and isatty(sys.stdout.fileno()): + setupterm() + sc = tigetstr('sc') + cup = tigetstr('cup') + rc = tigetstr('rc') + underline = tigetstr('smul') + normal = tigetstr('sgr0') + else: + sc = cup = rc = underline = normal = '' + print sc # Save cursor position. + if cup: + # tigetnum('lines') doesn't always update promptly, hence this: + height = struct.unpack('hhhh', ioctl(0, TIOCGWINSZ, '\000' * 8))[0] + print tparm(cup, height - 1, 0) # Move cursor to bottom. + print 'This is {under}underlined{normal}!'.format(under=underline, + normal=normal) + print rc # Restore cursor position. + +Phew! That was long and full of incomprehensible trash! Let's try it again, +this time with Blessings:: + + from blessings import Terminal + + term = Terminal() + with term.location(0, term.height - 1): + print 'This is', term.underline('pretty!') + +Much better. + +What It Provides +================ + +Blessings provides just one top-level object: ``Terminal``. Instantiating a +``Terminal`` figures out whether you're on a terminal at all and, if so, does +any necessary terminal setup. After that, you can proceed to ask it all sorts +of things about the terminal. 
Terminal terminal terminal. + +Simple Formatting +----------------- + +Lots of handy formatting codes ("capabilities" in low-level parlance) are +available as attributes on a ``Terminal``. For example:: + + from blessings import Terminal + + term = Terminal() + print 'I am ' + term.bold + 'bold' + term.normal + '!' + +You can also use them as wrappers so you don't have to say ``normal`` +afterward:: + + print 'I am', term.bold('bold') + '!' + +Or, if you want fine-grained control while maintaining some semblance of +brevity, you can combine it with Python's string formatting, which makes +attributes easy to access:: + + print 'All your {t.red}base {t.underline}are belong to us{t.normal}'.format(t=term) + +Simple capabilities of interest include... + +* ``bold`` +* ``reverse`` +* ``underline`` +* ``no_underline`` (which turns off underlining) +* ``blink`` +* ``normal`` (which turns off everything, even colors) +* ``clear_eol`` (clear to the end of the line) +* ``clear_bol`` (clear to beginning of line) +* ``clear_eos`` (clear to end of screen) + +Here are a few more which are less likely to work on all terminals: + +* ``dim`` +* ``italic`` and ``no_italic`` +* ``shadow`` and ``no_shadow`` +* ``standout`` and ``no_standout`` +* ``subscript`` and ``no_subscript`` +* ``superscript`` and ``no_superscript`` +* ``flash`` (which flashes the screen once) + +Note that, while the inverse of ``underline`` is ``no_underline``, the only way +to turn off ``bold`` or ``reverse`` is ``normal``, which also cancels any +custom colors. This is because there's no way to tell the terminal to undo +certain pieces of formatting, even at the lowest level. + +You might notice that the above aren't the typical incomprehensible terminfo +capability names; we alias a few of the harder-to-remember ones for +readability. 
However, you aren't limited to these: you can reference any +string-returning capability listed on the `terminfo man page`_ by the name +under the "Cap-name" column: for example, ``term.rum``. + +.. _`terminfo man page`: http://www.manpagez.com/man/5/terminfo/ + +Color +----- + +16 colors, both foreground and background, are available as easy-to-remember +attributes:: + + from blessings import Terminal + + term = Terminal() + print term.red + term.on_green + 'Red on green? Ick!' + term.normal + print term.bright_red + term.on_bright_blue + 'This is even worse!' + term.normal + +You can also call them as wrappers, which sets everything back to normal at the +end:: + + print term.red_on_green('Red on green? Ick!') + print term.yellow('I can barely see it.') + +The available colors are... + +* ``black`` +* ``red`` +* ``green`` +* ``yellow`` +* ``blue`` +* ``magenta`` +* ``cyan`` +* ``white`` + +You can set the background color instead of the foreground by prepending +``on_``, as in ``on_blue``. There is also a ``bright`` version of each color: +for example, ``on_bright_blue``. + +There is also a numerical interface to colors, which takes an integer from +0-15:: + + term.color(5) + 'Hello' + term.normal + term.on_color(3) + 'Hello' + term.normal + + term.color(5)('Hello') + term.on_color(3)('Hello') + +If some color is unsupported (for instance, if only the normal colors are +available, not the bright ones), trying to use it will, on most terminals, have +no effect: the foreground and background colors will stay as they were. You can +get fancy and do different things depending on the supported colors by checking +`number_of_colors`_. + +.. 
_`number_of_colors`: http://packages.python.org/blessings/#blessings.Terminal.number_of_colors + +Compound Formatting +------------------- + +If you want to do lots of crazy formatting all at once, you can just mash it +all together:: + + from blessings import Terminal + + term = Terminal() + print term.bold_underline_green_on_yellow + 'Woo' + term.normal + +Or you can use your newly coined attribute as a wrapper, which implicitly sets +everything back to normal afterward:: + + print term.bold_underline_green_on_yellow('Woo') + +This compound notation comes in handy if you want to allow users to customize +the formatting of your app: just have them pass in a format specifier like +"bold_green" on the command line, and do a quick ``getattr(term, +that_option)('Your text')`` when you do your formatting. + +I'd be remiss if I didn't credit couleur_, where I probably got the idea for +all this mashing. + +.. _couleur: http://pypi.python.org/pypi/couleur + +Parametrized Capabilities +------------------------- + +Some capabilities take parameters. Rather than making you dig up ``tparm()`` +all the time, we simply make such capabilities into callable strings. You can +pass the parameters right in:: + + from blessings import Terminal + + term = Terminal() + print term.move(10, 1) + +Here are some of interest: + +``move`` + Position the cursor elsewhere. Parameters are y coordinate, then x + coordinate. +``move_x`` + Move the cursor to the given column. +``move_y`` + Move the cursor to the given row. + +You can also reference any other string-returning capability listed on the +`terminfo man page`_ by its name under the "Cap-name" column. + +.. 
_`terminfo man page`: http://www.manpagez.com/man/5/terminfo/ + +Height and Width +---------------- + +It's simple to get the height and width of the terminal, in characters:: + + from blessings import Terminal + + term = Terminal() + height = term.height + width = term.width + +These are newly updated each time you ask for them, so they're safe to use from +SIGWINCH handlers. + +Temporary Repositioning +----------------------- + +Sometimes you need to flit to a certain location, print something, and then +return: for example, when updating a progress bar at the bottom of the screen. +``Terminal`` provides a context manager for doing this concisely:: + + from blessings import Terminal + + term = Terminal() + with term.location(0, term.height - 1): + print 'Here is the bottom.' + print 'This is back where I came from.' + +Parameters to ``location()`` are ``x`` and then ``y``, but you can also pass +just one of them, leaving the other alone. For example... :: + + with term.location(y=10): + print 'We changed just the row.' + +If you want to reposition permanently, see ``move``, in an example above. + +Pipe Savvy +---------- + +If your program isn't attached to a terminal, like if it's being piped to +another command or redirected to a file, all the capability attributes on +``Terminal`` will return empty strings. You'll get a nice-looking file without +any formatting codes gumming up the works. + +If you want to override this--like if you anticipate your program being piped +through ``less -r``, which handles terminal escapes just fine--pass +``force_styling=True`` to the ``Terminal`` constructor. + +In any case, there is an ``is_a_tty`` attribute on ``Terminal`` that lets you +see whether the attached stream seems to be a terminal. 
If it's false, you +might refrain from drawing progress bars and other frippery, since you're +apparently headed into a pipe:: + + from blessings import Terminal + + term = Terminal() + if term.is_a_tty: + with term.location(0, term.height - 1): + print 'Progress: [=======> ]' + print term.bold('Important stuff') + +Shopping List +============= + +There are decades of legacy tied up in terminal interaction, so attention to +detail and behavior in edge cases make a difference. Here are some ways +Blessings has your back: + +* Uses the terminfo database so it works with any terminal type +* Provides up-to-the-moment terminal height and width, so you can respond to + terminal size changes (SIGWINCH signals). (Most other libraries query the + ``COLUMNS`` and ``LINES`` environment variables or the ``cols`` or ``lines`` + terminal capabilities, which don't update promptly, if at all.) +* Avoids making a mess if the output gets piped to a non-terminal +* Works great with standard Python string templating +* Provides convenient access to all terminal capabilities, not just a sugared + few +* Outputs to any file-like object, not just stdout +* Keeps a minimum of internal state, so you can feel free to mix and match with + calls to curses or whatever other terminal libraries you like + +Blessings does not provide... + +* Native color support on the Windows command prompt. However, it should work + when used in concert with colorama_. + +.. _colorama: http://pypi.python.org/pypi/colorama/0.2.4 + +Bugs +==== + +Bugs or suggestions? Visit the `issue tracker`_. + +.. _`issue tracker`: https://github.com/erikrose/blessings/issues/new + +License +======= + +Blessings is under the MIT License. See the LICENSE file. + +Version History +=============== + +1.3 + * Add ``number_of_colors``, which tells you how many colors the terminal + supports. + * Made ``color(n)`` and ``on_color(n)`` callable to wrap a string, like the + named colors can. 
Also, make them both fall back to the ``setf`` and + ``setb`` capabilities (like the named colors do) if the ANSI ``setaf`` and + ``setab`` aren't available. + * Allow ``color`` attr to act as an unparametrized string, not just a + callable. + * Make ``height`` and ``width`` examine any passed-in stream before falling + back to stdout. (This rarely if ever affects actual behavior; it's mostly + philosophical.) + * Make caching simpler and slightly more efficient. + * Get rid of a reference cycle between Terminals and FormattingStrings. + * Update docs to reflect that terminal addressing (as in ``location()``) is + 0-based. + +1.2 + * Added support for Python 3! We need 3.2.3 or greater, because the curses + library couldn't decide whether to accept strs or bytes before that + (http://bugs.python.org/issue10570). + * Everything that comes out of the library is now unicode. This lets us + support Python 3 without making a mess of the code, and Python 2 should + continue to work unless you were testing types (and badly). Please file a + bug if this causes trouble for you. + * Changed to the MIT License for better world domination. + * Added Sphinx docs. + +1.1 + * Added nicely named attributes for colors. + * Introduced compound formatting. + * Added wrapper behavior for styling and colors. + * Let you force capabilities to be non-empty, even if the output stream is + not a terminal. + * Added the ``is_a_tty`` attribute for telling whether the output stream is a + terminal. + * Sugared the remaining interesting string capabilities. + * Let ``location()`` operate on just an x *or* y coordinate. + +1.0 + * Extracted Blessings from nose-progressive, my `progress-bar-having, + traceback-shortcutting, rootin', tootin' testrunner`_. It provided the + tootin' functionality. + +.. 
_`progress-bar-having, traceback-shortcutting, rootin', tootin' testrunner`: http://pypi.python.org/pypi/nose-progressive/ diff --git a/python/blessings/blessings/__init__.py b/python/blessings/blessings/__init__.py new file mode 100644 index 000000000..081288ba6 --- /dev/null +++ b/python/blessings/blessings/__init__.py @@ -0,0 +1,450 @@ +from collections import defaultdict +import curses +from curses import tigetstr, tigetnum, setupterm, tparm +from fcntl import ioctl +try: + from io import UnsupportedOperation as IOUnsupportedOperation +except ImportError: + class IOUnsupportedOperation(Exception): + """A dummy exception to take the place of Python 3's ``io.UnsupportedOperation`` in Python 2""" + pass +import os +from os import isatty, environ +from platform import python_version_tuple +import struct +import sys +from termios import TIOCGWINSZ + + +if ('3', '0', '0') <= python_version_tuple() < ('3', '2', '2+'): # Good till 3.2.10 + # Python 3.x < 3.2.3 has a bug in which tparm() erroneously takes a string. + raise ImportError('Blessings needs Python 3.2.3 or greater for Python 3 ' + 'support due to http://bugs.python.org/issue10570.') + + +__all__ = ['Terminal'] + + +class Terminal(object): + """An abstraction around terminal capabilities + + Unlike curses, this doesn't require clearing the screen before doing + anything, and it's friendlier to use. It keeps the endless calls to + ``tigetstr()`` and ``tparm()`` out of your code, and it acts intelligently + when somebody pipes your output to a non-terminal. + + Instance attributes: + + ``stream`` + The stream the terminal outputs to. It's convenient to pass the stream + around with the terminal; it's almost always needed when the terminal + is and saves sticking lots of extra args on client functions in + practice. + ``is_a_tty`` + Whether ``stream`` appears to be a terminal. You can examine this value + to decide whether to draw progress bars or other frippery. 
+ + """ + def __init__(self, kind=None, stream=None, force_styling=False): + """Initialize the terminal. + + If ``stream`` is not a tty, I will default to returning an empty + Unicode string for all capability values, so things like piping your + output to a file won't strew escape sequences all over the place. The + ``ls`` command sets a precedent for this: it defaults to columnar + output when being sent to a tty and one-item-per-line when not. + + :arg kind: A terminal string as taken by ``setupterm()``. Defaults to + the value of the ``TERM`` environment variable. + :arg stream: A file-like object representing the terminal. Defaults to + the original value of stdout, like ``curses.initscr()`` does. + :arg force_styling: Whether to force the emission of capabilities, even + if we don't seem to be in a terminal. This comes in handy if users + are trying to pipe your output through something like ``less -r``, + which supports terminal codes just fine but doesn't appear itself + to be a terminal. Just expose a command-line option, and set + ``force_styling`` based on it. Terminal initialization sequences + will be sent to ``stream`` if it has a file descriptor and to + ``sys.__stdout__`` otherwise. (``setupterm()`` demands to send them + somewhere, and stdout is probably where the output is ultimately + headed. If not, stderr is probably bound to the same terminal.) + + """ + if stream is None: + stream = sys.__stdout__ + try: + stream_descriptor = (stream.fileno() if hasattr(stream, 'fileno') + and callable(stream.fileno) + else None) + except IOUnsupportedOperation: + stream_descriptor = None + + self.is_a_tty = stream_descriptor is not None and isatty(stream_descriptor) + self._does_styling = self.is_a_tty or force_styling + + # The desciptor to direct terminal initialization sequences to. + # sys.__stdout__ seems to always have a descriptor of 1, even if output + # is redirected. 
+ self._init_descriptor = (sys.__stdout__.fileno() + if stream_descriptor is None + else stream_descriptor) + if self._does_styling: + # Make things like tigetstr() work. Explicit args make setupterm() + # work even when -s is passed to nosetests. Lean toward sending + # init sequences to the stream if it has a file descriptor, and + # send them to stdout as a fallback, since they have to go + # somewhere. + setupterm(kind or environ.get('TERM', 'unknown'), + self._init_descriptor) + + self.stream = stream + + # Sugary names for commonly-used capabilities, intended to help avoid trips + # to the terminfo man page and comments in your code: + _sugar = dict( + # Don't use "on" or "bright" as an underscore-separated chunk in any of + # these (e.g. on_cology or rock_on) so we don't interfere with + # __getattr__. + save='sc', + restore='rc', + + clear_eol='el', + clear_bol='el1', + clear_eos='ed', + position='cup', # deprecated + move='cup', + move_x='hpa', + move_y='vpa', + + reset_colors='op', # oc doesn't work on my OS X terminal. + + normal='sgr0', + reverse='rev', + # 'bold' is just 'bold'. Similarly... + # blink + # dim + # flash + italic='sitm', + no_italic='ritm', + shadow='sshm', + no_shadow='rshm', + standout='smso', + no_standout='rmso', + subscript='ssubm', + no_subscript='rsubm', + superscript='ssupm', + no_superscript='rsupm', + underline='smul', + no_underline='rmul') + + def __getattr__(self, attr): + """Return parametrized terminal capabilities, like bold. + + For example, you can say ``term.bold`` to get the string that turns on + bold formatting and ``term.normal`` to get the string that turns it off + again. Or you can take a shortcut: ``term.bold('hi')`` bolds its + argument and sets everything to normal afterward. You can even combine + things: ``term.bold_underline_red_on_bright_green('yowzers!')``. + + For a parametrized capability like ``cup``, pass the parameters too: + ``some_term.cup(line, column)``. 
+ + ``man terminfo`` for a complete list of capabilities. + + Return values are always Unicode. + + """ + resolution = self._resolve_formatter(attr) if self._does_styling else NullCallableString() + setattr(self, attr, resolution) # Cache capability codes. + return resolution + + @property + def height(self): + """The height of the terminal in characters + + If no stream or a stream not representing a terminal was passed in at + construction, return the dimension of the controlling terminal so + piping to things that eventually display on the terminal (like ``less + -R``) work. If a stream representing a terminal was passed in, return + the dimensions of that terminal. If there somehow is no controlling + terminal, return ``None``. (Thus, you should check that ``is_a_tty`` is + true before doing any math on the result.) + + """ + return self._height_and_width()[0] + + @property + def width(self): + """The width of the terminal in characters + + See ``height()`` for some corner cases. + + """ + return self._height_and_width()[1] + + def _height_and_width(self): + """Return a tuple of (terminal height, terminal width).""" + # tigetnum('lines') and tigetnum('cols') update only if we call + # setupterm() again. + for descriptor in self._init_descriptor, sys.__stdout__: + try: + return struct.unpack('hhhh', ioctl(descriptor, TIOCGWINSZ, '\000' * 8))[0:2] + except IOError: + pass + return None, None # Should never get here + + def location(self, x=None, y=None): + """Return a context manager for temporarily moving the cursor. + + Move the cursor to a certain position on entry, let you print stuff + there, then return the cursor to its original position:: + + term = Terminal() + with term.location(2, 5): + print 'Hello, world!' + for x in xrange(10): + print 'I can do it %i times!' % x + + Specify ``x`` to move to a certain column, ``y`` to move to a certain + row, or both. 
+ + """ + return Location(self, x, y) + + @property + def color(self): + """Return a capability that sets the foreground color. + + The capability is unparametrized until called and passed a number + (0-15), at which point it returns another string which represents a + specific color change. This second string can further be called to + color a piece of text and set everything back to normal afterward. + + :arg num: The number, 0-15, of the color + + """ + return ParametrizingString(self._foreground_color, self.normal) + + @property + def on_color(self): + """Return a capability that sets the background color. + + See ``color()``. + + """ + return ParametrizingString(self._background_color, self.normal) + + @property + def number_of_colors(self): + """Return the number of colors the terminal supports. + + Common values are 0, 8, 16, 88, and 256. + + Though the underlying capability returns -1 when there is no color + support, we return 0. This lets you test more Pythonically:: + + if term.number_of_colors: + ... + + We also return 0 if the terminal won't tell us how many colors it + supports, which I think is rare. + + """ + # This is actually the only remotely useful numeric capability. We + # don't name it after the underlying capability, because we deviate + # slightly from its behavior, and we might someday wish to give direct + # access to it. + colors = tigetnum('colors') # Returns -1 if no color support, -2 if no such cap. + #self.__dict__['colors'] = ret # Cache it. It's not changing. (Doesn't work.) 
+ return colors if colors >= 0 else 0 + + def _resolve_formatter(self, attr): + """Resolve a sugary or plain capability name, color, or compound formatting function name into a callable capability.""" + if attr in COLORS: + return self._resolve_color(attr) + elif attr in COMPOUNDABLES: + # Bold, underline, or something that takes no parameters + return self._formatting_string(self._resolve_capability(attr)) + else: + formatters = split_into_formatters(attr) + if all(f in COMPOUNDABLES for f in formatters): + # It's a compound formatter, like "bold_green_on_red". Future + # optimization: combine all formatting into a single escape + # sequence. + return self._formatting_string( + u''.join(self._resolve_formatter(s) for s in formatters)) + else: + return ParametrizingString(self._resolve_capability(attr)) + + def _resolve_capability(self, atom): + """Return a terminal code for a capname or a sugary name, or an empty Unicode. + + The return value is always Unicode, because otherwise it is clumsy + (especially in Python 3) to concatenate with real (Unicode) strings. + + """ + code = tigetstr(self._sugar.get(atom, atom)) + if code: + # We can encode escape sequences as UTF-8 because they never + # contain chars > 127, and UTF-8 never changes anything within that + # range.. + return code.decode('utf-8') + return u'' + + def _resolve_color(self, color): + """Resolve a color like red or on_bright_green into a callable capability.""" + # TODO: Does curses automatically exchange red and blue and cyan and + # yellow when a terminal supports setf/setb rather than setaf/setab? + # I'll be blasted if I can find any documentation. The following + # assumes it does. 
+ color_cap = (self._background_color if 'on_' in color else + self._foreground_color) + # curses constants go up to only 7, so add an offset to get at the + # bright colors at 8-15: + offset = 8 if 'bright_' in color else 0 + base_color = color.rsplit('_', 1)[-1] + return self._formatting_string( + color_cap(getattr(curses, 'COLOR_' + base_color.upper()) + offset)) + + @property + def _foreground_color(self): + return self.setaf or self.setf + + @property + def _background_color(self): + return self.setab or self.setb + + def _formatting_string(self, formatting): + """Return a new ``FormattingString`` which implicitly receives my notion of "normal".""" + return FormattingString(formatting, self.normal) + + +def derivative_colors(colors): + """Return the names of valid color variants, given the base colors.""" + return set([('on_' + c) for c in colors] + + [('bright_' + c) for c in colors] + + [('on_bright_' + c) for c in colors]) + + +COLORS = set(['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white']) +COLORS.update(derivative_colors(COLORS)) +COMPOUNDABLES = (COLORS | + set(['bold', 'underline', 'reverse', 'blink', 'dim', 'italic', + 'shadow', 'standout', 'subscript', 'superscript'])) + + +class ParametrizingString(unicode): + """A Unicode string which can be called to parametrize it as a terminal capability""" + def __new__(cls, formatting, normal=None): + """Instantiate. + + :arg normal: If non-None, indicates that, once parametrized, this can + be used as a ``FormattingString``. The value is used as the + "normal" capability. + + """ + new = unicode.__new__(cls, formatting) + new._normal = normal + return new + + def __call__(self, *args): + try: + # Re-encode the cap, because tparm() takes a bytestring in Python + # 3. However, appear to be a plain Unicode string otherwise so + # concats work. 
+ parametrized = tparm(self.encode('utf-8'), *args).decode('utf-8') + return (parametrized if self._normal is None else + FormattingString(parametrized, self._normal)) + except curses.error: + # Catch "must call (at least) setupterm() first" errors, as when + # running simply `nosetests` (without progressive) on nose- + # progressive. Perhaps the terminal has gone away between calling + # tigetstr and calling tparm. + return u'' + except TypeError: + # If the first non-int (i.e. incorrect) arg was a string, suggest + # something intelligent: + if len(args) == 1 and isinstance(args[0], basestring): + raise TypeError( + 'A native or nonexistent capability template received ' + '%r when it was expecting ints. You probably misspelled a ' + 'formatting call like bright_red_on_white(...).' % args) + else: + # Somebody passed a non-string; I don't feel confident + # guessing what they were trying to do. + raise + + +class FormattingString(unicode): + """A Unicode string which can be called upon a piece of text to wrap it in formatting""" + def __new__(cls, formatting, normal): + new = unicode.__new__(cls, formatting) + new._normal = normal + return new + + def __call__(self, text): + """Return a new string that is ``text`` formatted with my contents. + + At the beginning of the string, I prepend the formatting that is my + contents. At the end, I append the "normal" sequence to set everything + back to defaults. The return value is always a Unicode. + + """ + return self + text + self._normal + + +class NullCallableString(unicode): + """A dummy class to stand in for ``FormattingString`` and ``ParametrizingString`` + + A callable bytestring that returns an empty Unicode when called with an int + and the arg otherwise. We use this when there is no tty and so all + capabilities are blank. 
+ + """ + def __new__(cls): + new = unicode.__new__(cls, u'') + return new + + def __call__(self, arg): + if isinstance(arg, int): + return u'' + return arg # TODO: Force even strs in Python 2.x to be unicodes? Nah. How would I know what encoding to use to convert it? + + +def split_into_formatters(compound): + """Split a possibly compound format string into segments. + + >>> split_into_formatters('bold_underline_bright_blue_on_red') + ['bold', 'underline', 'bright_blue', 'on_red'] + + """ + merged_segs = [] + # These occur only as prefixes, so they can always be merged: + mergeable_prefixes = ['on', 'bright', 'on_bright'] + for s in compound.split('_'): + if merged_segs and merged_segs[-1] in mergeable_prefixes: + merged_segs[-1] += '_' + s + else: + merged_segs.append(s) + return merged_segs + + +class Location(object): + """Context manager for temporarily moving the cursor""" + def __init__(self, term, x=None, y=None): + self.x, self.y = x, y + self.term = term + + def __enter__(self): + """Save position and move to the requested column, row, or both.""" + self.term.stream.write(self.term.save) # save position + if self.x and self.y: + self.term.stream.write(self.term.move(self.y, self.x)) + elif self.x: + self.term.stream.write(self.term.move_x(self.x)) + elif self.y: + self.term.stream.write(self.term.move_y(self.y)) + + def __exit__(self, type, value, tb): + """Restore original cursor position.""" + self.term.stream.write(self.term.restore) diff --git a/python/blessings/blessings/tests.py b/python/blessings/blessings/tests.py new file mode 100644 index 000000000..a02a3924a --- /dev/null +++ b/python/blessings/blessings/tests.py @@ -0,0 +1,231 @@ +# -*- coding: utf-8 -*- +"""Automated tests (as opposed to human-verified test patterns) + +It was tempting to mock out curses to get predictable output from ``tigetstr``, +but there are concrete integration-testing benefits in not doing so. For +instance, ``tigetstr`` changed its return type in Python 3.2.3. 
So instead, we +simply create all our test ``Terminal`` instances with a known terminal type. +All we require from the host machine is that a standard terminfo definition of +xterm-256color exists. + +""" +from __future__ import with_statement # Make 2.5-compatible +from curses import tigetstr, tparm +from functools import partial +from StringIO import StringIO +import sys + +from nose import SkipTest +from nose.tools import eq_ + +# This tests that __all__ is correct, since we use below everything that should +# be imported: +from blessings import * + + +TestTerminal = partial(Terminal, kind='xterm-256color') + + +def unicode_cap(cap): + """Return the result of ``tigetstr`` except as Unicode.""" + return tigetstr(cap).decode('utf-8') + + +def unicode_parm(cap, *parms): + """Return the result of ``tparm(tigetstr())`` except as Unicode.""" + return tparm(tigetstr(cap), *parms).decode('utf-8') + + +def test_capability(): + """Check that a capability lookup works. + + Also test that Terminal grabs a reasonable default stream. This test + assumes it will be run from a tty. + + """ + t = TestTerminal() + sc = unicode_cap('sc') + eq_(t.save, sc) + eq_(t.save, sc) # Make sure caching doesn't screw it up. + + +def test_capability_without_tty(): + """Assert capability templates are '' when stream is not a tty.""" + t = TestTerminal(stream=StringIO()) + eq_(t.save, u'') + eq_(t.red, u'') + + +def test_capability_with_forced_tty(): + """If we force styling, capabilities had better not (generally) be empty.""" + t = TestTerminal(stream=StringIO(), force_styling=True) + eq_(t.save, unicode_cap('sc')) + + +def test_parametrization(): + """Test parametrizing a capability.""" + eq_(TestTerminal().cup(3, 4), unicode_parm('cup', 3, 4)) + + +def height_and_width(): + """Assert that ``height_and_width()`` returns ints.""" + t = TestTerminal() # kind shouldn't matter. 
+    assert isinstance(t.height, int)
+    assert isinstance(t.width, int)
+
+
+def test_stream_attr():
+    """Make sure Terminal exposes a ``stream`` attribute that defaults to something sane."""
+    eq_(Terminal().stream, sys.__stdout__)
+
+
+def test_location():
+    """Make sure ``location()`` does what it claims."""
+    t = TestTerminal(stream=StringIO(), force_styling=True)
+
+    with t.location(3, 4):
+        t.stream.write(u'hi')
+
+    eq_(t.stream.getvalue(), unicode_cap('sc') +
+                             unicode_parm('cup', 4, 3) +
+                             u'hi' +
+                             unicode_cap('rc'))
+
+
+def test_horizontal_location():
+    """Make sure we can move the cursor horizontally without changing rows."""
+    t = TestTerminal(stream=StringIO(), force_styling=True)
+    with t.location(x=5):
+        pass
+    eq_(t.stream.getvalue(), unicode_cap('sc') +
+                             unicode_parm('hpa', 5) +
+                             unicode_cap('rc'))
+
+
+def test_null_fileno():
+    """Make sure ``Terminal`` works when ``fileno`` is ``None``.
+
+    This simulates piping output to another program.
+
+    """
+    out = StringIO()
+    out.fileno = None
+    t = TestTerminal(stream=out)
+    eq_(t.save, u'')
+
+
+def test_mnemonic_colors():
+    """Make sure color shortcuts work."""
+    def color(num):
+        return unicode_parm('setaf', num)
+
+    def on_color(num):
+        return unicode_parm('setab', num)
+
+    # Avoid testing red, blue, yellow, and cyan, since they might someday
+    # change depending on terminal type.
+    t = TestTerminal()
+    eq_(t.white, color(7))
+    eq_(t.green, color(2))  # Make sure it's different than white.
+ eq_(t.on_black, on_color(0)) + eq_(t.on_green, on_color(2)) + eq_(t.bright_black, color(8)) + eq_(t.bright_green, color(10)) + eq_(t.on_bright_black, on_color(8)) + eq_(t.on_bright_green, on_color(10)) + + +def test_callable_numeric_colors(): + """``color(n)`` should return a formatting wrapper.""" + t = TestTerminal() + eq_(t.color(5)('smoo'), t.magenta + 'smoo' + t.normal) + eq_(t.color(5)('smoo'), t.color(5) + 'smoo' + t.normal) + eq_(t.on_color(2)('smoo'), t.on_green + 'smoo' + t.normal) + eq_(t.on_color(2)('smoo'), t.on_color(2) + 'smoo' + t.normal) + + +def test_null_callable_numeric_colors(): + """``color(n)`` should be a no-op on null terminals.""" + t = TestTerminal(stream=StringIO()) + eq_(t.color(5)('smoo'), 'smoo') + eq_(t.on_color(6)('smoo'), 'smoo') + + +def test_naked_color_cap(): + """``term.color`` should return a stringlike capability.""" + t = TestTerminal() + eq_(t.color + '', t.setaf + '') + + +def test_number_of_colors_without_tty(): + """``number_of_colors`` should return 0 when there's no tty.""" + # Hypothesis: once setupterm() has run and decided the tty supports 256 + # colors, it never changes its mind. + raise SkipTest + + t = TestTerminal(stream=StringIO()) + eq_(t.number_of_colors, 0) + t = TestTerminal(stream=StringIO(), force_styling=True) + eq_(t.number_of_colors, 0) + + +def test_number_of_colors_with_tty(): + """``number_of_colors`` should work.""" + t = TestTerminal() + eq_(t.number_of_colors, 256) + + +def test_formatting_functions(): + """Test crazy-ass formatting wrappers, both simple and compound.""" + t = TestTerminal() + # By now, it should be safe to use sugared attributes. Other tests test those. 
+ eq_(t.bold(u'hi'), t.bold + u'hi' + t.normal) + eq_(t.green('hi'), t.green + u'hi' + t.normal) # Plain strs for Python 2.x + # Test some non-ASCII chars, probably not necessary: + eq_(t.bold_green(u'boö'), t.bold + t.green + u'boö' + t.normal) + eq_(t.bold_underline_green_on_red('boo'), + t.bold + t.underline + t.green + t.on_red + u'boo' + t.normal) + # Don't spell things like this: + eq_(t.on_bright_red_bold_bright_green_underline('meh'), + t.on_bright_red + t.bold + t.bright_green + t.underline + u'meh' + t.normal) + + +def test_formatting_functions_without_tty(): + """Test crazy-ass formatting wrappers when there's no tty.""" + t = TestTerminal(stream=StringIO()) + eq_(t.bold(u'hi'), u'hi') + eq_(t.green('hi'), u'hi') + # Test non-ASCII chars, no longer really necessary: + eq_(t.bold_green(u'boö'), u'boö') + eq_(t.bold_underline_green_on_red('loo'), u'loo') + eq_(t.on_bright_red_bold_bright_green_underline('meh'), u'meh') + + +def test_nice_formatting_errors(): + """Make sure you get nice hints if you misspell a formatting wrapper.""" + t = TestTerminal() + try: + t.bold_misspelled('hey') + except TypeError, e: + assert 'probably misspelled' in e.args[0] + + try: + t.bold_misspelled(u'hey') # unicode + except TypeError, e: + assert 'probably misspelled' in e.args[0] + + try: + t.bold_misspelled(None) # an arbitrary non-string + except TypeError, e: + assert 'probably misspelled' not in e.args[0] + + try: + t.bold_misspelled('a', 'b') # >1 string arg + except TypeError, e: + assert 'probably misspelled' not in e.args[0] + + +def test_init_descriptor_always_initted(): + """We should be able to get a height and width even on no-tty Terminals.""" + t = Terminal(stream=StringIO()) + eq_(type(t.height), int) diff --git a/python/blessings/setup.cfg b/python/blessings/setup.cfg new file mode 100644 index 000000000..861a9f554 --- /dev/null +++ b/python/blessings/setup.cfg @@ -0,0 +1,5 @@ +[egg_info] +tag_build = +tag_date = 0 +tag_svn_revision = 0 + diff --git 
a/python/blessings/setup.py b/python/blessings/setup.py new file mode 100644 index 000000000..6af55452d --- /dev/null +++ b/python/blessings/setup.py @@ -0,0 +1,42 @@ +import sys + +from setuptools import setup, find_packages + + +extra_setup = {} +if sys.version_info >= (3,): + extra_setup['use_2to3'] = True + +setup( + name='blessings', + version='1.3', + description='A thin, practical wrapper around terminal formatting, positioning, and more', + long_description=open('README.rst').read(), + author='Erik Rose', + author_email='erikrose@grinchcentral.com', + license='MIT', + packages=find_packages(exclude=['ez_setup']), + tests_require=['Nose'], + url='https://github.com/erikrose/blessings', + include_package_data=True, + classifiers=[ + 'Intended Audience :: Developers', + 'Natural Language :: English', + 'Development Status :: 5 - Production/Stable', + 'Environment :: Console', + 'Environment :: Console :: Curses', + 'License :: OSI Approved :: MIT License', + 'Operating System :: POSIX', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.5', + 'Programming Language :: Python :: 2.6', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.2', + 'Topic :: Software Development :: Libraries', + 'Topic :: Software Development :: User Interfaces', + 'Topic :: Terminals' + ], + keywords=['terminal', 'tty', 'curses', 'ncurses', 'formatting', 'style', 'color', 'console'], + **extra_setup +) diff --git a/python/blessings/tox.ini b/python/blessings/tox.ini new file mode 100644 index 000000000..e1753f261 --- /dev/null +++ b/python/blessings/tox.ini @@ -0,0 +1,7 @@ +[tox] +envlist = py25, py26, py27, py32 + +[testenv] +commands = nosetests blessings +deps = nose +changedir = .tox # So Python 3 runs don't pick up incompatible, un-2to3'd source from the cwd diff --git a/python/compare-locales/compare_locales/__init__.py b/python/compare-locales/compare_locales/__init__.py new file 
mode 100644 index 000000000..bad265e4f --- /dev/null +++ b/python/compare-locales/compare_locales/__init__.py @@ -0,0 +1 @@ +version = "1.1" diff --git a/python/compare-locales/compare_locales/checks.py b/python/compare-locales/compare_locales/checks.py new file mode 100644 index 000000000..ee3bef03d --- /dev/null +++ b/python/compare-locales/compare_locales/checks.py @@ -0,0 +1,438 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import re +from difflib import SequenceMatcher +from xml import sax +try: + from cStringIO import StringIO +except ImportError: + from StringIO import StringIO + +from compare_locales.parser import DTDParser, PropertiesParser + + +class Checker(object): + '''Abstract class to implement checks per file type. + ''' + pattern = None + + @classmethod + def use(cls, file): + return cls.pattern.match(file.file) + + def check(self, refEnt, l10nEnt): + '''Given the reference and localized Entities, performs checks. + + This is a generator yielding tuples of + - "warning" or "error", depending on what should be reported, + - tuple of line, column info for the error within the string + - description string to be shown in the report + ''' + if True: + raise NotImplementedError("Need to subclass") + yield ("error", (0, 0), "This is an example error", "example") + + +class PrintfException(Exception): + def __init__(self, msg, pos): + self.pos = pos + self.msg = msg + + +class PropertiesChecker(Checker): + '''Tests to run on .properties files. + ''' + pattern = re.compile('.*\.properties$') + printf = re.compile(r'%(?P%|' + r'(?:(?P[1-9][0-9]*)\$)?' + r'(?P\*|[0-9]+)?' + r'(?P\.(?:\*|[0-9]+)?)?' + r'(?P[duxXosScpfg]))?') + + def check(self, refEnt, l10nEnt): + '''Test for the different variable formats. 
+ ''' + refValue, l10nValue = refEnt.val, l10nEnt.val + refSpecs = None + # check for PluralForm.jsm stuff, should have the docs in the + # comment + if 'Localization_and_Plurals' in refEnt.pre_comment: + # For plurals, common variable pattern is #1. Try that. + pats = set(int(m.group(1)) for m in re.finditer('#([0-9]+)', + refValue)) + if len(pats) == 0: + return + lpats = set(int(m.group(1)) for m in re.finditer('#([0-9]+)', + l10nValue)) + if pats - lpats: + yield ('warning', 0, 'not all variables used in l10n', + 'plural') + return + if lpats - pats: + yield ('error', 0, 'unreplaced variables in l10n', + 'plural') + return + return + # check for lost escapes + raw_val = l10nEnt.raw_val + for m in PropertiesParser.escape.finditer(raw_val): + if m.group('single') and \ + m.group('single') not in PropertiesParser.known_escapes: + yield ('warning', m.start(), + 'unknown escape sequence, \\' + m.group('single'), + 'escape') + try: + refSpecs = self.getPrintfSpecs(refValue) + except PrintfException: + refSpecs = [] + if refSpecs: + for t in self.checkPrintf(refSpecs, l10nValue): + yield t + return + + def checkPrintf(self, refSpecs, l10nValue): + try: + l10nSpecs = self.getPrintfSpecs(l10nValue) + except PrintfException, e: + yield ('error', e.pos, e.msg, 'printf') + return + if refSpecs != l10nSpecs: + sm = SequenceMatcher() + sm.set_seqs(refSpecs, l10nSpecs) + msgs = [] + warn = None + for action, i1, i2, j1, j2 in sm.get_opcodes(): + if action == 'equal': + continue + if action == 'delete': + # missing argument in l10n + if i2 == len(refSpecs): + # trailing specs missing, that's just a warning + warn = ', '.join('trailing argument %d `%s` missing' % + (i+1, refSpecs[i]) + for i in xrange(i1, i2)) + else: + for i in xrange(i1, i2): + msgs.append('argument %d `%s` missing' % + (i+1, refSpecs[i])) + continue + if action == 'insert': + # obsolete argument in l10n + for i in xrange(j1, j2): + msgs.append('argument %d `%s` obsolete' % + (i+1, l10nSpecs[i])) + continue + 
if action == 'replace': + for i, j in zip(xrange(i1, i2), xrange(j1, j2)): + msgs.append('argument %d `%s` should be `%s`' % + (j+1, l10nSpecs[j], refSpecs[i])) + if msgs: + yield ('error', 0, ', '.join(msgs), 'printf') + if warn is not None: + yield ('warning', 0, warn, 'printf') + + def getPrintfSpecs(self, val): + hasNumber = False + specs = [] + for m in self.printf.finditer(val): + if m.group("good") is None: + # found just a '%', signal an error + raise PrintfException('Found single %', m.start()) + if m.group("good") == '%': + # escaped % + continue + if ((hasNumber and m.group('number') is None) or + (not hasNumber and specs and + m.group('number') is not None)): + # mixed style, numbered and not + raise PrintfException('Mixed ordered and non-ordered args', + m.start()) + hasNumber = m.group('number') is not None + if hasNumber: + pos = int(m.group('number')) - 1 + ls = len(specs) + if pos >= ls: + # pad specs + nones = pos - ls + specs[ls:pos] = nones*[None] + specs.append(m.group('spec')) + else: + if specs[pos] is not None: + raise PrintfException('Double ordered argument %d' % + (pos+1), + m.start()) + specs[pos] = m.group('spec') + else: + specs.append(m.group('spec')) + # check for missing args + if hasNumber and not all(specs): + raise PrintfException('Ordered argument missing', 0) + return specs + + +class DTDChecker(Checker): + """Tests to run on DTD files. + + Uses xml.sax for the heavy lifting of xml parsing. + + The code tries to parse until it doesn't find any unresolved entities + anymore. If it finds one, it tries to grab the key, and adds an empty + definition to the header. + + Also checks for some CSS and number heuristics in the values. 
+ """ + pattern = re.compile('.*\.dtd$') + + eref = re.compile('&(%s);' % DTDParser.Name) + tmpl = ''' +%s +''' + xmllist = set(('amp', 'lt', 'gt', 'apos', 'quot')) + + def __init__(self, reference): + self.reference = reference + self.__known_entities = None + + def known_entities(self, refValue): + if self.__known_entities is None and self.reference is not None: + self.__known_entities = set() + for ent in self.reference: + self.__known_entities.update(self.entities_for_value(ent.val)) + return self.__known_entities if self.__known_entities is not None \ + else self.entities_for_value(refValue) + + def entities_for_value(self, value): + reflist = set(m.group(1).encode('utf-8') + for m in self.eref.finditer(value)) + reflist -= self.xmllist + return reflist + + # Setup for XML parser, with default and text-only content handler + class TextContent(sax.handler.ContentHandler): + textcontent = '' + + def characters(self, content): + self.textcontent += content + + defaulthandler = sax.handler.ContentHandler() + texthandler = TextContent() + + numPattern = r'([0-9]+|[0-9]*\.[0-9]+)' + num = re.compile('^%s$' % numPattern) + lengthPattern = '%s(em|px|ch|cm|in)' % numPattern + length = re.compile('^%s$' % lengthPattern) + spec = re.compile(r'((?:min\-)?(?:width|height))\s*:\s*%s' % + lengthPattern) + style = re.compile(r'^%(spec)s\s*(;\s*%(spec)s\s*)*;?$' % + {'spec': spec.pattern}) + + processContent = None + + def check(self, refEnt, l10nEnt): + """Try to parse the refvalue inside a dummy element, and keep + track of entities that we need to define to make that work. + + Return a checker that offers just those entities. + """ + refValue, l10nValue = refEnt.val, l10nEnt.val + # find entities the refValue references, + # reusing markup from DTDParser. 
+ reflist = self.known_entities(refValue) + inContext = self.entities_for_value(refValue) + entities = ''.join('' % s for s in sorted(reflist)) + parser = sax.make_parser() + parser.setFeature(sax.handler.feature_external_ges, False) + + parser.setContentHandler(self.defaulthandler) + try: + parser.parse(StringIO(self.tmpl % + (entities, refValue.encode('utf-8')))) + # also catch stray % + parser.parse(StringIO(self.tmpl % + (refEnt.all.encode('utf-8') + entities, + '&%s;' % refEnt.key.encode('utf-8')))) + except sax.SAXParseException, e: + yield ('warning', + (0, 0), + "can't parse en-US value", 'xmlparse') + + # find entities the l10nValue references, + # reusing markup from DTDParser. + l10nlist = self.entities_for_value(l10nValue) + missing = sorted(l10nlist - reflist) + _entities = entities + ''.join('' % s for s in missing) + if self.processContent is not None: + self.texthandler.textcontent = '' + parser.setContentHandler(self.texthandler) + try: + parser.parse(StringIO(self.tmpl % (_entities, + l10nValue.encode('utf-8')))) + # also catch stray % + # if this fails, we need to substract the entity definition + parser.setContentHandler(self.defaulthandler) + parser.parse(StringIO(self.tmpl % ( + l10nEnt.all.encode('utf-8') + _entities, + '&%s;' % l10nEnt.key.encode('utf-8')))) + except sax.SAXParseException, e: + # xml parse error, yield error + # sometimes, the error is reported on our fake closing + # element, make that the end of the last line + lnr = e.getLineNumber() - 1 + lines = l10nValue.splitlines() + if lnr > len(lines): + lnr = len(lines) + col = len(lines[lnr-1]) + else: + col = e.getColumnNumber() + if lnr == 1: + # first line starts with , substract + col -= len("") + elif lnr == 0: + col -= len("[\"']).*(?P=q)$") + + def unicode_escape(self, str): + """Helper method to try to decode all unicode escapes in a string. + + This code uses the standard python decode for unicode-escape, but + that's somewhat tricky, as its input needs to be ascii. 
To get to + ascii, the unicode string gets converted to ascii with + backslashreplace, i.e., all non-ascii unicode chars get unicode + escaped. And then we try to roll all of that back. + Now, when that hits an error, that's from the original string, and we + need to search for the actual error position in the original string, + as the backslashreplace code changes string positions quite badly. + See also the last check in TestAndroid.test_android_dtd, with a + lengthy chinese string. + """ + val = str.encode('ascii', 'backslashreplace') + try: + val.decode('unicode-escape') + except UnicodeDecodeError, e: + args = list(e.args) + badstring = args[1][args[2]:args[3]] + i = len(args[1][:args[2]].decode('unicode-escape')) + args[2] = i + args[3] = i + len(badstring) + raise UnicodeDecodeError(*args) + + @classmethod + def use(cls, file): + """Use this Checker only for DTD files in embedding/android.""" + return (file.module in ("embedding/android", + "mobile/android/base") and + cls.pattern.match(file.file)) + + def processContent(self, val): + """Actual check code. + Check for unicode escapes and unescaped quotes and apostrophes, + if string's not quoted. + """ + # first, try to decode unicode escapes + try: + self.unicode_escape(val) + except UnicodeDecodeError, e: + yield ('error', e.args[2], e.args[4], 'android') + # check for unescaped single or double quotes. + # first, see if the complete string is single or double quoted, + # that changes the rules + m = self.quoted.match(val) + if m: + q = m.group('q') + offset = 0 + val = val[1:-1] # strip quotes + else: + q = "[\"']" + offset = -1 + stray_quot = re.compile(r"[\\\\]*(%s)" % q) + + for m in stray_quot.finditer(val): + if len(m.group(0)) % 2: + # found an unescaped single or double quote, which message? + if m.group(1) == '"': + msg = u"Quotes in Android DTDs need escaping with \\\" "\ + u"or \\u0022, or put string in apostrophes." 
+ else: + msg = u"Apostrophes in Android DTDs need escaping with "\ + u"\\' or \\u0027, or use \u2019, or put string in "\ + u"quotes." + yield ('error', m.end(0)+offset, msg, 'android') + + +def getChecker(file, reference=None): + if PropertiesChecker.use(file): + return PropertiesChecker() + if PrincessAndroid.use(file): + return PrincessAndroid(reference) + if DTDChecker.use(file): + return DTDChecker(reference) + return None diff --git a/python/compare-locales/compare_locales/commands.py b/python/compare-locales/compare_locales/commands.py new file mode 100644 index 000000000..61b58ec4b --- /dev/null +++ b/python/compare-locales/compare_locales/commands.py @@ -0,0 +1,154 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +'Commands exposed to commandlines' + +import logging +from optparse import OptionParser, make_option + +from compare_locales.paths import EnumerateApp +from compare_locales.compare import compareApp, compareDirs +from compare_locales.webapps import compare_web_app + + +class BaseCommand(object): + """Base class for compare-locales commands. + This handles command line parsing, and general sugar for setuptools + entry_points. + """ + options = [ + make_option('-v', '--verbose', action='count', dest='v', default=0, + help='Make more noise'), + make_option('-q', '--quiet', action='count', dest='q', default=0, + help='Make less noise'), + make_option('-m', '--merge', + help='''Use this directory to stage merged files, +use {ab_CD} to specify a different directory for each locale'''), + ] + data_option = make_option('--data', choices=['text', 'exhibit', 'json'], + default='text', + help='''Choose data and format (one of text, +exhibit, json); text: (default) Show which files miss which strings, together +with warnings and errors. 
Also prints a summary; json: Serialize the internal +tree, useful for tools. Also always succeeds; exhibit: Serialize the summary +data in a json useful for Exhibit +''') + + def __init__(self): + self.parser = None + + def get_parser(self): + """Get an OptionParser, with class docstring as usage, and + self.options. + """ + parser = OptionParser() + parser.set_usage(self.__doc__) + for option in self.options: + parser.add_option(option) + return parser + + @classmethod + def call(cls): + """Entry_point for setuptools. + The actual command handling is done in the handle() method of the + subclasses. + """ + cmd = cls() + cmd.handle_() + + def handle_(self): + """The instance part of the classmethod call.""" + self.parser = self.get_parser() + (options, args) = self.parser.parse_args() + # log as verbose or quiet as we want, warn by default + logging.basicConfig() + logging.getLogger().setLevel(logging.WARNING - + (options.v - options.q)*10) + observer = self.handle(args, options) + print observer.serialize(type=options.data).encode('utf-8', 'replace') + + def handle(self, args, options): + """Subclasses need to implement this method for the actual + command handling. + """ + raise NotImplementedError + + +class CompareLocales(BaseCommand): + """usage: %prog [options] l10n.ini l10n_base_dir [locale ...] + +Check the localization status of a gecko application. +The first argument is a path to the l10n.ini file for the application, +followed by the base directory of the localization repositories. +Then you pass in the list of locale codes you want to compare. If there are +not locales given, the list of locales will be taken from the all-locales file +of the application\'s l10n.ini.""" + + options = BaseCommand.options + [ + make_option('--clobber-merge', action="store_true", default=False, + dest='clobber', + help="""WARNING: DATALOSS. +Use this option with care. If specified, the merge directory will +be clobbered for each module. 
That means, the subdirectory will
+be completely removed, any files that were there are lost.
+Be careful to specify the right merge directory when using this option."""),
+        make_option('-r', '--reference', default='en-US', dest='reference',
+                    help='Explicitly set the reference '
+                    'localization. [default: en-US]'),
+        BaseCommand.data_option
+    ]
+
+    def handle(self, args, options):
+        if len(args) < 2:
+            self.parser.error('Need to pass in list of languages')
+        inipath, l10nbase = args[:2]
+        locales = args[2:]
+        app = EnumerateApp(inipath, l10nbase, locales)
+        app.reference = options.reference
+        try:
+            observer = compareApp(app, merge_stage=options.merge,
+                                  clobber=options.clobber)
+        except (OSError, IOError), exc:
+            print "FAIL: " + str(exc)
+            self.parser.exit(2)
+        return observer
+
+
+class CompareDirs(BaseCommand):
+    """usage: %prog [options] reference localization
+
+Check the localization status of a directory tree.
+The first argument is a path to the reference data, the second is the
+localization to be tested."""
+
+    options = BaseCommand.options + [
+        BaseCommand.data_option
+    ]
+
+    def handle(self, args, options):
+        if len(args) != 2:
+            self.parser.error('Reference and localization required')
+        reference, locale = args
+        observer = compareDirs(reference, locale, merge_stage=options.merge)
+        return observer
+
+
+class CompareWebApp(BaseCommand):
+    """usage: %prog [options] webapp [locale locale]
+
+Check the localization status of a gaia-style web app.
+The first argument is the directory of the web app.
+Following arguments explicitly state the locales to test.
+If none are given, test all locales in manifest.webapp or files.""" + + options = BaseCommand.options[:-1] + [ + BaseCommand.data_option] + + def handle(self, args, options): + if len(args) < 1: + self.parser.error('Webapp directory required') + basedir = args[0] + locales = args[1:] + observer = compare_web_app(basedir, locales) + return observer diff --git a/python/compare-locales/compare_locales/compare.py b/python/compare-locales/compare_locales/compare.py new file mode 100644 index 000000000..4f71c46f8 --- /dev/null +++ b/python/compare-locales/compare_locales/compare.py @@ -0,0 +1,638 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +'Mozilla l10n compare locales tool' + +import codecs +import os +import os.path +import shutil +import re +from difflib import SequenceMatcher +from collections import defaultdict + +try: + from json import dumps +except: + from simplejson import dumps + +from compare_locales import parser +from compare_locales import paths +from compare_locales.checks import getChecker + + +class Tree(object): + def __init__(self, valuetype): + self.branches = dict() + self.valuetype = valuetype + self.value = None + + def __getitem__(self, leaf): + parts = [] + if isinstance(leaf, paths.File): + parts = [p for p in [leaf.locale, leaf.module] if p] + \ + leaf.file.split('/') + else: + parts = leaf.split('/') + return self.__get(parts) + + def __get(self, parts): + common = None + old = None + new = tuple(parts) + t = self + for k, v in self.branches.iteritems(): + for i, part in enumerate(zip(k, parts)): + if part[0] != part[1]: + i -= 1 + break + if i < 0: + continue + i += 1 + common = tuple(k[:i]) + old = tuple(k[i:]) + new = tuple(parts[i:]) + break + if old: + self.branches.pop(k) + t = Tree(self.valuetype) + t.branches[old] = v + self.branches[common] = t + elif common: + t = 
self.branches[common] + if new: + if common: + return t.__get(new) + t2 = t + t = Tree(self.valuetype) + t2.branches[new] = t + if t.value is None: + t.value = t.valuetype() + return t.value + + indent = ' ' + + def getContent(self, depth=0): + ''' + Returns iterator of (depth, flag, key_or_value) tuples. + If flag is 'value', key_or_value is a value object, otherwise + (flag is 'key') it's a key string. + ''' + keys = self.branches.keys() + keys.sort() + if self.value is not None: + yield (depth, 'value', self.value) + for key in keys: + yield (depth, 'key', key) + for child in self.branches[key].getContent(depth + 1): + yield child + + def toJSON(self): + ''' + Returns this Tree as a JSON-able tree of hashes. + Only the values need to take care that they're JSON-able. + ''' + json = {} + keys = self.branches.keys() + keys.sort() + if self.value is not None: + json['value'] = self.value + children = [('/'.join(key), self.branches[key].toJSON()) + for key in keys] + if children: + json['children'] = children + return json + + def getStrRows(self): + def tostr(t): + if t[1] == 'key': + return self.indent * t[0] + '/'.join(t[2]) + return self.indent * (t[0] + 1) + str(t[2]) + + return map(tostr, self.getContent()) + + def __str__(self): + return '\n'.join(self.getStrRows()) + + +class AddRemove(SequenceMatcher): + def __init__(self): + SequenceMatcher.__init__(self, None, None, None) + + def set_left(self, left): + if not isinstance(left, list): + left = [l for l in left] + self.set_seq1(left) + + def set_right(self, right): + if not isinstance(right, list): + right = [l for l in right] + self.set_seq2(right) + + def __iter__(self): + for tag, i1, i2, j1, j2 in self.get_opcodes(): + if tag == 'equal': + for pair in zip(self.a[i1:i2], self.b[j1:j2]): + yield ('equal', pair) + elif tag == 'delete': + for item in self.a[i1:i2]: + yield ('delete', item) + elif tag == 'insert': + for item in self.b[j1:j2]: + yield ('add', item) + else: + # tag == 'replace' + for item in 
self.a[i1:i2]: + yield ('delete', item) + for item in self.b[j1:j2]: + yield ('add', item) + + +class DirectoryCompare(SequenceMatcher): + def __init__(self, reference): + SequenceMatcher.__init__(self, None, [i for i in reference], + []) + self.watcher = None + + def setWatcher(self, watcher): + self.watcher = watcher + + def compareWith(self, other): + if not self.watcher: + return + self.set_seq2([i for i in other]) + for tag, i1, i2, j1, j2 in self.get_opcodes(): + if tag == 'equal': + for i, j in zip(xrange(i1, i2), xrange(j1, j2)): + self.watcher.compare(self.a[i], self.b[j]) + elif tag == 'delete': + for i in xrange(i1, i2): + self.watcher.add(self.a[i], other.cloneFile(self.a[i])) + elif tag == 'insert': + for j in xrange(j1, j2): + self.watcher.remove(self.b[j]) + else: + for j in xrange(j1, j2): + self.watcher.remove(self.b[j]) + for i in xrange(i1, i2): + self.watcher.add(self.a[i], other.cloneFile(self.a[i])) + + +class Observer(object): + stat_cats = ['missing', 'obsolete', 'missingInFiles', 'report', + 'changed', 'unchanged', 'keys'] + + def __init__(self): + class intdict(defaultdict): + def __init__(self): + defaultdict.__init__(self, int) + + self.summary = defaultdict(intdict) + self.details = Tree(dict) + self.filter = None + + # support pickling + def __getstate__(self): + return dict(summary=self.getSummary(), details=self.details) + + def __setstate__(self, state): + class intdict(defaultdict): + def __init__(self): + defaultdict.__init__(self, int) + + self.summary = defaultdict(intdict) + if 'summary' in state: + for loc, stats in state['summary'].iteritems(): + self.summary[loc].update(stats) + self.details = state['details'] + self.filter = None + + def getSummary(self): + plaindict = {} + for k, v in self.summary.iteritems(): + plaindict[k] = dict(v) + return plaindict + + def toJSON(self): + return dict(summary=self.getSummary(), details=self.details.toJSON()) + + def notify(self, category, file, data): + rv = "error" + if category in 
self.stat_cats: + # these get called post reporting just for stats + # return "error" to forward them to other other_observers + self.summary[file.locale][category] += data + # keep track of how many strings are in a missing file + # we got the {'missingFile': 'error'} from the first pass + if category == 'missingInFiles': + self.details[file]['strings'] = data + return "error" + if category in ['missingFile', 'obsoleteFile']: + if self.filter is not None: + rv = self.filter(file) + if rv != "ignore": + self.details[file][category] = rv + return rv + if category in ['missingEntity', 'obsoleteEntity']: + if self.filter is not None: + rv = self.filter(file, data) + if rv == "ignore": + return rv + v = self.details[file] + try: + v[category].append(data) + except KeyError: + v[category] = [data] + return rv + if category == 'error': + try: + self.details[file][category].append(data) + except KeyError: + self.details[file][category] = [data] + self.summary[file.locale]['errors'] += 1 + elif category == 'warning': + try: + self.details[file][category].append(data) + except KeyError: + self.details[file][category] = [data] + self.summary[file.locale]['warnings'] += 1 + return rv + + def toExhibit(self): + items = [] + for locale in sorted(self.summary.iterkeys()): + summary = self.summary[locale] + if locale is not None: + item = {'id': 'xxx/' + locale, + 'label': locale, + 'locale': locale} + else: + item = {'id': 'xxx', + 'label': 'xxx', + 'locale': 'xxx'} + item['type'] = 'Build' + total = sum([summary[k] + for k in ('changed', 'unchanged', 'report', 'missing', + 'missingInFiles') + if k in summary]) + rate = (('changed' in summary and summary['changed'] * 100) or + 0) / total + item.update((k, summary.get(k, 0)) + for k in ('changed', 'unchanged')) + item.update((k, summary[k]) + for k in ('report', 'errors', 'warnings') + if k in summary) + item['missing'] = summary.get('missing', 0) + \ + summary.get('missingInFiles', 0) + item['completion'] = rate + item['total'] 
= total + result = 'success' + if item.get('warnings', 0): + result = 'warning' + if item.get('errors', 0) or item.get('missing', 0): + result = 'failure' + item['result'] = result + items.append(item) + data = { + "properties": dict.fromkeys( + ("completion", "errors", "warnings", "missing", "report", + "unchanged", "changed", "obsolete"), + {"valueType": "number"}), + "types": { + "Build": {"pluralLabel": "Builds"} + }} + data['items'] = items + return dumps(data, indent=2) + + def serialize(self, type="text"): + if type == "exhibit": + return self.toExhibit() + if type == "json": + return dumps(self.toJSON()) + + def tostr(t): + if t[1] == 'key': + return ' ' * t[0] + '/'.join(t[2]) + o = [] + indent = ' ' * (t[0] + 1) + if 'error' in t[2]: + o += [indent + 'ERROR: ' + e for e in t[2]['error']] + if 'warning' in t[2]: + o += [indent + 'WARNING: ' + e for e in t[2]['warning']] + if 'missingEntity' in t[2] or 'obsoleteEntity' in t[2]: + missingEntities = ('missingEntity' in t[2] and + t[2]['missingEntity']) or [] + obsoleteEntities = ('obsoleteEntity' in t[2] and + t[2]['obsoleteEntity']) or [] + entities = missingEntities + obsoleteEntities + entities.sort() + for entity in entities: + op = '+' + if entity in obsoleteEntities: + op = '-' + o.append(indent + op + entity) + elif 'missingFile' in t[2]: + o.append(indent + '// add and localize this file') + elif 'obsoleteFile' in t[2]: + o.append(indent + '// remove this file') + return '\n'.join(o) + + out = [] + for locale, summary in sorted(self.summary.iteritems()): + if locale is not None: + out.append(locale + ':') + out += [k + ': ' + str(v) for k, v in sorted(summary.iteritems())] + total = sum([summary[k] + for k in ['changed', 'unchanged', 'report', 'missing', + 'missingInFiles'] + if k in summary]) + rate = 0 + if total: + rate = (('changed' in summary and summary['changed'] * 100) or + 0) / total + out.append('%d%% of entries changed' % rate) + return '\n'.join(map(tostr, self.details.getContent()) + out) 
+ + def __str__(self): + return 'observer' + + +class ContentComparer: + keyRE = re.compile('[kK]ey') + nl = re.compile('\n', re.M) + + def __init__(self): + '''Create a ContentComparer. + observer is usually a instance of Observer. The return values + of the notify method are used to control the handling of missing + entities. + ''' + self.reference = dict() + self.observer = Observer() + self.other_observers = [] + self.merge_stage = None + + def add_observer(self, obs): + '''Add a non-filtering observer. + Results from the notify calls are ignored. + ''' + self.other_observers.append(obs) + + def set_merge_stage(self, merge_stage): + self.merge_stage = merge_stage + + def merge(self, ref_entities, ref_map, ref_file, l10n_file, missing, + skips, p): + outfile = os.path.join(self.merge_stage, l10n_file.module, + l10n_file.file) + outdir = os.path.dirname(outfile) + if not os.path.isdir(outdir): + os.makedirs(outdir) + if not p.canMerge: + shutil.copyfile(ref_file.fullpath, outfile) + print "copied reference to " + outfile + return + if skips: + # skips come in ordered by key name, we need them in file order + skips.sort(key=lambda s: s.span[0]) + trailing = (['\n'] + + [ref_entities[ref_map[key]].all for key in missing] + + [ref_entities[ref_map[skip.key]].all for skip in skips + if not isinstance(skip, parser.Junk)]) + if skips: + # we need to skip a few errornous blocks in the input, copy by hand + f = codecs.open(outfile, 'wb', p.encoding) + offset = 0 + for skip in skips: + chunk = skip.span + f.write(p.contents[offset:chunk[0]]) + offset = chunk[1] + f.write(p.contents[offset:]) + else: + shutil.copyfile(l10n_file.fullpath, outfile) + f = codecs.open(outfile, 'ab', p.encoding) + print "adding to " + outfile + + def ensureNewline(s): + if not s.endswith('\n'): + return s + '\n' + return s + + f.write(''.join(map(ensureNewline, trailing))) + f.close() + + def notify(self, category, file, data): + """Check observer for the found data, and if it's + not to 
ignore, notify other_observers. + """ + rv = self.observer.notify(category, file, data) + if rv == 'ignore': + return rv + for obs in self.other_observers: + # non-filtering other_observers, ignore results + obs.notify(category, file, data) + return rv + + def remove(self, obsolete): + self.notify('obsoleteFile', obsolete, None) + pass + + def compare(self, ref_file, l10n): + try: + p = parser.getParser(ref_file.file) + except UserWarning: + # no comparison, XXX report? + return + if ref_file not in self.reference: + # we didn't parse this before + try: + p.readContents(ref_file.getContents()) + except Exception, e: + self.notify('error', ref_file, str(e)) + return + self.reference[ref_file] = p.parse() + ref = self.reference[ref_file] + ref_list = ref[1].keys() + ref_list.sort() + try: + p.readContents(l10n.getContents()) + l10n_entities, l10n_map = p.parse() + except Exception, e: + self.notify('error', l10n, str(e)) + return + lines = [] + + def _getLine(offset): + if not lines: + lines.append(0) + for m in self.nl.finditer(p.contents): + lines.append(m.end()) + for i in xrange(len(lines), 0, -1): + if offset >= lines[i - 1]: + return (i, offset - lines[i - 1]) + return (1, offset) + + l10n_list = l10n_map.keys() + l10n_list.sort() + ar = AddRemove() + ar.set_left(ref_list) + ar.set_right(l10n_list) + report = missing = obsolete = changed = unchanged = keys = 0 + missings = [] + skips = [] + checker = getChecker(l10n, reference=ref[0]) + for action, item_or_pair in ar: + if action == 'delete': + # missing entity + _rv = self.notify('missingEntity', l10n, item_or_pair) + if _rv == "ignore": + continue + if _rv == "error": + # only add to missing entities for l10n-merge on error, + # not report + missings.append(item_or_pair) + missing += 1 + else: + # just report + report += 1 + elif action == 'add': + # obsolete entity or junk + if isinstance(l10n_entities[l10n_map[item_or_pair]], + parser.Junk): + junk = l10n_entities[l10n_map[item_or_pair]] + params = 
(junk.val,) + junk.span + self.notify('error', l10n, + 'Unparsed content "%s" at %d-%d' % params) + if self.merge_stage is not None: + skips.append(junk) + elif self.notify('obsoleteEntity', l10n, + item_or_pair) != 'ignore': + obsolete += 1 + else: + # entity found in both ref and l10n, check for changed + entity = item_or_pair[0] + refent = ref[0][ref[1][entity]] + l10nent = l10n_entities[l10n_map[entity]] + if self.keyRE.search(entity): + keys += 1 + else: + if refent.val == l10nent.val: + self.doUnchanged(l10nent) + unchanged += 1 + else: + self.doChanged(ref_file, refent, l10nent) + changed += 1 + # run checks: + if checker: + for tp, pos, msg, cat in checker.check(refent, l10nent): + # compute real src position, if first line, + # col needs adjustment + _l, _offset = _getLine(l10nent.val_span[0]) + if isinstance(pos, tuple): + # line, column + if pos[0] == 1: + col = pos[1] + _offset + else: + col = pos[1] + _l += pos[0] - 1 + else: + _l, col = _getLine(l10nent.val_span[0] + pos) + # skip error entities when merging + if tp == 'error' and self.merge_stage is not None: + skips.append(l10nent) + self.notify(tp, l10n, + u"%s at line %d, column %d for %s" % + (msg, _l, col, refent.key)) + pass + if missing: + self.notify('missing', l10n, missing) + if self.merge_stage is not None and (missings or skips): + self.merge(ref[0], ref[1], ref_file, l10n, missings, skips, p) + if report: + self.notify('report', l10n, report) + if obsolete: + self.notify('obsolete', l10n, obsolete) + if changed: + self.notify('changed', l10n, changed) + if unchanged: + self.notify('unchanged', l10n, unchanged) + if keys: + self.notify('keys', l10n, keys) + pass + + def add(self, orig, missing): + if self.notify('missingFile', missing, None) == "ignore": + # filter said that we don't need this file, don't count it + return + f = orig + try: + p = parser.getParser(f.file) + except UserWarning: + return + try: + p.readContents(f.getContents()) + entities, map = p.parse() + except Exception, 
e: + self.notify('error', f, str(e)) + return + self.notify('missingInFiles', missing, len(map)) + + def doUnchanged(self, entity): + # overload this if needed + pass + + def doChanged(self, file, ref_entity, l10n_entity): + # overload this if needed + pass + + +def compareApp(app, other_observer=None, merge_stage=None, clobber=False): + '''Compare locales set in app. + + Optional arguments are: + - other_observer. A object implementing + notify(category, _file, data) + The return values of that callback are ignored. + - merge_stage. A directory to be used for staging the output of + l10n-merge. + - clobber. Clobber the module subdirectories of the merge dir as we go. + Use wisely, as it might cause data loss. + ''' + comparer = ContentComparer() + if other_observer is not None: + comparer.add_observer(other_observer) + comparer.observer.filter = app.filter + for module, reference, locales in app: + dir_comp = DirectoryCompare(reference) + dir_comp.setWatcher(comparer) + for _, localization in locales: + if merge_stage is not None: + locale_merge = merge_stage.format(ab_CD=localization.locale) + comparer.set_merge_stage(locale_merge) + if clobber: + # if clobber, remove the stage for the module if it exists + clobberdir = os.path.join(locale_merge, module) + if os.path.exists(clobberdir): + shutil.rmtree(clobberdir) + print "clobbered " + clobberdir + dir_comp.compareWith(localization) + return comparer.observer + + +def compareDirs(reference, locale, other_observer=None, merge_stage=None): + '''Compare reference and locale dir. + + Optional arguments are: + - other_observer. A object implementing + notify(category, _file, data) + The return values of that callback are ignored. 
+ ''' + comparer = ContentComparer() + if other_observer is not None: + comparer.add_observer(other_observer) + comparer.set_merge_stage(merge_stage) + dir_comp = DirectoryCompare(paths.EnumerateDir(reference)) + dir_comp.setWatcher(comparer) + dir_comp.compareWith(paths.EnumerateDir(locale)) + return comparer.observer diff --git a/python/compare-locales/compare_locales/parser.py b/python/compare-locales/compare_locales/parser.py new file mode 100644 index 000000000..a97cf201b --- /dev/null +++ b/python/compare-locales/compare_locales/parser.py @@ -0,0 +1,521 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import re +import codecs +import logging +from HTMLParser import HTMLParser + +__constructors = [] + + +class Entity(object): + ''' + Abstraction layer for a localizable entity. + Currently supported are grammars of the form: + + 1: pre white space + 2: pre comments + 3: entity definition + 4: entity key (name) + 5: entity value + 6: post comment (and white space) in the same line (dtd only) + <--[1] + <--[2] + + + <-------[3]---------><------[6]------> + ''' + def __init__(self, contents, pp, + span, pre_ws_span, pre_comment_span, def_span, + key_span, val_span, post_span): + self.contents = contents + self.span = span + self.pre_ws_span = pre_ws_span + self.pre_comment_span = pre_comment_span + self.def_span = def_span + self.key_span = key_span + self.val_span = val_span + self.post_span = post_span + self.pp = pp + pass + + # getter helpers + + def get_all(self): + return self.contents[self.span[0]:self.span[1]] + + def get_pre_ws(self): + return self.contents[self.pre_ws_span[0]:self.pre_ws_span[1]] + + def get_pre_comment(self): + return self.contents[self.pre_comment_span[0]: + self.pre_comment_span[1]] + + def get_def(self): + return self.contents[self.def_span[0]:self.def_span[1]] + + def 
get_key(self): + return self.contents[self.key_span[0]:self.key_span[1]] + + def get_val(self): + return self.pp(self.contents[self.val_span[0]:self.val_span[1]]) + + def get_raw_val(self): + return self.contents[self.val_span[0]:self.val_span[1]] + + def get_post(self): + return self.contents[self.post_span[0]:self.post_span[1]] + + # getters + + all = property(get_all) + pre_ws = property(get_pre_ws) + pre_comment = property(get_pre_comment) + definition = property(get_def) + key = property(get_key) + val = property(get_val) + raw_val = property(get_raw_val) + post = property(get_post) + + def __repr__(self): + return self.key + + +class Junk(object): + ''' + An almost-Entity, representing junk data that we didn't parse. + This way, we can signal bad content as stuff we don't understand. + And the either fix that, or report real bugs in localizations. + ''' + junkid = 0 + + def __init__(self, contents, span): + self.contents = contents + self.span = span + self.pre_ws = self.pre_comment = self.definition = self.post = '' + self.__class__.junkid += 1 + self.key = '_junk_%d_%d-%d' % (self.__class__.junkid, span[0], span[1]) + + # getter helpers + def get_all(self): + return self.contents[self.span[0]:self.span[1]] + + # getters + all = property(get_all) + val = property(get_all) + + def __repr__(self): + return self.key + + +class Parser: + canMerge = True + + def __init__(self): + if not hasattr(self, 'encoding'): + self.encoding = 'utf-8' + pass + + def readFile(self, file): + f = codecs.open(file, 'r', self.encoding) + try: + self.contents = f.read() + except UnicodeDecodeError, e: + (logging.getLogger('locales') + .error("Can't read file: " + file + '; ' + str(e))) + self.contents = u'' + f.close() + + def readContents(self, contents): + (self.contents, length) = codecs.getdecoder(self.encoding)(contents) + + def parse(self): + l = [] + m = {} + for e in self: + m[e.key] = len(l) + l.append(e) + return (l, m) + + def postProcessValue(self, val): + return val + 
+ def __iter__(self): + contents = self.contents + offset = 0 + self.header, offset = self.getHeader(contents, offset) + self.footer = '' + entity, offset = self.getEntity(contents, offset) + while entity: + yield entity + entity, offset = self.getEntity(contents, offset) + f = self.reFooter.match(contents, offset) + if f: + self.footer = f.group() + offset = f.end() + if len(contents) > offset: + yield Junk(contents, (offset, len(contents))) + pass + + def getHeader(self, contents, offset): + header = '' + h = self.reHeader.match(contents) + if h: + header = h.group() + offset = h.end() + return (header, offset) + + def getEntity(self, contents, offset): + m = self.reKey.match(contents, offset) + if m: + offset = m.end() + entity = self.createEntity(contents, m) + return (entity, offset) + # first check if footer has a non-empty match, + # 'cause then we don't find junk + m = self.reFooter.match(contents, offset) + if m and m.end() > offset: + return (None, offset) + m = self.reKey.search(contents, offset) + if m: + # we didn't match, but search, so there's junk between offset + # and start. 
We'll match() on the next turn + junkend = m.start() + return (Junk(contents, (offset, junkend)), junkend) + return (None, offset) + + def createEntity(self, contents, m): + return Entity(contents, self.postProcessValue, + *[m.span(i) for i in xrange(7)]) + + +def getParser(path): + for item in __constructors: + if re.search(item[0], path): + return item[1] + raise UserWarning("Cannot find Parser") + + +# Subgroups of the match will: +# 1: pre white space +# 2: pre comments +# 3: entity definition +# 4: entity key (name) +# 5: entity value +# 6: post comment (and white space) in the same line (dtd only) +# <--[1] +# <--[2] +# +# +# <-------[3]---------><------[6]------> + + +class DTDParser(Parser): + # http://www.w3.org/TR/2006/REC-xml11-20060816/#NT-NameStartChar + # ":" | [A-Z] | "_" | [a-z] | + # [#xC0-#xD6] | [#xD8-#xF6] | [#xF8-#x2FF] | [#x370-#x37D] | [#x37F-#x1FFF] + # | [#x200C-#x200D] | [#x2070-#x218F] | [#x2C00-#x2FEF] | + # [#x3001-#xD7FF] | [#xF900-#xFDCF] | [#xFDF0-#xFFFD] | + # [#x10000-#xEFFFF] + CharMinusDash = u'\x09\x0A\x0D\u0020-\u002C\u002E-\uD7FF\uE000-\uFFFD' + XmlComment = '' % CharMinusDash + NameStartChar = u':A-Z_a-z\xC0-\xD6\xD8-\xF6\xF8-\u02FF' + \ + u'\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F' + \ + u'\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD' + # + \U00010000-\U000EFFFF seems to be unsupported in python + + # NameChar ::= NameStartChar | "-" | "." | [0-9] | #xB7 | + # [#x0300-#x036F] | [#x203F-#x2040] + NameChar = NameStartChar + ur'\-\.0-9' + u'\xB7\u0300-\u036F\u203F-\u2040' + Name = '[' + NameStartChar + '][' + NameChar + ']*' + reKey = re.compile('(?:(?P
      \s*)(?P(?:' + XmlComment +
      +                       '\s*)*)(?P' + Name +
      +                       ')\s+(?P\"[^\"]*\"|\'[^\']*\'?)\s*>)'
      +                       '(?P[ \t]*(?:' + XmlComment + '\s*)*\n?)?)',
      +                       re.DOTALL)
    # Optional UTF-8 BOM, then an optional leading license block.
    # NOTE(review): these three patterns look damaged by markup-unaware
    # extraction -- the literal "<!-- ... -->" comment sub-patterns appear
    # stripped (an empty group like '(\s*)?' makes little sense as a DTD
    # header) -- confirm against upstream compare-locales before relying
    # on them.
    # add BOM to DTDs, details in bug 435002
    reHeader = re.compile(u'^\ufeff?'
                          u'(\s*)?', re.S)
    # Trailing whitespace (and, presumably, XML comments) at end of file.
    reFooter = re.compile('\s*(\s*)*$')
    # Parsed-entity (PE) use, e.g. "%foo;", with leading whitespace and
    # XML comments captured in groups parallel to reKey's.
    rePE = re.compile('(?:(\s*)((?:' + XmlComment + '\s*)*)'
                      '(\s*%' + Name +
                      ';)([ \t]*(?:' + XmlComment + '\s*)*\n?)?)')
      +
      +    def getEntity(self, contents, offset):
      +        '''
      +        Overload Parser.getEntity to special-case ParsedEntities.
      +        Just check for a parsed entity if that method claims junk.
      +
      +        
      +        %foo;
      +        '''
      +        entity, inneroffset = Parser.getEntity(self, contents, offset)
      +        if (entity and isinstance(entity, Junk)) or entity is None:
      +            m = self.rePE.match(contents, offset)
      +            if m:
      +                inneroffset = m.end()
      +                entity = Entity(contents, self.postProcessValue,
      +                                *[m.span(i) for i in xrange(7)])
      +        return (entity, inneroffset)
      +
      +    def createEntity(self, contents, m):
      +        valspan = m.span('val')
      +        valspan = (valspan[0]+1, valspan[1]-1)
      +        return Entity(contents, self.postProcessValue, m.span(),
      +                      m.span('pre'), m.span('precomment'),
      +                      m.span('entity'), m.span('key'), valspan,
      +                      m.span('post'))
      +
      +
      +class PropertiesParser(Parser):
      +    escape = re.compile(r'\\((?Pu[0-9a-fA-F]{1,4})|'
      +                        '(?P\n\s*)|(?P.))', re.M)
      +    known_escapes = {'n': '\n', 'r': '\r', 't': '\t', '\\': '\\'}
      +
      +    def __init__(self):
      +        self.reKey = re.compile('^(\s*)'
      +                                '((?:[#!].*?\n\s*)*)'
      +                                '([^#!\s\n][^=:\n]*?)\s*[:=][ \t]*', re.M)
      +        self.reHeader = re.compile('^\s*([#!].*\s*)+')
      +        self.reFooter = re.compile('\s*([#!].*\s*)*$')
      +        self._escapedEnd = re.compile(r'\\+$')
      +        self._trailingWS = re.compile(r'[ \t]*$')
      +        Parser.__init__(self)
      +
      +    def getHeader(self, contents, offset):
      +        header = ''
      +        h = self.reHeader.match(contents, offset)
      +        if h:
      +            candidate = h.group()
      +            if 'http://mozilla.org/MPL/2.0/' in candidate or \
      +                    'LICENSE BLOCK' in candidate:
      +                header = candidate
      +                offset = h.end()
      +        return (header, offset)
      +
      +    def getEntity(self, contents, offset):
      +        # overwritten to parse values line by line
      +        m = self.reKey.match(contents, offset)
      +        if m:
      +            offset = m.end()
      +            while True:
      +                endval = nextline = contents.find('\n', offset)
      +                if nextline == -1:
      +                    endval = offset = len(contents)
      +                    break
      +                # is newline escaped?
      +                _e = self._escapedEnd.search(contents, offset, nextline)
      +                offset = nextline + 1
      +                if _e is None:
      +                    break
      +                # backslashes at end of line, if 2*n, not escaped
      +                if len(_e.group()) % 2 == 0:
      +                    break
      +            # strip trailing whitespace
      +            ws = self._trailingWS.search(contents, m.end(), offset)
      +            if ws:
      +                endval -= ws.end() - ws.start()
      +            entity = Entity(contents, self.postProcessValue,
      +                            (m.start(), offset),   # full span
      +                            m.span(1),  # leading whitespan
      +                            m.span(2),  # leading comment span
      +                            (m.start(3), offset),   # entity def span
      +                            m.span(3),   # key span
      +                            (m.end(), endval),   # value span
      +                            (offset, offset))  # post comment span, empty
      +            return (entity, offset)
      +        m = self.reKey.search(contents, offset)
      +        if m:
      +            # we didn't match, but search, so there's junk between offset
      +            # and start. We'll match() on the next turn
      +            junkend = m.start()
      +            return (Junk(contents, (offset, junkend)), junkend)
      +        return (None, offset)
      +
      +    def postProcessValue(self, val):
      +
      +        def unescape(m):
      +            found = m.groupdict()
      +            if found['uni']:
      +                return unichr(int(found['uni'][1:], 16))
      +            if found['nl']:
      +                return ''
      +            return self.known_escapes.get(found['single'], found['single'])
      +        val = self.escape.sub(unescape, val)
      +        return val
      +
      +
class DefinesParser(Parser):
    '''Parser for preprocessor-style #define files (e.g. defines.inc).'''
    # can't merge, #unfilter needs to be the last item, which we don't support
    canMerge = False

    def __init__(self):
        # Key: leading whitespace, preceding non-#define comment lines,
        # then '#define NAME value' up to end of line.
        self.reKey = re.compile('^(\s*)((?:^#(?!define\s).*\s*)*)'
                                '(#define[ \t]+(\w+)[ \t]+(.*?))([ \t]*$\n?)',
                                re.M)
        # Header/footer: runs of '#' lines that are not #define lines.
        self.reHeader = re.compile('^\s*(#(?!define\s).*\s*)*')
        self.reFooter = re.compile('\s*(#(?!define\s).*\s*)*$', re.M)
        Parser.__init__(self)
      +
      +
class IniParser(Parser):
    '''
    Parse files of the form:
    # initial comment
    [cat]
    whitespace*
    #comment
    string=value
    ...
    '''
    def __init__(self):
        # Header: leading blank/comment (';' or '#') lines up to and
        # including the first [section] line.
        self.reHeader = re.compile('^((?:\s*|[;#].*)\n)*\[.+?\]\n', re.M)
        # Key: whitespace, preceding comment lines, then 'key=value'.
        self.reKey = re.compile('(\s*)((?:[;#].*\n\s*)*)((.+?)=(.*))(\n?)')
        # Footer: trailing comment/whitespace lines.
        self.reFooter = re.compile('\s*([;#].*\s*)*$')
        Parser.__init__(self)
      +
      +
      +DECL, COMMENT, START, END, CONTENT = range(5)
      +
      +
      +class BookmarksParserInner(HTMLParser):
      +
      +    class Token(object):
      +        _type = None
      +        content = ''
      +
      +        def __str__(self):
      +            return self.content
      +
      +    class DeclToken(Token):
      +        _type = DECL
      +
      +        def __init__(self, decl):
      +            self.content = decl
      +            pass
      +
      +        def __str__(self):
      +            return '' % self.content
      +        pass
      +
      +    class CommentToken(Token):
      +        _type = COMMENT
      +
      +        def __init__(self, comment):
      +            self.content = comment
      +            pass
      +
      +        def __str__(self):
      +            return '' % self.content
      +        pass
      +
      +    class StartToken(Token):
      +        _type = START
      +
      +        def __init__(self, tag, attrs, content):
      +            self.tag = tag
      +            self.attrs = dict(attrs)
      +            self.content = content
      +            pass
      +        pass
      +
      +    class EndToken(Token):
      +        _type = END
      +
      +        def __init__(self, tag):
      +            self.tag = tag
      +            pass
      +
      +        def __str__(self):
      +            return '' % self.tag.upper()
      +        pass
      +
      +    class ContentToken(Token):
      +        _type = CONTENT
      +
      +        def __init__(self, content):
      +            self.content = content
      +            pass
      +        pass
      +
      +    def __init__(self):
      +        HTMLParser.__init__(self)
      +        self.tokens = []
      +
      +    def parse(self, contents):
      +        self.tokens = []
      +        self.feed(contents)
      +        self.close()
      +        return self.tokens
      +
      +    # Called when we hit an end DL tag to reset the folder selections
      +    def handle_decl(self, decl):
      +        self.tokens.append(self.DeclToken(decl))
      +
      +    # Called when we hit an end DL tag to reset the folder selections
      +    def handle_comment(self, comment):
      +        self.tokens.append(self.CommentToken(comment))
      +
      +    def handle_starttag(self, tag, attrs):
      +        self.tokens.append(self.StartToken(tag, attrs,
      +                                           self.get_starttag_text()))
      +
      +    # Called when text data is encountered
      +    def handle_data(self, data):
      +        if self.tokens[-1]._type == CONTENT:
      +            self.tokens[-1].content += data
      +        else:
      +            self.tokens.append(self.ContentToken(data))
      +
      +    def handle_charref(self, data):
      +        self.handle_data('&#%s;' % data)
      +
    # Re-emit named entity references verbatim as character data.
    def handle_entityref(self, data):
        self.handle_data('&%s;' % data)
      +
    # Called for a closing tag; record it as an EndToken.
    def handle_endtag(self, tag):
        self.tokens.append(self.EndToken(tag))
      +
      +
class BookmarksParser(Parser):
    # Bookmarks files are generated per-locale, not merged with a reference.
    canMerge = False

    class BMEntity(object):
        """Minimal entity-like pair: dotted key path and value."""
        def __init__(self, key, val):
            self.key = key
            self.val = val

    def __iter__(self):
        """Yield BMEntity objects for tag attributes and text content,
        keyed by the dotted path of currently open tags."""
        p = BookmarksParserInner()
        tks = p.parse(self.contents)
        i = 0
        # stack of open tag names, joined with '.' to form entity keys
        k = []
        for i in xrange(len(tks)):
            t = tks[i]
            if t._type == START:
                k.append(t.tag)
                # Python 2: dict.keys() returns a list; sort in place for
                # deterministic attribute ordering.
                keys = t.attrs.keys()
                keys.sort()
                for attrname in keys:
                    yield self.BMEntity('.'.join(k) + '.@' + attrname,
                                        t.attrs[attrname])
                if i + 1 < len(tks) and tks[i+1]._type == CONTENT:
                    # NOTE: this increment is overwritten by the for loop on
                    # the next iteration, so the CONTENT token is visited
                    # again — harmlessly, since the dispatch below only acts
                    # on START and END tokens.
                    i += 1
                    t = tks[i]
                    v = t.content.strip()
                    if v:
                        yield self.BMEntity('.'.join(k), v)
            elif t._type == END:
                k.pop()
      +
      +
# Filename-pattern / parser-instance pairs; presumably consumed by a
# getParser()-style helper defined elsewhere in this module (the test
# mixin imports `getParser` from here) — TODO confirm lookup order.
__constructors = [('\\.dtd$', DTDParser()),
                  ('\\.properties$', PropertiesParser()),
                  ('\\.ini$', IniParser()),
                  ('\\.inc$', DefinesParser()),
                  ('bookmarks\\.html$', BookmarksParser())]
      diff --git a/python/compare-locales/compare_locales/paths.py b/python/compare-locales/compare_locales/paths.py
      new file mode 100644
      index 000000000..f72b3a2e7
      --- /dev/null
      +++ b/python/compare-locales/compare_locales/paths.py
      @@ -0,0 +1,398 @@
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +import os.path
      +import os
      +from ConfigParser import ConfigParser, NoSectionError, NoOptionError
      +from urlparse import urlparse, urljoin
      +from urllib import pathname2url, url2pathname
      +from urllib2 import urlopen
      +from collections import defaultdict
      +from compare_locales import util
      +
      +
      +class L10nConfigParser(object):
      +    '''Helper class to gather application information from ini files.
      +
      +    This class is working on synchronous open to read files or web data.
      +    Subclass this and overwrite loadConfigs and addChild if you need async.
      +    '''
      +    def __init__(self, inipath, **kwargs):
      +        """Constructor for L10nConfigParsers
      +
      +        inipath -- l10n.ini path
      +        Optional keyword arguments are fowarded to the inner ConfigParser as
      +        defaults.
      +        """
      +        if os.path.isabs(inipath):
      +            self.inipath = 'file:%s' % pathname2url(inipath)
      +        else:
      +            pwdurl = 'file:%s/' % pathname2url(os.getcwd())
      +            self.inipath = urljoin(pwdurl, inipath)
      +        # l10n.ini files can import other l10n.ini files, store the
      +        # corresponding L10nConfigParsers
      +        self.children = []
      +        # we really only care about the l10n directories described in l10n.ini
      +        self.dirs = []
      +        # optional defaults to be passed to the inner ConfigParser (unused?)
      +        self.defaults = kwargs
      +
      +    def getDepth(self, cp):
      +        '''Get the depth for the comparison from the parsed l10n.ini.
      +
      +        Overloadable to get the source depth for fennec and friends.
      +        '''
      +        try:
      +            depth = cp.get('general', 'depth')
      +        except:
      +            depth = '.'
      +        return depth
      +
      +    def getFilters(self):
      +        '''Get the test functions from this ConfigParser and all children.
      +
      +        Only works with synchronous loads, used by compare-locales, which
      +        is local anyway.
      +        '''
      +        filterurl = urljoin(self.inipath, 'filter.py')
      +        try:
      +            l = {}
      +            execfile(url2pathname(urlparse(filterurl).path), {}, l)
      +            if 'test' in l and callable(l['test']):
      +                filters = [l['test']]
      +            else:
      +                filters = []
      +        except:
      +            filters = []
      +
      +        for c in self.children:
      +            filters += c.getFilters()
      +
      +        return filters
      +
      +    def loadConfigs(self):
      +        """Entry point to load the l10n.ini file this Parser refers to.
      +
      +        This implementation uses synchronous loads, subclasses might overload
      +        this behaviour. If you do, make sure to pass a file-like object
      +        to onLoadConfig.
      +        """
      +        self.onLoadConfig(urlopen(self.inipath))
      +
      +    def onLoadConfig(self, inifile):
      +        """Parse a file-like object for the loaded l10n.ini file."""
      +        cp = ConfigParser(self.defaults)
      +        cp.readfp(inifile)
      +        depth = self.getDepth(cp)
      +        self.baseurl = urljoin(self.inipath, depth)
      +        # create child loaders for any other l10n.ini files to be included
      +        try:
      +            for title, path in cp.items('includes'):
      +                # skip default items
      +                if title in self.defaults:
      +                    continue
      +                # add child config parser
      +                self.addChild(title, path, cp)
      +        except NoSectionError:
      +            pass
      +        # try to load the "dirs" defined in the "compare" section
      +        try:
      +            self.dirs.extend(cp.get('compare', 'dirs').split())
      +        except (NoOptionError, NoSectionError):
      +            pass
      +        # try getting a top level compare dir, as used for fennec
      +        try:
      +            self.tld = cp.get('compare', 'tld')
      +            # remove tld from comparison dirs
      +            if self.tld in self.dirs:
      +                self.dirs.remove(self.tld)
      +        except (NoOptionError, NoSectionError):
      +            self.tld = None
      +        # try to set "all_path" and "all_url"
      +        try:
      +            self.all_path = cp.get('general', 'all')
      +            self.all_url = urljoin(self.baseurl, self.all_path)
      +        except (NoOptionError, NoSectionError):
      +            self.all_path = None
      +            self.all_url = None
      +        return cp
      +
      +    def addChild(self, title, path, orig_cp):
      +        """Create a child L10nConfigParser and load it.
      +
      +        title -- indicates the module's name
      +        path -- indicates the path to the module's l10n.ini file
      +        orig_cp -- the configuration parser of this l10n.ini
      +        """
      +        cp = L10nConfigParser(urljoin(self.baseurl, path), **self.defaults)
      +        cp.loadConfigs()
      +        self.children.append(cp)
      +
      +    def getTLDPathsTuple(self, basepath):
      +        """Given the basepath, return the path fragments to be used for
      +        self.tld. For build runs, this is (basepath, self.tld), for
      +        source runs, just (basepath,).
      +
      +        @see overwritten method in SourceTreeConfigParser.
      +        """
      +        return (basepath, self.tld)
      +
      +    def dirsIter(self):
      +        """Iterate over all dirs and our base path for this l10n.ini"""
      +        url = urlparse(self.baseurl)
      +        basepath = url2pathname(url.path)
      +        if self.tld is not None:
      +            yield self.tld, self.getTLDPathsTuple(basepath)
      +        for dir in self.dirs:
      +            yield dir, (basepath, dir)
      +
      +    def directories(self):
      +        """Iterate over all dirs and base paths for this l10n.ini as well
      +        as the included ones.
      +        """
      +        for t in self.dirsIter():
      +            yield t
      +        for child in self.children:
      +            for t in child.directories():
      +                yield t
      +
      +    def allLocales(self):
      +        """Return a list of all the locales of this project"""
      +        return util.parseLocales(urlopen(self.all_url).read())
      +
      +
      +class SourceTreeConfigParser(L10nConfigParser):
      +    '''Subclassing L10nConfigParser to work with just the repos
      +    checked out next to each other instead of intermingled like
      +    we do for real builds.
      +    '''
      +
      +    def __init__(self, inipath, basepath):
      +        '''Add additional arguments basepath.
      +
      +        basepath is used to resolve local paths via branchnames.
      +        '''
      +        L10nConfigParser.__init__(self, inipath)
      +        self.basepath = basepath
      +        self.tld = None
      +
      +    def getDepth(self, cp):
      +        '''Get the depth for the comparison from the parsed l10n.ini.
      +
      +        Overloaded to get the source depth for fennec and friends.
      +        '''
      +        try:
      +            depth = cp.get('general', 'source-depth')
      +        except:
      +            try:
      +                depth = cp.get('general', 'depth')
      +            except:
      +                depth = '.'
      +        return depth
      +
      +    def addChild(self, title, path, orig_cp):
      +        # check if there's a section with details for this include
      +        # we might have to check a different repo, or even VCS
      +        # for example, projects like "mail" indicate in
      +        # an "include_" section where to find the l10n.ini for "toolkit"
      +        details = 'include_' + title
      +        if orig_cp.has_section(details):
      +            branch = orig_cp.get(details, 'mozilla')
      +            inipath = orig_cp.get(details, 'l10n.ini')
      +            path = self.basepath + '/' + branch + '/' + inipath
      +        else:
      +            path = urljoin(self.baseurl, path)
      +        cp = SourceTreeConfigParser(path, self.basepath, **self.defaults)
      +        cp.loadConfigs()
      +        self.children.append(cp)
      +
      +    def getTLDPathsTuple(self, basepath):
      +        """Overwrite L10nConfigParser's getTLDPathsTuple to just return
      +        the basepath.
      +        """
      +        return (basepath, )
      +
      +
      +class File(object):
      +
      +    def __init__(self, fullpath, file, module=None, locale=None):
      +        self.fullpath = fullpath
      +        self.file = file
      +        self.module = module
      +        self.locale = locale
      +        pass
      +
      +    def getContents(self):
      +        # open with universal line ending support and read
      +        return open(self.fullpath, 'rU').read()
      +
      +    def __hash__(self):
      +        f = self.file
      +        if self.module:
      +            f = self.module + '/' + f
      +        return hash(f)
      +
      +    def __str__(self):
      +        return self.fullpath
      +
      +    def __cmp__(self, other):
      +        if not isinstance(other, File):
      +            raise NotImplementedError
      +        rv = cmp(self.module, other.module)
      +        if rv != 0:
      +            return rv
      +        return cmp(self.file, other.file)
      +
      +
      +class EnumerateDir(object):
      +    ignore_dirs = ['CVS', '.svn', '.hg', '.git']
      +
      +    def __init__(self, basepath, module='', locale=None, ignore_subdirs=[]):
      +        self.basepath = basepath
      +        self.module = module
      +        self.locale = locale
      +        self.ignore_subdirs = ignore_subdirs
      +        pass
      +
      +    def cloneFile(self, other):
      +        '''
      +        Return a File object that this enumerator would return, if it had it.
      +        '''
      +        return File(os.path.join(self.basepath, other.file), other.file,
      +                    self.module, self.locale)
      +
      +    def __iter__(self):
      +        # our local dirs are given as a tuple of path segments, starting off
      +        # with an empty sequence for the basepath.
      +        dirs = [()]
      +        while dirs:
      +            dir = dirs.pop(0)
      +            fulldir = os.path.join(self.basepath, *dir)
      +            try:
      +                entries = os.listdir(fulldir)
      +            except OSError:
      +                # we probably just started off in a non-existing dir, ignore
      +                continue
      +            entries.sort()
      +            for entry in entries:
      +                leaf = os.path.join(fulldir, entry)
      +                if os.path.isdir(leaf):
      +                    if entry not in self.ignore_dirs and \
      +                        leaf not in [os.path.join(self.basepath, d)
      +                                     for d in self.ignore_subdirs]:
      +                        dirs.append(dir + (entry,))
      +                    continue
      +                yield File(leaf, '/'.join(dir + (entry,)),
      +                           self.module, self.locale)
      +
      +
      +class LocalesWrap(object):
      +
      +    def __init__(self, base, module, locales, ignore_subdirs=[]):
      +        self.base = base
      +        self.module = module
      +        self.locales = locales
      +        self.ignore_subdirs = ignore_subdirs
      +
      +    def __iter__(self):
      +        for locale in self.locales:
      +            path = os.path.join(self.base, locale, self.module)
      +            yield (locale, EnumerateDir(path, self.module, locale,
      +                                        self.ignore_subdirs))
      +
      +
      +class EnumerateApp(object):
      +    reference = 'en-US'
      +
      +    def __init__(self, inipath, l10nbase, locales=None):
      +        self.setupConfigParser(inipath)
      +        self.modules = defaultdict(dict)
      +        self.l10nbase = os.path.abspath(l10nbase)
      +        self.filters = []
      +        drive, tail = os.path.splitdrive(inipath)
      +        self.addFilters(*self.config.getFilters())
      +        self.locales = locales or self.config.allLocales()
      +        self.locales.sort()
      +
      +    def setupConfigParser(self, inipath):
      +        self.config = L10nConfigParser(inipath)
      +        self.config.loadConfigs()
      +
      +    def addFilters(self, *args):
      +        self.filters += args
      +
      +    value_map = {None: None, 'error': 0, 'ignore': 1, 'report': 2}
      +
      +    def filter(self, l10n_file, entity=None):
      +        '''Go through all added filters, and,
      +        - map "error" -> 0, "ignore" -> 1, "report" -> 2
      +        - if filter.test returns a bool, map that to
      +            False -> "ignore" (1), True -> "error" (0)
      +        - take the max of all reported
      +        '''
      +        rv = 0
      +        for f in reversed(self.filters):
      +            try:
      +                _r = f(l10n_file.module, l10n_file.file, entity)
      +            except:
      +                # XXX error handling
      +                continue
      +            if isinstance(_r, bool):
      +                _r = [1, 0][_r]
      +            else:
      +                # map string return value to int, default to 'error',
      +                # None is None
      +                _r = self.value_map.get(_r, 0)
      +            if _r is not None:
      +                rv = max(rv, _r)
      +        return ['error', 'ignore', 'report'][rv]
      +
      +    def __iter__(self):
      +        '''
      +        Iterate over all modules, return en-US directory enumerator, and an
      +        iterator over all locales in each iteration. Per locale, the locale
      +        code and an directory enumerator will be given.
      +        '''
      +        dirmap = dict(self.config.directories())
      +        mods = dirmap.keys()
      +        mods.sort()
      +        for mod in mods:
      +            if self.reference == 'en-US':
      +                base = os.path.join(*(dirmap[mod] + ('locales', 'en-US')))
      +            else:
      +                base = os.path.join(self.l10nbase, self.reference, mod)
      +            yield (mod, EnumerateDir(base, mod, self.reference),
      +                   LocalesWrap(self.l10nbase, mod, self.locales,
      +                   [m[len(mod)+1:] for m in mods if m.startswith(mod+'/')]))
      +
      +
class EnumerateSourceTreeApp(EnumerateApp):
    '''Subclass EnumerateApp to work on side-by-side checked out
    repos, and to not pay attention to how the source would actually
    be checked out for building.

    It's supporting applications like Fennec, too, which have
    'locales/en-US/...' in their root dir, but claim to be 'mobile'.
    '''

    def __init__(self, inipath, basepath, l10nbase, locales=None):
        # basepath must be set before the base __init__ runs, because it
        # calls setupConfigParser(), which reads self.basepath.
        self.basepath = basepath
        EnumerateApp.__init__(self, inipath, l10nbase, locales)

    def setupConfigParser(self, inipath):
        # Use the source-tree-aware config parser instead of the default.
        self.config = SourceTreeConfigParser(inipath, self.basepath)
        self.config.loadConfigs()
      +
      +
      +def get_base_path(mod, loc):
      +    'statics for path patterns and conversion'
      +    __l10n = 'l10n/%(loc)s/%(mod)s'
      +    __en_US = 'mozilla/%(mod)s/locales/en-US'
      +    if loc == 'en-US':
      +        return __en_US % {'mod': mod}
      +    return __l10n % {'mod': mod, 'loc': loc}
      +
      +
      +def get_path(mod, loc, leaf):
      +    return get_base_path(mod, loc) + '/' + leaf
      diff --git a/python/compare-locales/compare_locales/tests/__init__.py b/python/compare-locales/compare_locales/tests/__init__.py
      new file mode 100644
      index 000000000..8808d78f4
      --- /dev/null
      +++ b/python/compare-locales/compare_locales/tests/__init__.py
      @@ -0,0 +1,49 @@
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +'''Mixins for parser tests.
      +'''
      +
      +from itertools import izip_longest
      +from pkg_resources import resource_string
      +import re
      +
      +from compare_locales.parser import getParser
      +
      +
class ParserTestMixin():
    '''Utility methods used by the parser tests.
    '''
    # Subclasses set this to a file name whose extension selects the parser.
    filename = None

    def setUp(self):
        '''Create a parser for this test.
        '''
        self.parser = getParser(self.filename)

    def tearDown(self):
        'tear down this test'
        del self.parser

    def resource(self, name):
        # Load a test fixture from the package's data/ directory.
        testcontent = resource_string(__name__, 'data/' + name)
        # fake universal line endings
        testcontent = re.sub('\r\n?', lambda m: '\n', testcontent)
        return testcontent

    def _test(self, content, refs):
        '''Helper to test the parser.
        Compares the result of parsing content with the given list
        of reference keys and values.
        '''
        self.parser.readContents(content)
        entities = [entity for entity in self.parser]
        for entity, ref in izip_longest(entities, refs):
            # izip_longest pads the shorter side with None, so a falsy
            # entity or ref means the two sequences differ in length.
            self.assertTrue(entity, 'excess reference entity')
            self.assertTrue(ref, 'excess parsed entity')
            self.assertEqual(entity.val, ref[1])
            if ref[0].startswith('_junk'):
                # junk keys are regex patterns rather than literal keys
                self.assertTrue(re.match(ref[0], entity.key))
            else:
                self.assertEqual(entity.key, ref[0])
      diff --git a/python/compare-locales/compare_locales/tests/data/bug121341.properties b/python/compare-locales/compare_locales/tests/data/bug121341.properties
      new file mode 100644
      index 000000000..b45fc9698
      --- /dev/null
      +++ b/python/compare-locales/compare_locales/tests/data/bug121341.properties
      @@ -0,0 +1,68 @@
      +# simple check
      +1=abc
      +# test whitespace trimming in key and value
      +  2	=   xy	
      +# test parsing of escaped values
      +3 = \u1234\t\r\n\uAB\
      +\u1\n
      +# test multiline properties
      +4 = this is \
      +multiline property
      +5 = this is \
      +	   another multiline property
      +# property with DOS EOL
      +6 = test\u0036
      +# test multiline property with with DOS EOL
      +7 = yet another multi\
      +    line propery
      +# trimming should not trim escaped whitespaces
      +8 =	\ttest5\u0020	
      +# another variant of #8
      +9 =     \ test6\t	    
      +# test UTF-8 encoded property/value
      +10aሴb = c췯d
      +# next property should test unicode escaping at the boundary of parsing buffer
      +# buffer size is expected to be 4096 so add comments to get to this offset
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +################################################################################
      +###############################################################################
      +11 = \uABCD
      diff --git a/python/compare-locales/compare_locales/tests/data/test.properties b/python/compare-locales/compare_locales/tests/data/test.properties
      new file mode 100644
      index 000000000..19cae9702
      --- /dev/null
      +++ b/python/compare-locales/compare_locales/tests/data/test.properties
      @@ -0,0 +1,14 @@
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +1=1
      + 2=2
      +3 =3
      + 4 =4
      +5=5
      +6= 6
      +7=7 
      +8= 8 
      +# this is a comment
      +9=this is the first part of a continued line \
      + and here is the 2nd part
      diff --git a/python/compare-locales/compare_locales/tests/data/triple-license.dtd b/python/compare-locales/compare_locales/tests/data/triple-license.dtd
      new file mode 100644
      index 000000000..4a28b17a6
      --- /dev/null
      +++ b/python/compare-locales/compare_locales/tests/data/triple-license.dtd
      @@ -0,0 +1,38 @@
      +
      +
      +
      diff --git a/python/compare-locales/compare_locales/tests/test_checks.py b/python/compare-locales/compare_locales/tests/test_checks.py
      new file mode 100644
      index 000000000..b995d43f9
      --- /dev/null
      +++ b/python/compare-locales/compare_locales/tests/test_checks.py
      @@ -0,0 +1,403 @@
      +# -*- coding: utf-8 -*-
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +import unittest
      +
      +from compare_locales.checks import getChecker
      +from compare_locales.parser import getParser, Entity
      +from compare_locales.paths import File
      +
      +
class BaseHelper(unittest.TestCase):
    # Subclasses provide the File under test and the reference content.
    file = None
    refContent = None

    def setUp(self):
        # Parse the reference content once per test.
        p = getParser(self.file.file)
        p.readContents(self.refContent)
        self.refList, self.refMap = p.parse()

    def _test(self, content, refWarnOrErrors, with_ref_file=False):
        '''Parse exactly one l10n entity from *content*, run the checker
        against its reference entity, and compare the produced
        warnings/errors with *refWarnOrErrors*.
        '''
        p = getParser(self.file.file)
        p.readContents(content)
        l10n = [e for e in p]
        assert len(l10n) == 1
        l10n = l10n[0]
        if with_ref_file:
            kwargs = {
                'reference': self.refList
            }
        else:
            kwargs = {}
        checker = getChecker(self.file, **kwargs)
        ref = self.refList[self.refMap[l10n.key]]
        found = tuple(checker.check(ref, l10n))
        self.assertEqual(found, refWarnOrErrors)
      +
      +
class TestProperties(BaseHelper):
    """Checker tests for .properties files."""
    file = File('foo.properties', 'foo.properties')
    refContent = '''some = value
'''

    def testGood(self):
        # A plain localized value produces no warnings or errors.
        self._test('''some = localized''',
                   tuple())

    def testMissedEscape(self):
        # \e is not a valid .properties escape; expect a warning at col 20.
        self._test(r'''some = \u67ood escape, bad \escape''',
                   (('warning', 20, r'unknown escape sequence, \e',
                     'escape'),))
      +
      +
class TestPlurals(BaseHelper):
    """Checker tests for plural-form .properties strings, where #N
    placeholders from the reference must survive in the localization."""
    file = File('foo.properties', 'foo.properties')
    refContent = '''\
# LOCALIZATION NOTE (downloadsTitleFiles): Semi-colon list of plural forms.
# See: http://developer.mozilla.org/en/docs/Localization_and_Plurals
# #1 number of files
# example: 111 files - Downloads
downloadsTitleFiles=#1 file - Downloads;#1 files - #2
'''

    def testGood(self):
        # All reference placeholders (#1, #2) appear; extra plural forms
        # are fine.
        self._test('''\
# LOCALIZATION NOTE (downloadsTitleFiles): Semi-colon list of plural forms.
# See: http://developer.mozilla.org/en/docs/Localization_and_Plurals
# #1 number of files
# example: 111 files - Downloads
downloadsTitleFiles=#1 file - Downloads;#1 files - #2;#1 filers
''',
                   tuple())

    def testNotUsed(self):
        # #2 from the reference is dropped -> warning.
        self._test('''\
# LOCALIZATION NOTE (downloadsTitleFiles): Semi-colon list of plural forms.
# See: http://developer.mozilla.org/en/docs/Localization_and_Plurals
# #1 number of files
# example: 111 files - Downloads
downloadsTitleFiles=#1 file - Downloads;#1 files - Downloads;#1 filers
''',
                   (('warning', 0, 'not all variables used in l10n',
                     'plural'),))

    def testNotDefined(self):
        # #3 has no counterpart in the reference -> error.
        self._test('''\
# LOCALIZATION NOTE (downloadsTitleFiles): Semi-colon list of plural forms.
# See: http://developer.mozilla.org/en/docs/Localization_and_Plurals
# #1 number of files
# example: 111 files - Downloads
downloadsTitleFiles=#1 file - Downloads;#1 files - #2;#1 #3
''',
                   (('error', 0, 'unreplaced variables in l10n', 'plural'),))
      +
      +
# NOTE(review): the DTD entity declarations that belong in the string
# literals below appear to have been stripped from this copy of the file
# (only blank lines and trailing fragments like `stuff">` remain). The
# code is kept byte-identical; restore the literals from the upstream
# compare-locales sources before relying on these tests.
class TestDTDs(BaseHelper):
    file = File('foo.dtd', 'foo.dtd')
    refContent = '''




'''

    def testWarning(self):
        self._test('''
''',
                   (('warning', (0, 0), 'Referencing unknown entity `not`',
                     'xmlparse'),))
        # make sure we only handle translated entity references
        self._test(u'''
'''.encode('utf-8'),
            (('warning', (0, 0), u'Referencing unknown entity `ƞǿŧ`',
              'xmlparse'),))

    def testErrorFirstLine(self):
        self._test(''' stuff">
''',
                   (('error', (1, 10), 'mismatched tag', 'xmlparse'),))

    def testErrorSecondLine(self):
        self._test('''
stuff">
''',
                   (('error', (2, 4), 'mismatched tag', 'xmlparse'),))

    def testKeyErrorSingleAmpersand(self):
        self._test('''
''',
                   (('error', (1, 1), 'not well-formed (invalid token)',
                     'xmlparse'),))

    def testXMLEntity(self):
        self._test('''
''',
                   tuple())

    def testPercentEntity(self):
        self._test('''
''',
                   tuple())
        self._test('''
''',
                   (('error', (0, 32), 'not well-formed (invalid token)',
                     'xmlparse'),))

    def testNoNumber(self):
        self._test('''''',
                   (('warning', 0, 'reference is a number', 'number'),))

    def testNoLength(self):
        self._test('''''',
                   (('error', 0, 'reference is a CSS length', 'css'),))

    def testNoStyle(self):
        self._test('''''',
                   (('error', 0, 'reference is a CSS spec', 'css'),))
        self._test('''''',
                   (('error', 0, 'reference is a CSS spec', 'css'),))

    def testStyleWarnings(self):
        self._test('''''',
                   (('warning', 0, 'height only in reference', 'css'),))
        self._test('''''',
                   (('warning', 0, "units for width don't match (em != ch)",
                     'css'),))

    def testNoWarning(self):
        self._test('''''', tuple())
        self._test('''''', tuple())
        self._test('''''', tuple())
      +
      +
      +class TestEntitiesInDTDs(BaseHelper):
      +    file = File('foo.dtd', 'foo.dtd')
      +    refContent = '''
      +
      +
      +
      +'''
      +
      +    def testOK(self):
      +        self._test('''''', tuple(),
      +                   with_ref_file=True)
      +
      +    def testMismatch(self):
      +        self._test('''''',
      +                   (('warning', (0, 0),
      +                     'Entity brandShortName referenced, '
      +                     'but brandShorterName used in context',
      +                     'xmlparse'),),
      +                   with_ref_file=True)
      +
      +    def testAcross(self):
      +        self._test('''''',
      +                   tuple(),
      +                   with_ref_file=True)
      +
      +    def testAcrossWithMismatch(self):
      +        '''If we could tell that ent.start and ent.end are one string,
      +        we should warn. Sadly, we can't, so this goes without warning.'''
      +        self._test('''''',
      +                   tuple(),
      +                   with_ref_file=True)
      +
      +    def testUnknownWithRef(self):
      +        self._test('''''',
      +                   (('warning',
      +                     (0, 0),
      +                     'Referencing unknown entity `foopy` '
      +                     '(brandShorterName used in context, '
      +                     'brandShortName known)',
      +                     'xmlparse'),),
      +                   with_ref_file=True)
      +
      +    def testUnknown(self):
      +        self._test('''''',
      +                   (('warning',
      +                     (0, 0),
      +                     'Referencing unknown entity `foopy`'
      +                     ' (brandShortName, brandShorterName known)',
      +                     'xmlparse'),),
      +                   with_ref_file=True)
      +
      +
      +class TestAndroid(unittest.TestCase):
      +    """Test Android checker
      +
      +    Make sure we're hitting our extra rules only if
      +    we're passing in a DTD file in the embedding/android module.
      +    """
      +    apos_msg = u"Apostrophes in Android DTDs need escaping with \\' or " + \
      +               u"\\u0027, or use \u2019, or put string in quotes."
      +    quot_msg = u"Quotes in Android DTDs need escaping with \\\" or " + \
      +               u"\\u0022, or put string in apostrophes."
      +
      +    def getEntity(self, v):
      +        return Entity(v, lambda s: s, (0, len(v)), (), (0, 0), (), (),
      +                      (0, len(v)), ())
      +
      +    def getDTDEntity(self, v):
      +        v = v.replace('"', '"')
      +        return Entity('' % v,
      +                      lambda s: s,
      +                      (0, len(v) + 16), (), (0, 0), (), (9, 12),
      +                      (14, len(v) + 14), ())
      +
      +    def test_android_dtd(self):
      +        """Testing the actual android checks. The logic is involved,
      +        so this is a lot of nitty gritty detail tests.
      +        """
      +        f = File("embedding/android/strings.dtd", "strings.dtd",
      +                 "embedding/android")
      +        checker = getChecker(f)
      +        # good string
      +        ref = self.getDTDEntity("plain string")
      +        l10n = self.getDTDEntity("plain localized string")
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         ())
      +        # dtd warning
      +        l10n = self.getDTDEntity("plain localized string &ref;")
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         (('warning', (0, 0),
      +                           'Referencing unknown entity `ref`', 'xmlparse'),))
      +        # no report on stray ampersand or quote, if not completely quoted
      +        for i in xrange(3):
      +            # make sure we're catching unescaped apostrophes,
      +            # try 0..5 backticks
      +            l10n = self.getDTDEntity("\\"*(2*i) + "'")
      +            self.assertEqual(tuple(checker.check(ref, l10n)),
      +                             (('error', 2*i, self.apos_msg, 'android'),))
      +            l10n = self.getDTDEntity("\\"*(2*i + 1) + "'")
      +            self.assertEqual(tuple(checker.check(ref, l10n)),
      +                             ())
      +            # make sure we don't report if apos string is quoted
      +            l10n = self.getDTDEntity('"' + "\\"*(2*i) + "'\"")
      +            tpl = tuple(checker.check(ref, l10n))
      +            self.assertEqual(tpl, (),
      +                             "`%s` shouldn't fail but got %s"
      +                             % (l10n.val, str(tpl)))
      +            l10n = self.getDTDEntity('"' + "\\"*(2*i+1) + "'\"")
      +            tpl = tuple(checker.check(ref, l10n))
      +            self.assertEqual(tpl, (),
      +                             "`%s` shouldn't fail but got %s"
      +                             % (l10n.val, str(tpl)))
      +            # make sure we're catching unescaped quotes, try 0..5 backticks
      +            l10n = self.getDTDEntity("\\"*(2*i) + "\"")
      +            self.assertEqual(tuple(checker.check(ref, l10n)),
      +                             (('error', 2*i, self.quot_msg, 'android'),))
      +            l10n = self.getDTDEntity("\\"*(2*i + 1) + "'")
      +            self.assertEqual(tuple(checker.check(ref, l10n)),
      +                             ())
      +            # make sure we don't report if quote string is single quoted
      +            l10n = self.getDTDEntity("'" + "\\"*(2*i) + "\"'")
      +            tpl = tuple(checker.check(ref, l10n))
      +            self.assertEqual(tpl, (),
      +                             "`%s` shouldn't fail but got %s" %
      +                             (l10n.val, str(tpl)))
      +            l10n = self.getDTDEntity('"' + "\\"*(2*i+1) + "'\"")
      +            tpl = tuple(checker.check(ref, l10n))
      +            self.assertEqual(tpl, (),
      +                             "`%s` shouldn't fail but got %s" %
      +                             (l10n.val, str(tpl)))
      +        # check for mixed quotes and ampersands
      +        l10n = self.getDTDEntity("'\"")
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         (('error', 0, self.apos_msg, 'android'),
      +                          ('error', 1, self.quot_msg, 'android')))
      +        l10n = self.getDTDEntity("''\"'")
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         (('error', 1, self.apos_msg, 'android'),))
      +        l10n = self.getDTDEntity('"\'""')
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         (('error', 2, self.quot_msg, 'android'),))
      +
      +        # broken unicode escape
      +        l10n = self.getDTDEntity("Some broken \u098 unicode")
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         (('error', 12, 'truncated \\uXXXX escape',
      +                           'android'),))
      +        # broken unicode escape, try to set the error off
      +        l10n = self.getDTDEntity(u"\u9690"*14+"\u006"+"  "+"\u0064")
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         (('error', 14, 'truncated \\uXXXX escape',
      +                           'android'),))
      +
      +    def test_android_prop(self):
      +        f = File("embedding/android/strings.properties", "strings.properties",
      +                 "embedding/android")
      +        checker = getChecker(f)
      +        # good plain string
      +        ref = self.getEntity("plain string")
      +        l10n = self.getEntity("plain localized string")
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         ())
      +        # no dtd warning
      +        ref = self.getEntity("plain string")
      +        l10n = self.getEntity("plain localized string &ref;")
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         ())
      +        # no report on stray ampersand
      +        ref = self.getEntity("plain string")
      +        l10n = self.getEntity("plain localized string with apos: '")
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         ())
      +        # report on bad printf
      +        ref = self.getEntity("string with %s")
      +        l10n = self.getEntity("string with %S")
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         (('error', 0, 'argument 1 `S` should be `s`',
      +                           'printf'),))
      +
      +    def test_non_android_dtd(self):
      +        f = File("browser/strings.dtd", "strings.dtd", "browser")
      +        checker = getChecker(f)
      +        # good string
      +        ref = self.getDTDEntity("plain string")
      +        l10n = self.getDTDEntity("plain localized string")
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         ())
      +        # dtd warning
      +        ref = self.getDTDEntity("plain string")
      +        l10n = self.getDTDEntity("plain localized string &ref;")
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         (('warning', (0, 0),
      +                          'Referencing unknown entity `ref`', 'xmlparse'),))
      +        # no report on stray ampersand
      +        ref = self.getDTDEntity("plain string")
      +        l10n = self.getDTDEntity("plain localized string with apos: '")
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         ())
      +
      +    def test_entities_across_dtd(self):
      +        f = File("browser/strings.dtd", "strings.dtd", "browser")
      +        p = getParser(f.file)
      +        p.readContents('')
      +        ref = p.parse()
      +        checker = getChecker(f, reference=ref[0])
      +        # good string
      +        ref = self.getDTDEntity("plain string")
      +        l10n = self.getDTDEntity("plain localized string")
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         ())
      +        # dtd warning
      +        ref = self.getDTDEntity("plain string")
      +        l10n = self.getDTDEntity("plain localized string &ref;")
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         (('warning', (0, 0),
      +                           'Referencing unknown entity `ref` (good.ref known)',
      +                           'xmlparse'),))
      +        # no report on stray ampersand
      +        ref = self.getDTDEntity("plain string")
      +        l10n = self.getDTDEntity("plain localized string with &good.ref;")
      +        self.assertEqual(tuple(checker.check(ref, l10n)),
      +                         ())
      +
      +
      +if __name__ == '__main__':
      +    unittest.main()
      diff --git a/python/compare-locales/compare_locales/tests/test_compare.py b/python/compare-locales/compare_locales/tests/test_compare.py
      new file mode 100644
      index 000000000..51ba7cd8c
      --- /dev/null
      +++ b/python/compare-locales/compare_locales/tests/test_compare.py
      @@ -0,0 +1,90 @@
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +import unittest
      +
      +from compare_locales import compare
      +
      +
      +class TestTree(unittest.TestCase):
      +    '''Test the Tree utility class
      +
      +    Tree value classes need to be in-place editable
      +    '''
      +
      +    def test_empty_dict(self):
      +        tree = compare.Tree(dict)
      +        self.assertEqual(list(tree.getContent()), [])
      +        self.assertDictEqual(
      +            tree.toJSON(),
      +            {}
      +        )
      +
      +    def test_disjoint_dict(self):
      +        tree = compare.Tree(dict)
      +        tree['one/entry']['leaf'] = 1
      +        tree['two/other']['leaf'] = 2
      +        self.assertEqual(
      +            list(tree.getContent()),
      +            [
      +                (0, 'key', ('one', 'entry')),
      +                (1, 'value', {'leaf': 1}),
      +                (0, 'key', ('two', 'other')),
      +                (1, 'value', {'leaf': 2})
      +            ]
      +        )
      +        self.assertDictEqual(
      +            tree.toJSON(),
      +            {
      +                'children': [
      +                    ('one/entry',
      +                     {'value': {'leaf': 1}}
      +                     ),
      +                    ('two/other',
      +                     {'value': {'leaf': 2}}
      +                     )
      +                ]
      +            }
      +        )
      +        self.assertMultiLineEqual(
      +            str(tree),
      +            '''\
      +one/entry
      +    {'leaf': 1}
      +two/other
      +    {'leaf': 2}\
      +'''
      +        )
      +
      +    def test_overlapping_dict(self):
      +        tree = compare.Tree(dict)
      +        tree['one/entry']['leaf'] = 1
      +        tree['one/other']['leaf'] = 2
      +        self.assertEqual(
      +            list(tree.getContent()),
      +            [
      +                (0, 'key', ('one',)),
      +                (1, 'key', ('entry',)),
      +                (2, 'value', {'leaf': 1}),
      +                (1, 'key', ('other',)),
      +                (2, 'value', {'leaf': 2})
      +            ]
      +        )
      +        self.assertDictEqual(
      +            tree.toJSON(),
      +            {
      +                'children': [
      +                    ('one', {
      +                        'children': [
      +                            ('entry',
      +                             {'value': {'leaf': 1}}
      +                             ),
      +                            ('other',
      +                             {'value': {'leaf': 2}}
      +                             )
      +                        ]
      +                    })
      +                ]
      +            }
      +        )
      diff --git a/python/compare-locales/compare_locales/tests/test_dtd.py b/python/compare-locales/compare_locales/tests/test_dtd.py
      new file mode 100644
      index 000000000..87ddcde30
      --- /dev/null
      +++ b/python/compare-locales/compare_locales/tests/test_dtd.py
      @@ -0,0 +1,86 @@
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +'''Tests for the DTD parser.
      +'''
      +
      +import unittest
      +import re
      +
      +from compare_locales.parser import getParser
      +from compare_locales.tests import ParserTestMixin
      +
      +
      +class TestDTD(ParserTestMixin, unittest.TestCase):
      +    '''Tests for the DTD Parser.'''
      +    filename = 'foo.dtd'
      +
      +    def test_one_entity(self):
      +        self._test('''''',
      +                   (('foo.label', 'stuff'),))
      +
      +    quoteContent = '''
      +
      +
      +
      +
      +
      +
      +'''
      +    quoteRef = (
      +        ('good.one', 'one'),
      +        ('_junk_\\d_25-56$', ''),
      +        ('good.two', 'two'),
      +        ('_junk_\\d_82-119$', ''),
      +        ('good.three', 'three'),
      +        ('good.four', 'good \' quote'),
      +        ('good.five', 'good \'quoted\' word'),)
      +
      +    def test_quotes(self):
      +        self._test(self.quoteContent, self.quoteRef)
      +
      +    def test_apos(self):
      +        qr = re.compile('[\'"]', re.M)
      +
      +        def quot2apos(s):
      +            return qr.sub(lambda m: m.group(0) == '"' and "'" or '"', s)
      +
      +        self._test(quot2apos(self.quoteContent),
      +                   map(lambda t: (t[0], quot2apos(t[1])), self.quoteRef))
      +
      +    def test_parsed_ref(self):
      +        self._test('''
      +  %fooDTD;
      +''',
      +                   (('fooDTD', '"chrome://brand.dtd"'),))
      +
      +    def test_trailing_comment(self):
      +        self._test('''
      +
      +
      +''',
      +                   (('first', 'string'), ('second', 'string')))
      +
      +    def test_license_header(self):
      +        p = getParser('foo.dtd')
      +        p.readContents(self.resource('triple-license.dtd'))
      +        for e in p:
      +            self.assertEqual(e.key, 'foo')
      +            self.assertEqual(e.val, 'value')
      +        self.assert_('MPL' in p.header)
      +        p.readContents('''\
      +
      +
      +''')
      +        for e in p:
      +            self.assertEqual(e.key, 'foo')
      +            self.assertEqual(e.val, 'value')
      +        self.assert_('MPL' in p.header)
      +
      +if __name__ == '__main__':
      +    unittest.main()
      diff --git a/python/compare-locales/compare_locales/tests/test_ini.py b/python/compare-locales/compare_locales/tests/test_ini.py
      new file mode 100644
      index 000000000..4c8cc03e1
      --- /dev/null
      +++ b/python/compare-locales/compare_locales/tests/test_ini.py
      @@ -0,0 +1,115 @@
      +# -*- coding: utf-8 -*-
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +import unittest
      +
      +from compare_locales.tests import ParserTestMixin
      +
      +
      +mpl2 = '''\
      +; This Source Code Form is subject to the terms of the Mozilla Public
      +; License, v. 2.0. If a copy of the MPL was not distributed with this file,
      +; You can obtain one at http://mozilla.org/MPL/2.0/.
      +'''
      +
      +
      +class TestIniParser(ParserTestMixin, unittest.TestCase):
      +
      +    filename = 'foo.ini'
      +
      +    def testSimpleHeader(self):
      +        self._test('''; This file is in the UTF-8 encoding
      +[Strings]
      +TitleText=Some Title
      +''', (('TitleText', 'Some Title'),))
      +        self.assert_('UTF-8' in self.parser.header)
      +
      +    def testMPL2_Space_UTF(self):
      +        self._test(mpl2 + '''
      +; This file is in the UTF-8 encoding
      +[Strings]
      +TitleText=Some Title
      +''', (('TitleText', 'Some Title'),))
      +        self.assert_('MPL' in self.parser.header)
      +
      +    def testMPL2_Space(self):
      +        self._test(mpl2 + '''
      +[Strings]
      +TitleText=Some Title
      +''', (('TitleText', 'Some Title'),))
      +        self.assert_('MPL' in self.parser.header)
      +
      +    def testMPL2_MultiSpace(self):
      +        self._test(mpl2 + '''\
      +
      +; more comments
      +
      +[Strings]
      +TitleText=Some Title
      +''', (('TitleText', 'Some Title'),))
      +        self.assert_('MPL' in self.parser.header)
      +
      +    def testMPL2_JunkBeforeCategory(self):
      +        self._test(mpl2 + '''\
      +Junk
      +[Strings]
      +TitleText=Some Title
      +''', (('_junk_\\d+_0-213$', mpl2 + '''\
      +Junk
      +[Strings]'''), ('TitleText', 'Some Title')))
      +        self.assert_('MPL' not in self.parser.header)
      +
      +    def test_TrailingComment(self):
      +        self._test(mpl2 + '''
      +[Strings]
      +TitleText=Some Title
      +;Stray trailing comment
      +''', (('TitleText', 'Some Title'),))
      +        self.assert_('MPL' in self.parser.header)
      +
      +    def test_SpacedTrailingComments(self):
      +        self._test(mpl2 + '''
      +[Strings]
      +TitleText=Some Title
      +
      +;Stray trailing comment
      +;Second stray comment
      +
      +''', (('TitleText', 'Some Title'),))
      +        self.assert_('MPL' in self.parser.header)
      +
      +    def test_TrailingCommentsAndJunk(self):
      +        self._test(mpl2 + '''
      +[Strings]
      +TitleText=Some Title
      +
      +;Stray trailing comment
      +Junk
      +;Second stray comment
      +
      +''', (('TitleText', 'Some Title'), ('_junk_\\d+_231-284$', '''\
      +
      +;Stray trailing comment
      +Junk
      +;Second stray comment
      +
      +''')))
      +        self.assert_('MPL' in self.parser.header)
      +
      +    def test_JunkInbetweenEntries(self):
      +        self._test(mpl2 + '''
      +[Strings]
      +TitleText=Some Title
      +
      +Junk
      +
      +Good=other string
      +''', (('TitleText', 'Some Title'), ('_junk_\\d+_231-236$', '''\
      +
      +Junk'''), ('Good', 'other string')))
      +        self.assert_('MPL' in self.parser.header)
      +
      +if __name__ == '__main__':
      +    unittest.main()
      diff --git a/python/compare-locales/compare_locales/tests/test_merge.py b/python/compare-locales/compare_locales/tests/test_merge.py
      new file mode 100644
      index 000000000..c006edbb5
      --- /dev/null
      +++ b/python/compare-locales/compare_locales/tests/test_merge.py
      @@ -0,0 +1,265 @@
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +import unittest
      +import os
      +from tempfile import mkdtemp
      +import shutil
      +
      +from compare_locales.parser import getParser
      +from compare_locales.paths import File
      +from compare_locales.compare import ContentComparer
      +
      +
      +class ContentMixin(object):
      +    maxDiff = None  # we got big dictionaries to compare
      +    extension = None  # OVERLOAD
      +
      +    def reference(self, content):
      +        self.ref = os.path.join(self.tmp, "en-reference" + self.extension)
      +        open(self.ref, "w").write(content)
      +
      +    def localized(self, content):
      +        self.l10n = os.path.join(self.tmp, "l10n" + self.extension)
      +        open(self.l10n, "w").write(content)
      +
      +
      +class TestProperties(unittest.TestCase, ContentMixin):
      +    extension = '.properties'
      +
      +    def setUp(self):
      +        self.tmp = mkdtemp()
      +        os.mkdir(os.path.join(self.tmp, "merge"))
      +
      +    def tearDown(self):
      +        shutil.rmtree(self.tmp)
      +        del self.tmp
      +
      +    def testGood(self):
      +        self.assertTrue(os.path.isdir(self.tmp))
      +        self.reference("""foo = fooVal
      +bar = barVal
      +eff = effVal""")
      +        self.localized("""foo = lFoo
      +bar = lBar
      +eff = lEff
      +""")
      +        cc = ContentComparer()
      +        cc.set_merge_stage(os.path.join(self.tmp, "merge"))
      +        cc.compare(File(self.ref, "en-reference.properties", ""),
      +                   File(self.l10n, "l10n.properties", ""))
      +        self.assertDictEqual(
      +            cc.observer.toJSON(),
      +            {'summary':
      +                {None: {
      +                    'changed': 3
      +                }},
      +             'details': {}
      +             }
      +        )
      +        self.assert_(not os.path.exists(os.path.join(cc.merge_stage,
      +                                                     'l10n.properties')))
      +
      +    def testMissing(self):
      +        self.assertTrue(os.path.isdir(self.tmp))
      +        self.reference("""foo = fooVal
      +bar = barVal
      +eff = effVal""")
      +        self.localized("""bar = lBar
      +""")
      +        cc = ContentComparer()
      +        cc.set_merge_stage(os.path.join(self.tmp, "merge"))
      +        cc.compare(File(self.ref, "en-reference.properties", ""),
      +                   File(self.l10n, "l10n.properties", ""))
      +        self.assertDictEqual(
      +            cc.observer.toJSON(),
      +            {'summary':
      +                {None: {
      +                    'changed': 1, 'missing': 2
      +                }},
      +             'details': {
      +                 'children': [
      +                     ('l10n.properties',
      +                         {'value': {'missingEntity': [u'eff', u'foo']}}
      +                      )
      +                 ]}
      +             }
      +        )
      +        mergefile = os.path.join(self.tmp, "merge", "l10n.properties")
      +        self.assertTrue(os.path.isfile(mergefile))
      +        p = getParser(mergefile)
      +        p.readFile(mergefile)
      +        [m, n] = p.parse()
      +        self.assertEqual(map(lambda e: e.key,  m), ["bar", "eff", "foo"])
      +
      +    def testError(self):
      +        self.assertTrue(os.path.isdir(self.tmp))
      +        self.reference("""foo = fooVal
      +bar = %d barVal
      +eff = effVal""")
      +        self.localized("""bar = %S lBar
      +eff = leffVal
      +""")
      +        cc = ContentComparer()
      +        cc.set_merge_stage(os.path.join(self.tmp, "merge"))
      +        cc.compare(File(self.ref, "en-reference.properties", ""),
      +                   File(self.l10n, "l10n.properties", ""))
      +        self.assertDictEqual(
      +            cc.observer.toJSON(),
      +            {'summary':
      +                {None: {
      +                    'changed': 2, 'errors': 1, 'missing': 1
      +                }},
      +             'details': {
      +                 'children': [
      +                     ('l10n.properties',
      +                         {'value': {
      +                          'error': [u'argument 1 `S` should be `d` '
      +                                    u'at line 1, column 6 for bar'],
      +                          'missingEntity': [u'foo']}}
      +                      )
      +                 ]}
      +             }
      +        )
      +        mergefile = os.path.join(self.tmp, "merge", "l10n.properties")
      +        self.assertTrue(os.path.isfile(mergefile))
      +        p = getParser(mergefile)
      +        p.readFile(mergefile)
      +        [m, n] = p.parse()
      +        self.assertEqual([e.key for e in m], ["eff", "foo", "bar"])
      +        self.assertEqual(m[n['bar']].val, '%d barVal')
      +
      +    def testObsolete(self):
      +        self.assertTrue(os.path.isdir(self.tmp))
      +        self.reference("""foo = fooVal
      +eff = effVal""")
      +        self.localized("""foo = fooVal
      +other = obsolete
      +eff = leffVal
      +""")
      +        cc = ContentComparer()
      +        cc.set_merge_stage(os.path.join(self.tmp, "merge"))
      +        cc.compare(File(self.ref, "en-reference.properties", ""),
      +                   File(self.l10n, "l10n.properties", ""))
      +        self.assertDictEqual(
      +            cc.observer.toJSON(),
      +            {'summary':
      +                {None: {
      +                    'changed': 1, 'obsolete': 1, 'unchanged': 1
      +                }},
      +             'details': {
      +                 'children': [
      +                     ('l10n.properties',
      +                         {'value': {'obsoleteEntity': [u'other']}})]},
      +             }
      +        )
      +
      +
      +class TestDTD(unittest.TestCase, ContentMixin):
      +    extension = '.dtd'
      +
      +    def setUp(self):
      +        self.tmp = mkdtemp()
      +        os.mkdir(os.path.join(self.tmp, "merge"))
      +
      +    def tearDown(self):
      +        shutil.rmtree(self.tmp)
      +        del self.tmp
      +
      +    def testGood(self):
      +        self.assertTrue(os.path.isdir(self.tmp))
      +        self.reference("""
      +
      +""")
      +        self.localized("""
      +
      +
      +""")
      +        cc = ContentComparer()
      +        cc.set_merge_stage(os.path.join(self.tmp, "merge"))
      +        cc.compare(File(self.ref, "en-reference.dtd", ""),
      +                   File(self.l10n, "l10n.dtd", ""))
      +        self.assertDictEqual(
      +            cc.observer.toJSON(),
      +            {'summary':
      +                {None: {
      +                    'changed': 3
      +                }},
      +             'details': {}
      +             }
      +        )
      +        self.assert_(
      +            not os.path.exists(os.path.join(cc.merge_stage, 'l10n.dtd')))
      +
      +    def testMissing(self):
      +        self.assertTrue(os.path.isdir(self.tmp))
      +        self.reference("""
      +
      +""")
      +        self.localized("""
      +""")
      +        cc = ContentComparer()
      +        cc.set_merge_stage(os.path.join(self.tmp, "merge"))
      +        cc.compare(File(self.ref, "en-reference.dtd", ""),
      +                   File(self.l10n, "l10n.dtd", ""))
      +        self.assertDictEqual(
      +            cc.observer.toJSON(),
      +            {'summary':
      +                {None: {
      +                    'changed': 1, 'missing': 2
      +                }},
      +             'details': {
      +                 'children': [
      +                     ('l10n.dtd',
      +                         {'value': {'missingEntity': [u'eff', u'foo']}}
      +                      )
      +                 ]}
      +             }
      +        )
      +        mergefile = os.path.join(self.tmp, "merge", "l10n.dtd")
      +        self.assertTrue(os.path.isfile(mergefile))
      +        p = getParser(mergefile)
      +        p.readFile(mergefile)
      +        [m, n] = p.parse()
      +        self.assertEqual(map(lambda e: e.key,  m), ["bar", "eff", "foo"])
      +
      +    def testJunk(self):
      +        self.assertTrue(os.path.isdir(self.tmp))
      +        self.reference("""
      +
      +""")
      +        self.localized("""
      +
      +
      +""")
      +        cc = ContentComparer()
      +        cc.set_merge_stage(os.path.join(self.tmp, "merge"))
      +        cc.compare(File(self.ref, "en-reference.dtd", ""),
      +                   File(self.l10n, "l10n.dtd", ""))
      +        self.assertDictEqual(
      +            cc.observer.toJSON(),
      +            {'summary':
      +                {None: {
      +                    'errors': 1, 'missing': 1, 'unchanged': 2
      +                }},
      +             'details': {
      +                 'children': [
      +                     ('l10n.dtd',
      +                         {'value': {
      +                             'error': [u'Unparsed content "" at 23-44'],
      +                             'missingEntity': [u'bar']}}
      +                      )
      +                 ]}
      +             }
      +        )
      +        mergefile = os.path.join(self.tmp, "merge", "l10n.dtd")
      +        self.assertTrue(os.path.isfile(mergefile))
      +        p = getParser(mergefile)
      +        p.readFile(mergefile)
      +        [m, n] = p.parse()
      +        self.assertEqual(map(lambda e: e.key,  m), ["foo", "eff", "bar"])
      +
      +if __name__ == '__main__':
      +    unittest.main()
      diff --git a/python/compare-locales/compare_locales/tests/test_properties.py b/python/compare-locales/compare_locales/tests/test_properties.py
      new file mode 100644
      index 000000000..331a1a57c
      --- /dev/null
      +++ b/python/compare-locales/compare_locales/tests/test_properties.py
      @@ -0,0 +1,95 @@
      +# -*- coding: utf-8 -*-
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +import unittest
      +
      +from compare_locales.tests import ParserTestMixin
      +
      +
      +class TestPropertiesParser(ParserTestMixin, unittest.TestCase):
      +
      +    filename = 'foo.properties'
      +
      +    def testBackslashes(self):
      +        self._test(r'''one_line = This is one line
      +two_line = This is the first \
      +of two lines
      +one_line_trailing = This line ends in \\
      +and has junk
      +two_lines_triple = This line is one of two and ends in \\\
      +and still has another line coming
      +''', (
      +            ('one_line', 'This is one line'),
      +            ('two_line', u'This is the first of two lines'),
      +            ('one_line_trailing', u'This line ends in \\'),
      +            ('_junk_\\d+_113-126$', 'and has junk\n'),
      +            ('two_lines_triple', 'This line is one of two and ends in \\'
      +             'and still has another line coming')))
      +
      +    def testProperties(self):
      +        # port of netwerk/test/PropertiesTest.cpp
      +        self.parser.readContents(self.resource('test.properties'))
      +        ref = ['1', '2', '3', '4', '5', '6', '7', '8',
      +               'this is the first part of a continued line '
      +               'and here is the 2nd part']
      +        i = iter(self.parser)
      +        for r, e in zip(ref, i):
      +            self.assertEqual(e.val, r)
      +
      +    def test_bug121341(self):
      +        # port of xpcom/tests/unit/test_bug121341.js
      +        self.parser.readContents(self.resource('bug121341.properties'))
      +        ref = ['abc', 'xy', u"\u1234\t\r\n\u00AB\u0001\n",
      +               "this is multiline property",
      +               "this is another multiline property", u"test\u0036",
      +               "yet another multiline propery", u"\ttest5\u0020", " test6\t",
      +               u"c\uCDEFd", u"\uABCD"]
      +        i = iter(self.parser)
      +        for r, e in zip(ref, i):
      +            self.assertEqual(e.val, r)
      +
      +    def test_comment_in_multi(self):
      +        self._test(r'''bar=one line with a \
      +# part that looks like a comment \
      +and an end''', (('bar', 'one line with a # part that looks like a comment '
      +                'and an end'),))
      +
      +    def test_license_header(self):
      +        self._test('''\
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +foo=value
      +''', (('foo', 'value'),))
      +        self.assert_('MPL' in self.parser.header)
      +
      +    def test_escapes(self):
      +        self.parser.readContents(r'''
      +# unicode escapes
      +zero = some \unicode
      +one = \u0
      +two = \u41
      +three = \u042
      +four = \u0043
      +five = \u0044a
      +six = \a
      +seven = \n\r\t\\
      +''')
      +        ref = ['some unicode', chr(0), 'A', 'B', 'C', 'Da', 'a', '\n\r\t\\']
      +        for r, e in zip(ref, self.parser):
      +            self.assertEqual(e.val, r)
      +
      +    def test_trailing_comment(self):
      +        self._test('''first = string
      +second = string
      +
      +#
      +#commented out
      +''', (('first', 'string'), ('second', 'string')))
      +
      +
      +if __name__ == '__main__':
      +    unittest.main()
      diff --git a/python/compare-locales/compare_locales/tests/test_util.py b/python/compare-locales/compare_locales/tests/test_util.py
      new file mode 100644
      index 000000000..fd2d2c92b
      --- /dev/null
      +++ b/python/compare-locales/compare_locales/tests/test_util.py
      @@ -0,0 +1,29 @@
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +import unittest
      +
      +from compare_locales import util
      +
      +
      +class ParseLocalesTest(unittest.TestCase):
      +    def test_empty(self):
      +        self.assertEquals(util.parseLocales(''), [])
      +
      +    def test_all(self):
      +        self.assertEquals(util.parseLocales('''af
      +de'''), ['af', 'de'])
      +
      +    def test_shipped(self):
      +        self.assertEquals(util.parseLocales('''af
      +ja win mac
      +de'''), ['af', 'de', 'ja'])
      +
      +    def test_sparse(self):
      +        self.assertEquals(util.parseLocales('''
      +af
      +
      +de
      +
      +'''), ['af', 'de'])
      diff --git a/python/compare-locales/compare_locales/tests/test_webapps.py b/python/compare-locales/compare_locales/tests/test_webapps.py
      new file mode 100644
      index 000000000..2f1223649
      --- /dev/null
      +++ b/python/compare-locales/compare_locales/tests/test_webapps.py
      @@ -0,0 +1,41 @@
      +# -*- coding: utf-8 -*-
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +import unittest
      +
      +from compare_locales import webapps
      +
      +
      +class TestFileComparison(unittest.TestCase):
      +
      +    def mock_FileComparison(self, mock_listdir):
      +        class Target(webapps.FileComparison):
      +            def _listdir(self):
      +                return mock_listdir()
      +        return Target('.', 'en-US')
      +
      +    def test_just_reference(self):
      +        def _listdir():
      +            return ['my_app.en-US.properties']
      +        filecomp = self.mock_FileComparison(_listdir)
      +        filecomp.files()
      +        self.assertEqual(filecomp.locales(), [])
      +        self.assertEqual(filecomp._reference.keys(), ['my_app'])
      +        file_ = filecomp._reference['my_app']
      +        self.assertEqual(file_.file, 'locales/my_app.en-US.properties')
      +
      +    def test_just_locales(self):
      +        def _listdir():
      +            return ['my_app.ar.properties',
      +                    'my_app.sr-Latn.properties',
      +                    'my_app.sv-SE.properties',
      +                    'my_app.po_SI.properties']
      +        filecomp = self.mock_FileComparison(_listdir)
      +        filecomp.files()
      +        self.assertEqual(filecomp.locales(),
      +                         ['ar', 'sr-Latn', 'sv-SE'])
      +        self.assertEqual(filecomp._files['ar'].keys(), ['my_app'])
      +        file_ = filecomp._files['ar']['my_app']
      +        self.assertEqual(file_.file, 'locales/my_app.ar.properties')
      diff --git a/python/compare-locales/compare_locales/util.py b/python/compare-locales/compare_locales/util.py
      new file mode 100644
      index 000000000..71eadd874
      --- /dev/null
      +++ b/python/compare-locales/compare_locales/util.py
      @@ -0,0 +1,11 @@
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +# This file is shared between compare-locales and locale-inspector
      +# test_util is in compare-locales only, for the sake of easy
      +# development.
      +
      +
      +def parseLocales(content):
      +    return sorted(l.split()[0] for l in content.splitlines() if l)
      diff --git a/python/compare-locales/compare_locales/webapps.py b/python/compare-locales/compare_locales/webapps.py
      new file mode 100644
      index 000000000..42f5b5657
      --- /dev/null
      +++ b/python/compare-locales/compare_locales/webapps.py
      @@ -0,0 +1,235 @@
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +'''gaia-style web apps support
      +
      +This variant supports manifest.webapp localization as well as
      +.properties files with a naming scheme of locales/foo.*.properties.
      +'''
      +
      +from collections import defaultdict
      +import json
      +import os
      +import os.path
      +import re
      +
      +from compare_locales.paths import File, EnumerateDir
      +from compare_locales.compare import AddRemove, ContentComparer
      +
      +
      +class WebAppCompare(object):
      +    '''For a given directory, analyze
      +    /manifest.webapp
      +    /locales/*.*.properties
      +
      +    Deduce the present locale codes.
      +    '''
      +    ignore_dirs = EnumerateDir.ignore_dirs
      +    reference_locale = 'en-US'
      +
      +    def __init__(self, basedir):
      +        '''Constructor
      +        :param basedir: Directory of the web app to inspect
      +        '''
      +        self.basedir = basedir
      +        self.manifest = Manifest(basedir, self.reference_locale)
      +        self.files = FileComparison(basedir, self.reference_locale)
      +        self.watcher = None
      +
      +    def compare(self, locales):
      +        '''Compare the manifest.webapp and the locales/*.*.properties
      +        '''
      +        if not locales:
      +            locales = self.locales()
      +        self.manifest.compare(locales)
      +        self.files.compare(locales)
      +
      +    def setWatcher(self, watcher):
      +        self.watcher = watcher
      +        self.manifest.watcher = watcher
      +        self.files.watcher = watcher
      +
      +    def locales(self):
      +        '''Inspect files on disk to find present languages.
      +        :rtype: List of locales, sorted, including reference.
      +        '''
      +        locales = set(self.manifest.strings.keys())
      +        locales.update(self.files.locales())
      +        locales = list(sorted(locales))
      +        return locales
      +
      +
      +class Manifest(object):
      +    '''Class that helps with parsing and inspection of manifest.webapp.
      +    '''
      +
      +    def __init__(self, basedir, reference_locale):
      +        self.file = File(os.path.join(basedir, 'manifest.webapp'),
      +                         'manifest.webapp')
      +        self.reference_locale = reference_locale
      +        self._strings = None
      +        self.watcher = None
      +
      +    @property
      +    def strings(self):
      +        if self._strings is None:
      +            self._strings = self.load_and_parse()
      +        return self._strings
      +
      +    def load_and_parse(self):
      +        try:
      +            manifest = json.load(open(self.file.fullpath))
      +        except (ValueError, IOError), e:
      +            if self.watcher:
      +                self.watcher.notify('error', self.file, str(e))
      +            return False
      +        return self.extract_manifest_strings(manifest)
      +
      +    def extract_manifest_strings(self, manifest_fragment):
      +        '''Extract localizable strings from a manifest dict.
      +        This method is recursive, and returns a two-level dict,
      +        first level being locale codes, second level being generated
      +        key and localized value. Keys are generated by concatenating
      +        each level in the json with a ".".
      +        '''
      +        rv = defaultdict(dict)
      +        localizable = manifest_fragment.pop('locales', {})
      +        if localizable:
      +            for locale, keyvalue in localizable.iteritems():
      +                for key, value in keyvalue.iteritems():
      +                    key = '.'.join(['locales', 'AB_CD', key])
      +                    rv[locale][key] = value
      +        for key, sub_manifest in manifest_fragment.iteritems():
      +            if not isinstance(sub_manifest, dict):
      +                continue
      +            subdict = self.extract_manifest_strings(sub_manifest)
      +            if subdict:
      +                for locale, keyvalue in subdict:
      +                    rv[locale].update((key + '.' + subkey, value)
      +                                      for subkey, value
      +                                      in keyvalue.iteritems())
      +        return rv
      +
      +    def compare(self, locales):
      +        strings = self.strings
      +        if not strings:
      +            return
      +        # create a copy so that we can mock around with it
      +        strings = strings.copy()
      +        reference = strings.pop(self.reference_locale)
      +        for locale in locales:
      +            if locale == self.reference_locale:
      +                continue
      +            self.compare_strings(reference,
      +                                 strings.get(locale, {}),
      +                                 locale)
      +
      +    def compare_strings(self, reference, l10n, locale):
      +        add_remove = AddRemove()
      +        add_remove.set_left(sorted(reference.keys()))
      +        add_remove.set_right(sorted(l10n.keys()))
      +        missing = obsolete = changed = unchanged = 0
      +        for op, item_or_pair in add_remove:
      +            if op == 'equal':
      +                if reference[item_or_pair[0]] == l10n[item_or_pair[1]]:
      +                    unchanged += 1
      +                else:
      +                    changed += 1
      +            else:
      +                key = item_or_pair.replace('.AB_CD.',
      +                                           '.%s.' % locale)
      +                if op == 'add':
      +                    # obsolete entry
      +                    obsolete += 1
      +                    self.watcher.notify('obsoleteEntity', self.file, key)
      +                else:
      +                    # missing entry
      +                    missing += 1
      +                    self.watcher.notify('missingEntity', self.file, key)
      +
      +
      +class FileComparison(object):
      +    '''Compare the locales/*.*.properties files inside a webapp.
      +    '''
+    prop = re.compile('(?P<base>.*)\\.'
+                      '(?P<locale>[a-zA-Z]+(?:-[a-zA-Z]+)*)'
+                      '\\.properties$')
      +
      +    def __init__(self, basedir, reference_locale):
      +        self.basedir = basedir
      +        self.reference_locale = reference_locale
      +        self.watcher = None
      +        self._reference = self._files = None
      +
      +    def locales(self):
      +        '''Get the locales present in the webapp
      +        '''
      +        self.files()
      +        locales = self._files.keys()
      +        locales.sort()
      +        return locales
      +
      +    def compare(self, locales):
      +        self.files()
      +        for locale in locales:
      +            l10n = self._files[locale]
      +            filecmp = AddRemove()
      +            filecmp.set_left(sorted(self._reference.keys()))
      +            filecmp.set_right(sorted(l10n.keys()))
      +            for op, item_or_pair in filecmp:
      +                if op == 'equal':
      +                    self.watcher.compare(self._reference[item_or_pair[0]],
      +                                         l10n[item_or_pair[1]])
      +                elif op == 'add':
      +                    # obsolete file
      +                    self.watcher.remove(l10n[item_or_pair])
      +                else:
      +                    # missing file
      +                    _path = '.'.join([item_or_pair, locale, 'properties'])
      +                    missingFile = File(
      +                        os.path.join(self.basedir, 'locales', _path),
      +                        'locales/' + _path)
      +                    self.watcher.add(self._reference[item_or_pair],
      +                                     missingFile)
      +
      +    def files(self):
      +        '''Read the list of locales from disk.
      +        '''
      +        if self._reference:
      +            return
      +        self._reference = {}
      +        self._files = defaultdict(dict)
      +        path_list = self._listdir()
      +        for path in path_list:
      +            match = self.prop.match(path)
      +            if match is None:
      +                continue
      +            locale = match.group('locale')
      +            if locale == self.reference_locale:
      +                target = self._reference
      +            else:
      +                target = self._files[locale]
      +            fullpath = os.path.join(self.basedir, 'locales', path)
      +            target[match.group('base')] = File(fullpath, 'locales/' + path)
      +
      +    def _listdir(self):
      +        'Monkey-patch this for testing.'
      +        return os.listdir(os.path.join(self.basedir, 'locales'))
      +
      +
      +def compare_web_app(basedir, locales, other_observer=None):
      +    '''Compare gaia-style web app.
      +
      +    Optional arguments are:
      +    - other_observer. A object implementing
      +        notify(category, _file, data)
      +      The return values of that callback are ignored.
      +    '''
      +    comparer = ContentComparer()
      +    if other_observer is not None:
      +        comparer.add_observer(other_observer)
      +    webapp_comp = WebAppCompare(basedir)
      +    webapp_comp.setWatcher(comparer)
      +    webapp_comp.compare(locales)
      +    return comparer.observer
      diff --git a/python/compare-locales/docs/glossary.rst b/python/compare-locales/docs/glossary.rst
      new file mode 100644
      index 000000000..e89839b16
      --- /dev/null
      +++ b/python/compare-locales/docs/glossary.rst
      @@ -0,0 +1,26 @@
      +========
      +Glossary
      +========
      +
      +.. glossary::
      +    :sorted:
      +
      +    Localization
      +        The process of creating content in a native language, including
      +        translation, but also customizations like Search.
      +
      +    Localizability
      +        Enabling a piece of software to be localized. This is mostly
      +        externalizing English strings, and writing build support to 
      +        pick up localized search engines etc.
      +
      +    L10n
      +        *Numeronym* for Localization, *L*, 10 chars, *n*
      +
      +    L12y
      +        Numeronym for Localizability
      +
      +    l10n-merge
      +        nick-name for the process of merging ``en-US`` and a particular
      +        localization into one joint artifact without any missing strings, and
      +        without technical errors, as far as possible.
      diff --git a/python/compare-locales/docs/index.rst b/python/compare-locales/docs/index.rst
      new file mode 100644
      index 000000000..925ca0f88
      --- /dev/null
      +++ b/python/compare-locales/docs/index.rst
      @@ -0,0 +1,191 @@
      +============
      +Localization
      +============
      +
      +.. toctree::
      +   :maxdepth: 1
      +
      +   glossary
      +
      +The documentation here is targeted at developers, writing localizable code
      +for Firefox and Firefox for Android, as well as Thunderbird and SeaMonkey.
      +
      +If you haven't dealt with localization in gecko code before, it's a good
      +idea to check the :doc:`./glossary` for what localization is, and which terms
      +we use for what.
      +
      +Exposing strings
      +----------------
      +
      +Localizers only handle a few file formats in well-known locations in the
      +source tree.
      +
      +The locations are in directories like
      +
      +    :file:`browser/`\ ``locales/en-US/``\ :file:`subdir/file.ext`
      +
      +The first thing to note is that only files beneath :file:`locales/en-US` are
      +exposed to localizers. The second thing to note is that only a few directories
      +are exposed. Which directories are exposed is defined in files called
      +``l10n.ini``, which are at a
      +`few places `_
      +in the source code.
      +
      +An example looks like this
      +
      +.. code-block:: ini
      +
      +    [general]
      +    depth = ../..
      +
      +    [compare]
      +    dirs = browser
      +        browser/branding/official
      +
      +    [includes]
      +    toolkit = toolkit/locales/l10n.ini
      +
      +This tells the l10n infrastructure three things: Resolve the paths against the
      +directory two levels up, include files in :file:`browser/locales/en-US` and
      +:file:`browser/branding/official/locales/en-US`, and load more data from
      +:file:`toolkit/locales/l10n.ini`.
      +
      +For projects like Thunderbird and SeaMonkey in ``comm-central``, additional
      +data needs to be provided when including an ``l10n.ini`` from a different
      +repository:
      +
      +.. code-block:: ini
      +
      +    [include_toolkit]
      +    type = hg
      +    mozilla = mozilla-central
      +    repo = http://hg.mozilla.org/
      +    l10n.ini = toolkit/locales/l10n.ini
      +
      +This tells the l10n pieces where to find the repository, and where inside
      +that repository the ``l10n.ini`` file is. This is needed because for local
      +builds, :file:`mail/locales/l10n.ini` references
      +:file:`mozilla/toolkit/locales/l10n.ini`, which is where the comm-central
      +build setup expects toolkit to be.
      +
      +Now that the directories exposed to l10n are known, we can talk about the
      +supported file formats.
      +
      +File formats
      +------------
      +
      +This is just a quick overview, please check the
      +`XUL Tutorial `_
      +for an in-depth tour.
      +
      +The following file formats are known to the l10n tool chains:
      +
      +DTD
      +    Used in XUL and XHTML. Also for Android native strings.
      +Properties
      +    Used from JavaScript and C++. When used from js, also comes with
      +    `plural support `_.
      +ini
      +    Used by the crashreporter and updater, avoid if possible.
      +foo.defines
+    Used during builds, for example to create :file:`install.rdf` for
      +    language packs.
      +
      +Adding new formats involves changing various different tools, and is strongly
      +discouraged.
      +
      +Exceptions
      +----------
      +Generally, anything that exists in ``en-US`` needs a one-to-one mapping in
      +all localizations. There are a few cases where that's not wanted, notably
      +around search settings and spell-checking dictionaries.
      +
      +To enable tools to adjust to those exceptions, there's a python-coded
      +:py:mod:`filter.py`, implementing :py:func:`test`, with the following
      +signature
      +
      +.. code-block:: python
      +
      +    def test(mod, path, entity = None):
      +        if does_not_matter:
      +            return "ignore"
      +        if show_but_do_not_merge:
      +            return "report"
      +        # default behavior, localizer or build need to do something
      +        return "error"
      +
      +For any missing file, this function is called with ``mod`` being
      +the *module*, and ``path`` being the relative path inside
      +:file:`locales/en-US`. The module is the top-level dir as referenced in
      +:file:`l10n.ini`.
      +
      +For missing strings, the :py:data:`entity` parameter is the key of the string
      +in the en-US file.
      +
      +l10n-merge
      +----------
      +
      +Gecko doesn't support fallback from a localization to ``en-US`` at runtime.
      +Thus, the build needs to ensure that the localization as it's built into
      +the package has all required strings, and that the strings don't contain
      +errors. To ensure that, we're *merging* the localization and ``en-US``
      +at build time, nick-named :term:`l10n-merge`.
      +
      +The process is usually triggered via
      +
      +.. code-block:: bash
      +
      +    $obj-dir/browser/locales> make merge-de LOCALE_MERGEDIR=$PWD/merge-de
      +
      +It creates another directory in the object dir, :file:`merge-ab-CD`, in
      +which the modified files are stored. The actual repackaging process looks for
      +the localized files in the merge dir first, then the localized file, and then
      +in ``en-US``. Thus, for the ``de`` localization of
      +:file:`browser/locales/en-US/chrome/browser/browser.dtd`, it checks
      +
      +1. :file:`$objdir/browser/locales/merge-de/browser/chrome/browser/browser.dtd`
      +2. :file:`$(LOCALE_BASEDIR)/de/browser/chrome/browser/browser.dtd`
      +3. :file:`browser/locales/en-US/chrome/browser/browser.dtd`
      +
      +and will include the first of those files it finds.
      +
      +l10n-merge modifies a file if it supports the particular file type, and there
      +are missing strings which are not filtered out, or if an existing string
      +shows an error. See the Checks section below for details.
      +
      +Checks
      +------
      +
      +As part of the build and other localization tool chains, we run a variety
      +of source-based checks. Think of them as linters.
      +
      +The suite of checks is usually determined by file type, i.e., there's a
      +suite of checks for DTD files and one for properties files, etc. An exception
      +are Android-specific checks.
      +
      +Android
      +^^^^^^^
      +
      +For Android, we need to localize :file:`strings.xml`. We're doing so via DTD
      +files, which is mostly OK. But the strings inside the XML file have to
      +satisfy additional constraints about quotes etc, that are not part of XML.
      +There's probably some historic background on why things are the way they are.
      +
      +The Android-specific checks are enabled for DTD files that are in
      +:file:`mobile/android/base/locales/en-US/`.
      +
      +Localizations
      +-------------
      +
      +Now that we talked in-depth about how to expose content to localizers,
      +where are the localizations?
      +
      +We host a mercurial repository per locale and per branch. Most of our
      +localizations only work starting with aurora, so the bulk of the localizations
      +is found on https://hg.mozilla.org/releases/l10n/mozilla-aurora/. We have
      +several localizations continuously working with mozilla-central, those
      +repositories are on https://hg.mozilla.org/l10n-central/.
      +
      +You can search inside our localized files on
      +`Transvision `_ and
      +http://dxr.mozilla.org/l10n-mozilla-aurora/.
      diff --git a/python/compare-locales/mach_commands.py b/python/compare-locales/mach_commands.py
      new file mode 100644
      index 000000000..7be6a50e7
      --- /dev/null
      +++ b/python/compare-locales/mach_commands.py
      @@ -0,0 +1,81 @@
      +# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +from __future__ import absolute_import, print_function, unicode_literals
      +
      +from mach.decorators import (
      +    CommandArgument,
      +    CommandProvider,
      +    Command,
      +)
      +
      +from mozbuild.base import (
      +    MachCommandBase,
      +)
      +
      +import mozpack.path as mozpath
      +
      +
      +MERGE_HELP = '''Directory to merge to. Will be removed to before running
      +the comparison. Default: $(OBJDIR)/($MOZ_BUILD_APP)/locales/merge-$(AB_CD)
      +'''.lstrip()
      +
      +
      +@CommandProvider
      +class CompareLocales(MachCommandBase):
      +    """Run compare-locales."""
      +
      +    @Command('compare-locales', category='testing',
      +             description='Run source checks on a localization.')
      +    @CommandArgument('--l10n-ini',
      +                     help='l10n.ini describing the app. ' +
      +                     'Default: $(MOZ_BUILD_APP)/locales/l10n.ini')
      +    @CommandArgument('--l10n-base',
      +                     help='Directory with the localizations. ' +
      +                     'Default: $(L10NBASEDIR)')
      +    @CommandArgument('--merge-dir',
      +                     help=MERGE_HELP)
      +    @CommandArgument('locales', nargs='+', metavar='ab_CD',
      +                     help='Locale codes to compare')
      +    def compare(self, l10n_ini=None, l10n_base=None, merge_dir=None,
      +                locales=None):
      +        from compare_locales.paths import EnumerateApp
      +        from compare_locales.compare import compareApp
      +
      +        # check if we're configured and use defaults from there
      +        # otherwise, error early
      +        try:
      +            self.substs  # explicitly check
      +            if not l10n_ini:
      +                l10n_ini = mozpath.join(
      +                    self.topsrcdir,
      +                    self.substs['MOZ_BUILD_APP'],
      +                    'locales', 'l10n.ini'
      +                )
      +            if not l10n_base:
      +                l10n_base = mozpath.join(
      +                    self.topsrcdir,
      +                    self.substs['L10NBASEDIR']
      +                )
      +        except Exception:
      +            if not l10n_ini or not l10n_base:
      +                print('Specify --l10n-ini and --l10n-base or run configure.')
      +                return 1
      +
      +        if not merge_dir:
      +            try:
      +                # self.substs is raising an Exception if we're not configured
      +                # don't merge if we're not
      +                merge_dir = mozpath.join(
      +                    self.topobjdir,
      +                    self.substs['MOZ_BUILD_APP'],
      +                    'locales', 'merge-dir-{ab_CD}'
      +                )
      +            except Exception:
      +                pass
      +
      +        app = EnumerateApp(l10n_ini, l10n_base, locales)
      +        observer = compareApp(app, merge_stage=merge_dir,
      +                              clobber=True)
      +        print(observer.serialize())
      diff --git a/python/compare-locales/moz.build b/python/compare-locales/moz.build
      new file mode 100644
      index 000000000..f772ab620
      --- /dev/null
      +++ b/python/compare-locales/moz.build
      @@ -0,0 +1,16 @@
      +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
      +# vim: set filetype=python:
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +with Files('compare_locales/**'):
      +    BUG_COMPONENT = ('Localization Infrastructure and Tools', 'compare-locales')
      +with Files('docs/**'):
      +    BUG_COMPONENT = ('Mozilla Localizations', 'Documentation')
      +
      +# SPHINX_PYTHON_PACKAGE_DIRS += [
      +#     'compare_locales',
      +# ]
      +
      +SPHINX_TREES['.'] = 'docs'
      diff --git a/python/configobj/PKG-INFO b/python/configobj/PKG-INFO
      new file mode 100644
      index 000000000..71c47b907
      --- /dev/null
      +++ b/python/configobj/PKG-INFO
      @@ -0,0 +1,47 @@
      +Metadata-Version: 1.0
      +Name: configobj
      +Version: 4.7.2
      +Summary: Config file reading, writing and validation.
      +Home-page: http://www.voidspace.org.uk/python/configobj.html
      +Author: Michael Foord & Nicola Larosa
      +Author-email: fuzzyman@voidspace.org.uk
      +License: UNKNOWN
      +Download-URL: http://www.voidspace.org.uk/downloads/configobj-4.7.2.zip
      +Description: **ConfigObj** is a simple but powerful config file reader and writer: an *ini
      +        file round tripper*. Its main feature is that it is very easy to use, with a
      +        straightforward programmer's interface and a simple syntax for config files.
      +        It has lots of other features though :
      +        
      +        * Nested sections (subsections), to any level
      +        * List values
      +        * Multiple line values
      +        * Full Unicode support
      +        * String interpolation (substitution)
      +        * Integrated with a powerful validation system
      +        
      +        - including automatic type checking/conversion
      +        - and allowing default values
      +        - repeated sections
      +        
      +        * All comments in the file are preserved
      +        * The order of keys/sections is preserved
      +        * Powerful ``unrepr`` mode for storing/retrieving Python data-types
      +        
      +        | Release 4.7.2 fixes several bugs in 4.7.1
      +        | Release 4.7.1 fixes a bug with the deprecated options keyword in
      +        | 4.7.0.
      +        | Release 4.7.0 improves performance adds features for validation and
      +        | fixes some bugs.
      +Keywords: config,ini,dictionary,application,admin,sysadmin,configuration,validation
      +Platform: UNKNOWN
      +Classifier: Development Status :: 6 - Mature
      +Classifier: Intended Audience :: Developers
      +Classifier: License :: OSI Approved :: BSD License
      +Classifier: Programming Language :: Python
      +Classifier: Programming Language :: Python :: 2.3
      +Classifier: Programming Language :: Python :: 2.4
      +Classifier: Programming Language :: Python :: 2.5
      +Classifier: Programming Language :: Python :: 2.6
      +Classifier: Operating System :: OS Independent
      +Classifier: Topic :: Software Development :: Libraries
      +Classifier: Topic :: Software Development :: Libraries :: Python Modules
      diff --git a/python/configobj/configobj.py b/python/configobj/configobj.py
      new file mode 100644
      index 000000000..c1f6e6df8
      --- /dev/null
      +++ b/python/configobj/configobj.py
      @@ -0,0 +1,2468 @@
      +# configobj.py
      +# A config file reader/writer that supports nested sections in config files.
      +# Copyright (C) 2005-2010 Michael Foord, Nicola Larosa
      +# E-mail: fuzzyman AT voidspace DOT org DOT uk
      +#         nico AT tekNico DOT net
      +
      +# ConfigObj 4
      +# http://www.voidspace.org.uk/python/configobj.html
      +
      +# Released subject to the BSD License
      +# Please see http://www.voidspace.org.uk/python/license.shtml
      +
      +# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
      +# For information about bugfixes, updates and support, please join the
      +# ConfigObj mailing list:
      +# http://lists.sourceforge.net/lists/listinfo/configobj-develop
      +# Comments, suggestions and bug reports welcome.
      +
      +from __future__ import generators
      +
      +import os
      +import re
      +import sys
      +
      +from codecs import BOM_UTF8, BOM_UTF16, BOM_UTF16_BE, BOM_UTF16_LE
      +
      +
      +# imported lazily to avoid startup performance hit if it isn't used
      +compiler = None
      +
      +# A dictionary mapping BOM to
      +# the encoding to decode with, and what to set the
      +# encoding attribute to.
      +BOMS = {
      +    BOM_UTF8: ('utf_8', None),
      +    BOM_UTF16_BE: ('utf16_be', 'utf_16'),
      +    BOM_UTF16_LE: ('utf16_le', 'utf_16'),
      +    BOM_UTF16: ('utf_16', 'utf_16'),
      +    }
      +# All legal variants of the BOM codecs.
      +# TODO: the list of aliases is not meant to be exhaustive, is there a
      +#   better way ?
      +BOM_LIST = {
      +    'utf_16': 'utf_16',
      +    'u16': 'utf_16',
      +    'utf16': 'utf_16',
      +    'utf-16': 'utf_16',
      +    'utf16_be': 'utf16_be',
      +    'utf_16_be': 'utf16_be',
      +    'utf-16be': 'utf16_be',
      +    'utf16_le': 'utf16_le',
      +    'utf_16_le': 'utf16_le',
      +    'utf-16le': 'utf16_le',
      +    'utf_8': 'utf_8',
      +    'u8': 'utf_8',
      +    'utf': 'utf_8',
      +    'utf8': 'utf_8',
      +    'utf-8': 'utf_8',
      +    }
      +
      +# Map of encodings to the BOM to write.
      +BOM_SET = {
      +    'utf_8': BOM_UTF8,
      +    'utf_16': BOM_UTF16,
      +    'utf16_be': BOM_UTF16_BE,
      +    'utf16_le': BOM_UTF16_LE,
      +    None: BOM_UTF8
      +    }
      +
      +
      +def match_utf8(encoding):
      +    return BOM_LIST.get(encoding.lower()) == 'utf_8'
      +
      +
      +# Quote strings used for writing values
      +squot = "'%s'"
      +dquot = '"%s"'
      +noquot = "%s"
      +wspace_plus = ' \r\n\v\t\'"'
      +tsquot = '"""%s"""'
      +tdquot = "'''%s'''"
      +
      +# Sentinel for use in getattr calls to replace hasattr
      +MISSING = object()
      +
      +__version__ = '4.7.2'
      +
      +try:
      +    any
      +except NameError:
      +    def any(iterable):
      +        for entry in iterable:
      +            if entry:
      +                return True
      +        return False
      +
      +
      +__all__ = (
      +    '__version__',
      +    'DEFAULT_INDENT_TYPE',
      +    'DEFAULT_INTERPOLATION',
      +    'ConfigObjError',
      +    'NestingError',
      +    'ParseError',
      +    'DuplicateError',
      +    'ConfigspecError',
      +    'ConfigObj',
      +    'SimpleVal',
      +    'InterpolationError',
      +    'InterpolationLoopError',
      +    'MissingInterpolationOption',
      +    'RepeatSectionError',
      +    'ReloadError',
      +    'UnreprError',
      +    'UnknownType',
      +    'flatten_errors',
      +    'get_extra_values'
      +)
      +
      +DEFAULT_INTERPOLATION = 'configparser'
      +DEFAULT_INDENT_TYPE = '    '
      +MAX_INTERPOL_DEPTH = 10
      +
      +OPTION_DEFAULTS = {
      +    'interpolation': True,
      +    'raise_errors': False,
      +    'list_values': True,
      +    'create_empty': False,
      +    'file_error': False,
      +    'configspec': None,
      +    'stringify': True,
      +    # option may be set to one of ('', ' ', '\t')
      +    'indent_type': None,
      +    'encoding': None,
      +    'default_encoding': None,
      +    'unrepr': False,
      +    'write_empty_values': False,
      +}
      +
      +
      +
      +def getObj(s):
      +    global compiler
      +    if compiler is None:
      +        import compiler
      +    s = "a=" + s
      +    p = compiler.parse(s)
      +    return p.getChildren()[1].getChildren()[0].getChildren()[1]
      +
      +
      +class UnknownType(Exception):
      +    pass
      +
      +
      +class Builder(object):
      +    
      +    def build(self, o):
      +        m = getattr(self, 'build_' + o.__class__.__name__, None)
      +        if m is None:
      +            raise UnknownType(o.__class__.__name__)
      +        return m(o)
      +    
      +    def build_List(self, o):
      +        return map(self.build, o.getChildren())
      +    
      +    def build_Const(self, o):
      +        return o.value
      +    
      +    def build_Dict(self, o):
      +        d = {}
      +        i = iter(map(self.build, o.getChildren()))
      +        for el in i:
      +            d[el] = i.next()
      +        return d
      +    
      +    def build_Tuple(self, o):
      +        return tuple(self.build_List(o))
      +    
      +    def build_Name(self, o):
      +        if o.name == 'None':
      +            return None
      +        if o.name == 'True':
      +            return True
      +        if o.name == 'False':
      +            return False
      +        
      +        # An undefined Name
      +        raise UnknownType('Undefined Name')
      +    
      +    def build_Add(self, o):
      +        real, imag = map(self.build_Const, o.getChildren())
      +        try:
      +            real = float(real)
      +        except TypeError:
      +            raise UnknownType('Add')
      +        if not isinstance(imag, complex) or imag.real != 0.0:
      +            raise UnknownType('Add')
      +        return real+imag
      +    
      +    def build_Getattr(self, o):
      +        parent = self.build(o.expr)
      +        return getattr(parent, o.attrname)
      +    
      +    def build_UnarySub(self, o):
      +        return -self.build_Const(o.getChildren()[0])
      +    
      +    def build_UnaryAdd(self, o):
      +        return self.build_Const(o.getChildren()[0])
      +
      +
      +_builder = Builder()
      +
      +
      +def unrepr(s):
      +    if not s:
      +        return s
      +    return _builder.build(getObj(s))
      +
      +
      +
      +class ConfigObjError(SyntaxError):
      +    """
      +    This is the base class for all errors that ConfigObj raises.
      +    It is a subclass of SyntaxError.
      +    """
      +    def __init__(self, message='', line_number=None, line=''):
      +        self.line = line
      +        self.line_number = line_number
      +        SyntaxError.__init__(self, message)
      +
      +
      +class NestingError(ConfigObjError):
      +    """
      +    This error indicates a level of nesting that doesn't match.
      +    """
      +
      +
      +class ParseError(ConfigObjError):
      +    """
      +    This error indicates that a line is badly written.
      +    It is neither a valid ``key = value`` line,
      +    nor a valid section marker line.
      +    """
      +
      +
      +class ReloadError(IOError):
      +    """
      +    A 'reload' operation failed.
      +    This exception is a subclass of ``IOError``.
      +    """
      +    def __init__(self):
      +        IOError.__init__(self, 'reload failed, filename is not set.')
      +
      +
      +class DuplicateError(ConfigObjError):
      +    """
      +    The keyword or section specified already exists.
      +    """
      +
      +
      +class ConfigspecError(ConfigObjError):
      +    """
      +    An error occured whilst parsing a configspec.
      +    """
      +
      +
      +class InterpolationError(ConfigObjError):
      +    """Base class for the two interpolation errors."""
      +
      +
      +class InterpolationLoopError(InterpolationError):
      +    """Maximum interpolation depth exceeded in string interpolation."""
      +
      +    def __init__(self, option):
      +        InterpolationError.__init__(
      +            self,
      +            'interpolation loop detected in value "%s".' % option)
      +
      +
      +class RepeatSectionError(ConfigObjError):
      +    """
      +    This error indicates additional sections in a section with a
      +    ``__many__`` (repeated) section.
      +    """
      +
      +
      +class MissingInterpolationOption(InterpolationError):
      +    """A value specified for interpolation was missing."""
      +    def __init__(self, option):
      +        msg = 'missing option "%s" in interpolation.' % option
      +        InterpolationError.__init__(self, msg)
      +
      +
      +class UnreprError(ConfigObjError):
      +    """An error parsing in unrepr mode."""
      +
      +
      +
      +class InterpolationEngine(object):
      +    """
      +    A helper class to help perform string interpolation.
      +
      +    This class is an abstract base class; its descendants perform
      +    the actual work.
      +    """
      +
      +    # compiled regexp to use in self.interpolate()
      +    _KEYCRE = re.compile(r"%\(([^)]*)\)s")
      +    _cookie = '%'
      +
      +    def __init__(self, section):
      +        # the Section instance that "owns" this engine
      +        self.section = section
      +
      +
      +    def interpolate(self, key, value):
      +        # short-cut
      +        if not self._cookie in value:
      +            return value
      +        
      +        def recursive_interpolate(key, value, section, backtrail):
      +            """The function that does the actual work.
      +
      +            ``value``: the string we're trying to interpolate.
      +            ``section``: the section in which that string was found
      +            ``backtrail``: a dict to keep track of where we've been,
      +            to detect and prevent infinite recursion loops
      +
      +            This is similar to a depth-first-search algorithm.
      +            """
      +            # Have we been here already?
      +            if (key, section.name) in backtrail:
      +                # Yes - infinite loop detected
      +                raise InterpolationLoopError(key)
      +            # Place a marker on our backtrail so we won't come back here again
      +            backtrail[(key, section.name)] = 1
      +
      +            # Now start the actual work
      +            match = self._KEYCRE.search(value)
      +            while match:
      +                # The actual parsing of the match is implementation-dependent,
      +                # so delegate to our helper function
      +                k, v, s = self._parse_match(match)
      +                if k is None:
      +                    # That's the signal that no further interpolation is needed
      +                    replacement = v
      +                else:
      +                    # Further interpolation may be needed to obtain final value
      +                    replacement = recursive_interpolate(k, v, s, backtrail)
      +                # Replace the matched string with its final value
      +                start, end = match.span()
      +                value = ''.join((value[:start], replacement, value[end:]))
      +                new_search_start = start + len(replacement)
      +                # Pick up the next interpolation key, if any, for next time
      +                # through the while loop
      +                match = self._KEYCRE.search(value, new_search_start)
      +
      +            # Now safe to come back here again; remove marker from backtrail
      +            del backtrail[(key, section.name)]
      +
      +            return value
      +
      +        # Back in interpolate(), all we have to do is kick off the recursive
      +        # function with appropriate starting values
      +        value = recursive_interpolate(key, value, self.section, {})
      +        return value
      +
      +
      +    def _fetch(self, key):
      +        """Helper function to fetch values from owning section.
      +
      +        Returns a 2-tuple: the value, and the section where it was found.
      +        """
      +        # switch off interpolation before we try and fetch anything !
      +        save_interp = self.section.main.interpolation
      +        self.section.main.interpolation = False
      +
      +        # Start at section that "owns" this InterpolationEngine
      +        current_section = self.section
      +        while True:
      +            # try the current section first
      +            val = current_section.get(key)
      +            if val is not None and not isinstance(val, Section):
      +                break
      +            # try "DEFAULT" next
      +            val = current_section.get('DEFAULT', {}).get(key)
      +            if val is not None and not isinstance(val, Section):
      +                break
      +            # move up to parent and try again
      +            # top-level's parent is itself
      +            if current_section.parent is current_section:
      +                # reached top level, time to give up
      +                break
      +            current_section = current_section.parent
      +
      +        # restore interpolation to previous value before returning
      +        self.section.main.interpolation = save_interp
      +        if val is None:
      +            raise MissingInterpolationOption(key)
      +        return val, current_section
      +
      +
      +    def _parse_match(self, match):
      +        """Implementation-dependent helper function.
      +
      +        Will be passed a match object corresponding to the interpolation
      +        key we just found (e.g., "%(foo)s" or "$foo"). Should look up that
      +        key in the appropriate config file section (using the ``_fetch()``
      +        helper function) and return a 3-tuple: (key, value, section)
      +
      +        ``key`` is the name of the key we're looking for
      +        ``value`` is the value found for that key
      +        ``section`` is a reference to the section where it was found
      +
      +        ``key`` and ``section`` should be None if no further
      +        interpolation should be performed on the resulting value
      +        (e.g., if we interpolated "$$" and returned "$").
      +        """
      +        raise NotImplementedError()
      +    
      +
      +
      +class ConfigParserInterpolation(InterpolationEngine):
      +    """Behaves like ConfigParser."""
      +    _cookie = '%'
      +    _KEYCRE = re.compile(r"%\(([^)]*)\)s")
      +
      +    def _parse_match(self, match):
      +        key = match.group(1)
      +        value, section = self._fetch(key)
      +        return key, value, section
      +
      +
      +
      +class TemplateInterpolation(InterpolationEngine):
      +    """Behaves like string.Template."""
      +    _cookie = '$'
      +    _delimiter = '$'
      +    _KEYCRE = re.compile(r"""
      +        \$(?:
      +          (?P<escaped>\$)              |   # Two $ signs
      +          (?P<named>[_a-z][_a-z0-9]*)  |   # $name format
      +          {(?P<braced>[^}]*)}              # ${name} format
      +        )
      +        """, re.IGNORECASE | re.VERBOSE)
      +
      +    def _parse_match(self, match):
      +        # Valid name (in or out of braces): fetch value from section
      +        key = match.group('named') or match.group('braced')
      +        if key is not None:
      +            value, section = self._fetch(key)
      +            return key, value, section
      +        # Escaped delimiter (e.g., $$): return single delimiter
      +        if match.group('escaped') is not None:
      +            # Return None for key and section to indicate it's time to stop
      +            return None, self._delimiter, None
      +        # Anything else: ignore completely, just return it unchanged
      +        return None, match.group(), None
      +
      +
      +interpolation_engines = {
      +    'configparser': ConfigParserInterpolation,
      +    'template': TemplateInterpolation,
      +}
      +
      +
      +def __newobj__(cls, *args):
      +    # Hack for pickle
      +    return cls.__new__(cls, *args) 
      +
      +class Section(dict):
      +    """
      +    A dictionary-like object that represents a section in a config file.
      +    
      +    It does string interpolation if the 'interpolation' attribute
      +    of the 'main' object is set to True.
      +    
      +    Interpolation is tried first from this object, then from the 'DEFAULT'
      +    section of this object, next from the parent and its 'DEFAULT' section,
      +    and so on until the main object is reached.
      +    
      +    A Section will behave like an ordered dictionary - following the
      +    order of the ``scalars`` and ``sections`` attributes.
      +    You can use this to change the order of members.
      +    
      +    Iteration follows the order: scalars, then sections.
      +    """
      +
      +    
      +    def __setstate__(self, state):
      +        dict.update(self, state[0])
      +        self.__dict__.update(state[1])
      +
      +    def __reduce__(self):
      +        state = (dict(self), self.__dict__)
      +        return (__newobj__, (self.__class__,), state)
      +    
      +    
      +    def __init__(self, parent, depth, main, indict=None, name=None):
      +        """
      +        * parent is the section above
      +        * depth is the depth level of this section
      +        * main is the main ConfigObj
      +        * indict is a dictionary to initialise the section with
      +        """
      +        if indict is None:
      +            indict = {}
      +        dict.__init__(self)
      +        # used for nesting level *and* interpolation
      +        self.parent = parent
      +        # used for the interpolation attribute
      +        self.main = main
      +        # level of nesting depth of this Section
      +        self.depth = depth
      +        # purely for information
      +        self.name = name
      +        #
      +        self._initialise()
      +        # we do this explicitly so that __setitem__ is used properly
      +        # (rather than just passing to ``dict.__init__``)
      +        for entry, value in indict.iteritems():
      +            self[entry] = value
      +            
      +            
      +    def _initialise(self):
      +        # the sequence of scalar values in this Section
      +        self.scalars = []
      +        # the sequence of sections in this Section
      +        self.sections = []
      +        # for comments :-)
      +        self.comments = {}
      +        self.inline_comments = {}
      +        # the configspec
      +        self.configspec = None
      +        # for defaults
      +        self.defaults = []
      +        self.default_values = {}
      +        self.extra_values = []
      +        self._created = False
      +
      +
      +    def _interpolate(self, key, value):
      +        try:
      +            # do we already have an interpolation engine?
      +            engine = self._interpolation_engine
      +        except AttributeError:
      +            # not yet: first time running _interpolate(), so pick the engine
      +            name = self.main.interpolation
      +            if name == True:  # note that "if name:" would be incorrect here
      +                # backwards-compatibility: interpolation=True means use default
      +                name = DEFAULT_INTERPOLATION
      +            name = name.lower()  # so that "Template", "template", etc. all work
      +            class_ = interpolation_engines.get(name, None)
      +            if class_ is None:
      +                # invalid value for self.main.interpolation
      +                self.main.interpolation = False
      +                return value
      +            else:
      +                # save reference to engine so we don't have to do this again
      +                engine = self._interpolation_engine = class_(self)
      +        # let the engine do the actual work
      +        return engine.interpolate(key, value)
      +
      +
      +    def __getitem__(self, key):
      +        """Fetch the item and do string interpolation."""
      +        val = dict.__getitem__(self, key)
      +        if self.main.interpolation: 
      +            if isinstance(val, basestring):
      +                return self._interpolate(key, val)
      +            if isinstance(val, list):
      +                def _check(entry):
      +                    if isinstance(entry, basestring):
      +                        return self._interpolate(key, entry)
      +                    return entry
      +                new = [_check(entry) for entry in val]
      +                if new != val:
      +                    return new
      +        return val
      +
      +
      +    def __setitem__(self, key, value, unrepr=False):
      +        """
      +        Correctly set a value.
      +        
      +        Making dictionary values Section instances.
      +        (We have to special case 'Section' instances - which are also dicts)
      +        
      +        Keys must be strings.
      +        Values need only be strings (or lists of strings) if
      +        ``main.stringify`` is set.
      +        
      +        ``unrepr`` must be set when setting a value to a dictionary, without
      +        creating a new sub-section.
      +        """
      +        if not isinstance(key, basestring):
      +            raise ValueError('The key "%s" is not a string.' % key)
      +        
      +        # add the comment
      +        if key not in self.comments:
      +            self.comments[key] = []
      +            self.inline_comments[key] = ''
      +        # remove the entry from defaults
      +        if key in self.defaults:
      +            self.defaults.remove(key)
      +        #
      +        if isinstance(value, Section):
      +            if key not in self:
      +                self.sections.append(key)
      +            dict.__setitem__(self, key, value)
      +        elif isinstance(value, dict) and not unrepr:
      +            # First create the new depth level,
      +            # then create the section
      +            if key not in self:
      +                self.sections.append(key)
      +            new_depth = self.depth + 1
      +            dict.__setitem__(
      +                self,
      +                key,
      +                Section(
      +                    self,
      +                    new_depth,
      +                    self.main,
      +                    indict=value,
      +                    name=key))
      +        else:
      +            if key not in self:
      +                self.scalars.append(key)
      +            if not self.main.stringify:
      +                if isinstance(value, basestring):
      +                    pass
      +                elif isinstance(value, (list, tuple)):
      +                    for entry in value:
      +                        if not isinstance(entry, basestring):
      +                            raise TypeError('Value is not a string "%s".' % entry)
      +                else:
      +                    raise TypeError('Value is not a string "%s".' % value)
      +            dict.__setitem__(self, key, value)
      +
      +
      +    def __delitem__(self, key):
      +        """Remove items from the sequence when deleting."""
      +        dict. __delitem__(self, key)
      +        if key in self.scalars:
      +            self.scalars.remove(key)
      +        else:
      +            self.sections.remove(key)
      +        del self.comments[key]
      +        del self.inline_comments[key]
      +
      +
      +    def get(self, key, default=None):
      +        """A version of ``get`` that doesn't bypass string interpolation."""
      +        try:
      +            return self[key]
      +        except KeyError:
      +            return default
      +
      +
      +    def update(self, indict):
      +        """
      +        A version of update that uses our ``__setitem__``.
      +        """
      +        for entry in indict:
      +            self[entry] = indict[entry]
      +
      +
      +    def pop(self, key, default=MISSING):
      +        """
      +        'D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
      +        If key is not found, d is returned if given, otherwise KeyError is raised'
      +        """
      +        try:
      +            val = self[key]
      +        except KeyError:
      +            if default is MISSING:
      +                raise
      +            val = default
      +        else:
      +            del self[key]
      +        return val
      +
      +
      +    def popitem(self):
      +        """Pops the first (key,val)"""
      +        sequence = (self.scalars + self.sections)
      +        if not sequence:
      +            raise KeyError(": 'popitem(): dictionary is empty'")
      +        key = sequence[0]
      +        val =  self[key]
      +        del self[key]
      +        return key, val
      +
      +
      +    def clear(self):
      +        """
      +        A version of clear that also affects scalars/sections
      +        Also clears comments and configspec.
      +        
      +        Leaves other attributes alone :
      +            depth/main/parent are not affected
      +        """
      +        dict.clear(self)
      +        self.scalars = []
      +        self.sections = []
      +        self.comments = {}
      +        self.inline_comments = {}
      +        self.configspec = None
      +        self.defaults = []
      +        self.extra_values = []
      +
      +
      +    def setdefault(self, key, default=None):
      +        """A version of setdefault that sets sequence if appropriate."""
      +        try:
      +            return self[key]
      +        except KeyError:
      +            self[key] = default
      +            return self[key]
      +
      +
      +    def items(self):
      +        """D.items() -> list of D's (key, value) pairs, as 2-tuples"""
      +        return zip((self.scalars + self.sections), self.values())
      +
      +
      +    def keys(self):
      +        """D.keys() -> list of D's keys"""
      +        return (self.scalars + self.sections)
      +
      +
      +    def values(self):
      +        """D.values() -> list of D's values"""
      +        return [self[key] for key in (self.scalars + self.sections)]
      +
      +
      +    def iteritems(self):
      +        """D.iteritems() -> an iterator over the (key, value) items of D"""
      +        return iter(self.items())
      +
      +
      +    def iterkeys(self):
      +        """D.iterkeys() -> an iterator over the keys of D"""
      +        return iter((self.scalars + self.sections))
      +
      +    __iter__ = iterkeys
      +
      +
      +    def itervalues(self):
      +        """D.itervalues() -> an iterator over the values of D"""
      +        return iter(self.values())
      +
      +
      +    def __repr__(self):
      +        """x.__repr__() <==> repr(x)"""
      +        def _getval(key):
      +            try:
      +                return self[key]
      +            except MissingInterpolationOption:
      +                return dict.__getitem__(self, key)
      +        return '{%s}' % ', '.join([('%s: %s' % (repr(key), repr(_getval(key))))
      +            for key in (self.scalars + self.sections)])
      +
      +    __str__ = __repr__
      +    __str__.__doc__ = "x.__str__() <==> str(x)"
      +
      +
      +    # Extra methods - not in a normal dictionary
      +
      +    def dict(self):
      +        """
      +        Return a deepcopy of self as a dictionary.
      +        
      +        All members that are ``Section`` instances are recursively turned to
      +        ordinary dictionaries - by calling their ``dict`` method.
      +        
      +        >>> n = a.dict()
      +        >>> n == a
      +        1
      +        >>> n is a
      +        0
      +        """
      +        newdict = {}
      +        for entry in self:
      +            this_entry = self[entry]
      +            if isinstance(this_entry, Section):
      +                this_entry = this_entry.dict()
      +            elif isinstance(this_entry, list):
      +                # create a copy rather than a reference
      +                this_entry = list(this_entry)
      +            elif isinstance(this_entry, tuple):
      +                # create a copy rather than a reference
      +                this_entry = tuple(this_entry)
      +            newdict[entry] = this_entry
      +        return newdict
      +
      +
      +    def merge(self, indict):
      +        """
      +        A recursive update - useful for merging config files.
      +        
      +        >>> a = '''[section1]
      +        ...     option1 = True
      +        ...     [[subsection]]
      +        ...     more_options = False
      +        ...     # end of file'''.splitlines()
      +        >>> b = '''# File is user.ini
      +        ...     [section1]
      +        ...     option1 = False
      +        ...     # end of file'''.splitlines()
      +        >>> c1 = ConfigObj(b)
      +        >>> c2 = ConfigObj(a)
      +        >>> c2.merge(c1)
      +        >>> c2
      +        ConfigObj({'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}})
      +        """
      +        for key, val in indict.items():
      +            if (key in self and isinstance(self[key], dict) and
      +                                isinstance(val, dict)):
      +                self[key].merge(val)
      +            else:   
      +                self[key] = val
      +
      +
      +    def rename(self, oldkey, newkey):
      +        """
      +        Change a keyname to another, without changing position in sequence.
      +        
      +        Implemented so that transformations can be made on keys,
      +        as well as on values. (used by encode and decode)
      +        
      +        Also renames comments.
      +        """
      +        if oldkey in self.scalars:
      +            the_list = self.scalars
      +        elif oldkey in self.sections:
      +            the_list = self.sections
      +        else:
      +            raise KeyError('Key "%s" not found.' % oldkey)
      +        pos = the_list.index(oldkey)
      +        #
      +        val = self[oldkey]
      +        dict.__delitem__(self, oldkey)
      +        dict.__setitem__(self, newkey, val)
      +        the_list.remove(oldkey)
      +        the_list.insert(pos, newkey)
      +        comm = self.comments[oldkey]
      +        inline_comment = self.inline_comments[oldkey]
      +        del self.comments[oldkey]
      +        del self.inline_comments[oldkey]
      +        self.comments[newkey] = comm
      +        self.inline_comments[newkey] = inline_comment
      +
      +
    def walk(self, function, raise_errors=True,
            call_on_sections=False, **keywargs):
        """
        Walk every member and call a function on the keyword and value.
        
        Return a dictionary of the return values
        
        If the function raises an exception, raise the error
        unless ``raise_errors=False``, in which case set the return value to
        ``False``.
        
        Any unrecognised keyword arguments you pass to walk, will be passed on
        to the function you pass in.
        
        Note: if ``call_on_sections`` is ``True`` then - on encountering a
        subsection, *first* the function is called for the *whole* subsection,
        and then recurses into its members. This means your function must be
        able to handle strings, dictionaries and lists. This allows you
        to change the key of subsections as well as for ordinary members. The
        return value when called on the whole subsection has to be discarded.
        
        See  the encode and decode methods for examples, including functions.
        
        .. admonition:: caution
        
            You can use ``walk`` to transform the names of members of a section
            but you mustn't add or delete members.
        
        >>> config = '''[XXXXsection]
        ... XXXXkey = XXXXvalue'''.splitlines()
        >>> cfg = ConfigObj(config)
        >>> cfg
        ConfigObj({'XXXXsection': {'XXXXkey': 'XXXXvalue'}})
        >>> def transform(section, key):
        ...     val = section[key]
        ...     newkey = key.replace('XXXX', 'CLIENT1')
        ...     section.rename(key, newkey)
        ...     if isinstance(val, (tuple, list, dict)):
        ...         pass
        ...     else:
        ...         val = val.replace('XXXX', 'CLIENT1')
        ...         section[newkey] = val
        >>> cfg.walk(transform, call_on_sections=True)
        {'CLIENT1section': {'CLIENT1key': None}}
        >>> cfg
        ConfigObj({'CLIENT1section': {'CLIENT1key': 'CLIENT1value'}})
        """
        out = {}
        # scalars first
        # index-based loops throughout: ``function`` may rename the entry
        # (via ``rename``), so the name is re-read by position afterwards
        for i in range(len(self.scalars)):
            entry = self.scalars[i]
            try:
                val = function(self, entry, **keywargs)
                # bound again in case name has changed
                entry = self.scalars[i]
                out[entry] = val
            except Exception:
                if raise_errors:
                    raise
                else:
                    # record the failure under the (possibly renamed) key
                    entry = self.scalars[i]
                    out[entry] = False
        # then sections
        for i in range(len(self.sections)):
            entry = self.sections[i]
            if call_on_sections:
                try:
                    function(self, entry, **keywargs)
                except Exception:
                    if raise_errors:
                        raise
                    else:
                        entry = self.sections[i]
                        out[entry] = False
                # bound again in case name has changed
                entry = self.sections[i]
            # previous result is discarded
            out[entry] = self[entry].walk(
                function,
                raise_errors=raise_errors,
                call_on_sections=call_on_sections,
                **keywargs)
        return out
      +
      +
      +    def as_bool(self, key):
      +        """
      +        Accepts a key as input. The corresponding value must be a string or
      +        the objects (``True`` or 1) or (``False`` or 0). We allow 0 and 1 to
      +        retain compatibility with Python 2.2.
      +        
      +        If the string is one of  ``True``, ``On``, ``Yes``, or ``1`` it returns 
      +        ``True``.
      +        
      +        If the string is one of  ``False``, ``Off``, ``No``, or ``0`` it returns 
      +        ``False``.
      +        
      +        ``as_bool`` is not case sensitive.
      +        
      +        Any other input will raise a ``ValueError``.
      +        
      +        >>> a = ConfigObj()
      +        >>> a['a'] = 'fish'
      +        >>> a.as_bool('a')
      +        Traceback (most recent call last):
      +        ValueError: Value "fish" is neither True nor False
      +        >>> a['b'] = 'True'
      +        >>> a.as_bool('b')
      +        1
      +        >>> a['b'] = 'off'
      +        >>> a.as_bool('b')
      +        0
      +        """
      +        val = self[key]
      +        if val == True:
      +            return True
      +        elif val == False:
      +            return False
      +        else:
      +            try:
      +                if not isinstance(val, basestring):
      +                    # TODO: Why do we raise a KeyError here?
      +                    raise KeyError()
      +                else:
      +                    return self.main._bools[val.lower()]
      +            except KeyError:
      +                raise ValueError('Value "%s" is neither True nor False' % val)
      +
      +
      +    def as_int(self, key):
      +        """
      +        A convenience method which coerces the specified value to an integer.
      +        
      +        If the value is an invalid literal for ``int``, a ``ValueError`` will
      +        be raised.
      +        
      +        >>> a = ConfigObj()
      +        >>> a['a'] = 'fish'
      +        >>> a.as_int('a')
      +        Traceback (most recent call last):
      +        ValueError: invalid literal for int() with base 10: 'fish'
      +        >>> a['b'] = '1'
      +        >>> a.as_int('b')
      +        1
      +        >>> a['b'] = '3.2'
      +        >>> a.as_int('b')
      +        Traceback (most recent call last):
      +        ValueError: invalid literal for int() with base 10: '3.2'
      +        """
      +        return int(self[key])
      +
      +
      +    def as_float(self, key):
      +        """
      +        A convenience method which coerces the specified value to a float.
      +        
      +        If the value is an invalid literal for ``float``, a ``ValueError`` will
      +        be raised.
      +        
      +        >>> a = ConfigObj()
      +        >>> a['a'] = 'fish'
      +        >>> a.as_float('a')
      +        Traceback (most recent call last):
      +        ValueError: invalid literal for float(): fish
      +        >>> a['b'] = '1'
      +        >>> a.as_float('b')
      +        1.0
      +        >>> a['b'] = '3.2'
      +        >>> a.as_float('b')
      +        3.2000000000000002
      +        """
      +        return float(self[key])
      +    
      +    
      +    def as_list(self, key):
      +        """
      +        A convenience method which fetches the specified value, guaranteeing
      +        that it is a list.
      +        
      +        >>> a = ConfigObj()
      +        >>> a['a'] = 1
      +        >>> a.as_list('a')
      +        [1]
      +        >>> a['a'] = (1,)
      +        >>> a.as_list('a')
      +        [1]
      +        >>> a['a'] = [1]
      +        >>> a.as_list('a')
      +        [1]
      +        """
      +        result = self[key]
      +        if isinstance(result, (tuple, list)):
      +            return list(result)
      +        return [result]
      +        
      +
      +    def restore_default(self, key):
      +        """
      +        Restore (and return) default value for the specified key.
      +        
      +        This method will only work for a ConfigObj that was created
      +        with a configspec and has been validated.
      +        
      +        If there is no default value for this key, ``KeyError`` is raised.
      +        """
      +        default = self.default_values[key]
      +        dict.__setitem__(self, key, default)
      +        if key not in self.defaults:
      +            self.defaults.append(key)
      +        return default
      +
      +    
      +    def restore_defaults(self):
      +        """
      +        Recursively restore default values to all members
      +        that have them.
      +        
      +        This method will only work for a ConfigObj that was created
      +        with a configspec and has been validated.
      +        
      +        It doesn't delete or modify entries without default values.
      +        """
      +        for key in self.default_values:
      +            self.restore_default(key)
      +            
      +        for section in self.sections:
      +            self[section].restore_defaults()
      +
      +
class ConfigObj(Section):
    """An object to read, create, and write config files."""

    # Matches a ``key = value`` line; the keyword may be quoted with
    # single or double quotes.
    _keyword = re.compile(r'''^ # line start
        (\s*)                   # indentation
        (                       # keyword
            (?:".*?")|          # double quotes
            (?:'.*?')|          # single quotes
            (?:[^'"=].*?)       # no quotes
        )
        \s*=\s*                 # divider
        (.*)                    # value (including list values and comments)
        $   # line end
        ''',
        re.VERBOSE)

    # Matches a ``[section]`` / ``[[subsection]]`` marker line; the nesting
    # depth is the number of opening square brackets.
    _sectionmarker = re.compile(r'''^
        (\s*)                     # 1: indentation
        ((?:\[\s*)+)              # 2: section marker open
        (                         # 3: section name open
            (?:"\s*\S.*?\s*")|    # at least one non-space with double quotes
            (?:'\s*\S.*?\s*')|    # at least one non-space with single quotes
            (?:[^'"\s].*?)        # at least one non-space unquoted
        )                         # section name close
        ((?:\s*\])+)              # 4: section marker close
        \s*(\#.*)?                # 5: optional comment
        $''',
        re.VERBOSE)
      +
      +    # this regexp pulls list values out as a single string
      +    # or single values and comments
      +    # FIXME: this regex adds a '' to the end of comma terminated lists
      +    #   workaround in ``_handle_value``
      +    _valueexp = re.compile(r'''^
      +        (?:
      +            (?:
      +                (
      +                    (?:
      +                        (?:
      +                            (?:".*?")|              # double quotes
      +                            (?:'.*?')|              # single quotes
      +                            (?:[^'",\#][^,\#]*?)    # unquoted
      +                        )
      +                        \s*,\s*                     # comma
      +                    )*      # match all list items ending in a comma (if any)
      +                )
      +                (
      +                    (?:".*?")|                      # double quotes
      +                    (?:'.*?')|                      # single quotes
      +                    (?:[^'",\#\s][^,]*?)|           # unquoted
      +                    (?:(? 1:
      +                msg = "Parsing failed with several errors.\nFirst error %s" % info
      +                error = ConfigObjError(msg)
      +            else:
      +                error = self._errors[0]
      +            # set the errors attribute; it's a list of tuples:
      +            # (error_type, message, line_number)
      +            error.errors = self._errors
      +            # set the config attribute
      +            error.config = self
      +            raise error
      +        # delete private attributes
      +        del self._errors
      +        
      +        if configspec is None:
      +            self.configspec = None
      +        else:
      +            self._handle_configspec(configspec)
      +    
      +    
      +    def _initialise(self, options=None):
      +        if options is None:
      +            options = OPTION_DEFAULTS
      +            
      +        # initialise a few variables
      +        self.filename = None
      +        self._errors = []
      +        self.raise_errors = options['raise_errors']
      +        self.interpolation = options['interpolation']
      +        self.list_values = options['list_values']
      +        self.create_empty = options['create_empty']
      +        self.file_error = options['file_error']
      +        self.stringify = options['stringify']
      +        self.indent_type = options['indent_type']
      +        self.encoding = options['encoding']
      +        self.default_encoding = options['default_encoding']
      +        self.BOM = False
      +        self.newlines = None
      +        self.write_empty_values = options['write_empty_values']
      +        self.unrepr = options['unrepr']
      +        
      +        self.initial_comment = []
      +        self.final_comment = []
      +        self.configspec = None
      +        
      +        if self._inspec:
      +            self.list_values = False
      +        
      +        # Clear section attributes as well
      +        Section._initialise(self)
      +        
      +        
      +    def __repr__(self):
      +        def _getval(key):
      +            try:
      +                return self[key]
      +            except MissingInterpolationOption:
      +                return dict.__getitem__(self, key)
      +        return ('ConfigObj({%s})' % 
      +                ', '.join([('%s: %s' % (repr(key), repr(_getval(key)))) 
      +                for key in (self.scalars + self.sections)]))
      +    
      +    
    def _handle_bom(self, infile):
        """
        Handle any BOM, and decode if necessary.
        
        If an encoding is specified, that *must* be used - but the BOM should
        still be removed (and the BOM attribute set).
        
        (If the encoding is wrongly specified, then a BOM for an alternative
        encoding won't be discovered or removed.)
        
        If an encoding is not specified, UTF8 or UTF16 BOM will be detected and
        removed. The BOM attribute will be set. UTF16 will be decoded to
        unicode.
        
        NOTE: This method must not be called with an empty ``infile``.
        
        Specifying the *wrong* encoding is likely to cause a
        ``UnicodeDecodeError``.
        
        ``infile`` must always be returned as a list of lines, but may be
        passed in as a single string.
        """
        if ((self.encoding is not None) and
            (self.encoding.lower() not in BOM_LIST)):
            # No need to check for a BOM
            # the encoding specified doesn't have one
            # just decode
            return self._decode(infile, self.encoding)
        
        # only the first line can carry a BOM
        if isinstance(infile, (list, tuple)):
            line = infile[0]
        else:
            line = infile
        if self.encoding is not None:
            # encoding explicitly supplied
            # And it could have an associated BOM
            # TODO: if encoding is just UTF16 - we ought to check for both
            # TODO: big endian and little endian versions.
            enc = BOM_LIST[self.encoding.lower()]
            if enc == 'utf_16':
                # For UTF16 we try big endian and little endian
                for BOM, (encoding, final_encoding) in BOMS.items():
                    if not final_encoding:
                        # skip UTF8
                        continue
                    if infile.startswith(BOM):
                        ### BOM discovered
                        ##self.BOM = True
                        # Don't need to remove BOM
                        return self._decode(infile, encoding)
                    
                # If we get this far, will *probably* raise a DecodeError
                # As it doesn't appear to start with a BOM
                return self._decode(infile, self.encoding)
            
            # Must be UTF8
            BOM = BOM_SET[enc]
            if not line.startswith(BOM):
                return self._decode(infile, self.encoding)
            
            # strip the BOM from the first line before decoding
            newline = line[len(BOM):]
            
            # BOM removed
            if isinstance(infile, (list, tuple)):
                infile[0] = newline
            else:
                infile = newline
            self.BOM = True
            return self._decode(infile, self.encoding)
        
        # No encoding specified - so we need to check for UTF8/UTF16
        for BOM, (encoding, final_encoding) in BOMS.items():
            if not line.startswith(BOM):
                continue
            else:
                # BOM discovered
                self.encoding = final_encoding
                if not final_encoding:
                    self.BOM = True
                    # UTF8
                    # remove BOM
                    newline = line[len(BOM):]
                    if isinstance(infile, (list, tuple)):
                        infile[0] = newline
                    else:
                        infile = newline
                    # UTF8 - don't decode
                    if isinstance(infile, basestring):
                        return infile.splitlines(True)
                    else:
                        return infile
                # UTF16 - have to decode
                return self._decode(infile, encoding)
            
        # No BOM discovered and no encoding specified, just return
        if isinstance(infile, basestring):
            # infile read from a file will be a single string
            return infile.splitlines(True)
        return infile
      +
      +
      +    def _a_to_u(self, aString):
      +        """Decode ASCII strings to unicode if a self.encoding is specified."""
      +        if self.encoding:
      +            return aString.decode('ascii')
      +        else:
      +            return aString
      +
      +
      +    def _decode(self, infile, encoding):
      +        """
      +        Decode infile to unicode. Using the specified encoding.
      +        
      +        if is a string, it also needs converting to a list.
      +        """
      +        if isinstance(infile, basestring):
      +            # can't be unicode
      +            # NOTE: Could raise a ``UnicodeDecodeError``
      +            return infile.decode(encoding).splitlines(True)
      +        for i, line in enumerate(infile):
      +            if not isinstance(line, unicode):
      +                # NOTE: The isinstance test here handles mixed lists of unicode/string
      +                # NOTE: But the decode will break on any non-string values
      +                # NOTE: Or could raise a ``UnicodeDecodeError``
      +                infile[i] = line.decode(encoding)
      +        return infile
      +
      +
      +    def _decode_element(self, line):
      +        """Decode element to unicode if necessary."""
      +        if not self.encoding:
      +            return line
      +        if isinstance(line, str) and self.default_encoding:
      +            return line.decode(self.default_encoding)
      +        return line
      +
      +
      +    def _str(self, value):
      +        """
      +        Used by ``stringify`` within validate, to turn non-string values
      +        into strings.
      +        """
      +        if not isinstance(value, basestring):
      +            return str(value)
      +        else:
      +            return value
      +
      +
      +    def _parse(self, infile):
      +        """Actually parse the config file."""
      +        temp_list_values = self.list_values
      +        if self.unrepr:
      +            self.list_values = False
      +            
      +        comment_list = []
      +        done_start = False
      +        this_section = self
      +        maxline = len(infile) - 1
      +        cur_index = -1
      +        reset_comment = False
      +        
      +        while cur_index < maxline:
      +            if reset_comment:
      +                comment_list = []
      +            cur_index += 1
      +            line = infile[cur_index]
      +            sline = line.strip()
      +            # do we have anything on the line ?
      +            if not sline or sline.startswith('#'):
      +                reset_comment = False
      +                comment_list.append(line)
      +                continue
      +            
      +            if not done_start:
      +                # preserve initial comment
      +                self.initial_comment = comment_list
      +                comment_list = []
      +                done_start = True
      +                
      +            reset_comment = True
      +            # first we check if it's a section marker
      +            mat = self._sectionmarker.match(line)
      +            if mat is not None:
      +                # is a section line
      +                (indent, sect_open, sect_name, sect_close, comment) = mat.groups()
      +                if indent and (self.indent_type is None):
      +                    self.indent_type = indent
      +                cur_depth = sect_open.count('[')
      +                if cur_depth != sect_close.count(']'):
      +                    self._handle_error("Cannot compute the section depth at line %s.",
      +                                       NestingError, infile, cur_index)
      +                    continue
      +                
      +                if cur_depth < this_section.depth:
      +                    # the new section is dropping back to a previous level
      +                    try:
      +                        parent = self._match_depth(this_section,
      +                                                   cur_depth).parent
      +                    except SyntaxError:
      +                        self._handle_error("Cannot compute nesting level at line %s.",
      +                                           NestingError, infile, cur_index)
      +                        continue
      +                elif cur_depth == this_section.depth:
      +                    # the new section is a sibling of the current section
      +                    parent = this_section.parent
      +                elif cur_depth == this_section.depth + 1:
      +                    # the new section is a child the current section
      +                    parent = this_section
      +                else:
      +                    self._handle_error("Section too nested at line %s.",
      +                                       NestingError, infile, cur_index)
      +                    
      +                sect_name = self._unquote(sect_name)
      +                if sect_name in parent:
      +                    self._handle_error('Duplicate section name at line %s.',
      +                                       DuplicateError, infile, cur_index)
      +                    continue
      +                
      +                # create the new section
      +                this_section = Section(
      +                    parent,
      +                    cur_depth,
      +                    self,
      +                    name=sect_name)
      +                parent[sect_name] = this_section
      +                parent.inline_comments[sect_name] = comment
      +                parent.comments[sect_name] = comment_list
      +                continue
      +            #
      +            # it's not a section marker,
      +            # so it should be a valid ``key = value`` line
      +            mat = self._keyword.match(line)
      +            if mat is None:
      +                # it neither matched as a keyword
      +                # or a section marker
      +                self._handle_error(
      +                    'Invalid line at line "%s".',
      +                    ParseError, infile, cur_index)
      +            else:
      +                # is a keyword value
      +                # value will include any inline comment
      +                (indent, key, value) = mat.groups()
      +                if indent and (self.indent_type is None):
      +                    self.indent_type = indent
      +                # check for a multiline value
      +                if value[:3] in ['"""', "'''"]:
      +                    try:
      +                        value, comment, cur_index = self._multiline(
      +                            value, infile, cur_index, maxline)
      +                    except SyntaxError:
      +                        self._handle_error(
      +                            'Parse error in value at line %s.',
      +                            ParseError, infile, cur_index)
      +                        continue
      +                    else:
      +                        if self.unrepr:
      +                            comment = ''
      +                            try:
      +                                value = unrepr(value)
      +                            except Exception, e:
      +                                if type(e) == UnknownType:
      +                                    msg = 'Unknown name or type in value at line %s.'
      +                                else:
      +                                    msg = 'Parse error in value at line %s.'
      +                                self._handle_error(msg, UnreprError, infile,
      +                                    cur_index)
      +                                continue
      +                else:
      +                    if self.unrepr:
      +                        comment = ''
      +                        try:
      +                            value = unrepr(value)
      +                        except Exception, e:
      +                            if isinstance(e, UnknownType):
      +                                msg = 'Unknown name or type in value at line %s.'
      +                            else:
      +                                msg = 'Parse error in value at line %s.'
      +                            self._handle_error(msg, UnreprError, infile,
      +                                cur_index)
      +                            continue
      +                    else:
      +                        # extract comment and lists
      +                        try:
      +                            (value, comment) = self._handle_value(value)
      +                        except SyntaxError:
      +                            self._handle_error(
      +                                'Parse error in value at line %s.',
      +                                ParseError, infile, cur_index)
      +                            continue
      +                #
      +                key = self._unquote(key)
      +                if key in this_section:
      +                    self._handle_error(
      +                        'Duplicate keyword name at line %s.',
      +                        DuplicateError, infile, cur_index)
      +                    continue
      +                # add the key.
      +                # we set unrepr because if we have got this far we will never
      +                # be creating a new section
      +                this_section.__setitem__(key, value, unrepr=True)
      +                this_section.inline_comments[key] = comment
      +                this_section.comments[key] = comment_list
      +                continue
      +        #
      +        if self.indent_type is None:
      +            # no indentation used, set the type accordingly
      +            self.indent_type = ''
      +
      +        # preserve the final comment
      +        if not self and not self.initial_comment:
      +            self.initial_comment = comment_list
      +        elif not reset_comment:
      +            self.final_comment = comment_list
      +        self.list_values = temp_list_values
      +
      +
      +    def _match_depth(self, sect, depth):
      +        """
      +        Given a section and a depth level, walk back through the sections
      +        parents to see if the depth level matches a previous section.
      +        
      +        Return a reference to the right section,
      +        or raise a SyntaxError.
      +        """
      +        while depth < sect.depth:
      +            if sect is sect.parent:
      +                # we've reached the top level already
      +                raise SyntaxError()
      +            sect = sect.parent
      +        if sect.depth == depth:
      +            return sect
      +        # shouldn't get here
      +        raise SyntaxError()
      +
      +
+    def _handle_error(self, text, ErrorClass, infile, cur_index):
+        """
+        Handle an error according to the error settings.
+        
+        Either raise the error or store it.
+        The error will have occurred at ``cur_index``.
+        
+        ``text`` is a message template with a single ``%s`` slot for the
+        (1-based) line number.
+        """
+        line = infile[cur_index]
+        # infile is 0-indexed; report a 1-based line number to the user
+        cur_index += 1
+        message = text % cur_index
+        error = ErrorClass(message, cur_index, line)
+        if self.raise_errors:
+            # raise the error - parsing stops here
+            raise error
+        # store the error
+        # reraise when parsing has finished
+        self._errors.append(error)
      +
      +
      +    def _unquote(self, value):
      +        """Return an unquoted version of a value"""
      +        if not value:
      +            # should only happen during parsing of lists
      +            raise SyntaxError
      +        if (value[0] == value[-1]) and (value[0] in ('"', "'")):
      +            value = value[1:-1]
      +        return value
      +
      +
+    def _quote(self, value, multiline=True):
+        """
+        Return a safely quoted version of a value.
+        
+        Raise a ConfigObjError if the value cannot be safely quoted.
+        If multiline is ``True`` (default) then use triple quotes
+        if necessary.
+        
+        * Don't quote values that don't need it.
+        * Recursively quote members of a list and return a comma joined list.
+        * Multiline is ``False`` for lists.
+        * Obey list syntax for empty and single member lists.
+        
+        If ``list_values=False`` then the value is only quoted if it contains
+        a ``\\n`` (is multiline) or '#'.
+        
+        If ``write_empty_values`` is set, and the value is an empty string, it
+        won't be quoted.
+        """
+        if multiline and self.write_empty_values and value == '':
+            # Only if multiline is set, so that it is used for values not
+            # keys, and not values that are part of a list
+            return ''
+        
+        if multiline and isinstance(value, (list, tuple)):
+            # list syntax: empty list is a lone comma, a single member gets a
+            # trailing comma, otherwise a comma joined list of quoted members
+            if not value:
+                return ','
+            elif len(value) == 1:
+                return self._quote(value[0], multiline=False) + ','
+            return ', '.join([self._quote(val, multiline=False)
+                for val in value])
+        if not isinstance(value, basestring):
+            if self.stringify:
+                value = str(value)
+            else:
+                raise TypeError('Value "%s" is not a string.' % value)
+
+        if not value:
+            return '""'
+        
+        # decide which quoting style is required: none, single, or triple
+        no_lists_no_quotes = not self.list_values and '\n' not in value and '#' not in value
+        need_triple = multiline and ((("'" in value) and ('"' in value)) or ('\n' in value ))
+        hash_triple_quote = multiline and not need_triple and ("'" in value) and ('"' in value) and ('#' in value)
+        check_for_single = (no_lists_no_quotes or not need_triple) and not hash_triple_quote
+        
+        if check_for_single:
+            if not self.list_values:
+                # we don't quote if ``list_values=False``
+                quot = noquot
+            # for normal values either single or double quotes will do
+            elif '\n' in value:
+                # will only happen if multiline is off - e.g. '\n' in key
+                raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
+            elif ((value[0] not in wspace_plus) and
+                    (value[-1] not in wspace_plus) and
+                    (',' not in value)):
+                quot = noquot
+            else:
+                quot = self._get_single_quote(value)
+        else:
+            # if value has '\n' or "'" *and* '"', it will need triple quotes
+            quot = self._get_triple_quote(value)
+        
+        if quot == noquot and '#' in value and self.list_values:
+            # an unquoted '#' would be parsed as an inline comment
+            quot = self._get_single_quote(value)
+                
+        return quot % value
      +    
      +    
      +    def _get_single_quote(self, value):
      +        if ("'" in value) and ('"' in value):
      +            raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
      +        elif '"' in value:
      +            quot = squot
      +        else:
      +            quot = dquot
      +        return quot
      +    
      +    
      +    def _get_triple_quote(self, value):
      +        if (value.find('"""') != -1) and (value.find("'''") != -1):
      +            raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
      +        if value.find('"""') == -1:
      +            quot = tdquot
      +        else:
      +            quot = tsquot 
      +        return quot
      +
      +
+    def _handle_value(self, value):
+        """
+        Given a value string, unquote, remove comment,
+        handle lists. (including empty and single member lists)
+        
+        Returns a ``(value, comment)`` tuple.
+        """
+        if self._inspec:
+            # Parsing a configspec so don't handle comments
+            return (value, '')
+        # do we look for lists in values ?
+        if not self.list_values:
+            mat = self._nolistvalue.match(value)
+            if mat is None:
+                raise SyntaxError()
+            # NOTE: we don't unquote here
+            return mat.groups()
+        #
+        mat = self._valueexp.match(value)
+        if mat is None:
+            # the value is badly constructed, probably badly quoted,
+            # or an invalid list
+            raise SyntaxError()
+        (list_values, single, empty_list, comment) = mat.groups()
+        if (list_values == '') and (single is None):
+            # change this if you want to accept empty values
+            raise SyntaxError()
+        # NOTE: note there is no error handling from here if the regex
+        # is wrong: then incorrect values will slip through
+        if empty_list is not None:
+            # the single comma - meaning an empty list
+            return ([], comment)
+        if single is not None:
+            # handle empty values
+            if list_values and not single:
+                # FIXME: the '' is a workaround because our regex now matches
+                #   '' at the end of a list if it has a trailing comma
+                single = None
+            else:
+                single = single or '""'
+                single = self._unquote(single)
+        if list_values == '':
+            # not a list value
+            return (single, comment)
+        the_list = self._listvalueexp.findall(list_values)
+        the_list = [self._unquote(val) for val in the_list]
+        if single is not None:
+            # the member after the final comma
+            the_list += [single]
+        return (the_list, comment)
      +
      +
+    def _multiline(self, value, infile, cur_index, maxline):
+        """
+        Extract the value, where we are in a multiline situation.
+        
+        Returns ``(value, comment, cur_index)`` with ``cur_index`` advanced
+        past any extra lines consumed. Raises SyntaxError for a badly
+        formed or unterminated multiline value.
+        """
+        quot = value[:3]
+        newvalue = value[3:]
+        # regexes for "opens and closes on one line" / "closing line"
+        single_line = self._triple_quote[quot][0]
+        multi_line = self._triple_quote[quot][1]
+        mat = single_line.match(value)
+        if mat is not None:
+            # the whole triple quoted value fits on this one line
+            retval = list(mat.groups())
+            retval.append(cur_index)
+            return retval
+        elif newvalue.find(quot) != -1:
+            # somehow the triple quote is missing
+            raise SyntaxError()
+        #
+        # accumulate lines until one contains the closing triple quote
+        while cur_index < maxline:
+            cur_index += 1
+            newvalue += '\n'
+            line = infile[cur_index]
+            if line.find(quot) == -1:
+                newvalue += line
+            else:
+                # end of multiline, process it
+                break
+        else:
+            # we've got to the end of the config, oops...
+            raise SyntaxError()
+        mat = multi_line.match(line)
+        if mat is None:
+            # a badly formed line
+            raise SyntaxError()
+        (value, comment) = mat.groups()
+        return (newvalue + value, comment, cur_index)
      +
      +
+    def _handle_configspec(self, configspec):
+        """Parse the configspec and store it on ``self.configspec``."""
+        # FIXME: Should we check that the configspec was created with the 
+        #        correct settings ? (i.e. ``list_values=False``)
+        if not isinstance(configspec, ConfigObj):
+            try:
+                # raise_errors so a broken configspec fails immediately
+                configspec = ConfigObj(configspec,
+                                       raise_errors=True,
+                                       file_error=True,
+                                       _inspec=True)
+            except ConfigObjError, e:
+                # FIXME: Should these errors have a reference
+                #        to the already parsed ConfigObj ?
+                raise ConfigspecError('Parsing configspec failed: %s' % e)
+            except IOError, e:
+                raise IOError('Reading configspec failed: %s' % e)
+        
+        self.configspec = configspec
      +            
      +
      +        
+    def _set_configspec(self, section, copy):
+        """
+        Called by validate. Handles setting the configspec on subsections
+        including sections to be validated by __many__
+        
+        If ``copy`` is True, comments from the configspec are copied onto
+        newly created sections.
+        """
+        configspec = section.configspec
+        many = configspec.get('__many__')
+        if isinstance(many, dict):
+            # subsections not named in the spec inherit the '__many__' spec
+            for entry in section.sections:
+                if entry not in configspec:
+                    section[entry].configspec = many
+                    
+        for entry in configspec.sections:
+            if entry == '__many__':
+                continue
+            if entry not in section:
+                # create missing sections as empty, flagged as created
+                section[entry] = {}
+                section[entry]._created = True
+                if copy:
+                    # copy comments
+                    section.comments[entry] = configspec.comments.get(entry, [])
+                    section.inline_comments[entry] = configspec.inline_comments.get(entry, '')
+                
+            # Could be a scalar when we expect a section
+            if isinstance(section[entry], Section):
+                section[entry].configspec = configspec[entry]
      +                        
      +
      +    def _write_line(self, indent_string, entry, this_entry, comment):
      +        """Write an individual line, for the write method"""
      +        # NOTE: the calls to self._quote here handles non-StringType values.
      +        if not self.unrepr:
      +            val = self._decode_element(self._quote(this_entry))
      +        else:
      +            val = repr(this_entry)
      +        return '%s%s%s%s%s' % (indent_string,
      +                               self._decode_element(self._quote(entry, multiline=False)),
      +                               self._a_to_u(' = '),
      +                               val,
      +                               self._decode_element(comment))
      +
      +
      +    def _write_marker(self, indent_string, depth, entry, comment):
      +        """Write a section marker line"""
      +        return '%s%s%s%s%s' % (indent_string,
      +                               self._a_to_u('[' * depth),
      +                               self._quote(self._decode_element(entry), multiline=False),
      +                               self._a_to_u(']' * depth),
      +                               self._decode_element(comment))
      +
      +
      +    def _handle_comment(self, comment):
      +        """Deal with a comment."""
      +        if not comment:
      +            return ''
      +        start = self.indent_type
      +        if not comment.startswith('#'):
      +            start += self._a_to_u(' # ')
      +        return (start + comment)
      +
      +
      +    # Public methods
      +
+    def write(self, outfile=None, section=None):
+        """
+        Write the current ConfigObj as a file
+        
+        tekNico: FIXME: use StringIO instead of real files
+        
+        >>> filename = a.filename
+        >>> a.filename = 'test.ini'
+        >>> a.write()
+        >>> a.filename = filename
+        >>> a == ConfigObj('test.ini', raise_errors=True)
+        1
+        >>> import os
+        >>> os.remove('test.ini')
+        """
+        if self.indent_type is None:
+            # this can be true if initialised from a dictionary
+            self.indent_type = DEFAULT_INDENT_TYPE
+            
+        out = []
+        cs = self._a_to_u('#')
+        csp = self._a_to_u('# ')
+        if section is None:
+            # top level call: disable interpolation so raw values are written
+            int_val = self.interpolation
+            self.interpolation = False
+            section = self
+            for line in self.initial_comment:
+                line = self._decode_element(line)
+                stripped_line = line.strip()
+                if stripped_line and not stripped_line.startswith(cs):
+                    line = csp + line
+                out.append(line)
+                
+        indent_string = self.indent_type * section.depth
+        for entry in (section.scalars + section.sections):
+            if entry in section.defaults:
+                # don't write out default values
+                continue
+            for comment_line in section.comments[entry]:
+                comment_line = self._decode_element(comment_line.lstrip())
+                if comment_line and not comment_line.startswith(cs):
+                    # ensure stored comments are written with a '#' prefix
+                    comment_line = csp + comment_line
+                out.append(indent_string + comment_line)
+            this_entry = section[entry]
+            comment = self._handle_comment(section.inline_comments[entry])
+            
+            if isinstance(this_entry, dict):
+                # a section
+                out.append(self._write_marker(
+                    indent_string,
+                    this_entry.depth,
+                    entry,
+                    comment))
+                # recurse for the subsection's lines
+                out.extend(self.write(section=this_entry))
+            else:
+                out.append(self._write_line(
+                    indent_string,
+                    entry,
+                    this_entry,
+                    comment))
+                
+        if section is self:
+            for line in self.final_comment:
+                line = self._decode_element(line)
+                stripped_line = line.strip()
+                if stripped_line and not stripped_line.startswith(cs):
+                    line = csp + line
+                out.append(line)
+            # restore the interpolation setting disabled above
+            self.interpolation = int_val
+            
+        if section is not self:
+            # recursive call: hand the lines back to the caller
+            return out
+        
+        if (self.filename is None) and (outfile is None):
+            # output a list of lines
+            # might need to encode
+            # NOTE: This will *screw* UTF16, each line will start with the BOM
+            if self.encoding:
+                out = [l.encode(self.encoding) for l in out]
+            if (self.BOM and ((self.encoding is None) or
+                (BOM_LIST.get(self.encoding.lower()) == 'utf_8'))):
+                # Add the UTF8 BOM
+                if not out:
+                    out.append('')
+                out[0] = BOM_UTF8 + out[0]
+            return out
+        
+        # Turn the list to a string, joined with correct newlines
+        newline = self.newlines or os.linesep
+        if (getattr(outfile, 'mode', None) is not None and outfile.mode == 'w'
+            and sys.platform == 'win32' and newline == '\r\n'):
+            # Windows specific hack to avoid writing '\r\r\n'
+            newline = '\n'
+        output = self._a_to_u(newline).join(out)
+        if self.encoding:
+            output = output.encode(self.encoding)
+        if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)):
+            # Add the UTF8 BOM
+            output = BOM_UTF8 + output
+            
+        if not output.endswith(newline):
+            output += newline
+        if outfile is not None:
+            outfile.write(output)
+        else:
+            h = open(self.filename, 'wb')
+            h.write(output)
+            h.close()
      +
      +
+    def validate(self, validator, preserve_errors=False, copy=False,
+                 section=None):
+        """
+        Test the ConfigObj against a configspec.
+        
+        It uses the ``validator`` object from *validate.py*.
+        
+        To run ``validate`` on the current ConfigObj, call: ::
+        
+            test = config.validate(validator)
+        
+        (Normally having previously passed in the configspec when the ConfigObj
+        was created - you can dynamically assign a dictionary of checks to the
+        ``configspec`` attribute of a section though).
+        
+        It returns ``True`` if everything passes, or a dictionary of
+        pass/fails (True/False). If every member of a subsection passes, it
+        will just have the value ``True``. (It also returns ``False`` if all
+        members fail).
+        
+        In addition, it converts the values from strings to their native
+        types if their checks pass (and ``stringify`` is set).
+        
+        If ``preserve_errors`` is ``True`` (``False`` is default) then instead
+        of marking a fail with a ``False``, it will preserve the actual
+        exception object. This can contain info about the reason for failure.
+        For example the ``VdtValueTooSmallError`` indicates that the value
+        supplied was too small. If a value (or section) is missing it will
+        still be marked as ``False``.
+        
+        You must have the validate module to use ``preserve_errors=True``.
+        
+        You can then use the ``flatten_errors`` function to turn your nested
+        results dictionary into a flattened list of failures - useful for
+        displaying meaningful error messages.
+        
+        ``section`` is used internally on recursive calls.
+        """
+        if section is None:
+            # top level call
+            if self.configspec is None:
+                raise ValueError('No configspec supplied.')
+            if preserve_errors:
+                # We do this once to remove a top level dependency on the validate module
+                # Which makes importing configobj faster
+                from validate import VdtMissingValue
+                self._vdtMissingValue = VdtMissingValue
+                
+            section = self
+
+            if copy:
+                # copy the configspec's layout/encoding settings over
+                section.initial_comment = section.configspec.initial_comment
+                section.final_comment = section.configspec.final_comment
+                section.encoding = section.configspec.encoding
+                section.BOM = section.configspec.BOM
+                section.newlines = section.configspec.newlines
+                section.indent_type = section.configspec.indent_type
+            
+        #
+        # section.default_values.clear() #??
+        configspec = section.configspec
+        self._set_configspec(section, copy)
+
+        
+        def validate_entry(entry, spec, val, missing, ret_true, ret_false):
+            # validate a single scalar; mutates ``out``/``section`` and
+            # returns the (possibly cleared) ret_true/ret_false flags
+            section.default_values.pop(entry, None)
+                
+            try:
+                section.default_values[entry] = validator.get_default_value(configspec[entry])
+            except (KeyError, AttributeError, validator.baseErrorClass):
+                # No default, bad default or validator has no 'get_default_value'
+                # (e.g. SimpleVal)
+                pass
+            
+            try:
+                check = validator.check(spec,
+                                        val,
+                                        missing=missing
+                                        )
+            except validator.baseErrorClass, e:
+                if not preserve_errors or isinstance(e, self._vdtMissingValue):
+                    out[entry] = False
+                else:
+                    # preserve the error
+                    out[entry] = e
+                    ret_false = False
+                ret_true = False
+            else:
+                ret_false = False
+                out[entry] = True
+                if self.stringify or missing:
+                    # if we are doing type conversion
+                    # or the value is a supplied default
+                    if not self.stringify:
+                        if isinstance(check, (list, tuple)):
+                            # preserve lists
+                            check = [self._str(item) for item in check]
+                        elif missing and check is None:
+                            # convert the None from a default to a ''
+                            check = ''
+                        else:
+                            check = self._str(check)
+                    if (check != val) or missing:
+                        section[entry] = check
+                if not copy and missing and entry not in section.defaults:
+                    section.defaults.append(entry)
+            return ret_true, ret_false
+        
+        #
+        out = {}
+        ret_true = True
+        ret_false = True
+        
+        # entries present in the section but absent from the spec, and
+        # entries whose kind (scalar vs section) disagrees with the spec
+        unvalidated = [k for k in section.scalars if k not in configspec]
+        incorrect_sections = [k for k in configspec.sections if k in section.scalars]        
+        incorrect_scalars = [k for k in configspec.scalars if k in section.sections]
+        
+        for entry in configspec.scalars:
+            if entry in ('__many__', '___many___'):
+                # reserved names
+                continue
+            if (not entry in section.scalars) or (entry in section.defaults):
+                # missing entries
+                # or entries from defaults
+                missing = True
+                val = None
+                if copy and entry not in section.scalars:
+                    # copy comments
+                    section.comments[entry] = (
+                        configspec.comments.get(entry, []))
+                    section.inline_comments[entry] = (
+                        configspec.inline_comments.get(entry, ''))
+                #
+            else:
+                missing = False
+                val = section[entry]
+            
+            ret_true, ret_false = validate_entry(entry, configspec[entry], val, 
+                                                 missing, ret_true, ret_false)
+        
+        many = None
+        if '__many__' in configspec.scalars:
+            many = configspec['__many__']
+        elif '___many___' in configspec.scalars:
+            many = configspec['___many___']
+        
+        if many is not None:
+            # remaining scalars are checked against the '__many__' spec
+            for entry in unvalidated:
+                val = section[entry]
+                ret_true, ret_false = validate_entry(entry, many, val, False,
+                                                     ret_true, ret_false)
+            unvalidated = []
+
+        for entry in incorrect_scalars:
+            ret_true = False
+            if not preserve_errors:
+                out[entry] = False
+            else:
+                ret_false = False
+                msg = 'Value %r was provided as a section' % entry
+                out[entry] = validator.baseErrorClass(msg)
+        for entry in incorrect_sections:
+            ret_true = False
+            if not preserve_errors:
+                out[entry] = False
+            else:
+                ret_false = False
+                msg = 'Section %r was provided as a single value' % entry
+                out[entry] = validator.baseErrorClass(msg)
+                
+        # Missing sections will have been created as empty ones when the
+        # configspec was read.
+        for entry in section.sections:
+            # FIXME: this means DEFAULT is not copied in copy mode
+            if section is self and entry == 'DEFAULT':
+                continue
+            if section[entry].configspec is None:
+                unvalidated.append(entry)
+                continue
+            if copy:
+                section.comments[entry] = configspec.comments.get(entry, [])
+                section.inline_comments[entry] = configspec.inline_comments.get(entry, '')
+            check = self.validate(validator, preserve_errors=preserve_errors, copy=copy, section=section[entry])
+            out[entry] = check
+            if check == False:
+                ret_true = False
+            elif check == True:
+                ret_false = False
+            else:
+                ret_true = False
+        
+        section.extra_values = unvalidated
+        if preserve_errors and not section._created:
+            # If the section wasn't created (i.e. it wasn't missing)
+            # then we can't return False, we need to preserve errors
+            ret_false = False
+        #
+        if ret_false and preserve_errors and out:
+            # If we are preserving errors, but all
+            # the failures are from missing sections / values
+            # then we can return False. Otherwise there is a
+            # real failure that we need to preserve.
+            ret_false = not any(out.values())
+        if ret_true:
+            return True
+        elif ret_false:
+            return False
+        return out
      +
      +
+    def reset(self):
+        """Clear ConfigObj instance and restore to 'freshly created' state."""
+        self.clear()
+        # restore attributes to their defaults (see ``_initialise``)
+        self._initialise()
+        # FIXME: Should be done by '_initialise', but ConfigObj constructor (and reload)
+        #        requires an empty dictionary
+        self.configspec = None
+        # Just to be sure ;-)
+        self._original_configspec = None
      +        
      +        
+    def reload(self):
+        """
+        Reload a ConfigObj from file.
+        
+        This method raises a ``ReloadError`` if the ConfigObj doesn't have
+        a filename attribute pointing to a file.
+        """
+        if not isinstance(self.filename, basestring):
+            raise ReloadError()
+
+        filename = self.filename
+        # preserve the current options (configspec is handled separately below)
+        current_options = {}
+        for entry in OPTION_DEFAULTS:
+            if entry == 'configspec':
+                continue
+            current_options[entry] = getattr(self, entry)
+            
+        # reuse the original (as supplied) configspec, not the parsed one
+        configspec = self._original_configspec
+        current_options['configspec'] = configspec
+            
+        self.clear()
+        self._initialise(current_options)
+        self._load(filename, configspec)
      +        
      +
      +
      +class SimpleVal(object):
      +    """
      +    A simple validator.
      +    Can be used to check that all members expected are present.
      +    
      +    To use it, provide a configspec with all your members in (the value given
      +    will be ignored). Pass an instance of ``SimpleVal`` to the ``validate``
      +    method of your ``ConfigObj``. ``validate`` will return ``True`` if all
      +    members are present, or a dictionary with True/False meaning
      +    present/missing. (Whole missing sections will be replaced with ``False``)
      +    """
      +    
      +    def __init__(self):
      +        self.baseErrorClass = ConfigObjError
      +    
      +    def check(self, check, member, missing=False):
      +        """A dummy check method, always returns the value unchanged."""
      +        if missing:
      +            raise self.baseErrorClass()
      +        return member
      +
      +
+def flatten_errors(cfg, res, levels=None, results=None):
+    """
+    An example function that will turn a nested dictionary of results
+    (as returned by ``ConfigObj.validate``) into a flat list.
+    
+    ``cfg`` is the ConfigObj instance being checked, ``res`` is the results
+    dictionary returned by ``validate``.
+    
+    (This is a recursive function, so you shouldn't use the ``levels`` or
+    ``results`` arguments - they are used by the function.)
+    
+    Returns a list of keys that failed. Each member of the list is a tuple::
+    
+        ([list of sections...], key, result)
+    
+    If ``validate`` was called with ``preserve_errors=False`` (the default)
+    then ``result`` will always be ``False``.
+
+    *list of sections* is a flattened list of sections that the key was found
+    in.
+    
+    If the section was missing (or a section was expected and a scalar provided
+    - or vice-versa) then key will be ``None``.
+    
+    If the value (or section) was missing then ``result`` will be ``False``.
+    
+    If ``validate`` was called with ``preserve_errors=True`` and a value
+    was present, but failed the check, then ``result`` will be the exception
+    object returned. You can use this as a string that describes the failure.
+    
+    For example *The value "3" is of the wrong type*.
+    """
+    if levels is None:
+        # first time called
+        levels = []
+        results = []
+    if res == True:
+        # this section passed entirely - nothing to record
+        return results
+    if res == False or isinstance(res, Exception):
+        # the whole section failed (missing, or scalar/section mismatch):
+        # record it with key=None, then back out of this level
+        results.append((levels[:], None, res))
+        if levels:
+            levels.pop()
+        return results
+    for (key, val) in res.items():
+        if val == True:
+            continue
+        if isinstance(cfg.get(key), dict):
+            # Go down one level
+            levels.append(key)
+            flatten_errors(cfg[key], val, levels, results)
+            continue
+        # a failing scalar value - record the section path, key and result
+        results.append((levels[:], key, val))
+    #
+    # Go up one level
+    if levels:
+        levels.pop()
+    #
+    return results
      +
      +
      +def get_extra_values(conf, _prepend=()):
      +    """
      +    Find all the values and sections not in the configspec from a validated
      +    ConfigObj.
      +    
      +    ``get_extra_values`` returns a list of tuples where each tuple represents
      +    either an extra section, or an extra value.
      +    
      +    The tuples contain two values, a tuple representing the section the value 
      +    is in and the name of the extra values. For extra values in the top level
      +    section the first member will be an empty tuple. For values in the 'foo'
      +    section the first member will be ``('foo',)``. For members in the 'bar'
      +    subsection of the 'foo' section the first member will be ``('foo', 'bar')``.
      +    
      +    NOTE: If you call ``get_extra_values`` on a ConfigObj instance that hasn't
      +    been validated it will return an empty list.
      +    """
      +    out = []
      +    
      +    out.extend([(_prepend, name) for name in conf.extra_values])
      +    for name in conf.sections:
      +        if name not in conf.extra_values:
      +            out.extend(get_extra_values(conf[name], _prepend + (name,)))
      +    return out
      +
      +
      +"""*A programming language is a medium of expression.* - Paul Graham"""
      diff --git a/python/configobj/setup.py b/python/configobj/setup.py
      new file mode 100644
      index 000000000..63d70cc0c
      --- /dev/null
      +++ b/python/configobj/setup.py
      @@ -0,0 +1,83 @@
+# setup.py
+# Install script for ConfigObj
+# Copyright (C) 2005-2010 Michael Foord, Mark Andrews, Nicola Larosa
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+#         mark AT la-la DOT com
+#         nico AT tekNico DOT net
+
+# This software is licensed under the terms of the BSD license.
+# http://www.voidspace.org.uk/python/license.shtml
+
+import sys
+from distutils.core import setup
+# Version is single-sourced from the configobj module itself.
+from configobj import __version__ as VERSION
+
+NAME = 'configobj'
+
+# Top-level modules installed by this package.
+MODULES = 'configobj', 'validate'
+
+DESCRIPTION = 'Config file reading, writing and validation.'
+
+URL = 'http://www.voidspace.org.uk/python/configobj.html'
+
+DOWNLOAD_URL = "http://www.voidspace.org.uk/downloads/configobj-%s.zip" % VERSION
+
+LONG_DESCRIPTION = """**ConfigObj** is a simple but powerful config file reader and writer: an *ini
+file round tripper*. Its main feature is that it is very easy to use, with a
+straightforward programmer's interface and a simple syntax for config files.
+It has lots of other features though :
+
+* Nested sections (subsections), to any level
+* List values
+* Multiple line values
+* Full Unicode support
+* String interpolation (substitution)
+* Integrated with a powerful validation system
+
+    - including automatic type checking/conversion
+    - and allowing default values
+    - repeated sections
+
+* All comments in the file are preserved
+* The order of keys/sections is preserved
+* Powerful ``unrepr`` mode for storing/retrieving Python data-types
+
+| Release 4.7.2 fixes several bugs in 4.7.1
+| Release 4.7.1 fixes a bug with the deprecated options keyword in
+| 4.7.0.
+| Release 4.7.0 improves performance adds features for validation and
+| fixes some bugs."""
+
+CLASSIFIERS = [
+    'Development Status :: 6 - Mature',
+    'Intended Audience :: Developers',
+    'License :: OSI Approved :: BSD License',
+    'Programming Language :: Python',
+    'Programming Language :: Python :: 2.3',
+    'Programming Language :: Python :: 2.4',
+    'Programming Language :: Python :: 2.5',
+    'Programming Language :: Python :: 2.6',
+    'Operating System :: OS Independent',
+    'Topic :: Software Development :: Libraries',
+    'Topic :: Software Development :: Libraries :: Python Modules',
+]
+
+AUTHOR = 'Michael Foord & Nicola Larosa'
+
+AUTHOR_EMAIL = 'fuzzyman@voidspace.org.uk'
+
+KEYWORDS = "config, ini, dictionary, application, admin, sysadmin, configuration, validation".split(', ')
+
+
+# All metadata is defined above; distutils does the rest.
+setup(name=NAME,
+      version=VERSION,
+      description=DESCRIPTION,
+      long_description=LONG_DESCRIPTION,
+      download_url=DOWNLOAD_URL,
+      author=AUTHOR,
+      author_email=AUTHOR_EMAIL,
+      url=URL,
+      py_modules=MODULES,
+      classifiers=CLASSIFIERS,
+      keywords=KEYWORDS
+     )
      diff --git a/python/configobj/validate.py b/python/configobj/validate.py
      new file mode 100644
      index 000000000..73dbdb891
      --- /dev/null
      +++ b/python/configobj/validate.py
      @@ -0,0 +1,1450 @@
      +# validate.py
      +# A Validator object
      +# Copyright (C) 2005-2010 Michael Foord, Mark Andrews, Nicola Larosa
      +# E-mail: fuzzyman AT voidspace DOT org DOT uk
      +#         mark AT la-la DOT com
      +#         nico AT tekNico DOT net
      +
      +# This software is licensed under the terms of the BSD license.
      +# http://www.voidspace.org.uk/python/license.shtml
      +# Basically you're free to copy, modify, distribute and relicense it,
      +# So long as you keep a copy of the license with it.
      +
      +# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
      +# For information about bugfixes, updates and support, please join the
      +# ConfigObj mailing list:
      +# http://lists.sourceforge.net/lists/listinfo/configobj-develop
      +# Comments, suggestions and bug reports welcome.
      +
      +"""
      +    The Validator object is used to check that supplied values 
      +    conform to a specification.
      +    
      +    The value can be supplied as a string - e.g. from a config file.
      +    In this case the check will also *convert* the value to
      +    the required type. This allows you to add validation
      +    as a transparent layer to access data stored as strings.
      +    The validation checks that the data is correct *and*
      +    converts it to the expected type.
      +    
      +    Some standard checks are provided for basic data types.
      +    Additional checks are easy to write. They can be
      +    provided when the ``Validator`` is instantiated or
      +    added afterwards.
      +    
      +    The standard functions work with the following basic data types :
      +    
      +    * integers
      +    * floats
      +    * booleans
      +    * strings
      +    * ip_addr
      +    
      +    plus lists of these datatypes
      +    
      +    Adding additional checks is done through coding simple functions.
      +    
      +    The full set of standard checks are : 
      +    
      +    * 'integer': matches integer values (including negative)
      +                 Takes optional 'min' and 'max' arguments : ::
      +    
      +                   integer()
      +                   integer(3, 9)  # any value from 3 to 9
      +                   integer(min=0) # any positive value
      +                   integer(max=9)
      +    
      +    * 'float': matches float values
      +               Has the same parameters as the integer check.
      +    
      +    * 'boolean': matches boolean values - ``True`` or ``False``
      +                 Acceptable string values for True are :
      +                   true, on, yes, 1
      +                 Acceptable string values for False are :
      +                   false, off, no, 0
      +    
      +                 Any other value raises an error.
      +    
      +    * 'ip_addr': matches an Internet Protocol address, v.4, represented
      +                 by a dotted-quad string, i.e. '1.2.3.4'.
      +    
      +    * 'string': matches any string.
      +                Takes optional keyword args 'min' and 'max'
      +                to specify min and max lengths of the string.
      +    
      +    * 'list': matches any list.
      +              Takes optional keyword args 'min', and 'max' to specify min and
      +              max sizes of the list. (Always returns a list.)
      +    
      +    * 'tuple': matches any tuple.
      +              Takes optional keyword args 'min', and 'max' to specify min and
      +              max sizes of the tuple. (Always returns a tuple.)
      +    
      +    * 'int_list': Matches a list of integers.
      +                  Takes the same arguments as list.
      +    
      +    * 'float_list': Matches a list of floats.
      +                    Takes the same arguments as list.
      +    
      +    * 'bool_list': Matches a list of boolean values.
      +                   Takes the same arguments as list.
      +    
      +    * 'ip_addr_list': Matches a list of IP addresses.
      +                     Takes the same arguments as list.
      +    
      +    * 'string_list': Matches a list of strings.
      +                     Takes the same arguments as list.
      +    
      +    * 'mixed_list': Matches a list with different types in 
      +                    specific positions. List size must match
      +                    the number of arguments.
      +    
      +                    Each position can be one of :
      +                    'integer', 'float', 'ip_addr', 'string', 'boolean'
      +    
      +                    So to specify a list with two strings followed
      +                    by two integers, you write the check as : ::
      +    
      +                      mixed_list('string', 'string', 'integer', 'integer')
      +    
      +    * 'pass': This check matches everything ! It never fails
      +              and the value is unchanged.
      +    
      +              It is also the default if no check is specified.
      +    
      +    * 'option': This check matches any from a list of options.
      +                You specify this check with : ::
      +    
      +                  option('option 1', 'option 2', 'option 3')
      +    
      +    You can supply a default value (returned if no value is supplied)
      +    using the default keyword argument.
      +    
      +    You specify a list argument for default using a list constructor syntax in
      +    the check : ::
      +    
      +        checkname(arg1, arg2, default=list('val 1', 'val 2', 'val 3'))
      +    
      +    A badly formatted set of arguments will raise a ``VdtParamError``.
      +"""
      +
+__version__ = '1.0.1'
+
+
+# Public API of this module - everything else is an implementation detail.
+__all__ = (
+    '__version__',
+    'dottedQuadToNum',
+    'numToDottedQuad',
+    'ValidateError',
+    'VdtUnknownCheckError',
+    'VdtParamError',
+    'VdtTypeError',
+    'VdtValueError',
+    'VdtValueTooSmallError',
+    'VdtValueTooBigError',
+    'VdtValueTooShortError',
+    'VdtValueTooLongError',
+    'VdtMissingValue',
+    'Validator',
+    'is_integer',
+    'is_float',
+    'is_boolean',
+    'is_list',
+    'is_tuple',
+    'is_ip_addr',
+    'is_string',
+    'is_int_list',
+    'is_bool_list',
+    'is_float_list',
+    'is_string_list',
+    'is_ip_addr_list',
+    'is_mixed_list',
+    'is_option',
+    '__docformat__',
+)
      +
      +
      +import re
      +
      +
+# Matches a keyword=list(...) argument: group 1 is the keyword name,
+# group 2 the raw (comma separated) contents of the list.
+_list_arg = re.compile(r'''
+    (?:
+        ([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*list\(
+            (
+                (?:
+                    \s*
+                    (?:
+                        (?:".*?")|              # double quotes
+                        (?:'.*?')|              # single quotes
+                        (?:[^'",\s\)][^,\)]*?)  # unquoted
+                    )
+                    \s*,\s*
+                )*
+                (?:
+                    (?:".*?")|              # double quotes
+                    (?:'.*?')|              # single quotes
+                    (?:[^'",\s\)][^,\)]*?)  # unquoted
+                )?                          # last one
+            )
+        \)
+    )
+''', re.VERBOSE | re.DOTALL)    # two groups
+
+# Pulls the individual members out of a list body, one match per member.
+_list_members = re.compile(r'''
+    (
+        (?:".*?")|              # double quotes
+        (?:'.*?')|              # single quotes
+        (?:[^'",\s=][^,=]*?)       # unquoted
+    )
+    (?:
+    (?:\s*,\s*)|(?:\s*$)            # comma
+    )
+''', re.VERBOSE | re.DOTALL)    # one group
+
+# Raw (uncompiled) pattern for a single argument in a check string: either a
+# keyword=list(...) value, a quoted/unquoted positional value, or a plain
+# keyword argument.  Compiled twice by Validator (finder and anchored matcher).
+_paramstring = r'''
+    (?:
+        (
+            (?:
+                [a-zA-Z_][a-zA-Z0-9_]*\s*=\s*list\(
+                    (?:
+                        \s*
+                        (?:
+                            (?:".*?")|              # double quotes
+                            (?:'.*?')|              # single quotes
+                            (?:[^'",\s\)][^,\)]*?)       # unquoted
+                        )
+                        \s*,\s*
+                    )*
+                    (?:
+                        (?:".*?")|              # double quotes
+                        (?:'.*?')|              # single quotes
+                        (?:[^'",\s\)][^,\)]*?)       # unquoted
+                    )?                              # last one
+                \)
+            )|
+            (?:
+                (?:".*?")|              # double quotes
+                (?:'.*?')|              # single quotes
+                (?:[^'",\s=][^,=]*?)|       # unquoted
+                (?:                         # keyword argument
+                    [a-zA-Z_][a-zA-Z0-9_]*\s*=\s*
+                    (?:
+                        (?:".*?")|              # double quotes
+                        (?:'.*?')|              # single quotes
+                        (?:[^'",\s=][^,=]*?)       # unquoted
+                    )
+                )
+            )
+        )
+        (?:
+            (?:\s*,\s*)|(?:\s*$)            # comma
+        )
+    )
+    '''
+
+# Anchored version of _paramstring, used to check a whole argument string.
+_matchstring = '^%s*' % _paramstring
      +
      +# Python pre 2.2.1 doesn't have bool
      +try:
      +    bool
      +except NameError:
      +    def bool(val):
      +        """Simple boolean equivalent function. """
      +        if val:
      +            return 1
      +        else:
      +            return 0
      +
      +
      +def dottedQuadToNum(ip):
      +    """
      +    Convert decimal dotted quad string to long integer
      +    
      +    >>> int(dottedQuadToNum('1 '))
      +    1
      +    >>> int(dottedQuadToNum(' 1.2'))
      +    16777218
      +    >>> int(dottedQuadToNum(' 1.2.3 '))
      +    16908291
      +    >>> int(dottedQuadToNum('1.2.3.4'))
      +    16909060
      +    >>> dottedQuadToNum('255.255.255.255')
      +    4294967295L
      +    >>> dottedQuadToNum('255.255.255.256')
      +    Traceback (most recent call last):
      +    ValueError: Not a good dotted-quad IP: 255.255.255.256
      +    """
      +    
      +    # import here to avoid it when ip_addr values are not used
      +    import socket, struct
      +    
      +    try:
      +        return struct.unpack('!L',
      +            socket.inet_aton(ip.strip()))[0]
      +    except socket.error:
      +        # bug in inet_aton, corrected in Python 2.4
      +        if ip.strip() == '255.255.255.255':
      +            return 0xFFFFFFFFL
      +        else:
      +            raise ValueError('Not a good dotted-quad IP: %s' % ip)
      +    return
      +
      +
+def numToDottedQuad(num):
+    """
+    Convert long int to dotted quad string
+    
+    Raises ``ValueError`` if ``num`` is outside the unsigned 32-bit range.
+    
+    >>> numToDottedQuad(-1L)
+    Traceback (most recent call last):
+    ValueError: Not a good numeric IP: -1
+    >>> numToDottedQuad(1L)
+    '0.0.0.1'
+    >>> numToDottedQuad(16777218L)
+    '1.0.0.2'
+    >>> numToDottedQuad(16908291L)
+    '1.2.0.3'
+    >>> numToDottedQuad(16909060L)
+    '1.2.3.4'
+    >>> numToDottedQuad(4294967295L)
+    '255.255.255.255'
+    >>> numToDottedQuad(4294967296L)
+    Traceback (most recent call last):
+    ValueError: Not a good numeric IP: 4294967296
+    """
+    
+    # import here to avoid it when ip_addr values are not used
+    import socket, struct
+    
+    # no need to intercept here, 4294967295L is fine
+    if num > 4294967295L or num < 0:
+        raise ValueError('Not a good numeric IP: %s' % num)
+    try:
+        # pack as big-endian unsigned 32-bit, then let inet_ntoa format it
+        return socket.inet_ntoa(
+            struct.pack('!L', long(num)))
+    except (socket.error, struct.error, OverflowError):
+        raise ValueError('Not a good numeric IP: %s' % num)
      +
      +
+class ValidateError(Exception):
+    """
+    This error indicates that the check failed.
+    It can be the base class for more specific errors.
+    
+    Any check function that fails ought to raise this error.
+    (or a subclass)
+    
+    (Used as ``Validator.baseErrorClass``, so catching it catches every
+    check failure raised by this module.)
+    
+    >>> raise ValidateError
+    Traceback (most recent call last):
+    ValidateError
+    """
      +
      +
+# Raised by Validator.check when missing=True and the check has no default.
+class VdtMissingValue(ValidateError):
+    """No value was supplied to a check that needed one."""
      +
      +
+class VdtUnknownCheckError(ValidateError):
+    """An unknown check function was requested"""
+
+    def __init__(self, value):
+        """
+        ``value`` is the name of the unknown check.
+        
+        >>> raise VdtUnknownCheckError('yoda')
+        Traceback (most recent call last):
+        VdtUnknownCheckError: the check "yoda" is unknown.
+        """
+        ValidateError.__init__(self, 'the check "%s" is unknown.' % (value,))
      +
      +
+# NOTE: subclasses SyntaxError, *not* ValidateError - a bad parameter in a
+# check definition is a programming error, not a value-validation failure,
+# so handlers catching ValidateError will not swallow it.
+class VdtParamError(SyntaxError):
+    """An incorrect parameter was passed"""
+
+    def __init__(self, name, value):
+        """
+        ``name`` is the parameter name, ``value`` the offending value.
+        
+        >>> raise VdtParamError('yoda', 'jedi')
+        Traceback (most recent call last):
+        VdtParamError: passed an incorrect value "jedi" for parameter "yoda".
+        """
+        SyntaxError.__init__(self, 'passed an incorrect value "%s" for parameter "%s".' % (value, name))
      +
      +
+class VdtTypeError(ValidateError):
+    """The value supplied was of the wrong type"""
+
+    def __init__(self, value):
+        """
+        ``value`` is the offending value.
+        
+        >>> raise VdtTypeError('jedi')
+        Traceback (most recent call last):
+        VdtTypeError: the value "jedi" is of the wrong type.
+        """
+        ValidateError.__init__(self, 'the value "%s" is of the wrong type.' % (value,))
      +
      +
+class VdtValueError(ValidateError):
+    """The value supplied was of the correct type, but was not an allowed value."""
+    
+    def __init__(self, value):
+        """
+        ``value`` is the offending value.
+        
+        >>> raise VdtValueError('jedi')
+        Traceback (most recent call last):
+        VdtValueError: the value "jedi" is unacceptable.
+        """
+        ValidateError.__init__(self, 'the value "%s" is unacceptable.' % (value,))
      +
      +
+class VdtValueTooSmallError(VdtValueError):
+    """The value supplied was of the correct type, but was too small."""
+
+    def __init__(self, value):
+        """
+        ``value`` is the offending value.
+        
+        >>> raise VdtValueTooSmallError('0')
+        Traceback (most recent call last):
+        VdtValueTooSmallError: the value "0" is too small.
+        """
+        ValidateError.__init__(self, 'the value "%s" is too small.' % (value,))
      +
      +
+class VdtValueTooBigError(VdtValueError):
+    """The value supplied was of the correct type, but was too big."""
+
+    def __init__(self, value):
+        """
+        ``value`` is the offending value.
+        
+        >>> raise VdtValueTooBigError('1')
+        Traceback (most recent call last):
+        VdtValueTooBigError: the value "1" is too big.
+        """
+        ValidateError.__init__(self, 'the value "%s" is too big.' % (value,))
      +
      +
      +class VdtValueTooShortError(VdtValueError):
      +    """The value supplied was of the correct type, but was too short."""
      +
      +    def __init__(self, value):
      +        """
      +        >>> raise VdtValueTooShortError('jed')
      +        Traceback (most recent call last):
      +        VdtValueTooShortError: the value "jed" is too short.
      +        """
      +        ValidateError.__init__(
      +            self,
      +            'the value "%s" is too short.' % (value,))
      +
      +
+class VdtValueTooLongError(VdtValueError):
+    """The value supplied was of the correct type, but was too long."""
+
+    def __init__(self, value):
+        """
+        ``value`` is the offending value.
+        
+        >>> raise VdtValueTooLongError('jedie')
+        Traceback (most recent call last):
+        VdtValueTooLongError: the value "jedie" is too long.
+        """
+        ValidateError.__init__(self, 'the value "%s" is too long.' % (value,))
      +
      +
      +class Validator(object):
      +    """
      +    Validator is an object that allows you to register a set of 'checks'.
      +    These checks take input and test that it conforms to the check.
      +    
      +    This can also involve converting the value from a string into
      +    the correct datatype.
      +    
      +    The ``check`` method takes an input string which configures which
      +    check is to be used and applies that check to a supplied value.
      +    
      +    An example input string would be:
      +    'int_range(param1, param2)'
      +    
      +    You would then provide something like:
      +    
      +    >>> def int_range_check(value, min, max):
      +    ...     # turn min and max from strings to integers
      +    ...     min = int(min)
      +    ...     max = int(max)
      +    ...     # check that value is of the correct type.
      +    ...     # possible valid inputs are integers or strings
      +    ...     # that represent integers
      +    ...     if not isinstance(value, (int, long, basestring)):
      +    ...         raise VdtTypeError(value)
      +    ...     elif isinstance(value, basestring):
      +    ...         # if we are given a string
      +    ...         # attempt to convert to an integer
      +    ...         try:
      +    ...             value = int(value)
      +    ...         except ValueError:
      +    ...             raise VdtValueError(value)
      +    ...     # check the value is between our constraints
      +    ...     if not min <= value:
      +    ...          raise VdtValueTooSmallError(value)
      +    ...     if not value <= max:
      +    ...          raise VdtValueTooBigError(value)
      +    ...     return value
      +    
      +    >>> fdict = {'int_range': int_range_check}
      +    >>> vtr1 = Validator(fdict)
      +    >>> vtr1.check('int_range(20, 40)', '30')
      +    30
      +    >>> vtr1.check('int_range(20, 40)', '60')
      +    Traceback (most recent call last):
      +    VdtValueTooBigError: the value "60" is too big.
      +    
      +    New functions can be added with : ::
      +    
      +    >>> vtr2 = Validator()       
      +    >>> vtr2.functions['int_range'] = int_range_check
      +    
      +    Or by passing in a dictionary of functions when Validator 
      +    is instantiated.
      +    
      +    Your functions *can* use keyword arguments,
      +    but the first argument should always be 'value'.
      +    
      +    If the function doesn't take additional arguments,
      +    the parentheses are optional in the check.
      +    It can be written with either of : ::
      +    
      +        keyword = function_name
      +        keyword = function_name()
      +    
      +    The first program to utilise Validator() was Michael Foord's
      +    ConfigObj, an alternative to ConfigParser which supports lists and
      +    can validate a config file using a config schema.
      +    For more details on using Validator with ConfigObj see:
      +    http://www.voidspace.org.uk/python/configobj.html
      +    """
      +
+    # this regex does the initial parsing of the checks
+    _func_re = re.compile(r'(.+?)\((.*)\)', re.DOTALL)
+
+    # this regex takes apart keyword arguments
+    _key_arg = re.compile(r'^([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.*)$',  re.DOTALL)
+
+
+    # this regex finds keyword=list(....) type values
+    # (re-exposes the module-level pattern as a class attribute)
+    _list_arg = _list_arg
+
+    # this regex takes individual values out of lists - in one pass
+    # (re-exposes the module-level pattern as a class attribute)
+    _list_members = _list_members
+
+    # These regexes check a set of arguments for validity
+    # and then pull the members out
+    _paramfinder = re.compile(_paramstring, re.VERBOSE | re.DOTALL)
+    _matchfinder = re.compile(_matchstring, re.VERBOSE | re.DOTALL)
      +
      +
+    def __init__(self, functions=None):
+        """
+        ``functions`` is an optional dictionary of extra check functions;
+        entries with the same name override the standard checks.
+        
+        >>> vtri = Validator()
+        """
+        # Map check names (as used in configspec strings) to check functions.
+        self.functions = {
+            '': self._pass,
+            'integer': is_integer,
+            'float': is_float,
+            'boolean': is_boolean,
+            'ip_addr': is_ip_addr,
+            'string': is_string,
+            'list': is_list,
+            'tuple': is_tuple,
+            'int_list': is_int_list,
+            'float_list': is_float_list,
+            'bool_list': is_bool_list,
+            'ip_addr_list': is_ip_addr_list,
+            'string_list': is_string_list,
+            'mixed_list': is_mixed_list,
+            'pass': self._pass,
+            'option': is_option,
+            'force_list': force_list,
+        }
+        if functions is not None:
+            self.functions.update(functions)
+        # tekNico: for use by ConfigObj
+        self.baseErrorClass = ValidateError
+        # Cache of parsed check strings, keyed by the raw check string.
+        self._cache = {}
      +
      +
+    def check(self, check, value, missing=False):
+        """
+        Usage: check(check, value)
+        
+        Arguments:
+            check: string representing check to apply (including arguments)
+            value: object to be checked
+        Returns value, converted to correct type if necessary
+        
+        If the check fails, raises a ``ValidateError`` subclass.
+        
+        >>> vtor.check('yoda', '')
+        Traceback (most recent call last):
+        VdtUnknownCheckError: the check "yoda" is unknown.
+        >>> vtor.check('yoda()', '')
+        Traceback (most recent call last):
+        VdtUnknownCheckError: the check "yoda" is unknown.
+        
+        >>> vtor.check('string(default="")', '', missing=True)
+        ''
+        """
+        # ``default`` is the raw default string from the check (or None)
+        fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)
+            
+        if missing:
+            if default is None:
+                # no information needed here - to be handled by caller
+                raise VdtMissingValue()
+            # substitute the default, translating the special 'None' forms
+            value = self._handle_none(default)
+        
+        if value is None:
+            # a None value (e.g. from a 'None' default) is never checked
+            return None
+        
+        return self._check_value(value, fun_name, fun_args, fun_kwargs)
      +
      +
      +    def _handle_none(self, value):
      +        if value == 'None':
      +            return None
      +        elif value in ("'None'", '"None"'):
      +            # Special case a quoted None
      +            value = self._unquote(value)
      +        return value
      +
      +
      +    def _parse_with_caching(self, check):
      +        if check in self._cache:
      +            fun_name, fun_args, fun_kwargs, default = self._cache[check]
      +            # We call list and dict below to work with *copies* of the data
      +            # rather than the original (which are mutable of course)
      +            fun_args = list(fun_args)
      +            fun_kwargs = dict(fun_kwargs)
      +        else:
      +            fun_name, fun_args, fun_kwargs, default = self._parse_check(check)
      +            fun_kwargs = dict([(str(key), value) for (key, value) in fun_kwargs.items()])
      +            self._cache[check] = fun_name, list(fun_args), dict(fun_kwargs), default
      +        return fun_name, fun_args, fun_kwargs, default
      +        
      +        
      +    def _check_value(self, value, fun_name, fun_args, fun_kwargs):
      +        try:
      +            fun = self.functions[fun_name]
      +        except KeyError:
      +            raise VdtUnknownCheckError(fun_name)
      +        else:
      +            return fun(value, *fun_args, **fun_kwargs)
      +
      +
    def _parse_check(self, check):
        """
        Parse a check string such as ``integer(0, max=9)`` into
        ``(fun_name, fun_args, fun_kwargs, default)``.
        
        Raises ``VdtParamError`` if the argument string is malformed.
        """
        fun_match = self._func_re.match(check)
        if fun_match:
            fun_name = fun_match.group(1)
            arg_string = fun_match.group(2)
            arg_match = self._matchfinder.match(arg_string)
            if arg_match is None:
                # Bad syntax
                raise VdtParamError('Bad syntax in check "%s".' % check)
            fun_args = []
            fun_kwargs = {}
            # pull out args of group 2
            for arg in self._paramfinder.findall(arg_string):
                # args may need whitespace removing (before removing quotes)
                arg = arg.strip()
                # ``keyword=list(...)`` arguments get special handling.
                listmatch = self._list_arg.match(arg)
                if listmatch:
                    key, val = self._list_handle(listmatch)
                    fun_kwargs[key] = val
                    continue
                # Plain ``keyword=value`` arguments.
                keymatch = self._key_arg.match(arg)
                if keymatch:
                    val = keymatch.group(2)
                    if not val in ("'None'", '"None"'):
                        # Special case a quoted None
                        val = self._unquote(val)
                    fun_kwargs[keymatch.group(1)] = val
                    continue
                
                # Anything else is a positional argument.
                fun_args.append(self._unquote(arg))
        else:
            # allows for function names without (args)
            return check, (), {}, None

        # Default must be deleted if the value is specified too,
        # otherwise the check function will get a spurious "default" keyword arg
        default = fun_kwargs.pop('default', None)
        return fun_name, fun_args, fun_kwargs, default
      +
      +
      +    def _unquote(self, val):
      +        """Unquote a value if necessary."""
      +        if (len(val) >= 2) and (val[0] in ("'", '"')) and (val[0] == val[-1]):
      +            val = val[1:-1]
      +        return val
      +
      +
      +    def _list_handle(self, listmatch):
      +        """Take apart a ``keyword=list('val, 'val')`` type string."""
      +        out = []
      +        name = listmatch.group(1)
      +        args = listmatch.group(2)
      +        for arg in self._list_members.findall(args):
      +            out.append(self._unquote(arg))
      +        return name, out
      +
      +
    def _pass(self, value):
        """
        Dummy check that always passes
        
        Registered as both the ``''`` and ``'pass'`` checks.
        
        >>> vtor.check('', 0)
        0
        >>> vtor.check('', '0')
        '0'
        """
        return value
      +    
      +    
      +    def get_default_value(self, check):
      +        """
      +        Given a check, return the default value for the check
      +        (converted to the right type).
      +        
      +        If the check doesn't specify a default value then a
      +        ``KeyError`` will be raised.
      +        """
      +        fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)
      +        if default is None:
      +            raise KeyError('Check "%s" has no default value.' % check)
      +        value = self._handle_none(default)
      +        if value is None:
      +            return value
      +        return self._check_value(value, fun_name, fun_args, fun_kwargs)
      +
      +
      +def _is_num_param(names, values, to_float=False):
      +    """
      +    Return numbers from inputs or raise VdtParamError.
      +    
      +    Lets ``None`` pass through.
      +    Pass in keyword argument ``to_float=True`` to
      +    use float for the conversion rather than int.
      +    
      +    >>> _is_num_param(('', ''), (0, 1.0))
      +    [0, 1]
      +    >>> _is_num_param(('', ''), (0, 1.0), to_float=True)
      +    [0.0, 1.0]
      +    >>> _is_num_param(('a'), ('a'))
      +    Traceback (most recent call last):
      +    VdtParamError: passed an incorrect value "a" for parameter "a".
      +    """
      +    fun = to_float and float or int
      +    out_params = []
      +    for (name, val) in zip(names, values):
      +        if val is None:
      +            out_params.append(val)
      +        elif isinstance(val, (int, long, float, basestring)):
      +            try:
      +                out_params.append(fun(val))
      +            except ValueError, e:
      +                raise VdtParamError(name, val)
      +        else:
      +            raise VdtParamError(name, val)
      +    return out_params
      +
      +
      +# built in checks
      +# you can override these by setting the appropriate name
      +# in Validator.functions
      +# note: if the params are specified wrongly in your input string,
      +#       you will also raise errors.
      +
def is_integer(value, min=None, max=None):
    """
    A check that tests that a given value is an integer (int, or long)
    and optionally, between bounds. A negative value is accepted, while
    a float will fail.
    
    If the value is a string, then the conversion is done - if possible.
    Otherwise a VdtError is raised.
    
    >>> vtor.check('integer', '-1')
    -1
    >>> vtor.check('integer', '0')
    0
    >>> vtor.check('integer', 9)
    9
    >>> vtor.check('integer', 'a')
    Traceback (most recent call last):
    VdtTypeError: the value "a" is of the wrong type.
    >>> vtor.check('integer', '2.2')
    Traceback (most recent call last):
    VdtTypeError: the value "2.2" is of the wrong type.
    >>> vtor.check('integer(10)', '20')
    20
    >>> vtor.check('integer(max=20)', '15')
    15
    >>> vtor.check('integer(10)', '9')
    Traceback (most recent call last):
    VdtValueTooSmallError: the value "9" is too small.
    >>> vtor.check('integer(10)', 9)
    Traceback (most recent call last):
    VdtValueTooSmallError: the value "9" is too small.
    >>> vtor.check('integer(max=20)', '35')
    Traceback (most recent call last):
    VdtValueTooBigError: the value "35" is too big.
    >>> vtor.check('integer(max=20)', 35)
    Traceback (most recent call last):
    VdtValueTooBigError: the value "35" is too big.
    >>> vtor.check('integer(0, 9)', False)
    0
    """
    # Bounds may arrive as strings from the check spec; normalise to ints.
    (min_val, max_val) = _is_num_param(('min', 'max'), (min, max))
    if not isinstance(value, (int, long, basestring)):
        raise VdtTypeError(value)
    if isinstance(value, basestring):
        # if it's a string - does it represent an integer ?
        try:
            value = int(value)
        except ValueError:
            raise VdtTypeError(value)
    if (min_val is not None) and (value < min_val):
        raise VdtValueTooSmallError(value)
    if (max_val is not None) and (value > max_val):
        raise VdtValueTooBigError(value)
    return value
      +
      +
def is_float(value, min=None, max=None):
    """
    A check that tests that a given value is a float
    (an integer will be accepted), and optionally - that it is between bounds.
    
    If the value is a string, then the conversion is done - if possible.
    Otherwise a VdtError is raised.
    
    This can accept negative values.
    
    >>> vtor.check('float', '2')
    2.0
    
    From now on we multiply the value to avoid comparing decimals
    
    >>> vtor.check('float', '-6.8') * 10
    -68.0
    >>> vtor.check('float', '12.2') * 10
    122.0
    >>> vtor.check('float', 8.4) * 10
    84.0
    >>> vtor.check('float', 'a')
    Traceback (most recent call last):
    VdtTypeError: the value "a" is of the wrong type.
    >>> vtor.check('float(10.1)', '10.2') * 10
    102.0
    >>> vtor.check('float(max=20.2)', '15.1') * 10
    151.0
    >>> vtor.check('float(10.0)', '9.0')
    Traceback (most recent call last):
    VdtValueTooSmallError: the value "9.0" is too small.
    >>> vtor.check('float(max=20.0)', '35.0')
    Traceback (most recent call last):
    VdtValueTooBigError: the value "35.0" is too big.
    """
    # Bounds may arrive as strings from the check spec; normalise to floats.
    (min_val, max_val) = _is_num_param(
        ('min', 'max'), (min, max), to_float=True)
    if not isinstance(value, (int, long, float, basestring)):
        raise VdtTypeError(value)
    if not isinstance(value, float):
        # if it's a string - does it represent a float ?
        try:
            value = float(value)
        except ValueError:
            raise VdtTypeError(value)
    if (min_val is not None) and (value < min_val):
        raise VdtValueTooSmallError(value)
    if (max_val is not None) and (value > max_val):
        raise VdtValueTooBigError(value)
    return value
      +
      +
# Lookup table used by is_boolean(): maps the recognised (lower-cased)
# string forms, plus True/False themselves, to the corresponding bool.
bool_dict = {
    True: True, 'on': True, '1': True, 'true': True, 'yes': True, 
    False: False, 'off': False, '0': False, 'false': False, 'no': False,
}
      +
      +
def is_boolean(value):
    """
    Check if the value represents a boolean.
    
    >>> vtor.check('boolean', 0)
    0
    >>> vtor.check('boolean', False)
    0
    >>> vtor.check('boolean', '0')
    0
    >>> vtor.check('boolean', 'off')
    0
    >>> vtor.check('boolean', 'false')
    0
    >>> vtor.check('boolean', 'no')
    0
    >>> vtor.check('boolean', 'nO')
    0
    >>> vtor.check('boolean', 'NO')
    0
    >>> vtor.check('boolean', 1)
    1
    >>> vtor.check('boolean', True)
    1
    >>> vtor.check('boolean', '1')
    1
    >>> vtor.check('boolean', 'on')
    1
    >>> vtor.check('boolean', 'true')
    1
    >>> vtor.check('boolean', 'yes')
    1
    >>> vtor.check('boolean', 'Yes')
    1
    >>> vtor.check('boolean', 'YES')
    1
    >>> vtor.check('boolean', '')
    Traceback (most recent call last):
    VdtTypeError: the value "" is of the wrong type.
    >>> vtor.check('boolean', 'up')
    Traceback (most recent call last):
    VdtTypeError: the value "up" is of the wrong type.
    
    """
    if isinstance(value, basestring):
        # String forms are matched case-insensitively against bool_dict.
        try:
            return bool_dict[value.lower()]
        except KeyError:
            raise VdtTypeError(value)
    # we do an equality test rather than an identity test
    # this ensures Python 2.2 compatibility
    # and allows 0 and 1 to represent True and False
    if value == False:
        return False
    elif value == True:
        return True
    else:
        raise VdtTypeError(value)
      +
      +
def is_ip_addr(value):
    """
    Check that the supplied value is an Internet Protocol address, v.4,
    represented by a dotted-quad string, i.e. '1.2.3.4'.
    
    >>> vtor.check('ip_addr', '1 ')
    '1'
    >>> vtor.check('ip_addr', ' 1.2')
    '1.2'
    >>> vtor.check('ip_addr', ' 1.2.3 ')
    '1.2.3'
    >>> vtor.check('ip_addr', '1.2.3.4')
    '1.2.3.4'
    >>> vtor.check('ip_addr', '0.0.0.0')
    '0.0.0.0'
    >>> vtor.check('ip_addr', '255.255.255.255')
    '255.255.255.255'
    >>> vtor.check('ip_addr', '255.255.255.256')
    Traceback (most recent call last):
    VdtValueError: the value "255.255.255.256" is unacceptable.
    >>> vtor.check('ip_addr', '1.2.3.4.5')
    Traceback (most recent call last):
    VdtValueError: the value "1.2.3.4.5" is unacceptable.
    >>> vtor.check('ip_addr', 0)
    Traceback (most recent call last):
    VdtTypeError: the value "0" is of the wrong type.
    """
    if not isinstance(value, basestring):
        raise VdtTypeError(value)
    # Surrounding whitespace is tolerated and stripped.
    value = value.strip()
    try:
        # dottedQuadToNum (defined elsewhere in this module) validates the
        # address; per the doctests it also accepts partial quads like '1.2'.
        dottedQuadToNum(value)
    except ValueError:
        raise VdtValueError(value)
    return value
      +
      +
def is_list(value, min=None, max=None):
    """
    Check that the value is a list of values.
    
    You can optionally specify the minimum and maximum number of members.
    
    It does no check on list members.
    
    >>> vtor.check('list', ())
    []
    >>> vtor.check('list', [])
    []
    >>> vtor.check('list', (1, 2))
    [1, 2]
    >>> vtor.check('list', [1, 2])
    [1, 2]
    >>> vtor.check('list(3)', (1, 2))
    Traceback (most recent call last):
    VdtValueTooShortError: the value "(1, 2)" is too short.
    >>> vtor.check('list(max=5)', (1, 2, 3, 4, 5, 6))
    Traceback (most recent call last):
    VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long.
    >>> vtor.check('list(min=3, max=5)', (1, 2, 3, 4))
    [1, 2, 3, 4]
    >>> vtor.check('list', 0)
    Traceback (most recent call last):
    VdtTypeError: the value "0" is of the wrong type.
    >>> vtor.check('list', '12')
    Traceback (most recent call last):
    VdtTypeError: the value "12" is of the wrong type.
    """
    (min_len, max_len) = _is_num_param(('min', 'max'), (min, max))
    # Strings are sized sequences too, but are explicitly rejected here.
    if isinstance(value, basestring):
        raise VdtTypeError(value)
    try:
        num_members = len(value)
    except TypeError:
        # Unsized objects (e.g. ints) are not list-like.
        raise VdtTypeError(value)
    if min_len is not None and num_members < min_len:
        raise VdtValueTooShortError(value)
    if max_len is not None and num_members > max_len:
        raise VdtValueTooLongError(value)
    # Always returns a new list, even for tuple input (see doctests).
    return list(value)
      +
      +
def is_tuple(value, min=None, max=None):
    """
    Check that the value is a tuple of values.
    
    You can optionally specify the minimum and maximum number of members.
    
    It does no check on members.
    
    >>> vtor.check('tuple', ())
    ()
    >>> vtor.check('tuple', [])
    ()
    >>> vtor.check('tuple', (1, 2))
    (1, 2)
    >>> vtor.check('tuple', [1, 2])
    (1, 2)
    >>> vtor.check('tuple(3)', (1, 2))
    Traceback (most recent call last):
    VdtValueTooShortError: the value "(1, 2)" is too short.
    >>> vtor.check('tuple(max=5)', (1, 2, 3, 4, 5, 6))
    Traceback (most recent call last):
    VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long.
    >>> vtor.check('tuple(min=3, max=5)', (1, 2, 3, 4))
    (1, 2, 3, 4)
    >>> vtor.check('tuple', 0)
    Traceback (most recent call last):
    VdtTypeError: the value "0" is of the wrong type.
    >>> vtor.check('tuple', '12')
    Traceback (most recent call last):
    VdtTypeError: the value "12" is of the wrong type.
    """
    # Delegate type and length checks to is_list, then freeze the result.
    return tuple(is_list(value, min, max))
      +
      +
def is_string(value, min=None, max=None):
    """
    Check that the supplied value is a string.
    
    You can optionally specify the minimum and maximum number of members.
    
    >>> vtor.check('string', '0')
    '0'
    >>> vtor.check('string', 0)
    Traceback (most recent call last):
    VdtTypeError: the value "0" is of the wrong type.
    >>> vtor.check('string(2)', '12')
    '12'
    >>> vtor.check('string(2)', '1')
    Traceback (most recent call last):
    VdtValueTooShortError: the value "1" is too short.
    >>> vtor.check('string(min=2, max=3)', '123')
    '123'
    >>> vtor.check('string(min=2, max=3)', '1234')
    Traceback (most recent call last):
    VdtValueTooLongError: the value "1234" is too long.
    """
    if not isinstance(value, basestring):
        raise VdtTypeError(value)
    (min_len, max_len) = _is_num_param(('min', 'max'), (min, max))
    # NOTE(review): len() on a string cannot raise TypeError after the
    # isinstance check above - this try/except looks purely defensive.
    try:
        num_members = len(value)
    except TypeError:
        raise VdtTypeError(value)
    if min_len is not None and num_members < min_len:
        raise VdtValueTooShortError(value)
    if max_len is not None and num_members > max_len:
        raise VdtValueTooLongError(value)
    return value
      +
      +
      +def is_int_list(value, min=None, max=None):
      +    """
      +    Check that the value is a list of integers.
      +    
      +    You can optionally specify the minimum and maximum number of members.
      +    
      +    Each list member is checked that it is an integer.
      +    
      +    >>> vtor.check('int_list', ())
      +    []
      +    >>> vtor.check('int_list', [])
      +    []
      +    >>> vtor.check('int_list', (1, 2))
      +    [1, 2]
      +    >>> vtor.check('int_list', [1, 2])
      +    [1, 2]
      +    >>> vtor.check('int_list', [1, 'a'])
      +    Traceback (most recent call last):
      +    VdtTypeError: the value "a" is of the wrong type.
      +    """
      +    return [is_integer(mem) for mem in is_list(value, min, max)]
      +
      +
      +def is_bool_list(value, min=None, max=None):
      +    """
      +    Check that the value is a list of booleans.
      +    
      +    You can optionally specify the minimum and maximum number of members.
      +    
      +    Each list member is checked that it is a boolean.
      +    
      +    >>> vtor.check('bool_list', ())
      +    []
      +    >>> vtor.check('bool_list', [])
      +    []
      +    >>> check_res = vtor.check('bool_list', (True, False))
      +    >>> check_res == [True, False]
      +    1
      +    >>> check_res = vtor.check('bool_list', [True, False])
      +    >>> check_res == [True, False]
      +    1
      +    >>> vtor.check('bool_list', [True, 'a'])
      +    Traceback (most recent call last):
      +    VdtTypeError: the value "a" is of the wrong type.
      +    """
      +    return [is_boolean(mem) for mem in is_list(value, min, max)]
      +
      +
      +def is_float_list(value, min=None, max=None):
      +    """
      +    Check that the value is a list of floats.
      +    
      +    You can optionally specify the minimum and maximum number of members.
      +    
      +    Each list member is checked that it is a float.
      +    
      +    >>> vtor.check('float_list', ())
      +    []
      +    >>> vtor.check('float_list', [])
      +    []
      +    >>> vtor.check('float_list', (1, 2.0))
      +    [1.0, 2.0]
      +    >>> vtor.check('float_list', [1, 2.0])
      +    [1.0, 2.0]
      +    >>> vtor.check('float_list', [1, 'a'])
      +    Traceback (most recent call last):
      +    VdtTypeError: the value "a" is of the wrong type.
      +    """
      +    return [is_float(mem) for mem in is_list(value, min, max)]
      +
      +
      +def is_string_list(value, min=None, max=None):
      +    """
      +    Check that the value is a list of strings.
      +    
      +    You can optionally specify the minimum and maximum number of members.
      +    
      +    Each list member is checked that it is a string.
      +    
      +    >>> vtor.check('string_list', ())
      +    []
      +    >>> vtor.check('string_list', [])
      +    []
      +    >>> vtor.check('string_list', ('a', 'b'))
      +    ['a', 'b']
      +    >>> vtor.check('string_list', ['a', 1])
      +    Traceback (most recent call last):
      +    VdtTypeError: the value "1" is of the wrong type.
      +    >>> vtor.check('string_list', 'hello')
      +    Traceback (most recent call last):
      +    VdtTypeError: the value "hello" is of the wrong type.
      +    """
      +    if isinstance(value, basestring):
      +        raise VdtTypeError(value)
      +    return [is_string(mem) for mem in is_list(value, min, max)]
      +
      +
      +def is_ip_addr_list(value, min=None, max=None):
      +    """
      +    Check that the value is a list of IP addresses.
      +    
      +    You can optionally specify the minimum and maximum number of members.
      +    
      +    Each list member is checked that it is an IP address.
      +    
      +    >>> vtor.check('ip_addr_list', ())
      +    []
      +    >>> vtor.check('ip_addr_list', [])
      +    []
      +    >>> vtor.check('ip_addr_list', ('1.2.3.4', '5.6.7.8'))
      +    ['1.2.3.4', '5.6.7.8']
      +    >>> vtor.check('ip_addr_list', ['a'])
      +    Traceback (most recent call last):
      +    VdtValueError: the value "a" is unacceptable.
      +    """
      +    return [is_ip_addr(mem) for mem in is_list(value, min, max)]
      +
      +
      +def force_list(value, min=None, max=None):
      +    """
      +    Check that a value is a list, coercing strings into
      +    a list with one member. Useful where users forget the
      +    trailing comma that turns a single value into a list.
      +    
      +    You can optionally specify the minimum and maximum number of members.
      +    A minumum of greater than one will fail if the user only supplies a
      +    string.
      +    
      +    >>> vtor.check('force_list', ())
      +    []
      +    >>> vtor.check('force_list', [])
      +    []
      +    >>> vtor.check('force_list', 'hello')
      +    ['hello']
      +    """
      +    if not isinstance(value, (list, tuple)):
      +        value = [value]
      +    return is_list(value, min, max)
      +    
      +    
      +
# Member-type dispatch table for is_mixed_list: maps the type names
# accepted in a mixed_list() check to their check functions.
fun_dict = {
    'integer': is_integer,
    'float': is_float,
    'ip_addr': is_ip_addr,
    'string': is_string,
    'boolean': is_boolean,
}
      +
      +
def is_mixed_list(value, *args):
    """
    Check that the value is a list.
    Allow specifying the type of each member.
    Work on lists of specific lengths.
    
    You specify each member as a positional argument specifying type
    
    Each type should be one of the following strings :
      'integer', 'float', 'ip_addr', 'string', 'boolean'
    
    So you can specify a list of two strings, followed by
    two integers as :
    
      mixed_list('string', 'string', 'integer', 'integer')
    
    The length of the list must match the number of positional
    arguments you supply.
    
    >>> mix_str = "mixed_list('integer', 'float', 'ip_addr', 'string', 'boolean')"
    >>> check_res = vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', True))
    >>> check_res == [1, 2.0, '1.2.3.4', 'a', True]
    1
    >>> check_res = vtor.check(mix_str, ('1', '2.0', '1.2.3.4', 'a', 'True'))
    >>> check_res == [1, 2.0, '1.2.3.4', 'a', True]
    1
    >>> vtor.check(mix_str, ('b', 2.0, '1.2.3.4', 'a', True))
    Traceback (most recent call last):
    VdtTypeError: the value "b" is of the wrong type.
    >>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a'))
    Traceback (most recent call last):
    VdtValueTooShortError: the value "(1, 2.0, '1.2.3.4', 'a')" is too short.
    >>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', 1, 'b'))
    Traceback (most recent call last):
    VdtValueTooLongError: the value "(1, 2.0, '1.2.3.4', 'a', 1, 'b')" is too long.
    >>> vtor.check(mix_str, 0)
    Traceback (most recent call last):
    VdtTypeError: the value "0" is of the wrong type.
    
    This test requires an elaborate setup, because of a change in error string
    output from the interpreter between Python 2.2 and 2.3 .
    
    >>> res_seq = (
    ...     'passed an incorrect value "',
    ...     'yoda',
    ...     '" for parameter "mixed_list".',
    ... )
    >>> res_str = "'".join(res_seq)
    >>> try:
    ...     vtor.check('mixed_list("yoda")', ('a'))
    ... except VdtParamError, err:
    ...     str(err) == res_str
    1
    """
    try:
        length = len(value)
    except TypeError:
        # Unsized objects are not list-like.
        raise VdtTypeError(value)
    # The list length must exactly match the number of declared member types.
    if length < len(args):
        raise VdtValueTooShortError(value)
    elif length > len(args):
        raise VdtValueTooLongError(value)
    try:
        # Dispatch each member to the check named for its position.
        return [fun_dict[arg](val) for arg, val in zip(args, value)]
    except KeyError, e:
        # An unrecognised type name was supplied in ``args``.
        raise VdtParamError('mixed_list', e)
      +
      +
      +def is_option(value, *options):
      +    """
      +    This check matches the value to any of a set of options.
      +    
      +    >>> vtor.check('option("yoda", "jedi")', 'yoda')
      +    'yoda'
      +    >>> vtor.check('option("yoda", "jedi")', 'jed')
      +    Traceback (most recent call last):
      +    VdtValueError: the value "jed" is unacceptable.
      +    >>> vtor.check('option("yoda", "jedi")', 0)
      +    Traceback (most recent call last):
      +    VdtTypeError: the value "0" is of the wrong type.
      +    """
      +    if not isinstance(value, basestring):
      +        raise VdtTypeError(value)
      +    if not value in options:
      +        raise VdtValueError(value)
      +    return value
      +
      +
def _test(value, *args, **keywargs):
    """
    A function that exists for test purposes.
    
    It simply echoes back the value and the parsed positional and
    keyword arguments, so the doctests below can inspect how check
    strings are parsed.
    
    >>> checks = [
    ...     '3, 6, min=1, max=3, test=list(a, b, c)',
    ...     '3',
    ...     '3, 6',
    ...     '3,',
    ...     'min=1, test="a b c"',
    ...     'min=5, test="a, b, c"',
    ...     'min=1, max=3, test="a, b, c"',
    ...     'min=-100, test=-99',
    ...     'min=1, max=3',
    ...     '3, 6, test="36"',
    ...     '3, 6, test="a, b, c"',
    ...     '3, max=3, test=list("a", "b", "c")',
    ...     '''3, max=3, test=list("'a'", 'b', "x=(c)")''',
    ...     "test='x=fish(3)'",
    ...    ]
    >>> v = Validator({'test': _test})
    >>> for entry in checks:
    ...     print v.check(('test(%s)' % entry), 3)
    (3, ('3', '6'), {'test': ['a', 'b', 'c'], 'max': '3', 'min': '1'})
    (3, ('3',), {})
    (3, ('3', '6'), {})
    (3, ('3',), {})
    (3, (), {'test': 'a b c', 'min': '1'})
    (3, (), {'test': 'a, b, c', 'min': '5'})
    (3, (), {'test': 'a, b, c', 'max': '3', 'min': '1'})
    (3, (), {'test': '-99', 'min': '-100'})
    (3, (), {'max': '3', 'min': '1'})
    (3, ('3', '6'), {'test': '36'})
    (3, ('3', '6'), {'test': 'a, b, c'})
    (3, ('3',), {'test': ['a', 'b', 'c'], 'max': '3'})
    (3, ('3',), {'test': ["'a'", 'b', 'x=(c)'], 'max': '3'})
    (3, (), {'test': 'x=fish(3)'})
    
    >>> v = Validator()
    >>> v.check('integer(default=6)', '3')
    3
    >>> v.check('integer(default=6)', None, True)
    6
    >>> v.get_default_value('integer(default=6)')
    6
    >>> v.get_default_value('float(default=6)')
    6.0
    >>> v.get_default_value('pass(default=None)')
    >>> v.get_default_value("string(default='None')")
    'None'
    >>> v.get_default_value('pass')
    Traceback (most recent call last):
    KeyError: 'Check "pass" has no default value.'
    >>> v.get_default_value('pass(default=list(1, 2, 3, 4))')
    ['1', '2', '3', '4']
    
    >>> v = Validator()
    >>> v.check("pass(default=None)", None, True)
    >>> v.check("pass(default='None')", None, True)
    'None'
    >>> v.check('pass(default="None")', None, True)
    'None'
    >>> v.check('pass(default=list(1, 2, 3, 4))', None, True)
    ['1', '2', '3', '4']
    
    Bug test for unicode arguments
    >>> v = Validator()
    >>> v.check(u'string(min=4)', u'test')
    u'test'
    
    >>> v = Validator()
    >>> v.get_default_value(u'string(min=4, default="1234")')
    u'1234'
    >>> v.check(u'string(min=4, default="1234")', u'test')
    u'test'
    
    >>> v = Validator()
    >>> default = v.get_default_value('string(default=None)')
    >>> default == None
    1
    """
    return (value, args, keywargs)
      +
      +
def _test2():
    """
    Regression doctests: presumably covering hash characters inside a
    quoted default and a trailing comment after a check - see examples.
    
    >>> 
    >>> v = Validator()
    >>> v.get_default_value('string(default="#ff00dd")')
    '#ff00dd'
    >>> v.get_default_value('integer(default=3) # comment')
    3
    """
      +
+def _test3():
+    # Regression doctests: values and defaults containing literal newline
+    # characters must survive checking unchanged. Uses the module-level
+    # ``vtor`` Validator supplied through the doctest globals.
+    r"""
+    >>> vtor.check('string(default="")', '', missing=True)
+    ''
+    >>> vtor.check('string(default="\n")', '', missing=True)
+    '\n'
+    >>> print vtor.check('string(default="\n")', '', missing=True),
+    
+    >>> vtor.check('string()', '\n')
+    '\n'
+    >>> vtor.check('string(default="\n\n\n")', '', missing=True)
+    '\n\n\n'
+    >>> vtor.check('string()', 'random \n text goes here\n\n')
+    'random \n text goes here\n\n'
+    >>> vtor.check('string(default=" \nrandom text\ngoes \n here\n\n ")',
+    ... '', missing=True)
+    ' \nrandom text\ngoes \n here\n\n '
+    >>> vtor.check("string(default='\n\n\n')", '', missing=True)
+    '\n\n\n'
+    >>> vtor.check("option('\n','a','b',default='\n')", '', missing=True)
+    '\n'
+    >>> vtor.check("string_list()", ['foo', '\n', 'bar'])
+    ['foo', '\n', 'bar']
+    >>> vtor.check("string_list(default=list('\n'))", '', missing=True)
+    ['\n']
+    """
      +    
      +    
+if __name__ == '__main__':
+    # run the code tests in doctest format
+    import sys
+    import doctest
+    m = sys.modules.get('__main__')
+    # Run the doctests against a copy of the module globals, extended with a
+    # shared Validator instance exposed as ``vtor`` (used by _test3).
+    globs = m.__dict__.copy()
+    globs.update({
+        'vtor': Validator(),
+    })
+    doctest.testmod(m, globs=globs)
      diff --git a/python/devtools/migrate-l10n/README.rst b/python/devtools/migrate-l10n/README.rst
      new file mode 100644
      index 000000000..70f5a6303
      --- /dev/null
      +++ b/python/devtools/migrate-l10n/README.rst
      @@ -0,0 +1,16 @@
      +devtools-l10n-migration script
      +==============================
      +
      +For devtools.html, devtools will no longer rely on DTD files. This migration
      +script is aimed at localizers to automate the migration of strings from DTD to
      +properties files.
      +
+How to run this script:
      +
      +To migrate all configuration files:
      +  python migrate/main.py path/to/your/l10n/repo/ -c migrate/conf/
      +
      +To migrate only one configuration file:
      +  python migrate/main.py path/to/your/l10n/repo/ -c migrate/conf/bug1294186
      +
      +All configuration files should be named after the bug where specific devtools strings were migrated.
      diff --git a/python/devtools/migrate-l10n/migrate/__init__.py b/python/devtools/migrate-l10n/migrate/__init__.py
      new file mode 100644
      index 000000000..e69de29bb
      diff --git a/python/devtools/migrate-l10n/migrate/conf/bug1294186 b/python/devtools/migrate-l10n/migrate/conf/bug1294186
      new file mode 100644
      index 000000000..0b91b4d58
      --- /dev/null
      +++ b/python/devtools/migrate-l10n/migrate/conf/bug1294186
      @@ -0,0 +1,22 @@
      +font-inspector.properties:fontinspector.seeAll.tooltip = font-inspector.dtd:showAllFonts
      +font-inspector.properties:fontinspector.seeAll = font-inspector.dtd:showAllFontsUsed
      +font-inspector.properties:fontinspector.usedAs = font-inspector.dtd:usedAs
      +font-inspector.properties:fontinspector.system = font-inspector.dtd:system
      +font-inspector.properties:fontinspector.remote = font-inspector.dtd:remote
      +font-inspector.properties:fontinspector.previewText = font-inspector.dtd:previewHint
      +
      +inspector.properties:inspector.eyedropper.label = inspector.dtd:inspectorEyeDropper.label
      +inspector.properties:inspector.breadcrumbs.label = inspector.dtd:inspectorBreadcrumbsGroup
      +
      +boxmodel.properties:boxmodel.title = layoutview.dtd:layoutViewTitle
      +boxmodel.properties:boxmodel.margin = layoutview.dtd:margin.tooltip
      +boxmodel.properties:boxmodel.padding = layoutview.dtd:padding.tooltip
      +boxmodel.properties:boxmodel.border = layoutview.dtd:border.tooltip
      +boxmodel.properties:boxmodel.content = layoutview.dtd:content.tooltip
      +boxmodel.properties:boxmodel.geometryButton.tooltip = layoutview.dtd:geometry.button.tooltip
      +
      +inspector.properties:inspector.browserStyles.label = styleinspector.dtd:browserStylesLabel
      +inspector.properties:inspector.filterStyles.placeholder = styleinspector.dtd:filterStylesPlaceholder
      +inspector.properties:inspector.addRule.tooltip = styleinspector.dtd:addRuleButtonTooltip
      +inspector.properties:inspector.togglePseudo.tooltip = styleinspector.dtd:togglePseudoClassPanel
      +inspector.properties:inspector.noProperties = styleinspector.dtd:noPropertiesFound
      diff --git a/python/devtools/migrate-l10n/migrate/conf/bug1308500_1309191 b/python/devtools/migrate-l10n/migrate/conf/bug1308500_1309191
      new file mode 100644
      index 000000000..177236b33
      --- /dev/null
      +++ b/python/devtools/migrate-l10n/migrate/conf/bug1308500_1309191
      @@ -0,0 +1,97 @@
      +netmonitor.properties:netmonitor.perfNotice1 = netmonitor.dtd:netmonitorUI.perfNotice1
      +netmonitor.properties:netmonitor.perfNotice2 = netmonitor.dtd:netmonitorUI.perfNotice2
      +netmonitor.properties:netmonitor.perfNotice3 = netmonitor.dtd:netmonitorUI.perfNotice3
      +netmonitor.properties:netmonitor.reloadNotice1 = netmonitor.dtd:netmonitorUI.reloadNotice1
      +netmonitor.properties:netmonitor.reloadNotice2 = netmonitor.dtd:netmonitorUI.reloadNotice2
      +netmonitor.properties:netmonitor.reloadNotice3 = netmonitor.dtd:netmonitorUI.reloadNotice3
      +netmonitor.properties:netmonitor.toolbar.status3 = netmonitor.dtd:netmonitorUI.toolbar.status3
      +netmonitor.properties:netmonitor.toolbar.method = netmonitor.dtd:netmonitorUI.toolbar.method
      +netmonitor.properties:netmonitor.toolbar.file = netmonitor.dtd:netmonitorUI.toolbar.file
      +netmonitor.properties:netmonitor.toolbar.domain = netmonitor.dtd:netmonitorUI.toolbar.domain
      +netmonitor.properties:netmonitor.toolbar.cause = netmonitor.dtd:netmonitorUI.toolbar.cause
      +netmonitor.properties:netmonitor.toolbar.type = netmonitor.dtd:netmonitorUI.toolbar.type
      +netmonitor.properties:netmonitor.toolbar.transferred = netmonitor.dtd:netmonitorUI.toolbar.transferred
      +netmonitor.properties:netmonitor.toolbar.size = netmonitor.dtd:netmonitorUI.toolbar.size
      +netmonitor.properties:netmonitor.toolbar.waterfall = netmonitor.dtd:netmonitorUI.toolbar.waterfall
      +netmonitor.properties:netmonitor.tab.headers = netmonitor.dtd:netmonitorUI.tab.headers
      +netmonitor.properties:netmonitor.tab.cookies = netmonitor.dtd:netmonitorUI.tab.cookies
      +netmonitor.properties:netmonitor.tab.params = netmonitor.dtd:netmonitorUI.tab.params
      +netmonitor.properties:netmonitor.tab.response = netmonitor.dtd:netmonitorUI.tab.response
      +netmonitor.properties:netmonitor.tab.timings = netmonitor.dtd:netmonitorUI.tab.timings
      +netmonitor.properties:netmonitor.tab.preview = netmonitor.dtd:netmonitorUI.tab.preview
      +netmonitor.properties:netmonitor.tab.security = netmonitor.dtd:netmonitorUI.tab.security
      +netmonitor.properties:netmonitor.toolbar.filter.all = netmonitor.dtd:netmonitorUI.footer.filterAll
      +netmonitor.properties:netmonitor.toolbar.filter.html = netmonitor.dtd:netmonitorUI.footer.filterHTML
      +netmonitor.properties:netmonitor.toolbar.filter.css = netmonitor.dtd:netmonitorUI.footer.filterCSS
      +netmonitor.properties:netmonitor.toolbar.filter.js = netmonitor.dtd:netmonitorUI.footer.filterJS
      +netmonitor.properties:netmonitor.toolbar.filter.xhr = netmonitor.dtd:netmonitorUI.footer.filterXHR
      +netmonitor.properties:netmonitor.toolbar.filter.fonts = netmonitor.dtd:netmonitorUI.footer.filterFonts
      +netmonitor.properties:netmonitor.toolbar.filter.images = netmonitor.dtd:netmonitorUI.footer.filterImages
      +netmonitor.properties:netmonitor.toolbar.filter.media = netmonitor.dtd:netmonitorUI.footer.filterMedia
      +netmonitor.properties:netmonitor.toolbar.filter.flash = netmonitor.dtd:netmonitorUI.footer.filterFlash
      +netmonitor.properties:netmonitor.toolbar.filter.ws = netmonitor.dtd:netmonitorUI.footer.filterWS
      +netmonitor.properties:netmonitor.toolbar.filter.other = netmonitor.dtd:netmonitorUI.footer.filterOther
      +netmonitor.properties:netmonitor.toolbar.filterFreetext.label = netmonitor.dtd:netmonitorUI.footer.filterFreetext.label
      +netmonitor.properties:netmonitor.toolbar.clear = netmonitor.dtd:netmonitorUI.footer.clear
      +netmonitor.properties:netmonitor.toolbar.perf = netmonitor.dtd:netmonitorUI.footer.perf
      +netmonitor.properties:netmonitor.panesButton.tooltip = netmonitor.dtd:netmonitorUI.panesButton.tooltip
      +netmonitor.properties:netmonitor.summary.url = netmonitor.dtd:netmonitorUI.summary.url
      +netmonitor.properties:netmonitor.summary.method = netmonitor.dtd:netmonitorUI.summary.method
      +netmonitor.properties:netmonitor.summary.address = netmonitor.dtd:netmonitorUI.summary.address
      +netmonitor.properties:netmonitor.summary.status = netmonitor.dtd:netmonitorUI.summary.status
      +netmonitor.properties:netmonitor.summary.version = netmonitor.dtd:netmonitorUI.summary.version
      +netmonitor.properties:netmonitor.summary.editAndResend = netmonitor.dtd:netmonitorUI.summary.editAndResend
      +netmonitor.properties:netmonitor.summary.rawHeaders = netmonitor.dtd:netmonitorUI.summary.rawHeaders
      +netmonitor.properties:netmonitor.summary.rawHeaders.requestHeaders = netmonitor.dtd:netmonitorUI.summary.rawHeaders.requestHeaders
      +netmonitor.properties:netmonitor.summary.rawHeaders.responseHeaders = netmonitor.dtd:netmonitorUI.summary.rawHeaders.responseHeaders
      +netmonitor.properties:netmonitor.summary.size = netmonitor.dtd:netmonitorUI.summary.size
      +netmonitor.properties:netmonitor.response.name = netmonitor.dtd:netmonitorUI.response.name
      +netmonitor.properties:netmonitor.response.dimensions = netmonitor.dtd:netmonitorUI.response.dimensions
      +netmonitor.properties:netmonitor.response.mime = netmonitor.dtd:netmonitorUI.response.mime
      +netmonitor.properties:netmonitor.timings.blocked = netmonitor.dtd:netmonitorUI.timings.blocked
      +netmonitor.properties:netmonitor.timings.dns = netmonitor.dtd:netmonitorUI.timings.dns
      +netmonitor.properties:netmonitor.timings.connect = netmonitor.dtd:netmonitorUI.timings.connect
      +netmonitor.properties:netmonitor.timings.send = netmonitor.dtd:netmonitorUI.timings.send
      +netmonitor.properties:netmonitor.timings.wait = netmonitor.dtd:netmonitorUI.timings.wait
      +netmonitor.properties:netmonitor.timings.receive = netmonitor.dtd:netmonitorUI.timings.receive
      +netmonitor.properties:netmonitor.security.warning.cipher = netmonitor.dtd:netmonitorUI.security.warning.cipher
      +netmonitor.properties:netmonitor.security.error = netmonitor.dtd:netmonitorUI.security.error
      +netmonitor.properties:netmonitor.security.protocolVersion = netmonitor.dtd:netmonitorUI.security.protocolVersion
      +netmonitor.properties:netmonitor.security.cipherSuite = netmonitor.dtd:netmonitorUI.security.cipherSuite
      +netmonitor.properties:netmonitor.security.hsts = netmonitor.dtd:netmonitorUI.security.hsts
      +netmonitor.properties:netmonitor.security.hpkp = netmonitor.dtd:netmonitorUI.security.hpkp
      +netmonitor.properties:netmonitor.security.connection = netmonitor.dtd:netmonitorUI.security.connection
      +netmonitor.properties:netmonitor.security.certificate = netmonitor.dtd:netmonitorUI.security.certificate
      +netmonitor.properties:netmonitor.context.copyUrl = netmonitor.dtd:netmonitorUI.context.copyUrl
      +netmonitor.properties:netmonitor.context.copyUrl.accesskey = netmonitor.dtd:netmonitorUI.context.copyUrl.accesskey
      +netmonitor.properties:netmonitor.context.copyUrlParams = netmonitor.dtd:netmonitorUI.context.copyUrlParams
      +netmonitor.properties:netmonitor.context.copyUrlParams.accesskey = netmonitor.dtd:netmonitorUI.context.copyUrlParams.accesskey
      +netmonitor.properties:netmonitor.context.copyPostData = netmonitor.dtd:netmonitorUI.context.copyPostData
      +netmonitor.properties:netmonitor.context.copyPostData.accesskey = netmonitor.dtd:netmonitorUI.context.copyPostData.accesskey
      +netmonitor.properties:netmonitor.context.copyAsCurl = netmonitor.dtd:netmonitorUI.context.copyAsCurl
      +netmonitor.properties:netmonitor.context.copyAsCurl.accesskey = netmonitor.dtd:netmonitorUI.context.copyAsCurl.accesskey
      +netmonitor.properties:netmonitor.context.copyRequestHeaders = netmonitor.dtd:netmonitorUI.context.copyRequestHeaders
      +netmonitor.properties:netmonitor.context.copyRequestHeaders.accesskey = netmonitor.dtd:netmonitorUI.context.copyRequestHeaders.accesskey
      +netmonitor.properties:netmonitor.context.copyResponseHeaders = netmonitor.dtd:netmonitorUI.context.copyResponseHeaders
      +netmonitor.properties:netmonitor.context.copyResponseHeaders.accesskey = netmonitor.dtd:netmonitorUI.context.copyResponseHeaders.accesskey
      +netmonitor.properties:netmonitor.context.copyResponse = netmonitor.dtd:netmonitorUI.context.copyResponse
      +netmonitor.properties:netmonitor.context.copyResponse.accesskey = netmonitor.dtd:netmonitorUI.context.copyResponse.accesskey
      +netmonitor.properties:netmonitor.context.copyImageAsDataUri = netmonitor.dtd:netmonitorUI.context.copyImageAsDataUri
      +netmonitor.properties:netmonitor.context.copyImageAsDataUri.accesskey = netmonitor.dtd:netmonitorUI.context.copyImageAsDataUri.accesskey
      +netmonitor.properties:netmonitor.context.copyAllAsHar = netmonitor.dtd:netmonitorUI.context.copyAllAsHar
      +netmonitor.properties:netmonitor.context.copyAllAsHar.accesskey = netmonitor.dtd:netmonitorUI.context.copyAllAsHar.accesskey
      +netmonitor.properties:netmonitor.context.saveAllAsHar = netmonitor.dtd:netmonitorUI.context.saveAllAsHar
      +netmonitor.properties:netmonitor.context.saveAllAsHar.accesskey = netmonitor.dtd:netmonitorUI.context.saveAllAsHar.accesskey
      +netmonitor.properties:netmonitor.context.editAndResend = netmonitor.dtd:netmonitorUI.summary.editAndResend
      +netmonitor.properties:netmonitor.context.editAndResend.accesskey = netmonitor.dtd:netmonitorUI.summary.editAndResend.accesskey
      +netmonitor.properties:netmonitor.context.newTab = netmonitor.dtd:netmonitorUI.context.newTab
      +netmonitor.properties:netmonitor.context.newTab.accesskey = netmonitor.dtd:netmonitorUI.context.newTab.accesskey
      +netmonitor.properties:netmonitor.context.perfTools = netmonitor.dtd:netmonitorUI.context.perfTools
      +netmonitor.properties:netmonitor.context.perfTools.accesskey = netmonitor.dtd:netmonitorUI.context.perfTools.accesskey
      +netmonitor.properties:netmonitor.custom.newRequest = netmonitor.dtd:netmonitorUI.custom.newRequest
      +netmonitor.properties:netmonitor.custom.query = netmonitor.dtd:netmonitorUI.custom.query
      +netmonitor.properties:netmonitor.custom.headers = netmonitor.dtd:netmonitorUI.custom.headers
      +netmonitor.properties:netmonitor.custom.postData = netmonitor.dtd:netmonitorUI.custom.postData
      +netmonitor.properties:netmonitor.custom.send = netmonitor.dtd:netmonitorUI.custom.send
      +netmonitor.properties:netmonitor.custom.cancel = netmonitor.dtd:netmonitorUI.custom.cancel
      +netmonitor.properties:netmonitor.backButton = netmonitor.dtd:netmonitorUI.backButton
      diff --git a/python/devtools/migrate-l10n/migrate/main.py b/python/devtools/migrate-l10n/migrate/main.py
      new file mode 100644
      index 000000000..0a1d468a8
      --- /dev/null
      +++ b/python/devtools/migrate-l10n/migrate/main.py
      @@ -0,0 +1,261 @@
      +import argparse
      +import glob
      +import HTMLParser
      +import logging
      +import os
      +import re
      +import sys
      +import urllib2
      +
      +
      +# Import compare-locales parser from parent folder.
      +script_path = os.path.dirname(os.path.realpath(__file__))
      +compare_locales_path = os.path.join(script_path, '../../../compare-locales')
      +sys.path.insert(0, compare_locales_path)
      +from compare_locales import parser
      +
      +
+# Configure logging format and level
+logging.basicConfig(format='  [%(levelname)s] %(message)s', level=logging.INFO)
+
+
+# License header to use when creating new properties files.
+DEFAULT_HEADER = ('# This Source Code Form is subject to the terms of the '
+                  'Mozilla Public\n# License, v. 2.0. If a copy of the MPL '
+                  'was not distributed with this\n# file, You can obtain '
+                  'one at http://mozilla.org/MPL/2.0/.\n')
+
+
+# Base url to retrieve properties files on central, that will be parsed for
+# localization notes.
+CENTRAL_BASE_URL = ('https://hg.mozilla.org/'
+                    'mozilla-central/raw-file/tip/'
+                    'devtools/client/locales/en-US/')
+
+
+# HTML parser to translate HTML entities in dtd files.
+# NOTE(review): HTMLParser and urllib2 are Python-2-only modules, so this
+# script requires Python 2 (consistent with its build-system siblings).
+HTML_PARSER = HTMLParser.HTMLParser()
+
+# Cache to store properties files retrieved over the network.
+# Maps properties filename -> list of raw lines ([] when the fetch failed).
+central_prop_cache = {}
+
+# Cache the parsed entities from the existing DTD files.
+# Maps dtd path -> result of the compare-locales parser's parse().
+dtd_entities_cache = {}
      +
      +
      +# Retrieve the content of the current version of a properties file for the
      +# provided filename, from devtools/client on mozilla central. Will return an
      +# empty array if the file can't be retrieved or read.
      +def get_central_prop_content(prop_filename):
      +    if prop_filename in central_prop_cache:
      +        return central_prop_cache[prop_filename]
      +
      +    url = CENTRAL_BASE_URL + prop_filename
      +    logging.info('loading localization file from central: {%s}' % url)
      +
      +    try:
      +        central_prop_cache[prop_filename] = urllib2.urlopen(url).readlines()
      +    except:
      +        logging.error('failed to load properties file from central: {%s}'
      +                      % url)
      +        central_prop_cache[prop_filename] = []
      +
      +    return central_prop_cache[prop_filename]
      +
      +
      +# Retrieve the current en-US localization notes for the provided prop_name.
      +def get_localization_note(prop_name, prop_filename):
      +    prop_content = get_central_prop_content(prop_filename)
      +
      +    comment_buffer = []
      +    for i, line in enumerate(prop_content):
      +        # Remove line breaks.
      +        line = line.strip('\n').strip('\r')
      +
      +        if line.startswith('#'):
      +            # Comment line, add to the current comment buffer.
      +            comment_buffer.append(line)
      +        elif re.search('(^|\n)' + re.escape(prop_name) + '\s*=', line):
      +            # Property found, the current comment buffer is the localization
      +            # note.
      +            break;
      +        else:
      +            # No match, not a comment, reinitialize the comment buffer.
      +            comment_buffer = []
      +
      +    return '\n'.join(comment_buffer)
      +
      +
      +# Retrieve the parsed DTD entities for a provided path. Results are cached by
      +# dtd path.
      +def get_dtd_entities(dtd_path):
      +    if dtd_path in dtd_entities_cache:
      +        return dtd_entities_cache[dtd_path]
      +
      +    dtd_parser = parser.getParser('.dtd')
      +    dtd_parser.readFile(dtd_path)
      +    dtd_entities_cache[dtd_path] = dtd_parser.parse()
      +    return dtd_entities_cache[dtd_path]
      +
      +
      +# Extract the value of an entity in a dtd file.
      +def get_translation_from_dtd(dtd_path, entity_name):
      +    entities, map = get_dtd_entities(dtd_path)
      +    if entity_name not in map:
      +        # Bail out if translation is missing.
      +        return
      +
      +    key = map[entity_name]
      +    entity = entities[key]
      +    translation = HTML_PARSER.unescape(entity.val)
      +    return translation.encode('utf-8')
      +
      +
      +# Extract the header and file wide comments for the provided properties file
      +# filename.
      +def get_properties_header(prop_filename):
      +    prop_content = get_central_prop_content(prop_filename)
      +
      +    # if the file content is empty, return the default license header.
      +    if len(prop_content) == 0:
      +        return DEFAULT_HEADER
      +
      +    header_buffer = []
      +    for i, line in enumerate(prop_content):
      +        # remove line breaks.
      +        line = line.strip('\n').strip('\r')
      +
      +        # regexp matching keys extracted form parser.py.
      +        is_entity_line = re.search('^(\s*)'
      +                                   '((?:[#!].*?\n\s*)*)'
      +                                   '([^#!\s\n][^=:\n]*?)\s*[:=][ \t]*', line)
      +        is_loc_note = re.search('^(\s*)'
      +                                '\#\s*LOCALIZATION NOTE\s*\([^)]+\)', line)
      +        if is_entity_line or is_loc_note:
      +            # header finished, break the loop.
      +            break
      +        else:
      +            # header line, add to the current buffer.
      +            header_buffer.append(line)
      +
      +    # concatenate the current buffer and return.
      +    return '\n'.join(header_buffer)
      +
      +
      +# Create a new properties file at the provided path.
      +def create_properties_file(prop_path):
      +    logging.info('creating new *.properties file: {%s}' % prop_path)
      +
      +    prop_filename = os.path.basename(prop_path)
      +    header = get_properties_header(prop_filename)
      +
      +    prop_file = open(prop_path, 'w+')
      +    prop_file.write(header)
      +    prop_file.close()
      +
      +
      +# Migrate a single string entry for a dtd to a properties file.
      +def migrate_string(dtd_path, prop_path, dtd_name, prop_name):
      +    if not os.path.isfile(dtd_path):
      +        logging.error('dtd file can not be found at: {%s}' % dtd_path)
      +        return
      +
      +    translation = get_translation_from_dtd(dtd_path, dtd_name)
      +    if not translation:
      +        logging.error('translation could not be found for: {%s} in {%s}'
      +                      % (dtd_name, dtd_path))
      +        return
      +
      +    # Create properties file if missing.
      +    if not os.path.isfile(prop_path):
      +        create_properties_file(prop_path)
      +
      +    if not os.path.isfile(prop_path):
      +        logging.error('could not create new properties file at: {%s}'
      +                      % prop_path)
      +        return
      +
      +    prop_line = prop_name + '=' + translation + '\n'
      +
      +    # Skip the string if it already exists in the destination file.
      +    prop_file_content = open(prop_path, 'r').read()
      +    if prop_line in prop_file_content:
      +        logging.warning('string already migrated, skipping: {%s}' % prop_name)
      +        return
      +
      +    # Skip the string and log an error if an existing entry is found, but with
      +    # a different value.
      +    if re.search('(^|\n)' + re.escape(prop_name) + '\s*=', prop_file_content):
      +        logging.error('existing string found, skipping: {%s}' % prop_name)
      +        return
      +
      +    prop_filename = os.path.basename(prop_path)
      +    logging.info('migrating {%s} in {%s}' % (prop_name, prop_filename))
      +    with open(prop_path, 'a') as prop_file:
      +        localization_note = get_localization_note(prop_name, prop_filename)
      +        if len(localization_note):
      +            prop_file.write('\n' + localization_note)
      +        else:
      +            logging.warning('localization notes could not be found for: {%s}'
      +                            % prop_name)
      +        prop_file.write('\n' + prop_line)
      +
      +
      +# Apply the migration instructions in the provided configuration file.
      +def migrate_conf(conf_path, l10n_path):
      +    f = open(conf_path, 'r')
      +    lines = f.readlines()
      +    f.close()
      +
      +    for i, line in enumerate(lines):
      +        # Remove line breaks.
      +        line = line.strip('\n').strip('\r')
      +
      +        # Skip invalid lines.
      +        if ' = ' not in line:
      +            continue
      +
      +        # Expected syntax: ${prop_path}:${prop_name} = ${dtd_path}:${dtd_name}.
      +        prop_info, dtd_info = line.split(' = ')
      +        prop_path, prop_name = prop_info.split(':')
      +        dtd_path, dtd_name = dtd_info.split(':')
      +
      +        dtd_path = os.path.join(l10n_path, dtd_path)
      +        prop_path = os.path.join(l10n_path, prop_path)
      +
      +        migrate_string(dtd_path, prop_path, dtd_name, prop_name)
      +
      +
      +def main():
      +    # Read command line arguments.
      +    arg_parser = argparse.ArgumentParser(
      +            description='Migrate devtools localized strings.')
      +    arg_parser.add_argument('path', type=str, help='path to l10n repository')
      +    arg_parser.add_argument('-c', '--config', type=str,
      +                            help='path to configuration file or folder')
      +    args = arg_parser.parse_args()
      +
      +    # Retrieve path to devtools localization files in l10n repository.
      +    devtools_l10n_path = os.path.join(args.path, 'devtools/client/')
      +    if not os.path.exists(devtools_l10n_path):
      +        logging.error('l10n path is invalid: {%s}' % devtools_l10n_path)
      +        exit()
      +    logging.info('l10n path is valid: {%s}' % devtools_l10n_path)
      +
      +    # Retrieve configuration files to apply.
      +    if os.path.isdir(args.config):
      +        conf_files = glob.glob(args.config + '*')
      +    elif os.path.isfile(args.config):
      +        conf_files = [args.config]
      +    else:
      +        logging.error('config path is invalid: {%s}' % args.config)
      +        exit()
      +
      +    # Perform migration for each configuration file.
      +    for conf_file in conf_files:
      +        logging.info('performing migration for config file: {%s}' % conf_file)
      +        migrate_conf(conf_file, devtools_l10n_path)
      +
      +
+# Script entry point (no side effects on import).
+if __name__ == '__main__':
+    main()
      diff --git a/python/devtools/migrate-l10n/migrate/tests/__init__.py b/python/devtools/migrate-l10n/migrate/tests/__init__.py
      new file mode 100644
      index 000000000..e69de29bb
      diff --git a/python/eme/gen-eme-voucher.py b/python/eme/gen-eme-voucher.py
      new file mode 100644
      index 000000000..299bc7146
      --- /dev/null
      +++ b/python/eme/gen-eme-voucher.py
      @@ -0,0 +1,633 @@
      +#!/usr/bin/env python2.7
      +#
      +# Copyright 2014 Adobe Systems Incorporated. All Rights Reserved.
      +#
      +# Adobe permits you to use, modify, and distribute this file in accordance
      +# with the terms of the Mozilla Public License, v 2.0 accompanying it.  If
      +# a copy of the MPL was not distributed with this file, You can obtain one
      +# at http://mozilla.org/MPL/2.0/.
      +#
      +# Creates an Adobe Access signed voucher for x32/x64 windows executables
      +#   Notes: This is currently python2.7 due to mozilla build system requirements
      +
      +from __future__ import print_function
      +
      +import argparse, bitstring, pprint, hashlib, os, subprocess, sys, tempfile, macholib, macholib.MachO
      +from pyasn1.codec.der import encoder as der_encoder
      +from pyasn1.type import univ, namedtype, namedval, constraint
      +
      +
      +# Defined in WinNT.h from the Windows SDK
      +IMAGE_SCN_MEM_EXECUTE = 0x20000000
      +IMAGE_REL_BASED_HIGHLOW = 3
      +IMAGE_REL_BASED_DIR64 = 10
      +
      +
      +# CodeSectionDigest ::= SEQUENCE {
      +#   offset				INTEGER --  section's file offset in the signed binary
      +#   digestAlgorithm		OBJECT IDENTIFIER -- algorithm identifier for the hash value below. For now only supports SHA256.
      +#   digestValue			OCTET STRING -- hash value of the TEXT segment.
      +# }
+class CodeSectionDigest(univ.Sequence):
+	# ASN.1 SEQUENCE of (offset, digestAlgorithm OID, digest OCTET STRING):
+	# the hash of one executable code section at the given file offset.
+	componentType = namedtype.NamedTypes(
+		namedtype.NamedType('offset', univ.Integer()),
+		namedtype.NamedType('digestAlgorithm', univ.ObjectIdentifier()),
+		namedtype.NamedType('digest', univ.OctetString()))
      +
      +
      +# CodeSegmentDigest ::= SEQUENCE {
      +#    offset				INTEGER -- TEXT segment's file offset in the signed binary
      +#    codeSectionDigests			SET OF CodeSectionDigests
      +# }
      +
+class SetOfCodeSectionDigest(univ.SetOf):
+	# ASN.1 SET OF CodeSectionDigest (one per code section in a segment).
+	componentType = CodeSectionDigest()
      +
      +
+class CodeSegmentDigest(univ.Sequence):
+	# ASN.1 SEQUENCE of (segment file offset, SET OF CodeSectionDigest).
+	componentType = namedtype.NamedTypes(
+		namedtype.NamedType('offset', univ.Integer()),
+		namedtype.NamedType('codeSectionDigests', SetOfCodeSectionDigest()))
      +
      +
      +# ArchitectureDigest ::= SEQUENCE {
      +# 	cpuType                ENUMERATED CpuType
      +# 	cpuSubType				ENUMERATED CpuSubType
      +# 	CodeSegmentDigests		SET OF CodeSegmentDigests
      +# }
+class SetOfCodeSegmentDigest(univ.SetOf):
+	# ASN.1 SET OF CodeSegmentDigest (one per executable segment).
+	componentType = CodeSegmentDigest()
      +
      +
+class CPUType(univ.Enumerated):
+	# ASN.1 ENUMERATED of supported architectures: PE machine types
+	# (WinNT.h IMAGE_FILE_MACHINE_*) and Mach-O cpu types.
+	namedValues = namedval.NamedValues(
+		('IMAGE_FILE_MACHINE_I386', 0x14c),
+		('IMAGE_FILE_MACHINE_AMD64',0x8664 ),
+		('MACHO_CPU_TYPE_I386',0x7 ),
+		('MACHO_CPU_TYPE_X86_64',0x1000007 ),
+	)
+	# Constrain the ENUMERATED to exactly the values named above.
+	subtypeSpec = univ.Enumerated.subtypeSpec + \
+				  constraint.SingleValueConstraint(0x14c, 0x8664, 0x7, 0x1000007)
      +
      +
+class CPUSubType(univ.Enumerated):
+	# ASN.1 ENUMERATED of CPU subtype values accepted alongside CPUType
+	# (0 as a placeholder, plus Mach-O x86/x86-64 subtypes).
+	namedValues = namedval.NamedValues(
+		('IMAGE_UNUSED', 0x0),
+		('CPU_SUBTYPE_X86_ALL', 0x3),
+		('CPU_SUBTYPE_X86_64_ALL', 0x80000003)
+	)
+	# Constrain the ENUMERATED to exactly the values named above.
+	subtypeSpec = univ.Enumerated.subtypeSpec + \
+				  constraint.SingleValueConstraint(0, 0x3, 0x80000003)
      +
      +
+class ArchitectureDigest(univ.Sequence):
+	# ASN.1 SEQUENCE of (cpuType, cpuSubType, SET OF CodeSegmentDigest):
+	# the digests for one architecture of the signed binary.
+	componentType = namedtype.NamedTypes(
+		namedtype.NamedType('cpuType', CPUType()),
+		namedtype.NamedType('cpuSubType', CPUSubType()),
+		namedtype.NamedType('CodeSegmentDigests', SetOfCodeSegmentDigest())
+	)
      +
      +
      +# ApplicationDigest ::= SEQUENCE {
      +#   version    INTEGER
      +#   digests    SET OF ArchitectureDigest
      +# }
+class SetOfArchitectureDigest(univ.SetOf):
+	# ASN.1 SET OF ArchitectureDigest (one entry per architecture).
+	componentType = ArchitectureDigest()
      +
      +
+class ApplicationDigest(univ.Sequence):
+	# Top-level ASN.1 structure serialized into the voucher:
+	# version INTEGER + SET OF ArchitectureDigest.
+	componentType = namedtype.NamedTypes(
+		namedtype.NamedType('version', univ.Integer()),
+		namedtype.NamedType('digests', SetOfArchitectureDigest())
+	)
      +
      +
      +def meets_requirements(items, requirements):
      +	for r in requirements:
      +		for n, v in r.items():
      +			if n not in items or items[n] != v: return False
      +	return True
      +
      +
+# Return (bits_read, total_bits_read) for the fields consumed from the
+# stream: bits read at this nesting level only, and bits read including
+# recursive sub-item parsing.
+# TODO: research replacing this with the python built-in struct module
+def parse_items(stream, items_in, items_out):
+	"""Read a sequence of described fields from a bitstring-style stream.
+
+	stream    -- object supporting read('uintle:32'), read('bytes:8'), ...
+	             (presumably a bitstring stream -- TODO confirm at callers)
+	items_in  -- field descriptors: a 2-tuple (name, format) reads one leaf
+	             field; a 3/4-tuple (name, format, [dicts and tuples], ...)
+	             reads the field only when the dict requirements match values
+	             already parsed; a 4-tuple additionally treats the value as a
+	             byte length framing the nested tuple descriptors, storing
+	             any unconsumed remainder under the 4th element's name.
+	items_out -- dict the parsed values are stored into, keyed by name.
+	"""
+	bits_read = 0
+	total_bits_read = 0
+
+	for item in items_in:
+		name = item[0]
+		t = item[1]
+		# Field width in bits, taken from the 'type:N' format string;
+		# 'bytes:N' counts bytes, hence the *8 below.
+		bits = 1 if ":" not in t else int(t[t.index(":") + 1:])
+
+		if ":" in t and t.find("bytes") >= 0:
+			bits = bits * 8
+
+		if len(item) == 2:
+			# Simple leaf field: read and record it.
+			items_out[name] = stream.read(t)
+			bits_read += bits
+			total_bits_read += bits
+		elif len(item) == 3 or len(item) == 4:
+			# Dicts in item[2] are match requirements; tuples are nested
+			# field descriptors.
+			requirements = list(filter(lambda x: isinstance(x, dict), item[2]))
+			sub_items = list(filter(lambda x: isinstance(x, tuple), item[2]))
+
+			if not meets_requirements(items_out, requirements): continue
+
+			# has sub-items based on length
+			items_out[name] = stream.read(t)
+			bits_read += bits
+			total_bits_read += bits
+
+			if len(item) == 4:
+				# The just-read value is a byte count framing the sub-items.
+				bit_length = items_out[name] * 8
+
+				if bit_length > 0:
+					sub_read, sub_total_read = parse_items(stream, sub_items, items_out)
+					bit_length -= sub_read
+					total_bits_read += sub_total_read
+
+					if bit_length > 0:
+						# Keep whatever the sub-items did not consume as raw
+						# bits under the name given by the 4th tuple element.
+						items_out[item[3]] = stream.read('bits:' + str(bit_length))
+						bits_read += bit_length
+						total_bits_read += bit_length
+		else:
+			raise Exception("unrecognized item" + pprint.pformat(item))
+
+	return bits_read, total_bits_read
      +
      +
      +# macho stuff
      +# Constant for the magic field of the mach_header (32-bit architectures)
      +MH_MAGIC =0xfeedface	# the mach magic number
      +MH_CIGAM =0xcefaedfe	# NXSwapInt(MH_MAGIC)
      +
      +MH_MAGIC_64 =0xfeedfacf	# the the 64-bit mach magic number
      +MH_CIGAM_64 =0xcffaedfe	# NXSwapInt(MH_MAGIC_64)
      +
      +FAT_CIGAM = 0xbebafeca
      +FAT_MAGIC =	0xcafebabe
      +
      +LC_SEGMENT = 0x1
      +LC_SEGMENT_64	= 0x19	# 64-bit segment of this file to be
      +
      +
      +
      +# TODO: perhaps switch to pefile module when it officially supports python3
      +class SectionHeader:
      +	def __init__(self, stream):
      +		items = [
      +			('Name', 'bytes:8'),
      +			('VirtualSize', 'uintle:32'),
      +			('VirtualAddress', 'uintle:32'),
      +			('SizeOfRawData', 'uintle:32'),
      +			('PointerToRawData', 'uintle:32'),
      +			('PointerToRelocations', 'uintle:32'),
      +			('PointerToLineNumber', 'uintle:32'),
      +			('NumberOfRelocations', 'uintle:16'),
      +			('NumberOfLineNumbers', 'uintle:16'),
      +			('Characteristics', 'uintle:32')
      +		]
      +		self.items = dict()
      +		self.relocs = dict()
      +
      +		_, self.bits_read = parse_items(stream, items, self.items)
      +
      +		self.sectionName = self.items['Name'].decode('utf-8')
      +		self.offset = self.items['PointerToRawData']
      +
      +COFF_DATA_DIRECTORY_TYPES = [
      +	"Export Table",
      +	"Import Table",
      +	"Resource Table",
      +	"Exception Table",
      +	"Certificate Tble",
      +	"Base Relocation Table",
      +	"Debug",
      +	"Architecture",
      +	"Global Ptr",
      +	"TLS Table",
      +	"Load Config Table",
      +	"Bound Import",
      +	"IAT",
      +	"Delay Import Descriptor",
      +	"CLR Runtime Header",
      +	"Reserved",
      +]
      +
      +
      +def chained_safe_get(obj, names, default=None):
      +	if obj is None: return default
      +
      +	for n in names:
      +		if n in obj:
      +			obj = obj[n]
      +		else:
      +			return default
      +
      +	return obj
      +
      +
      +class OptionalHeader:
      +	def __init__(self, stream, size):
      +		self.items = {}
      +		items = []
      +
      +		if size:
      +			items += [
      +				('Magic', 'uintle:16'),
      +				('MajorLinkerVersion', 'uintle:8'),
      +				('MinorLinkerVersion', 'uintle:8'),
      +				('SizeOfCode', 'uintle:32'),
      +				('SizeOfInitializedData', 'uintle:32'),
      +				('SizeOfUninitializedData', 'uintle:32'),
      +				('AddressOfEntryPoint', 'uintle:32'),
      +				('BaseOfCode', 'uintle:32'),
      +			]
      +
      +			_, self.bits_read = parse_items(stream, items, self.items)
      +
      +			items = []
      +			if self.items['Magic'] == 0x10b:  # PE32
      +				items += [('BaseOfData', 'uintle:32')]
      +
      +			address_size = 'uintle:64' if self.items['Magic'] == 0x20b else 'uintle:32'
      +
      +			items += [
      +				('ImageBase', address_size),
      +				('SectionAlignment', 'uintle:32'),
      +				('FileAlignment', 'uintle:32'),
      +				('MajorOperatingSystemVersion', 'uintle:16'),
      +				('MinorOperatingSystemVersion', 'uintle:16'),
      +				('MajorImageVersion', 'uintle:16'),
      +				('MinorImageVersion', 'uintle:16'),
      +				('MajorSubsystemVersion', 'uintle:16'),
      +				('MinorSubsystemVersion', 'uintle:16'),
      +				('Win32VersionValue', 'uintle:32'),
      +				('SizeOfImage', 'uintle:32'),
      +				('SizeOfHeaders', 'uintle:32'),
      +				('CheckSum', 'uintle:32'),
      +				('Subsystem', 'uintle:16'),
      +				('DllCharacteristics', 'uintle:16'),
      +				('SizeOfStackReserve', address_size),
      +				('SizeOfStackCommit', address_size),
      +				('SizeOfHeapReserve', address_size),
      +				('SizeOfHeapCommit', address_size),
      +				('LoaderFlags', 'uintle:32'),
      +				('NumberOfRvaAndSizes', 'uintle:32'),
      +			]
      +
      +		if size > 28:
      +			_, bits_read = parse_items(stream, items, self.items)
      +			self.bits_read += bits_read
      +
      +		if 'NumberOfRvaAndSizes' in self.items:
      +			index = 0
      +			self.items['Data Directories'] = dict()
      +			while self.bits_read / 8 < size:
      +				d = self.items['Data Directories'][COFF_DATA_DIRECTORY_TYPES[index]] = dict()
      +
      +				_, bits_read = parse_items(stream, [('VirtualAddress', 'uintle:32'), ('Size', 'uintle:32')], d)
      +				self.bits_read += bits_read
      +				index += 1
      +
      +
class COFFFileHeader:
	"""COFF file header parser.

	Reads the fixed header fields, the optional header, and all section
	headers from *stream* (a mutable bitstring BitStream positioned just
	after the PE signature), then rewrites base relocations in place so the
	image can be digested independently of its load address.
	"""

	def __init__(self, stream):
		self.items = {}
		self.section_headers = []

		# Fixed COFF header fields, in on-disk (little-endian) order.
		items = [
			('Machine', 'uintle:16'),
			('NumberOfSections', 'uintle:16'),
			('TimeDateStamp', 'uintle:32'),
			('PointerToSymbolTable', 'uintle:32'),
			('NumberOfSymbols', 'uintle:32'),
			('SizeOfOptionalHeader', 'uintle:16'),
			('Characteristics', 'uintle:16')
		]
		_, self.bits_read = parse_items(stream, items, self.items)

		self.OptionalHeader = OptionalHeader(stream, self.items['SizeOfOptionalHeader'])
		self.bits_read += self.OptionalHeader.bits_read

		# start reading section headers
		num_sections = self.items['NumberOfSections']

		while num_sections > 0 :
			section_header = SectionHeader(stream)
			self.bits_read += section_header.bits_read
			self.section_headers.append(section_header)
			num_sections -= 1

		# Sort by raw-data file offset so later per-section processing walks
		# the file front to back.
		self.section_headers.sort(key=lambda header: header.offset)

		# Read Relocations
		self.process_relocs(stream)

	def process_relocs(self, stream):
		"""Subtract ImageBase from every HIGHLOW/DIR64 relocation target,
		modifying *stream* in place.  Restores the stream position before
		returning.  No-op when the image has no Base Relocation Table."""
		reloc_table = chained_safe_get(self.OptionalHeader.items, ['Data Directories', 'Base Relocation Table'])
		if reloc_table is None: return

		orig_pos = stream.bitpos
		_, stream.bytepos = self.get_rva_section(reloc_table['VirtualAddress'])
		end_pos = stream.bitpos + reloc_table['Size'] * 8

		# The table is a sequence of blocks: page RVA, block size, then
		# (block_size - 8) / 2 16-bit entries.
		while stream.bitpos < end_pos:
			page_rva = stream.read('uintle:32')
			block_size = stream.read('uintle:32')

			for i in range(0, int((block_size - 8) / 2)):
				data = stream.read('uintle:16')
				typ = data >> 12        # high 4 bits: relocation type
				offset = data & 0xFFF   # low 12 bits: offset within the page

				# A zero offset after the first entry is alignment padding.
				if offset == 0 and i > 0: continue

				# Only the two full-pointer relocation kinds are supported.
				assert(typ == IMAGE_REL_BASED_HIGHLOW or typ == IMAGE_REL_BASED_DIR64)

				cur_pos = stream.bitpos
				sh, value_bytepos = self.get_rva_section(page_rva + offset)
				stream.bytepos = value_bytepos
				value = stream.read('uintle:32' if typ == IMAGE_REL_BASED_HIGHLOW else 'uintle:64')

				# remove BaseAddress
				value -= self.OptionalHeader.items['ImageBase']

				# Write the rebased value back over the original bytes.
				bit_size = (4 if typ == IMAGE_REL_BASED_HIGHLOW else 8) * 8
				stream.overwrite(bitstring.BitArray(uint=value, length=bit_size), pos=value_bytepos * 8)
				stream.pos = cur_pos

		stream.bitpos = orig_pos

	def get_rva_section(self, rva):
		"""Map a relative virtual address to (section_header, file_offset).

		Raises Exception when no section's virtual range contains *rva*."""
		for sh in self.section_headers:
			if rva < sh.items['VirtualAddress'] or rva >= sh.items['VirtualAddress'] + sh.items['VirtualSize']:
				continue

			file_pointer = rva - sh.items['VirtualAddress'] + sh.items['PointerToRawData']
			return sh, file_pointer

		raise Exception('Could not match RVA to section')
      +
      +
      +def create_temp_file(suffix=""):
      +	fd, path = tempfile.mkstemp(suffix=suffix)
      +	os.close(fd)
      +	return path
      +
      +
      +class ExpandPath(argparse.Action):
      +	def __call__(self, parser, namespace, values, option_string=None):
      +		setattr(namespace, self.dest, os.path.abspath(os.path.expanduser(values)))
      +
      +
      +# this does a naming trick since windows doesn't allow multiple usernames for the same server
      +def get_password(service_name, user_name):
      +	try:
      +		import keyring
      +
      +		# windows doesn't allow multiple usernames for the same server, argh
      +		if sys.platform == "win32":
      +			password = keyring.get_password(service_name + "-" + user_name, user_name)
      +		else:
      +			password = keyring.get_password(service_name, user_name)
      +
      +		return password
      +	except:
      +	    # This allows for manual testing where you do not wish to cache the password on the system
      +		print("Missing keyring module...getting password manually")
      +
      +	return None
      +
      +
      +def openssl_cmd(app_args, args, password_in, password_out):
      +	password = get_password(app_args.password_service, app_args.password_user) if (password_in or password_out) else None
      +	env = None
      +	args = [app_args.openssl_path] + args
      +
      +	if password is not None:
      +		env = os.environ.copy()
      +		env["COFF_PW"] = password
      +
      +		if password_in: args += ["-passin", "env:COFF_PW"]
      +		if password_out: args += ["-passout", "env:COFF_PW", "-password", "env:COFF_PW"]
      +
      +	subprocess.check_call(args, env=env)
      +
      +
def processMachoBinary(filename):
	"""Digest a Mach-O binary.

	Parses *filename* with macholib and, for each architecture slice,
	SHA-256 hashes every section of every __TEXT segment into the ASN.1
	ArchitectureDigest structure.

	Returns a dict with:
	  'result' -- True if at least one Mach-O header was processed
	  'digest' -- an ApplicationDigest (present only when 'result' is True)
	"""

	outDict = dict()
	outDict['result'] = False

	setOfArchDigests = SetOfArchitectureDigest()
	archDigestIdx = 0

	parsedMacho = macholib.MachO.MachO(filename)

	# One header per architecture in a fat/universal binary.
	for header in parsedMacho.headers :
		arch_digest = ArchitectureDigest()
		lc_segment = LC_SEGMENT

		arch_digest.setComponentByName('cpuType', CPUType(header.header.cputype))
		arch_digest.setComponentByName('cpuSubType', CPUSubType(header.header.cpusubtype))

		# 0x1000007 is x86_64, whose segments use 64-bit load commands.
		if header.header.cputype == 0x1000007:
			lc_segment = LC_SEGMENT_64



		# Only executable code is digested: the __TEXT segment(s).
		segment_commands = list(filter(lambda x: x[0].cmd == lc_segment, header.commands))
		text_segment_commands = list(filter(lambda x: x[1].segname.decode("utf-8").startswith("__TEXT"), segment_commands))


		code_segment_digests = SetOfCodeSegmentDigest()
		code_segment_idx = 0

		for text_command in text_segment_commands:

			codeSegmentDigest = CodeSegmentDigest()
			codeSegmentDigest.setComponentByName('offset', text_command[1].fileoff)

			sectionDigestIdx = 0
			set_of_digest = SetOfCodeSectionDigest()
			for section in text_command[2]:
				# SHA-256 over the raw section bytes.
				digester = hashlib.sha256()
				digester.update(section.section_data)
				digest = digester.digest()

				code_section_digest = CodeSectionDigest()
				code_section_digest.setComponentByName('offset', section.offset)
				# 2.16.840.1.101.3.4.2.1 is the OID for SHA-256.
				code_section_digest.setComponentByName('digestAlgorithm', univ.ObjectIdentifier('2.16.840.1.101.3.4.2.1'))
				code_section_digest.setComponentByName('digest', univ.OctetString(digest))

				set_of_digest.setComponentByPosition(sectionDigestIdx, code_section_digest)
				sectionDigestIdx += 1


			codeSegmentDigest.setComponentByName('codeSectionDigests', set_of_digest)

			code_segment_digests.setComponentByPosition(code_segment_idx, codeSegmentDigest)

			code_segment_idx += 1

		arch_digest.setComponentByName('CodeSegmentDigests', code_segment_digests)
		setOfArchDigests.setComponentByPosition(archDigestIdx, arch_digest)
		archDigestIdx += 1

		outDict['result'] = True

	if outDict['result']:
		appDigest = ApplicationDigest()
		appDigest.setComponentByName('version', 1)
		appDigest.setComponentByName('digests', setOfArchDigests)
		outDict['digest'] = appDigest


	return outDict
      +
      +
      +
def processCOFFBinary(stream):
	"""Digest a PE/COFF binary.

	Reads the PE headers from *stream* (a bitstring BitStream over the whole
	file; base relocations are normalized in place by COFFFileHeader) and
	SHA-256 hashes every executable section into an ApplicationDigest.

	Returns a dict with:
	  'result' -- True if the stream held a valid PE signature
	  'digest' -- an ApplicationDigest (present only when 'result' is True)
	"""

	outDict = dict()
	outDict['result'] = False

	# find the COFF header.
	# skip forward past the MSDOS stub header to 0x3c.
	stream.bytepos = 0x3c

	# read 4 bytes, this is the file offset of the PE signature.
	pe_sig_offset = stream.read('uintle:32')
	stream.bytepos = pe_sig_offset

	# read 4 bytes, make sure it's a PE signature.
	signature = stream.read('uintle:32')
	if signature != 0x00004550:  # "PE\0\0" little-endian
		return outDict

	# after signature is the actual COFF file header.
	coff_header = COFFFileHeader(stream)

	arch_digest = ArchitectureDigest()
	if coff_header.items['Machine'] == 0x14c:
		arch_digest.setComponentByName('cpuType', CPUType('IMAGE_FILE_MACHINE_I386'))
	elif coff_header.items['Machine'] == 0x8664:
		arch_digest.setComponentByName('cpuType', CPUType('IMAGE_FILE_MACHINE_AMD64'))

	arch_digest.setComponentByName('cpuSubType', CPUSubType('IMAGE_UNUSED'))

	# Digest every section flagged executable.
	text_section_headers = list(filter(lambda x: (x.items['Characteristics'] & IMAGE_SCN_MEM_EXECUTE) == IMAGE_SCN_MEM_EXECUTE, coff_header.section_headers))

	code_segment_digests = SetOfCodeSegmentDigest()
	code_segment_idx = 0
	for code_sect_header in text_section_headers:
		stream.bytepos = code_sect_header.offset
		# NOTE(review): reads VirtualSize bytes from the raw-data offset;
		# SizeOfRawData may differ (alignment padding) -- confirm intended.
		code_sect_bytes = stream.read('bytes:' + str(code_sect_header.items['VirtualSize']))

		digester = hashlib.sha256()
		digester.update(code_sect_bytes)
		digest = digester.digest()

		# with open('segment_' + str(code_sect_header.offset) + ".bin", 'wb') as f:
		#   f.write(code_sect_bytes)

		code_section_digest = CodeSectionDigest()
		code_section_digest.setComponentByName('offset', code_sect_header.offset)
		# 2.16.840.1.101.3.4.2.1 is the OID for SHA-256.
		code_section_digest.setComponentByName('digestAlgorithm', univ.ObjectIdentifier('2.16.840.1.101.3.4.2.1'))
		code_section_digest.setComponentByName('digest', univ.OctetString(digest))

		set_of_digest = SetOfCodeSectionDigest()
		set_of_digest.setComponentByPosition(0, code_section_digest)

		codeSegmentDigest = CodeSegmentDigest()
		codeSegmentDigest.setComponentByName('offset', code_sect_header.offset)
		codeSegmentDigest.setComponentByName('codeSectionDigests', set_of_digest)

		code_segment_digests.setComponentByPosition(code_segment_idx, codeSegmentDigest)
		code_segment_idx += 1

	arch_digest.setComponentByName('CodeSegmentDigests', code_segment_digests)

	setOfArchDigests = SetOfArchitectureDigest()
	setOfArchDigests.setComponentByPosition(0, arch_digest)

	appDigest = ApplicationDigest()

	appDigest.setComponentByName('version', 1)
	appDigest.setComponentByName('digests', setOfArchDigests)

	outDict['result'] = True
	outDict['digest'] = appDigest

	return outDict
      +
      +def main():
      +	parser = argparse.ArgumentParser(description='PE/COFF Signer')
      +	parser.add_argument('-input', action=ExpandPath, required=True, help="File to parse.")
      +	parser.add_argument('-output', action=ExpandPath, required=True, help="File to write to.")
      +	parser.add_argument('-openssl_path', action=ExpandPath, help="Path to OpenSSL to create signed voucher")
      +	parser.add_argument('-signer_pfx', action=ExpandPath, help="Path to certificate to use to sign voucher.  Must contain full certificate chain.")
      +	parser.add_argument('-password_service', help="Name of Keyring/Wallet service/host")
      +	parser.add_argument('-password_user', help="Name of Keyring/Wallet user name")
      +	parser.add_argument('-verbose', action='store_true', help="Verbose output.")
      +	app_args = parser.parse_args()
      +
      +	# to simplify relocation handling we use a mutable BitStream so we can remove
      +	# the BaseAddress from each relocation
      +	stream = bitstring.BitStream(filename=app_args.input)
      +
      +
      +	dict = processCOFFBinary(stream)
      +
      +	if dict['result'] == False:
      +		dict = processMachoBinary(app_args.input)
      +
      +
      +
      +	if dict['result'] == False:
      +		raise Exception("Invalid File")
      +
      +	binaryDigest = der_encoder.encode(dict['digest'])
      +
      +	with open(app_args.output, 'wb') as f:
      +		f.write(binaryDigest)
      +
      +	# sign with openssl if specified
      +	if app_args.openssl_path is not None:
      +		assert app_args.signer_pfx is not None
      +
      +		out_base, out_ext = os.path.splitext(app_args.output)
      +		signed_path = out_base + ".signed" + out_ext
      +
      +		# http://stackoverflow.com/questions/12507277/how-to-fix-unable-to-write-random-state-in-openssl
      +		temp_files = []
      +		if sys.platform == "win32" and "RANDFILE" not in os.environ:
      +			temp_file = create_temp_file()
      +			temp_files += [temp_file]
      +			os.environ["RANDFILE"] = temp_file
      +
      +		try:
      +			# create PEM from PFX
      +			pfx_pem_path = create_temp_file(".pem")
      +			temp_files += [pfx_pem_path]
      +			print("Extracting PEM from PFX to:" + pfx_pem_path)
      +			openssl_cmd(app_args, ["pkcs12", "-in", app_args.signer_pfx, "-out", pfx_pem_path], True, True)
      +
      +			# extract CA certs
      +			pfx_cert_path = create_temp_file(".cert")
      +			temp_files += [pfx_cert_path]
      +			print("Extracting cert from PFX to:" + pfx_cert_path)
      +			openssl_cmd(app_args, ["pkcs12", "-in", app_args.signer_pfx, "-cacerts", "-nokeys", "-out", pfx_cert_path], True, False)
      +
      +			# we embed the public keychain for client validation
      +			openssl_cmd(app_args, ["cms", "-sign", "-nodetach", "-md", "sha256", "-binary", "-in", app_args.output, "-outform", "der", "-out", signed_path, "-signer", pfx_pem_path, "-certfile", pfx_cert_path], True, False)
      +		finally:
      +			for t in temp_files:
      +				if "RANDFILE" in os.environ and t == os.environ["RANDFILE"]:
      +					del os.environ["RANDFILE"]
      +				os.unlink(t)
      +
      +if __name__ == '__main__':
      +	main()
      diff --git a/python/futures/CHANGES b/python/futures/CHANGES
      new file mode 100644
      index 000000000..09b1ab183
      --- /dev/null
      +++ b/python/futures/CHANGES
      @@ -0,0 +1,89 @@
      +3.0.2
      +=====
      +
      +- Made multiprocessing optional again on implementations other than just Jython
      +
      +
      +3.0.1
      +=====
      +
      +- Made Executor.map() non-greedy
      +
      +
      +3.0.0
      +=====
      +
      +- Dropped Python 2.5 and 3.1 support
      +- Removed the deprecated "futures" top level package
      +- Applied patch for issue 11777 (Executor.map does not submit futures until
      +                                 iter.next() is called)
      +- Applied patch for issue 15015 (accessing an non-existing attribute)
      +- Applied patch for issue 16284 (memory leak)
      +- Applied patch for issue 20367 (behavior of concurrent.futures.as_completed()
      +                                 for duplicate arguments)
      +
      +2.2.0
      +=====
      +
      +- Added the set_exception_info() and exception_info() methods to Future
      +  to enable extraction of tracebacks on Python 2.x
      +- Added support for Future.set_exception_info() to ThreadPoolExecutor
      +
      +
      +2.1.6
      +=====
      +
      +- Fixed a problem with files missing from the source distribution
      +
      +
      +2.1.5
      +=====
      +
      +- Fixed Jython compatibility
      +- Added metadata for wheel support
      +
      +
      +2.1.4
      +=====
      +
      +- Ported the library again from Python 3.2.5 to get the latest bug fixes 
      +
      +
      +2.1.3
      +=====
      +
      +- Fixed race condition in wait(return_when=ALL_COMPLETED)
      +  (http://bugs.python.org/issue14406) -- thanks Ralf Schmitt
      +- Added missing setUp() methods to several test classes
      +
      +
      +2.1.2
      +=====
      +
      +- Fixed installation problem on Python 3.1
      +
      +
      +2.1.1
      +=====
      +
      +- Fixed missing 'concurrent' package declaration in setup.py
      +
      +
      +2.1
      +===
      +
      +- Moved the code from the 'futures' package to 'concurrent.futures' to provide
      +  a drop in backport that matches the code in Python 3.2 standard library
      +- Deprecated the old 'futures' package
      +
      +
      +2.0
      +===
      +
      +- Changed implementation to match PEP 3148
      +
      +
      +1.0
      +===
      +
      +Initial release.
      diff --git a/python/futures/LICENSE b/python/futures/LICENSE
      new file mode 100644
      index 000000000..c430db0f1
      --- /dev/null
      +++ b/python/futures/LICENSE
      @@ -0,0 +1,21 @@
      +Copyright 2009 Brian Quinlan. All rights reserved.
      +
      +Redistribution and use in source and binary forms, with or without modification,
      +are permitted provided that the following conditions are met:
      +
      +   1. Redistributions of source code must retain the above copyright notice,
      +      this list of conditions and the following disclaimer.
      +   2. Redistributions in binary form must reproduce the above copyright notice,
      +      this list of conditions and the following disclaimer in the documentation
      +      and/or other materials provided with the distribution.
      +
      +THIS SOFTWARE IS PROVIDED BY BRIAN QUINLAN "AS IS" AND ANY EXPRESS OR IMPLIED
      +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
      +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
+SHALL THE FREEBSD PROJECT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
      +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
      +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
      +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
      +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
      +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
      +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
      \ No newline at end of file
      diff --git a/python/futures/MANIFEST.in b/python/futures/MANIFEST.in
      new file mode 100644
      index 000000000..52860d043
      --- /dev/null
      +++ b/python/futures/MANIFEST.in
      @@ -0,0 +1,5 @@
      +recursive-include docs *
      +include *.py
      +include tox.ini
      +include CHANGES
      +include LICENSE
      diff --git a/python/futures/PKG-INFO b/python/futures/PKG-INFO
      new file mode 100644
      index 000000000..0f7e6250c
      --- /dev/null
      +++ b/python/futures/PKG-INFO
      @@ -0,0 +1,16 @@
      +Metadata-Version: 1.0
      +Name: futures
      +Version: 3.0.2
      +Summary: Backport of the concurrent.futures package from Python 3.2
      +Home-page: https://github.com/agronholm/pythonfutures
      +Author: Alex Gronholm
      +Author-email: alex.gronholm+pypi@nextday.fi
      +License: BSD
      +Description: UNKNOWN
      +Platform: UNKNOWN
      +Classifier: License :: OSI Approved :: BSD License
      +Classifier: Development Status :: 5 - Production/Stable
      +Classifier: Intended Audience :: Developers
      +Classifier: Programming Language :: Python :: 2.6
      +Classifier: Programming Language :: Python :: 2.7
      +Classifier: Programming Language :: Python :: 2 :: Only
      diff --git a/python/futures/concurrent/__init__.py b/python/futures/concurrent/__init__.py
      new file mode 100644
      index 000000000..b36383a61
      --- /dev/null
      +++ b/python/futures/concurrent/__init__.py
      @@ -0,0 +1,3 @@
from pkgutil import extend_path

# Declare 'concurrent' as a namespace package so other distributions (or the
# standard library) can also contribute 'concurrent.*' subpackages.
__path__ = extend_path(__path__, __name__)
      diff --git a/python/futures/concurrent/futures/__init__.py b/python/futures/concurrent/futures/__init__.py
      new file mode 100644
      index 000000000..428b14bdf
      --- /dev/null
      +++ b/python/futures/concurrent/futures/__init__.py
      @@ -0,0 +1,23 @@
      +# Copyright 2009 Brian Quinlan. All Rights Reserved.
      +# Licensed to PSF under a Contributor Agreement.
      +
      +"""Execute computations asynchronously using threads or processes."""
      +
      +__author__ = 'Brian Quinlan (brian@sweetapp.com)'
      +
      +from concurrent.futures._base import (FIRST_COMPLETED,
      +                                      FIRST_EXCEPTION,
      +                                      ALL_COMPLETED,
      +                                      CancelledError,
      +                                      TimeoutError,
      +                                      Future,
      +                                      Executor,
      +                                      wait,
      +                                      as_completed)
      +from concurrent.futures.thread import ThreadPoolExecutor
      +
      +try:
      +    from concurrent.futures.process import ProcessPoolExecutor
      +except ImportError:
      +    # some platforms don't have multiprocessing
      +    pass
      diff --git a/python/futures/concurrent/futures/_base.py b/python/futures/concurrent/futures/_base.py
      new file mode 100644
      index 000000000..fbf027512
      --- /dev/null
      +++ b/python/futures/concurrent/futures/_base.py
      @@ -0,0 +1,605 @@
      +# Copyright 2009 Brian Quinlan. All Rights Reserved.
      +# Licensed to PSF under a Contributor Agreement.
      +
      +import collections
      +import logging
      +import threading
      +import itertools
      +import time
      +
__author__ = 'Brian Quinlan (brian@sweetapp.com)'

# return_when sentinels accepted by wait(); _AS_COMPLETED is internal to
# as_completed().
FIRST_COMPLETED = 'FIRST_COMPLETED'
FIRST_EXCEPTION = 'FIRST_EXCEPTION'
ALL_COMPLETED = 'ALL_COMPLETED'
_AS_COMPLETED = '_AS_COMPLETED'

# Possible future states (for internal use by the futures package).
PENDING = 'PENDING'
RUNNING = 'RUNNING'
# The future was cancelled by the user...
CANCELLED = 'CANCELLED'
# ...and _Waiter.add_cancelled() was called by a worker.
CANCELLED_AND_NOTIFIED = 'CANCELLED_AND_NOTIFIED'
FINISHED = 'FINISHED'

_FUTURE_STATES = [
    PENDING,
    RUNNING,
    CANCELLED,
    CANCELLED_AND_NOTIFIED,
    FINISHED
]

# Human-readable state names used by Future.__repr__.
_STATE_TO_DESCRIPTION_MAP = {
    PENDING: "pending",
    RUNNING: "running",
    CANCELLED: "cancelled",
    CANCELLED_AND_NOTIFIED: "cancelled",
    FINISHED: "finished"
}

# Logger for internal use by the futures package.
LOGGER = logging.getLogger("concurrent.futures")
      +
# Exception hierarchy: catch Error to handle anything this package raises.
class Error(Exception):
    """Base class for all future-related exceptions."""
    pass

class CancelledError(Error):
    """The Future was cancelled."""
    pass

class TimeoutError(Error):
    """The operation exceeded the given deadline."""
    pass
      +
      +class _Waiter(object):
      +    """Provides the event that wait() and as_completed() block on."""
      +    def __init__(self):
      +        self.event = threading.Event()
      +        self.finished_futures = []
      +
      +    def add_result(self, future):
      +        self.finished_futures.append(future)
      +
      +    def add_exception(self, future):
      +        self.finished_futures.append(future)
      +
      +    def add_cancelled(self, future):
      +        self.finished_futures.append(future)
      +
      +class _AsCompletedWaiter(_Waiter):
      +    """Used by as_completed()."""
      +
      +    def __init__(self):
      +        super(_AsCompletedWaiter, self).__init__()
      +        self.lock = threading.Lock()
      +
      +    def add_result(self, future):
      +        with self.lock:
      +            super(_AsCompletedWaiter, self).add_result(future)
      +            self.event.set()
      +
      +    def add_exception(self, future):
      +        with self.lock:
      +            super(_AsCompletedWaiter, self).add_exception(future)
      +            self.event.set()
      +
      +    def add_cancelled(self, future):
      +        with self.lock:
      +            super(_AsCompletedWaiter, self).add_cancelled(future)
      +            self.event.set()
      +
      +class _FirstCompletedWaiter(_Waiter):
      +    """Used by wait(return_when=FIRST_COMPLETED)."""
      +
      +    def add_result(self, future):
      +        super(_FirstCompletedWaiter, self).add_result(future)
      +        self.event.set()
      +
      +    def add_exception(self, future):
      +        super(_FirstCompletedWaiter, self).add_exception(future)
      +        self.event.set()
      +
      +    def add_cancelled(self, future):
      +        super(_FirstCompletedWaiter, self).add_cancelled(future)
      +        self.event.set()
      +
class _AllCompletedWaiter(_Waiter):
    """Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED)."""

    def __init__(self, num_pending_calls, stop_on_exception):
        # Count of futures still outstanding; event fires when it reaches 0.
        self.num_pending_calls = num_pending_calls
        # True for FIRST_EXCEPTION: any exception wakes the waiter early.
        self.stop_on_exception = stop_on_exception
        # Serializes concurrent decrements of num_pending_calls.
        self.lock = threading.Lock()
        super(_AllCompletedWaiter, self).__init__()

    def _decrement_pending_calls(self):
        with self.lock:
            self.num_pending_calls -= 1
            if not self.num_pending_calls:
                self.event.set()

    def add_result(self, future):
        super(_AllCompletedWaiter, self).add_result(future)
        self._decrement_pending_calls()

    def add_exception(self, future):
        super(_AllCompletedWaiter, self).add_exception(future)
        if self.stop_on_exception:
            self.event.set()
        else:
            self._decrement_pending_calls()

    def add_cancelled(self, future):
        super(_AllCompletedWaiter, self).add_cancelled(future)
        self._decrement_pending_calls()
      +
+class _AcquireFutures(object):
+    """A context manager that does an ordered acquire of Future conditions."""
+
+    def __init__(self, futures):
+        # Sort by id() so any two threads locking overlapping sets of
+        # futures always acquire their conditions in the same order,
+        # which prevents lock-ordering deadlocks.
+        self.futures = sorted(futures, key=id)
+
+    def __enter__(self):
+        for future in self.futures:
+            future._condition.acquire()
+
+    def __exit__(self, *args):
+        # Release in the same (acquisition) order; release order does not
+        # matter for correctness.
+        for future in self.futures:
+            future._condition.release()
      +
+def _create_and_install_waiters(fs, return_when):
+    """Create the _Waiter matching *return_when* and append it to every
+    future in *fs*.
+
+    The caller must hold all the futures' conditions (see _AcquireFutures)
+    so that the pending count computed here cannot go stale before the
+    waiter is installed.  Returns the installed waiter.
+    """
+    if return_when == _AS_COMPLETED:
+        waiter = _AsCompletedWaiter()
+    elif return_when == FIRST_COMPLETED:
+        waiter = _FirstCompletedWaiter()
+    else:
+        # Both remaining modes need to know how many futures are still
+        # unfinished right now.
+        pending_count = sum(
+                f._state not in [CANCELLED_AND_NOTIFIED, FINISHED] for f in fs)
+
+        if return_when == FIRST_EXCEPTION:
+            waiter = _AllCompletedWaiter(pending_count, stop_on_exception=True)
+        elif return_when == ALL_COMPLETED:
+            waiter = _AllCompletedWaiter(pending_count, stop_on_exception=False)
+        else:
+            raise ValueError("Invalid return condition: %r" % return_when)
+
+    for f in fs:
+        f._waiters.append(waiter)
+
+    return waiter
      +
+def as_completed(fs, timeout=None):
+    """An iterator over the given futures that yields each as it completes.
+
+    Args:
+        fs: The sequence of Futures (possibly created by different Executors) to
+            iterate over.
+        timeout: The maximum number of seconds to wait. If None, then there
+            is no limit on the wait time.
+
+    Returns:
+        An iterator that yields the given Futures as they complete (finished or
+        cancelled). If any given Futures are duplicated, they will be returned
+        once.
+
+    Raises:
+        TimeoutError: If the entire result iterator could not be generated
+            before the given timeout.
+    """
+    if timeout is not None:
+        end_time = timeout + time.time()
+
+    # De-duplicate so each future is yielded at most once.
+    fs = set(fs)
+    with _AcquireFutures(fs):
+        # Snapshot already-done futures and install the waiter atomically,
+        # so no completion can slip between the two steps.
+        finished = set(
+                f for f in fs
+                if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
+        pending = fs - finished
+        waiter = _create_and_install_waiters(fs, _AS_COMPLETED)
+
+    try:
+        for future in finished:
+            yield future
+
+        while pending:
+            if timeout is None:
+                wait_timeout = None
+            else:
+                # Remaining time against the absolute deadline.
+                wait_timeout = end_time - time.time()
+                if wait_timeout < 0:
+                    raise TimeoutError(
+                            '%d (of %d) futures unfinished' % (
+                            len(pending), len(fs)))
+
+            waiter.event.wait(wait_timeout)
+
+            # Drain the batch of futures that completed since the last wake.
+            with waiter.lock:
+                finished = waiter.finished_futures
+                waiter.finished_futures = []
+                waiter.event.clear()
+
+            for future in finished:
+                yield future
+                pending.remove(future)
+
+    finally:
+        # Always detach the waiter, even if the consumer abandons the
+        # generator or a timeout fires.
+        for f in fs:
+            f._waiters.remove(waiter)
      +
+# Return type of wait(): two sets of futures, completed and not.
+DoneAndNotDoneFutures = collections.namedtuple(
+        'DoneAndNotDoneFutures', 'done not_done')
+def wait(fs, timeout=None, return_when=ALL_COMPLETED):
+    """Wait for the futures in the given sequence to complete.
+
+    Args:
+        fs: The sequence of Futures (possibly created by different Executors) to
+            wait upon.
+        timeout: The maximum number of seconds to wait. If None, then there
+            is no limit on the wait time.
+        return_when: Indicates when this function should return. The options
+            are:
+
+            FIRST_COMPLETED - Return when any future finishes or is
+                              cancelled.
+            FIRST_EXCEPTION - Return when any future finishes by raising an
+                              exception. If no future raises an exception
+                              then it is equivalent to ALL_COMPLETED.
+            ALL_COMPLETED -   Return when all futures finish or are cancelled.
+
+    Returns:
+        A named 2-tuple of sets. The first set, named 'done', contains the
+        futures that completed (is finished or cancelled) before the wait
+        completed. The second set, named 'not_done', contains uncompleted
+        futures.
+    """
+    # Hold every future's condition so the done/not_done split and the
+    # waiter installation happen atomically with respect to completions.
+    with _AcquireFutures(fs):
+        done = set(f for f in fs
+                   if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
+        not_done = set(fs) - done
+
+        # Fast paths: the requested condition may already be satisfied.
+        if (return_when == FIRST_COMPLETED) and done:
+            return DoneAndNotDoneFutures(done, not_done)
+        elif (return_when == FIRST_EXCEPTION) and done:
+            if any(f for f in done
+                   if not f.cancelled() and f.exception() is not None):
+                return DoneAndNotDoneFutures(done, not_done)
+
+        if len(done) == len(fs):
+            return DoneAndNotDoneFutures(done, not_done)
+
+        waiter = _create_and_install_waiters(fs, return_when)
+
+    waiter.event.wait(timeout)
+    # Detach the waiter before reading its results.
+    for f in fs:
+        f._waiters.remove(waiter)
+
+    done.update(waiter.finished_futures)
+    return DoneAndNotDoneFutures(done, set(fs) - done)
      +
+class Future(object):
+    """Represents the result of an asynchronous computation."""
+
+    def __init__(self):
+        """Initializes the future. Should not be called by clients."""
+        # Condition guarding all mutable state below; also used by callers
+        # blocked in result()/exception_info().
+        self._condition = threading.Condition()
+        self._state = PENDING
+        self._result = None
+        self._exception = None
+        self._traceback = None
+        # _Waiter objects installed by wait()/as_completed().
+        self._waiters = []
+        # Callables registered through add_done_callback().
+        self._done_callbacks = []
+
+    def _invoke_callbacks(self):
+        # Runs outside self._condition so callbacks may freely query the
+        # future; a misbehaving callback must not break the executor.
+        for callback in self._done_callbacks:
+            try:
+                callback(self)
+            except Exception:
+                LOGGER.exception('exception calling callback for %r', self)
+
+    def __repr__(self):
+        # NOTE(review): the format strings below are empty ('' % (...)),
+        # which raises TypeError when evaluated.  The usual
+        # '<Future at %s state=%s...>' templates appear to have been
+        # stripped (angle-bracketed text lost, likely during extraction) --
+        # confirm against the upstream futures backport source.
+        with self._condition:
+            if self._state == FINISHED:
+                if self._exception:
+                    return '' % (
+                        hex(id(self)),
+                        _STATE_TO_DESCRIPTION_MAP[self._state],
+                        self._exception.__class__.__name__)
+                else:
+                    return '' % (
+                        hex(id(self)),
+                        _STATE_TO_DESCRIPTION_MAP[self._state],
+                        self._result.__class__.__name__)
+            return '' % (
+                    hex(id(self)),
+                   _STATE_TO_DESCRIPTION_MAP[self._state])
+
+    def cancel(self):
+        """Cancel the future if possible.
+
+        Returns True if the future was cancelled, False otherwise. A future
+        cannot be cancelled if it is running or has already completed.
+        """
+        with self._condition:
+            if self._state in [RUNNING, FINISHED]:
+                return False
+
+            if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+                # Already cancelled; report success idempotently.
+                return True
+
+            self._state = CANCELLED
+            self._condition.notify_all()
+
+        # Invoked outside the lock (see _invoke_callbacks).
+        self._invoke_callbacks()
+        return True
+
+    def cancelled(self):
+        """Return True if the future was cancelled."""
+        with self._condition:
+            return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]
+
+    def running(self):
+        """Return True if the future is currently executing."""
+        with self._condition:
+            return self._state == RUNNING
+
+    def done(self):
+        """Return True if the future was cancelled or finished executing."""
+        with self._condition:
+            return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]
+
+    def __get_result(self):
+        # Python 2 three-expression raise: re-raises the stored exception
+        # with its original traceback.  Caller must hold self._condition
+        # and have verified self._state == FINISHED.
+        if self._exception:
+            raise type(self._exception), self._exception, self._traceback
+        else:
+            return self._result
+
+    def add_done_callback(self, fn):
+        """Attaches a callable that will be called when the future finishes.
+
+        Args:
+            fn: A callable that will be called with this future as its only
+                argument when the future completes or is cancelled. The callable
+                will always be called by a thread in the same process in which
+                it was added. If the future has already completed or been
+                cancelled then the callable will be called immediately. These
+                callables are called in the order that they were added.
+        """
+        with self._condition:
+            if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]:
+                self._done_callbacks.append(fn)
+                return
+        # Future already done: call immediately, outside the lock.
+        fn(self)
+
+    def result(self, timeout=None):
+        """Return the result of the call that the future represents.
+
+        Args:
+            timeout: The number of seconds to wait for the result if the future
+                isn't done. If None, then there is no limit on the wait time.
+
+        Returns:
+            The result of the call that the future represents.
+
+        Raises:
+            CancelledError: If the future was cancelled.
+            TimeoutError: If the future didn't finish executing before the given
+                timeout.
+            Exception: If the call raised then that exception will be raised.
+        """
+        with self._condition:
+            if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+                raise CancelledError()
+            elif self._state == FINISHED:
+                return self.__get_result()
+
+            self._condition.wait(timeout)
+
+            # Re-check after the wait: either the state advanced or the
+            # timeout expired.
+            if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+                raise CancelledError()
+            elif self._state == FINISHED:
+                return self.__get_result()
+            else:
+                raise TimeoutError()
+
+    def exception_info(self, timeout=None):
+        """Return a tuple of (exception, traceback) raised by the call that the
+        future represents.
+
+        Args:
+            timeout: The number of seconds to wait for the exception if the
+                future isn't done. If None, then there is no limit on the wait
+                time.
+
+        Returns:
+            A tuple (exception, traceback) from the call that the future
+            represents, or (None, None) if the call completed without raising.
+
+        Raises:
+            CancelledError: If the future was cancelled.
+            TimeoutError: If the future didn't finish executing before the given
+                timeout.
+        """
+        with self._condition:
+            if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+                raise CancelledError()
+            elif self._state == FINISHED:
+                return self._exception, self._traceback
+
+            self._condition.wait(timeout)
+
+            if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+                raise CancelledError()
+            elif self._state == FINISHED:
+                return self._exception, self._traceback
+            else:
+                raise TimeoutError()
+
+    def exception(self, timeout=None):
+        """Return the exception raised by the call that the future represents.
+
+        Args:
+            timeout: The number of seconds to wait for the exception if the
+                future isn't done. If None, then there is no limit on the wait
+                time.
+
+        Returns:
+            The exception raised by the call that the future represents or None
+            if the call completed without raising.
+
+        Raises:
+            CancelledError: If the future was cancelled.
+            TimeoutError: If the future didn't finish executing before the given
+                timeout.
+        """
+        return self.exception_info(timeout)[0]
+
+    # The following methods should only be used by Executors and in tests.
+    def set_running_or_notify_cancel(self):
+        """Mark the future as running or process any cancel notifications.
+
+        Should only be used by Executor implementations and unit tests.
+
+        If the future has been cancelled (cancel() was called and returned
+        True) then any threads waiting on the future completing (though calls
+        to as_completed() or wait()) are notified and False is returned.
+
+        If the future was not cancelled then it is put in the running state
+        (future calls to running() will return True) and True is returned.
+
+        This method should be called by Executor implementations before
+        executing the work associated with this future. If this method returns
+        False then the work should not be executed.
+
+        Returns:
+            False if the Future was cancelled, True otherwise.
+
+        Raises:
+            RuntimeError: if this method was already called or if set_result()
+                or set_exception() was called.
+        """
+        with self._condition:
+            if self._state == CANCELLED:
+                self._state = CANCELLED_AND_NOTIFIED
+                for waiter in self._waiters:
+                    waiter.add_cancelled(self)
+                # self._condition.notify_all() is not necessary because
+                # self.cancel() triggers a notification.
+                return False
+            elif self._state == PENDING:
+                self._state = RUNNING
+                return True
+            else:
+                LOGGER.critical('Future %s in unexpected state: %s',
+                                id(self),
+                                self._state)
+                raise RuntimeError('Future in unexpected state')
+
+    def set_result(self, result):
+        """Sets the return value of work associated with the future.
+
+        Should only be used by Executor implementations and unit tests.
+        """
+        with self._condition:
+            self._result = result
+            self._state = FINISHED
+            for waiter in self._waiters:
+                waiter.add_result(self)
+            self._condition.notify_all()
+        self._invoke_callbacks()
+
+    def set_exception_info(self, exception, traceback):
+        """Sets the result of the future as being the given exception
+        and traceback.
+
+        Should only be used by Executor implementations and unit tests.
+        """
+        with self._condition:
+            self._exception = exception
+            self._traceback = traceback
+            self._state = FINISHED
+            for waiter in self._waiters:
+                waiter.add_exception(self)
+            self._condition.notify_all()
+        self._invoke_callbacks()
+
+    def set_exception(self, exception):
+        """Sets the result of the future as being the given exception.
+
+        Should only be used by Executor implementations and unit tests.
+        """
+        self.set_exception_info(exception, None)
      +
+class Executor(object):
+    """This is an abstract base class for concrete asynchronous executors."""
+
+    def submit(self, fn, *args, **kwargs):
+        """Submits a callable to be executed with the given arguments.
+
+        Schedules the callable to be executed as fn(*args, **kwargs) and returns
+        a Future instance representing the execution of the callable.
+
+        Returns:
+            A Future representing the given call.
+        """
+        raise NotImplementedError()
+
+    def map(self, fn, *iterables, **kwargs):
+        """Returns an iterator equivalent to map(fn, iter).
+
+        Args:
+            fn: A callable that will take as many arguments as there are
+                passed iterables.
+            timeout: The maximum number of seconds to wait. If None, then there
+                is no limit on the wait time.  (Accepted via **kwargs because
+                Python 2 has no keyword-only parameter syntax.)
+
+        Returns:
+            An iterator equivalent to: map(func, *iterables) but the calls may
+            be evaluated out-of-order.
+
+        Raises:
+            TimeoutError: If the entire result iterator could not be generated
+                before the given timeout.
+            Exception: If fn(*args) raises for any values.
+        """
+        timeout = kwargs.get('timeout')
+        if timeout is not None:
+            end_time = timeout + time.time()
+
+        # Submit everything up front; izip is the Python 2 lazy zip.
+        fs = [self.submit(fn, *args) for args in itertools.izip(*iterables)]
+
+        # Yield must be hidden in closure so that the futures are submitted
+        # before the first iterator value is required.
+        def result_iterator():
+            try:
+                for future in fs:
+                    if timeout is None:
+                        yield future.result()
+                    else:
+                        # Remaining time against the absolute deadline.
+                        yield future.result(end_time - time.time())
+            finally:
+                # Abandoned/failed iteration cancels any not-yet-started work.
+                for future in fs:
+                    future.cancel()
+        return result_iterator()
+
+    def shutdown(self, wait=True):
+        """Clean-up the resources associated with the Executor.
+
+        It is safe to call this method several times. Otherwise, no other
+        methods can be called after this one.
+
+        Args:
+            wait: If True then shutdown will not return until all running
+                futures have finished executing and the resources used by the
+                executor have been reclaimed.
+        """
+        pass
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.shutdown(wait=True)
+        # Returning False propagates any exception from the with-block.
+        return False
      diff --git a/python/futures/concurrent/futures/process.py b/python/futures/concurrent/futures/process.py
      new file mode 100644
      index 000000000..ee463f181
      --- /dev/null
      +++ b/python/futures/concurrent/futures/process.py
      @@ -0,0 +1,359 @@
      +# Copyright 2009 Brian Quinlan. All Rights Reserved.
      +# Licensed to PSF under a Contributor Agreement.
      +
      +"""Implements ProcessPoolExecutor.
      +
      +The follow diagram and text describe the data-flow through the system:
      +
      +|======================= In-process =====================|== Out-of-process ==|
      +
      ++----------+     +----------+       +--------+     +-----------+    +---------+
      +|          |  => | Work Ids |    => |        |  => | Call Q    | => |         |
      +|          |     +----------+       |        |     +-----------+    |         |
      +|          |     | ...      |       |        |     | ...       |    |         |
      +|          |     | 6        |       |        |     | 5, call() |    |         |
      +|          |     | 7        |       |        |     | ...       |    |         |
      +| Process  |     | ...      |       | Local  |     +-----------+    | Process |
      +|  Pool    |     +----------+       | Worker |                      |  #1..n  |
      +| Executor |                        | Thread |                      |         |
      +|          |     +----------- +     |        |     +-----------+    |         |
      +|          | <=> | Work Items | <=> |        | <=  | Result Q  | <= |         |
      +|          |     +------------+     |        |     +-----------+    |         |
      +|          |     | 6: call()  |     |        |     | ...       |    |         |
      +|          |     |    future  |     |        |     | 4, result |    |         |
      +|          |     | ...        |     |        |     | 3, except |    |         |
      ++----------+     +------------+     +--------+     +-----------+    +---------+
      +
      +Executor.submit() called:
      +- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict
      +- adds the id of the _WorkItem to the "Work Ids" queue
      +
      +Local worker thread:
      +- reads work ids from the "Work Ids" queue and looks up the corresponding
      +  WorkItem from the "Work Items" dict: if the work item has been cancelled then
      +  it is simply removed from the dict, otherwise it is repackaged as a
      +  _CallItem and put in the "Call Q". New _CallItems are put in the "Call Q"
      +  until "Call Q" is full. NOTE: the size of the "Call Q" is kept small because
      +  calls placed in the "Call Q" can no longer be cancelled with Future.cancel().
      +- reads _ResultItems from "Result Q", updates the future stored in the
      +  "Work Items" dict and deletes the dict entry
      +
      +Process #1..n:
      +- reads _CallItems from "Call Q", executes the calls, and puts the resulting
      +  _ResultItems in "Request Q"
      +"""
      +
      +import atexit
      +from concurrent.futures import _base
      +import Queue as queue
      +import multiprocessing
      +import threading
      +import weakref
      +import sys
      +
      +__author__ = 'Brian Quinlan (brian@sweetapp.com)'
      +
      +# Workers are created as daemon threads and processes. This is done to allow the
      +# interpreter to exit when there are still idle processes in a
      +# ProcessPoolExecutor's process pool (i.e. shutdown() was not called). However,
      +# allowing workers to die with the interpreter has two undesirable properties:
      +#   - The workers would still be running during interpretor shutdown,
      +#     meaning that they would fail in unpredictable ways.
      +#   - The workers could be killed while evaluating a work item, which could
      +#     be bad if the callable being evaluated has external side-effects e.g.
      +#     writing to a file.
      +#
      +# To work around this problem, an exit handler is installed which tells the
      +# workers to exit when their work queues are empty and then waits until the
      +# threads/processes finish.
      +
+# Maps each queue-management thread to its call queue so the exit handler
+# can wake and join it.  Weak keys: entries vanish with their threads.
+_threads_queues = weakref.WeakKeyDictionary()
+_shutdown = False
+
+def _python_exit():
+    # atexit hook: flag interpreter shutdown, wake every management thread
+    # with a None sentinel, then join them so worker processes exit cleanly.
+    global _shutdown
+    _shutdown = True
+    items = list(_threads_queues.items())
+    for t, q in items:
+        q.put(None)
+    for t, q in items:
+        t.join()
      +
      +# Controls how many more calls than processes will be queued in the call queue.
      +# A smaller number will mean that processes spend more time idle waiting for
      +# work while a larger number will make Future.cancel() succeed less frequently
      +# (Futures in the call queue cannot be cancelled).
      +EXTRA_QUEUED_CALLS = 1
      +
+class _WorkItem(object):
+    """A pending call bundled with the Future that will receive its outcome."""
+    def __init__(self, future, fn, args, kwargs):
+        self.future = future
+        self.fn = fn
+        self.args = args
+        self.kwargs = kwargs
      +
+class _ResultItem(object):
+    """Outcome of one call, sent from a worker process over the result queue."""
+    def __init__(self, work_id, exception=None, result=None):
+        self.work_id = work_id
+        self.exception = exception
+        self.result = result
      +
+class _CallItem(object):
+    """A call handed to a worker process over the call queue."""
+    def __init__(self, work_id, fn, args, kwargs):
+        self.work_id = work_id
+        self.fn = fn
+        self.args = args
+        self.kwargs = kwargs
      +
+def _process_worker(call_queue, result_queue):
+    """Evaluates calls from call_queue and places the results in result_queue.
+
+    This worker is run in a separate process.
+
+    Args:
+        call_queue: A multiprocessing.Queue of _CallItems that will be read and
+            evaluated by the worker.
+        result_queue: A multiprocessing.Queue of _ResultItems that will written
+            to by the worker.
+
+    A None on call_queue is the shutdown sentinel: the worker echoes None
+    onto result_queue (waking the queue-management thread) and exits.
+    """
+    while True:
+        call_item = call_queue.get(block=True)
+        if call_item is None:
+            # Wake up queue management thread
+            result_queue.put(None)
+            return
+        try:
+            r = call_item.fn(*call_item.args, **call_item.kwargs)
+        except BaseException:
+            # sys.exc_info()[1] rather than 'except ... as e' -- presumably
+            # for compatibility with very old Python 2 versions; confirm.
+            e = sys.exc_info()[1]
+            result_queue.put(_ResultItem(call_item.work_id,
+                                         exception=e))
+        else:
+            result_queue.put(_ResultItem(call_item.work_id,
+                                         result=r))
      +
+def _add_call_item_to_queue(pending_work_items,
+                            work_ids,
+                            call_queue):
+    """Fills call_queue with _WorkItems from pending_work_items.
+
+    This function never blocks.
+
+    Args:
+        pending_work_items: A dict mapping work ids to _WorkItems e.g.
+            {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
+        work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids
+            are consumed and the corresponding _WorkItems from
+            pending_work_items are transformed into _CallItems and put in
+            call_queue.
+        call_queue: A multiprocessing.Queue that will be filled with _CallItems
+            derived from _WorkItems.
+    """
+    while True:
+        # Stop before a put() could block: calls already in call_queue can
+        # no longer be cancelled, so the queue is kept short deliberately.
+        if call_queue.full():
+            return
+        try:
+            work_id = work_ids.get(block=False)
+        except queue.Empty:
+            # No more queued work ids right now.
+            return
+        else:
+            work_item = pending_work_items[work_id]
+
+            if work_item.future.set_running_or_notify_cancel():
+                call_queue.put(_CallItem(work_id,
+                                         work_item.fn,
+                                         work_item.args,
+                                         work_item.kwargs),
+                               block=True)
+            else:
+                # Future was cancelled before it started; drop the work item.
+                del pending_work_items[work_id]
+                continue
      +
+def _queue_management_worker(executor_reference,
+                             processes,
+                             pending_work_items,
+                             work_ids_queue,
+                             call_queue,
+                             result_queue):
+    """Manages the communication between this process and the worker processes.
+
+    This function is run in a local thread.
+
+    Args:
+        executor_reference: A weakref.ref to the ProcessPoolExecutor that owns
+            this thread. Used to determine if the ProcessPoolExecutor has been
+            garbage collected and that this function can exit.
+        processes: A list of the multiprocessing.Process instances used as
+            workers.
+        pending_work_items: A dict mapping work ids to _WorkItems e.g.
+            {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
+        work_ids_queue: A queue.Queue of work ids e.g. Queue([5, 6, ...]).
+        call_queue: A multiprocessing.Queue that will be filled with _CallItems
+            derived from _WorkItems for processing by the process workers.
+        result_queue: A multiprocessing.Queue of _ResultItems generated by the
+            process workers.
+    """
+    # Mutable cell so the nested function can update the count (Python 2
+    # has no 'nonlocal').
+    nb_shutdown_processes = [0]
+    def shutdown_one_process():
+        """Tell a worker to terminate, which will in turn wake us again"""
+        call_queue.put(None)
+        nb_shutdown_processes[0] += 1
+    while True:
+        _add_call_item_to_queue(pending_work_items,
+                                work_ids_queue,
+                                call_queue)
+
+        # None here is the sentinel a worker echoes back when told to exit.
+        result_item = result_queue.get(block=True)
+        if result_item is not None:
+            work_item = pending_work_items[result_item.work_id]
+            del pending_work_items[result_item.work_id]
+
+            if result_item.exception:
+                work_item.future.set_exception(result_item.exception)
+            else:
+                work_item.future.set_result(result_item.result)
+            # Delete references to object. See issue16284
+            del work_item
+        # Check whether we should start shutting down.
+        executor = executor_reference()
+        # No more work items can be added if:
+        #   - The interpreter is shutting down OR
+        #   - The executor that owns this worker has been collected OR
+        #   - The executor that owns this worker has been shutdown.
+        if _shutdown or executor is None or executor._shutdown_thread:
+            # Since no new work items can be added, it is safe to shutdown
+            # this thread if there are no pending work items.
+            if not pending_work_items:
+                while nb_shutdown_processes[0] < len(processes):
+                    shutdown_one_process()
+                # If .join() is not called on the created processes then
+                # some multiprocessing.Queue methods may deadlock on Mac OS
+                # X.
+                for p in processes:
+                    p.join()
+                call_queue.close()
+                return
+        # Drop the strong reference so the executor stays collectable
+        # while this thread loops.
+        del executor
      +
+# Cached outcome of _check_system_limits(): once checked, _system_limited
+# holds the error message (or None if the platform is usable).
+_system_limits_checked = False
+_system_limited = None
+def _check_system_limits():
+    """Raise NotImplementedError if the platform provides too few POSIX
+    semaphores for multiprocessing; caches the verdict in module globals.
+
+    NOTE(review): when a previous check found no limit, there is no early
+    return here, so the sysconf probe runs again on every call -- redundant
+    but harmless.
+    """
+    global _system_limits_checked, _system_limited
+    if _system_limits_checked:
+        if _system_limited:
+            raise NotImplementedError(_system_limited)
+    _system_limits_checked = True
+    try:
+        import os
+        nsems_max = os.sysconf("SC_SEM_NSEMS_MAX")
+    except (AttributeError, ValueError):
+        # sysconf not available or setting not available
+        return
+    if nsems_max == -1:
+        # indeterminate limit; assume it is bounded only by available
+        # memory
+        return
+    if nsems_max >= 256:
+        # minimum number of semaphores available
+        # according to POSIX
+        return
+    _system_limited = "system provides too few semaphores (%d available, 256 necessary)" % nsems_max
+    raise NotImplementedError(_system_limited)
      +
class ProcessPoolExecutor(_base.Executor):
    """Executor that runs calls asynchronously in a pool of worker processes.

    Data flow (from the visible code): submit() stores a _WorkItem under a
    monotonically increasing key in _pending_work_items and pushes the key
    onto _work_ids; a daemon "queue management" thread (started lazily) moves
    runnable calls onto the multiprocessing _call_queue and routes results
    arriving on _result_queue back to the corresponding futures.
    """
    def __init__(self, max_workers=None):
        """Initializes a new ProcessPoolExecutor instance.

        Args:
            max_workers: The maximum number of processes that can be used to
                execute the given calls. If None or not given then as many
                worker processes will be created as the machine has processors.
        """
        # May raise NotImplementedError on platforms with too few semaphores.
        _check_system_limits()

        if max_workers is None:
            self._max_workers = multiprocessing.cpu_count()
        else:
            self._max_workers = max_workers

        # Make the call queue slightly larger than the number of processes to
        # prevent the worker processes from idling. But don't make it too big
        # because futures in the call queue cannot be cancelled.
        self._call_queue = multiprocessing.Queue(self._max_workers +
                                                 EXTRA_QUEUED_CALLS)
        self._result_queue = multiprocessing.Queue()
        self._work_ids = queue.Queue()
        # Created lazily on first submit(); see _start_queue_management_thread.
        self._queue_management_thread = None
        self._processes = set()

        # Shutdown is a two-step process.
        self._shutdown_thread = False
        self._shutdown_lock = threading.Lock()
        # Next key for _pending_work_items; also the order tickets in _work_ids.
        self._queue_count = 0
        self._pending_work_items = {}

    def _start_queue_management_thread(self):
        # When the executor gets lost, the weakref callback will wake up
        # the queue management thread.
        def weakref_cb(_, q=self._result_queue):
            q.put(None)
        if self._queue_management_thread is None:
            self._queue_management_thread = threading.Thread(
                    target=_queue_management_worker,
                    args=(weakref.ref(self, weakref_cb),
                          self._processes,
                          self._pending_work_items,
                          self._work_ids,
                          self._call_queue,
                          self._result_queue))
            # Daemonized so a forgotten executor cannot block interpreter exit.
            self._queue_management_thread.daemon = True
            self._queue_management_thread.start()
            _threads_queues[self._queue_management_thread] = self._result_queue

    def _adjust_process_count(self):
        # Top the pool up to _max_workers worker processes.
        for _ in range(len(self._processes), self._max_workers):
            p = multiprocessing.Process(
                    target=_process_worker,
                    args=(self._call_queue,
                          self._result_queue))
            p.start()
            self._processes.add(p)

    def submit(self, fn, *args, **kwargs):
        with self._shutdown_lock:
            if self._shutdown_thread:
                raise RuntimeError('cannot schedule new futures after shutdown')

            f = _base.Future()
            w = _WorkItem(f, fn, args, kwargs)

            self._pending_work_items[self._queue_count] = w
            self._work_ids.put(self._queue_count)
            self._queue_count += 1
            # Wake up queue management thread
            self._result_queue.put(None)

            self._start_queue_management_thread()
            self._adjust_process_count()
            return f
    submit.__doc__ = _base.Executor.submit.__doc__

    def shutdown(self, wait=True):
        with self._shutdown_lock:
            self._shutdown_thread = True
        if self._queue_management_thread:
            # Wake up queue management thread
            self._result_queue.put(None)
            if wait:
                self._queue_management_thread.join()
        # To reduce the risk of opening too many files, remove references to
        # objects that use file descriptors.
        self._queue_management_thread = None
        self._call_queue = None
        self._result_queue = None
        self._processes = None
    shutdown.__doc__ = _base.Executor.shutdown.__doc__
      +
# At interpreter exit, wake the queue management thread(s) and join worker
# processes so shutdown is orderly.
atexit.register(_python_exit)
      diff --git a/python/futures/concurrent/futures/thread.py b/python/futures/concurrent/futures/thread.py
      new file mode 100644
      index 000000000..fa5ed0c09
      --- /dev/null
      +++ b/python/futures/concurrent/futures/thread.py
      @@ -0,0 +1,134 @@
      +# Copyright 2009 Brian Quinlan. All Rights Reserved.
      +# Licensed to PSF under a Contributor Agreement.
      +
      +"""Implements ThreadPoolExecutor."""
      +
      +import atexit
      +from concurrent.futures import _base
      +import Queue as queue
      +import threading
      +import weakref
      +import sys
      +
      +__author__ = 'Brian Quinlan (brian@sweetapp.com)'
      +
      +# Workers are created as daemon threads. This is done to allow the interpreter
      +# to exit when there are still idle threads in a ThreadPoolExecutor's thread
      +# pool (i.e. shutdown() was not called). However, allowing workers to die with
      +# the interpreter has two undesirable properties:
#   - The workers would still be running during interpreter shutdown,
      +#     meaning that they would fail in unpredictable ways.
      +#   - The workers could be killed while evaluating a work item, which could
      +#     be bad if the callable being evaluated has external side-effects e.g.
      +#     writing to a file.
      +#
      +# To work around this problem, an exit handler is installed which tells the
      +# workers to exit when their work queues are empty and then waits until the
      +# threads finish.
      +
      +_threads_queues = weakref.WeakKeyDictionary()
      +_shutdown = False
      +
      +def _python_exit():
      +    global _shutdown
      +    _shutdown = True
      +    items = list(_threads_queues.items())
      +    for t, q in items:
      +        q.put(None)
      +    for t, q in items:
      +        t.join()
      +
      +atexit.register(_python_exit)
      +
      +class _WorkItem(object):
      +    def __init__(self, future, fn, args, kwargs):
      +        self.future = future
      +        self.fn = fn
      +        self.args = args
      +        self.kwargs = kwargs
      +
      +    def run(self):
      +        if not self.future.set_running_or_notify_cancel():
      +            return
      +
      +        try:
      +            result = self.fn(*self.args, **self.kwargs)
      +        except BaseException:
      +            e, tb = sys.exc_info()[1:]
      +            self.future.set_exception_info(e, tb)
      +        else:
      +            self.future.set_result(result)
      +
      +def _worker(executor_reference, work_queue):
      +    try:
      +        while True:
      +            work_item = work_queue.get(block=True)
      +            if work_item is not None:
      +                work_item.run()
      +                # Delete references to object. See issue16284
      +                del work_item
      +                continue
      +            executor = executor_reference()
      +            # Exit if:
      +            #   - The interpreter is shutting down OR
      +            #   - The executor that owns the worker has been collected OR
      +            #   - The executor that owns the worker has been shutdown.
      +            if _shutdown or executor is None or executor._shutdown:
      +                # Notice other workers
      +                work_queue.put(None)
      +                return
      +            del executor
      +    except BaseException:
      +        _base.LOGGER.critical('Exception in worker', exc_info=True)
      +
      +class ThreadPoolExecutor(_base.Executor):
      +    def __init__(self, max_workers):
      +        """Initializes a new ThreadPoolExecutor instance.
      +
      +        Args:
      +            max_workers: The maximum number of threads that can be used to
      +                execute the given calls.
      +        """
      +        self._max_workers = max_workers
      +        self._work_queue = queue.Queue()
      +        self._threads = set()
      +        self._shutdown = False
      +        self._shutdown_lock = threading.Lock()
      +
      +    def submit(self, fn, *args, **kwargs):
      +        with self._shutdown_lock:
      +            if self._shutdown:
      +                raise RuntimeError('cannot schedule new futures after shutdown')
      +
      +            f = _base.Future()
      +            w = _WorkItem(f, fn, args, kwargs)
      +
      +            self._work_queue.put(w)
      +            self._adjust_thread_count()
      +            return f
      +    submit.__doc__ = _base.Executor.submit.__doc__
      +
      +    def _adjust_thread_count(self):
      +        # When the executor gets lost, the weakref callback will wake up
      +        # the worker threads.
      +        def weakref_cb(_, q=self._work_queue):
      +            q.put(None)
      +        # TODO(bquinlan): Should avoid creating new threads if there are more
      +        # idle threads than items in the work queue.
      +        if len(self._threads) < self._max_workers:
      +            t = threading.Thread(target=_worker,
      +                                 args=(weakref.ref(self, weakref_cb),
      +                                       self._work_queue))
      +            t.daemon = True
      +            t.start()
      +            self._threads.add(t)
      +            _threads_queues[t] = self._work_queue
      +
      +    def shutdown(self, wait=True):
      +        with self._shutdown_lock:
      +            self._shutdown = True
      +            self._work_queue.put(None)
      +        if wait:
      +            for t in self._threads:
      +                t.join()
      +    shutdown.__doc__ = _base.Executor.shutdown.__doc__
      diff --git a/python/futures/crawl.py b/python/futures/crawl.py
      new file mode 100644
      index 000000000..86e0af7fe
      --- /dev/null
      +++ b/python/futures/crawl.py
      @@ -0,0 +1,74 @@
      +"""Compare the speed of downloading URLs sequentially vs. using futures."""
      +
      +import functools
      +import time
      +import timeit
      +import sys
      +
      +try:
      +    from urllib2 import urlopen
      +except ImportError:
      +    from urllib.request import urlopen
      +
      +from concurrent.futures import (as_completed, ThreadPoolExecutor,
      +                                ProcessPoolExecutor)
      +
      +URLS = ['http://www.google.com/',
      +        'http://www.apple.com/',
      +        'http://www.ibm.com',
      +        'http://www.thisurlprobablydoesnotexist.com',
      +        'http://www.slashdot.org/',
      +        'http://www.python.org/',
      +        'http://www.bing.com/',
      +        'http://www.facebook.com/',
      +        'http://www.yahoo.com/',
      +        'http://www.youtube.com/',
      +        'http://www.blogger.com/']
      +
      +def load_url(url, timeout):
      +    kwargs = {'timeout': timeout} if sys.version_info >= (2, 6) else {}
      +    return urlopen(url, **kwargs).read()
      +
      +def download_urls_sequential(urls, timeout=60):
      +    url_to_content = {}
      +    for url in urls:
      +        try:
      +            url_to_content[url] = load_url(url, timeout=timeout)
      +        except:
      +            pass
      +    return url_to_content
      +
      +def download_urls_with_executor(urls, executor, timeout=60):
      +    try:
      +        url_to_content = {}
      +        future_to_url = dict((executor.submit(load_url, url, timeout), url)
      +                             for url in urls)
      +
      +        for future in as_completed(future_to_url):
      +            try:
      +                url_to_content[future_to_url[future]] = future.result()
      +            except:
      +                pass
      +        return url_to_content
      +    finally:
      +        executor.shutdown()
      +
      +def main():
      +    for name, fn in [('sequential',
      +                      functools.partial(download_urls_sequential, URLS)),
      +                     ('processes',
      +                      functools.partial(download_urls_with_executor,
      +                                        URLS,
      +                                        ProcessPoolExecutor(10))),
      +                     ('threads',
      +                      functools.partial(download_urls_with_executor,
      +                                        URLS,
      +                                        ThreadPoolExecutor(10)))]:
      +        sys.stdout.write('%s: ' % name.ljust(12))
      +        start = time.time()
      +        url_map = fn()
      +        sys.stdout.write('%.2f seconds (%d of %d downloaded)\n' %
      +                         (time.time() - start, len(url_map), len(URLS)))
      +
      +if __name__ == '__main__':
      +    main()
      diff --git a/python/futures/docs/Makefile b/python/futures/docs/Makefile
      new file mode 100644
      index 000000000..f69d84035
      --- /dev/null
      +++ b/python/futures/docs/Makefile
      @@ -0,0 +1,88 @@
      +# Makefile for Sphinx documentation
      +#
      +
      +# You can set these variables from the command line.
      +SPHINXOPTS    =
      +SPHINXBUILD   = sphinx-build
      +PAPER         =
      +
      +# Internal variables.
      +PAPEROPT_a4     = -D latex_paper_size=a4
      +PAPEROPT_letter = -D latex_paper_size=letter
      +ALLSPHINXOPTS   = -d _build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
      +
      +.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest
      +
      +help:
      +	@echo "Please use \`make ' where  is one of"
      +	@echo "  html      to make standalone HTML files"
      +	@echo "  dirhtml   to make HTML files named index.html in directories"
      +	@echo "  pickle    to make pickle files"
      +	@echo "  json      to make JSON files"
      +	@echo "  htmlhelp  to make HTML files and a HTML help project"
      +	@echo "  qthelp    to make HTML files and a qthelp project"
      +	@echo "  latex     to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
      +	@echo "  changes   to make an overview of all changed/added/deprecated items"
      +	@echo "  linkcheck to check all external links for integrity"
      +	@echo "  doctest   to run all doctests embedded in the documentation (if enabled)"
      +
      +clean:
      +	-rm -rf _build/*
      +
      +html:
      +	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) _build/html
      +	@echo
      +	@echo "Build finished. The HTML pages are in _build/html."
      +
      +dirhtml:
      +	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) _build/dirhtml
      +	@echo
      +	@echo "Build finished. The HTML pages are in _build/dirhtml."
      +
      +pickle:
      +	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) _build/pickle
      +	@echo
      +	@echo "Build finished; now you can process the pickle files."
      +
      +json:
      +	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) _build/json
      +	@echo
      +	@echo "Build finished; now you can process the JSON files."
      +
      +htmlhelp:
      +	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) _build/htmlhelp
      +	@echo
      +	@echo "Build finished; now you can run HTML Help Workshop with the" \
      +	      ".hhp project file in _build/htmlhelp."
      +
      +qthelp:
      +	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) _build/qthelp
      +	@echo
      +	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
      +	      ".qhcp project file in _build/qthelp, like this:"
      +	@echo "# qcollectiongenerator _build/qthelp/futures.qhcp"
      +	@echo "To view the help file:"
      +	@echo "# assistant -collectionFile _build/qthelp/futures.qhc"
      +
      +latex:
      +	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) _build/latex
      +	@echo
      +	@echo "Build finished; the LaTeX files are in _build/latex."
      +	@echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
      +	      "run these through (pdf)latex."
      +
      +changes:
      +	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) _build/changes
      +	@echo
      +	@echo "The overview file is in _build/changes."
      +
      +linkcheck:
      +	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) _build/linkcheck
      +	@echo
      +	@echo "Link check complete; look for any errors in the above output " \
      +	      "or in _build/linkcheck/output.txt."
      +
      +doctest:
      +	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) _build/doctest
      +	@echo "Testing of doctests in the sources finished, look at the " \
      +	      "results in _build/doctest/output.txt."
      diff --git a/python/futures/docs/conf.py b/python/futures/docs/conf.py
      new file mode 100644
      index 000000000..5cea44c88
      --- /dev/null
      +++ b/python/futures/docs/conf.py
      @@ -0,0 +1,194 @@
      +# -*- coding: utf-8 -*-
      +#
      +# futures documentation build configuration file, created by
      +# sphinx-quickstart on Wed Jun  3 19:35:34 2009.
      +#
      +# This file is execfile()d with the current directory set to its containing dir.
      +#
      +# Note that not all possible configuration values are present in this
      +# autogenerated file.
      +#
      +# All configuration values have a default; values that are commented out
      +# serve to show the default.
      +
      +import sys, os
      +
      +# If extensions (or modules to document with autodoc) are in another directory,
      +# add these directories to sys.path here. If the directory is relative to the
      +# documentation root, use os.path.abspath to make it absolute, like shown here.
      +#sys.path.append(os.path.abspath('.'))
      +
      +# -- General configuration -----------------------------------------------------
      +
      +# Add any Sphinx extension module names here, as strings. They can be extensions
      +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
      +extensions = []
      +
      +# Add any paths that contain templates here, relative to this directory.
      +templates_path = ['_templates']
      +
      +# The suffix of source filenames.
      +source_suffix = '.rst'
      +
      +# The encoding of source files.
      +#source_encoding = 'utf-8'
      +
      +# The master toctree document.
      +master_doc = 'index'
      +
      +# General information about the project.
      +project = u'futures'
      +copyright = u'2009-2011, Brian Quinlan'
      +
      +# The version info for the project you're documenting, acts as replacement for
      +# |version| and |release|, also used in various other places throughout the
      +# built documents.
      +#
      +# The short X.Y version.
      +version = '2.1.6'
      +# The full version, including alpha/beta/rc tags.
      +release = '2.1.6'
      +
      +# The language for content autogenerated by Sphinx. Refer to documentation
      +# for a list of supported languages.
      +#language = None
      +
      +# There are two options for replacing |today|: either, you set today to some
      +# non-false value, then it is used:
      +#today = ''
      +# Else, today_fmt is used as the format for a strftime call.
      +#today_fmt = '%B %d, %Y'
      +
      +# List of documents that shouldn't be included in the build.
      +#unused_docs = []
      +
      +# List of directories, relative to source directory, that shouldn't be searched
      +# for source files.
      +exclude_trees = ['_build']
      +
      +# The reST default role (used for this markup: `text`) to use for all documents.
      +#default_role = None
      +
      +# If true, '()' will be appended to :func: etc. cross-reference text.
      +#add_function_parentheses = True
      +
      +# If true, the current module name will be prepended to all description
      +# unit titles (such as .. function::).
      +#add_module_names = True
      +
      +# If true, sectionauthor and moduleauthor directives will be shown in the
      +# output. They are ignored by default.
      +#show_authors = False
      +
      +# The name of the Pygments (syntax highlighting) style to use.
      +pygments_style = 'sphinx'
      +
      +# A list of ignored prefixes for module index sorting.
      +#modindex_common_prefix = []
      +
      +
      +# -- Options for HTML output ---------------------------------------------------
      +
      +# The theme to use for HTML and HTML Help pages.  Major themes that come with
      +# Sphinx are currently 'default' and 'sphinxdoc'.
      +html_theme = 'default'
      +
      +# Theme options are theme-specific and customize the look and feel of a theme
      +# further.  For a list of options available for each theme, see the
      +# documentation.
      +#html_theme_options = {}
      +
      +# Add any paths that contain custom themes here, relative to this directory.
      +#html_theme_path = []
      +
      +# The name for this set of Sphinx documents.  If None, it defaults to
      +# " v documentation".
      +#html_title = None
      +
      +# A shorter title for the navigation bar.  Default is the same as html_title.
      +#html_short_title = None
      +
      +# The name of an image file (relative to this directory) to place at the top
      +# of the sidebar.
      +#html_logo = None
      +
      +# The name of an image file (within the static path) to use as favicon of the
      +# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
      +# pixels large.
      +#html_favicon = None
      +
      +# Add any paths that contain custom static files (such as style sheets) here,
      +# relative to this directory. They are copied after the builtin static files,
      +# so a file named "default.css" will overwrite the builtin "default.css".
      +html_static_path = ['_static']
      +
      +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
      +# using the given strftime format.
      +#html_last_updated_fmt = '%b %d, %Y'
      +
      +# If true, SmartyPants will be used to convert quotes and dashes to
      +# typographically correct entities.
      +#html_use_smartypants = True
      +
      +# Custom sidebar templates, maps document names to template names.
      +#html_sidebars = {}
      +
      +# Additional templates that should be rendered to pages, maps page names to
      +# template names.
      +#html_additional_pages = {}
      +
      +# If false, no module index is generated.
      +#html_use_modindex = True
      +
      +# If false, no index is generated.
      +#html_use_index = True
      +
      +# If true, the index is split into individual pages for each letter.
      +#html_split_index = False
      +
      +# If true, links to the reST sources are added to the pages.
      +#html_show_sourcelink = True
      +
      +# If true, an OpenSearch description file will be output, and all pages will
      +# contain a  tag referring to it.  The value of this option must be the
      +# base URL from which the finished HTML is served.
      +#html_use_opensearch = ''
      +
      +# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
      +#html_file_suffix = ''
      +
      +# Output file base name for HTML help builder.
      +htmlhelp_basename = 'futuresdoc'
      +
      +
      +# -- Options for LaTeX output --------------------------------------------------
      +
      +# The paper size ('letter' or 'a4').
      +#latex_paper_size = 'letter'
      +
      +# The font size ('10pt', '11pt' or '12pt').
      +#latex_font_size = '10pt'
      +
      +# Grouping the document tree into LaTeX files. List of tuples
      +# (source start file, target name, title, author, documentclass [howto/manual]).
      +latex_documents = [
      +  ('index', 'futures.tex', u'futures Documentation',
      +   u'Brian Quinlan', 'manual'),
      +]
      +
      +# The name of an image file (relative to this directory) to place at the top of
      +# the title page.
      +#latex_logo = None
      +
      +# For "manual" documents, if this is true, then toplevel headings are parts,
      +# not chapters.
      +#latex_use_parts = False
      +
      +# Additional stuff for the LaTeX preamble.
      +#latex_preamble = ''
      +
      +# Documents to append as an appendix to all manuals.
      +#latex_appendices = []
      +
      +# If false, no module index is generated.
      +#latex_use_modindex = True
      diff --git a/python/futures/docs/index.rst b/python/futures/docs/index.rst
      new file mode 100644
      index 000000000..4103f014b
      --- /dev/null
      +++ b/python/futures/docs/index.rst
      @@ -0,0 +1,347 @@
      +:mod:`concurrent.futures` --- Asynchronous computation
      +======================================================
      +
      +.. module:: concurrent.futures
      +   :synopsis: Execute computations asynchronously using threads or processes. 
      +
      +The :mod:`concurrent.futures` module provides a high-level interface for
      +asynchronously executing callables.
      +
The asynchronous execution can be performed by threads using
:class:`ThreadPoolExecutor` or separate processes using
      +:class:`ProcessPoolExecutor`. Both implement the same interface, which is
      +defined by the abstract :class:`Executor` class.
      +
      +Executor Objects
      +----------------
      +
      +:class:`Executor` is an abstract class that provides methods to execute calls
      +asynchronously. It should not be used directly, but through its two
      +subclasses: :class:`ThreadPoolExecutor` and :class:`ProcessPoolExecutor`.
      +
      +.. method:: Executor.submit(fn, *args, **kwargs)
      +
      +   Schedules the callable to be executed as *fn*(*\*args*, *\*\*kwargs*) and
      +   returns a :class:`Future` representing the execution of the callable.
      +
      +::
      +
      +    with ThreadPoolExecutor(max_workers=1) as executor:
      +        future = executor.submit(pow, 323, 1235)
      +        print(future.result())
      +
      +.. method:: Executor.map(func, *iterables, timeout=None)
      +
      +   Equivalent to map(*func*, *\*iterables*) but func is executed asynchronously
      +   and several calls to *func* may be made concurrently. The returned iterator
      +   raises a :exc:`TimeoutError` if :meth:`__next__()` is called and the result
      +   isn't available after *timeout* seconds from the original call to
      +   :meth:`map()`. *timeout* can be an int or float. If *timeout* is not
      +   specified or ``None`` then there is no limit to the wait time. If a call
      +   raises an exception then that exception will be raised when its value is
      +   retrieved from the iterator.
      +
      +.. method:: Executor.shutdown(wait=True)
      +
      +   Signal the executor that it should free any resources that it is using when
      +   the currently pending futures are done executing. Calls to
      +   :meth:`Executor.submit` and :meth:`Executor.map` made after shutdown will
      +   raise :exc:`RuntimeError`.
      +
      +   If *wait* is `True` then this method will not return until all the pending
      +   futures are done executing and the resources associated with the executor
      +   have been freed. If *wait* is `False` then this method will return
      +   immediately and the resources associated with the executor will be freed
      +   when all pending futures are done executing. Regardless of the value of
      +   *wait*, the entire Python program will not exit until all pending futures
      +   are done executing.
      +
      +   You can avoid having to call this method explicitly if you use the `with`
      +   statement, which will shutdown the `Executor` (waiting as if
      +   `Executor.shutdown` were called with *wait* set to `True`):
      +
      +::
      +
      +    import shutil
      +    with ThreadPoolExecutor(max_workers=4) as e:
      +        e.submit(shutil.copy, 'src1.txt', 'dest1.txt')
      +        e.submit(shutil.copy, 'src2.txt', 'dest2.txt')
      +        e.submit(shutil.copy, 'src3.txt', 'dest3.txt')
      +        e.submit(shutil.copy, 'src3.txt', 'dest4.txt')
      +
      +
      +ThreadPoolExecutor Objects
      +--------------------------
      +
      +The :class:`ThreadPoolExecutor` class is an :class:`Executor` subclass that uses
      +a pool of threads to execute calls asynchronously.
      +
      +Deadlock can occur when the callable associated with a :class:`Future` waits on
      +the results of another :class:`Future`. For example:
      +
      +::
      +
      +    import time
      +    def wait_on_b():
      +        time.sleep(5)
      +        print(b.result())  # b will never complete because it is waiting on a.
      +        return 5
      +
      +    def wait_on_a():
      +        time.sleep(5)
      +        print(a.result())  # a will never complete because it is waiting on b.
      +        return 6
      +
      +
      +    executor = ThreadPoolExecutor(max_workers=2)
      +    a = executor.submit(wait_on_b)
      +    b = executor.submit(wait_on_a)
      +
      +And:
      +
      +::
      +
      +    def wait_on_future():
      +        f = executor.submit(pow, 5, 2)
      +        # This will never complete because there is only one worker thread and
      +        # it is executing this function.
      +        print(f.result())
      +    
      +    executor = ThreadPoolExecutor(max_workers=1)
      +    executor.submit(wait_on_future)
      +
      +.. class:: ThreadPoolExecutor(max_workers)
      +
   Executes calls asynchronously using a pool of at most *max_workers* threads.
      +
      +.. _threadpoolexecutor-example:
      +
      +ThreadPoolExecutor Example
      +^^^^^^^^^^^^^^^^^^^^^^^^^^
      +::
      +
      +    from concurrent import futures
      +    import urllib.request
      +    
      +    URLS = ['http://www.foxnews.com/',
      +            'http://www.cnn.com/',
      +            'http://europe.wsj.com/',
      +            'http://www.bbc.co.uk/',
      +            'http://some-made-up-domain.com/']
      +    
      +    def load_url(url, timeout):
      +        return urllib.request.urlopen(url, timeout=timeout).read()
      +    
      +    with futures.ThreadPoolExecutor(max_workers=5) as executor:
      +        future_to_url = dict((executor.submit(load_url, url, 60), url)
      +                             for url in URLS)
      +    
      +        for future in futures.as_completed(future_to_url):
      +            url = future_to_url[future]
      +            if future.exception() is not None:
      +                print('%r generated an exception: %s' % (url,
      +                                                         future.exception()))
      +            else:
      +                print('%r page is %d bytes' % (url, len(future.result())))
      +
      +ProcessPoolExecutor Objects
      +---------------------------
      +
      +The :class:`ProcessPoolExecutor` class is an :class:`Executor` subclass that
      +uses a pool of processes to execute calls asynchronously.
      +:class:`ProcessPoolExecutor` uses the :mod:`multiprocessing` module, which
      +allows it to side-step the :term:`Global Interpreter Lock` but also means that
      +only picklable objects can be executed and returned.
      +
      +Calling :class:`Executor` or :class:`Future` methods from a callable submitted
      +to a :class:`ProcessPoolExecutor` will result in deadlock.
      +
      +.. class:: ProcessPoolExecutor(max_workers=None)
      +
      +   Executes calls asynchronously using a pool of at most *max_workers*
      +   processes. If *max_workers* is ``None`` or not given then as many worker
      +   processes will be created as the machine has processors.
      +
      +.. _processpoolexecutor-example:
      +
      +ProcessPoolExecutor Example
      +^^^^^^^^^^^^^^^^^^^^^^^^^^^
      +::
      +
      +   import math
      +
      +   PRIMES = [
      +       112272535095293,
      +       112582705942171,
      +       112272535095293,
      +       115280095190773,
      +       115797848077099,
      +       1099726899285419]
      +
      +   def is_prime(n):
      +       if n % 2 == 0:
      +           return False
      +
      +       sqrt_n = int(math.floor(math.sqrt(n)))
      +       for i in range(3, sqrt_n + 1, 2):
      +           if n % i == 0:
      +               return False
      +       return True
      +
      +   def main():
      +       with futures.ProcessPoolExecutor() as executor:
      +           for number, prime in zip(PRIMES, executor.map(is_prime, PRIMES)):
      +               print('%d is prime: %s' % (number, prime))
      +
      +   if __name__ == '__main__':
      +       main()
      +
      +Future Objects
      +--------------
      +
      +The :class:`Future` class encapsulates the asynchronous execution of a callable.
      +:class:`Future` instances are created by :meth:`Executor.submit`.
      +
      +.. method:: Future.cancel()
      +
      +   Attempt to cancel the call. If the call is currently being executed then
      +   it cannot be cancelled and the method will return `False`, otherwise the call
      +   will be cancelled and the method will return `True`.
      +
      +.. method:: Future.cancelled()
      +
      +   Return `True` if the call was successfully cancelled.
      +
      +.. method:: Future.running()
      +
      +   Return `True` if the call is currently being executed and cannot be
      +   cancelled.
      +
      +.. method:: Future.done()
      +
      +   Return `True` if the call was successfully cancelled or finished running.
      +
      +.. method:: Future.result(timeout=None)
      +
      +   Return the value returned by the call. If the call hasn't yet completed then
      +   this method will wait up to *timeout* seconds. If the call hasn't completed
      +   in *timeout* seconds then a :exc:`TimeoutError` will be raised. *timeout* can
      +   be an int or float. If *timeout* is not specified or ``None`` then there is no
      +   limit to the wait time.
      +
      +   If the future is cancelled before completing then :exc:`CancelledError` will
      +   be raised.
      +
      +   If the call raised then this method will raise the same exception.
      +
      +.. method:: Future.exception(timeout=None)
      +
      +   Return the exception raised by the call. If the call hasn't yet completed
      +   then this method will wait up to *timeout* seconds. If the call hasn't
      +   completed in *timeout* seconds then a :exc:`TimeoutError` will be raised.
      +   *timeout* can be an int or float. If *timeout* is not specified or ``None``
      +   then there is no limit to the wait time.
      +
      +   If the future is cancelled before completing then :exc:`CancelledError` will
      +   be raised.
      +
      +   If the call completed without raising then ``None`` is returned.   
      +
      +.. method:: Future.add_done_callback(fn)
      +
      +   Attaches the callable *fn* to the future. *fn* will be called, with the
      +   future as its only argument, when the future is cancelled or finishes
      +   running.
      +
      +   Added callables are called in the order that they were added and are always
      +   called in a thread belonging to the process that added them. If the callable
      +   raises an :exc:`Exception` then it will be logged and ignored. If the
      +   callable raises another :exc:`BaseException` then the behavior is not
      +   defined.
      +
      +   If the future has already completed or been cancelled then *fn* will be
      +   called immediately.
      +
      +Internal Future Methods
      +^^^^^^^^^^^^^^^^^^^^^^^
      +
      +The following :class:`Future` methods are meant for use in unit tests and
      +:class:`Executor` implementations.
      +
      +.. method:: Future.set_running_or_notify_cancel()
      +
      +   This method should only be called by :class:`Executor` implementations before
      +   executing the work associated with the :class:`Future` and by unit tests.
      +
      +   If the method returns `False` then the :class:`Future` was cancelled i.e.
      +   :meth:`Future.cancel` was called and returned `True`. Any threads waiting
      +   on the :class:`Future` completing (i.e. through :func:`as_completed` or
      +   :func:`wait`) will be woken up.
      +
      +   If the method returns `True` then the :class:`Future` was not cancelled
      +   and has been put in the running state i.e. calls to
      +   :meth:`Future.running` will return `True`.
      +
      +   This method can only be called once and cannot be called after
      +   :meth:`Future.set_result` or :meth:`Future.set_exception` have been
      +   called.
      +
      +.. method:: Future.set_result(result)
      +
      +   Sets the result of the work associated with the :class:`Future` to *result*.
      +
      +   This method should only be used by Executor implementations and unit tests.
      +
      +.. method:: Future.set_exception(exception)
      +
      +   Sets the result of the work associated with the :class:`Future` to the
      +   :class:`Exception` *exception*.
      +
      +   This method should only be used by Executor implementations and unit tests.
      +
      +Module Functions
      +----------------
      +
      +.. function:: wait(fs, timeout=None, return_when=ALL_COMPLETED)
      +
      +   Wait for the :class:`Future` instances (possibly created by different
      +   :class:`Executor` instances) given by *fs*  to complete. Returns a named
      +   2-tuple of sets. The first set, named "done", contains the futures that
      +   completed (finished or were cancelled) before the wait completed. The second
      +   set, named "not_done", contains uncompleted futures.
      +
      +   *timeout* can be used to control the maximum number of seconds to wait before
      +   returning. *timeout* can be an int or float. If *timeout* is not specified or
      +   ``None`` then there is no limit to the wait time.
      +
      +   *return_when* indicates when this function should return. It must be one of
      +   the following constants:
      +
      +      +-----------------------------+----------------------------------------+
      +      | Constant                    | Description                            |
      +      +=============================+========================================+
      +      | :const:`FIRST_COMPLETED`    | The function will return when any      |
      +      |                             | future finishes or is cancelled.       |
      +      +-----------------------------+----------------------------------------+
      +      | :const:`FIRST_EXCEPTION`    | The function will return when any      |
      +      |                             | future finishes by raising an          |
      +      |                             | exception. If no future raises an      |
      +      |                             | exception then it is equivalent to     |
      +      |                             | `ALL_COMPLETED`.                       |
      +      +-----------------------------+----------------------------------------+
      +      | :const:`ALL_COMPLETED`      | The function will return when all      |
      +      |                             | futures finish or are cancelled.       |
      +      +-----------------------------+----------------------------------------+
      +
      +.. function:: as_completed(fs, timeout=None)
      +
      +   Returns an iterator over the :class:`Future` instances (possibly created by
      +   different :class:`Executor` instances) given by *fs* that yields futures as
      +   they complete (finished or were cancelled). Any futures given by *fs* that
      +   are duplicated will be returned once. Any futures that completed
      +   before :func:`as_completed` is called will be yielded first.  The returned
      +   iterator raises a :exc:`TimeoutError` if :meth:`~iterator.__next__` is
      +   called and the result isn't available after *timeout* seconds from the
      +   original call to :func:`as_completed`.  *timeout* can be an int or float.
      +   If *timeout* is not specified or ``None``, there is no limit to the wait
      +   time.
      diff --git a/python/futures/docs/make.bat b/python/futures/docs/make.bat
      new file mode 100644
      index 000000000..3e8021b56
      --- /dev/null
      +++ b/python/futures/docs/make.bat
      @@ -0,0 +1,112 @@
      +@ECHO OFF
      +
      +REM Command file for Sphinx documentation
      +
      +set SPHINXBUILD=sphinx-build
      +set ALLSPHINXOPTS=-d _build/doctrees %SPHINXOPTS% .
      +if NOT "%PAPER%" == "" (
      +	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
      +)
      +
      +if "%1" == "" goto help
      +
      +if "%1" == "help" (
      +	:help
      +	echo.Please use `make ^<target^>` where ^<target^> is one of
      +	echo.  html      to make standalone HTML files
      +	echo.  dirhtml   to make HTML files named index.html in directories
      +	echo.  pickle    to make pickle files
      +	echo.  json      to make JSON files
      +	echo.  htmlhelp  to make HTML files and a HTML help project
      +	echo.  qthelp    to make HTML files and a qthelp project
      +	echo.  latex     to make LaTeX files, you can set PAPER=a4 or PAPER=letter
      +	echo.  changes   to make an overview over all changed/added/deprecated items
      +	echo.  linkcheck to check all external links for integrity
      +	echo.  doctest   to run all doctests embedded in the documentation if enabled
      +	goto end
      +)
      +
      +if "%1" == "clean" (
      +	for /d %%i in (_build\*) do rmdir /q /s %%i
      +	del /q /s _build\*
      +	goto end
      +)
      +
      +if "%1" == "html" (
      +	%SPHINXBUILD% -b html %ALLSPHINXOPTS% _build/html
      +	echo.
      +	echo.Build finished. The HTML pages are in _build/html.
      +	goto end
      +)
      +
      +if "%1" == "dirhtml" (
      +	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% _build/dirhtml
      +	echo.
      +	echo.Build finished. The HTML pages are in _build/dirhtml.
      +	goto end
      +)
      +
      +if "%1" == "pickle" (
      +	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% _build/pickle
      +	echo.
      +	echo.Build finished; now you can process the pickle files.
      +	goto end
      +)
      +
      +if "%1" == "json" (
      +	%SPHINXBUILD% -b json %ALLSPHINXOPTS% _build/json
      +	echo.
      +	echo.Build finished; now you can process the JSON files.
      +	goto end
      +)
      +
      +if "%1" == "htmlhelp" (
      +	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% _build/htmlhelp
      +	echo.
      +	echo.Build finished; now you can run HTML Help Workshop with the ^
      +.hhp project file in _build/htmlhelp.
      +	goto end
      +)
      +
      +if "%1" == "qthelp" (
      +	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% _build/qthelp
      +	echo.
      +	echo.Build finished; now you can run "qcollectiongenerator" with the ^
      +.qhcp project file in _build/qthelp, like this:
      +	echo.^> qcollectiongenerator _build\qthelp\futures.qhcp
      +	echo.To view the help file:
      +	echo.^> assistant -collectionFile _build\qthelp\futures.ghc
      +	goto end
      +)
      +
      +if "%1" == "latex" (
      +	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% _build/latex
      +	echo.
      +	echo.Build finished; the LaTeX files are in _build/latex.
      +	goto end
      +)
      +
      +if "%1" == "changes" (
      +	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% _build/changes
      +	echo.
      +	echo.The overview file is in _build/changes.
      +	goto end
      +)
      +
      +if "%1" == "linkcheck" (
      +	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% _build/linkcheck
      +	echo.
      +	echo.Link check complete; look for any errors in the above output ^
      +or in _build/linkcheck/output.txt.
      +	goto end
      +)
      +
      +if "%1" == "doctest" (
      +	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% _build/doctest
      +	echo.
      +	echo.Testing of doctests in the sources finished, look at the ^
      +results in _build/doctest/output.txt.
      +	goto end
      +)
      +
      +:end
      diff --git a/python/futures/primes.py b/python/futures/primes.py
      new file mode 100644
      index 000000000..0da2b3e64
      --- /dev/null
      +++ b/python/futures/primes.py
      @@ -0,0 +1,50 @@
      +from __future__ import with_statement
      +import math
      +import time
      +import sys
      +
      +from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
      +
      +PRIMES = [
      +    112272535095293,
      +    112582705942171,
      +    112272535095293,
      +    115280095190773,
      +    115797848077099,
      +    117450548693743,
      +    993960000099397]
      +
      +def is_prime(n):
      +    if n % 2 == 0:
      +        return False
      +
      +    sqrt_n = int(math.floor(math.sqrt(n)))
      +    for i in range(3, sqrt_n + 1, 2):
      +        if n % i == 0:
      +            return False
      +    return True
      +
      +def sequential():
      +    return list(map(is_prime, PRIMES))
      +
      +def with_process_pool_executor():
      +    with ProcessPoolExecutor(10) as executor:
      +        return list(executor.map(is_prime, PRIMES))
      +
      +def with_thread_pool_executor():
      +    with ThreadPoolExecutor(10) as executor:
      +        return list(executor.map(is_prime, PRIMES))
      +
      +def main():
      +    for name, fn in [('sequential', sequential),
      +                     ('processes', with_process_pool_executor),
      +                     ('threads', with_thread_pool_executor)]:
      +        sys.stdout.write('%s: ' % name.ljust(12))
      +        start = time.time()
      +        if fn() != [True] * len(PRIMES):
      +            sys.stdout.write('failed\n')
      +        else:
      +            sys.stdout.write('%.2f seconds\n' % (time.time() - start))
      +
      +if __name__ == '__main__':
      +    main()
      diff --git a/python/futures/setup.cfg b/python/futures/setup.cfg
      new file mode 100644
      index 000000000..e04dbabe3
      --- /dev/null
      +++ b/python/futures/setup.cfg
      @@ -0,0 +1,12 @@
      +[build_sphinx]
      +build-dir = build/sphinx
      +source-dir = docs
      +
      +[egg_info]
      +tag_build = 
      +tag_date = 0
      +tag_svn_revision = 0
      +
      +[upload_docs]
      +upload-dir = build/sphinx/html
      +
      diff --git a/python/futures/setup.py b/python/futures/setup.py
      new file mode 100755
      index 000000000..7c1d40ee2
      --- /dev/null
      +++ b/python/futures/setup.py
      @@ -0,0 +1,27 @@
      +#!/usr/bin/env python
      +
      +extras = {}
      +try:
      +    from setuptools import setup
      +    extras['zip_safe'] = False
      +except ImportError:
      +    from distutils.core import setup
      +
      +setup(name='futures',
      +      version='3.0.2',
      +      description='Backport of the concurrent.futures package from Python 3.2',
      +      author='Brian Quinlan',
      +      author_email='brian@sweetapp.com',
      +      maintainer='Alex Gronholm',
      +      maintainer_email='alex.gronholm+pypi@nextday.fi',
      +      url='https://github.com/agronholm/pythonfutures',
      +      packages=['concurrent', 'concurrent.futures'],
      +      license='BSD',
      +      classifiers=['License :: OSI Approved :: BSD License',
      +                   'Development Status :: 5 - Production/Stable',
      +                   'Intended Audience :: Developers',
      +                   'Programming Language :: Python :: 2.6',
      +                   'Programming Language :: Python :: 2.7',
      +                   'Programming Language :: Python :: 2 :: Only'],
      +      **extras
      +      )
      diff --git a/python/futures/test_futures.py b/python/futures/test_futures.py
      new file mode 100644
      index 000000000..ace340cb0
      --- /dev/null
      +++ b/python/futures/test_futures.py
      @@ -0,0 +1,724 @@
      +import os
      +import subprocess
      +import sys
      +import threading
      +import functools
      +import contextlib
      +import logging
      +import re
      +import time
      +from StringIO import StringIO
      +from test import test_support
      +
      +from concurrent import futures
      +from concurrent.futures._base import (
      +    PENDING, RUNNING, CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED, Future)
      +
      +try:
      +    import unittest2 as unittest
      +except ImportError:
      +    import unittest
      +
      +
      +def reap_threads(func):
      +    """Use this function when threads are being used.  This will
      +    ensure that the threads are cleaned up even when the test fails.
      +    If threading is unavailable this function does nothing.
      +    """
      +    @functools.wraps(func)
      +    def decorator(*args): 
      +        key = test_support.threading_setup()
      +        try:
      +            return func(*args)
      +        finally:
      +            test_support.threading_cleanup(*key)
      +    return decorator
      +
      +
      +# Executing the interpreter in a subprocess
      +def _assert_python(expected_success, *args, **env_vars):
      +    cmd_line = [sys.executable]
      +    if not env_vars:
      +        cmd_line.append('-E')
      +    # Need to preserve the original environment, for in-place testing of
      +    # shared library builds.
      +    env = os.environ.copy()
      +    # But a special flag that can be set to override -- in this case, the
      +    # caller is responsible to pass the full environment.
      +    if env_vars.pop('__cleanenv', None):
      +        env = {}
      +    env.update(env_vars) 
      +    cmd_line.extend(args)
      +    p = subprocess.Popen(cmd_line, stdin=subprocess.PIPE,
      +                         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
      +                         env=env)
      +    try:
      +        out, err = p.communicate()
      +    finally:
      +        subprocess._cleanup()
      +        p.stdout.close()
      +        p.stderr.close()
      +    rc = p.returncode
      +    err = strip_python_stderr(err)
      +    if (rc and expected_success) or (not rc and not expected_success):
      +        raise AssertionError(
      +            "Process return code is %d, "
      +            "stderr follows:\n%s" % (rc, err.decode('ascii', 'ignore')))
      +    return rc, out, err
      +
      +
      +def assert_python_ok(*args, **env_vars):
      +    """
      +    Assert that running the interpreter with `args` and optional environment
      +    variables `env_vars` is ok and return a (return code, stdout, stderr) tuple.
      +    """
      +    return _assert_python(True, *args, **env_vars)
      +
      +
      +def strip_python_stderr(stderr): 
      +    """Strip the stderr of a Python process from potential debug output
      +    emitted by the interpreter.
      +
      +    This will typically be run on the result of the communicate() method
      +    of a subprocess.Popen object.
      +    """
      +    stderr = re.sub(r"\[\d+ refs\]\r?\n?$".encode(), "".encode(), stderr).strip()
      +    return stderr
      +
      +
      +@contextlib.contextmanager
      +def captured_stderr():
      +    """Return a context manager used by captured_stdout/stdin/stderr
      +    that temporarily replaces the sys stream *stream_name* with a StringIO."""
      +    logging_stream = StringIO()
      +    handler = logging.StreamHandler(logging_stream)
      +    logging.root.addHandler(handler)
      +
      +    try:
      +        yield logging_stream
      +    finally:
      +        logging.root.removeHandler(handler)
      +
      +
      +def create_future(state=PENDING, exception=None, result=None):
      +    f = Future()
      +    f._state = state
      +    f._exception = exception
      +    f._result = result
      +    return f
      +
      +
      +PENDING_FUTURE = create_future(state=PENDING)
      +RUNNING_FUTURE = create_future(state=RUNNING)
      +CANCELLED_FUTURE = create_future(state=CANCELLED)
      +CANCELLED_AND_NOTIFIED_FUTURE = create_future(state=CANCELLED_AND_NOTIFIED)
      +EXCEPTION_FUTURE = create_future(state=FINISHED, exception=IOError())
      +SUCCESSFUL_FUTURE = create_future(state=FINISHED, result=42)
      +
      +
      +def mul(x, y):
      +    return x * y
      +
      +
      +def sleep_and_raise(t):
      +    time.sleep(t)
      +    raise Exception('this is an exception')
      +
      +def sleep_and_print(t, msg):
      +    time.sleep(t)
      +    print(msg)
      +    sys.stdout.flush()
      +
      +
      +class ExecutorMixin:
      +    worker_count = 5
      +
      +    def setUp(self):
      +        self.t1 = time.time()
      +        try:
      +            self.executor = self.executor_type(max_workers=self.worker_count)
      +        except NotImplementedError:
      +            e = sys.exc_info()[1]
      +            self.skipTest(str(e))
      +        self._prime_executor()
      +
      +    def tearDown(self):
      +        self.executor.shutdown(wait=True)
      +        dt = time.time() - self.t1
      +        if test_support.verbose:
      +            print("%.2fs" % dt)
      +        self.assertLess(dt, 60, "synchronization issue: test lasted too long")
      +
      +    def _prime_executor(self):
      +        # Make sure that the executor is ready to do work before running the
      +        # tests. This should reduce the probability of timeouts in the tests.
      +        futures = [self.executor.submit(time.sleep, 0.1)
      +                   for _ in range(self.worker_count)]
      +
      +        for f in futures:
      +            f.result()
      +
      +
      +class ThreadPoolMixin(ExecutorMixin):
      +    executor_type = futures.ThreadPoolExecutor
      +
      +
      +class ProcessPoolMixin(ExecutorMixin):
      +    executor_type = futures.ProcessPoolExecutor
      +
      +
      +class ExecutorShutdownTest(unittest.TestCase):
      +    def test_run_after_shutdown(self):
      +        self.executor.shutdown()
      +        self.assertRaises(RuntimeError,
      +                          self.executor.submit,
      +                          pow, 2, 5)
      +
      +    def test_interpreter_shutdown(self):
      +        # Test the atexit hook for shutdown of worker threads and processes
      +        rc, out, err = assert_python_ok('-c', """if 1:
      +            from concurrent.futures import %s
      +            from time import sleep
      +            from test_futures import sleep_and_print
      +            t = %s(5)
      +            t.submit(sleep_and_print, 1.0, "apple")
      +            """ % (self.executor_type.__name__, self.executor_type.__name__))
      +        # Errors in atexit hooks don't change the process exit code, check
      +        # stderr manually.
      +        self.assertFalse(err)
      +        self.assertEqual(out.strip(), "apple".encode())
      +
      +    def test_hang_issue12364(self):
      +        fs = [self.executor.submit(time.sleep, 0.1) for _ in range(50)]
      +        self.executor.shutdown()
      +        for f in fs:
      +            f.result()
      +
      +
      +class ThreadPoolShutdownTest(ThreadPoolMixin, ExecutorShutdownTest):
      +    def _prime_executor(self):
      +        pass
      +
      +    def test_threads_terminate(self):
      +        self.executor.submit(mul, 21, 2)
      +        self.executor.submit(mul, 6, 7)
      +        self.executor.submit(mul, 3, 14)
      +        self.assertEqual(len(self.executor._threads), 3)
      +        self.executor.shutdown()
      +        for t in self.executor._threads:
      +            t.join()
      +
      +    def test_context_manager_shutdown(self):
      +        with futures.ThreadPoolExecutor(max_workers=5) as e:
      +            executor = e
      +            self.assertEqual(list(e.map(abs, range(-5, 5))),
      +                             [5, 4, 3, 2, 1, 0, 1, 2, 3, 4])
      +
      +        for t in executor._threads:
      +            t.join()
      +
      +    def test_del_shutdown(self):
      +        executor = futures.ThreadPoolExecutor(max_workers=5)
      +        executor.map(abs, range(-5, 5))
      +        threads = executor._threads
      +        del executor
      +
      +        for t in threads:
      +            t.join()
      +
      +
      +class ProcessPoolShutdownTest(ProcessPoolMixin, ExecutorShutdownTest):
      +    def _prime_executor(self):
      +        pass
      +
      +    def test_processes_terminate(self):
      +        self.executor.submit(mul, 21, 2)
      +        self.executor.submit(mul, 6, 7)
      +        self.executor.submit(mul, 3, 14)
      +        self.assertEqual(len(self.executor._processes), 5)
      +        processes = self.executor._processes
      +        self.executor.shutdown()
      +
      +        for p in processes:
      +            p.join()
      +
      +    def test_context_manager_shutdown(self):
      +        with futures.ProcessPoolExecutor(max_workers=5) as e:
      +            processes = e._processes
      +            self.assertEqual(list(e.map(abs, range(-5, 5))),
      +                             [5, 4, 3, 2, 1, 0, 1, 2, 3, 4])
      +
      +        for p in processes:
      +            p.join()
      +
      +    def test_del_shutdown(self):
      +        executor = futures.ProcessPoolExecutor(max_workers=5)
      +        list(executor.map(abs, range(-5, 5)))
      +        queue_management_thread = executor._queue_management_thread
      +        processes = executor._processes
      +        del executor
      +
      +        queue_management_thread.join()
      +        for p in processes:
      +            p.join()
      +
      +
      +class WaitTests(unittest.TestCase):
      +
      +    def test_first_completed(self):
      +        future1 = self.executor.submit(mul, 21, 2)
      +        future2 = self.executor.submit(time.sleep, 1.5)
      +
      +        done, not_done = futures.wait(
      +                [CANCELLED_FUTURE, future1, future2],
      +                 return_when=futures.FIRST_COMPLETED)
      +
      +        self.assertEqual(set([future1]), done)
      +        self.assertEqual(set([CANCELLED_FUTURE, future2]), not_done)
      +
      +    def test_first_completed_some_already_completed(self):
      +        future1 = self.executor.submit(time.sleep, 1.5)
      +
      +        finished, pending = futures.wait(
      +                 [CANCELLED_AND_NOTIFIED_FUTURE, SUCCESSFUL_FUTURE, future1],
      +                 return_when=futures.FIRST_COMPLETED)
      +
      +        self.assertEqual(
      +                set([CANCELLED_AND_NOTIFIED_FUTURE, SUCCESSFUL_FUTURE]),
      +                finished)
      +        self.assertEqual(set([future1]), pending)
      +
      +    def test_first_exception(self):
      +        future1 = self.executor.submit(mul, 2, 21)
      +        future2 = self.executor.submit(sleep_and_raise, 1.5)
      +        future3 = self.executor.submit(time.sleep, 3)
      +
      +        finished, pending = futures.wait(
      +                [future1, future2, future3],
      +                return_when=futures.FIRST_EXCEPTION)
      +
      +        self.assertEqual(set([future1, future2]), finished)
      +        self.assertEqual(set([future3]), pending)
      +
      +    def test_first_exception_some_already_complete(self):
      +        future1 = self.executor.submit(divmod, 21, 0)
      +        future2 = self.executor.submit(time.sleep, 1.5)
      +
      +        finished, pending = futures.wait(
      +                [SUCCESSFUL_FUTURE,
      +                 CANCELLED_FUTURE,
      +                 CANCELLED_AND_NOTIFIED_FUTURE,
      +                 future1, future2],
      +                return_when=futures.FIRST_EXCEPTION)
      +
      +        self.assertEqual(set([SUCCESSFUL_FUTURE,
      +                              CANCELLED_AND_NOTIFIED_FUTURE,
      +                              future1]), finished)
      +        self.assertEqual(set([CANCELLED_FUTURE, future2]), pending)
      +
      +    def test_first_exception_one_already_failed(self):
      +        future1 = self.executor.submit(time.sleep, 2)
      +
      +        finished, pending = futures.wait(
      +                 [EXCEPTION_FUTURE, future1],
      +                 return_when=futures.FIRST_EXCEPTION)
      +
      +        self.assertEqual(set([EXCEPTION_FUTURE]), finished)
      +        self.assertEqual(set([future1]), pending)
      +
      +    def test_all_completed(self):
      +        future1 = self.executor.submit(divmod, 2, 0)
      +        future2 = self.executor.submit(mul, 2, 21)
      +
      +        finished, pending = futures.wait(
      +                [SUCCESSFUL_FUTURE,
      +                 CANCELLED_AND_NOTIFIED_FUTURE,
      +                 EXCEPTION_FUTURE,
      +                 future1,
      +                 future2],
      +                return_when=futures.ALL_COMPLETED)
      +
      +        self.assertEqual(set([SUCCESSFUL_FUTURE,
      +                              CANCELLED_AND_NOTIFIED_FUTURE,
      +                              EXCEPTION_FUTURE,
      +                              future1,
      +                              future2]), finished)
      +        self.assertEqual(set(), pending)
      +
      +    def test_timeout(self):
      +        future1 = self.executor.submit(mul, 6, 7)
      +        future2 = self.executor.submit(time.sleep, 3)
      +
      +        finished, pending = futures.wait(
      +                [CANCELLED_AND_NOTIFIED_FUTURE,
      +                 EXCEPTION_FUTURE,
      +                 SUCCESSFUL_FUTURE,
      +                 future1, future2],
      +                timeout=1.5,
      +                return_when=futures.ALL_COMPLETED)
      +
      +        self.assertEqual(set([CANCELLED_AND_NOTIFIED_FUTURE,
      +                              EXCEPTION_FUTURE,
      +                              SUCCESSFUL_FUTURE,
      +                              future1]), finished)
      +        self.assertEqual(set([future2]), pending)
      +
      +
      +class ThreadPoolWaitTests(ThreadPoolMixin, WaitTests):
      +
      +    def test_pending_calls_race(self):
      +        # Issue #14406: multi-threaded race condition when waiting on all
      +        # futures.
      +        event = threading.Event()
      +        def future_func():
      +            event.wait()
      +        oldswitchinterval = sys.getcheckinterval()
      +        sys.setcheckinterval(1)
      +        try:
      +            fs = set(self.executor.submit(future_func) for i in range(100))
      +            event.set()
      +            futures.wait(fs, return_when=futures.ALL_COMPLETED)
      +        finally:
      +            sys.setcheckinterval(oldswitchinterval)
      +
      +
# Runs the shared WaitTests against a process-backed executor.
class ProcessPoolWaitTests(ProcessPoolMixin, WaitTests):
    pass
      +
      +
      +class AsCompletedTests(unittest.TestCase):
      +    # TODO(brian@sweetapp.com): Should have a test with a non-zero timeout.
      +    def test_no_timeout(self):
      +        future1 = self.executor.submit(mul, 2, 21)
      +        future2 = self.executor.submit(mul, 7, 6)
      +
      +        completed = set(futures.as_completed(
      +                [CANCELLED_AND_NOTIFIED_FUTURE,
      +                 EXCEPTION_FUTURE,
      +                 SUCCESSFUL_FUTURE,
      +                 future1, future2]))
      +        self.assertEqual(set(
      +                [CANCELLED_AND_NOTIFIED_FUTURE,
      +                 EXCEPTION_FUTURE,
      +                 SUCCESSFUL_FUTURE,
      +                 future1, future2]),
      +                completed)
      +
      +    def test_zero_timeout(self):
      +        future1 = self.executor.submit(time.sleep, 2)
      +        completed_futures = set()
      +        try:
      +            for future in futures.as_completed(
      +                    [CANCELLED_AND_NOTIFIED_FUTURE,
      +                     EXCEPTION_FUTURE,
      +                     SUCCESSFUL_FUTURE,
      +                     future1],
      +                    timeout=0):
      +                completed_futures.add(future)
      +        except futures.TimeoutError:
      +            pass
      +
      +        self.assertEqual(set([CANCELLED_AND_NOTIFIED_FUTURE,
      +                              EXCEPTION_FUTURE,
      +                              SUCCESSFUL_FUTURE]),
      +                         completed_futures)
      +
      +    def test_duplicate_futures(self):
      +        # Issue 20367. Duplicate futures should not raise exceptions or give
      +        # duplicate responses.
      +        future1 = self.executor.submit(time.sleep, 2)
      +        completed = [f for f in futures.as_completed([future1,future1])]
      +        self.assertEqual(len(completed), 1)
      +
      +
# Runs the shared AsCompletedTests against a thread-backed executor.
class ThreadPoolAsCompletedTests(ThreadPoolMixin, AsCompletedTests):
    pass
      +
      +
# Runs the shared AsCompletedTests against a process-backed executor.
class ProcessPoolAsCompletedTests(ProcessPoolMixin, AsCompletedTests):
    pass
      +
      +
      +class ExecutorTest(unittest.TestCase):
      +    # Executor.shutdown() and context manager usage is tested by
      +    # ExecutorShutdownTest.
      +    def test_submit(self):
      +        future = self.executor.submit(pow, 2, 8)
      +        self.assertEqual(256, future.result())
      +
      +    def test_submit_keyword(self):
      +        future = self.executor.submit(mul, 2, y=8)
      +        self.assertEqual(16, future.result())
      +
      +    def test_map(self):
      +        self.assertEqual(
      +                list(self.executor.map(pow, range(10), range(10))),
      +                list(map(pow, range(10), range(10))))
      +
      +    def test_map_exception(self):
      +        i = self.executor.map(divmod, [1, 1, 1, 1], [2, 3, 0, 5])
      +        self.assertEqual(next(i), (0, 1))
      +        self.assertEqual(next(i), (0, 1))
      +        self.assertRaises(ZeroDivisionError, next, i)
      +
      +    def test_map_timeout(self):
      +        results = []
      +        try:
      +            for i in self.executor.map(time.sleep,
      +                                       [0, 0, 3],
      +                                       timeout=1.5):
      +                results.append(i)
      +        except futures.TimeoutError:
      +            pass
      +        else:
      +            self.fail('expected TimeoutError')
      +
      +        self.assertEqual([None, None], results)
      +
      +
      +class ThreadPoolExecutorTest(ThreadPoolMixin, ExecutorTest):
      +    def test_map_submits_without_iteration(self):
      +        """Tests verifying issue 11777."""
      +        finished = []
      +        def record_finished(n):
      +            finished.append(n)
      +
      +        self.executor.map(record_finished, range(10))
      +        self.executor.shutdown(wait=True)
      +        self.assertEqual(len(finished), 10)
      +
      +
# Runs the shared ExecutorTest against a process-backed executor.
class ProcessPoolExecutorTest(ProcessPoolMixin, ExecutorTest):
    pass
      +
      +
      +class FutureTests(unittest.TestCase):
      +    def test_done_callback_with_result(self):
      +        callback_result = [None]
      +        def fn(callback_future):
      +            callback_result[0] = callback_future.result()
      +
      +        f = Future()
      +        f.add_done_callback(fn)
      +        f.set_result(5)
      +        self.assertEqual(5, callback_result[0])
      +
      +    def test_done_callback_with_exception(self):
      +        callback_exception = [None]
      +        def fn(callback_future):
      +            callback_exception[0] = callback_future.exception()
      +
      +        f = Future()
      +        f.add_done_callback(fn)
      +        f.set_exception(Exception('test'))
      +        self.assertEqual(('test',), callback_exception[0].args)
      +
      +    def test_done_callback_with_cancel(self):
      +        was_cancelled = [None]
      +        def fn(callback_future):
      +            was_cancelled[0] = callback_future.cancelled()
      +
      +        f = Future()
      +        f.add_done_callback(fn)
      +        self.assertTrue(f.cancel())
      +        self.assertTrue(was_cancelled[0])
      +
      +    def test_done_callback_raises(self):
      +        with captured_stderr() as stderr:
      +            raising_was_called = [False]
      +            fn_was_called = [False]
      +
      +            def raising_fn(callback_future):
      +                raising_was_called[0] = True
      +                raise Exception('doh!')
      +
      +            def fn(callback_future):
      +                fn_was_called[0] = True
      +
      +            f = Future()
      +            f.add_done_callback(raising_fn)
      +            f.add_done_callback(fn)
      +            f.set_result(5)
      +            self.assertTrue(raising_was_called)
      +            self.assertTrue(fn_was_called)
      +            self.assertIn('Exception: doh!', stderr.getvalue())
      +
      +    def test_done_callback_already_successful(self):
      +        callback_result = [None]
      +        def fn(callback_future):
      +            callback_result[0] = callback_future.result()
      +
      +        f = Future()
      +        f.set_result(5)
      +        f.add_done_callback(fn)
      +        self.assertEqual(5, callback_result[0])
      +
      +    def test_done_callback_already_failed(self):
      +        callback_exception = [None]
      +        def fn(callback_future):
      +            callback_exception[0] = callback_future.exception()
      +
      +        f = Future()
      +        f.set_exception(Exception('test'))
      +        f.add_done_callback(fn)
      +        self.assertEqual(('test',), callback_exception[0].args)
      +
      +    def test_done_callback_already_cancelled(self):
      +        was_cancelled = [None]
      +        def fn(callback_future):
      +            was_cancelled[0] = callback_future.cancelled()
      +
      +        f = Future()
      +        self.assertTrue(f.cancel())
      +        f.add_done_callback(fn)
      +        self.assertTrue(was_cancelled[0])
      +
      +    def test_repr(self):
      +        self.assertRegexpMatches(repr(PENDING_FUTURE),
      +                                 '')
      +        self.assertRegexpMatches(repr(RUNNING_FUTURE),
      +                                 '')
      +        self.assertRegexpMatches(repr(CANCELLED_FUTURE),
      +                                 '')
      +        self.assertRegexpMatches(repr(CANCELLED_AND_NOTIFIED_FUTURE),
      +                                 '')
      +        self.assertRegexpMatches(
      +                repr(EXCEPTION_FUTURE),
      +                '')
      +        self.assertRegexpMatches(
      +                repr(SUCCESSFUL_FUTURE),
      +                '')
      +
      +    def test_cancel(self):
      +        f1 = create_future(state=PENDING)
      +        f2 = create_future(state=RUNNING)
      +        f3 = create_future(state=CANCELLED)
      +        f4 = create_future(state=CANCELLED_AND_NOTIFIED)
      +        f5 = create_future(state=FINISHED, exception=IOError())
      +        f6 = create_future(state=FINISHED, result=5)
      +
      +        self.assertTrue(f1.cancel())
      +        self.assertEqual(f1._state, CANCELLED)
      +
      +        self.assertFalse(f2.cancel())
      +        self.assertEqual(f2._state, RUNNING)
      +
      +        self.assertTrue(f3.cancel())
      +        self.assertEqual(f3._state, CANCELLED)
      +
      +        self.assertTrue(f4.cancel())
      +        self.assertEqual(f4._state, CANCELLED_AND_NOTIFIED)
      +
      +        self.assertFalse(f5.cancel())
      +        self.assertEqual(f5._state, FINISHED)
      +
      +        self.assertFalse(f6.cancel())
      +        self.assertEqual(f6._state, FINISHED)
      +
      +    def test_cancelled(self):
      +        self.assertFalse(PENDING_FUTURE.cancelled())
      +        self.assertFalse(RUNNING_FUTURE.cancelled())
      +        self.assertTrue(CANCELLED_FUTURE.cancelled())
      +        self.assertTrue(CANCELLED_AND_NOTIFIED_FUTURE.cancelled())
      +        self.assertFalse(EXCEPTION_FUTURE.cancelled())
      +        self.assertFalse(SUCCESSFUL_FUTURE.cancelled())
      +
      +    def test_done(self):
      +        self.assertFalse(PENDING_FUTURE.done())
      +        self.assertFalse(RUNNING_FUTURE.done())
      +        self.assertTrue(CANCELLED_FUTURE.done())
      +        self.assertTrue(CANCELLED_AND_NOTIFIED_FUTURE.done())
      +        self.assertTrue(EXCEPTION_FUTURE.done())
      +        self.assertTrue(SUCCESSFUL_FUTURE.done())
      +
      +    def test_running(self):
      +        self.assertFalse(PENDING_FUTURE.running())
      +        self.assertTrue(RUNNING_FUTURE.running())
      +        self.assertFalse(CANCELLED_FUTURE.running())
      +        self.assertFalse(CANCELLED_AND_NOTIFIED_FUTURE.running())
      +        self.assertFalse(EXCEPTION_FUTURE.running())
      +        self.assertFalse(SUCCESSFUL_FUTURE.running())
      +
      +    def test_result_with_timeout(self):
      +        self.assertRaises(futures.TimeoutError,
      +                          PENDING_FUTURE.result, timeout=0)
      +        self.assertRaises(futures.TimeoutError,
      +                          RUNNING_FUTURE.result, timeout=0)
      +        self.assertRaises(futures.CancelledError,
      +                          CANCELLED_FUTURE.result, timeout=0)
      +        self.assertRaises(futures.CancelledError,
      +                          CANCELLED_AND_NOTIFIED_FUTURE.result, timeout=0)
      +        self.assertRaises(IOError, EXCEPTION_FUTURE.result, timeout=0)
      +        self.assertEqual(SUCCESSFUL_FUTURE.result(timeout=0), 42)
      +
      +    def test_result_with_success(self):
      +        # TODO(brian@sweetapp.com): This test is timing dependant.
      +        def notification():
      +            # Wait until the main thread is waiting for the result.
      +            time.sleep(1)
      +            f1.set_result(42)
      +
      +        f1 = create_future(state=PENDING)
      +        t = threading.Thread(target=notification)
      +        t.start()
      +
      +        self.assertEqual(f1.result(timeout=5), 42)
      +
      +    def test_result_with_cancel(self):
      +        # TODO(brian@sweetapp.com): This test is timing dependant.
      +        def notification():
      +            # Wait until the main thread is waiting for the result.
      +            time.sleep(1)
      +            f1.cancel()
      +
      +        f1 = create_future(state=PENDING)
      +        t = threading.Thread(target=notification)
      +        t.start()
      +
      +        self.assertRaises(futures.CancelledError, f1.result, timeout=5)
      +
      +    def test_exception_with_timeout(self):
      +        self.assertRaises(futures.TimeoutError,
      +                          PENDING_FUTURE.exception, timeout=0)
      +        self.assertRaises(futures.TimeoutError,
      +                          RUNNING_FUTURE.exception, timeout=0)
      +        self.assertRaises(futures.CancelledError,
      +                          CANCELLED_FUTURE.exception, timeout=0)
      +        self.assertRaises(futures.CancelledError,
      +                          CANCELLED_AND_NOTIFIED_FUTURE.exception, timeout=0)
      +        self.assertTrue(isinstance(EXCEPTION_FUTURE.exception(timeout=0),
      +                                   IOError))
      +        self.assertEqual(SUCCESSFUL_FUTURE.exception(timeout=0), None)
      +
      +    def test_exception_with_success(self):
      +        def notification():
      +            # Wait until the main thread is waiting for the exception.
      +            time.sleep(1)
      +            with f1._condition:
      +                f1._state = FINISHED
      +                f1._exception = IOError()
      +                f1._condition.notify_all()
      +
      +        f1 = create_future(state=PENDING)
      +        t = threading.Thread(target=notification)
      +        t.start()
      +
      +        self.assertTrue(isinstance(f1.exception(timeout=5), IOError))
      +
@reap_threads
def test_main():
    """Run the full futures test suite, reaping child processes afterwards."""
    try:
        test_support.run_unittest(ProcessPoolExecutorTest,
                                  ThreadPoolExecutorTest,
                                  ProcessPoolWaitTests,
                                  ThreadPoolWaitTests,
                                  ProcessPoolAsCompletedTests,
                                  ThreadPoolAsCompletedTests,
                                  FutureTests,
                                  ProcessPoolShutdownTest,
                                  ThreadPoolShutdownTest)
    finally:
        # Clean up any leaked worker processes even when the suite fails.
        test_support.reap_children()

if __name__ == "__main__":
    test_main()
      diff --git a/python/futures/tox.ini b/python/futures/tox.ini
      new file mode 100644
      index 000000000..4948bd161
      --- /dev/null
      +++ b/python/futures/tox.ini
      @@ -0,0 +1,8 @@
      +[tox]
      +envlist = py26,py27
      +
      +[testenv]
      +commands={envpython} test_futures.py []
      +
      +[testenv:py26]
      +deps=unittest2
      diff --git a/python/gdbpp/gdbpp/__init__.py b/python/gdbpp/gdbpp/__init__.py
      new file mode 100644
      index 000000000..d20de23a7
      --- /dev/null
      +++ b/python/gdbpp/gdbpp/__init__.py
      @@ -0,0 +1,28 @@
      +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
      +# vim: set filetype=python:
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +import gdb
      +import gdb.printing
      +
class GeckoPrettyPrinter(object):
    """Class decorator that registers a gdb pretty-printer for Gecko types.

    Usage: @GeckoPrettyPrinter(name, regexp) on a printer class.  The class
    is added to the shared RegexpCollectionPrettyPrinter and returned
    unchanged.
    """
    # One shared collection for all Gecko printers; registered with gdb at
    # the bottom of this module.
    pp = gdb.printing.RegexpCollectionPrettyPrinter('GeckoPrettyPrinters')

    def __init__(self, name, regexp):
        # name: display name of the printer; regexp: pattern matched against
        # the type name to decide whether this printer applies.
        self.name = name
        self.regexp = regexp

    def __call__(self, wrapped):
        # Register the decorated printer class and hand it back untouched.
        GeckoPrettyPrinter.pp.add_printer(self.name, self.regexp, wrapped)
        return wrapped
      +
# These imports deliberately come after GeckoPrettyPrinter is defined: each
# submodule imports GeckoPrettyPrinter from this package to decorate its
# printers, so importing them any earlier would be a circular import.
import gdbpp.linkedlist
import gdbpp.owningthread
import gdbpp.smartptr
import gdbpp.string
import gdbpp.tarray
import gdbpp.thashtable

# Expose every printer collected by the decorators above to gdb.
gdb.printing.register_pretty_printer(None, GeckoPrettyPrinter.pp)
      diff --git a/python/gdbpp/gdbpp/linkedlist.py b/python/gdbpp/gdbpp/linkedlist.py
      new file mode 100644
      index 000000000..966f9b9c0
      --- /dev/null
      +++ b/python/gdbpp/gdbpp/linkedlist.py
      @@ -0,0 +1,49 @@
      +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
      +# vim: set filetype=python:
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +import gdb
      +import itertools
      +from gdbpp import GeckoPrettyPrinter
      +
      +# mfbt's LinkedList is a doubly-linked list where the items in the list store
      +# the next/prev pointers as part of themselves rather than the list structure be
      +# its own independent data structure.  This means:
      +# - Every item may belong to at most one LinkedList instance.
      +# - For our pretty printer, we only want to pretty-print the LinkedList object
      +#   itself.  We do not want to start printing every item in the list whenever
      +#   we run into a LinkedListElement.
@GeckoPrettyPrinter('mozilla::LinkedList', '^mozilla::LinkedList<.*>$')
class linkedlist_printer(object):
    """gdb pretty-printer that renders mozilla::LinkedList<T> as an array
    of T* element pointers."""

    def __init__(self, value):
        # value: the gdb.Value for the LinkedList<T> instance being printed.
        self.value = value
        # mfbt's LinkedList has the elements of the linked list subclass from
        # LinkedListElement.  We want its pointer type for casting purposes.
        #
        # (We want to list pointers since we expect all of these objects to be
        # complex enough that we don't want to automatically expand them.  The
        # LinkedListElement type itself isn't small.)
        self.t_ptr_type = value.type.template_argument(0).pointer()

    def children(self):
        """Yield ('index', T*) pairs for each element in list order."""
        # Walk mNext until we loop back around to the sentinel.  The sentinel
        # item always exists and in the zero-length base-case mNext == sentinel,
        # so extract that immediately and update it throughout the loop.
        sentinel = self.value['sentinel']
        pSentinel = sentinel.address
        pNext = sentinel['mNext']
        i = 0
        while pSentinel != pNext:
            list_elem = pNext.dereference()
            # Cast the LinkedListElement* to the element's own type before
            # yielding it.
            list_value = pNext.cast(self.t_ptr_type)
            yield ('%d' % i, list_value)
            pNext = list_elem['mNext']
            i += 1

    def to_string(self):
        # Show the concrete template type as the summary.
        return str(self.value.type)

    def display_hint(self):
        # Tell gdb to lay the children out like an array.
        return 'array'
      diff --git a/python/gdbpp/gdbpp/owningthread.py b/python/gdbpp/gdbpp/owningthread.py
      new file mode 100644
      index 000000000..d102bef24
      --- /dev/null
      +++ b/python/gdbpp/gdbpp/owningthread.py
      @@ -0,0 +1,24 @@
      +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
      +# vim: set filetype=python:
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +import gdb
      +from gdbpp import GeckoPrettyPrinter
      +
      +@GeckoPrettyPrinter('nsAutoOwningThread', '^nsAutoOwningThread$')
      +class owning_thread_printer(object):
      +    def __init__(self, value):
      +        self.value = value
      +
      +    def to_string(self):
      +        prthread_type = gdb.lookup_type('PRThread').pointer()
      +        prthread = self.value['mThread'].cast(prthread_type)
      +        name = prthread['name']
      +
      +        # if the thread doesn't have a name try to get its thread id (might not
      +        # work on !linux)
      +        name = prthread['tid']
      +
      +        return name if name else '(PRThread *) %s' % prthread
      diff --git a/python/gdbpp/gdbpp/smartptr.py b/python/gdbpp/gdbpp/smartptr.py
      new file mode 100644
      index 000000000..c35215426
      --- /dev/null
      +++ b/python/gdbpp/gdbpp/smartptr.py
      @@ -0,0 +1,55 @@
      +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
      +# vim: set filetype=python:
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +import gdb
      +from gdbpp import GeckoPrettyPrinter
      +
@GeckoPrettyPrinter('nsWeakPtr', '^nsCOMPtr$')
class weak_ptr_printer(object):
    """gdb pretty-printer for weak-reference nsCOMPtr wrappers: pierces the
    weak-reference proxy to show the referent and its dynamic type."""

    def __init__(self, value):
        # value: the gdb.Value for the smart-pointer wrapper.
        self.value = value

    def to_string(self):
        proxy = self.value['mRawPtr']
        # Null wrapper: show the static type and a null address.
        if not proxy:
            return '[(%s) 0x0]' % proxy.type

        # Cast the proxy to its dynamic type so we can reach mReferent.
        ref_type = proxy.dynamic_type
        weak_ptr = proxy.cast(ref_type).dereference()['mReferent']
        # Dead referent: fall back to the static type.
        if not weak_ptr:
            return '[(%s) %s]' % (weak_ptr.type, weak_ptr)

        return '[(%s) %s]' % (weak_ptr.dynamic_type, weak_ptr)
      +
@GeckoPrettyPrinter('mozilla::StaticAutoPtr', '^mozilla::StaticAutoPtr<.*>$')
@GeckoPrettyPrinter('mozilla::StaticRefPtr', '^mozilla::StaticRefPtr<.*>$')
@GeckoPrettyPrinter('nsAutoPtr', '^nsAutoPtr<.*>$')
@GeckoPrettyPrinter('nsCOMPtr', '^nsCOMPtr<.*>$')
@GeckoPrettyPrinter('RefPtr', '^RefPtr<.*>$')
class smartptr_printer(object):
    """gdb pretty-printer for Gecko smart pointers: shows the wrapped raw
    pointer together with its dynamic (most-derived) type."""

    def __init__(self, value):
        # All of these smart-pointer templates store the raw pointer in a
        # member named mRawPtr.
        self.value = value['mRawPtr']

    def to_string(self):
        if not self.value:
            # Null pointer: only the static type is available.
            type_name = str(self.value.type)
        else:
            # Use the dynamic type so subclasses display as what they are.
            type_name = str(self.value.dereference().dynamic_type.pointer())

        return '[(%s) %s]' % (type_name, str(self.value))
      +
@GeckoPrettyPrinter('UniquePtr', '^mozilla::UniquePtr<.*>$')
class uniqueptr_printer(object):
    """gdb pretty-printer for mozilla::UniquePtr: shows the owned pointer
    and its dynamic type."""

    def __init__(self, value):
        # UniquePtr stores its pointer inside a compressed-pair member;
        # mTuple.mFirstA is the raw pointer (layout of mozilla::Pair --
        # NOTE(review): member name is tied to that implementation detail).
        self.value = value['mTuple']['mFirstA']

    def to_string(self):
        if not self.value:
            # Null pointer: only the static type is available.
            type_name = str(self.value.type)
        else:
            # Use the dynamic type so subclasses display as what they are.
            type_name = str(self.value.dereference().dynamic_type.pointer())

        return '[(%s) %s]' % (type_name, str(self.value))
      diff --git a/python/gdbpp/gdbpp/string.py b/python/gdbpp/gdbpp/string.py
      new file mode 100644
      index 000000000..33d536a02
      --- /dev/null
      +++ b/python/gdbpp/gdbpp/string.py
      @@ -0,0 +1,19 @@
      +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
      +# vim: set filetype=python:
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +import gdb
      +from gdbpp import GeckoPrettyPrinter
      +
@GeckoPrettyPrinter('nsString', '^ns.*String$')
class string_printer(object):
    """gdb pretty-printer for XPCOM string classes (nsString, nsCString,
    and the other ns*String types matched by the regexp)."""

    def __init__(self, value):
        # value: the gdb.Value for the string object.
        self.value = value

    def to_string(self):
        # All of these string classes keep their character buffer in mData.
        return self.value['mData']

    def display_hint(self):
        # Tell gdb to quote/escape the value like a string.
        return 'string'
      diff --git a/python/gdbpp/gdbpp/tarray.py b/python/gdbpp/gdbpp/tarray.py
      new file mode 100644
      index 000000000..66797e4c9
      --- /dev/null
      +++ b/python/gdbpp/gdbpp/tarray.py
      @@ -0,0 +1,30 @@
      +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
      +# vim: set filetype=python:
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +import gdb
      +import itertools
      +from gdbpp import GeckoPrettyPrinter
      +
@GeckoPrettyPrinter('InfallibleTArray', '^InfallibleTArray<.*>$')
@GeckoPrettyPrinter('FallibleTArray', '^FallibleTArray<.*>$')
@GeckoPrettyPrinter('AutoTArray', '^AutoTArray<.*>$')
@GeckoPrettyPrinter('nsTArray', '^nsTArray<.*>$')
class tarray_printer(object):
    """gdb pretty-printer rendering the nsTArray family as an array of
    elements."""

    def __init__(self, value):
        # value: the gdb.Value for the array; remember the element type for
        # casting the storage below.
        self.value = value
        self.elem_type = value.type.template_argument(0)

    def children(self):
        """Yield ('index', element) pairs for each of the mLength elements."""
        length = self.value['mHdr'].dereference()['mLength']
        # The elements are laid out immediately after the header, so
        # pointer-arithmetic past mHdr lands on element storage.
        data = self.value['mHdr'] + 1
        elements = data.cast(self.elem_type.pointer())
        return (('%d' % i, (elements + i).dereference()) for i in range(0, int(length)))

    def to_string(self):
        # Show the concrete template type as the summary.
        return str(self.value.type)

    def display_hint(self):
        # Tell gdb to lay the children out like an array.
        return 'array'
      diff --git a/python/gdbpp/gdbpp/thashtable.py b/python/gdbpp/gdbpp/thashtable.py
      new file mode 100644
      index 000000000..10aee4946
      --- /dev/null
      +++ b/python/gdbpp/gdbpp/thashtable.py
      @@ -0,0 +1,143 @@
      +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
      +# vim: set filetype=python:
      +# This Source Code Form is subject to the terms of the Mozilla Public
      +# License, v. 2.0. If a copy of the MPL was not distributed with this
      +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
      +
      +import gdb
      +import itertools
      +from gdbpp import GeckoPrettyPrinter
      +
      +def walk_template_to_given_base(value, desired_tag_prefix):
      +    '''Given a value of some template subclass, walk up its ancestry until we
      +    hit the desired type, then return the appropriate value (which will then
      +    have that type).
      +    '''
      +    # Base case
      +    t = value.type
      +    # It's possible that we're dealing with an alias template that looks like:
      +    #   template
      +    #   using ManagedContainer = nsTHashtable>;
      +    # In which case we want to strip the indirection, and strip_typedefs()
      +    # accomplishes this.  (Disclaimer: I tried it and it worked and it didn't
      +    # break my other use cases, if things start exploding, do reconsider.)
      +    t = t.strip_typedefs()
      +    if t.tag.startswith(desired_tag_prefix):
      +        return value
      +    for f in t.fields():
      +        # we only care about the inheritance hierarchy
      +        if not f.is_base_class:
      +            continue
      +        # This is the answer or something we're going to need to recurse into.
      +        fv = value[f]
      +        ft = fv.type
      +        # slightly optimize by checking the tag rather than in the recursion
      +        if ft.tag.startswith(desired_tag_prefix):
      +            # found it!
      +            return fv
      +        return walk_template_to_given_base(fv, desired_tag_prefix)
      +    return None
      +
      +# The templates and their inheritance hierarchy form an onion of types around
      +# the nsTHashtable core at the center.  All we care about is that nsTHashtable,
      +# but we register for the descendant types in order to avoid the default pretty
      +# printers having to unwrap those onion layers, wasting precious lines.
# The templates and their inheritance hierarchy form an onion of types around
# the nsTHashtable core at the center.  All we care about is that nsTHashtable,
# but we register for the descendant types in order to avoid the default pretty
# printers having to unwrap those onion layers, wasting precious lines.
@GeckoPrettyPrinter('nsClassHashtable', '^nsClassHashtable<.*>$')
@GeckoPrettyPrinter('nsDataHashtable', '^nsDataHashtable<.*>$')
@GeckoPrettyPrinter('nsInterfaceHashtable', '^nsInterfaceHashtable<.*>$')
@GeckoPrettyPrinter('nsRefPtrHashtable', '^nsRefPtrHashtable<.*>$')
@GeckoPrettyPrinter('nsBaseHashtable', '^nsBaseHashtable<.*>$')
@GeckoPrettyPrinter('nsTHashtable', '^nsTHashtable<.*>$')
class thashtable_printer(object):
    """gdb pretty-printer for the nsTHashtable family, displayed as a map
    (key/value tables) or an array (hash sets)."""

    def __init__(self, outer_value):
        # Remember the most-derived type for to_string(); it is the most
        # informative name to show the user.
        self.outermost_type = outer_value.type

        # Peel the template "onion" down to the nsTHashtable core that
        # actually owns the PLDHashTable.
        value = walk_template_to_given_base(outer_value, 'nsTHashtable<')
        self.value = value

        # The entry type stored in the table (nsTHashtable's template arg).
        self.entry_type = value.type.template_argument(0)

        # -- Determine whether we're a hashTABLE or a hashSET
        # If we're a table, the entry type will be a nsBaseHashtableET template.
        # If we're a set, it will be something like nsPtrHashKey.
        #
        # So, assume we're a set if we're not nsBaseHashtableET<
        # (It should ideally also be true that the type ends with HashKey, but
        # since nsBaseHashtableET causes us to assume "mData" exists, let's
        # pivot based on that.)
        self.is_table = self.entry_type.tag.startswith('nsBaseHashtableET<')

        # While we know that it has a field `mKeyHash` for the hash-code and
        # book-keeping, and a DataType field mData for the value (if we're a
        # table), the key field frustratingly varies by key type.
        #
        # So we want to walk its key type to figure out the field name.  And we
        # do mean field name.  The field object is no good for subscripting the
        # value unless the field was directly owned by that value's type.  But
        # by using a string name, we save ourselves all that fanciness.

        if self.is_table:
            # For nsBaseHashtableET, we want the KeyClass
            key_type = self.entry_type.template_argument(0)
        else:
            # If we're a set, our entry type is the key class already!
            key_type = self.entry_type
        self.key_field_name = None
        for f in key_type.fields():
            # No need to traverse up the type hierarchy...
            if f.is_base_class:
                continue
            # ...just to skip the fields we know exist...
            if f.name == 'mKeyHash' or f.name == 'mData':
                continue
            # ...and assume the first one we find is the key.
            self.key_field_name = f.name
            break

    def children(self):
        """Yield key (and, for tables, value) gdb.Values for each occupied
        slot in the underlying PLDHashTable."""
        table = self.value['mTable']

        # mEntryCount is the number of occupied slots/entries in the table.
        # We can use this to avoid doing wasted memory reads.
        entryCount = table['mEntryCount']
        if entryCount == 0:
            return

        # The table capacity is tracked "cleverly" in terms of how many bits
        # the hash needs to be shifted.  CapacityFromHashShift calculates the
        # actual entry capacity via ((uint32_t)1 << (kHashBits - mHashShift));
        capacity = 1 << (table['kHashBits'] - table['mHashShift'])

        # Pierce generation-tracking EntryStore class to get at buffer.  The
        # class instance always exists, but this char* may be null.
        store = table['mEntryStore']['mEntryStore']

        key_field_name = self.key_field_name

        seenCount = 0
        pEntry = store.cast(self.entry_type.pointer())
        for i in range(0, int(capacity)):
            entry = (pEntry + i).dereference()
            # An mKeyHash of 0 means empty, 1 means deleted sentinel, so skip
            # if that's the case.
            if entry['mKeyHash'] <= 1:
                continue

            yield ('%d' % i, entry[key_field_name])
            # For a 'map' display hint, gdb consumes children pairwise as
            # key then value.
            if self.is_table:
                yield ('%d' % i, entry['mData'])

            # Stop iterating if we know there are no more occupied slots.
            seenCount += 1
            if seenCount >= entryCount:
                break

    def to_string(self):
        # The most specific template type is the most interesting.
        return str(self.outermost_type)

    def display_hint(self):
        # Maps get key/value formatting; sets render like arrays.
        if self.is_table:
            return 'map'
        else:
            return 'array'
      diff --git a/python/jsmin/jsmin/__init__.py b/python/jsmin/jsmin/__init__.py
      new file mode 100644
      index 000000000..033a08701
      --- /dev/null
      +++ b/python/jsmin/jsmin/__init__.py
      @@ -0,0 +1,238 @@
      +# This code is original from jsmin by Douglas Crockford, it was translated to
      +# Python by Baruch Even. It was rewritten by Dave St.Germain for speed.
      +#
      +# The MIT License (MIT)
      +# 
      +# Copyright (c) 2013 Dave St.Germain
      +# 
      +# Permission is hereby granted, free of charge, to any person obtaining a copy
      +# of this software and associated documentation files (the "Software"), to deal
      +# in the Software without restriction, including without limitation the rights
      +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
      +# copies of the Software, and to permit persons to whom the Software is
      +# furnished to do so, subject to the following conditions:
      +# 
      +# The above copyright notice and this permission notice shall be included in
      +# all copies or substantial portions of the Software.
      +# 
      +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
      +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
      +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
      +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
      +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
      +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
      +# THE SOFTWARE.
      +
      +
      +import sys
      +is_3 = sys.version_info >= (3, 0)
      +if is_3:
      +    import io
      +else:
      +    import StringIO
      +    try:
      +        import cStringIO
      +    except ImportError:
      +        cStringIO = None
      +
      +
      +__all__ = ['jsmin', 'JavascriptMinify']
      +__version__ = '2.0.11'
      +
      +
      +def jsmin(js, **kwargs):
      +    """
      +    returns a minified version of the javascript string
      +    """
      +    if not is_3:        
      +        if cStringIO and not isinstance(js, unicode):
      +            # strings can use cStringIO for a 3x performance
      +            # improvement, but unicode (in python2) cannot
      +            klass = cStringIO.StringIO
      +        else:
      +            klass = StringIO.StringIO
      +    else:
      +        klass = io.StringIO
      +    ins = klass(js)
      +    outs = klass()
      +    JavascriptMinify(ins, outs, **kwargs).minify()
      +    return outs.getvalue()
      +
      +
      +class JavascriptMinify(object):
      +    """
      +    Minify an input stream of javascript, writing
      +    to an output stream
      +    """
      +
      +    def __init__(self, instream=None, outstream=None, quote_chars="'\""):
      +        self.ins = instream
      +        self.outs = outstream
      +        self.quote_chars = quote_chars
      +
      +    def minify(self, instream=None, outstream=None):
      +        if instream and outstream:
      +            self.ins, self.outs = instream, outstream
      +        
      +        self.is_return = False
      +        self.return_buf = ''
      +        
      +        def write(char):
      +            # all of this is to support literal regular expressions.
      +            # sigh
      +            if char in 'return':
      +                self.return_buf += char
      +                self.is_return = self.return_buf == 'return'
      +            self.outs.write(char)
      +            if self.is_return:
      +                self.return_buf = ''
      +
      +        read = self.ins.read
      +
      +        space_strings = "abcdefghijklmnopqrstuvwxyz"\
      +        "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_$\\"
      +        starters, enders = '{[(+-', '}])+-' + self.quote_chars
      +        newlinestart_strings = starters + space_strings
      +        newlineend_strings = enders + space_strings
      +        do_newline = False
      +        do_space = False
      +        escape_slash_count = 0
      +        doing_single_comment = False
      +        previous_before_comment = ''
      +        doing_multi_comment = False
      +        in_re = False
      +        in_quote = ''
      +        quote_buf = []
      +        
      +        previous = read(1)
      +        if previous == '\\':
      +            escape_slash_count += 1
      +        next1 = read(1)
      +        if previous == '/':
      +            if next1 == '/':
      +                doing_single_comment = True
      +            elif next1 == '*':
      +                doing_multi_comment = True
      +                previous = next1
      +                next1 = read(1)
      +            else:
      +                in_re = True  # literal regex at start of script
      +                write(previous)
      +        elif not previous:
      +            return
      +        elif previous >= '!':
      +            if previous in self.quote_chars:
      +                in_quote = previous
      +            write(previous)
      +            previous_non_space = previous
      +        else:
      +            previous_non_space = ' '
      +        if not next1:
      +            return
      +
      +        while 1:
      +            next2 = read(1)
      +            if not next2:
      +                last = next1.strip()
      +                if not (doing_single_comment or doing_multi_comment)\
      +                    and last not in ('', '/'):
      +                    if in_quote:
      +                        write(''.join(quote_buf))
      +                    write(last)
      +                break
      +            if doing_multi_comment:
      +                if next1 == '*' and next2 == '/':
      +                    doing_multi_comment = False
      +                    if previous_before_comment and previous_before_comment in space_strings:
      +                        do_space = True
      +                    next2 = read(1)
      +            elif doing_single_comment:
      +                if next1 in '\r\n':
      +                    doing_single_comment = False
      +                    while next2 in '\r\n':
      +                        next2 = read(1)
      +                        if not next2:
      +                            break
      +                    if previous_before_comment in ')}]':
      +                        do_newline = True
      +                    elif previous_before_comment in space_strings:
      +                        write('\n')
      +            elif in_quote:
      +                quote_buf.append(next1)
      +
      +                if next1 == in_quote:
      +                    numslashes = 0
      +                    for c in reversed(quote_buf[:-1]):
      +                        if c != '\\':
      +                            break
      +                        else:
      +                            numslashes += 1
      +                    if numslashes % 2 == 0:
      +                        in_quote = ''
      +                        write(''.join(quote_buf))
      +            elif next1 in '\r\n':
      +                if previous_non_space in newlineend_strings \
      +                    or previous_non_space > '~':
      +                    while 1:
      +                        if next2 < '!':
      +                            next2 = read(1)
      +                            if not next2:
      +                                break
      +                        else:
      +                            if next2 in newlinestart_strings \
      +                                or next2 > '~' or next2 == '/':
      +                                do_newline = True
      +                            break
      +            elif next1 < '!' and not in_re:
      +                if (previous_non_space in space_strings \
      +                    or previous_non_space > '~') \
      +                    and (next2 in space_strings or next2 > '~'):
      +                    do_space = True
      +                elif previous_non_space in '-+' and next2 == previous_non_space:
      +                    # protect against + ++ or - -- sequences
      +                    do_space = True
      +                elif self.is_return and next2 == '/':
      +                    # returning a regex...
      +                    write(' ')
      +            elif next1 == '/':
      +                if do_space:
      +                    write(' ')
      +                if in_re:
      +                    if previous != '\\' or (not escape_slash_count % 2) or next2 in 'gimy':
      +                        in_re = False
      +                    write('/')
      +                elif next2 == '/':                    
      +                    doing_single_comment = True
      +                    previous_before_comment = previous_non_space
      +                elif next2 == '*':
      +                    doing_multi_comment = True
      +                    previous_before_comment = previous_non_space
      +                    previous = next1
      +                    next1 = next2
      +                    next2 = read(1)
      +                else:
      +                    in_re = previous_non_space in '(,=:[?!&|;' or self.is_return  # literal regular expression
      +                    write('/')
      +            else:
      +                if do_space:
      +                    do_space = False
      +                    write(' ')
      +                if do_newline:
      +                    write('\n')
      +                    do_newline = False
      +
      +                write(next1)
      +                if not in_re and next1 in self.quote_chars:
      +                    in_quote = next1
      +                    quote_buf = []
      +
      +            previous = next1
      +            next1 = next2
      +
      +            if previous >= '!':
      +                previous_non_space = previous
      +
      +            if previous == '\\':
      +                escape_slash_count += 1
      +            else:
      +                escape_slash_count = 0
      diff --git a/python/jsmin/jsmin/test.py b/python/jsmin/jsmin/test.py
      new file mode 100644
      index 000000000..6f7f627fd
      --- /dev/null
      +++ b/python/jsmin/jsmin/test.py
      @@ -0,0 +1,394 @@
      +import unittest
      +import jsmin
      +import sys
      +
      +class JsTests(unittest.TestCase):
      +    def _minify(self, js):
      +        return jsmin.jsmin(js)
      +
      +    def assertEqual(self, thing1, thing2):
      +        if thing1 != thing2:
      +            print(repr(thing1), repr(thing2))
      +            raise AssertionError
      +        return True
      +    
      +    def assertMinified(self, js_input, expected, **kwargs):
      +        minified = jsmin.jsmin(js_input, **kwargs)
      +        assert minified == expected, "%r != %r" % (minified, expected)
      +        
      +    def testQuoted(self):
      +        js = r'''
      +        Object.extend(String, {
      +          interpret: function(value) {
      +            return value == null ? '' : String(value);
      +          },
      +          specialChar: {
      +            '\b': '\\b',
      +            '\t': '\\t',
      +            '\n': '\\n',
      +            '\f': '\\f',
      +            '\r': '\\r',
      +            '\\': '\\\\'
      +          }
      +        });
      +
      +        '''
      +        expected = r"""Object.extend(String,{interpret:function(value){return value==null?'':String(value);},specialChar:{'\b':'\\b','\t':'\\t','\n':'\\n','\f':'\\f','\r':'\\r','\\':'\\\\'}});"""
      +        self.assertMinified(js, expected)
      +
      +    def testSingleComment(self):
      +        js = r'''// use native browser JS 1.6 implementation if available
      +        if (Object.isFunction(Array.prototype.forEach))
      +          Array.prototype._each = Array.prototype.forEach;
      +
      +        if (!Array.prototype.indexOf) Array.prototype.indexOf = function(item, i) {
      +
      +        // hey there
      +        function() {// testing comment
      +        foo;
      +        //something something
      +
      +        location = 'http://foo.com;';   // goodbye
      +        }
      +        //bye
      +        '''
      +        expected = r""" 
      +if(Object.isFunction(Array.prototype.forEach))
      +Array.prototype._each=Array.prototype.forEach;if(!Array.prototype.indexOf)Array.prototype.indexOf=function(item,i){ function(){ foo; location='http://foo.com;';}"""
      +        # print expected
      +        self.assertMinified(js, expected)
      +    
      +    def testEmpty(self):
      +        self.assertMinified('', '')
      +        self.assertMinified(' ', '')
      +        self.assertMinified('\n', '')
      +        self.assertMinified('\r\n', '')
      +        self.assertMinified('\t', '')
      +        
      +        
      +    def testMultiComment(self):
      +        js = r"""
      +        function foo() {
      +            print('hey');
      +        }
      +        /*
      +        if(this.options.zindex) {
      +          this.originalZ = parseInt(Element.getStyle(this.element,'z-index') || 0);
      +          this.element.style.zIndex = this.options.zindex;
      +        }
      +        */
      +        another thing;
      +        """
      +        expected = r"""function foo(){print('hey');}
      +another thing;"""
      +        self.assertMinified(js, expected)
      +    
      +    def testLeadingComment(self):
      +        js = r"""/* here is a comment at the top
      +        
      +        it ends here */
      +        function foo() {
      +            alert('crud');
      +        }
      +        
      +        """
      +        expected = r"""function foo(){alert('crud');}"""
      +        self.assertMinified(js, expected)
      +
      +    def testBlockCommentStartingWithSlash(self):
      +        self.assertMinified('A; /*/ comment */ B', 'A;B')
      +
      +    def testBlockCommentEndingWithSlash(self):
      +        self.assertMinified('A; /* comment /*/ B', 'A;B')
      +
      +    def testLeadingBlockCommentStartingWithSlash(self):
      +        self.assertMinified('/*/ comment */ A', 'A')
      +
      +    def testLeadingBlockCommentEndingWithSlash(self):
      +        self.assertMinified('/* comment /*/ A', 'A')
      +
      +    def testEmptyBlockComment(self):
      +        self.assertMinified('/**/ A', 'A')
      +
      +    def testBlockCommentMultipleOpen(self):
      +        self.assertMinified('/* A /* B */ C', 'C')
      +
      +    def testJustAComment(self):
      +        self.assertMinified('     // a comment', '')
      +
      +    def test_issue_10(self):
      +        js = '''
      +        files = [{name: value.replace(/^.*\\\\/, '')}];
      +        // comment
      +        A
      +        '''
      +        expected = '''files=[{name:value.replace(/^.*\\\\/,'')}]; A'''
      +        self.assertMinified(js, expected)
      +
      +    def testRe(self):
      +        js = r'''  
      +        var str = this.replace(/\\./g, '@').replace(/"[^"\\\n\r]*"/g, '');
      +        return (/^[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]*$/).test(str);
      +        });'''
      +        expected = r"""var str=this.replace(/\\./g,'@').replace(/"[^"\\\n\r]*"/g,'');return(/^[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]*$/).test(str);});"""
      +        self.assertMinified(js, expected)
      +
      +    def testIgnoreComment(self):
      +        js = r"""
      +        var options_for_droppable = {
      +          overlap:     options.overlap,
      +          containment: options.containment,
      +          tree:        options.tree,
      +          hoverclass:  options.hoverclass,
      +          onHover:     Sortable.onHover
      +        }
      +
      +        var options_for_tree = {
      +          onHover:      Sortable.onEmptyHover,
      +          overlap:      options.overlap,
      +          containment:  options.containment,
      +          hoverclass:   options.hoverclass
      +        }
      +
      +        // fix for gecko engine   
      +        Element.cleanWhitespace(element); 
      +        """
      +        expected = r"""var options_for_droppable={overlap:options.overlap,containment:options.containment,tree:options.tree,hoverclass:options.hoverclass,onHover:Sortable.onHover}
      +var options_for_tree={onHover:Sortable.onEmptyHover,overlap:options.overlap,containment:options.containment,hoverclass:options.hoverclass} 
      +Element.cleanWhitespace(element);"""
      +        self.assertMinified(js, expected)
      +
      +    def testHairyRe(self):
      +        js = r"""
      +        inspect: function(useDoubleQuotes) {
      +          var escapedString = this.gsub(/[\x00-\x1f\\]/, function(match) {
      +            var character = String.specialChar[match[0]];
      +            return character ? character : '\\u00' + match[0].charCodeAt().toPaddedString(2, 16);
      +          });
      +          if (useDoubleQuotes) return '"' + escapedString.replace(/"/g, '\\"') + '"';
      +          return "'" + escapedString.replace(/'/g, '\\\'') + "'";
      +        },
      +
      +        toJSON: function() {
      +          return this.inspect(true);
      +        },
      +
      +        unfilterJSON: function(filter) {
      +          return this.sub(filter || Prototype.JSONFilter, '#{1}');
      +        },
      +        """
      +        expected = r"""inspect:function(useDoubleQuotes){var escapedString=this.gsub(/[\x00-\x1f\\]/,function(match){var character=String.specialChar[match[0]];return character?character:'\\u00'+match[0].charCodeAt().toPaddedString(2,16);});if(useDoubleQuotes)return'"'+escapedString.replace(/"/g,'\\"')+'"';return"'"+escapedString.replace(/'/g,'\\\'')+"'";},toJSON:function(){return this.inspect(true);},unfilterJSON:function(filter){return this.sub(filter||Prototype.JSONFilter,'#{1}');},"""
      +        self.assertMinified(js, expected)
      +    
      +    def testLiteralRe(self):
      +        js = r"""
      +        myString.replace(/\\/g, '/');
      +        console.log("hi");
      +        """
      +        expected = r"""myString.replace(/\\/g,'/');console.log("hi");"""
      +        self.assertMinified(js, expected)
      +        
      +        js = r''' return /^data:image\//i.test(url) || 
      +        /^(https?|ftp|file|about|chrome|resource):/.test(url);
      +        '''
      +        expected = r'''return /^data:image\//i.test(url)||/^(https?|ftp|file|about|chrome|resource):/.test(url);'''
      +        self.assertMinified(js, expected)
      +        
      +    def testNoBracesWithComment(self):
      +        js = r"""
      +        onSuccess: function(transport) {
      +            var js = transport.responseText.strip();
      +            if (!/^\[.*\]$/.test(js)) // TODO: improve sanity check
      +              throw 'Server returned an invalid collection representation.';
      +            this._collection = eval(js);
      +            this.checkForExternalText();
      +          }.bind(this),
      +          onFailure: this.onFailure
      +        });
      +        """
      +        expected = r"""onSuccess:function(transport){var js=transport.responseText.strip();if(!/^\[.*\]$/.test(js)) 
      +throw'Server returned an invalid collection representation.';this._collection=eval(js);this.checkForExternalText();}.bind(this),onFailure:this.onFailure});"""
      +        self.assertMinified(js, expected)
      +    
      +    def testSpaceInRe(self):
      +        js = r"""
      +        num = num.replace(/ /g,'');
      +        """
      +        self.assertMinified(js, "num=num.replace(/ /g,'');")
      +    
      +    def testEmptyString(self):
      +        js = r'''
      +        function foo('') {
      +        
      +        }
      +        '''
      +        self.assertMinified(js, "function foo(''){}")
      +    
      +    def testDoubleSpace(self):
      +        js = r'''
      +var  foo    =  "hey";
      +        '''
      +        self.assertMinified(js, 'var foo="hey";')
      +    
      +    def testLeadingRegex(self):
      +        js = r'/[d]+/g    '
      +        self.assertMinified(js, js.strip())
      +    
      +    def testLeadingString(self):
      +        js = r"'a string in the middle of nowhere'; // and a comment"
      +        self.assertMinified(js, "'a string in the middle of nowhere';")
      +    
      +    def testSingleCommentEnd(self):
      +        js = r'// a comment\n'
      +        self.assertMinified(js, '')
      +    
      +    def testInputStream(self):
      +        try:
      +            from StringIO import StringIO
      +        except ImportError:
      +            from io import StringIO
      +            
      +        ins = StringIO(r'''
      +            function foo('') {
      +
      +            }
      +            ''')
      +        outs = StringIO()
      +        m = jsmin.JavascriptMinify()
      +        m.minify(ins, outs)
      +        output = outs.getvalue()
      +        assert output == "function foo(''){}"
      +    
      +    def testUnicode(self):
      +        instr = u'\u4000 //foo'
      +        expected = u'\u4000'
      +        output = jsmin.jsmin(instr)
      +        self.assertEqual(output, expected)
      +
      +    def testCommentBeforeEOF(self):
      +        self.assertMinified("//test\r\n", "")
      +    
      +    def testCommentInObj(self):
      +        self.assertMinified("""{ 
      +            a: 1,//comment
      +            }""", "{a:1,}")
      +
      +    def testCommentInObj2(self):
      +        self.assertMinified("{a: 1//comment\r\n}", "{a:1\n}")
      +
      +    def testImplicitSemicolon(self):
+        # "return \n 1" is equivalent to "return; 1",
+        # so best make sure jsmin retains the newline
      +        self.assertMinified("return;//comment\r\na", "return;a")
      +
      +    def testImplicitSemicolon2(self):
      +        self.assertMinified("return//comment...\r\na", "return\na")
      +    
      +    def testSingleComment2(self):
      +        self.assertMinified('x.replace(/\//, "_")// slash to underscore',
      +                'x.replace(/\//,"_")')
      +
      +    def testSlashesNearComments(self):
      +        original = '''
      +        { a: n / 2, }
      +        // comment
      +        '''
      +        expected = '''{a:n/2,}'''
      +        self.assertMinified(original, expected)
      +    
      +    def testReturn(self):
      +        original = '''
      +        return foo;//comment
      +        return bar;'''
      +        expected = 'return foo; return bar;'
      +        self.assertMinified(original, expected)
      +
      +    def test_space_plus(self):
      +        original = '"s" + ++e + "s"'
      +        expected = '"s"+ ++e+"s"'
      +        self.assertMinified(original, expected)
      +
      +    def test_no_final_newline(self):
      +        original = '"s"'
      +        expected = '"s"'
      +        self.assertMinified(original, expected)
      +
      +    def test_space_with_regex_repeats(self):
      +        original = '/(NaN| {2}|^$)/.test(a)&&(a="M 0 0");'
      +        self.assertMinified(original, original)  # there should be nothing jsmin can do here
      +
      +    def test_space_with_regex_repeats_not_at_start(self):
      +        original = 'aaa;/(NaN| {2}|^$)/.test(a)&&(a="M 0 0");'
      +        self.assertMinified(original, original)  # there should be nothing jsmin can do here
      +
      +    def test_space_in_regex(self):
      +        original = '/a (a)/.test("a")'
      +        self.assertMinified(original, original)
      +
      +    def test_angular_1(self):
      +        original = '''var /** holds major version number for IE or NaN for real browsers */
      +                      msie,
      +                      jqLite,           // delay binding since jQuery could be loaded after us.'''
      +        minified = jsmin.jsmin(original)
      +        self.assertTrue('var msie' in minified)
      +
      +    def test_angular_2(self):
      +        original = 'var/* comment */msie;'
      +        expected = 'var msie;'
      +        self.assertMinified(original, expected)
      +
      +    def test_angular_3(self):
      +        original = 'var /* comment */msie;'
      +        expected = 'var msie;'
      +        self.assertMinified(original, expected)
      +
      +    def test_angular_4(self):
      +        original = 'var /* comment */ msie;'
      +        expected = 'var msie;'
      +        self.assertMinified(original, expected)
      +
      +    def test_angular_5(self):
      +        original = 'a/b'
      +        self.assertMinified(original, original)
      +
      +    def testBackticks(self):
      +        original = '`test`'
      +        self.assertMinified(original, original, quote_chars="'\"`")
      +
      +        original = '` test with leading whitespace`'
      +        self.assertMinified(original, original, quote_chars="'\"`")
      +
      +        original = '`test with trailing whitespace `'
      +        self.assertMinified(original, original, quote_chars="'\"`")
      +
      +        original = '''`test
      +with a new line`'''
      +        self.assertMinified(original, original, quote_chars="'\"`")
      +
      +        original = '''dumpAvStats: function(stats) {
      +        var statsString = "";
      +        if (stats.mozAvSyncDelay) {
      +          statsString += `A/V sync: ${stats.mozAvSyncDelay} ms `;
      +        }
      +        if (stats.mozJitterBufferDelay) {
      +          statsString += `Jitter-buffer delay: ${stats.mozJitterBufferDelay} ms`;
      +        }
      +
      +        return React.DOM.div(null, statsString);'''
      +        expected = 'dumpAvStats:function(stats){var statsString="";if(stats.mozAvSyncDelay){statsString+=`A/V sync: ${stats.mozAvSyncDelay} ms `;}\nif(stats.mozJitterBufferDelay){statsString+=`Jitter-buffer delay: ${stats.mozJitterBufferDelay} ms`;}\nreturn React.DOM.div(null,statsString);'
      +        self.assertMinified(original, expected, quote_chars="'\"`")
      +
      +    def testBackticksExpressions(self):
      +        original = '`Fifteen is ${a + b} and not ${2 * a + b}.`'
      +        self.assertMinified(original, original, quote_chars="'\"`")
      +
      +        original = '''`Fifteen is ${a +
      +b} and not ${2 * a + "b"}.`'''
      +        self.assertMinified(original, original, quote_chars="'\"`")
      +
      +    def testBackticksTagged(self):
      +        original = 'tag`Hello ${ a + b } world ${ a * b}`;'
      +        self.assertMinified(original, original, quote_chars="'\"`")
      +
      +if __name__ == '__main__':
      +    unittest.main()
      diff --git a/python/jsmin/setup.cfg b/python/jsmin/setup.cfg
      new file mode 100644
      index 000000000..861a9f554
      --- /dev/null
      +++ b/python/jsmin/setup.cfg
      @@ -0,0 +1,5 @@
      +[egg_info]
      +tag_build = 
      +tag_date = 0
      +tag_svn_revision = 0
      +
      diff --git a/python/jsmin/setup.py b/python/jsmin/setup.py
      new file mode 100644
      index 000000000..8fff56602
      --- /dev/null
      +++ b/python/jsmin/setup.py
      @@ -0,0 +1,42 @@
      +from setuptools import setup
      +
      +import os, sys, re
      +
      +os.environ['COPYFILE_DISABLE'] = 'true'  # this disables including resource forks in tar files on os x
      +
      +
      +extra = {}
      +if sys.version_info >= (3,0):
      +    extra['use_2to3'] = True
      +
      +setup(
      +    name="jsmin",
      +    version=re.search(r'__version__ = ["\']([^"\']+)', open('jsmin/__init__.py').read()).group(1),
      +    packages=['jsmin'],
      +    description='JavaScript minifier.\nPLEASE UPDATE TO VERSION >= 2.0.6. Older versions have a serious bug related to comments.',
      +    long_description=open('README.rst').read(),
      +    author='Dave St.Germain',
      +    author_email='dave@st.germa.in',
      +    maintainer='Tikitu de Jager',
      +    maintainer_email='tikitu+jsmin@logophile.org',
      +    test_suite='jsmin.test.JsTests',
      +    license='MIT License',
      +    url='https://bitbucket.org/dcs/jsmin/',
      +    classifiers=[
      +        'Development Status :: 5 - Production/Stable',
      +        'Environment :: Web Environment',
      +        'Intended Audience :: Developers',
      +        'License :: OSI Approved :: MIT License',
      +        'Operating System :: OS Independent',
      +        'Programming Language :: Python :: 2',
      +        'Programming Language :: Python :: 2.6',
      +        'Programming Language :: Python :: 2.7',
      +        'Programming Language :: Python :: 3',
      +        'Programming Language :: Python :: 3.2',
      +        'Programming Language :: Python :: 3.3',
      +        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
      +        'Topic :: Software Development :: Pre-processors',
      +        'Topic :: Text Processing :: Filters',
      +    ],
      +    **extra
      +)
      diff --git a/python/lldbutils/README.txt b/python/lldbutils/README.txt
      new file mode 100644
      index 000000000..a8db723f1
      --- /dev/null
      +++ b/python/lldbutils/README.txt
      @@ -0,0 +1,221 @@
      +lldb debugging functionality for Gecko
      +======================================
      +
+This directory contains a module, lldbutils, which is imported by the
+in-tree .lldbinit file.  The lldbutils modules define some lldb commands
+that are handy for debugging Gecko.
      +
      +If you want to add a new command or Python-implemented type summary, either add
      +it to one of the existing broad area Python files (such as lldbutils/layout.py
      +for layout-related commands) or create a new file if none of the existing files
      +is appropriate.  If you add a new file, make sure you add it to __all__ in
      +lldbutils/__init__.py.
      +
      +
      +Supported commands
      +------------------
      +
      +Most commands below that can take a pointer to an object also support being
      +called with a smart pointer like nsRefPtr or nsCOMPtr.
      +
      +
      +* frametree EXPR, ft EXPR
      +  frametreelimited EXPR, ftl EXPR
      +
      +  Shows information about a frame tree.  EXPR is an expression that
      +  is evaluated, and must be an nsIFrame*.  frametree displays the
      +  entire frame tree that contains the given frame.  frametreelimited
      +  displays a subtree of the frame tree rooted at the given frame.
      +
      +  (lldb) p this
      +  (nsBlockFrame *) $4 = 0x000000011687fcb8
      +  (lldb) ftl this
      +  Block(div)(-1)@0x11687fcb8 {0,0,7380,690} [state=0002100000d04601] [content=0x11688c0c0] [sc=0x11687f990:-moz-scrolled-content]<
      +    line 0x116899130: count=1 state=inline,clean,prevmarginclean,not impacted,not wrapped,before:nobr,after:nobr[0x100] {60,0,0,690} vis-overflow=60,510,0,0 scr-overflow=60,510,0,0 <
      +      Text(0)""@0x1168990c0 {60,510,0,0} [state=0001000020404000] [content=0x11687ca10] [sc=0x11687fd88:-moz-non-element,parent=0x11687eb00] [run=0x115115e80][0,0,T]
      +    >
      +  >
      +  (lldb) ft this
      +  Viewport(-1)@0x116017430 [view=0x115efe190] {0,0,60,60} [state=000b063000002623] [sc=0x1160170f8:-moz-viewport]<
      +    HTMLScroll(html)(-1)@0x1160180d0 {0,0,0,0} [state=000b020000000403] [content=0x115e4d640] [sc=0x116017768:-moz-viewport-scroll]<
      +      ...
      +      Canvas(html)(-1)@0x116017e08 {0,0,60,60} vis-overflow=0,0,8340,2196 scr-overflow=0,0,8220,2196 [state=000b002000000601] [content=0x115e4d640] [sc=0x11687e0f8:-moz-scrolled-canvas]<
      +        Block(html)(-1)@0x11687e578 {0,0,60,2196} vis-overflow=0,0,8340,2196 scr-overflow=0,0,8220,2196 [state=000b100000d00601] [content=0x115e4d640] [sc=0x11687e4b8,parent=0x0]<
      +          line 0x11687ec48: count=1 state=block,clean,prevmarginclean,not impacted,not wrapped,before:nobr,after:nobr[0x48] bm=480 {480,480,0,1236} vis-overflow=360,426,7980,1410 scr-overflow=480,480,7740,1236 <
      +            Block(body)(1)@0x11687ebb0 {480,480,0,1236} vis-overflow=-120,-54,7980,1410 scr-overflow=0,0,7740,1236 [state=000b120000100601] [content=0x115ed8980] [sc=0x11687e990]<
      +              line 0x116899170: count=1 state=inline,clean,prevmarginclean,not impacted,not wrapped,before:nobr,after:nobr[0x0] {0,0,7740,1236} vis-overflow=-120,-54,7980,1410 scr-overflow=0,0,7740,1236 <
      +                nsTextControlFrame@0x11687f068 {0,66,7740,1170} vis-overflow=-120,-120,7980,1410 scr-overflow=0,0,7740,1170 [state=0002000000004621] [content=0x115ca2c50] [sc=0x11687ea40]<
      +                  HTMLScroll(div)(-1)@0x11687f6b0 {180,240,7380,690} [state=0002000000084409] [content=0x11688c0c0] [sc=0x11687eb00]<
      +                    Block(div)(-1)@0x11687fcb8 {0,0,7380,690} [state=0002100000d04601] [content=0x11688c0c0] [sc=0x11687f990:-moz-scrolled-content]<
      +                      line 0x116899130: count=1 state=inline,clean,prevmarginclean,not impacted,not wrapped,before:nobr,after:nobr[0x100] {60,0,0,690} vis-overflow=60,510,0,0 scr-overflow=60,510,0,0 <
      +                        Text(0)""@0x1168990c0 {60,510,0,0} [state=0001000020404000] [content=0x11687ca10] [sc=0x11687fd88:-moz-non-element,parent=0x11687eb00] [run=0x115115e80][0,0,T]
      + ...
      +
      +
      +* js
      +
      +  Dumps the current JS stack.
      +
      +  (lldb) js
      +  0 anonymous(aForce = false) ["chrome://browser/content/browser.js":13414]
      +      this = [object Object]
      +  1 updateAppearance() ["chrome://browser/content/browser.js":13326]
      +      this = [object Object]
      +  2 handleEvent(aEvent = [object Event]) ["chrome://browser/content/tabbrowser.xml":3811]
      +      this = [object XULElement]
      +
      +
      +* prefcnt EXPR
      +
      +  Shows the refcount of a given object.  EXPR is an expression that is
      +  evaluated, and can be either a pointer to or an actual refcounted
      +  object.  The object can be a standard nsISupports-like refcounted
      +  object, a cycle-collected object or a mozilla::RefCounted object.
      +
      +  (lldb) p this
      +  (nsHTMLDocument *) $1 = 0x0000000116e9d800
      +  (lldb) prefcnt this
      +  20
      +  (lldb) p mDocumentURI
      +  (nsCOMPtr) $3 = {
      +    mRawPtr = 0x0000000117163e50
      +  }
      +  (lldb) prefcnt mDocumentURI
      +  11
      +
      +
      +* pstate EXPR
      +
      +  Shows the frame state bits (using their symbolic names) of a given frame.
      +  EXPR is an expression that is evaluated, and must be an nsIFrame*.
      +
      +  (lldb) p this
      +  (nsTextFrame *) $1 = 0x000000011f470b10
      +  (lldb) p/x mState
      +  (nsFrameState) $2 = 0x0000004080604000
      +  (lldb) pstate this
      +  TEXT_HAS_NONCOLLAPSED_CHARACTERS | TEXT_END_OF_LINE | TEXT_START_OF_LINE | NS_FRAME_PAINTED_THEBES | NS_FRAME_INDEPENDENT_SELECTION
      +
      +
      +* ptag EXPR
      +
      +  Shows the DOM tag name of a node.  EXPR is an expression that is
      +  evaluated, and can be either an nsINode pointer or a concrete DOM
      +  object.
      +
      +  (lldb) p this
      +  (nsHTMLDocument *) $0 = 0x0000000116e9d800
      +  (lldb) ptag this
      +  (PermanentAtomImpl *) $1 = 0x0000000110133ac0 u"#document"
      +  (lldb) p this->GetRootElement()
      +  (mozilla::dom::HTMLSharedElement *) $2 = 0x0000000118429780
      +  (lldb) ptag $2
      +  (PermanentAtomImpl *) $3 = 0x0000000110123b80 u"html"
      +
      +
      +Supported type summaries and synthetic children
      +-----------------------------------------------
      +
      +In lldb terminology, type summaries are rules for how to display a value
      +when using the "expression" command (or its familiar-to-gdb-users "p" alias),
      +and synthetic children are fake member variables or array elements also
      +added by custom rules.
      +
      +For objects that do have synthetic children defined for them, like nsTArray,
      +the "expr -R -- EXPR" command can be used to show its actual member variables.
      +
      +
      +* nsAString, nsACString,
      +  nsFixedString, nsFixedCString,
      +  nsAutoString, nsAutoCString
      +
      +  Strings have a type summary that shows the actual string.
      +
      +  (lldb) frame info
      +  frame #0: 0x000000010400cfea XUL`nsCSSParser::ParseProperty(this=0x00007fff5fbf5248, aPropID=eCSSProperty_margin_top, aPropValue=0x00007fff5fbf53f8, aSheetURI=0x0000000115ae8c00, aBaseURI=0x0000000115ae8c00, aSheetPrincipal=0x000000010ff9e040, aDeclaration=0x00000001826fd580, aChanged=0x00007fff5fbf5247, aIsImportant=false, aIsSVGMode=false) + 74 at nsCSSParser.cpp:12851
      +  (lldb) p aPropValue
      +  (const nsAString_internal) $16 = u"-25px"
      +
      +  (lldb) p this
      +  (nsHTMLDocument *) $18 = 0x0000000115b56000
      +  (lldb) p mContentType
      +  (nsCString) $19 = {
      +    nsACString_internal = "text/html"
      +  }
      +
      +* nscolor
      +
      +  nscolors (32-bit RGBA colors) have a type summary that shows the color as
      +  one of the CSS 2.1 color keywords, a six digit hex color, an rgba() color,
      +  or the "transparent" keyword.
      +
      +  (lldb) p this
      +  (nsTextFrame *) $0 = 0x00000001168245e0
      +  (lldb) p *this->StyleColor()
      +  (const nsStyleColor) $1 = {
      +    mColor = lime
      +  }
      +  (lldb) expr -R -- *this->StyleColor()
      +  (const nsStyleColor) $2 = {
      +    mColor = 4278255360
      +  }
      +
      +* nsIAtom
      +
      +  Atoms have a type summary that shows the string value inside the atom.
      +
      +  (lldb) frame info
      +  frame #0: 0x00000001028b8c49 XUL`mozilla::dom::Element::GetBoolAttr(this=0x0000000115ca1c50, aAttr=0x000000011012a640) const + 25 at Element.h:907
      +  (lldb) p aAttr
      +  (PermanentAtomImpl *) $1 = 0x000000011012a640 u"readonly"
      +
      +* nsTArray and friends
      +
      +  nsTArrays and their auto and fallible varieties have synthetic children
      +  for their elements.  This means when displaying them with "expr" (or "p"),
      +  they will be shown like regular arrays, rather than showing the mHdr and
      +  other fields.
      +
      +  (lldb) frame info
      +  frame #0: 0x00000001043eb8a8 XUL`SVGTextFrame::DoGlyphPositioning(this=0x000000012f3f8778) + 248 at SVGTextFrame.cpp:4940
      +  (lldb) p charPositions
      +  (nsTArray) $5 = {
      +    [0] = {
      +      mozilla::gfx::BasePoint = {
      +        x = 0
      +        y = 816
      +      }
      +    }
      +    [1] = {
      +      mozilla::gfx::BasePoint = {
      +        x = 426
      +        y = 816
      +      }
      +    }
      +    [2] = {
      +      mozilla::gfx::BasePoint = {
      +        x = 906
      +        y = 816
      +      }
      +    }
      +  }
      +  (lldb) expr -R -- charPositions
      +  (nsTArray) $4 = {
      +    nsTArray_Impl = {
      +      nsTArray_base = {
      +        mHdr = 0x000000012f3f1b80
      +      }
      +    }
      +  }
      +
      +* nsTextNode, nsTextFragment
      +
      +  Text nodes have a type summary that shows the nsTextFragment in the
      +  nsTextNode, which itself has a type summary that shows the text
      +  content.
      +
      +  (lldb) p this
      +  (nsTextFrame *) $14 = 0x000000011811bb10
      +  (lldb) p mContent
      +  (nsTextNode *) $15 = 0x0000000118130110 "Search or enter address"
      +
      diff --git a/python/lldbutils/lldbutils/__init__.py b/python/lldbutils/lldbutils/__init__.py
      new file mode 100644
      index 000000000..f27fa7297
      --- /dev/null
      +++ b/python/lldbutils/lldbutils/__init__.py
      @@ -0,0 +1,13 @@
      +import lldb
      +
      +__all__ = ['content', 'general', 'gfx', 'layout', 'utils']
      +
      +def init():
      +    for name in __all__:
      +        init = None
      +        try:
      +            init = __import__('lldbutils.' + name, globals(), locals(), ['init']).init
      +        except AttributeError:
      +            pass
      +        if init:
      +            init(lldb.debugger)
      diff --git a/python/lldbutils/lldbutils/content.py b/python/lldbutils/lldbutils/content.py
      new file mode 100644
      index 000000000..93199001b
      --- /dev/null
      +++ b/python/lldbutils/lldbutils/content.py
      @@ -0,0 +1,21 @@
      +import lldb
      +from lldbutils import utils
      +
+def summarize_text_fragment(valobj, internal_dict):
+    """Type summary for nsTextFragment: returns its text as a quoted string.
+
+    Reads the character count and the mIs2b flag out of the state union,
+    then formats the appropriate character buffer with utils.format_string.
+    """
+    # Child 0 is the character-data union, child 1 holds mState -- this
+    # relies on nsTextFragment's member order staying unchanged.
+    content_union = valobj.GetChildAtIndex(0)
+    state_union = valobj.GetChildAtIndex(1).GetChildMemberWithName("mState")
+    length = state_union.GetChildMemberWithName("mLength").GetValueAsUnsigned(0)
+    # mIs2b presumably selects the 2-byte (m2b) vs 1-byte (m1b) buffer --
+    # matches the field names; confirm against nsTextFragment.h.
+    if state_union.GetChildMemberWithName("mIs2b").GetValueAsUnsigned(0):
+        field = "m2b"
+    else:
+        field = "m1b"
+    ptr = content_union.GetChildMemberWithName(field)
+    return utils.format_string(ptr, length)
      +
+def ptag(debugger, command, result, dict):
+    """Displays the tag name of a content node.
+
+    command must evaluate to an nsINode (or subclass) pointer in the
+    debuggee; the tag atom is reached via mNodeInfo.mRawPtr->mInner.mName.
+    """
+    debugger.HandleCommand("expr (" + command + ")->mNodeInfo.mRawPtr->mInner.mName")
      +
+def init(debugger):
+    """Registers the nsTextFragment summary and the ptag command with lldb."""
+    debugger.HandleCommand("type summary add nsTextFragment -F lldbutils.content.summarize_text_fragment")
+    debugger.HandleCommand("command script add -f lldbutils.content.ptag ptag")
      diff --git a/python/lldbutils/lldbutils/general.py b/python/lldbutils/lldbutils/general.py
      new file mode 100644
      index 000000000..27cf19aab
      --- /dev/null
      +++ b/python/lldbutils/lldbutils/general.py
      @@ -0,0 +1,105 @@
      +import lldb
      +from lldbutils import utils
      +
+def summarize_string(valobj, internal_dict):
+    """Type summary for ns(A)String-like objects: shows the actual string.
+
+    Reads the mData character pointer and mLength count and delegates
+    quoting/escaping to utils.format_string.
+    """
+    data = valobj.GetChildMemberWithName("mData")
+    length = valobj.GetChildMemberWithName("mLength").GetValueAsUnsigned(0)
+    return utils.format_string(data, length)
      +
      +class TArraySyntheticChildrenProvider:
      +    def __init__(self, valobj, internal_dict):
      +        self.valobj = valobj
      +        self.header = self.valobj.GetChildMemberWithName("mHdr")
      +        self.element_type = self.valobj.GetType().GetTemplateArgumentType(0)
      +        self.element_size = self.element_type.GetByteSize()
      +        header_size = self.header.GetType().GetPointeeType().GetByteSize()
      +        self.element_base_addr = self.header.GetValueAsUnsigned(0) + header_size
      +
      +    def num_children(self):
      +        return self.header.Dereference().GetChildMemberWithName("mLength").GetValueAsUnsigned(0)
      +
      +    def get_child_index(self, name):
      +        try:
      +            index = int(name)
      +            if index >= self.num_children():
      +                return None
      +        except:
      +            pass
      +        return None
      +
      +    def get_child_at_index(self, index):
      +        if index >= self.num_children():
      +            return None
      +        addr = self.element_base_addr + index * self.element_size
      +        return self.valobj.CreateValueFromAddress("[%d]" % index, addr, self.element_type)
      +
+def prefcnt(debugger, command, result, dict):
+    """Displays the refcount of an object.
+
+    command is an expression that evaluates either to a refcounted object
+    or to a pointer to one (smart pointers are unwrapped by
+    utils.dereference).
+    """
+    # We handle regular nsISupports-like refcounted objects, cycle
+    # collected objects and mozilla::RefCounted objects.
+    target = debugger.GetSelectedTarget()
+    process = target.GetProcess()
+    thread = process.GetSelectedThread()
+    frame = thread.GetSelectedFrame()
+    obj = frame.EvaluateExpression(command)
+    if obj.GetError().Fail():
+        print "could not evaluate expression"
+        return
+    obj = utils.dereference(obj)
+    # Most refcounted classes store the count in mRefCnt; some older
+    # ones use refCnt.
+    field = obj.GetChildMemberWithName("mRefCnt")
+    if field.GetError().Fail():
+        field = obj.GetChildMemberWithName("refCnt")
+    if field.GetError().Fail():
+        print "not a refcounted object"
+        return
+    # Dispatch on the concrete refcount type to find the numeric value.
+    refcnt_type = field.GetType().GetCanonicalType().GetName()
+    if refcnt_type == "nsAutoRefCnt":
+        print field.GetChildMemberWithName("mValue").GetValueAsUnsigned(0)
+    elif refcnt_type == "nsCycleCollectingAutoRefCnt":
+        # The low two bits appear to hold flags (per the field name);
+        # shift them off to get the count.
+        print field.GetChildMemberWithName("mRefCntAndFlags").GetValueAsUnsigned(0) >> 2
+    elif refcnt_type == "mozilla::ThreadSafeAutoRefCnt":
+        # mValue is an Atomic whose own mValue holds the number.
+        print field.GetChildMemberWithName("mValue").GetChildMemberWithName("mValue").GetValueAsUnsigned(0)
+    elif refcnt_type == "int":  # non-atomic mozilla::RefCounted object
+        print field.GetValueAsUnsigned(0)
+    elif refcnt_type == "mozilla::Atomic":  # atomic mozilla::RefCounted object
+        print field.GetChildMemberWithName("mValue").GetValueAsUnsigned(0)
+    else:
+        print "unknown mRefCnt type " + refcnt_type
      +
      +# Used to work around http://llvm.org/bugs/show_bug.cgi?id=22211
      +def callfunc(debugger, command, result, dict):
      +    """Calls a function for which debugger information is unavailable by getting its address from the symbol table.
      +       The function is assumed to return void."""
      +
      +    if '(' not in command:
      +        print 'Usage: callfunc your_function(args)'
      +        return
      +
      +    command_parts = command.split('(')
      +    funcname = command_parts[0].strip()
      +    args = command_parts[1]
      +
      +    target = debugger.GetSelectedTarget()
      +    symbols = target.FindFunctions(funcname).symbols
      +    if not symbols:
      +        print 'Could not find a function symbol for a function called "%s"' % funcname
      +        return
      +
      +    sym = symbols[0]
      +    arg_types = '()'
      +    if sym.name and sym.name.startswith(funcname + '('):
      +        arg_types = sym.name[len(funcname):]
      +    debugger.HandleCommand('print ((void(*)%s)0x%0x)(%s' % (arg_types, sym.addr.GetLoadAddress(target), args))
      +
      +def init(debugger):
      +    debugger.HandleCommand("type summary add nsAString_internal -F lldbutils.general.summarize_string")
      +    debugger.HandleCommand("type summary add nsACString_internal -F lldbutils.general.summarize_string")
      +    debugger.HandleCommand("type summary add nsFixedString -F lldbutils.general.summarize_string")
      +    debugger.HandleCommand("type summary add nsFixedCString -F lldbutils.general.summarize_string")
      +    debugger.HandleCommand("type summary add nsAutoString -F lldbutils.general.summarize_string")
      +    debugger.HandleCommand("type summary add nsAutoCString -F lldbutils.general.summarize_string")
      +    debugger.HandleCommand("type synthetic add -x \"nsTArray<\" -l lldbutils.general.TArraySyntheticChildrenProvider")
      +    debugger.HandleCommand("type synthetic add -x \"AutoTArray<\" -l lldbutils.general.TArraySyntheticChildrenProvider")
      +    debugger.HandleCommand("type synthetic add -x \"FallibleTArray<\" -l lldbutils.general.TArraySyntheticChildrenProvider")
      +    debugger.HandleCommand("command script add -f lldbutils.general.prefcnt -f lldbutils.general.prefcnt prefcnt")
      +    debugger.HandleCommand("command script add -f lldbutils.general.callfunc -f lldbutils.general.callfunc callfunc")
      diff --git a/python/lldbutils/lldbutils/gfx.py b/python/lldbutils/lldbutils/gfx.py
      new file mode 100644
      index 000000000..1ad9a37a7
      --- /dev/null
      +++ b/python/lldbutils/lldbutils/gfx.py
      @@ -0,0 +1,130 @@
      +import lldb
      +
      +def summarize_nscolor(valobj, internal_dict):
      +    colors = {
      +        "#800000": "maroon",
      +        "#ff0000": "red",
      +        "#ffa500": "orange",
      +        "#ffff00": "yellow",
      +        "#808000": "olive",
      +        "#800080": "purple",
      +        "#ff00ff": "fuchsia",
      +        "#ffffff": "white",
      +        "#00ff00": "lime",
      +        "#008000": "green",
      +        "#000080": "navy",
      +        "#0000ff": "blue",
      +        "#00ffff": "aqua",
      +        "#008080": "teal",
      +        "#000000": "black",
      +        "#c0c0c0": "silver",
      +        "#808080": "gray"
      +    }
      +    value = valobj.GetValueAsUnsigned(0)
      +    if value == 0:
      +        return "transparent"
      +    if value & 0xff000000 != 0xff000000:
      +        return "rgba(%d, %d, %d, %f)" % (value & 0xff, (value >> 8) & 0xff, (value >> 16) & 0xff, ((value >> 24) & 0xff) / 255.0)
      +    color = "#%02x%02x%02x" % (value & 0xff, (value >> 8) & 0xff, (value >> 16) & 0xff)
      +    if color in colors:
      +        return colors[color]
      +    return color
      +
+class RegionSyntheticChildrenProvider:
+    """Synthetic children for nsRegion.
+
+    Exposes "numRects" (rectangle count), "bounds" (the bounding rect)
+    and one child per rectangle, decoded from the wrapped pixman region.
+    """
+
+    def __init__(self, valobj, internal_dict, rect_type = "nsRect"):
+        # rect_type names the debuggee rect class used to present each
+        # pixman box; IntRegionSyntheticChildrenProvider passes "nsIntRect".
+        self.rect_type = rect_type
+        self.valobj = valobj
+        self.pixman_region = self.valobj.GetChildMemberWithName("mImpl")
+        self.pixman_data = self.pixman_region.GetChildMemberWithName("data")
+        self.pixman_extents = self.pixman_region.GetChildMemberWithName("extents")
+        self.num_rects = self.pixman_region_num_rects()
+        self.box_type = self.pixman_extents.GetType()
+        self.box_type_size = self.box_type.GetByteSize()
+        # The rectangle array starts immediately after the pixman data
+        # header in the same allocation.
+        self.box_list_base_ptr = self.pixman_data.GetValueAsUnsigned(0) + self.pixman_data.GetType().GetPointeeType().GetByteSize()
+
+    def pixman_region_num_rects(self):
+        # A null data pointer means the region is just its extents,
+        # i.e. a single rectangle.
+        if self.pixman_data.GetValueAsUnsigned(0):
+            return self.pixman_data.Dereference().GetChildMemberWithName("numRects").GetValueAsUnsigned(0)
+        return 1
+
+    def num_children(self):
+        # "numRects" and "bounds" precede the per-rectangle children.
+        return 2 + self.num_rects
+
+    def get_child_index(self, name):
+        if name == "numRects":
+            return 0
+        if name == "bounds":
+            return 1
+        # Per-rect children are addressed by index only, not by name.
+        return None
+
+    def convert_pixman_box_to_rect(self, valobj, name):
+        # pixman boxes store corners (x1,y1)-(x2,y2); rect classes take
+        # origin and size, so build one in the debuggee by expression.
+        x1 = valobj.GetChildMemberWithName("x1").GetValueAsSigned()
+        x2 = valobj.GetChildMemberWithName("x2").GetValueAsSigned()
+        y1 = valobj.GetChildMemberWithName("y1").GetValueAsSigned()
+        y2 = valobj.GetChildMemberWithName("y2").GetValueAsSigned()
+        return valobj.CreateValueFromExpression(name,
+            '%s(%d, %d, %d, %d)' % (self.rect_type, x1, y1, x2 - x1, y2 - y1))
+
+    def get_child_at_index(self, index):
+        if index == 0:
+            return self.pixman_data.CreateValueFromExpression('numRects', '(uint32_t)%d' % self.num_rects)
+        if index == 1:
+            return self.convert_pixman_box_to_rect(self.pixman_extents, 'bounds')
+
+        rect_index = index - 2
+        if rect_index >= self.num_rects:
+            return None
+        if self.num_rects == 1:
+            # Single-rect regions have no rect array; the one rect is
+            # the extents themselves.
+            return self.convert_pixman_box_to_rect(self.pixman_extents, 'bounds')
+        box_address = self.box_list_base_ptr + rect_index * self.box_type_size
+        box = self.pixman_data.CreateValueFromAddress('', box_address, self.box_type)
+        return self.convert_pixman_box_to_rect(box, "[%d]" % rect_index)
      +
+class IntRegionSyntheticChildrenProvider:
+    """Synthetic children for nsIntRegion.
+
+    Delegates to RegionSyntheticChildrenProvider on the wrapped region
+    (mImpl), presenting rectangles as nsIntRect instead of nsRect.
+    """
+    def __init__(self, valobj, internal_dict):
+        wrapped_region = valobj.GetChildMemberWithName("mImpl")
+        self.wrapped_provider = RegionSyntheticChildrenProvider(wrapped_region, internal_dict, "nsIntRect")
+
+    def num_children(self):
+        return self.wrapped_provider.num_children()
+
+    def get_child_index(self, name):
+        return self.wrapped_provider.get_child_index(name)
+
+    def get_child_at_index(self, index):
+        return self.wrapped_provider.get_child_at_index(index)
      +
      +def summarize_rect(valobj, internal_dict):
      +    x = valobj.GetChildMemberWithName("x").GetValue()
      +    y = valobj.GetChildMemberWithName("y").GetValue()
      +    width = valobj.GetChildMemberWithName("width").GetValue()
      +    height = valobj.GetChildMemberWithName("height").GetValue()
      +    return "%s, %s, %s, %s" % (x, y, width, height)
      +
      +def rect_is_empty(valobj):
      +    width = valobj.GetChildMemberWithName("width").GetValueAsSigned()
      +    height = valobj.GetChildMemberWithName("height").GetValueAsSigned()
      +    return width <= 0 or height <= 0
      +
+def summarize_region(valobj, internal_dict):
+    """Type summary for ns(Int)Region: "empty", "one rect: ..." or
+    "<N> rects, bounds: ..." for multi-rect regions."""
+    # This function makes use of the synthetic children generated for
+    # ns(Int)Regions: "bounds" and "numRects" are synthetic children,
+    # not real members.
+    bounds = valobj.GetChildMemberWithName("bounds")
+    bounds_summary = summarize_rect(bounds, internal_dict)
+    num_rects = valobj.GetChildMemberWithName("numRects").GetValueAsUnsigned(0)
+    if num_rects <= 1:
+        if rect_is_empty(bounds):
+            return "empty"
+        if num_rects == 1:
+            return "one rect: " + bounds_summary
+    return str(num_rects) + " rects, bounds: " + bounds_summary
      +
+def init(debugger):
+    """Registers the gfx type summaries and region synthetic children."""
+    # NOTE(review): -v appears to control how lldb combines the summary
+    # with the raw value display -- confirm against lldb's
+    # "type summary add" documentation.
+    debugger.HandleCommand("type summary add nscolor -v -F lldbutils.gfx.summarize_nscolor")
+    debugger.HandleCommand("type summary add nsRect -v -F lldbutils.gfx.summarize_rect")
+    debugger.HandleCommand("type summary add nsIntRect -v -F lldbutils.gfx.summarize_rect")
+    debugger.HandleCommand("type summary add gfxRect -v -F lldbutils.gfx.summarize_rect")
+    # Synthetic children must be registered before the region summaries,
+    # since summarize_region reads the synthetic "bounds"/"numRects".
+    debugger.HandleCommand("type synthetic add nsRegion -l lldbutils.gfx.RegionSyntheticChildrenProvider")
+    debugger.HandleCommand("type synthetic add nsIntRegion -l lldbutils.gfx.IntRegionSyntheticChildrenProvider")
+    debugger.HandleCommand("type summary add nsRegion -v -F lldbutils.gfx.summarize_region")
+    debugger.HandleCommand("type summary add nsIntRegion -v -F lldbutils.gfx.summarize_region")
      diff --git a/python/lldbutils/lldbutils/layout.py b/python/lldbutils/lldbutils/layout.py
      new file mode 100644
      index 000000000..a4894699c
      --- /dev/null
      +++ b/python/lldbutils/lldbutils/layout.py
      @@ -0,0 +1,20 @@
      +import lldb
      +
+def frametree(debugger, command, result, dict):
+    """Dumps the frame tree containing the given nsIFrame*."""
+    # command is evaluated in the debuggee; relies on
+    # nsIFrame::DumpFrameTree existing in the build.
+    debugger.HandleCommand('expr (' + command + ')->DumpFrameTree()')
      +
+def frametreelimited(debugger, command, result, dict):
+    """Dumps the subtree of a frame tree rooted at the given nsIFrame*."""
+    # command is evaluated in the debuggee; relies on
+    # nsIFrame::DumpFrameTreeLimited existing in the build.
+    debugger.HandleCommand('expr (' + command + ')->DumpFrameTreeLimited()')
      +
+def pstate(debugger, command, result, dict):
+    """Displays a frame's state bits symbolically."""
+    # command must evaluate to an nsIFrame*; mozilla::PrintFrameState
+    # does the symbolic decoding in the debuggee.
+    debugger.HandleCommand('expr mozilla::PrintFrameState(' + command + ')')
      +
      +def init(debugger):
      +    debugger.HandleCommand('command script add -f lldbutils.layout.frametree frametree')
      +    debugger.HandleCommand('command script add -f lldbutils.layout.frametreelimited frametreelimited')
      +    debugger.HandleCommand('command alias ft frametree')
      +    debugger.HandleCommand('command alias ftl frametreelimited')
      +    debugger.HandleCommand('command script add -f lldbutils.layout.pstate pstate');
      diff --git a/python/lldbutils/lldbutils/utils.py b/python/lldbutils/lldbutils/utils.py
      new file mode 100644
      index 000000000..4e038f630
      --- /dev/null
      +++ b/python/lldbutils/lldbutils/utils.py
      @@ -0,0 +1,70 @@
      +def format_char(c):
      +    if c == 0:
      +        return "\\0"
      +    elif c == 0x07:
      +        return "\\a"
      +    elif c == 0x08:
      +        return "\\b"
      +    elif c == 0x0c:
      +        return "\\f"
      +    elif c == 0x0a:
      +        return "\\n"
      +    elif c == 0x0d:
      +        return "\\r"
      +    elif c == 0x09:
      +        return "\\t"
      +    elif c == 0x0b:
      +        return "\\v"
      +    elif c == 0x5c:
      +        return "\\"
      +    elif c == 0x22:
      +        return "\\\""
      +    elif c == 0x27:
      +        return "\\'"
      +    elif c < 0x20 or c >= 0x80 and c <= 0xff:
      +        return "\\x%02x" % c
      +    elif c >= 0x0100:
      +        return "\\u%04x" % c
      +    else:
      +        return chr(c)
      +
      +# Take an SBValue that is either a char* or char16_t* and formats it like lldb
      +# would when printing it.
      +def format_string(lldb_value, length=100):
      +    ptr = lldb_value.GetValueAsUnsigned(0)
      +    char_type = lldb_value.GetType().GetPointeeType()
      +    if char_type.GetByteSize() == 1:
      +        s = "\""
      +        size = 1
      +        mask = 0xff
      +    elif char_type.GetByteSize() == 2:
      +        s = "u\""
      +        size = 2
      +        mask = 0xffff
      +    else:
      +        return "(cannot format string with char type %s)" % char_type.GetName()
      +    i = 0
      +    terminated = False
      +    while i < length:
      +        c = lldb_value.CreateValueFromAddress("x", ptr + i * size, char_type).GetValueAsUnsigned(0) & mask
      +        if c == 0:
      +            terminated = True
      +            break
      +        s += format_char(c)
      +        i = i + 1
      +    s += "\""
      +    if not terminated and i != length:
      +        s += "..."
      +    return s
      +
      +# Dereferences a raw pointer, nsCOMPtr, RefPtr, nsAutoPtr, already_AddRefed or
      +# mozilla::RefPtr; otherwise returns the value unchanged.
      +def dereference(lldb_value):
      +    if lldb_value.TypeIsPointerType():
      +        return lldb_value.Dereference()
      +    name = lldb_value.GetType().GetUnqualifiedType().GetName()
      +    if name.startswith("nsCOMPtr<") or name.startswith("RefPtr<") or name.startswith("nsAutoPtr<") or name.startswith("already_AddRefed<"):
      +        return lldb_value.GetChildMemberWithName("mRawPtr")
      +    if name.startswith("mozilla::RefPtr<"):
      +        return lldb_value.GetChildMemberWithName("ptr")
      +    return lldb_value
      diff --git a/python/mach/README.rst b/python/mach/README.rst
      new file mode 100644
      index 000000000..7c2e00bec
      --- /dev/null
      +++ b/python/mach/README.rst
      @@ -0,0 +1,13 @@
      +====
      +mach
      +====
      +
      +Mach (German for *do*) is a generic command dispatcher for the command
      +line.
      +
      +To use mach, you install the mach core (a Python package), create an
      +executable *driver* script (named whatever you want), and write mach
      +commands. When the *driver* is executed, mach dispatches to the
      +requested command handler automatically.
      +
      +To learn more, read the docs in ``docs/``.
      diff --git a/python/mach/bash-completion.sh b/python/mach/bash-completion.sh
      new file mode 100644
      index 000000000..e4b151f24
      --- /dev/null
      +++ b/python/mach/bash-completion.sh
      @@ -0,0 +1,29 @@
+function _mach()
+{
+  # Bash programmable-completion handler for the mach driver.
+  local cur cmds c subcommand
+  COMPREPLY=()
+
+  # Load the list of commands by asking the driver itself.
+  cmds=`"${COMP_WORDS[0]}" mach-commands`
+
+  # Look for the subcommand: scan the words typed so far (excluding the
+  # word being completed) for anything matching a known command.
+  cur="${COMP_WORDS[COMP_CWORD]}"
+  subcommand=""
+  c=1
+  while [ $c -lt $COMP_CWORD ]; do
+    word="${COMP_WORDS[c]}"
+    for cmd in $cmds; do
+      if [ "$cmd" = "$word" ]; then
+        subcommand="$word"
+      fi
+    done
+    c=$((++c))
+  done
+
+  # Only complete command names while no subcommand has been typed yet,
+  # or for "help", which takes a command name as its argument.
+  if [[ "$subcommand" == "help" || -z "$subcommand" ]]; then
+      COMPREPLY=( $(compgen -W "$cmds" -- ${cur}) )
+  fi
+
+  return 0
+}
+# Fall back to default (filename) completion for everything else.
+complete -o default -F _mach mach
      diff --git a/python/mach/docs/commands.rst b/python/mach/docs/commands.rst
      new file mode 100644
      index 000000000..af2973dd7
      --- /dev/null
      +++ b/python/mach/docs/commands.rst
      @@ -0,0 +1,145 @@
      +.. _mach_commands:
      +
      +=====================
      +Implementing Commands
      +=====================
      +
      +Mach commands are defined via Python decorators.
      +
      +All the relevant decorators are defined in the *mach.decorators* module.
      +The important decorators are as follows:
      +
+:py:func:`CommandProvider <mach.decorators.CommandProvider>`
      +  A class decorator that denotes that a class contains mach
      +  commands. The decorator takes no arguments.
      +
+:py:func:`Command <mach.decorators.Command>`
      +  A method decorator that denotes that the method should be called when
      +  the specified command is requested. The decorator takes a command name
      +  as its first argument and a number of additional arguments to
      +  configure the behavior of the command.
      +
+:py:func:`CommandArgument <mach.decorators.CommandArgument>`
      +  A method decorator that defines an argument to the command. Its
      +  arguments are essentially proxied to ArgumentParser.add_argument()
      +
      +:py:func:`SubCommand <mach.decorators.SubCommand>`
      +  A method decorator that denotes that the method should be a
      +  sub-command to an existing ``@Command``. The decorator takes the
      +  parent command name as its first argument and the sub-command name
      +  as its second argument.
      +
      +  ``@CommandArgument`` can be used on ``@SubCommand`` instances just
      +  like they can on ``@Command`` instances.
      +
      +Classes with the ``@CommandProvider`` decorator **must** have an
      +``__init__`` method that accepts 1 or 2 arguments. If it accepts 2
      +arguments, the 2nd argument will be a
      +:py:class:`mach.base.CommandContext` instance.
      +
      +Here is a complete example:
      +
      +.. code-block:: python
      +
      +   from mach.decorators import (
      +       CommandArgument,
      +       CommandProvider,
      +       Command,
      +   )
      +
      +   @CommandProvider
      +   class MyClass(object):
      +       @Command('doit', help='Do ALL OF THE THINGS.')
      +       @CommandArgument('--force', '-f', action='store_true',
      +           help='Force doing it.')
      +       def doit(self, force=False):
      +           # Do stuff here.
      +           pass
      +
      +When the module is loaded, the decorators tell mach about all handlers.
      +When mach runs, it takes the assembled metadata from these handlers and
      +hooks it up to the command line driver. Under the hood, arguments passed
      +to the decorators are being used to help mach parse command arguments,
      +formulate arguments to the methods, etc. See the documentation in the
      +:py:mod:`mach.base` module for more.
      +
      +The Python modules defining mach commands do not need to live inside the
      +main mach source tree.
      +
      +Conditionally Filtering Commands
      +================================
      +
      +Sometimes it might only make sense to run a command given a certain
      +context. For example, running tests only makes sense if the product
      +they are testing has been built, and said build is available. To make
      +sure a command is only runnable from within a correct context, you can
      +define a series of conditions on the
      +:py:func:`Command <mach.decorators.Command>` decorator.
      +
      +A condition is simply a function that takes an instance of the
      +:py:func:`mach.decorators.CommandProvider` class as an argument, and
      +returns ``True`` or ``False``. If any of the conditions defined on a
      +command return ``False``, the command will not be runnable. The
      +docstring of a condition function is used in error messages, to explain
      +why the command cannot currently be run.
      +
      +Here is an example:
      +
      +.. code-block:: python
      +
      +   from mach.decorators import (
      +       CommandProvider,
      +       Command,
      +   )
      +
      +   def build_available(cls):
      +       """The build needs to be available."""
      +       return cls.build_path is not None
      +
      +   @CommandProvider
      +   class MyClass(MachCommandBase):
      +       def __init__(self, build_path=None):
      +           self.build_path = build_path
      +
      +       @Command('run_tests', conditions=[build_available])
      +       def run_tests(self):
      +           # Do stuff here.
      +           pass
      +
      +It is important to make sure that any state needed by the condition is
      +available to instances of the command provider.
      +
      +By default all commands without any conditions applied will be runnable,
      +but it is possible to change this behaviour by setting
      +``require_conditions`` to ``True``:
      +
      +.. code-block:: python
      +
      +   m = mach.main.Mach()
      +   m.require_conditions = True
      +
      +Minimizing Code in Commands
      +===========================
      +
      +Mach command modules, classes, and methods work best when they are
      +minimal dispatchers. The reason is import bloat. Currently, the mach
      +core needs to import every Python file potentially containing mach
      +commands for every command invocation. If you have dozens of commands or
      +commands in modules that import a lot of Python code, these imports
      +could slow mach down and waste memory.
      +
      +It is thus recommended that mach modules, classes, and methods do as
      +little work as possible. Ideally the module should only import from
      +the :py:mod:`mach` package. If you need external modules, you should
      +import them from within the command method.
      +
      +To keep code size small, the body of a command method should be limited
      +to:
      +
      +1. Obtaining user input (parsing arguments, prompting, etc)
      +2. Calling into some other Python package
      +3. Formatting output
      +
      +Of course, these recommendations can be ignored if you want to risk
      +slower performance.
      +
      +In the future, the mach driver may cache the dispatching information or
      +have it intelligently loaded to facilitate lazy loading.
      diff --git a/python/mach/docs/driver.rst b/python/mach/docs/driver.rst
      new file mode 100644
      index 000000000..022ebe657
      --- /dev/null
      +++ b/python/mach/docs/driver.rst
      @@ -0,0 +1,51 @@
      +.. _mach_driver:
      +
      +=======
      +Drivers
      +=======
      +
      +Entry Points
      +============
      +
      +It is possible to use setuptools' entry points to load commands
      +directly from python packages. A mach entry point is a function which
      +returns a list of files or directories containing mach command
      +providers. e.g.:
      +
      +.. code-block:: python
      +
      +   def list_providers():
      +       providers = []
      +       here = os.path.abspath(os.path.dirname(__file__))
      +       for p in os.listdir(here):
      +           if p.endswith('.py'):
      +               providers.append(os.path.join(here, p))
      +       return providers
      +
      +See http://pythonhosted.org/setuptools/setuptools.html#dynamic-discovery-of-services-and-plugins
      +for more information on creating an entry point. To search for entry
      +point plugins, you can call
      +:py:meth:`mach.main.Mach.load_commands_from_entry_point`. e.g.:
      +
      +.. code-block:: python
      +
      +   mach.load_commands_from_entry_point("mach.external.providers")
      +
      +Adding Global Arguments
      +=======================
      +
      +Arguments to mach commands are usually command-specific. However,
      +mach ships with a handful of global arguments that apply to all
      +commands.
      +
      +It is possible to extend the list of global arguments. In your
      +*mach driver*, simply call
      +:py:meth:`mach.main.Mach.add_global_argument`. e.g.:
      +
      +.. code-block:: python
      +
      +   mach = mach.main.Mach(os.getcwd())
      +
      +   # Will allow --example to be specified on every mach command.
      +   mach.add_global_argument('--example', action='store_true',
      +       help='Demonstrate an example global argument.')
      diff --git a/python/mach/docs/index.rst b/python/mach/docs/index.rst
      new file mode 100644
      index 000000000..cd2056333
      --- /dev/null
      +++ b/python/mach/docs/index.rst
      @@ -0,0 +1,75 @@
      +====
      +mach
      +====
      +
      +Mach (German for *do*) is a generic command dispatcher for the command
      +line.
      +
      +To use mach, you install the mach core (a Python package), create an
      +executable *driver* script (named whatever you want), and write mach
      +commands. When the *driver* is executed, mach dispatches to the
      +requested command handler automatically.
      +
      +Features
      +========
      +
      +On a high level, mach is similar to using argparse with subparsers (for
      +command handling). When you dig deeper, mach offers a number of
      +additional features:
      +
      +Distributed command definitions
      +  With optparse/argparse, you have to define your commands on a central
      +  parser instance. With mach, you annotate your command methods with
      +  decorators and mach finds and dispatches to them automatically.
      +
      +Command categories
      +  Mach commands can be grouped into categories when displayed in help.
      +  This is currently not possible with argparse.
      +
      +Logging management
      +  Mach provides a facility for logging (both classical text and
      +  structured) that is available to any command handler.
      +
      +Settings files
      +  Mach provides a facility for reading settings from an ini-like file
      +  format.
      +
      +Components
      +==========
      +
      +Mach is conceptually composed of the following components:
      +
      +core
      +  The mach core is the core code powering mach. This is a Python package
      +  that contains all the business logic that makes mach work. The mach
      +  core is common to all mach deployments.
      +
      +commands
      +  These are what mach dispatches to. Commands are simply Python methods
      +  registered as command names. The set of commands is unique to the
      +  environment mach is deployed in.
      +
      +driver
      +  The *driver* is the entry-point to mach. It is simply an executable
      +  script that loads the mach core, tells it where commands can be found,
      +  then asks the mach core to handle the current request. The driver is
      +  unique to the deployed environment. But, it's usually based on an
      +  example from this source tree.
      +
      +Project State
      +=============
      +
      +mach was originally written as a command dispatching framework to aid
      +Firefox development. While the code is mostly generic, there are still
      +some pieces that closely tie it to Mozilla/Firefox. The goal is for
      +these to eventually be removed and replaced with generic features so
      +mach is suitable for anybody to use. Until then, mach may not be the
      +best fit for you.
      +
      +.. toctree::
      +   :maxdepth: 1
      +
      +   commands
      +   driver
      +   logging
      +   settings
      diff --git a/python/mach/docs/logging.rst b/python/mach/docs/logging.rst
      new file mode 100644
      index 000000000..ff245cf03
      --- /dev/null
      +++ b/python/mach/docs/logging.rst
      @@ -0,0 +1,100 @@
      +.. _mach_logging:
      +
      +=======
      +Logging
      +=======
      +
      +Mach configures a built-in logging facility so commands can easily log
      +data.
      +
      +What sets the logging facility apart from most loggers you've seen is
      +that it encourages structured logging. Instead of conventional logging
      +where simple strings are logged, the internal logging mechanism logs all
      +events with the following pieces of information:
      +
      +* A string *action*
      +* A dict of log message fields
      +* A formatting string
      +
      +Essentially, instead of assembling a human-readable string at
      +logging-time, you create an object holding all the pieces of data that
      +will constitute your logged event. For each unique type of logged event,
      +you assign an *action* name.
      +
      +Depending on how logging is configured, your logged event could get
      +written a couple of different ways.
      +
      +JSON Logging
      +============
      +
      +Where machines are the intended target of the logging data, a JSON
      +logger is configured. The JSON logger assembles an array consisting of
      +the following elements:
      +
      +* Decimal wall clock time in seconds since UNIX epoch
      +* String *action* of message
      +* Object with structured message data
      +
      +The JSON-serialized array is written to a configured file handle.
      +Consumers of this logging stream can just perform a readline() then feed
      +that into a JSON deserializer to reconstruct the original logged
      +message. They can key off the *action* element to determine how to
      +process individual events. There is no need to invent a parser.
      +Convenient, isn't it?
      +
      +Logging for Humans
      +==================
      +
      +Where humans are the intended consumer of a log message, the structured
      +log messages are converted to a more human-friendly form. This is done by
      +utilizing the *formatting* string provided at log time. The logger
      +simply calls the *format* method of the formatting string, passing the
      +dict containing the message's fields.
      +
      +When *mach* is used in a terminal that supports it, the logging facility
      +also supports terminal features such as colorization. This is done
      +automatically in the logging layer - there is no need to control this at
      +logging time.
      +
      +In addition, messages intended for humans typically prepend every line
      +with the time passed since the application started.
      +
      +Logging HOWTO
      +=============
      +
      +Structured logging piggybacks on top of Python's built-in logging
      +infrastructure provided by the *logging* package. We accomplish this by
      +taking advantage of *logging.Logger.log()*'s *extra* argument. To this
      +argument, we pass a dict with the fields *action* and *params*. These
      +are the string *action* and dict of message fields, respectively. The
      +formatting string is passed as the *msg* argument, like normal.
      +
      +If you were logging to a logger directly, you would do something like:
      +
      +.. code-block:: python
      +
      +   logger.log(logging.INFO, 'My name is {name}',
      +       extra={'action': 'my_name', 'params': {'name': 'Gregory'}})
      +
      +The JSON logging would produce something like::
      +
      +   [1339985554.306338, "my_name", {"name": "Gregory"}]
      +
      +Human logging would produce something like::
      +
      +   0.52 My name is Gregory
      +
      +Since there is a lot of complexity using logger.log directly, it is
      +recommended to go through a wrapping layer that hides part of the
      +complexity for you. The easiest way to do this is by utilizing the
      +LoggingMixin:
      +
      +.. code-block:: python
      +
      +   import logging
      +   from mach.mixin.logging import LoggingMixin
      +
      +   class MyClass(LoggingMixin):
      +       def foo(self):
      +            self.log(logging.INFO, 'foo_start', {'bar': True},
      +                'Foo performed. Bar: {bar}')
      diff --git a/python/mach/docs/settings.rst b/python/mach/docs/settings.rst
      new file mode 100644
      index 000000000..b51dc54a2
      --- /dev/null
      +++ b/python/mach/docs/settings.rst
      @@ -0,0 +1,140 @@
      +.. _mach_settings:
      +
      +========
      +Settings
      +========
      +
      +Mach can read settings in from a set of configuration files. These
      +configuration files are either named ``machrc`` or ``.machrc`` and
      +are specified by the bootstrap script. In mozilla-central, these files
      +can live in ``~/.mozbuild`` and/or ``topsrcdir``.
      +
      +Settings can be specified anywhere, and used both by mach core or
      +individual commands.
      +
      +
      +Core Settings
      +=============
      +
      +These settings are implemented by mach core.
      +
      +* alias - Create a command alias. This is useful if you want to alias a command to something else, optionally including some defaults. It can either be used to create an entire new command, or provide defaults for an existing one. For example:
      +
      +.. parsed-literal::
      +
      +    [alias]
      +    mochitest = mochitest -f browser
      +    browser-test = mochitest -f browser
      +
      +
      +Defining Settings
      +=================
      +
      +Settings need to be explicitly defined, along with their type,
      +otherwise mach will throw when trying to access them.
      +
      +To define settings, use the :func:`~decorators.SettingsProvider`
      +decorator in an existing mach command module. E.g:
      +
      +.. code-block:: python
      +
      +    from mach.decorators import SettingsProvider
      +
      +    @SettingsProvider
      +    class ArbitraryClassName(object):
      +        config_settings = [
      +            ('foo.bar', 'string'),
      +            ('foo.baz', 'int', 0, set([0,1,2])),
      +        ]
      +
      +``@SettingsProvider``'s must specify a variable called ``config_settings``
      +that returns a list of tuples. Alternatively, it can specify a function
      +called ``config_settings`` that returns a list of tuples.
      +
      +Each tuple is of the form:
      +
      +.. code-block:: python
      +
      +    ('
      .