author     Matt A. Tobin <email@mattatobin.com>  2018-02-09 06:46:43 -0500
committer  Matt A. Tobin <email@mattatobin.com>  2018-02-09 06:46:43 -0500
commit     ac46df8daea09899ce30dc8fd70986e258c746bf (patch)
tree       2750d3125fc253fd5b0671e4bd268eff1fd97296 /addon-sdk/source/python-lib/simplejson
parent     8cecf8d5208f3945b35f879bba3015bb1a11bec6 (diff)
Move Add-on SDK source to toolkit/jetpack
Diffstat (limited to 'addon-sdk/source/python-lib/simplejson')
-rw-r--r--  addon-sdk/source/python-lib/simplejson/LICENSE.txt    19
-rw-r--r--  addon-sdk/source/python-lib/simplejson/__init__.py   376
-rw-r--r--  addon-sdk/source/python-lib/simplejson/decoder.py    343
-rw-r--r--  addon-sdk/source/python-lib/simplejson/encoder.py    395
-rw-r--r--  addon-sdk/source/python-lib/simplejson/scanner.py     67
-rw-r--r--  addon-sdk/source/python-lib/simplejson/tool.py         44
6 files changed, 0 insertions, 1244 deletions
diff --git a/addon-sdk/source/python-lib/simplejson/LICENSE.txt b/addon-sdk/source/python-lib/simplejson/LICENSE.txt
deleted file mode 100644
index ad95f29c1..000000000
--- a/addon-sdk/source/python-lib/simplejson/LICENSE.txt
+++ /dev/null
@@ -1,19 +0,0 @@
-Copyright (c) 2006 Bob Ippolito
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/addon-sdk/source/python-lib/simplejson/__init__.py b/addon-sdk/source/python-lib/simplejson/__init__.py
deleted file mode 100644
index adcce7efb..000000000
--- a/addon-sdk/source/python-lib/simplejson/__init__.py
+++ /dev/null
@@ -1,376 +0,0 @@
-r"""
-A simple, fast, extensible JSON encoder and decoder
-
-JSON (JavaScript Object Notation) <http://json.org> is a subset of
-JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
-interchange format.
-
-simplejson exposes an API familiar to users of the standard library
-marshal and pickle modules.
-
-Encoding basic Python object hierarchies::
-
- >>> import simplejson
- >>> simplejson.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
- '["foo", {"bar": ["baz", null, 1.0, 2]}]'
- >>> print simplejson.dumps("\"foo\bar")
- "\"foo\bar"
- >>> print simplejson.dumps(u'\u1234')
- "\u1234"
- >>> print simplejson.dumps('\\')
- "\\"
- >>> print simplejson.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
- {"a": 0, "b": 0, "c": 0}
- >>> from StringIO import StringIO
- >>> io = StringIO()
- >>> simplejson.dump(['streaming API'], io)
- >>> io.getvalue()
- '["streaming API"]'
-
-Compact encoding::
-
- >>> import simplejson
- >>> simplejson.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
- '[1,2,3,{"4":5,"6":7}]'
-
-Pretty printing::
-
- >>> import simplejson
- >>> print simplejson.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
- {
- "4": 5,
- "6": 7
- }
-
-Decoding JSON::
-
- >>> import simplejson
- >>> simplejson.loads('["foo", {"bar":["baz", null, 1.0, 2]}]')
- [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
- >>> simplejson.loads('"\\"foo\\bar"')
- u'"foo\x08ar'
- >>> from StringIO import StringIO
- >>> io = StringIO('["streaming API"]')
- >>> simplejson.load(io)
- [u'streaming API']
-
-Specializing JSON object decoding::
-
- >>> import simplejson
- >>> def as_complex(dct):
- ... if '__complex__' in dct:
- ... return complex(dct['real'], dct['imag'])
- ... return dct
- ...
- >>> simplejson.loads('{"__complex__": true, "real": 1, "imag": 2}',
- ... object_hook=as_complex)
- (1+2j)
- >>> import decimal
- >>> simplejson.loads('1.1', parse_float=decimal.Decimal)
- Decimal("1.1")
-
-Extending JSONEncoder::
-
- >>> import simplejson
- >>> class ComplexEncoder(simplejson.JSONEncoder):
- ... def default(self, obj):
- ... if isinstance(obj, complex):
- ... return [obj.real, obj.imag]
- ... return simplejson.JSONEncoder.default(self, obj)
- ...
-    >>> simplejson.dumps(2 + 1j, cls=ComplexEncoder)
- '[2.0, 1.0]'
- >>> ComplexEncoder().encode(2 + 1j)
- '[2.0, 1.0]'
- >>> list(ComplexEncoder().iterencode(2 + 1j))
- ['[', '2.0', ', ', '1.0', ']']
-
-
-Using simplejson from the shell to validate and
-pretty-print::
-
- $ echo '{"json":"obj"}' | python -msimplejson.tool
- {
- "json": "obj"
- }
- $ echo '{ 1.2:3.4}' | python -msimplejson.tool
- Expecting property name: line 1 column 2 (char 2)
-
-Note that the JSON produced by this module's default settings
-is a subset of YAML, so it may be used as a serializer for that as well.
-"""
-__version__ = '1.9.2'
-__all__ = [
- 'dump', 'dumps', 'load', 'loads',
- 'JSONDecoder', 'JSONEncoder',
-]
-
-if __name__ == '__main__':
- import warnings
-    warnings.warn('python -msimplejson is deprecated, use python -msimplejson.tool', DeprecationWarning)
- from simplejson.decoder import JSONDecoder
- from simplejson.encoder import JSONEncoder
-else:
- from decoder import JSONDecoder
- from encoder import JSONEncoder
-
-_default_encoder = JSONEncoder(
- skipkeys=False,
- ensure_ascii=True,
- check_circular=True,
- allow_nan=True,
- indent=None,
- separators=None,
- encoding='utf-8',
- default=None,
-)
-
-def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
- allow_nan=True, cls=None, indent=None, separators=None,
- encoding='utf-8', default=None, **kw):
- """
- Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
- ``.write()``-supporting file-like object).
-
- If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types
- (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
- will be skipped instead of raising a ``TypeError``.
-
-    If ``ensure_ascii`` is ``False``, then some chunks written to ``fp``
- may be ``unicode`` instances, subject to normal Python ``str`` to
- ``unicode`` coercion rules. Unless ``fp.write()`` explicitly
- understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
- to cause an error.
-
- If ``check_circular`` is ``False``, then the circular reference check
- for container types will be skipped and a circular reference will
- result in an ``OverflowError`` (or worse).
-
- If ``allow_nan`` is ``False``, then it will be a ``ValueError`` to
- serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
- in strict compliance of the JSON specification, instead of using the
- JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
-
- If ``indent`` is a non-negative integer, then JSON array elements and object
- members will be pretty-printed with that indent level. An indent level
- of 0 will only insert newlines. ``None`` is the most compact representation.
-
- If ``separators`` is an ``(item_separator, dict_separator)`` tuple
- then it will be used instead of the default ``(', ', ': ')`` separators.
- ``(',', ':')`` is the most compact JSON representation.
-
- ``encoding`` is the character encoding for str instances, default is UTF-8.
-
- ``default(obj)`` is a function that should return a serializable version
- of obj or raise TypeError. The default simply raises TypeError.
-
- To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
- ``.default()`` method to serialize additional types), specify it with
- the ``cls`` kwarg.
- """
- # cached encoder
- if (skipkeys is False and ensure_ascii is True and
- check_circular is True and allow_nan is True and
- cls is None and indent is None and separators is None and
- encoding == 'utf-8' and default is None and not kw):
- iterable = _default_encoder.iterencode(obj)
- else:
- if cls is None:
- cls = JSONEncoder
- iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
- check_circular=check_circular, allow_nan=allow_nan, indent=indent,
- separators=separators, encoding=encoding,
- default=default, **kw).iterencode(obj)
- # could accelerate with writelines in some versions of Python, at
- # a debuggability cost
- for chunk in iterable:
- fp.write(chunk)
-
-
-def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
- allow_nan=True, cls=None, indent=None, separators=None,
- encoding='utf-8', default=None, **kw):
- """
- Serialize ``obj`` to a JSON formatted ``str``.
-
- If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types
- (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
- will be skipped instead of raising a ``TypeError``.
-
- If ``ensure_ascii`` is ``False``, then the return value will be a
- ``unicode`` instance subject to normal Python ``str`` to ``unicode``
- coercion rules instead of being escaped to an ASCII ``str``.
-
- If ``check_circular`` is ``False``, then the circular reference check
- for container types will be skipped and a circular reference will
- result in an ``OverflowError`` (or worse).
-
- If ``allow_nan`` is ``False``, then it will be a ``ValueError`` to
- serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
- strict compliance of the JSON specification, instead of using the
- JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
-
- If ``indent`` is a non-negative integer, then JSON array elements and
- object members will be pretty-printed with that indent level. An indent
- level of 0 will only insert newlines. ``None`` is the most compact
- representation.
-
- If ``separators`` is an ``(item_separator, dict_separator)`` tuple
- then it will be used instead of the default ``(', ', ': ')`` separators.
- ``(',', ':')`` is the most compact JSON representation.
-
- ``encoding`` is the character encoding for str instances, default is UTF-8.
-
- ``default(obj)`` is a function that should return a serializable version
- of obj or raise TypeError. The default simply raises TypeError.
-
- To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
- ``.default()`` method to serialize additional types), specify it with
- the ``cls`` kwarg.
- """
- # cached encoder
- if (skipkeys is False and ensure_ascii is True and
- check_circular is True and allow_nan is True and
- cls is None and indent is None and separators is None and
- encoding == 'utf-8' and default is None and not kw):
- return _default_encoder.encode(obj)
- if cls is None:
- cls = JSONEncoder
- return cls(
- skipkeys=skipkeys, ensure_ascii=ensure_ascii,
- check_circular=check_circular, allow_nan=allow_nan, indent=indent,
- separators=separators, encoding=encoding, default=default,
- **kw).encode(obj)
-
-
-_default_decoder = JSONDecoder(encoding=None, object_hook=None)
-
-
-def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
- parse_int=None, parse_constant=None, **kw):
- """
- Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
- a JSON document) to a Python object.
-
- If the contents of ``fp`` is encoded with an ASCII based encoding other
- than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
- be specified. Encodings that are not ASCII based (such as UCS-2) are
- not allowed, and should be wrapped with
-    ``codecs.getreader(encoding)(fp)``, or simply decoded to a ``unicode``
-    object and passed to ``loads()``.
-
- ``object_hook`` is an optional function that will be called with the
- result of any object literal decode (a ``dict``). The return value of
- ``object_hook`` will be used instead of the ``dict``. This feature
- can be used to implement custom decoders (e.g. JSON-RPC class hinting).
-
- To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
- kwarg.
- """
- return loads(fp.read(),
- encoding=encoding, cls=cls, object_hook=object_hook,
- parse_float=parse_float, parse_int=parse_int,
- parse_constant=parse_constant, **kw)
-
-
-def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
- parse_int=None, parse_constant=None, **kw):
- """
- Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
- document) to a Python object.
-
- If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
- other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
- must be specified. Encodings that are not ASCII based (such as UCS-2)
- are not allowed and should be decoded to ``unicode`` first.
-
- ``object_hook`` is an optional function that will be called with the
- result of any object literal decode (a ``dict``). The return value of
- ``object_hook`` will be used instead of the ``dict``. This feature
- can be used to implement custom decoders (e.g. JSON-RPC class hinting).
-
- ``parse_float``, if specified, will be called with the string
- of every JSON float to be decoded. By default this is equivalent to
- float(num_str). This can be used to use another datatype or parser
- for JSON floats (e.g. decimal.Decimal).
-
- ``parse_int``, if specified, will be called with the string
- of every JSON int to be decoded. By default this is equivalent to
- int(num_str). This can be used to use another datatype or parser
- for JSON integers (e.g. float).
-
- ``parse_constant``, if specified, will be called with one of the
- following strings: -Infinity, Infinity, NaN, null, true, false.
- This can be used to raise an exception if invalid JSON numbers
- are encountered.
-
- To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
- kwarg.
- """
- if (cls is None and encoding is None and object_hook is None and
- parse_int is None and parse_float is None and
- parse_constant is None and not kw):
- return _default_decoder.decode(s)
- if cls is None:
- cls = JSONDecoder
- if object_hook is not None:
- kw['object_hook'] = object_hook
- if parse_float is not None:
- kw['parse_float'] = parse_float
- if parse_int is not None:
- kw['parse_int'] = parse_int
- if parse_constant is not None:
- kw['parse_constant'] = parse_constant
- return cls(encoding=encoding, **kw).decode(s)
-
-
-#
-# Compatibility cruft from other libraries
-#
-
-
-def decode(s):
- """
- demjson, python-cjson API compatibility hook. Use loads(s) instead.
- """
- import warnings
- warnings.warn("simplejson.loads(s) should be used instead of decode(s)",
- DeprecationWarning)
- return loads(s)
-
-
-def encode(obj):
- """
- demjson, python-cjson compatibility hook. Use dumps(s) instead.
- """
- import warnings
- warnings.warn("simplejson.dumps(s) should be used instead of encode(s)",
- DeprecationWarning)
- return dumps(obj)
-
-
-def read(s):
- """
- jsonlib, JsonUtils, python-json, json-py API compatibility hook.
- Use loads(s) instead.
- """
- import warnings
- warnings.warn("simplejson.loads(s) should be used instead of read(s)",
- DeprecationWarning)
- return loads(s)
-
-
-def write(obj):
- """
- jsonlib, JsonUtils, python-json, json-py API compatibility hook.
- Use dumps(s) instead.
- """
- import warnings
- warnings.warn("simplejson.dumps(s) should be used instead of write(s)",
- DeprecationWarning)
- return dumps(obj)
-
-
-if __name__ == '__main__':
- import simplejson.tool
- simplejson.tool.main()
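The module removed above is the classic simplejson public API: dump/dumps/load/loads plus the object_hook, parse_float and cls extension points described in the docstrings. For reference, that API survives almost unchanged in the standard-library json module, which descends from simplejson; the following is a minimal sketch of the docstring patterns using stdlib json on Python 3 rather than the removed Python 2 package::

    import json
    from decimal import Decimal
    from io import StringIO

    # Basic round trip, mirroring the "Encoding basic Python object
    # hierarchies" and "Decoding JSON" doctests above.
    payload = ['foo', {'bar': ('baz', None, 1.0, 2)}]
    text = json.dumps(payload)
    assert text == '["foo", {"bar": ["baz", null, 1.0, 2]}]'
    assert json.loads(text) == ['foo', {'bar': ['baz', None, 1.0, 2]}]

    # Streaming API: dump() writes to any .write()-supporting object.
    buf = StringIO()
    json.dump(['streaming API'], buf)
    assert buf.getvalue() == '["streaming API"]'

    # Specializing object decoding with object_hook.
    def as_complex(dct):
        if '__complex__' in dct:
            return complex(dct['real'], dct['imag'])
        return dct

    assert json.loads('{"__complex__": true, "real": 1, "imag": 2}',
                      object_hook=as_complex) == (1 + 2j)

    # Exact decimals instead of binary floats via parse_float.
    assert json.loads('1.1', parse_float=Decimal) == Decimal('1.1')

    # Compact vs. pretty output via separators/indent.
    data = {'4': 5, '6': 7}
    print(json.dumps(data, separators=(',', ':')))       # {"4":5,"6":7}
    print(json.dumps(data, sort_keys=True, indent=4))    # pretty-printed

The only difference that matters for these examples is that Python 3's loads() returns str where the docstrings above show unicode literals.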
diff --git a/addon-sdk/source/python-lib/simplejson/decoder.py b/addon-sdk/source/python-lib/simplejson/decoder.py
deleted file mode 100644
index baf10e990..000000000
--- a/addon-sdk/source/python-lib/simplejson/decoder.py
+++ /dev/null
@@ -1,343 +0,0 @@
-"""
-Implementation of JSONDecoder
-"""
-import re
-import sys
-
-from simplejson.scanner import Scanner, pattern
-try:
- from simplejson._speedups import scanstring as c_scanstring
-except ImportError:
- pass
-
-FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
-
-def _floatconstants():
- import struct
- import sys
- _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
- if sys.byteorder != 'big':
- _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
- nan, inf = struct.unpack('dd', _BYTES)
- return nan, inf, -inf
-
-NaN, PosInf, NegInf = _floatconstants()
-
-
-def linecol(doc, pos):
- lineno = doc.count('\n', 0, pos) + 1
- if lineno == 1:
- colno = pos
- else:
- colno = pos - doc.rindex('\n', 0, pos)
- return lineno, colno
-
-
-def errmsg(msg, doc, pos, end=None):
- lineno, colno = linecol(doc, pos)
- if end is None:
- return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
- endlineno, endcolno = linecol(doc, end)
- return '%s: line %d column %d - line %d column %d (char %d - %d)' % (
- msg, lineno, colno, endlineno, endcolno, pos, end)
-
-
-_CONSTANTS = {
- '-Infinity': NegInf,
- 'Infinity': PosInf,
- 'NaN': NaN,
- 'true': True,
- 'false': False,
- 'null': None,
-}
-
-def JSONConstant(match, context, c=_CONSTANTS):
- s = match.group(0)
- fn = getattr(context, 'parse_constant', None)
- if fn is None:
- rval = c[s]
- else:
- rval = fn(s)
- return rval, None
-pattern('(-?Infinity|NaN|true|false|null)')(JSONConstant)
-
-
-def JSONNumber(match, context):
- match = JSONNumber.regex.match(match.string, *match.span())
- integer, frac, exp = match.groups()
- if frac or exp:
- fn = getattr(context, 'parse_float', None) or float
- res = fn(integer + (frac or '') + (exp or ''))
- else:
- fn = getattr(context, 'parse_int', None) or int
- res = fn(integer)
- return res, None
-pattern(r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?')(JSONNumber)
-
-
-STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
-BACKSLASH = {
- '"': u'"', '\\': u'\\', '/': u'/',
- 'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
-}
-
-DEFAULT_ENCODING = "utf-8"
-
-def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
- if encoding is None:
- encoding = DEFAULT_ENCODING
- chunks = []
- _append = chunks.append
- begin = end - 1
- while 1:
- chunk = _m(s, end)
- if chunk is None:
- raise ValueError(
- errmsg("Unterminated string starting at", s, begin))
- end = chunk.end()
- content, terminator = chunk.groups()
- if content:
- if not isinstance(content, unicode):
- content = unicode(content, encoding)
- _append(content)
- if terminator == '"':
- break
- elif terminator != '\\':
- if strict:
- raise ValueError(errmsg("Invalid control character %r at", s, end))
- else:
- _append(terminator)
- continue
- try:
- esc = s[end]
- except IndexError:
- raise ValueError(
- errmsg("Unterminated string starting at", s, begin))
- if esc != 'u':
- try:
- m = _b[esc]
- except KeyError:
- raise ValueError(
- errmsg("Invalid \\escape: %r" % (esc,), s, end))
- end += 1
- else:
- esc = s[end + 1:end + 5]
- next_end = end + 5
- msg = "Invalid \\uXXXX escape"
- try:
- if len(esc) != 4:
- raise ValueError
- uni = int(esc, 16)
- if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
- msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
- if not s[end + 5:end + 7] == '\\u':
- raise ValueError
- esc2 = s[end + 7:end + 11]
- if len(esc2) != 4:
- raise ValueError
- uni2 = int(esc2, 16)
- uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
- next_end += 6
- m = unichr(uni)
- except ValueError:
- raise ValueError(errmsg(msg, s, end))
- end = next_end
- _append(m)
- return u''.join(chunks), end
-
-
-# Use speedup
-try:
- scanstring = c_scanstring
-except NameError:
- scanstring = py_scanstring
-
-def JSONString(match, context):
- encoding = getattr(context, 'encoding', None)
- strict = getattr(context, 'strict', True)
- return scanstring(match.string, match.end(), encoding, strict)
-pattern(r'"')(JSONString)
-
-
-WHITESPACE = re.compile(r'\s*', FLAGS)
-
-def JSONObject(match, context, _w=WHITESPACE.match):
- pairs = {}
- s = match.string
- end = _w(s, match.end()).end()
- nextchar = s[end:end + 1]
- # Trivial empty object
- if nextchar == '}':
- return pairs, end + 1
- if nextchar != '"':
- raise ValueError(errmsg("Expecting property name", s, end))
- end += 1
- encoding = getattr(context, 'encoding', None)
- strict = getattr(context, 'strict', True)
- iterscan = JSONScanner.iterscan
- while True:
- key, end = scanstring(s, end, encoding, strict)
- end = _w(s, end).end()
- if s[end:end + 1] != ':':
- raise ValueError(errmsg("Expecting : delimiter", s, end))
- end = _w(s, end + 1).end()
- try:
- value, end = iterscan(s, idx=end, context=context).next()
- except StopIteration:
- raise ValueError(errmsg("Expecting object", s, end))
- pairs[key] = value
- end = _w(s, end).end()
- nextchar = s[end:end + 1]
- end += 1
- if nextchar == '}':
- break
- if nextchar != ',':
- raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
- end = _w(s, end).end()
- nextchar = s[end:end + 1]
- end += 1
- if nextchar != '"':
- raise ValueError(errmsg("Expecting property name", s, end - 1))
- object_hook = getattr(context, 'object_hook', None)
- if object_hook is not None:
- pairs = object_hook(pairs)
- return pairs, end
-pattern(r'{')(JSONObject)
-
-
-def JSONArray(match, context, _w=WHITESPACE.match):
- values = []
- s = match.string
- end = _w(s, match.end()).end()
- # Look-ahead for trivial empty array
- nextchar = s[end:end + 1]
- if nextchar == ']':
- return values, end + 1
- iterscan = JSONScanner.iterscan
- while True:
- try:
- value, end = iterscan(s, idx=end, context=context).next()
- except StopIteration:
- raise ValueError(errmsg("Expecting object", s, end))
- values.append(value)
- end = _w(s, end).end()
- nextchar = s[end:end + 1]
- end += 1
- if nextchar == ']':
- break
- if nextchar != ',':
- raise ValueError(errmsg("Expecting , delimiter", s, end))
- end = _w(s, end).end()
- return values, end
-pattern(r'\[')(JSONArray)
-
-
-ANYTHING = [
- JSONObject,
- JSONArray,
- JSONString,
- JSONConstant,
- JSONNumber,
-]
-
-JSONScanner = Scanner(ANYTHING)
-
-
-class JSONDecoder(object):
- """
- Simple JSON <http://json.org> decoder
-
- Performs the following translations in decoding by default:
-
- +---------------+-------------------+
- | JSON | Python |
- +===============+===================+
- | object | dict |
- +---------------+-------------------+
- | array | list |
- +---------------+-------------------+
- | string | unicode |
- +---------------+-------------------+
- | number (int) | int, long |
- +---------------+-------------------+
- | number (real) | float |
- +---------------+-------------------+
- | true | True |
- +---------------+-------------------+
- | false | False |
- +---------------+-------------------+
- | null | None |
- +---------------+-------------------+
-
- It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
- their corresponding ``float`` values, which is outside the JSON spec.
- """
-
- _scanner = Scanner(ANYTHING)
- __all__ = ['__init__', 'decode', 'raw_decode']
-
- def __init__(self, encoding=None, object_hook=None, parse_float=None,
- parse_int=None, parse_constant=None, strict=True):
- """
- ``encoding`` determines the encoding used to interpret any ``str``
- objects decoded by this instance (utf-8 by default). It has no
- effect when decoding ``unicode`` objects.
-
- Note that currently only encodings that are a superset of ASCII work,
- strings of other encodings should be passed in as ``unicode``.
-
- ``object_hook``, if specified, will be called with the result
- of every JSON object decoded and its return value will be used in
- place of the given ``dict``. This can be used to provide custom
- deserializations (e.g. to support JSON-RPC class hinting).
-
- ``parse_float``, if specified, will be called with the string
- of every JSON float to be decoded. By default this is equivalent to
- float(num_str). This can be used to use another datatype or parser
- for JSON floats (e.g. decimal.Decimal).
-
- ``parse_int``, if specified, will be called with the string
- of every JSON int to be decoded. By default this is equivalent to
- int(num_str). This can be used to use another datatype or parser
- for JSON integers (e.g. float).
-
- ``parse_constant``, if specified, will be called with one of the
- following strings: -Infinity, Infinity, NaN, null, true, false.
- This can be used to raise an exception if invalid JSON numbers
- are encountered.
- """
- self.encoding = encoding
- self.object_hook = object_hook
- self.parse_float = parse_float
- self.parse_int = parse_int
- self.parse_constant = parse_constant
- self.strict = strict
-
- def decode(self, s, _w=WHITESPACE.match):
- """
- Return the Python representation of ``s`` (a ``str`` or ``unicode``
- instance containing a JSON document)
- """
- obj, end = self.raw_decode(s, idx=_w(s, 0).end())
- end = _w(s, end).end()
- if end != len(s):
- raise ValueError(errmsg("Extra data", s, end, len(s)))
- return obj
-
- def raw_decode(self, s, **kw):
- """
- Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
- with a JSON document) and return a 2-tuple of the Python
- representation and the index in ``s`` where the document ended.
-
- This can be used to decode a JSON document from a string that may
- have extraneous data at the end.
- """
- kw.setdefault('context', self)
- try:
- obj, end = self._scanner.iterscan(s, **kw).next()
- except StopIteration:
- raise ValueError("No JSON object could be decoded")
- return obj, end
-
-__all__ = ['JSONDecoder']
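Besides decode(), the JSONDecoder removed above exposes raw_decode(), which returns an (object, end_index) pair so the caller can handle trailing data; the stdlib json.JSONDecoder keeps the same method and convention. A sketch of that pattern under that assumption (the iter_json_objects helper is illustrative, not part of the removed code)::

    import json

    decoder = json.JSONDecoder()

    def iter_json_objects(text):
        """Yield every JSON document in ``text``, tolerating trailing data."""
        idx = 0
        while idx < len(text):
            # Skip whitespace between documents.
            while idx < len(text) and text[idx].isspace():
                idx += 1
            if idx >= len(text):
                break
            # raw_decode() parses one document and reports where it ended.
            obj, end = decoder.raw_decode(text, idx)
            yield obj
            idx = end

    stream = '{"a": 1} {"b": [2, 3]}\n{"c": null}'
    print(list(iter_json_objects(stream)))
    # [{'a': 1}, {'b': [2, 3]}, {'c': None}]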
diff --git a/addon-sdk/source/python-lib/simplejson/encoder.py b/addon-sdk/source/python-lib/simplejson/encoder.py
deleted file mode 100644
index befc5c1f5..000000000
--- a/addon-sdk/source/python-lib/simplejson/encoder.py
+++ /dev/null
@@ -1,395 +0,0 @@
-"""
-Implementation of JSONEncoder
-"""
-import re
-
-try:
- from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
-except ImportError:
- pass
-
-ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
-ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
-HAS_UTF8 = re.compile(r'[\x80-\xff]')
-ESCAPE_DCT = {
- '\\': '\\\\',
- '"': '\\"',
- '\b': '\\b',
- '\f': '\\f',
- '\n': '\\n',
- '\r': '\\r',
- '\t': '\\t',
-}
-for i in range(0x20):
- ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
-
-# Assume this produces an infinity on all machines (probably not guaranteed)
-INFINITY = float('1e66666')
-FLOAT_REPR = repr
-
-def floatstr(o, allow_nan=True):
- """
- Check for specials. Note that this type of test is processor- and/or
- platform-specific, so do tests which don't depend on the internals.
- """
- if o != o:
- text = 'NaN'
- elif o == INFINITY:
- text = 'Infinity'
- elif o == -INFINITY:
- text = '-Infinity'
- else:
- return FLOAT_REPR(o)
-
- if not allow_nan:
- raise ValueError("Out of range float values are not JSON compliant: %r"
- % (o,))
-
- return text
-
-
-def encode_basestring(s):
- """
- Return a JSON representation of a Python string
- """
- def replace(match):
- return ESCAPE_DCT[match.group(0)]
- return '"' + ESCAPE.sub(replace, s) + '"'
-
-
-def py_encode_basestring_ascii(s):
- if isinstance(s, str) and HAS_UTF8.search(s) is not None:
- s = s.decode('utf-8')
- def replace(match):
- s = match.group(0)
- try:
- return ESCAPE_DCT[s]
- except KeyError:
- n = ord(s)
- if n < 0x10000:
- return '\\u%04x' % (n,)
- else:
- # surrogate pair
- n -= 0x10000
- s1 = 0xd800 | ((n >> 10) & 0x3ff)
- s2 = 0xdc00 | (n & 0x3ff)
- return '\\u%04x\\u%04x' % (s1, s2)
- return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
-
-
-try:
- encode_basestring_ascii = c_encode_basestring_ascii
-except NameError:
- encode_basestring_ascii = py_encode_basestring_ascii
-
-
-class JSONEncoder(object):
- """
- Extensible JSON <http://json.org> encoder for Python data structures.
-
- Supports the following objects and types by default:
-
- +-------------------+---------------+
- | Python | JSON |
- +===================+===============+
- | dict | object |
- +-------------------+---------------+
- | list, tuple | array |
- +-------------------+---------------+
- | str, unicode | string |
- +-------------------+---------------+
- | int, long, float | number |
- +-------------------+---------------+
- | True | true |
- +-------------------+---------------+
- | False | false |
- +-------------------+---------------+
- | None | null |
- +-------------------+---------------+
-
- To extend this to recognize other objects, subclass and implement a
-    ``.default()`` method that returns a serializable object for ``o`` if
-    possible; otherwise it should call the superclass implementation
-    (to raise ``TypeError``).
- """
- __all__ = ['__init__', 'default', 'encode', 'iterencode']
- item_separator = ', '
- key_separator = ': '
- def __init__(self, skipkeys=False, ensure_ascii=True,
- check_circular=True, allow_nan=True, sort_keys=False,
- indent=None, separators=None, encoding='utf-8', default=None):
- """
- Constructor for JSONEncoder, with sensible defaults.
-
- If skipkeys is False, then it is a TypeError to attempt
- encoding of keys that are not str, int, long, float or None. If
- skipkeys is True, such items are simply skipped.
-
- If ensure_ascii is True, the output is guaranteed to be str
- objects with all incoming unicode characters escaped. If
-        ensure_ascii is false, the output will be a unicode object.
-
- If check_circular is True, then lists, dicts, and custom encoded
- objects will be checked for circular references during encoding to
- prevent an infinite recursion (which would cause an OverflowError).
- Otherwise, no such check takes place.
-
- If allow_nan is True, then NaN, Infinity, and -Infinity will be
- encoded as such. This behavior is not JSON specification compliant,
- but is consistent with most JavaScript based encoders and decoders.
- Otherwise, it will be a ValueError to encode such floats.
-
- If sort_keys is True, then the output of dictionaries will be
- sorted by key; this is useful for regression tests to ensure
- that JSON serializations can be compared on a day-to-day basis.
-
- If indent is a non-negative integer, then JSON array
- elements and object members will be pretty-printed with that
- indent level. An indent level of 0 will only insert newlines.
- None is the most compact representation.
-
- If specified, separators should be a (item_separator, key_separator)
- tuple. The default is (', ', ': '). To get the most compact JSON
- representation you should specify (',', ':') to eliminate whitespace.
-
- If specified, default is a function that gets called for objects
- that can't otherwise be serialized. It should return a JSON encodable
- version of the object or raise a ``TypeError``.
-
- If encoding is not None, then all input strings will be
- transformed into unicode using that encoding prior to JSON-encoding.
- The default is UTF-8.
- """
-
- self.skipkeys = skipkeys
- self.ensure_ascii = ensure_ascii
- self.check_circular = check_circular
- self.allow_nan = allow_nan
- self.sort_keys = sort_keys
- self.indent = indent
- self.current_indent_level = 0
- if separators is not None:
- self.item_separator, self.key_separator = separators
- if default is not None:
- self.default = default
- self.encoding = encoding
-
- def _newline_indent(self):
- """
- Indent lines by level
- """
- return '\n' + (' ' * (self.indent * self.current_indent_level))
-
- def _iterencode_list(self, lst, markers=None):
- """
- Encoding lists, yielding by level
- """
- if not lst:
- yield '[]'
- return
- if markers is not None:
- markerid = id(lst)
- if markerid in markers:
- raise ValueError("Circular reference detected")
- markers[markerid] = lst
- yield '['
- if self.indent is not None:
- self.current_indent_level += 1
- newline_indent = self._newline_indent()
- separator = self.item_separator + newline_indent
- yield newline_indent
- else:
- newline_indent = None
- separator = self.item_separator
- first = True
- for value in lst:
- if first:
- first = False
- else:
- yield separator
- for chunk in self._iterencode(value, markers):
- yield chunk
- if newline_indent is not None:
- self.current_indent_level -= 1
- yield self._newline_indent()
- yield ']'
- if markers is not None:
- del markers[markerid]
-
- def _iterencode_dict(self, dct, markers=None):
- """
- Encoding dictionaries, yielding by level
- """
- if not dct:
- yield '{}'
- return
- if markers is not None:
- markerid = id(dct)
- if markerid in markers:
- raise ValueError("Circular reference detected")
- markers[markerid] = dct
- yield '{'
- key_separator = self.key_separator
- if self.indent is not None:
- self.current_indent_level += 1
- newline_indent = self._newline_indent()
- item_separator = self.item_separator + newline_indent
- yield newline_indent
- else:
- newline_indent = None
- item_separator = self.item_separator
- first = True
- if self.ensure_ascii:
- encoder = encode_basestring_ascii
- else:
- encoder = encode_basestring
- allow_nan = self.allow_nan
- if self.sort_keys:
- keys = dct.keys()
- keys.sort()
- items = [(k, dct[k]) for k in keys]
- else:
- items = dct.iteritems()
- _encoding = self.encoding
- _do_decode = (_encoding is not None
- and not (_encoding == 'utf-8'))
- for key, value in items:
- if isinstance(key, str):
- if _do_decode:
- key = key.decode(_encoding)
- elif isinstance(key, basestring):
- pass
- # JavaScript is weakly typed for these, so it makes sense to
- # also allow them. Many encoders seem to do something like this.
- elif isinstance(key, float):
- key = floatstr(key, allow_nan)
- elif isinstance(key, (int, long)):
- key = str(key)
- elif key is True:
- key = 'true'
- elif key is False:
- key = 'false'
- elif key is None:
- key = 'null'
- elif self.skipkeys:
- continue
- else:
- raise TypeError("key %r is not a string" % (key,))
- if first:
- first = False
- else:
- yield item_separator
- yield encoder(key)
- yield key_separator
- for chunk in self._iterencode(value, markers):
- yield chunk
- if newline_indent is not None:
- self.current_indent_level -= 1
- yield self._newline_indent()
- yield '}'
- if markers is not None:
- del markers[markerid]
-
- def _iterencode(self, o, markers=None):
- if isinstance(o, basestring):
- if self.ensure_ascii:
- encoder = encode_basestring_ascii
- else:
- encoder = encode_basestring
- _encoding = self.encoding
- if (_encoding is not None and isinstance(o, str)
- and not (_encoding == 'utf-8')):
- o = o.decode(_encoding)
- yield encoder(o)
- elif o is None:
- yield 'null'
- elif o is True:
- yield 'true'
- elif o is False:
- yield 'false'
- elif isinstance(o, (int, long)):
- yield str(o)
- elif isinstance(o, float):
- yield floatstr(o, self.allow_nan)
- elif isinstance(o, (list, tuple)):
- for chunk in self._iterencode_list(o, markers):
- yield chunk
- elif isinstance(o, dict):
- for chunk in self._iterencode_dict(o, markers):
- yield chunk
- else:
- if markers is not None:
- markerid = id(o)
- if markerid in markers:
- raise ValueError("Circular reference detected")
- markers[markerid] = o
- for chunk in self._iterencode_default(o, markers):
- yield chunk
- if markers is not None:
- del markers[markerid]
-
- def _iterencode_default(self, o, markers=None):
- newobj = self.default(o)
- return self._iterencode(newobj, markers)
-
- def default(self, o):
- """
- Implement this method in a subclass such that it returns
- a serializable object for ``o``, or calls the base implementation
- (to raise a ``TypeError``).
-
- For example, to support arbitrary iterators, you could
- implement default like this::
-
- def default(self, o):
- try:
- iterable = iter(o)
- except TypeError:
- pass
- else:
- return list(iterable)
- return JSONEncoder.default(self, o)
- """
- raise TypeError("%r is not JSON serializable" % (o,))
-
- def encode(self, o):
- """
- Return a JSON string representation of a Python data structure.
-
- >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
- '{"foo": ["bar", "baz"]}'
- """
- # This is for extremely simple cases and benchmarks.
- if isinstance(o, basestring):
- if isinstance(o, str):
- _encoding = self.encoding
- if (_encoding is not None
- and not (_encoding == 'utf-8')):
- o = o.decode(_encoding)
- if self.ensure_ascii:
- return encode_basestring_ascii(o)
- else:
- return encode_basestring(o)
- # This doesn't pass the iterator directly to ''.join() because the
- # exceptions aren't as detailed. The list call should be roughly
- # equivalent to the PySequence_Fast that ''.join() would do.
- chunks = list(self.iterencode(o))
- return ''.join(chunks)
-
- def iterencode(self, o):
- """
- Encode the given object and yield each string
- representation as available.
-
- For example::
-
- for chunk in JSONEncoder().iterencode(bigobject):
- mysocket.write(chunk)
- """
- if self.check_circular:
- markers = {}
- else:
- markers = None
- return self._iterencode(o, markers)
-
-__all__ = ['JSONEncoder']
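The encoder removed above is extended exactly as its docstring says: subclass, override default(), and either return something serializable or defer to the base class so it raises TypeError. The stdlib json.JSONEncoder keeps the same contract, so here is a small sketch under that assumption, including the chunked output that the iterencode() docstring describes::

    import json

    class ComplexEncoder(json.JSONEncoder):
        """Serialize complex numbers as [real, imag]; defer everything else."""
        def default(self, o):
            if isinstance(o, complex):
                return [o.real, o.imag]
            # The base implementation raises TypeError for unknown types.
            return super().default(o)

    print(json.dumps(2 + 1j, cls=ComplexEncoder))    # [2.0, 1.0]
    print(ComplexEncoder().encode(2 + 1j))           # [2.0, 1.0]

    # iterencode() yields the serialization in chunks, which is what
    # dump() uses to write large structures without one giant string.
    for chunk in ComplexEncoder().iterencode({'z': 2 + 1j}):
        print(repr(chunk))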
diff --git a/addon-sdk/source/python-lib/simplejson/scanner.py b/addon-sdk/source/python-lib/simplejson/scanner.py
deleted file mode 100644
index 2a18390d0..000000000
--- a/addon-sdk/source/python-lib/simplejson/scanner.py
+++ /dev/null
@@ -1,67 +0,0 @@
-"""
-Iterator based sre token scanner
-"""
-import re
-from re import VERBOSE, MULTILINE, DOTALL
-import sre_parse
-import sre_compile
-import sre_constants
-from sre_constants import BRANCH, SUBPATTERN
-
-__all__ = ['Scanner', 'pattern']
-
-FLAGS = (VERBOSE | MULTILINE | DOTALL)
-
-class Scanner(object):
- def __init__(self, lexicon, flags=FLAGS):
- self.actions = [None]
- # Combine phrases into a compound pattern
- s = sre_parse.Pattern()
- s.flags = flags
- p = []
- for idx, token in enumerate(lexicon):
- phrase = token.pattern
- try:
- subpattern = sre_parse.SubPattern(s,
- [(SUBPATTERN, (idx + 1, sre_parse.parse(phrase, flags)))])
- except sre_constants.error:
- raise
- p.append(subpattern)
- self.actions.append(token)
-
- s.groups = len(p) + 1 # NOTE(guido): Added to make SRE validation work
- p = sre_parse.SubPattern(s, [(BRANCH, (None, p))])
- self.scanner = sre_compile.compile(p)
-
- def iterscan(self, string, idx=0, context=None):
- """
- Yield match, end_idx for each match
- """
- match = self.scanner.scanner(string, idx).match
- actions = self.actions
- lastend = idx
- end = len(string)
- while True:
- m = match()
- if m is None:
- break
- matchbegin, matchend = m.span()
- if lastend == matchend:
- break
- action = actions[m.lastindex]
- if action is not None:
- rval, next_pos = action(m, context)
- if next_pos is not None and next_pos != matchend:
- # "fast forward" the scanner
- matchend = next_pos
- match = self.scanner.scanner(string, matchend).match
- yield rval, matchend
- lastend = matchend
-
-
-def pattern(pattern, flags=FLAGS):
- def decorator(fn):
- fn.pattern = pattern
- fn.regex = re.compile(pattern, flags)
- return fn
-    return decorator
\ No newline at end of file
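scanner.py combines the per-token regexes registered through the pattern() decorator into one branched pattern and dispatches on whichever alternative matched, leaning on sre_parse/sre_compile internals that modern Pythons deprecate. The same lexicon-dispatch idea can be approximated with plain re named groups; this is an illustrative sketch, not the removed implementation, and the token names are invented for the example::

    import re

    # Each token kind maps to its regex; order sets match priority.
    LEXICON = [
        ('NUMBER', r'-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][-+]?\d+)?'),
        ('STRING', r'"(?:[^"\\]|\\.)*"'),
        ('CONSTANT', r'-?Infinity|NaN|true|false|null'),
        ('PUNCT', r'[{}\[\],:]'),
    ]

    # One combined pattern with a named group per token kind, much as
    # Scanner() folds its lexicon into a single BRANCH pattern.
    MASTER = re.compile('|'.join('(?P<%s>%s)' % pair for pair in LEXICON))
    SKIP_WS = re.compile(r'\s*')

    def iterscan(text, idx=0):
        """Yield (kind, lexeme, end_index) per token, like Scanner.iterscan."""
        while idx < len(text):
            idx = SKIP_WS.match(text, idx).end()
            if idx >= len(text):
                break
            m = MASTER.match(text, idx)
            if m is None:
                raise ValueError('no token matches at index %d' % idx)
            yield m.lastgroup, m.group(), m.end()
            idx = m.end()

    print(list(iterscan('{"a": [1.5, true]}')))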
diff --git a/addon-sdk/source/python-lib/simplejson/tool.py b/addon-sdk/source/python-lib/simplejson/tool.py
deleted file mode 100644
index caa1818f8..000000000
--- a/addon-sdk/source/python-lib/simplejson/tool.py
+++ /dev/null
@@ -1,44 +0,0 @@
-r"""
-Using simplejson from the shell to validate and
-pretty-print::
-
- $ echo '{"json":"obj"}' | python -msimplejson
- {
- "json": "obj"
- }
- $ echo '{ 1.2:3.4}' | python -msimplejson
- Expecting property name: line 1 column 2 (char 2)
-
-Note that the JSON produced by this module's default settings
-is a subset of YAML, so it may be used as a serializer for that as well.
-"""
-import simplejson
-
-#
-# Pretty printer:
-# curl http://mochikit.com/examples/ajax_tables/domains.json | python -msimplejson.tool
-#
-
-def main():
- import sys
- if len(sys.argv) == 1:
- infile = sys.stdin
- outfile = sys.stdout
- elif len(sys.argv) == 2:
- infile = open(sys.argv[1], 'rb')
- outfile = sys.stdout
- elif len(sys.argv) == 3:
- infile = open(sys.argv[1], 'rb')
- outfile = open(sys.argv[2], 'wb')
- else:
- raise SystemExit("%s [infile [outfile]]" % (sys.argv[0],))
- try:
- obj = simplejson.load(infile)
- except ValueError, e:
- raise SystemExit(e)
- simplejson.dump(obj, outfile, sort_keys=True, indent=4)
- outfile.write('\n')
-
-
-if __name__ == '__main__':
- main()
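The command-line validator/pretty-printer deleted here has a direct stdlib successor, python -m json.tool, and its body translates almost line for line. A minimal equivalent using the json module (the module name and the Python 3 except syntax are the only substitutions)::

    import json
    import sys

    def main():
        # stdin -> stdout by default, with optional infile/outfile
        # arguments, mirroring the removed tool.main().
        if len(sys.argv) == 1:
            infile, outfile = sys.stdin, sys.stdout
        elif len(sys.argv) == 2:
            infile, outfile = open(sys.argv[1], 'r'), sys.stdout
        elif len(sys.argv) == 3:
            infile, outfile = open(sys.argv[1], 'r'), open(sys.argv[2], 'w')
        else:
            raise SystemExit('%s [infile [outfile]]' % sys.argv[0])
        try:
            obj = json.load(infile)
        except ValueError as e:
            raise SystemExit(e)
        json.dump(obj, outfile, sort_keys=True, indent=4)
        outfile.write('\n')

    if __name__ == '__main__':
        main()

Piping works just as in the docstring above: echo '{"json":"obj"}' | python -m json.tool.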