From 0d80d569fa60b7906867aecd056d4f809ee18413 Mon Sep 17 00:00:00 2001 From: Brian Warner Date: Tue, 10 Jul 2007 15:22:53 -0700 Subject: [PATCH] import simplejson-1.7.1 into src/simplejson --- src/simplejson/LICENSE.txt | 19 + src/simplejson/PKG-INFO | 29 ++ .../docs/class-simplejson.JSONDecoder.html | 210 ++++++++ .../docs/class-simplejson.JSONEncoder.html | 261 ++++++++++ src/simplejson/docs/class_to_source.js | 22 + src/simplejson/docs/index.html | 455 ++++++++++++++++++ src/simplejson/docs/layout.css | 208 ++++++++ src/simplejson/docs/module-index.html | 111 +++++ .../docs/module-simplejson-index.html | 127 +++++ src/simplejson/docs/module-simplejson.html | 455 ++++++++++++++++++ src/simplejson/docs/pudge.css | 60 +++ src/simplejson/docs/rst.css | 142 ++++++ .../docs/simplejson/__init__.py.html | 357 ++++++++++++++ .../docs/simplejson/decoder.py.html | 345 +++++++++++++ .../docs/simplejson/encoder.py.html | 443 +++++++++++++++++ .../docs/simplejson/jsonfilter.py.html | 112 +++++ .../docs/simplejson/scanner.py.html | 135 ++++++ src/simplejson/ez_setup/README.txt | 15 + src/simplejson/ez_setup/__init__.py | 228 +++++++++ src/simplejson/scripts/bench.sh | 2 + src/simplejson/scripts/make_docs.py | 28 ++ src/simplejson/scripts/prof.py | 9 + src/simplejson/setup.cfg | 5 + src/simplejson/setup.py | 81 ++++ src/simplejson/simplejson.egg-info/PKG-INFO | 29 ++ .../simplejson.egg-info/SOURCES.txt | 46 ++ .../simplejson.egg-info/dependency_links.txt | 1 + .../simplejson.egg-info/entry_points.txt | 3 + .../simplejson.egg-info/top_level.txt | 1 + src/simplejson/simplejson.egg-info/zip-safe | 1 + src/simplejson/simplejson/__init__.py | 287 +++++++++++ src/simplejson/simplejson/_speedups.c | 215 +++++++++ src/simplejson/simplejson/decoder.py | 273 +++++++++++ src/simplejson/simplejson/encoder.py | 371 ++++++++++++++ src/simplejson/simplejson/jsonfilter.py | 40 ++ src/simplejson/simplejson/scanner.py | 63 +++ src/simplejson/simplejson/tests/__init__.py | 0 .../simplejson/tests/test_attacks.py | 6 + src/simplejson/simplejson/tests/test_dump.py | 10 + src/simplejson/simplejson/tests/test_fail.py | 70 +++ .../simplejson/tests/test_indent.py | 41 ++ src/simplejson/simplejson/tests/test_pass1.py | 72 +++ src/simplejson/simplejson/tests/test_pass2.py | 11 + src/simplejson/simplejson/tests/test_pass3.py | 16 + .../simplejson/tests/test_recursion.py | 62 +++ .../simplejson/tests/test_separators.py | 41 ++ .../simplejson/tests/test_unicode.py | 16 + 47 files changed, 5534 insertions(+) create mode 100644 src/simplejson/LICENSE.txt create mode 100644 src/simplejson/PKG-INFO create mode 100644 src/simplejson/docs/class-simplejson.JSONDecoder.html create mode 100644 src/simplejson/docs/class-simplejson.JSONEncoder.html create mode 100644 src/simplejson/docs/class_to_source.js create mode 100644 src/simplejson/docs/index.html create mode 100644 src/simplejson/docs/layout.css create mode 100644 src/simplejson/docs/module-index.html create mode 100644 src/simplejson/docs/module-simplejson-index.html create mode 100644 src/simplejson/docs/module-simplejson.html create mode 100644 src/simplejson/docs/pudge.css create mode 100644 src/simplejson/docs/rst.css create mode 100644 src/simplejson/docs/simplejson/__init__.py.html create mode 100644 src/simplejson/docs/simplejson/decoder.py.html create mode 100644 src/simplejson/docs/simplejson/encoder.py.html create mode 100644 src/simplejson/docs/simplejson/jsonfilter.py.html create mode 100644 src/simplejson/docs/simplejson/scanner.py.html create mode 100644 
src/simplejson/ez_setup/README.txt create mode 100644 src/simplejson/ez_setup/__init__.py create mode 100644 src/simplejson/scripts/bench.sh create mode 100644 src/simplejson/scripts/make_docs.py create mode 100644 src/simplejson/scripts/prof.py create mode 100644 src/simplejson/setup.cfg create mode 100644 src/simplejson/setup.py create mode 100644 src/simplejson/simplejson.egg-info/PKG-INFO create mode 100644 src/simplejson/simplejson.egg-info/SOURCES.txt create mode 100644 src/simplejson/simplejson.egg-info/dependency_links.txt create mode 100644 src/simplejson/simplejson.egg-info/entry_points.txt create mode 100644 src/simplejson/simplejson.egg-info/top_level.txt create mode 100644 src/simplejson/simplejson.egg-info/zip-safe create mode 100644 src/simplejson/simplejson/__init__.py create mode 100644 src/simplejson/simplejson/_speedups.c create mode 100644 src/simplejson/simplejson/decoder.py create mode 100644 src/simplejson/simplejson/encoder.py create mode 100644 src/simplejson/simplejson/jsonfilter.py create mode 100644 src/simplejson/simplejson/scanner.py create mode 100644 src/simplejson/simplejson/tests/__init__.py create mode 100644 src/simplejson/simplejson/tests/test_attacks.py create mode 100644 src/simplejson/simplejson/tests/test_dump.py create mode 100644 src/simplejson/simplejson/tests/test_fail.py create mode 100644 src/simplejson/simplejson/tests/test_indent.py create mode 100644 src/simplejson/simplejson/tests/test_pass1.py create mode 100644 src/simplejson/simplejson/tests/test_pass2.py create mode 100644 src/simplejson/simplejson/tests/test_pass3.py create mode 100644 src/simplejson/simplejson/tests/test_recursion.py create mode 100644 src/simplejson/simplejson/tests/test_separators.py create mode 100644 src/simplejson/simplejson/tests/test_unicode.py diff --git a/src/simplejson/LICENSE.txt b/src/simplejson/LICENSE.txt new file mode 100644 index 00000000..ad95f29c --- /dev/null +++ b/src/simplejson/LICENSE.txt @@ -0,0 +1,19 @@ +Copyright (c) 2006 Bob Ippolito + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/src/simplejson/PKG-INFO b/src/simplejson/PKG-INFO new file mode 100644 index 00000000..c202c5fa --- /dev/null +++ b/src/simplejson/PKG-INFO @@ -0,0 +1,29 @@ +Metadata-Version: 1.0 +Name: simplejson +Version: 1.7.1 +Summary: Simple, fast, extensible JSON encoder/decoder for Python +Home-page: http://undefined.org/python/#simplejson +Author: Bob Ippolito +Author-email: bob@redivi.com +License: MIT License +Description: + simplejson is a simple, fast, complete, correct and extensible + JSON encoder and decoder for Python 2.3+. It is + pure Python code with no dependencies, but includes an optional C + extension for a serious speed boost. + + simplejson was formerly known as simple_json, but changed its name to + comply with PEP 8 module naming guidelines. + + The encoder may be subclassed to provide serialization in any kind of + situation, without any special support by the objects to be serialized + (somewhat like pickle). + + The decoder can handle incoming JSON strings of any specified encoding + (UTF-8 by default). + +Platform: any +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/src/simplejson/docs/class-simplejson.JSONDecoder.html b/src/simplejson/docs/class-simplejson.JSONDecoder.html new file mode 100644 index 00000000..c5f31ffe --- /dev/null +++ b/src/simplejson/docs/class-simplejson.JSONDecoder.html @@ -0,0 +1,210 @@ + + + + + simplejson.JSONDecoder -- Simple JSON &lt;<a class="reference" href="http://json.org">http://json.org</a>&gt; decoder + + + +
+ +
+

simplejson 1.7.1

+
+ index + + + + + + +
+ + + simplejson + + + + + details + + + tree + + + +
+
+ +
+ +

+ JSONDecoder +

+

+ Simple JSON <http://json.org> decoder +

+ +
+ +
+
+
+

Performs the following translations in decoding:

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
JSONPython
objectdict
arraylist
stringunicode
number (int)int, long
number (real)float
trueTrue
falseFalse
nullNone
+

It also understands NaN, Infinity, and -Infinity as +their corresponding float values, which is outside the JSON spec.
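For illustration, a minimal sketch of the NaN/Infinity handling described above (exact float reprs vary by platform, so no doctest output is shown):

    import simplejson
    vals = simplejson.loads('[NaN, Infinity, -Infinity]')
    # vals[0] != vals[0]  (NaN compares unequal to itself);
    # vals[1] > 0 and vals[2] < 0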

+ +
+ +
+ + + +

Methods

+
+ +

f + __init__(self, encoding=None, object_hook=None) + ... +

+
+ +

+ encoding determines the encoding used to interpret any str +objects decoded by this instance (utf-8 by default). It has no +effect when decoding unicode objects. +

+

Note that currently only encodings that are a superset of ASCII work; +strings of other encodings should be passed in as unicode.

+

object_hook, if specified, will be called with the result +of every JSON object decoded and its return value will be used in +place of the given dict. This can be used to provide custom +deserializations (e.g. to support JSON-RPC class hinting).

+ + +
+
+ +

f + decode(self, s, _w=<built-in method match of _sre.SRE_Pattern object at 0x13244a0>) + ... +

+
+ +

+ Return the Python representation of s (a str or unicode +instance containing a JSON document) +

+ + +
+
+ +

f + raw_decode(self, s, **kw) + ... +

+
+ +

+ Decode a JSON document from s (a str or unicode beginning +with a JSON document) and return a 2-tuple of the Python +representation and the index in s where the document ended. +

+

This can be used to decode a JSON document from a string that may +have extraneous data at the end.
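A short illustrative sketch of that use of raw_decode (the object/end-index pair is the documented return value):

    import simplejson
    decoder = simplejson.JSONDecoder()
    obj, end = decoder.raw_decode('{"a": 1} trailing garbage')
    # obj == {u'a': 1}; the unparsed remainder starts at index end of the input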

+ + +
+
+ + + +

+ + + + See + the source + for more information. + +

+ +
+ + +
+ + + \ No newline at end of file diff --git a/src/simplejson/docs/class-simplejson.JSONEncoder.html b/src/simplejson/docs/class-simplejson.JSONEncoder.html new file mode 100644 index 00000000..1e429613 --- /dev/null +++ b/src/simplejson/docs/class-simplejson.JSONEncoder.html @@ -0,0 +1,261 @@ + + + + + simplejson.JSONEncoder -- Extensible JSON &lt;<a class="reference" href="http://json.org">http://json.org</a>&gt; encoder for Python data structures. + + + +
+ +
+

simplejson 1.7.1

+
+ index + + + + + + +
+ + + simplejson + + + + + details + + + tree + + + +
+
+ +
+ +

+ JSONEncoder +

+

+ Extensible JSON <http://json.org> encoder for Python data structures. +

+ +
+ +
+
+
+

Supports the following objects and types by default:

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
PythonJSON
dictobject
list, tuplearray
str, unicodestring
int, long, floatnumber
Truetrue
Falsefalse
Nonenull
+

To extend this to recognize other objects, subclass and implement a +.default() method that returns a serializable +object for o if possible; otherwise it should call the superclass +implementation (to raise TypeError).

+ +
+ +
+ + + +

Methods

+
+ +

f + __init__(self, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, sort_keys=False, indent=None, separators=None, encoding='utf-8') + ... +

+
+ +

+ Constructor for JSONEncoder, with sensible defaults. +

+

If skipkeys is False, then it is a TypeError to attempt +encoding of keys that are not str, int, long, float or None. If +skipkeys is True, such items are simply skipped.

+

If ensure_ascii is True, the output is guaranteed to be str +objects with all incoming unicode characters escaped. If +ensure_ascii is False, the output will be a unicode object.

+

If check_circular is True, then lists, dicts, and custom encoded +objects will be checked for circular references during encoding to +prevent an infinite recursion (which would cause an OverflowError). +Otherwise, no such check takes place.

+

If allow_nan is True, then NaN, Infinity, and -Infinity will be +encoded as such. This behavior is not JSON specification compliant, +but is consistent with most JavaScript based encoders and decoders. +Otherwise, it will be a ValueError to encode such floats.

+

If sort_keys is True, then the output of dictionaries will be +sorted by key; this is useful for regression tests to ensure +that JSON serializations can be compared on a day-to-day basis.

+

If indent is a non-negative integer, then JSON array +elements and object members will be pretty-printed with that +indent level. An indent level of 0 will only insert newlines. +None is the most compact representation.

+

If specified, separators should be a (item_separator, key_separator) +tuple. The default is (', ', ': '). To get the most compact JSON +representation you should specify (',', ':') to eliminate whitespace.

+

If encoding is not None, then all input strings will be +transformed into unicode using that encoding prior to JSON-encoding. +The default is UTF-8.
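For example, combining sort_keys and separators as described above (a minimal doctest-style sketch in the spirit of the other examples in these docs):

    >>> import simplejson
    >>> encoder = simplejson.JSONEncoder(sort_keys=True, separators=(',', ':'))
    >>> encoder.encode({'b': 1, 'a': [2, 3]})
    '{"a":[2,3],"b":1}'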

+ + +
+
+ +

f + default(self, o) + ... +

+
+ +

+ Implement this method in a subclass such that it returns +a serializable object for o, or calls the base implementation +(to raise a TypeError). +

+

For example, to support arbitrary iterators, you could +implement default like this:

+
+def default(self, o):
+    try:
+        iterable = iter(o)
+    except TypeError:
+        pass
+    else:
+        return list(iterable)
+    return JSONEncoder.default(self, o)
+
+ + +
+
+ +

f + encode(self, o) + ... +

+
+ +

+ Return a JSON string representation of a Python data structure. +

+
+>>> JSONEncoder().encode({"foo": ["bar", "baz"]})
+'{"foo":["bar", "baz"]}'
+
+ + +
+
+ +

f + iterencode(self, o) + ... +

+
+ +

+ Encode the given object and yield each string +representation as available. +

+

For example:

+
+for chunk in JSONEncoder().iterencode(bigobject):
+    mysocket.write(chunk)
+
+ + +
+
+ + + +

+ + + + See + the source + for more information. + +

+ +
+ + +
+ + + \ No newline at end of file diff --git a/src/simplejson/docs/class_to_source.js b/src/simplejson/docs/class_to_source.js new file mode 100644 index 00000000..b6dc7651 --- /dev/null +++ b/src/simplejson/docs/class_to_source.js @@ -0,0 +1,22 @@ +(function () { + var href = document.location.href; + if (href.indexOf("file:") == 0 || !document.getElementsByTagName) return; + var _old_onload = window.onload; + window.onload = function () { + if (_old_onload) _old_onload.call(this); + _old_onload = null; + var anchors = document.getElementsByTagName('A'); + var class_re = /\blines-(\d+)-(\d+)\b/; + var hash_re = /#.*$/; + for (var i = 0; i < anchors.length; i++) { + var anchor = anchors[i]; + var found = anchor.className.match(class_re); + if (!found) continue; + href = anchor.href; + var hashidx = href.indexOf("#"); + if (hashidx == -1) hashidx = href.length; + anchor.href = (href.substring(0, hashidx) + "?f=" + found[1] + + "&l=" + found[2] + href.substring(hashidx, href.length)); + } + } +})(); diff --git a/src/simplejson/docs/index.html b/src/simplejson/docs/index.html new file mode 100644 index 00000000..c142f3ff --- /dev/null +++ b/src/simplejson/docs/index.html @@ -0,0 +1,455 @@ + + + + + simplejson -- A simple, fast, extensible JSON encoder and decoder + + + +
+ +
+

simplejson 1.7.1

+
+ index + + + + + + +
+ + + simplejson + + + + + details + + + tree + + + +
+
+ +
+ +

+ simplejson +

+

+ A simple, fast, extensible JSON encoder and decoder +

+ +
+ +
+
+
+

JSON (JavaScript Object Notation) <http://json.org> is a subset of +JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data +interchange format.

+

simplejson exposes an API familiar to users of the standard library +marshal and pickle modules.

+

Encoding basic Python object hierarchies:

+
+>>> import simplejson
+>>> simplejson.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
+'["foo", {"bar": ["baz", null, 1.0, 2]}]'
+>>> print simplejson.dumps("\"foo\bar")
+"\"foo\bar"
+>>> print simplejson.dumps(u'\u1234')
+"\u1234"
+>>> print simplejson.dumps('\\')
+"\\"
+>>> print simplejson.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
+{"a": 0, "b": 0, "c": 0}
+>>> from StringIO import StringIO
+>>> io = StringIO()
+>>> simplejson.dump(['streaming API'], io)
+>>> io.getvalue()
+'["streaming API"]'
+
+

Compact encoding:

+
+>>> import simplejson
+>>> simplejson.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
+'[1,2,3,{"4":5,"6":7}]'
+
+

Pretty printing:

+
+>>> import simplejson
+>>> print simplejson.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
+{
+    "4": 5,
+    "6": 7
+}
+
+

Decoding JSON:

+
+>>> import simplejson
+>>> simplejson.loads('["foo", {"bar":["baz", null, 1.0, 2]}]')
+[u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
+>>> simplejson.loads('"\\"foo\\bar"')
+u'"foo\x08ar'
+>>> from StringIO import StringIO
+>>> io = StringIO('["streaming API"]')
+>>> simplejson.load(io)
+[u'streaming API']
+
+

Specializing JSON object decoding:

+
+>>> import simplejson
+>>> def as_complex(dct):
+...     if '__complex__' in dct:
+...         return complex(dct['real'], dct['imag'])
+...     return dct
+...
+>>> simplejson.loads('{"__complex__": true, "real": 1, "imag": 2}',
+...     object_hook=as_complex)
+(1+2j)
+
+

Extending JSONEncoder:

+
+>>> import simplejson
+>>> class ComplexEncoder(simplejson.JSONEncoder):
+...     def default(self, obj):
+...         if isinstance(obj, complex):
+...             return [obj.real, obj.imag]
+...         return simplejson.JSONEncoder.default(self, obj)
+...
+>>> dumps(2 + 1j, cls=ComplexEncoder)
+'[2.0, 1.0]'
+>>> ComplexEncoder().encode(2 + 1j)
+'[2.0, 1.0]'
+>>> list(ComplexEncoder().iterencode(2 + 1j))
+['[', '2.0', ', ', '1.0', ']']
+
+

Note that the JSON produced by this module's default settings +is a subset of YAML, so it may be used as a serializer for that as well.

+ +
+ +
+ + + +

Functions

+
+ +

f + dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None, separators=None, encoding='utf-8', **kw) + ... +

+
+ +

+ Serialize obj as a JSON formatted stream to fp (a +.write()-supporting file-like object). +

+

If skipkeys is True then dict keys that are not basic types +(str, unicode, int, long, float, bool, None) +will be skipped instead of raising a TypeError.

+

If ensure_ascii is False, then some chunks written to fp +may be unicode instances, subject to normal Python str to +unicode coercion rules. Unless fp.write() explicitly +understands unicode (as in codecs.getwriter()), this is likely +to cause an error.

+

If check_circular is False, then the circular reference check +for container types will be skipped and a circular reference will +result in an OverflowError (or worse).

+

If allow_nan is False, then it will be a ValueError to +serialize out of range float values (nan, inf, -inf) +in strict compliance of the JSON specification, instead of using the +JavaScript equivalents (NaN, Infinity, -Infinity).

+

If indent is a non-negative integer, then JSON array elements and object +members will be pretty-printed with that indent level. An indent level +of 0 will only insert newlines. None is the most compact representation.

+

If separators is an (item_separator, dict_separator) tuple +then it will be used instead of the default (', ', ': ') separators. +(',', ':') is the most compact JSON representation.

+

encoding is the character encoding for str instances, default is UTF-8.

+

To use a custom JSONEncoder subclass (e.g. one that overrides the +.default() method to serialize additional types), specify it with +the cls kwarg.
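As a rough illustration of the allow_nan behavior described above (a sketch, not a doctest from the package; the same switch applies to dumps, and float('nan') may not parse on some older platforms):

    import simplejson
    print simplejson.dumps([float('nan')])   # default: returns '[NaN]'
    try:
        simplejson.dumps([float('nan')], allow_nan=False)
    except ValueError:
        pass  # out-of-range floats are rejected in strict mode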

+ + +
+
+ +

f + dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None, separators=None, encoding='utf-8', **kw) + ... +

+
+ +

+ Serialize obj to a JSON formatted str. +

+

If skipkeys is True then dict keys that are not basic types +(str, unicode, int, long, float, bool, None) +will be skipped instead of raising a TypeError.

+

If ensure_ascii is False, then the return value will be a +unicode instance subject to normal Python str to unicode +coercion rules instead of being escaped to an ASCII str.

+

If check_circular is False, then the circular reference check +for container types will be skipped and a circular reference will +result in an OverflowError (or worse).

+

If allow_nan is False, then it will be a ValueError to +serialize out of range float values (nan, inf, -inf) in +strict compliance of the JSON specification, instead of using the +JavaScript equivalents (NaN, Infinity, -Infinity).

+

If indent is a non-negative integer, then JSON array elements and +object members will be pretty-printed with that indent level. An indent +level of 0 will only insert newlines. None is the most compact +representation.

+

If separators is an (item_separator, dict_separator) tuple +then it will be used instead of the default (', ', ': ') separators. +(',', ':') is the most compact JSON representation.

+

encoding is the character encoding for str instances, default is UTF-8.

+

To use a custom JSONEncoder subclass (e.g. one that overrides the +.default() method to serialize additional types), specify it with +the cls kwarg.

+ + +
+
+ +

f + load(fp, encoding=None, cls=None, object_hook=None, **kw) + ... +

+
+ +

+ Deserialize fp (a .read()-supporting file-like object containing +a JSON document) to a Python object. +

+

If the contents of fp are encoded with an ASCII based encoding other +than utf-8 (e.g. latin-1), then an appropriate encoding name must +be specified. Encodings that are not ASCII based (such as UCS-2) are +not allowed, and should be wrapped with +codecs.getreader(encoding)(fp), or simply decoded to a unicode +object and passed to loads().

+

object_hook is an optional function that will be called with the +result of any object literal decode (a dict). The return value of +object_hook will be used instead of the dict. This feature +can be used to implement custom decoders (e.g. JSON-RPC class hinting).

+

To use a custom JSONDecoder subclass, specify it with the cls +kwarg.
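A brief sketch of the encoding parameter described above, reading a latin-1 encoded document from a file-like object (doctest-style, matching the examples elsewhere in these docs):

    >>> import simplejson
    >>> from StringIO import StringIO
    >>> io = StringIO('["caf\xe9"]')
    >>> simplejson.load(io, encoding='latin-1')
    [u'caf\xe9']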

+ + +
+
+ +

f + loads(s, encoding=None, cls=None, object_hook=None, **kw) + ... +

+
+ +

+ Deserialize s (a str or unicode instance containing a JSON +document) to a Python object. +

+

If s is a str instance and is encoded with an ASCII based encoding +other than utf-8 (e.g. latin-1) then an appropriate encoding name +must be specified. Encodings that are not ASCII based (such as UCS-2) +are not allowed and should be decoded to unicode first.

+

object_hook is an optional function that will be called with the +result of any object literal decode (a dict). The return value of +object_hook will be used instead of the dict. This feature +can be used to implement custom decoders (e.g. JSON-RPC class hinting).

+

To use a custom JSONDecoder subclass, specify it with the cls +kwarg.

+ + +
+
+ + + +

Classes

+
+

C + + JSONEncoder(...) + ... +

+
+ +

+ Extensible JSON <http://json.org> encoder for Python data structures. +

+

Supports the following objects and types by default:

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
PythonJSON
dictobject
list, tuplearray
str, unicodestring
int, long, floatnumber
Truetrue
Falsefalse
Nonenull
+

To extend this to recognize other objects, subclass and implement a +.default() method that returns a serializable +object for o if possible; otherwise it should call the superclass +implementation (to raise TypeError).

+ + + +

+ This class contains + 5 members. +

+
+
+

C + + JSONDecoder(...) + ... +

+
+ +

+ Simple JSON <http://json.org> decoder +

+

Performs the following translations in decoding:

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
JSONPython
objectdict
arraylist
stringunicode
number (int)int, long
number (real)float
trueTrue
falseFalse
nullNone
+

It also understands NaN, Infinity, and -Infinity as +their corresponding float values, which is outside the JSON spec.

+ + + +

+ This class contains + 4 members. +

+
+
+ + + +

+ + + + See + the source + for more information. + +

+ +
+ + +
+ + + \ No newline at end of file diff --git a/src/simplejson/docs/layout.css b/src/simplejson/docs/layout.css new file mode 100644 index 00000000..a45c58b3 --- /dev/null +++ b/src/simplejson/docs/layout.css @@ -0,0 +1,208 @@ +@import url("pudge.css"); + +/* Basic Layout +---------------------------------- */ + +body { + margin-left: 1em; + margin-right: 1em; + max-width: 50em; +} +body { + font-size: .80em; + color: #111; +} +div#main-content { + margin-left: 1em; + margin-right: 1em; + max-width: 47em; +} + +/* Top Navigation +----------------------------------- */ + +div#top-nav { + background: #373; + padding: .3em .3em; + margin: 0; + margin-bottom: 1.5em; + font-size: 90%; +} +div#top-nav #doc-title { + font-size: 140%; + font-weight: bold; + margin: 0; + padding-top: .32em; + padding-right: 1em; + padding-left: .3em; + color: #9c9; + float: left; +} +div#top-nav a { + color: #6a6; + text-decoration: none; +} +div#top-nav .online-navigation a:hover, +div#top-nav h1 a +{ + color: #9c9; +} + +/* Footer +----------------------------------- */ + +div#footer { + text-align: right; + margin-top: 1.5em; + border-top: 1px solid #373; + padding-top: .5em; + font-size: 80%; + color: #666; +} +div#footer a { + color: #373; + text-decoration: none; +} +div#footer p { + margin: .2em 1em; +} + +/* Basic Style +----------------------------------- */ +h1, h2, h3, h4 { + margin: 1em auto; + font-family: 'Trebuchet MS', 'Verdana', Sans-serif; + color: #555; + font-weight: normal; +} +h1 { font-size: 200% } +h2 { font-size: 170% } +h3 { font-size: 150% } +h4 { font-size: 120% } +a:link { color: #060; font-weight: bold } +a:visited { color: #060; text-decoration: none } +hr { margin: auto 12px } +pre { color: #036 } + +dl dt { + font-style: italic; + margin-top: .5em; + font-weight: bold; + color: #555; +} +hr { + color: #373; + background-color: #373; + height: 1px; + border: 0; + width: 100%; + margin: 2em 0; +} + +/* Pudge Elements +--------------------------------- */ + +h1.pudge-member-page-heading { + font-size: 300%; + margin-top: .4em; + margin-bottom: .4em; +} +h4.pudge-member-page-subheading { + font-size: 150%; + font-style: italic; + margin-top: -1.3em; + margin-left: 2em; + color: #999; +} +p.pudge-member-blurb { + font-style: italic; + font-weight: bold; + font-size: 120%; + margin-top: 0.2em; + color: #6a6; +} +div.pudge-module-doc { + max-width: 45em; +} +div.pudge-section { + margin-left: 2em; + max-width: 45em; +} +p.pudge-member-blurb { + font-style: italic; + font-weight: bold; + font-size: 120%; +} + +/* Section Navigation +----------------------------------- */ + +div#pudge-section-nav +{ + margin: 1em 0 1.5em 0; + padding: 0; + height: 20px; +} + +div#pudge-section-nav ul { + border: 0; + margin: 0; + padding: 0; + list-style-type: none; + text-align: center; + border-right: 1px solid #aaa; +} +div#pudge-section-nav ul li +{ + display: block; + float: left; + text-align: center; + padding: 0; + margin: 0; +} + +div#pudge-section-nav ul li .pudge-section-link, +div#pudge-section-nav ul li .pudge-missing-section-link +{ + background: #aaa; + width: 11em; + height: 1.8em; + border: 0; + border-right: 3px solid #fff; + padding: 0; + margin: 0 0 10px 0; + color: #ddd; + text-decoration: none; + display: block; + text-align: center; + font: normal 10px/18px "Lucida Grande", "Lucida Sans Unicode", verdana, lucida, sans-serif; + font-weight: bold; + cursor: hand; +} + +div#pudge-section-nav ul li a:hover +{ + color: #fff; + background: #393; +} + +div#pudge-section-nav ul li 
.pudge-section-link +{ + background: #373; + color: #9c9; +} + +/* Module Lists +----------------------------------- */ +dl.pudge-module-list dt { + font-style: normal; + font-size: 110%; +} +dl.pudge-module-list dd { + color: #555; +} + +/* misc */ +pre, tt { + font-size: 120%; +} diff --git a/src/simplejson/docs/module-index.html b/src/simplejson/docs/module-index.html new file mode 100644 index 00000000..5e92fca1 --- /dev/null +++ b/src/simplejson/docs/module-index.html @@ -0,0 +1,111 @@ + + + + + simplejson 1.7.1 -- Module Reference + + + +
+ +
+

simplejson 1.7.1

+
+ index + + + + + + +
+ + + simplejson + + + +
+
+ +
+ +

+ simplejson 1.7.1 +

+

+ Module Reference +

+ +

Packages and Modules

+ +
+ +
+ simplejson + +
+
+ A simple, fast, extensible JSON encoder and decoder +
+ +
+ simplejson.encoder + +
+
+ Implementation of JSONEncoder +
+ +
+ simplejson.jsonfilter + +
+ + +
+ simplejson.tests + +
+ + +
+ simplejson.decoder + +
+
+ Implementation of JSONDecoder +
+ +
+ simplejson.scanner + +
+
+ Iterator based sre token scanner +
+ +
+ simplejson._speedups + +
+ + +
+ +
+ + +
+ + + \ No newline at end of file diff --git a/src/simplejson/docs/module-simplejson-index.html b/src/simplejson/docs/module-simplejson-index.html new file mode 100644 index 00000000..935e0269 --- /dev/null +++ b/src/simplejson/docs/module-simplejson-index.html @@ -0,0 +1,127 @@ + + + + + Index of simplejson module + + + +
+ +
+

simplejson 1.7.1

+
+ index + + + + + + +
+ + + simplejson + + + + + details + + + tree + + + +
+
+ +
+ +

Index of the simplejson module

+ +
    +
  • + m + + simplejson + + ... + + - A simple, fast, extensible JSON encoder and decoder + +
      + + + + + +
    • + f + + load + + ... + + - Deserialize fp (a .read()-supporting file-like object containing +a JSON document) to a Python object. + + +
    • + f + + dump + + ... + + - Serialize obj as a JSON formatted stream to fp (a +.write()-supporting file-like object). + + +
    • + f + + dumps + + ... + + - Serialize obj to a JSON formatted str. + + +
    • + f + + loads + + ... + + - Deserialize s (a str or unicode instance containing a JSON +document) to a Python object. + + +
    • + + + +
    +
  • +
+ + + +
+ + +
+ + + \ No newline at end of file diff --git a/src/simplejson/docs/module-simplejson.html b/src/simplejson/docs/module-simplejson.html new file mode 100644 index 00000000..c142f3ff --- /dev/null +++ b/src/simplejson/docs/module-simplejson.html @@ -0,0 +1,455 @@ + + + + + simplejson -- A simple, fast, extensible JSON encoder and decoder + + + +
+ +
+

simplejson 1.7.1

+
+ index + + + + + + +
+ + + simplejson + + + + + details + + + tree + + + +
+
+ +
+ +

+ simplejson +

+

+ A simple, fast, extensible JSON encoder and decoder +

+ +
+ +
+
+
+

JSON (JavaScript Object Notation) <http://json.org> is a subset of +JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data +interchange format.

+

simplejson exposes an API familiar to users of the standard library +marshal and pickle modules.

+

Encoding basic Python object hierarchies:

+
+>>> import simplejson
+>>> simplejson.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
+'["foo", {"bar": ["baz", null, 1.0, 2]}]'
+>>> print simplejson.dumps("\"foo\bar")
+"\"foo\bar"
+>>> print simplejson.dumps(u'\u1234')
+"\u1234"
+>>> print simplejson.dumps('\\')
+"\\"
+>>> print simplejson.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
+{"a": 0, "b": 0, "c": 0}
+>>> from StringIO import StringIO
+>>> io = StringIO()
+>>> simplejson.dump(['streaming API'], io)
+>>> io.getvalue()
+'["streaming API"]'
+
+

Compact encoding:

+
+>>> import simplejson
+>>> simplejson.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
+'[1,2,3,{"4":5,"6":7}]'
+
+

Pretty printing:

+
+>>> import simplejson
+>>> print simplejson.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
+{
+    "4": 5,
+    "6": 7
+}
+
+

Decoding JSON:

+
+>>> import simplejson
+>>> simplejson.loads('["foo", {"bar":["baz", null, 1.0, 2]}]')
+[u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
+>>> simplejson.loads('"\\"foo\\bar"')
+u'"foo\x08ar'
+>>> from StringIO import StringIO
+>>> io = StringIO('["streaming API"]')
+>>> simplejson.load(io)
+[u'streaming API']
+
+

Specializing JSON object decoding:

+
+>>> import simplejson
+>>> def as_complex(dct):
+...     if '__complex__' in dct:
+...         return complex(dct['real'], dct['imag'])
+...     return dct
+...
+>>> simplejson.loads('{"__complex__": true, "real": 1, "imag": 2}',
+...     object_hook=as_complex)
+(1+2j)
+
+

Extending JSONEncoder:

+
+>>> import simplejson
+>>> class ComplexEncoder(simplejson.JSONEncoder):
+...     def default(self, obj):
+...         if isinstance(obj, complex):
+...             return [obj.real, obj.imag]
+...         return simplejson.JSONEncoder.default(self, obj)
+...
+>>> dumps(2 + 1j, cls=ComplexEncoder)
+'[2.0, 1.0]'
+>>> ComplexEncoder().encode(2 + 1j)
+'[2.0, 1.0]'
+>>> list(ComplexEncoder().iterencode(2 + 1j))
+['[', '2.0', ', ', '1.0', ']']
+
+

Note that the JSON produced by this module's default settings +is a subset of YAML, so it may be used as a serializer for that as well.

+ +
+ +
+ + + +

Functions

+
+ +

f + dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None, separators=None, encoding='utf-8', **kw) + ... +

+
+ +

+ Serialize obj as a JSON formatted stream to fp (a +.write()-supporting file-like object). +

+

If skipkeys is True then dict keys that are not basic types +(str, unicode, int, long, float, bool, None) +will be skipped instead of raising a TypeError.

+

If ensure_ascii is False, then some chunks written to fp +may be unicode instances, subject to normal Python str to +unicode coercion rules. Unless fp.write() explicitly +understands unicode (as in codecs.getwriter()), this is likely +to cause an error.

+

If check_circular is False, then the circular reference check +for container types will be skipped and a circular reference will +result in an OverflowError (or worse).

+

If allow_nan is False, then it will be a ValueError to +serialize out of range float values (nan, inf, -inf) +in strict compliance of the JSON specification, instead of using the +JavaScript equivalents (NaN, Infinity, -Infinity).

+

If indent is a non-negative integer, then JSON array elements and object +members will be pretty-printed with that indent level. An indent level +of 0 will only insert newlines. None is the most compact representation.

+

If separators is an (item_separator, dict_separator) tuple +then it will be used instead of the default (', ', ': ') separators. +(',', ':') is the most compact JSON representation.

+

encoding is the character encoding for str instances, default is UTF-8.

+

To use a custom JSONEncoder subclass (e.g. one that overrides the +.default() method to serialize additional types), specify it with +the cls kwarg.

+ + +
+
+ +

f + dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None, separators=None, encoding='utf-8', **kw) + ... +

+
+ +

+ Serialize obj to a JSON formatted str. +

+

If skipkeys is True then dict keys that are not basic types +(str, unicode, int, long, float, bool, None) +will be skipped instead of raising a TypeError.

+

If ensure_ascii is False, then the return value will be a +unicode instance subject to normal Python str to unicode +coercion rules instead of being escaped to an ASCII str.

+

If check_circular is False, then the circular reference check +for container types will be skipped and a circular reference will +result in an OverflowError (or worse).

+

If allow_nan is False, then it will be a ValueError to +serialize out of range float values (nan, inf, -inf) in +strict compliance of the JSON specification, instead of using the +JavaScript equivalents (NaN, Infinity, -Infinity).

+

If indent is a non-negative integer, then JSON array elements and +object members will be pretty-printed with that indent level. An indent +level of 0 will only insert newlines. None is the most compact +representation.

+

If separators is an (item_separator, dict_separator) tuple +then it will be used instead of the default (', ', ': ') separators. +(',', ':') is the most compact JSON representation.

+

encoding is the character encoding for str instances, default is UTF-8.

+

To use a custom JSONEncoder subclass (e.g. one that overrides the +.default() method to serialize additional types), specify it with +the cls kwarg.

+ + +
+
+ +

f + load(fp, encoding=None, cls=None, object_hook=None, **kw) + ... +

+
+ +

+ Deserialize fp (a .read()-supporting file-like object containing +a JSON document) to a Python object. +

+

If the contents of fp are encoded with an ASCII based encoding other +than utf-8 (e.g. latin-1), then an appropriate encoding name must +be specified. Encodings that are not ASCII based (such as UCS-2) are +not allowed, and should be wrapped with +codecs.getreader(encoding)(fp), or simply decoded to a unicode +object and passed to loads().

+

object_hook is an optional function that will be called with the +result of any object literal decode (a dict). The return value of +object_hook will be used instead of the dict. This feature +can be used to implement custom decoders (e.g. JSON-RPC class hinting).

+

To use a custom JSONDecoder subclass, specify it with the cls +kwarg.

+ + +
+
+ +

f + loads(s, encoding=None, cls=None, object_hook=None, **kw) + ... +

+
+ +

+ Deserialize s (a str or unicode instance containing a JSON +document) to a Python object. +

+

If s is a str instance and is encoded with an ASCII based encoding +other than utf-8 (e.g. latin-1) then an appropriate encoding name +must be specified. Encodings that are not ASCII based (such as UCS-2) +are not allowed and should be decoded to unicode first.

+

object_hook is an optional function that will be called with the +result of any object literal decode (a dict). The return value of +object_hook will be used instead of the dict. This feature +can be used to implement custom decoders (e.g. JSON-RPC class hinting).

+

To use a custom JSONDecoder subclass, specify it with the cls +kwarg.

+ + +
+
+ + + +

Classes

+
+

C + + JSONEncoder(...) + ... +

+
+ +

+ Extensible JSON <http://json.org> encoder for Python data structures. +

+

Supports the following objects and types by default:

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
PythonJSON
dictobject
list, tuplearray
str, unicodestring
int, long, floatnumber
Truetrue
Falsefalse
Nonenull
+

To extend this to recognize other objects, subclass and implement a +.default() method that returns a serializable +object for o if possible; otherwise it should call the superclass +implementation (to raise TypeError).

+ + + +

+ This class contains + 5 members. +

+
+
+

C + + JSONDecoder(...) + ... +

+
+ +

+ Simple JSON <http://json.org> decoder +

+

Performs the following translations in decoding:

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
JSONPython
objectdict
arraylist
stringunicode
number (int)int, long
number (real)float
trueTrue
falseFalse
nullNone
+

It also understands NaN, Infinity, and -Infinity as +their corresponding float values, which is outside the JSON spec.

+ + + +

+ This class contains + 4 members. +

+
+
+ + + +

+ + + + See + the source + for more information. + +

+ +
+ + +
+ + + \ No newline at end of file diff --git a/src/simplejson/docs/pudge.css b/src/simplejson/docs/pudge.css new file mode 100644 index 00000000..b3702171 --- /dev/null +++ b/src/simplejson/docs/pudge.css @@ -0,0 +1,60 @@ +/* Layout +----------------------------------- */ + +@import url("rst.css"); + +/* Pudge Elements +----------------------------------- */ +.note { font-size: 90% } +h4.pudge-member-name { + font-size: 110%; + margin-bottom: 0; +} +h4.pudge-member-name a.obj-link { + font-weight: bold; + text-decoration: none; +} +h4.pudge-member-name .prefix { + font-style: oblique; + padding-right: 6px; + font-weight: bold; + color: #c9c; +} +h1.pudge-member-page-heading { + font-size: 250%; +} +h4.pudge-member-page-subheading { + font-size: 150%; + font-style: italic; +} +h4.pudge-member-page-subheading p { + display: inline; +} +div.pudge-member { + margin-top: 1.5em; + margin-bottom: 1.5em; +} +ul.pudge-module-index { + margin-left: 0; + padding-left: 0; +} +ul.pudge-module-index ul { + padding-left: 1.5em; + margin-left: 0; +} +ul.pudge-module-index li { + list-style-type: none; +} +ul.pudge-module-index .prefix { + font-style: oblique; + padding-right: 6px; + font-size: 90%; + font-weight: bold; + color: purple; +} +ul.pudge-module-index a { + text-decoration: none; +} +div.pudge-section { + margin-left: 2em; +} \ No newline at end of file diff --git a/src/simplejson/docs/rst.css b/src/simplejson/docs/rst.css new file mode 100644 index 00000000..f48c7232 --- /dev/null +++ b/src/simplejson/docs/rst.css @@ -0,0 +1,142 @@ +/* Headings + ------------------------------- */ + +.rst h1 { font-size: 110% } +.rst h2 { font-size: 100% } + +/*.rst-doc h1 { font-size: 200% } +.rst-doc h2 { font-size: 140% } +.rst-doc h3 { font-size: 110% }*/ +.rst-doc h1.title { font-size: 220% } +.rst-doc h1 a, .rst-doc h2 a, .rst-doc h3 a { + color: inherit ; + font-weight: inherit; + text-decoration: inherit; +} + +/* Blockquotes + ------------------------------- */ + +.rst blockquote, +.rst-doc blockquote { + font-style: italic; + font-family: Georgia, serif; + max-width: 30em; +} + +/* Document Info + ------------------------------- */ +.rst-doc table.docinfo { + margin: 2em 0 + width: 100%; +} +.rst-doc table.docinfo th { + text-align: left ; + padding-right: 2em; + color: #555; +} +.rst-doc table.docinfo td { + text-align: left +} + +/* Field Lists + ------------------------------- */ + +.rst table.field-list, +.rst-doc table.field-list { + border: 0; + margin-left: 0; +} +.rst table.field-list ul, +.rst-doc table.field-list ul { + margin-left: 0; +} +.rst-doc table.field-list ul { + border: 0; + margin-left: 2em; + background: #fff; + color: #119; +} + +/* Tables + ------------------------------- */ + +.rst table.docutils, +.rst-doc table.docutils { + border: 0; +} +.rst table.docutils th, +.rst-doc table.docutils th { + border: 0; + background: #777; + color: #fff; + padding: 3px; +} +.rst table.docutils td, +.rst-doc table.docutils td { + border: 0; + border-bottom: 1px solid #ccc; + padding-bottom: 2px; +} + +/* Contents and Back References + ------------------------------- */ + +.rst-doc div.contents { + margin: 2em inherit; +} +.rst-doc div.contents ul { + margin-left: 0; + padding-left: 2em; + line-height: 150%; +} +.rst-doc div.contents ul li { + font-weight: bold; + list-style-type: none; +} +.rst-doc div.contents p.topic-title { + font-size: 160%; + font-weight: normal; + color: #555; + margin-top: .5em; +} +.rst-doc .contents .reference, +.rst-doc .toc-backref { + text-decoration: none; +} + 
+/* Admonitions + ------------------------------- */ + +.rst-doc div.admonition, +.rst-doc div.warning, +.rst-doc div.note { + margin: 2.5em 6em; + padding: .6em 2.5em; + background: #ddf; + border: 2px solid #ccc; + font-family: Georgia, serif; + color: #333; +} +.rst-doc div.warning { + background: #ff8; + border-color: #fe2; +} +.rst-doc div .admonition-title { + font-weight: bold; + font-style: italic; + color: #223; + font-size: 110%; + font-family: sans-serif; +} + +/* Misc + ------------------------------- */ + +tt.literal { + color: #333; +} +.footnote-reference { + vertical-align: super; + font-size: 20%; +} \ No newline at end of file diff --git a/src/simplejson/docs/simplejson/__init__.py.html b/src/simplejson/docs/simplejson/__init__.py.html new file mode 100644 index 00000000..fb67bda2 --- /dev/null +++ b/src/simplejson/docs/simplejson/__init__.py.html @@ -0,0 +1,357 @@ +/Users/bob/src/simplejson/simplejson/__init__.py + +
0001r"""
+0002A simple, fast, extensible JSON encoder and decoder
+0003
+0004JSON (JavaScript Object Notation) <http://json.org> is a subset of
+0005JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
+0006interchange format.
+0007
+0008simplejson exposes an API familiar to users of the standard library
+0009marshal and pickle modules.
+0010
+0011Encoding basic Python object hierarchies::
+0012    
+0013    >>> import simplejson
+0014    >>> simplejson.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
+0015    '["foo", {"bar": ["baz", null, 1.0, 2]}]'
+0016    >>> print simplejson.dumps("\"foo\bar")
+0017    "\"foo\bar"
+0018    >>> print simplejson.dumps(u'\u1234')
+0019    "\u1234"
+0020    >>> print simplejson.dumps('\\')
+0021    "\\"
+0022    >>> print simplejson.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
+0023    {"a": 0, "b": 0, "c": 0}
+0024    >>> from StringIO import StringIO
+0025    >>> io = StringIO()
+0026    >>> simplejson.dump(['streaming API'], io)
+0027    >>> io.getvalue()
+0028    '["streaming API"]'
+0029
+0030Compact encoding::
+0031
+0032    >>> import simplejson
+0033    >>> simplejson.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
+0034    '[1,2,3,{"4":5,"6":7}]'
+0035
+0036Pretty printing::
+0037
+0038    >>> import simplejson
+0039    >>> print simplejson.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
+0040    {
+0041        "4": 5, 
+0042        "6": 7
+0043    }
+0044
+0045Decoding JSON::
+0046    
+0047    >>> import simplejson
+0048    >>> simplejson.loads('["foo", {"bar":["baz", null, 1.0, 2]}]')
+0049    [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
+0050    >>> simplejson.loads('"\\"foo\\bar"')
+0051    u'"foo\x08ar'
+0052    >>> from StringIO import StringIO
+0053    >>> io = StringIO('["streaming API"]')
+0054    >>> simplejson.load(io)
+0055    [u'streaming API']
+0056
+0057Specializing JSON object decoding::
+0058
+0059    >>> import simplejson
+0060    >>> def as_complex(dct):
+0061    ...     if '__complex__' in dct:
+0062    ...         return complex(dct['real'], dct['imag'])
+0063    ...     return dct
+0064    ... 
+0065    >>> simplejson.loads('{"__complex__": true, "real": 1, "imag": 2}',
+0066    ...     object_hook=as_complex)
+0067    (1+2j)
+0068
+0069Extending JSONEncoder::
+0070    
+0071    >>> import simplejson
+0072    >>> class ComplexEncoder(simplejson.JSONEncoder):
+0073    ...     def default(self, obj):
+0074    ...         if isinstance(obj, complex):
+0075    ...             return [obj.real, obj.imag]
+0076    ...         return simplejson.JSONEncoder.default(self, obj)
+0077    ... 
+0078    >>> dumps(2 + 1j, cls=ComplexEncoder)
+0079    '[2.0, 1.0]'
+0080    >>> ComplexEncoder().encode(2 + 1j)
+0081    '[2.0, 1.0]'
+0082    >>> list(ComplexEncoder().iterencode(2 + 1j))
+0083    ['[', '2.0', ', ', '1.0', ']']
+0084    
+0085
+0086Note that the JSON produced by this module's default settings
+0087is a subset of YAML, so it may be used as a serializer for that as well.
+0088"""
+0089__version__ = '1.7.1'
+0090__all__ = [
+0091    'dump', 'dumps', 'load', 'loads',
+0092    'JSONDecoder', 'JSONEncoder',
+0093]
+0094
+0095from decoder import JSONDecoder
+0096from encoder import JSONEncoder
+0097
+0098_default_encoder = JSONEncoder(
+0099    skipkeys=False,
+0100    ensure_ascii=True,
+0101    check_circular=True,
+0102    allow_nan=True,
+0103    indent=None,
+0104    separators=None,
+0105    encoding='utf-8'
+0106)
+0107
+0108def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
+0109        allow_nan=True, cls=None, indent=None, separators=None,
+0110        encoding='utf-8', **kw):
+0111    """
+0112    Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
+0113    ``.write()``-supporting file-like object).
+0114
+0115    If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types
+0116    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) 
+0117    will be skipped instead of raising a ``TypeError``.
+0118
+0119    If ``ensure_ascii`` is ``False``, then some chunks written to ``fp``
+0120    may be ``unicode`` instances, subject to normal Python ``str`` to
+0121    ``unicode`` coercion rules. Unless ``fp.write()`` explicitly
+0122    understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
+0123    to cause an error.
+0124
+0125    If ``check_circular`` is ``False``, then the circular reference check
+0126    for container types will be skipped and a circular reference will
+0127    result in an ``OverflowError`` (or worse).
+0128
+0129    If ``allow_nan`` is ``False``, then it will be a ``ValueError`` to
+0130    serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
+0131    in strict compliance of the JSON specification, instead of using the
+0132    JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
+0133
+0134    If ``indent`` is a non-negative integer, then JSON array elements and object
+0135    members will be pretty-printed with that indent level. An indent level
+0136    of 0 will only insert newlines. ``None`` is the most compact representation.
+0137
+0138    If ``separators`` is an ``(item_separator, dict_separator)`` tuple
+0139    then it will be used instead of the default ``(', ', ': ')`` separators.
+0140    ``(',', ':')`` is the most compact JSON representation.
+0141
+0142    ``encoding`` is the character encoding for str instances, default is UTF-8.
+0143
+0144    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
+0145    ``.default()`` method to serialize additional types), specify it with
+0146    the ``cls`` kwarg.
+0147    """
+0148    # cached encoder
+0149    if (skipkeys is False and ensure_ascii is True and
+0150        check_circular is True and allow_nan is True and
+0151        cls is None and indent is None and separators is None and
+0152        encoding == 'utf-8' and not kw):
+0153        iterable = _default_encoder.iterencode(obj)
+0154    else:
+0155        if cls is None:
+0156            cls = JSONEncoder
+0157        iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
+0158            check_circular=check_circular, allow_nan=allow_nan, indent=indent,
+0159            separators=separators, encoding=encoding, **kw).iterencode(obj)
+0160    # could accelerate with writelines in some versions of Python, at
+0161    # a debuggability cost
+0162    for chunk in iterable:
+0163        fp.write(chunk)
+0164
+0165
+0166def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
+0167        allow_nan=True, cls=None, indent=None, separators=None,
+0168        encoding='utf-8', **kw):
+0169    """
+0170    Serialize ``obj`` to a JSON formatted ``str``.
+0171
+0172    If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types
+0173    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) 
+0174    will be skipped instead of raising a ``TypeError``.
+0175
+0176    If ``ensure_ascii`` is ``False``, then the return value will be a
+0177    ``unicode`` instance subject to normal Python ``str`` to ``unicode``
+0178    coercion rules instead of being escaped to an ASCII ``str``.
+0179
+0180    If ``check_circular`` is ``False``, then the circular reference check
+0181    for container types will be skipped and a circular reference will
+0182    result in an ``OverflowError`` (or worse).
+0183
+0184    If ``allow_nan`` is ``False``, then it will be a ``ValueError`` to
+0185    serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
+0186    strict compliance of the JSON specification, instead of using the
+0187    JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
+0188
+0189    If ``indent`` is a non-negative integer, then JSON array elements and
+0190    object members will be pretty-printed with that indent level. An indent
+0191    level of 0 will only insert newlines. ``None`` is the most compact
+0192    representation.
+0193
+0194    If ``separators`` is an ``(item_separator, dict_separator)`` tuple
+0195    then it will be used instead of the default ``(', ', ': ')`` separators.
+0196    ``(',', ':')`` is the most compact JSON representation.
+0197
+0198    ``encoding`` is the character encoding for str instances, default is UTF-8.
+0199
+0200    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
+0201    ``.default()`` method to serialize additional types), specify it with
+0202    the ``cls`` kwarg.
+0203    """
+0204    # cached encoder
+0205    if (skipkeys is False and ensure_ascii is True and
+0206        check_circular is True and allow_nan is True and
+0207        cls is None and indent is None and separators is None and
+0208        encoding == 'utf-8' and not kw):
+0209        return _default_encoder.encode(obj)
+0210    if cls is None:
+0211        cls = JSONEncoder
+0212    return cls(
+0213        skipkeys=skipkeys, ensure_ascii=ensure_ascii,
+0214        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
+0215        separators=separators, encoding=encoding,
+0216        **kw).encode(obj)
+0217
+0218_default_decoder = JSONDecoder(encoding=None, object_hook=None)
+0219
+0220def load(fp, encoding=None, cls=None, object_hook=None, **kw):
+0221    """
+0222    Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
+0223    a JSON document) to a Python object.
+0224
+0225    If the contents of ``fp`` are encoded with an ASCII based encoding other
+0226    than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
+0227    be specified. Encodings that are not ASCII based (such as UCS-2) are
+0228    not allowed, and should be wrapped with
+0229    ``codecs.getreader(encoding)(fp)``, or simply decoded to a ``unicode``
+0230    object and passed to ``loads()``
+0231
+0232    ``object_hook`` is an optional function that will be called with the
+0233    result of any object literal decode (a ``dict``). The return value of
+0234    ``object_hook`` will be used instead of the ``dict``. This feature
+0235    can be used to implement custom decoders (e.g. JSON-RPC class hinting).
+0236    
+0237    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
+0238    kwarg.
+0239    """
+0240    return loads(fp.read(),
+0241        encoding=encoding, cls=cls, object_hook=object_hook, **kw)
+0242
+0243def loads(s, encoding=None, cls=None, object_hook=None, **kw):
+0244    """
+0245    Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
+0246    document) to a Python object.
+0247
+0248    If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
+0249    other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
+0250    must be specified. Encodings that are not ASCII based (such as UCS-2)
+0251    are not allowed and should be decoded to ``unicode`` first.
+0252
+0253    ``object_hook`` is an optional function that will be called with the
+0254    result of any object literal decode (a ``dict``). The return value of
+0255    ``object_hook`` will be used instead of the ``dict``. This feature
+0256    can be used to implement custom decoders (e.g. JSON-RPC class hinting).
+0257
+0258    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
+0259    kwarg.
+0260    """
+0261    if cls is None and encoding is None and object_hook is None and not kw:
+0262        return _default_decoder.decode(s)
+0263    if cls is None:
+0264        cls = JSONDecoder
+0265    if object_hook is not None:
+0266        kw['object_hook'] = object_hook
+0267    return cls(encoding=encoding, **kw).decode(s)
+0268
+0269def read(s):
+0270    """
+0271    json-py API compatibility hook. Use loads(s) instead.
+0272    """
+0273    import warnings
+0274    warnings.warn("simplejson.loads(s) should be used instead of read(s)",
+0275        DeprecationWarning)
+0276    return loads(s)
+0277
+0278def write(obj):
+0279    """
+0280    json-py API compatibility hook. Use dumps(s) instead.
+0281    """
+0282    import warnings
+0283    warnings.warn("simplejson.dumps(s) should be used instead of write(s)",
+0284        DeprecationWarning)
+0285    return dumps(obj)
\ No newline at end of file diff --git a/src/simplejson/docs/simplejson/decoder.py.html b/src/simplejson/docs/simplejson/decoder.py.html new file mode 100644 index 00000000..d252e691 --- /dev/null +++ b/src/simplejson/docs/simplejson/decoder.py.html @@ -0,0 +1,345 @@ +/Users/bob/src/simplejson/simplejson/decoder.py + +
0001"""
+0002Implementation of JSONDecoder
+0003"""
+0004import re
+0005
+0006from simplejson.scanner import Scanner, pattern
+0007
+0008FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
+0009
+0010def _floatconstants():
+0011    import struct
+0012    import sys
+0013    _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
+0014    if sys.byteorder != 'big':
+0015        _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
+0016    nan, inf = struct.unpack('dd', _BYTES)
+0017    return nan, inf, -inf
+0018
+0019NaN, PosInf, NegInf = _floatconstants()
+0020
+0021def linecol(doc, pos):
+0022    lineno = doc.count('\n', 0, pos) + 1
+0023    if lineno == 1:
+0024        colno = pos
+0025    else:
+0026        colno = pos - doc.rindex('\n', 0, pos)
+0027    return lineno, colno
+0028
+0029def errmsg(msg, doc, pos, end=None):
+0030    lineno, colno = linecol(doc, pos)
+0031    if end is None:
+0032        return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
+0033    endlineno, endcolno = linecol(doc, end)
+0034    return '%s: line %d column %d - line %d column %d (char %d - %d)' % (
+0035        msg, lineno, colno, endlineno, endcolno, pos, end)
+0036
+0037_CONSTANTS = {
+0038    '-Infinity': NegInf,
+0039    'Infinity': PosInf,
+0040    'NaN': NaN,
+0041    'true': True,
+0042    'false': False,
+0043    'null': None,
+0044}
+0045
+0046def JSONConstant(match, context, c=_CONSTANTS):
+0047    return c[match.group(0)], None
+0048pattern('(-?Infinity|NaN|true|false|null)')(JSONConstant)
+0049
+0050def JSONNumber(match, context):
+0051    match = JSONNumber.regex.match(match.string, *match.span())
+0052    integer, frac, exp = match.groups()
+0053    if frac or exp:
+0054        res = float(integer + (frac or '') + (exp or ''))
+0055    else:
+0056        res = int(integer)
+0057    return res, None
+0058pattern(r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?')(JSONNumber)
+0059
+0060STRINGCHUNK = re.compile(r'(.*?)(["\\])', FLAGS)
+0061BACKSLASH = {
+0062    '"': u'"', '\\': u'\\', '/': u'/',
+0063    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
+0064}
+0065
+0066DEFAULT_ENCODING = "utf-8"
+0067
+0068def scanstring(s, end, encoding=None, _b=BACKSLASH, _m=STRINGCHUNK.match):
+0069    if encoding is None:
+0070        encoding = DEFAULT_ENCODING
+0071    chunks = []
+0072    _append = chunks.append
+0073    begin = end - 1
+0074    while 1:
+0075        chunk = _m(s, end)
+0076        if chunk is None:
+0077            raise ValueError(
+0078                errmsg("Unterminated string starting at", s, begin))
+0079        end = chunk.end()
+0080        content, terminator = chunk.groups()
+0081        if content:
+0082            if not isinstance(content, unicode):
+0083                content = unicode(content, encoding)
+0084            _append(content)
+0085        if terminator == '"':
+0086            break
+0087        try:
+0088            esc = s[end]
+0089        except IndexError:
+0090            raise ValueError(
+0091                errmsg("Unterminated string starting at", s, begin))
+0092        if esc != 'u':
+0093            try:
+0094                m = _b[esc]
+0095            except KeyError:
+0096                raise ValueError(
+0097                    errmsg("Invalid \\escape: %r" % (esc,), s, end))
+0098            end += 1
+0099        else:
+0100            esc = s[end + 1:end + 5]
+0101            try:
+0102                m = unichr(int(esc, 16))
+0103                if len(esc) != 4 or not esc.isalnum():
+0104                    raise ValueError
+0105            except ValueError:
+0106                raise ValueError(errmsg("Invalid \\uXXXX escape", s, end))
+0107            end += 5
+0108        _append(m)
+0109    return u''.join(chunks), end
+0110
+0111def JSONString(match, context):
+0112    encoding = getattr(context, 'encoding', None)
+0113    return scanstring(match.string, match.end(), encoding)
+0114pattern(r'"')(JSONString)
+0115
+0116WHITESPACE = re.compile(r'\s*', FLAGS)
+0117
+0118def JSONObject(match, context, _w=WHITESPACE.match):
+0119    pairs = {}
+0120    s = match.string
+0121    end = _w(s, match.end()).end()
+0122    nextchar = s[end:end + 1]
+0123    # trivial empty object
+0124    if nextchar == '}':
+0125        return pairs, end + 1
+0126    if nextchar != '"':
+0127        raise ValueError(errmsg("Expecting property name", s, end))
+0128    end += 1
+0129    encoding = getattr(context, 'encoding', None)
+0130    iterscan = JSONScanner.iterscan
+0131    while True:
+0132        key, end = scanstring(s, end, encoding)
+0133        end = _w(s, end).end()
+0134        if s[end:end + 1] != ':':
+0135            raise ValueError(errmsg("Expecting : delimiter", s, end))
+0136        end = _w(s, end + 1).end()
+0137        try:
+0138            value, end = iterscan(s, idx=end, context=context).next()
+0139        except StopIteration:
+0140            raise ValueError(errmsg("Expecting object", s, end))
+0141        pairs[key] = value
+0142        end = _w(s, end).end()
+0143        nextchar = s[end:end + 1]
+0144        end += 1
+0145        if nextchar == '}':
+0146            break
+0147        if nextchar != ',':
+0148            raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
+0149        end = _w(s, end).end()
+0150        nextchar = s[end:end + 1]
+0151        end += 1
+0152        if nextchar != '"':
+0153            raise ValueError(errmsg("Expecting property name", s, end - 1))
+0154    object_hook = getattr(context, 'object_hook', None)
+0155    if object_hook is not None:
+0156        pairs = object_hook(pairs)
+0157    return pairs, end
+0158pattern(r'{')(JSONObject)
+0159
+0160def JSONArray(match, context, _w=WHITESPACE.match):
+0161    values = []
+0162    s = match.string
+0163    end = _w(s, match.end()).end()
+0164    # look-ahead for trivial empty array
+0165    nextchar = s[end:end + 1]
+0166    if nextchar == ']':
+0167        return values, end + 1
+0168    iterscan = JSONScanner.iterscan
+0169    while True:
+0170        try:
+0171            value, end = iterscan(s, idx=end, context=context).next()
+0172        except StopIteration:
+0173            raise ValueError(errmsg("Expecting object", s, end))
+0174        values.append(value)
+0175        end = _w(s, end).end()
+0176        nextchar = s[end:end + 1]
+0177        end += 1
+0178        if nextchar == ']':
+0179            break
+0180        if nextchar != ',':
+0181            raise ValueError(errmsg("Expecting , delimiter", s, end))
+0182        end = _w(s, end).end()
+0183    return values, end
+0184pattern(r'\[')(JSONArray)
+0185
+0186ANYTHING = [
+0187    JSONObject,
+0188    JSONArray,
+0189    JSONString,
+0190    JSONConstant,
+0191    JSONNumber,
+0192]
+0193
+0194JSONScanner = Scanner(ANYTHING)
+0195
+0196class JSONDecoder(object):
+0197    """
+0198    Simple JSON <http://json.org> decoder
+0199
+0200    Performs the following translations in decoding:
+0201    
+0202    +---------------+-------------------+
+0203    | JSON          | Python            |
+0204    +===============+===================+
+0205    | object        | dict              |
+0206    +---------------+-------------------+
+0207    | array         | list              |
+0208    +---------------+-------------------+
+0209    | string        | unicode           |
+0210    +---------------+-------------------+
+0211    | number (int)  | int, long         |
+0212    +---------------+-------------------+
+0213    | number (real) | float             |
+0214    +---------------+-------------------+
+0215    | true          | True              |
+0216    +---------------+-------------------+
+0217    | false         | False             |
+0218    +---------------+-------------------+
+0219    | null          | None              |
+0220    +---------------+-------------------+
+0221
+0222    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
+0223    their corresponding ``float`` values, which is outside the JSON spec.
+0224    """
+0225
+0226    _scanner = Scanner(ANYTHING)
+0227    __all__ = ['__init__', 'decode', 'raw_decode']
+0228
+0229    def __init__(self, encoding=None, object_hook=None):
+0230        """
+0231        ``encoding`` determines the encoding used to interpret any ``str``
+0232        objects decoded by this instance (utf-8 by default).  It has no
+0233        effect when decoding ``unicode`` objects.
+0234        
+0235        Note that currently only encodings that are a superset of ASCII work;
+0236        strings of other encodings should be passed in as ``unicode``.
+0237
+0238        ``object_hook``, if specified, will be called with the result
+0239        of every JSON object decoded and its return value will be used in
+0240        place of the given ``dict``.  This can be used to provide custom
+0241        deserializations (e.g. to support JSON-RPC class hinting).
+0242        """
+0243        self.encoding = encoding
+0244        self.object_hook = object_hook
+0245
+0246    def decode(self, s, _w=WHITESPACE.match):
+0247        """
+0248        Return the Python representation of ``s`` (a ``str`` or ``unicode``
+0249        instance containing a JSON document)
+0250        """
+0251        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
+0252        end = _w(s, end).end()
+0253        if end != len(s):
+0254            raise ValueError(errmsg("Extra data", s, end, len(s)))
+0255        return obj
+0256
+0257    def raw_decode(self, s, **kw):
+0258        """
+0259        Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
+0260        with a JSON document) and return a 2-tuple of the Python
+0261        representation and the index in ``s`` where the document ended.
+0262
+0263        This can be used to decode a JSON document from a string that may
+0264        have extraneous data at the end.
+0265        """
+0266        kw.setdefault('context', self)
+0267        try:
+0268            obj, end = self._scanner.iterscan(s, **kw).next()
+0269        except StopIteration:
+0270            raise ValueError("No JSON object could be decoded")
+0271        return obj, end
+0272
+0273__all__ = ['JSONDecoder']
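``raw_decode()`` is what allows a single JSON value to be pulled out of a
larger string; a small sketch of that behaviour (the input string is made up
for illustration)::

    from simplejson.decoder import JSONDecoder

    decoder = JSONDecoder()
    s = '{"a": 1} trailing garbage'
    obj, end = decoder.raw_decode(s)
    print obj        # {u'a': 1}
    print s[end:]    # ' trailing garbage'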
\ No newline at end of file diff --git a/src/simplejson/docs/simplejson/encoder.py.html b/src/simplejson/docs/simplejson/encoder.py.html new file mode 100644 index 00000000..8c814ab6 --- /dev/null +++ b/src/simplejson/docs/simplejson/encoder.py.html @@ -0,0 +1,443 @@ +/Users/bob/src/simplejson/simplejson/encoder.py + +
0001"""
+0002Implementation of JSONEncoder
+0003"""
+0004import re
+0005try:
+0006    from simplejson import _speedups
+0007except ImportError:
+0008    _speedups = None
+0009
+0010ESCAPE = re.compile(r'[\x00-\x19\\"\b\f\n\r\t]')
+0011ESCAPE_ASCII = re.compile(r'([\\"/]|[^\ -~])')
+0012ESCAPE_DCT = {
+0013    # escape all forward slashes to prevent </script> attack
+0014    '/': '\\/',
+0015    '\\': '\\\\',
+0016    '"': '\\"',
+0017    '\b': '\\b',
+0018    '\f': '\\f',
+0019    '\n': '\\n',
+0020    '\r': '\\r',
+0021    '\t': '\\t',
+0022}
+0023for i in range(0x20):
+0024    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
+0025
+0026# assume this produces an infinity on all machines (probably not guaranteed)
+0027INFINITY = float('1e66666')
+0028
+0029def floatstr(o, allow_nan=True):
+0030    # Check for specials.  Note that this type of test is processor- and/or
+0031    # platform-specific, so do tests which don't depend on the internals.
+0032
+0033    if o != o:
+0034        text = 'NaN'
+0035    elif o == INFINITY:
+0036        text = 'Infinity'
+0037    elif o == -INFINITY:
+0038        text = '-Infinity'
+0039    else:
+0040        return str(o)
+0041
+0042    if not allow_nan:
+0043        raise ValueError("Out of range float values are not JSON compliant: %r"
+0044            % (o,))
+0045
+0046    return text
+0047
+0048
+0049def encode_basestring(s):
+0050    """
+0051    Return a JSON representation of a Python string
+0052    """
+0053    def replace(match):
+0054        return ESCAPE_DCT[match.group(0)]
+0055    return '"' + ESCAPE.sub(replace, s) + '"'
+0056
+0057def encode_basestring_ascii(s):
+0058    def replace(match):
+0059        s = match.group(0)
+0060        try:
+0061            return ESCAPE_DCT[s]
+0062        except KeyError:
+0063            n = ord(s)
+0064            if n < 0x10000:
+0065                return '\\u%04x' % (n,)
+0066            else:
+0067                # surrogate pair
+0068                n -= 0x10000
+0069                s1 = 0xd800 | ((n >> 10) & 0x3ff)
+0070                s2 = 0xdc00 | (n & 0x3ff)
+0071                return '\\u%04x\\u%04x' % (s1, s2)
+0072    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
+0073
+0074try:
+0075    encode_basestring_ascii = _speedups.encode_basestring_ascii
+0076    _need_utf8 = True
+0077except AttributeError:
+0078    _need_utf8 = False
+0079
+0080class JSONEncoder(object):
+0081    """
+0082    Extensible JSON <http://json.org> encoder for Python data structures.
+0083
+0084    Supports the following objects and types by default:
+0085    
+0086    +-------------------+---------------+
+0087    | Python            | JSON          |
+0088    +===================+===============+
+0089    | dict              | object        |
+0090    +-------------------+---------------+
+0091    | list, tuple       | array         |
+0092    +-------------------+---------------+
+0093    | str, unicode      | string        |
+0094    +-------------------+---------------+
+0095    | int, long, float  | number        |
+0096    +-------------------+---------------+
+0097    | True              | true          |
+0098    +-------------------+---------------+
+0099    | False             | false         |
+0100    +-------------------+---------------+
+0101    | None              | null          |
+0102    +-------------------+---------------+
+0103
+0104    To extend this to recognize other objects, subclass and implement a
+0105    ``.default()`` method that returns a serializable object for ``o`` if
+0106    possible; otherwise it should call the superclass
+0107    implementation (to raise ``TypeError``).
+0108    """
+0109    __all__ = ['__init__', 'default', 'encode', 'iterencode']
+0110    item_separator = ', '
+0111    key_separator = ': '
+0112    def __init__(self, skipkeys=False, ensure_ascii=True,
+0113            check_circular=True, allow_nan=True, sort_keys=False,
+0114            indent=None, separators=None, encoding='utf-8'):
+0115        """
+0116        Constructor for JSONEncoder, with sensible defaults.
+0117
+0118        If skipkeys is False, then it is a TypeError to attempt
+0119        encoding of keys that are not str, int, long, float or None.  If
+0120        skipkeys is True, such items are simply skipped.
+0121
+0122        If ensure_ascii is True, the output is guaranteed to be str
+0123        objects with all incoming unicode characters escaped.  If
+0124        ensure_ascii is False, the output will be a unicode object.
+0125
+0126        If check_circular is True, then lists, dicts, and custom encoded
+0127        objects will be checked for circular references during encoding to
+0128        prevent an infinite recursion (which would cause an OverflowError).
+0129        Otherwise, no such check takes place.
+0130
+0131        If allow_nan is True, then NaN, Infinity, and -Infinity will be
+0132        encoded as such.  This behavior is not JSON specification compliant,
+0133        but is consistent with most JavaScript based encoders and decoders.
+0134        Otherwise, it will be a ValueError to encode such floats.
+0135
+0136        If sort_keys is True, then the output of dictionaries will be
+0137        sorted by key; this is useful for regression tests to ensure
+0138        that JSON serializations can be compared on a day-to-day basis.
+0139
+0140        If indent is a non-negative integer, then JSON array
+0141        elements and object members will be pretty-printed with that
+0142        indent level.  An indent level of 0 will only insert newlines.
+0143        None is the most compact representation.
+0144
+0145        If specified, separators should be an (item_separator, key_separator)
+0146        tuple. The default is (', ', ': '). To get the most compact JSON
+0147        representation you should specify (',', ':') to eliminate whitespace.
+0148
+0149        If encoding is not None, then all input strings will be
+0150        transformed into unicode using that encoding prior to JSON-encoding. 
+0151        The default is UTF-8.
+0152        """
+0153
+0154        self.skipkeys = skipkeys
+0155        self.ensure_ascii = ensure_ascii
+0156        self.check_circular = check_circular
+0157        self.allow_nan = allow_nan
+0158        self.sort_keys = sort_keys
+0159        self.indent = indent
+0160        self.current_indent_level = 0
+0161        if separators is not None:
+0162            self.item_separator, self.key_separator = separators
+0163        self.encoding = encoding
+0164
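The effect of the ``indent``, ``separators`` and ``sort_keys`` options
described in the constructor docstring above, as a minimal sketch (the sample
data is invented; the commented output is what these settings are expected to
produce)::

    from simplejson.encoder import JSONEncoder

    data = {'b': 1, 'a': [1, 2]}

    # Most compact form: custom separators remove all whitespace.
    print JSONEncoder(separators=(',', ':'), sort_keys=True).encode(data)
    # {"a":[1,2],"b":1}

    # Pretty-printed form: two-space indent, one element per line.
    print JSONEncoder(indent=2, sort_keys=True).encode(data)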
+0165    def _newline_indent(self):
+0166        return '\n' + (' ' * (self.indent * self.current_indent_level))
+0167
+0168    def _iterencode_list(self, lst, markers=None):
+0169        if not lst:
+0170            yield '[]'
+0171            return
+0172        if markers is not None:
+0173            markerid = id(lst)
+0174            if markerid in markers:
+0175                raise ValueError("Circular reference detected")
+0176            markers[markerid] = lst
+0177        yield '['
+0178        if self.indent is not None:
+0179            self.current_indent_level += 1
+0180            newline_indent = self._newline_indent()
+0181            separator = self.item_separator + newline_indent
+0182            yield newline_indent
+0183        else:
+0184            newline_indent = None
+0185            separator = self.item_separator
+0186        first = True
+0187        for value in lst:
+0188            if first:
+0189                first = False
+0190            else:
+0191                yield separator
+0192            for chunk in self._iterencode(value, markers):
+0193                yield chunk
+0194        if newline_indent is not None:
+0195            self.current_indent_level -= 1
+0196            yield self._newline_indent()
+0197        yield ']'
+0198        if markers is not None:
+0199            del markers[markerid]
+0200
+0201    def _iterencode_dict(self, dct, markers=None):
+0202        if not dct:
+0203            yield '{}'
+0204            return
+0205        if markers is not None:
+0206            markerid = id(dct)
+0207            if markerid in markers:
+0208                raise ValueError("Circular reference detected")
+0209            markers[markerid] = dct
+0210        yield '{'
+0211        key_separator = self.key_separator
+0212        if self.indent is not None:
+0213            self.current_indent_level += 1
+0214            newline_indent = self._newline_indent()
+0215            item_separator = self.item_separator + newline_indent
+0216            yield newline_indent
+0217        else:
+0218            newline_indent = None
+0219            item_separator = self.item_separator
+0220        first = True
+0221        if self.ensure_ascii:
+0222            encoder = encode_basestring_ascii
+0223        else:
+0224            encoder = encode_basestring
+0225        allow_nan = self.allow_nan
+0226        if self.sort_keys:
+0227            keys = dct.keys()
+0228            keys.sort()
+0229            items = [(k, dct[k]) for k in keys]
+0230        else:
+0231            items = dct.iteritems()
+0232        _encoding = self.encoding
+0233        _do_decode = (_encoding is not None
+0234            and not (_need_utf8 and _encoding == 'utf-8'))
+0235        for key, value in items:
+0236            if isinstance(key, str):
+0237                if _do_decode:
+0238                    key = key.decode(_encoding)
+0239            elif isinstance(key, basestring):
+0240                pass
+0241            # JavaScript is weakly typed for these, so it makes sense to
+0242            # also allow them.  Many encoders seem to do something like this.
+0243            elif isinstance(key, float):
+0244                key = floatstr(key, allow_nan)
+0245            elif isinstance(key, (int, long)):
+0246                key = str(key)
+0247            elif key is True:
+0248                key = 'true'
+0249            elif key is False:
+0250                key = 'false'
+0251            elif key is None:
+0252                key = 'null'
+0253            elif self.skipkeys:
+0254                continue
+0255            else:
+0256                raise TypeError("key %r is not a string" % (key,))
+0257            if first:
+0258                first = False
+0259            else:
+0260                yield item_separator
+0261            yield encoder(key)
+0262            yield key_separator
+0263            for chunk in self._iterencode(value, markers):
+0264                yield chunk
+0265        if newline_indent is not None:
+0266            self.current_indent_level -= 1
+0267            yield self._newline_indent()
+0268        yield '}'
+0269        if markers is not None:
+0270            del markers[markerid]
+0271
+0272    def _iterencode(self, o, markers=None):
+0273        if isinstance(o, basestring):
+0274            if self.ensure_ascii:
+0275                encoder = encode_basestring_ascii
+0276            else:
+0277                encoder = encode_basestring
+0278            _encoding = self.encoding
+0279            if (_encoding is not None and isinstance(o, str)
+0280                    and not (_need_utf8 and _encoding == 'utf-8')):
+0281                o = o.decode(_encoding)
+0282            yield encoder(o)
+0283        elif o is None:
+0284            yield 'null'
+0285        elif o is True:
+0286            yield 'true'
+0287        elif o is False:
+0288            yield 'false'
+0289        elif isinstance(o, (int, long)):
+0290            yield str(o)
+0291        elif isinstance(o, float):
+0292            yield floatstr(o, self.allow_nan)
+0293        elif isinstance(o, (list, tuple)):
+0294            for chunk in self._iterencode_list(o, markers):
+0295                yield chunk
+0296        elif isinstance(o, dict):
+0297            for chunk in self._iterencode_dict(o, markers):
+0298                yield chunk
+0299        else:
+0300            if markers is not None:
+0301                markerid = id(o)
+0302                if markerid in markers:
+0303                    raise ValueError("Circular reference detected")
+0304                markers[markerid] = o
+0305            for chunk in self._iterencode_default(o, markers):
+0306                yield chunk
+0307            if markers is not None:
+0308                del markers[markerid]
+0309
+0310    def _iterencode_default(self, o, markers=None):
+0311        newobj = self.default(o)
+0312        return self._iterencode(newobj, markers)
+0313
+0314    def default(self, o):
+0315        """
+0316        Implement this method in a subclass such that it returns
+0317        a serializable object for ``o``, or calls the base implementation
+0318        (to raise a ``TypeError``).
+0319
+0320        For example, to support arbitrary iterators, you could
+0321        implement default like this::
+0322            
+0323            def default(self, o):
+0324                try:
+0325                    iterable = iter(o)
+0326                except TypeError:
+0327                    pass
+0328                else:
+0329                    return list(iterable)
+0330                return JSONEncoder.default(self, o)
+0331        """
+0332        raise TypeError("%r is not JSON serializable" % (o,))
+0333
+0334    def encode(self, o):
+0335        """
+0336        Return a JSON string representation of a Python data structure.
+0337
+0338        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
+0339        '{"foo": ["bar", "baz"]}'
+0340        """
+0341        # This is for extremely simple cases and benchmarks...
+0342        if isinstance(o, basestring):
+0343            if isinstance(o, str):
+0344                _encoding = self.encoding
+0345                if (_encoding is not None
+0346                        and not (_encoding == 'utf-8' and _need_utf8)):
+0347                    o = o.decode(_encoding)
+0348            return encode_basestring_ascii(o)
+0349        # This doesn't pass the iterator directly to ''.join() because it
+0350        # sucks at reporting exceptions.  It's going to do this internally
+0351        # anyway because it uses PySequence_Fast or similar.
+0352        chunks = list(self.iterencode(o))
+0353        return ''.join(chunks)
+0354
+0355    def iterencode(self, o):
+0356        """
+0357        Encode the given object and yield each string
+0358        representation as available.
+0359        
+0360        For example::
+0361            
+0362            for chunk in JSONEncoder().iterencode(bigobject):
+0363                mysocket.write(chunk)
+0364        """
+0365        if self.check_circular:
+0366            markers = {}
+0367        else:
+0368            markers = None
+0369        return self._iterencode(o, markers)
+0370
+0371__all__ = ['JSONEncoder']
\ No newline at end of file diff --git a/src/simplejson/docs/simplejson/jsonfilter.py.html b/src/simplejson/docs/simplejson/jsonfilter.py.html new file mode 100644 index 00000000..874290bf --- /dev/null +++ b/src/simplejson/docs/simplejson/jsonfilter.py.html @@ -0,0 +1,112 @@ +/Users/bob/src/simplejson/simplejson/jsonfilter.py + +
0001import simplejson
+0002import cgi
+0003
+0004class JSONFilter(object):
+0005    def __init__(self, app, mime_type='text/x-json'):
+0006        self.app = app
+0007        self.mime_type = mime_type
+0008
+0009    def __call__(self, environ, start_response):
+0010        # Read JSON POST input to jsonfilter.json if matching mime type
+0011        response = {'status': '200 OK', 'headers': []}
+0012        def json_start_response(status, headers):
+0013            response['status'] = status
+0014            response['headers'].extend(headers)
+0015        environ['jsonfilter.mime_type'] = self.mime_type
+0016        if environ.get('REQUEST_METHOD', '') == 'POST':
+0017            if environ.get('CONTENT_TYPE', '') == self.mime_type:
+0018                args = [_ for _ in [environ.get('CONTENT_LENGTH')] if _]
+0019                data = environ['wsgi.input'].read(*map(int, args))
+0020                environ['jsonfilter.json'] = simplejson.loads(data)
+0021        res = simplejson.dumps(self.app(environ, json_start_response))
+0022        jsonp = cgi.parse_qs(environ.get('QUERY_STRING', '')).get('jsonp')
+0023        if jsonp:
+0024            content_type = 'text/javascript'
+0025            res = ''.join(jsonp + ['(', res, ')'])
+0026        elif 'Opera' in environ.get('HTTP_USER_AGENT', ''):
+0027            # Opera has bunk XMLHttpRequest support for most mime types
+0028            content_type = 'text/plain'
+0029        else:
+0030            content_type = self.mime_type
+0031        headers = [
+0032            ('Content-type', content_type),
+0033            ('Content-length', len(res)),
+0034        ]
+0035        headers.extend(response['headers'])
+0036        start_response(response['status'], headers)
+0037        return [res]
+0038
+0039def factory(app, global_conf, **kw):
+0040    return JSONFilter(app, **kw)
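A rough sketch of wrapping a WSGI application with this filter (the inner
app below is invented for illustration; note that ``JSONFilter`` serialises
the app's return value, so the wrapped app returns a plain Python structure
rather than an iterable of strings)::

    from simplejson.jsonfilter import JSONFilter

    def hello_app(environ, start_response):
        # The filter captures the status and headers reported here and
        # JSON-encodes whatever the app returns.
        start_response('200 OK', [])
        return {'message': 'hello',
                'posted': environ.get('jsonfilter.json')}

    application = JSONFilter(hello_app)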
\ No newline at end of file diff --git a/src/simplejson/docs/simplejson/scanner.py.html b/src/simplejson/docs/simplejson/scanner.py.html new file mode 100644 index 00000000..031da08d --- /dev/null +++ b/src/simplejson/docs/simplejson/scanner.py.html @@ -0,0 +1,135 @@ +/Users/bob/src/simplejson/simplejson/scanner.py + +
0001"""
+0002Iterator based sre token scanner
+0003"""
+0004import sre_parse, sre_compile, sre_constants
+0005from sre_constants import BRANCH, SUBPATTERN
+0006from re import VERBOSE, MULTILINE, DOTALL
+0007import re
+0008
+0009__all__ = ['Scanner', 'pattern']
+0010
+0011FLAGS = (VERBOSE | MULTILINE | DOTALL)
+0012class Scanner(object):
+0013    def __init__(self, lexicon, flags=FLAGS):
+0014        self.actions = [None]
+0015        # combine phrases into a compound pattern
+0016        s = sre_parse.Pattern()
+0017        s.flags = flags
+0018        p = []
+0019        for idx, token in enumerate(lexicon):
+0020            phrase = token.pattern
+0021            try:
+0022                subpattern = sre_parse.SubPattern(s,
+0023                    [(SUBPATTERN, (idx + 1, sre_parse.parse(phrase, flags)))])
+0024            except sre_constants.error:
+0025                raise
+0026            p.append(subpattern)
+0027            self.actions.append(token)
+0028
+0029        p = sre_parse.SubPattern(s, [(BRANCH, (None, p))])
+0030        self.scanner = sre_compile.compile(p)
+0031
+0032
+0033    def iterscan(self, string, idx=0, context=None):
+0034        """
+0035        Yield match, end_idx for each match
+0036        """
+0037        match = self.scanner.scanner(string, idx).match
+0038        actions = self.actions
+0039        lastend = idx
+0040        end = len(string)
+0041        while True:
+0042            m = match()
+0043            if m is None:
+0044                break
+0045            matchbegin, matchend = m.span()
+0046            if lastend == matchend:
+0047                break
+0048            action = actions[m.lastindex]
+0049            if action is not None:
+0050                rval, next_pos = action(m, context)
+0051                if next_pos is not None and next_pos != matchend:
+0052                    # "fast forward" the scanner
+0053                    matchend = next_pos
+0054                    match = self.scanner.scanner(string, matchend).match
+0055                yield rval, matchend
+0056            lastend = matchend
+0057
+0058def pattern(pattern, flags=FLAGS):
+0059    def decorator(fn):
+0060        fn.pattern = pattern
+0061        fn.regex = re.compile(pattern, flags)
+0062        return fn
+0063    return decorator
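A minimal sketch of how ``pattern`` and ``Scanner`` fit together (the tiny
lexicon below is invented for illustration; the decoder's real lexicon is the
``ANYTHING`` list in ``decoder.py``)::

    from simplejson.scanner import Scanner, pattern

    def Number(match, context):
        # Each action returns (value, next_position); None means the
        # scanner continues from the end of the regex match.
        return int(match.group(0)), None
    pattern(r'-?\d+')(Number)

    scanner = Scanner([Number])
    print list(scanner.iterscan('12-345'))
    # expected: [(12, 2), (-345, 6)]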
\ No newline at end of file diff --git a/src/simplejson/ez_setup/README.txt b/src/simplejson/ez_setup/README.txt new file mode 100644 index 00000000..9287f5a6 --- /dev/null +++ b/src/simplejson/ez_setup/README.txt @@ -0,0 +1,15 @@ +This directory exists so that Subversion-based projects can share a single +copy of the ``ez_setup`` bootstrap module for ``setuptools``, and have it +automatically updated in their projects when ``setuptools`` is updated. + +For your convenience, you may use the following svn:externals definition:: + + ez_setup svn://svn.eby-sarna.com/svnroot/ez_setup + +You can set this by executing this command in your project directory:: + + svn propedit svn:externals . + +And then adding the line shown above to the file that comes up for editing. +Then, whenever you update your project, ``ez_setup`` will be updated as well. + diff --git a/src/simplejson/ez_setup/__init__.py b/src/simplejson/ez_setup/__init__.py new file mode 100644 index 00000000..38c09c62 --- /dev/null +++ b/src/simplejson/ez_setup/__init__.py @@ -0,0 +1,228 @@ +#!python +"""Bootstrap setuptools installation + +If you want to use setuptools in your package's setup.py, just include this +file in the same directory with it, and add this to the top of your setup.py:: + + from ez_setup import use_setuptools + use_setuptools() + +If you want to require a specific version of setuptools, set a download +mirror, or use an alternate download directory, you can do so by supplying +the appropriate options to ``use_setuptools()``. + +This file can also be run as a script to install or upgrade setuptools. +""" +import sys +DEFAULT_VERSION = "0.6c5" +DEFAULT_URL = "http://cheeseshop.python.org/packages/%s/s/setuptools/" % sys.version[:3] + +md5_data = { + 'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca', + 'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb', + 'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b', + 'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a', + 'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618', + 'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac', + 'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5', + 'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4', + 'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c', + 'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b', + 'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27', + 'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277', + 'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa', + 'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e', + 'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e', + 'setuptools-0.6c4-py2.3.egg': 'b0b9131acab32022bfac7f44c5d7971f', + 'setuptools-0.6c4-py2.4.egg': '2a1f9656d4fbf3c97bf946c0a124e6e2', + 'setuptools-0.6c4-py2.5.egg': '8f5a052e32cdb9c72bcf4b5526f28afc', + 'setuptools-0.6c5-py2.3.egg': 'ee9fd80965da04f2f3e6b3576e9d8167', + 'setuptools-0.6c5-py2.4.egg': 'afe2adf1c01701ee841761f5bcd8aa64', + 'setuptools-0.6c5-py2.5.egg': 'a8d3f61494ccaa8714dfed37bccd3d5d', +} + +import sys, os + +def _validate_md5(egg_name, data): + if egg_name in md5_data: + from md5 import md5 + digest = md5(data).hexdigest() + if digest != md5_data[egg_name]: + print >>sys.stderr, ( + "md5 validation of %s failed! 
(Possible download problem?)" + % egg_name + ) + sys.exit(2) + return data + + +def use_setuptools( + version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, + download_delay=15 +): + """Automatically find/download setuptools and make it available on sys.path + + `version` should be a valid setuptools version number that is available + as an egg for download under the `download_base` URL (which should end with + a '/'). `to_dir` is the directory where setuptools will be downloaded, if + it is not already available. If `download_delay` is specified, it should + be the number of seconds that will be paused before initiating a download, + should one be required. If an older version of setuptools is installed, + this routine will print a message to ``sys.stderr`` and raise SystemExit in + an attempt to abort the calling script. + """ + try: + import setuptools + if setuptools.__version__ == '0.0.1': + print >>sys.stderr, ( + "You have an obsolete version of setuptools installed. Please\n" + "remove it from your system entirely before rerunning this script." + ) + sys.exit(2) + except ImportError: + egg = download_setuptools(version, download_base, to_dir, download_delay) + sys.path.insert(0, egg) + import setuptools; setuptools.bootstrap_install_from = egg + + import pkg_resources + try: + pkg_resources.require("setuptools>="+version) + + except pkg_resources.VersionConflict, e: + # XXX could we install in a subprocess here? + print >>sys.stderr, ( + "The required version of setuptools (>=%s) is not available, and\n" + "can't be installed while this script is running. Please install\n" + " a more recent version first.\n\n(Currently using %r)" + ) % (version, e.args[0]) + sys.exit(2) + +def download_setuptools( + version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, + delay = 15 +): + """Download setuptools from a specified location and return its filename + + `version` should be a valid setuptools version number that is available + as an egg for download under the `download_base` URL (which should end + with a '/'). `to_dir` is the directory where the egg will be downloaded. + `delay` is the number of seconds to pause before an actual download attempt. + """ + import urllib2, shutil + egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3]) + url = download_base + egg_name + saveto = os.path.join(to_dir, egg_name) + src = dst = None + if not os.path.exists(saveto): # Avoid repeated downloads + try: + from distutils import log + if delay: + log.warn(""" +--------------------------------------------------------------------------- +This script requires setuptools version %s to run (even to display +help). I will attempt to download it for you (from +%s), but +you may need to enable firewall access for this script first. +I will start the download in %d seconds. + +(Note: if this machine does not have network access, please obtain the file + + %s + +and place it in this directory before rerunning this script.) +---------------------------------------------------------------------------""", + version, download_base, delay, url + ); from time import sleep; sleep(delay) + log.warn("Downloading %s", url) + src = urllib2.urlopen(url) + # Read/write all in one block, so we don't create a corrupt file + # if the download is interrupted. 
+ data = _validate_md5(egg_name, src.read()) + dst = open(saveto,"wb"); dst.write(data) + finally: + if src: src.close() + if dst: dst.close() + return os.path.realpath(saveto) + +def main(argv, version=DEFAULT_VERSION): + """Install or upgrade setuptools and EasyInstall""" + + try: + import setuptools + except ImportError: + egg = None + try: + egg = download_setuptools(version, delay=0) + sys.path.insert(0,egg) + from setuptools.command.easy_install import main + return main(list(argv)+[egg]) # we're done here + finally: + if egg and os.path.exists(egg): + os.unlink(egg) + else: + if setuptools.__version__ == '0.0.1': + # tell the user to uninstall obsolete version + use_setuptools(version) + + req = "setuptools>="+version + import pkg_resources + try: + pkg_resources.require(req) + except pkg_resources.VersionConflict: + try: + from setuptools.command.easy_install import main + except ImportError: + from easy_install import main + main(list(argv)+[download_setuptools(delay=0)]) + sys.exit(0) # try to force an exit + else: + if argv: + from setuptools.command.easy_install import main + main(argv) + else: + print "Setuptools version",version,"or greater has been installed." + print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)' + + + +def update_md5(filenames): + """Update our built-in md5 registry""" + + import re + from md5 import md5 + + for name in filenames: + base = os.path.basename(name) + f = open(name,'rb') + md5_data[base] = md5(f.read()).hexdigest() + f.close() + + data = [" %r: %r,\n" % it for it in md5_data.items()] + data.sort() + repl = "".join(data) + + import inspect + srcfile = inspect.getsourcefile(sys.modules[__name__]) + f = open(srcfile, 'rb'); src = f.read(); f.close() + + match = re.search("\nmd5_data = {\n([^}]+)}", src) + if not match: + print >>sys.stderr, "Internal error!" 
+ sys.exit(2) + + src = src[:match.start(1)] + repl + src[match.end(1):] + f = open(srcfile,'w') + f.write(src) + f.close() + + +if __name__=='__main__': + if len(sys.argv)>2 and sys.argv[1]=='--md5update': + update_md5(sys.argv[2:]) + else: + main(sys.argv[1:]) + + + + + diff --git a/src/simplejson/scripts/bench.sh b/src/simplejson/scripts/bench.sh new file mode 100644 index 00000000..e30e2467 --- /dev/null +++ b/src/simplejson/scripts/bench.sh @@ -0,0 +1,2 @@ +#!/bin/sh +/usr/bin/env python -mtimeit -s 'from simplejson.tests.test_pass1 import test_parse' 'test_parse()' diff --git a/src/simplejson/scripts/make_docs.py b/src/simplejson/scripts/make_docs.py new file mode 100644 index 00000000..2c59032b --- /dev/null +++ b/src/simplejson/scripts/make_docs.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +import os +import subprocess +import shutil + +PROJECT='simplejson' + +def _get_version(): + from pkg_resources import PathMetadata, Distribution + egg_info = PROJECT + '.egg-info' + base_dir = os.path.dirname(egg_info) + metadata = PathMetadata(base_dir, egg_info) + dist_name = os.path.splitext(os.path.basename(egg_info))[0] + dist = Distribution(base_dir, project_name=dist_name, metadata=metadata) + return dist.version +VERSION = _get_version() + +PUDGE = '/Library/Frameworks/Python.framework/Versions/2.4/bin/pudge' +#PUDGE = 'pudge' + +res = subprocess.call([ + PUDGE, '-v', '-d', 'docs', '-m', PROJECT, + '-l', '%s %s' % (PROJECT, VERSION), + '--theme=green' +]) +if not res: + shutil.copyfile('docs/module-simplejson.html', 'docs/index.html') +raise SystemExit(res) diff --git a/src/simplejson/scripts/prof.py b/src/simplejson/scripts/prof.py new file mode 100644 index 00000000..45c7a85e --- /dev/null +++ b/src/simplejson/scripts/prof.py @@ -0,0 +1,9 @@ +#!/usr/bin/env python2.4 +from pkg_resources import require +require('simplejson') + +import profile + +from simplejson.tests.test_pass1 import test_parse + +profile.run("for x in xrange(10): test_parse()") diff --git a/src/simplejson/setup.cfg b/src/simplejson/setup.cfg new file mode 100644 index 00000000..861a9f55 --- /dev/null +++ b/src/simplejson/setup.cfg @@ -0,0 +1,5 @@ +[egg_info] +tag_build = +tag_date = 0 +tag_svn_revision = 0 + diff --git a/src/simplejson/setup.py b/src/simplejson/setup.py new file mode 100644 index 00000000..83d160c0 --- /dev/null +++ b/src/simplejson/setup.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python + +import ez_setup +ez_setup.use_setuptools() + +from setuptools import setup, find_packages, Extension, Feature +from distutils.command.build_ext import build_ext +from distutils.errors import CCompilerError + +VERSION = '1.7.1' +DESCRIPTION = "Simple, fast, extensible JSON encoder/decoder for Python" +LONG_DESCRIPTION = """ +simplejson is a simple, fast, complete, correct and extensible +JSON encoder and decoder for Python 2.3+. It is +pure Python code with no dependencies, but includes an optional C +extension for a serious speed boost. + +simplejson was formerly known as simple_json, but changed its name to +comply with PEP 8 module naming guidelines. + +The encoder may be subclassed to provide serialization in any kind of +situation, without any special support by the objects to be serialized +(somewhat like pickle). + +The decoder can handle incoming JSON strings of any specified encoding +(UTF-8 by default). 
+""" + +CLASSIFIERS = filter(None, map(str.strip, +""" +Intended Audience :: Developers +License :: OSI Approved :: MIT License +Programming Language :: Python +Topic :: Software Development :: Libraries :: Python Modules +""".splitlines())) + + +BUILD_EXT_WARNING=""" +WARNING: The C extension could not be compiled, speedups are not enabled. + +Above is the output showing how the compilation failed. +""" + +class ve_build_ext(build_ext): + # This class allows C extension building to fail. + def build_extension(self, ext): + try: + build_ext.build_extension(self, ext) + except CCompilerError, x: + print ('*'*70+'\n') + print BUILD_EXT_WARNING + print ('*'*70+'\n') + +speedups = Feature( + "options C speed-enhancement modules", + standard=True, + ext_modules = [ + Extension("simplejson._speedups", ["simplejson/_speedups.c"]), + ], +) + +setup( + name="simplejson", + version=VERSION, + description=DESCRIPTION, + long_description=LONG_DESCRIPTION, + classifiers=CLASSIFIERS, + author="Bob Ippolito", + author_email="bob@redivi.com", + url="http://undefined.org/python/#simplejson", + license="MIT License", + packages=find_packages(exclude=['ez_setup']), + platforms=['any'], + test_suite="nose.collector", + zip_safe=True, + entry_points={ + 'paste.filter_app_factory': ['json = simplejson.jsonfilter:factory'], + }, + features={'speedups': speedups}, + cmdclass={'build_ext': ve_build_ext}, +) diff --git a/src/simplejson/simplejson.egg-info/PKG-INFO b/src/simplejson/simplejson.egg-info/PKG-INFO new file mode 100644 index 00000000..c202c5fa --- /dev/null +++ b/src/simplejson/simplejson.egg-info/PKG-INFO @@ -0,0 +1,29 @@ +Metadata-Version: 1.0 +Name: simplejson +Version: 1.7.1 +Summary: Simple, fast, extensible JSON encoder/decoder for Python +Home-page: http://undefined.org/python/#simplejson +Author: Bob Ippolito +Author-email: bob@redivi.com +License: MIT License +Description: + simplejson is a simple, fast, complete, correct and extensible + JSON encoder and decoder for Python 2.3+. It is + pure Python code with no dependencies, but includes an optional C + extension for a serious speed boost. + + simplejson was formerly known as simple_json, but changed its name to + comply with PEP 8 module naming guidelines. + + The encoder may be subclassed to provide serialization in any kind of + situation, without any special support by the objects to be serialized + (somewhat like pickle). + + The decoder can handle incoming JSON strings of any specified encoding + (UTF-8 by default). 
+ +Platform: any +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/src/simplejson/simplejson.egg-info/SOURCES.txt b/src/simplejson/simplejson.egg-info/SOURCES.txt new file mode 100644 index 00000000..7e0139f0 --- /dev/null +++ b/src/simplejson/simplejson.egg-info/SOURCES.txt @@ -0,0 +1,46 @@ +LICENSE.txt +setup.cfg +setup.py +docs/class-simplejson.JSONDecoder.html +docs/class-simplejson.JSONEncoder.html +docs/class_to_source.js +docs/index.html +docs/layout.css +docs/module-index.html +docs/module-simplejson-index.html +docs/module-simplejson.html +docs/pudge.css +docs/rst.css +docs/simplejson/__init__.py.html +docs/simplejson/decoder.py.html +docs/simplejson/encoder.py.html +docs/simplejson/jsonfilter.py.html +docs/simplejson/scanner.py.html +ez_setup/README.txt +ez_setup/__init__.py +scripts/bench.sh +scripts/make_docs.py +scripts/prof.py +simplejson/__init__.py +simplejson/_speedups.c +simplejson/decoder.py +simplejson/encoder.py +simplejson/jsonfilter.py +simplejson/scanner.py +simplejson.egg-info/PKG-INFO +simplejson.egg-info/SOURCES.txt +simplejson.egg-info/dependency_links.txt +simplejson.egg-info/entry_points.txt +simplejson.egg-info/top_level.txt +simplejson.egg-info/zip-safe +simplejson/tests/__init__.py +simplejson/tests/test_attacks.py +simplejson/tests/test_dump.py +simplejson/tests/test_fail.py +simplejson/tests/test_indent.py +simplejson/tests/test_pass1.py +simplejson/tests/test_pass2.py +simplejson/tests/test_pass3.py +simplejson/tests/test_recursion.py +simplejson/tests/test_separators.py +simplejson/tests/test_unicode.py diff --git a/src/simplejson/simplejson.egg-info/dependency_links.txt b/src/simplejson/simplejson.egg-info/dependency_links.txt new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/src/simplejson/simplejson.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/src/simplejson/simplejson.egg-info/entry_points.txt b/src/simplejson/simplejson.egg-info/entry_points.txt new file mode 100644 index 00000000..e08f296b --- /dev/null +++ b/src/simplejson/simplejson.egg-info/entry_points.txt @@ -0,0 +1,3 @@ +[paste.filter_app_factory] +json = simplejson.jsonfilter:factory + diff --git a/src/simplejson/simplejson.egg-info/top_level.txt b/src/simplejson/simplejson.egg-info/top_level.txt new file mode 100644 index 00000000..322630ee --- /dev/null +++ b/src/simplejson/simplejson.egg-info/top_level.txt @@ -0,0 +1 @@ +simplejson diff --git a/src/simplejson/simplejson.egg-info/zip-safe b/src/simplejson/simplejson.egg-info/zip-safe new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/src/simplejson/simplejson.egg-info/zip-safe @@ -0,0 +1 @@ + diff --git a/src/simplejson/simplejson/__init__.py b/src/simplejson/simplejson/__init__.py new file mode 100644 index 00000000..8dae51a7 --- /dev/null +++ b/src/simplejson/simplejson/__init__.py @@ -0,0 +1,287 @@ +r""" +A simple, fast, extensible JSON encoder and decoder + +JSON (JavaScript Object Notation) is a subset of +JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data +interchange format. + +simplejson exposes an API familiar to uses of the standard library +marshal and pickle modules. 
+ +Encoding basic Python object hierarchies:: + + >>> import simplejson + >>> simplejson.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}]) + '["foo", {"bar": ["baz", null, 1.0, 2]}]' + >>> print simplejson.dumps("\"foo\bar") + "\"foo\bar" + >>> print simplejson.dumps(u'\u1234') + "\u1234" + >>> print simplejson.dumps('\\') + "\\" + >>> print simplejson.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True) + {"a": 0, "b": 0, "c": 0} + >>> from StringIO import StringIO + >>> io = StringIO() + >>> simplejson.dump(['streaming API'], io) + >>> io.getvalue() + '["streaming API"]' + +Compact encoding:: + + >>> import simplejson + >>> simplejson.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':')) + '[1,2,3,{"4":5,"6":7}]' + +Pretty printing:: + + >>> import simplejson + >>> print simplejson.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4) + { + "4": 5, + "6": 7 + } + +Decoding JSON:: + + >>> import simplejson + >>> simplejson.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') + [u'foo', {u'bar': [u'baz', None, 1.0, 2]}] + >>> simplejson.loads('"\\"foo\\bar"') + u'"foo\x08ar' + >>> from StringIO import StringIO + >>> io = StringIO('["streaming API"]') + >>> simplejson.load(io) + [u'streaming API'] + +Specializing JSON object decoding:: + + >>> import simplejson + >>> def as_complex(dct): + ... if '__complex__' in dct: + ... return complex(dct['real'], dct['imag']) + ... return dct + ... + >>> simplejson.loads('{"__complex__": true, "real": 1, "imag": 2}', + ... object_hook=as_complex) + (1+2j) + +Extending JSONEncoder:: + + >>> import simplejson + >>> class ComplexEncoder(simplejson.JSONEncoder): + ... def default(self, obj): + ... if isinstance(obj, complex): + ... return [obj.real, obj.imag] + ... return simplejson.JSONEncoder.default(self, obj) + ... + >>> dumps(2 + 1j, cls=ComplexEncoder) + '[2.0, 1.0]' + >>> ComplexEncoder().encode(2 + 1j) + '[2.0, 1.0]' + >>> list(ComplexEncoder().iterencode(2 + 1j)) + ['[', '2.0', ', ', '1.0', ']'] + + +Note that the JSON produced by this module's default settings +is a subset of YAML, so it may be used as a serializer for that as well. +""" +__version__ = '1.7.1' +__all__ = [ + 'dump', 'dumps', 'load', 'loads', + 'JSONDecoder', 'JSONEncoder', +] + +from decoder import JSONDecoder +from encoder import JSONEncoder + +_default_encoder = JSONEncoder( + skipkeys=False, + ensure_ascii=True, + check_circular=True, + allow_nan=True, + indent=None, + separators=None, + encoding='utf-8' +) + +def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, + allow_nan=True, cls=None, indent=None, separators=None, + encoding='utf-8', **kw): + """ + Serialize ``obj`` as a JSON formatted stream to ``fp`` (a + ``.write()``-supporting file-like object). + + If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types + (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) + will be skipped instead of raising a ``TypeError``. + + If ``ensure_ascii`` is ``False``, then the some chunks written to ``fp`` + may be ``unicode`` instances, subject to normal Python ``str`` to + ``unicode`` coercion rules. Unless ``fp.write()`` explicitly + understands ``unicode`` (as in ``codecs.getwriter()``) this is likely + to cause an error. + + If ``check_circular`` is ``False``, then the circular reference check + for container types will be skipped and a circular reference will + result in an ``OverflowError`` (or worse). 
+ + If ``allow_nan`` is ``False``, then it will be a ``ValueError`` to + serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) + in strict compliance of the JSON specification, instead of using the + JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). + + If ``indent`` is a non-negative integer, then JSON array elements and object + members will be pretty-printed with that indent level. An indent level + of 0 will only insert newlines. ``None`` is the most compact representation. + + If ``separators`` is an ``(item_separator, dict_separator)`` tuple + then it will be used instead of the default ``(', ', ': ')`` separators. + ``(',', ':')`` is the most compact JSON representation. + + ``encoding`` is the character encoding for str instances, default is UTF-8. + + To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the + ``.default()`` method to serialize additional types), specify it with + the ``cls`` kwarg. + """ + # cached encoder + if (skipkeys is False and ensure_ascii is True and + check_circular is True and allow_nan is True and + cls is None and indent is None and separators is None and + encoding == 'utf-8' and not kw): + iterable = _default_encoder.iterencode(obj) + else: + if cls is None: + cls = JSONEncoder + iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii, + check_circular=check_circular, allow_nan=allow_nan, indent=indent, + separators=separators, encoding=encoding, **kw).iterencode(obj) + # could accelerate with writelines in some versions of Python, at + # a debuggability cost + for chunk in iterable: + fp.write(chunk) + + +def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, + allow_nan=True, cls=None, indent=None, separators=None, + encoding='utf-8', **kw): + """ + Serialize ``obj`` to a JSON formatted ``str``. + + If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types + (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) + will be skipped instead of raising a ``TypeError``. + + If ``ensure_ascii`` is ``False``, then the return value will be a + ``unicode`` instance subject to normal Python ``str`` to ``unicode`` + coercion rules instead of being escaped to an ASCII ``str``. + + If ``check_circular`` is ``False``, then the circular reference check + for container types will be skipped and a circular reference will + result in an ``OverflowError`` (or worse). + + If ``allow_nan`` is ``False``, then it will be a ``ValueError`` to + serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in + strict compliance of the JSON specification, instead of using the + JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). + + If ``indent`` is a non-negative integer, then JSON array elements and + object members will be pretty-printed with that indent level. An indent + level of 0 will only insert newlines. ``None`` is the most compact + representation. + + If ``separators`` is an ``(item_separator, dict_separator)`` tuple + then it will be used instead of the default ``(', ', ': ')`` separators. + ``(',', ':')`` is the most compact JSON representation. + + ``encoding`` is the character encoding for str instances, default is UTF-8. + + To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the + ``.default()`` method to serialize additional types), specify it with + the ``cls`` kwarg. 
+ """ + # cached encoder + if (skipkeys is False and ensure_ascii is True and + check_circular is True and allow_nan is True and + cls is None and indent is None and separators is None and + encoding == 'utf-8' and not kw): + return _default_encoder.encode(obj) + if cls is None: + cls = JSONEncoder + return cls( + skipkeys=skipkeys, ensure_ascii=ensure_ascii, + check_circular=check_circular, allow_nan=allow_nan, indent=indent, + separators=separators, encoding=encoding, + **kw).encode(obj) + +_default_decoder = JSONDecoder(encoding=None, object_hook=None) + +def load(fp, encoding=None, cls=None, object_hook=None, **kw): + """ + Deserialize ``fp`` (a ``.read()``-supporting file-like object containing + a JSON document) to a Python object. + + If the contents of ``fp`` is encoded with an ASCII based encoding other + than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must + be specified. Encodings that are not ASCII based (such as UCS-2) are + not allowed, and should be wrapped with + ``codecs.getreader(fp)(encoding)``, or simply decoded to a ``unicode`` + object and passed to ``loads()`` + + ``object_hook`` is an optional function that will be called with the + result of any object literal decode (a ``dict``). The return value of + ``object_hook`` will be used instead of the ``dict``. This feature + can be used to implement custom decoders (e.g. JSON-RPC class hinting). + + To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` + kwarg. + """ + return loads(fp.read(), + encoding=encoding, cls=cls, object_hook=object_hook, **kw) + +def loads(s, encoding=None, cls=None, object_hook=None, **kw): + """ + Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON + document) to a Python object. + + If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding + other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name + must be specified. Encodings that are not ASCII based (such as UCS-2) + are not allowed and should be decoded to ``unicode`` first. + + ``object_hook`` is an optional function that will be called with the + result of any object literal decode (a ``dict``). The return value of + ``object_hook`` will be used instead of the ``dict``. This feature + can be used to implement custom decoders (e.g. JSON-RPC class hinting). + + To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` + kwarg. + """ + if cls is None and encoding is None and object_hook is None and not kw: + return _default_decoder.decode(s) + if cls is None: + cls = JSONDecoder + if object_hook is not None: + kw['object_hook'] = object_hook + return cls(encoding=encoding, **kw).decode(s) + +def read(s): + """ + json-py API compatibility hook. Use loads(s) instead. + """ + import warnings + warnings.warn("simplejson.loads(s) should be used instead of read(s)", + DeprecationWarning) + return loads(s) + +def write(obj): + """ + json-py API compatibility hook. Use dumps(s) instead. 
+ """ + import warnings + warnings.warn("simplejson.dumps(s) should be used instead of write(s)", + DeprecationWarning) + return dumps(obj) + + diff --git a/src/simplejson/simplejson/_speedups.c b/src/simplejson/simplejson/_speedups.c new file mode 100644 index 00000000..8f290bb4 --- /dev/null +++ b/src/simplejson/simplejson/_speedups.c @@ -0,0 +1,215 @@ +#include "Python.h" +#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN) +typedef int Py_ssize_t; +#define PY_SSIZE_T_MAX INT_MAX +#define PY_SSIZE_T_MIN INT_MIN +#endif + +static Py_ssize_t +ascii_escape_char(Py_UNICODE c, char *output, Py_ssize_t chars); +static PyObject * +ascii_escape_unicode(PyObject *pystr); +static PyObject * +ascii_escape_str(PyObject *pystr); +static PyObject * +py_encode_basestring_ascii(PyObject* self __attribute__((__unused__)), PyObject *pystr); +void init_speedups(void); + +#define S_CHAR(c) (c >= ' ' && c <= '~' && c != '\\' && c != '/' && c != '"') + +#define MIN_EXPANSION 6 +#ifdef Py_UNICODE_WIDE +#define MAX_EXPANSION (2 * MIN_EXPANSION) +#else +#define MAX_EXPANSION MIN_EXPANSION +#endif + +static Py_ssize_t +ascii_escape_char(Py_UNICODE c, char *output, Py_ssize_t chars) { + Py_UNICODE x; + output[chars++] = '\\'; + switch (c) { + case '/': output[chars++] = (char)c; break; + case '\\': output[chars++] = (char)c; break; + case '"': output[chars++] = (char)c; break; + case '\b': output[chars++] = 'b'; break; + case '\f': output[chars++] = 'f'; break; + case '\n': output[chars++] = 'n'; break; + case '\r': output[chars++] = 'r'; break; + case '\t': output[chars++] = 't'; break; + default: +#ifdef Py_UNICODE_WIDE + if (c >= 0x10000) { + /* UTF-16 surrogate pair */ + Py_UNICODE v = c - 0x10000; + c = 0xd800 | ((v >> 10) & 0x3ff); + output[chars++] = 'u'; + x = (c & 0xf000) >> 12; + output[chars++] = (x < 10) ? '0' + x : 'a' + (x - 10); + x = (c & 0x0f00) >> 8; + output[chars++] = (x < 10) ? '0' + x : 'a' + (x - 10); + x = (c & 0x00f0) >> 4; + output[chars++] = (x < 10) ? '0' + x : 'a' + (x - 10); + x = (c & 0x000f); + output[chars++] = (x < 10) ? '0' + x : 'a' + (x - 10); + c = 0xdc00 | (v & 0x3ff); + output[chars++] = '\\'; + } +#endif + output[chars++] = 'u'; + x = (c & 0xf000) >> 12; + output[chars++] = (x < 10) ? '0' + x : 'a' + (x - 10); + x = (c & 0x0f00) >> 8; + output[chars++] = (x < 10) ? '0' + x : 'a' + (x - 10); + x = (c & 0x00f0) >> 4; + output[chars++] = (x < 10) ? '0' + x : 'a' + (x - 10); + x = (c & 0x000f); + output[chars++] = (x < 10) ? 
'0' + x : 'a' + (x - 10); + } + return chars; +} + +static PyObject * +ascii_escape_unicode(PyObject *pystr) { + Py_ssize_t i; + Py_ssize_t input_chars; + Py_ssize_t output_size; + Py_ssize_t chars; + PyObject *rval; + char *output; + Py_UNICODE *input_unicode; + + input_chars = PyUnicode_GET_SIZE(pystr); + input_unicode = PyUnicode_AS_UNICODE(pystr); + /* One char input can be up to 6 chars output, estimate 4 of these */ + output_size = 2 + (MIN_EXPANSION * 4) + input_chars; + rval = PyString_FromStringAndSize(NULL, output_size); + if (rval == NULL) { + return NULL; + } + output = PyString_AS_STRING(rval); + chars = 0; + output[chars++] = '"'; + for (i = 0; i < input_chars; i++) { + Py_UNICODE c = input_unicode[i]; + if (S_CHAR(c)) { + output[chars++] = (char)c; + } else { + chars = ascii_escape_char(c, output, chars); + } + if (output_size - chars < (1 + MAX_EXPANSION)) { + /* There's more than four, so let's resize by a lot */ + output_size *= 2; + /* This is an upper bound */ + if (output_size > 2 + (input_chars * MAX_EXPANSION)) { + output_size = 2 + (input_chars * MAX_EXPANSION); + } + if (_PyString_Resize(&rval, output_size) == -1) { + return NULL; + } + output = PyString_AS_STRING(rval); + } + } + output[chars++] = '"'; + if (_PyString_Resize(&rval, chars) == -1) { + return NULL; + } + return rval; +} + +static PyObject * +ascii_escape_str(PyObject *pystr) { + Py_ssize_t i; + Py_ssize_t input_chars; + Py_ssize_t output_size; + Py_ssize_t chars; + PyObject *rval; + char *output; + char *input_str; + + input_chars = PyString_GET_SIZE(pystr); + input_str = PyString_AS_STRING(pystr); + /* One char input can be up to 6 chars output, estimate 4 of these */ + output_size = 2 + (MIN_EXPANSION * 4) + input_chars; + rval = PyString_FromStringAndSize(NULL, output_size); + if (rval == NULL) { + return NULL; + } + output = PyString_AS_STRING(rval); + chars = 0; + output[chars++] = '"'; + for (i = 0; i < input_chars; i++) { + Py_UNICODE c = (Py_UNICODE)input_str[i]; + if (S_CHAR(c)) { + output[chars++] = (char)c; + } else if (c > 0x7F) { + /* We hit a non-ASCII character, bail to unicode mode */ + PyObject *uni; + Py_DECREF(rval); + uni = PyUnicode_DecodeUTF8(input_str, input_chars, "strict"); + if (uni == NULL) { + return NULL; + } + rval = ascii_escape_unicode(uni); + Py_DECREF(uni); + return rval; + } else { + chars = ascii_escape_char(c, output, chars); + } + /* An ASCII char can't possibly expand to a surrogate! */ + if (output_size - chars < (1 + MIN_EXPANSION)) { + /* There's more than four, so let's resize by a lot */ + output_size *= 2; + if (output_size > 2 + (input_chars * MIN_EXPANSION)) { + output_size = 2 + (input_chars * MIN_EXPANSION); + } + if (_PyString_Resize(&rval, output_size) == -1) { + return NULL; + } + output = PyString_AS_STRING(rval); + } + } + output[chars++] = '"'; + if (_PyString_Resize(&rval, chars) == -1) { + return NULL; + } + return rval; +} + +PyDoc_STRVAR(pydoc_encode_basestring_ascii, + "encode_basestring_ascii(basestring) -> str\n" + "\n" + "..." 
+); + +static PyObject * +py_encode_basestring_ascii(PyObject* self __attribute__((__unused__)), PyObject *pystr) { + /* METH_O */ + if (PyString_Check(pystr)) { + return ascii_escape_str(pystr); + } else if (PyUnicode_Check(pystr)) { + return ascii_escape_unicode(pystr); + } + PyErr_SetString(PyExc_TypeError, "first argument must be a string"); + return NULL; +} + +#define DEFN(n, k) \ + { \ + #n, \ + (PyCFunction)py_ ##n, \ + k, \ + pydoc_ ##n \ + } +static PyMethodDef speedups_methods[] = { + DEFN(encode_basestring_ascii, METH_O), + {} +}; +#undef DEFN + +void +init_speedups(void) +{ + PyObject *m; + m = Py_InitModule4("_speedups", speedups_methods, NULL, NULL, PYTHON_API_VERSION); +} diff --git a/src/simplejson/simplejson/decoder.py b/src/simplejson/simplejson/decoder.py new file mode 100644 index 00000000..a1b53b2a --- /dev/null +++ b/src/simplejson/simplejson/decoder.py @@ -0,0 +1,273 @@ +""" +Implementation of JSONDecoder +""" +import re + +from simplejson.scanner import Scanner, pattern + +FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL + +def _floatconstants(): + import struct + import sys + _BYTES = '7FF80000000000007FF0000000000000'.decode('hex') + if sys.byteorder != 'big': + _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1] + nan, inf = struct.unpack('dd', _BYTES) + return nan, inf, -inf + +NaN, PosInf, NegInf = _floatconstants() + +def linecol(doc, pos): + lineno = doc.count('\n', 0, pos) + 1 + if lineno == 1: + colno = pos + else: + colno = pos - doc.rindex('\n', 0, pos) + return lineno, colno + +def errmsg(msg, doc, pos, end=None): + lineno, colno = linecol(doc, pos) + if end is None: + return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos) + endlineno, endcolno = linecol(doc, end) + return '%s: line %d column %d - line %d column %d (char %d - %d)' % ( + msg, lineno, colno, endlineno, endcolno, pos, end) + +_CONSTANTS = { + '-Infinity': NegInf, + 'Infinity': PosInf, + 'NaN': NaN, + 'true': True, + 'false': False, + 'null': None, +} + +def JSONConstant(match, context, c=_CONSTANTS): + return c[match.group(0)], None +pattern('(-?Infinity|NaN|true|false|null)')(JSONConstant) + +def JSONNumber(match, context): + match = JSONNumber.regex.match(match.string, *match.span()) + integer, frac, exp = match.groups() + if frac or exp: + res = float(integer + (frac or '') + (exp or '')) + else: + res = int(integer) + return res, None +pattern(r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?')(JSONNumber) + +STRINGCHUNK = re.compile(r'(.*?)(["\\])', FLAGS) +BACKSLASH = { + '"': u'"', '\\': u'\\', '/': u'/', + 'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t', +} + +DEFAULT_ENCODING = "utf-8" + +def scanstring(s, end, encoding=None, _b=BACKSLASH, _m=STRINGCHUNK.match): + if encoding is None: + encoding = DEFAULT_ENCODING + chunks = [] + _append = chunks.append + begin = end - 1 + while 1: + chunk = _m(s, end) + if chunk is None: + raise ValueError( + errmsg("Unterminated string starting at", s, begin)) + end = chunk.end() + content, terminator = chunk.groups() + if content: + if not isinstance(content, unicode): + content = unicode(content, encoding) + _append(content) + if terminator == '"': + break + try: + esc = s[end] + except IndexError: + raise ValueError( + errmsg("Unterminated string starting at", s, begin)) + if esc != 'u': + try: + m = _b[esc] + except KeyError: + raise ValueError( + errmsg("Invalid \\escape: %r" % (esc,), s, end)) + end += 1 + else: + esc = s[end + 1:end + 5] + try: + m = unichr(int(esc, 16)) + if len(esc) != 4 or not esc.isalnum(): + raise ValueError 
+ except ValueError: + raise ValueError(errmsg("Invalid \\uXXXX escape", s, end)) + end += 5 + _append(m) + return u''.join(chunks), end + +def JSONString(match, context): + encoding = getattr(context, 'encoding', None) + return scanstring(match.string, match.end(), encoding) +pattern(r'"')(JSONString) + +WHITESPACE = re.compile(r'\s*', FLAGS) + +def JSONObject(match, context, _w=WHITESPACE.match): + pairs = {} + s = match.string + end = _w(s, match.end()).end() + nextchar = s[end:end + 1] + # trivial empty object + if nextchar == '}': + return pairs, end + 1 + if nextchar != '"': + raise ValueError(errmsg("Expecting property name", s, end)) + end += 1 + encoding = getattr(context, 'encoding', None) + iterscan = JSONScanner.iterscan + while True: + key, end = scanstring(s, end, encoding) + end = _w(s, end).end() + if s[end:end + 1] != ':': + raise ValueError(errmsg("Expecting : delimiter", s, end)) + end = _w(s, end + 1).end() + try: + value, end = iterscan(s, idx=end, context=context).next() + except StopIteration: + raise ValueError(errmsg("Expecting object", s, end)) + pairs[key] = value + end = _w(s, end).end() + nextchar = s[end:end + 1] + end += 1 + if nextchar == '}': + break + if nextchar != ',': + raise ValueError(errmsg("Expecting , delimiter", s, end - 1)) + end = _w(s, end).end() + nextchar = s[end:end + 1] + end += 1 + if nextchar != '"': + raise ValueError(errmsg("Expecting property name", s, end - 1)) + object_hook = getattr(context, 'object_hook', None) + if object_hook is not None: + pairs = object_hook(pairs) + return pairs, end +pattern(r'{')(JSONObject) + +def JSONArray(match, context, _w=WHITESPACE.match): + values = [] + s = match.string + end = _w(s, match.end()).end() + # look-ahead for trivial empty array + nextchar = s[end:end + 1] + if nextchar == ']': + return values, end + 1 + iterscan = JSONScanner.iterscan + while True: + try: + value, end = iterscan(s, idx=end, context=context).next() + except StopIteration: + raise ValueError(errmsg("Expecting object", s, end)) + values.append(value) + end = _w(s, end).end() + nextchar = s[end:end + 1] + end += 1 + if nextchar == ']': + break + if nextchar != ',': + raise ValueError(errmsg("Expecting , delimiter", s, end)) + end = _w(s, end).end() + return values, end +pattern(r'\[')(JSONArray) + +ANYTHING = [ + JSONObject, + JSONArray, + JSONString, + JSONConstant, + JSONNumber, +] + +JSONScanner = Scanner(ANYTHING) + +class JSONDecoder(object): + """ + Simple JSON decoder + + Performs the following translations in decoding: + + +---------------+-------------------+ + | JSON | Python | + +===============+===================+ + | object | dict | + +---------------+-------------------+ + | array | list | + +---------------+-------------------+ + | string | unicode | + +---------------+-------------------+ + | number (int) | int, long | + +---------------+-------------------+ + | number (real) | float | + +---------------+-------------------+ + | true | True | + +---------------+-------------------+ + | false | False | + +---------------+-------------------+ + | null | None | + +---------------+-------------------+ + + It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as + their corresponding ``float`` values, which is outside the JSON spec. + """ + + _scanner = Scanner(ANYTHING) + __all__ = ['__init__', 'decode', 'raw_decode'] + + def __init__(self, encoding=None, object_hook=None): + """ + ``encoding`` determines the encoding used to interpret any ``str`` + objects decoded by this instance (utf-8 by default). 
It has no + effect when decoding ``unicode`` objects. + + Note that currently only encodings that are a superset of ASCII work, + strings of other encodings should be passed in as ``unicode``. + + ``object_hook``, if specified, will be called with the result + of every JSON object decoded and its return value will be used in + place of the given ``dict``. This can be used to provide custom + deserializations (e.g. to support JSON-RPC class hinting). + """ + self.encoding = encoding + self.object_hook = object_hook + + def decode(self, s, _w=WHITESPACE.match): + """ + Return the Python representation of ``s`` (a ``str`` or ``unicode`` + instance containing a JSON document) + """ + obj, end = self.raw_decode(s, idx=_w(s, 0).end()) + end = _w(s, end).end() + if end != len(s): + raise ValueError(errmsg("Extra data", s, end, len(s))) + return obj + + def raw_decode(self, s, **kw): + """ + Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning + with a JSON document) and return a 2-tuple of the Python + representation and the index in ``s`` where the document ended. + + This can be used to decode a JSON document from a string that may + have extraneous data at the end. + """ + kw.setdefault('context', self) + try: + obj, end = self._scanner.iterscan(s, **kw).next() + except StopIteration: + raise ValueError("No JSON object could be decoded") + return obj, end + +__all__ = ['JSONDecoder'] diff --git a/src/simplejson/simplejson/encoder.py b/src/simplejson/simplejson/encoder.py new file mode 100644 index 00000000..eec9c7ff --- /dev/null +++ b/src/simplejson/simplejson/encoder.py @@ -0,0 +1,371 @@ +""" +Implementation of JSONEncoder +""" +import re +try: + from simplejson import _speedups +except ImportError: + _speedups = None + +ESCAPE = re.compile(r'[\x00-\x19\\"\b\f\n\r\t]') +ESCAPE_ASCII = re.compile(r'([\\"/]|[^\ -~])') +ESCAPE_DCT = { + # escape all forward slashes to prevent attack + '/': '\\/', + '\\': '\\\\', + '"': '\\"', + '\b': '\\b', + '\f': '\\f', + '\n': '\\n', + '\r': '\\r', + '\t': '\\t', +} +for i in range(0x20): + ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,)) + +# assume this produces an infinity on all machines (probably not guaranteed) +INFINITY = float('1e66666') + +def floatstr(o, allow_nan=True): + # Check for specials. Note that this type of test is processor- and/or + # platform-specific, so do tests which don't depend on the internals. + + if o != o: + text = 'NaN' + elif o == INFINITY: + text = 'Infinity' + elif o == -INFINITY: + text = '-Infinity' + else: + return str(o) + + if not allow_nan: + raise ValueError("Out of range float values are not JSON compliant: %r" + % (o,)) + + return text + + +def encode_basestring(s): + """ + Return a JSON representation of a Python string + """ + def replace(match): + return ESCAPE_DCT[match.group(0)] + return '"' + ESCAPE.sub(replace, s) + '"' + +def encode_basestring_ascii(s): + def replace(match): + s = match.group(0) + try: + return ESCAPE_DCT[s] + except KeyError: + n = ord(s) + if n < 0x10000: + return '\\u%04x' % (n,) + else: + # surrogate pair + n -= 0x10000 + s1 = 0xd800 | ((n >> 10) & 0x3ff) + s2 = 0xdc00 | (n & 0x3ff) + return '\\u%04x\\u%04x' % (s1, s2) + return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"' + +try: + encode_basestring_ascii = _speedups.encode_basestring_ascii + _need_utf8 = True +except AttributeError: + _need_utf8 = False + +class JSONEncoder(object): + """ + Extensible JSON encoder for Python data structures. 
+ + Supports the following objects and types by default: + + +-------------------+---------------+ + | Python | JSON | + +===================+===============+ + | dict | object | + +-------------------+---------------+ + | list, tuple | array | + +-------------------+---------------+ + | str, unicode | string | + +-------------------+---------------+ + | int, long, float | number | + +-------------------+---------------+ + | True | true | + +-------------------+---------------+ + | False | false | + +-------------------+---------------+ + | None | null | + +-------------------+---------------+ + + To extend this to recognize other objects, subclass and implement a + ``.default()`` method with another method that returns a serializable + object for ``o`` if possible, otherwise it should call the superclass + implementation (to raise ``TypeError``). + """ + __all__ = ['__init__', 'default', 'encode', 'iterencode'] + item_separator = ', ' + key_separator = ': ' + def __init__(self, skipkeys=False, ensure_ascii=True, + check_circular=True, allow_nan=True, sort_keys=False, + indent=None, separators=None, encoding='utf-8'): + """ + Constructor for JSONEncoder, with sensible defaults. + + If skipkeys is False, then it is a TypeError to attempt + encoding of keys that are not str, int, long, float or None. If + skipkeys is True, such items are simply skipped. + + If ensure_ascii is True, the output is guaranteed to be str + objects with all incoming unicode characters escaped. If + ensure_ascii is false, the output will be unicode object. + + If check_circular is True, then lists, dicts, and custom encoded + objects will be checked for circular references during encoding to + prevent an infinite recursion (which would cause an OverflowError). + Otherwise, no such check takes place. + + If allow_nan is True, then NaN, Infinity, and -Infinity will be + encoded as such. This behavior is not JSON specification compliant, + but is consistent with most JavaScript based encoders and decoders. + Otherwise, it will be a ValueError to encode such floats. + + If sort_keys is True, then the output of dictionaries will be + sorted by key; this is useful for regression tests to ensure + that JSON serializations can be compared on a day-to-day basis. + + If indent is a non-negative integer, then JSON array + elements and object members will be pretty-printed with that + indent level. An indent level of 0 will only insert newlines. + None is the most compact representation. + + If specified, separators should be a (item_separator, key_separator) + tuple. The default is (', ', ': '). To get the most compact JSON + representation you should specify (',', ':') to eliminate whitespace. + + If encoding is not None, then all input strings will be + transformed into unicode using that encoding prior to JSON-encoding. + The default is UTF-8. 
+ """ + + self.skipkeys = skipkeys + self.ensure_ascii = ensure_ascii + self.check_circular = check_circular + self.allow_nan = allow_nan + self.sort_keys = sort_keys + self.indent = indent + self.current_indent_level = 0 + if separators is not None: + self.item_separator, self.key_separator = separators + self.encoding = encoding + + def _newline_indent(self): + return '\n' + (' ' * (self.indent * self.current_indent_level)) + + def _iterencode_list(self, lst, markers=None): + if not lst: + yield '[]' + return + if markers is not None: + markerid = id(lst) + if markerid in markers: + raise ValueError("Circular reference detected") + markers[markerid] = lst + yield '[' + if self.indent is not None: + self.current_indent_level += 1 + newline_indent = self._newline_indent() + separator = self.item_separator + newline_indent + yield newline_indent + else: + newline_indent = None + separator = self.item_separator + first = True + for value in lst: + if first: + first = False + else: + yield separator + for chunk in self._iterencode(value, markers): + yield chunk + if newline_indent is not None: + self.current_indent_level -= 1 + yield self._newline_indent() + yield ']' + if markers is not None: + del markers[markerid] + + def _iterencode_dict(self, dct, markers=None): + if not dct: + yield '{}' + return + if markers is not None: + markerid = id(dct) + if markerid in markers: + raise ValueError("Circular reference detected") + markers[markerid] = dct + yield '{' + key_separator = self.key_separator + if self.indent is not None: + self.current_indent_level += 1 + newline_indent = self._newline_indent() + item_separator = self.item_separator + newline_indent + yield newline_indent + else: + newline_indent = None + item_separator = self.item_separator + first = True + if self.ensure_ascii: + encoder = encode_basestring_ascii + else: + encoder = encode_basestring + allow_nan = self.allow_nan + if self.sort_keys: + keys = dct.keys() + keys.sort() + items = [(k, dct[k]) for k in keys] + else: + items = dct.iteritems() + _encoding = self.encoding + _do_decode = (_encoding is not None + and not (_need_utf8 and _encoding == 'utf-8')) + for key, value in items: + if isinstance(key, str): + if _do_decode: + key = key.decode(_encoding) + elif isinstance(key, basestring): + pass + # JavaScript is weakly typed for these, so it makes sense to + # also allow them. Many encoders seem to do something like this. 
+ elif isinstance(key, float): + key = floatstr(key, allow_nan) + elif isinstance(key, (int, long)): + key = str(key) + elif key is True: + key = 'true' + elif key is False: + key = 'false' + elif key is None: + key = 'null' + elif self.skipkeys: + continue + else: + raise TypeError("key %r is not a string" % (key,)) + if first: + first = False + else: + yield item_separator + yield encoder(key) + yield key_separator + for chunk in self._iterencode(value, markers): + yield chunk + if newline_indent is not None: + self.current_indent_level -= 1 + yield self._newline_indent() + yield '}' + if markers is not None: + del markers[markerid] + + def _iterencode(self, o, markers=None): + if isinstance(o, basestring): + if self.ensure_ascii: + encoder = encode_basestring_ascii + else: + encoder = encode_basestring + _encoding = self.encoding + if (_encoding is not None and isinstance(o, str) + and not (_need_utf8 and _encoding == 'utf-8')): + o = o.decode(_encoding) + yield encoder(o) + elif o is None: + yield 'null' + elif o is True: + yield 'true' + elif o is False: + yield 'false' + elif isinstance(o, (int, long)): + yield str(o) + elif isinstance(o, float): + yield floatstr(o, self.allow_nan) + elif isinstance(o, (list, tuple)): + for chunk in self._iterencode_list(o, markers): + yield chunk + elif isinstance(o, dict): + for chunk in self._iterencode_dict(o, markers): + yield chunk + else: + if markers is not None: + markerid = id(o) + if markerid in markers: + raise ValueError("Circular reference detected") + markers[markerid] = o + for chunk in self._iterencode_default(o, markers): + yield chunk + if markers is not None: + del markers[markerid] + + def _iterencode_default(self, o, markers=None): + newobj = self.default(o) + return self._iterencode(newobj, markers) + + def default(self, o): + """ + Implement this method in a subclass such that it returns + a serializable object for ``o``, or calls the base implementation + (to raise a ``TypeError``). + + For example, to support arbitrary iterators, you could + implement default like this:: + + def default(self, o): + try: + iterable = iter(o) + except TypeError: + pass + else: + return list(iterable) + return JSONEncoder.default(self, o) + """ + raise TypeError("%r is not JSON serializable" % (o,)) + + def encode(self, o): + """ + Return a JSON string representation of a Python data structure. + + >>> JSONEncoder().encode({"foo": ["bar", "baz"]}) + '{"foo":["bar", "baz"]}' + """ + # This is for extremely simple cases and benchmarks... + if isinstance(o, basestring): + if isinstance(o, str): + _encoding = self.encoding + if (_encoding is not None + and not (_encoding == 'utf-8' and _need_utf8)): + o = o.decode(_encoding) + return encode_basestring_ascii(o) + # This doesn't pass the iterator directly to ''.join() because it + # sucks at reporting exceptions. It's going to do this internally + # anyway because it uses PySequence_Fast or similar. + chunks = list(self.iterencode(o)) + return ''.join(chunks) + + def iterencode(self, o): + """ + Encode the given object and yield each string + representation as available. 
+ + For example:: + + for chunk in JSONEncoder().iterencode(bigobject): + mysocket.write(chunk) + """ + if self.check_circular: + markers = {} + else: + markers = None + return self._iterencode(o, markers) + +__all__ = ['JSONEncoder'] diff --git a/src/simplejson/simplejson/jsonfilter.py b/src/simplejson/simplejson/jsonfilter.py new file mode 100644 index 00000000..01ca21df --- /dev/null +++ b/src/simplejson/simplejson/jsonfilter.py @@ -0,0 +1,40 @@ +import simplejson +import cgi + +class JSONFilter(object): + def __init__(self, app, mime_type='text/x-json'): + self.app = app + self.mime_type = mime_type + + def __call__(self, environ, start_response): + # Read JSON POST input to jsonfilter.json if matching mime type + response = {'status': '200 OK', 'headers': []} + def json_start_response(status, headers): + response['status'] = status + response['headers'].extend(headers) + environ['jsonfilter.mime_type'] = self.mime_type + if environ.get('REQUEST_METHOD', '') == 'POST': + if environ.get('CONTENT_TYPE', '') == self.mime_type: + args = [_ for _ in [environ.get('CONTENT_LENGTH')] if _] + data = environ['wsgi.input'].read(*map(int, args)) + environ['jsonfilter.json'] = simplejson.loads(data) + res = simplejson.dumps(self.app(environ, json_start_response)) + jsonp = cgi.parse_qs(environ.get('QUERY_STRING', '')).get('jsonp') + if jsonp: + content_type = 'text/javascript' + res = ''.join(jsonp + ['(', res, ')']) + elif 'Opera' in environ.get('HTTP_USER_AGENT', ''): + # Opera has bunk XMLHttpRequest support for most mime types + content_type = 'text/plain' + else: + content_type = self.mime_type + headers = [ + ('Content-type', content_type), + ('Content-length', len(res)), + ] + headers.extend(response['headers']) + start_response(response['status'], headers) + return [res] + +def factory(app, global_conf, **kw): + return JSONFilter(app, **kw) diff --git a/src/simplejson/simplejson/scanner.py b/src/simplejson/simplejson/scanner.py new file mode 100644 index 00000000..64f4999f --- /dev/null +++ b/src/simplejson/simplejson/scanner.py @@ -0,0 +1,63 @@ +""" +Iterator based sre token scanner +""" +import sre_parse, sre_compile, sre_constants +from sre_constants import BRANCH, SUBPATTERN +from re import VERBOSE, MULTILINE, DOTALL +import re + +__all__ = ['Scanner', 'pattern'] + +FLAGS = (VERBOSE | MULTILINE | DOTALL) +class Scanner(object): + def __init__(self, lexicon, flags=FLAGS): + self.actions = [None] + # combine phrases into a compound pattern + s = sre_parse.Pattern() + s.flags = flags + p = [] + for idx, token in enumerate(lexicon): + phrase = token.pattern + try: + subpattern = sre_parse.SubPattern(s, + [(SUBPATTERN, (idx + 1, sre_parse.parse(phrase, flags)))]) + except sre_constants.error: + raise + p.append(subpattern) + self.actions.append(token) + + p = sre_parse.SubPattern(s, [(BRANCH, (None, p))]) + self.scanner = sre_compile.compile(p) + + + def iterscan(self, string, idx=0, context=None): + """ + Yield match, end_idx for each match + """ + match = self.scanner.scanner(string, idx).match + actions = self.actions + lastend = idx + end = len(string) + while True: + m = match() + if m is None: + break + matchbegin, matchend = m.span() + if lastend == matchend: + break + action = actions[m.lastindex] + if action is not None: + rval, next_pos = action(m, context) + if next_pos is not None and next_pos != matchend: + # "fast forward" the scanner + matchend = next_pos + match = self.scanner.scanner(string, matchend).match + yield rval, matchend + lastend = matchend + +def pattern(pattern, 
flags=FLAGS): + def decorator(fn): + fn.pattern = pattern + fn.regex = re.compile(pattern, flags) + return fn + return decorator diff --git a/src/simplejson/simplejson/tests/__init__.py b/src/simplejson/simplejson/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/simplejson/simplejson/tests/test_attacks.py b/src/simplejson/simplejson/tests/test_attacks.py new file mode 100644 index 00000000..8ecfed8f --- /dev/null +++ b/src/simplejson/simplejson/tests/test_attacks.py @@ -0,0 +1,6 @@ +def test_script_close_attack(): + import simplejson + res = simplejson.dumps('</script>') + assert '</script>' not in res + res = simplejson.dumps(simplejson.loads('"</script>"')) + assert '</script>' not in res diff --git a/src/simplejson/simplejson/tests/test_dump.py b/src/simplejson/simplejson/tests/test_dump.py new file mode 100644 index 00000000..b4e236e5 --- /dev/null +++ b/src/simplejson/simplejson/tests/test_dump.py @@ -0,0 +1,10 @@ +from cStringIO import StringIO +import simplejson as S + +def test_dump(): + sio = StringIO() + S.dump({}, sio) + assert sio.getvalue() == '{}' + +def test_dumps(): + assert S.dumps({}) == '{}' diff --git a/src/simplejson/simplejson/tests/test_fail.py b/src/simplejson/simplejson/tests/test_fail.py new file mode 100644 index 00000000..a99d9c40 --- /dev/null +++ b/src/simplejson/simplejson/tests/test_fail.py @@ -0,0 +1,70 @@ +# Fri Dec 30 18:57:26 2005 +JSONDOCS = [ + # http://json.org/JSON_checker/test/fail1.json + '"A JSON payload should be an object or array, not a string."', + # http://json.org/JSON_checker/test/fail2.json + '["Unclosed array"', + # http://json.org/JSON_checker/test/fail3.json + '{unquoted_key: "keys must be quoted}', + # http://json.org/JSON_checker/test/fail4.json + '["extra comma",]', + # http://json.org/JSON_checker/test/fail5.json + '["double extra comma",,]', + # http://json.org/JSON_checker/test/fail6.json + '[ , "<-- missing value"]', + # http://json.org/JSON_checker/test/fail7.json + '["Comma after the close"],', + # http://json.org/JSON_checker/test/fail8.json + '["Extra close"]]', + # http://json.org/JSON_checker/test/fail9.json + '{"Extra comma": true,}', + # http://json.org/JSON_checker/test/fail10.json + '{"Extra value after close": true} "misplaced quoted value"', + # http://json.org/JSON_checker/test/fail11.json + '{"Illegal expression": 1 + 2}', + # http://json.org/JSON_checker/test/fail12.json + '{"Illegal invocation": alert()}', + # http://json.org/JSON_checker/test/fail13.json + '{"Numbers cannot have leading zeroes": 013}', + # http://json.org/JSON_checker/test/fail14.json + '{"Numbers cannot be hex": 0x14}', + # http://json.org/JSON_checker/test/fail15.json + '["Illegal backslash escape: \\x15"]', + # http://json.org/JSON_checker/test/fail16.json + '["Illegal backslash escape: \\\'"]', + # http://json.org/JSON_checker/test/fail17.json + '["Illegal backslash escape: \\017"]', + # http://json.org/JSON_checker/test/fail18.json + '[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]', + # http://json.org/JSON_checker/test/fail19.json + '{"Missing colon" null}', + # http://json.org/JSON_checker/test/fail20.json + '{"Double colon":: null}', + # http://json.org/JSON_checker/test/fail21.json + '{"Comma instead of colon", null}', + # http://json.org/JSON_checker/test/fail22.json + '["Colon instead of comma": false]', + # http://json.org/JSON_checker/test/fail23.json + '["Bad value", truth]', + # http://json.org/JSON_checker/test/fail24.json + "['single quote']", +] + +SKIPS = { + 1: "why not have a string payload?", + 18: "spec doesn't specify any
nesting limitations", +} + +def test_failures(): + import simplejson + for idx, doc in enumerate(JSONDOCS): + idx = idx + 1 + if idx in SKIPS: + simplejson.loads(doc) + continue + try: + simplejson.loads(doc) + except ValueError: + pass + else: + assert False, "Expected failure for fail%d.json: %r" % (idx, doc) diff --git a/src/simplejson/simplejson/tests/test_indent.py b/src/simplejson/simplejson/tests/test_indent.py new file mode 100644 index 00000000..47dd4dc2 --- /dev/null +++ b/src/simplejson/simplejson/tests/test_indent.py @@ -0,0 +1,41 @@ + + + +def test_indent(): + import simplejson + import textwrap + + h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh', 'i-vhbjkhnth', + {'nifty': 87}, {'field': 'yes', 'morefield': False} ] + + expect = textwrap.dedent("""\ + [ + [ + "blorpie" + ], + [ + "whoops" + ], + [], + "d-shtaeou", + "d-nthiouh", + "i-vhbjkhnth", + { + "nifty": 87 + }, + { + "field": "yes", + "morefield": false + } + ]""") + + + d1 = simplejson.dumps(h) + d2 = simplejson.dumps(h, indent=2, sort_keys=True, separators=(',', ': ')) + + h1 = simplejson.loads(d1) + h2 = simplejson.loads(d2) + + assert h1 == h + assert h2 == h + assert d2 == expect diff --git a/src/simplejson/simplejson/tests/test_pass1.py b/src/simplejson/simplejson/tests/test_pass1.py new file mode 100644 index 00000000..4eda1925 --- /dev/null +++ b/src/simplejson/simplejson/tests/test_pass1.py @@ -0,0 +1,72 @@ +# from http://json.org/JSON_checker/test/pass1.json +JSON = r''' +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E666, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ], + "compact": [1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" 
+: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066 + + +,"rosebud"] +''' + +def test_parse(): + # test in/out equivalence and parsing + import simplejson + res = simplejson.loads(JSON) + out = simplejson.dumps(res) + assert res == simplejson.loads(out) + try: + simplejson.dumps(res, allow_nan=False) + except ValueError: + pass + else: + assert False, "23456789012E666 should be out of range" diff --git a/src/simplejson/simplejson/tests/test_pass2.py b/src/simplejson/simplejson/tests/test_pass2.py new file mode 100644 index 00000000..ae74abbf --- /dev/null +++ b/src/simplejson/simplejson/tests/test_pass2.py @@ -0,0 +1,11 @@ +# from http://json.org/JSON_checker/test/pass2.json +JSON = r''' +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] +''' + +def test_parse(): + # test in/out equivalence and parsing + import simplejson + res = simplejson.loads(JSON) + out = simplejson.dumps(res) + assert res == simplejson.loads(out) diff --git a/src/simplejson/simplejson/tests/test_pass3.py b/src/simplejson/simplejson/tests/test_pass3.py new file mode 100644 index 00000000..d94893ff --- /dev/null +++ b/src/simplejson/simplejson/tests/test_pass3.py @@ -0,0 +1,16 @@ +# from http://json.org/JSON_checker/test/pass3.json +JSON = r''' +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} +''' + +def test_parse(): + # test in/out equivalence and parsing + import simplejson + res = simplejson.loads(JSON) + out = simplejson.dumps(res) + assert res == simplejson.loads(out) diff --git a/src/simplejson/simplejson/tests/test_recursion.py b/src/simplejson/simplejson/tests/test_recursion.py new file mode 100644 index 00000000..756b0661 --- /dev/null +++ b/src/simplejson/simplejson/tests/test_recursion.py @@ -0,0 +1,62 @@ +import simplejson + +def test_listrecursion(): + x = [] + x.append(x) + try: + simplejson.dumps(x) + except ValueError: + pass + else: + assert False, "didn't raise ValueError on list recursion" + x = [] + y = [x] + x.append(y) + try: + simplejson.dumps(x) + except ValueError: + pass + else: + assert False, "didn't raise ValueError on alternating list recursion" + y = [] + x = [y, y] + # ensure that the marker is cleared + simplejson.dumps(x) + +def test_dictrecursion(): + x = {} + x["test"] = x + try: + simplejson.dumps(x) + except ValueError: + pass + else: + assert False, "didn't raise ValueError on dict recursion" + x = {} + y = {"a": x, "b": x} + # ensure that the marker is cleared + simplejson.dumps(x) + +class TestObject: + pass + +class RecursiveJSONEncoder(simplejson.JSONEncoder): + recurse = False + def default(self, o): + if o is TestObject: + if self.recurse: + return [TestObject] + else: + return 'TestObject' + simplejson.JSONEncoder.default(o) + +def test_defaultrecursion(): + enc = RecursiveJSONEncoder() + assert enc.encode(TestObject) == '"TestObject"' + enc.recurse = True + try: + enc.encode(TestObject) + except ValueError: + pass + else: + assert False, "didn't raise ValueError on default recursion" diff --git a/src/simplejson/simplejson/tests/test_separators.py b/src/simplejson/simplejson/tests/test_separators.py new file mode 100644 index 00000000..a6153547 --- /dev/null +++ b/src/simplejson/simplejson/tests/test_separators.py @@ -0,0 +1,41 @@ + + + +def test_separators(): + import simplejson + import textwrap + + h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh', 'i-vhbjkhnth', + {'nifty': 87}, {'field': 'yes', 'morefield': False} ] + + expect = textwrap.dedent("""\ + [ + [ 
+ "blorpie" + ] , + [ + "whoops" + ] , + [] , + "d-shtaeou" , + "d-nthiouh" , + "i-vhbjkhnth" , + { + "nifty" : 87 + } , + { + "field" : "yes" , + "morefield" : false + } + ]""") + + + d1 = simplejson.dumps(h) + d2 = simplejson.dumps(h, indent=2, sort_keys=True, separators=(' ,', ' : ')) + + h1 = simplejson.loads(d1) + h2 = simplejson.loads(d2) + + assert h1 == h + assert h2 == h + assert d2 == expect diff --git a/src/simplejson/simplejson/tests/test_unicode.py b/src/simplejson/simplejson/tests/test_unicode.py new file mode 100644 index 00000000..88d09393 --- /dev/null +++ b/src/simplejson/simplejson/tests/test_unicode.py @@ -0,0 +1,16 @@ +import simplejson as S + +def test_encoding1(): + encoder = S.JSONEncoder(encoding='utf-8') + u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' + s = u.encode('utf-8') + ju = encoder.encode(u) + js = encoder.encode(s) + assert ju == js + +def test_encoding2(): + u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' + s = u.encode('utf-8') + ju = S.dumps(u, encoding='utf-8') + js = S.dumps(s, encoding='utf-8') + assert ju == js -- 2.45.2
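
Usage sketch (illustrative, not part of the imported 1.7.1 sources): the snippet below exercises the interfaces documented in the docstrings above, namely dumps()/loads() with custom separators, the object_hook decoding hook, and a JSONEncoder subclass selected through the cls kwarg. It assumes Python 2, as the package itself does, and that the simplejson package added by this patch is on sys.path; the SetEncoder and as_tuples names are made up for the example.

import simplejson

# Round-trip with the compact separators described in the dumps() docstring.
data = {'point': [1, 2], 'name': u'origin'}
compact = simplejson.dumps(data, separators=(',', ':'), sort_keys=True)
assert compact == '{"name":"origin","point":[1,2]}'
assert simplejson.loads(compact) == data

# object_hook, as described for load()/loads(): it is called with every
# decoded object literal and its return value replaces that dict.
def as_tuples(d):
    return dict((k, tuple(v) if isinstance(v, list) else v)
                for k, v in d.items())

assert simplejson.loads(compact, object_hook=as_tuples)['point'] == (1, 2)

# A JSONEncoder subclass whose default() serializes one extra type (sets),
# passed to dumps() through the cls kwarg.
class SetEncoder(simplejson.JSONEncoder):
    def default(self, o):
        if isinstance(o, set):
            return sorted(o)
        return simplejson.JSONEncoder.default(self, o)

assert simplejson.dumps(set([3, 1, 2]), cls=SetEncoder) == '[1, 2, 3]'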