/* NOTE(review): fragmentary extract — the leading integer on each line is the
 * original source line number that got fused into the text; gaps in those
 * numbers (e.g. 18 -> 51) mark lines missing from this extract. Code is left
 * byte-identical; only comments are added. */
16 #pragma GCC diagnostic push
17 #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
18 #pragma GCC diagnostic ignored "-Wcast-function-type"
/* Py_ssize_t compatibility shims for pre-2.5 Pythons (the matching #else /
 * #endif lines are among the missing lines of this extract). */
51 #if PY_VERSION_HEX < 0x02050000
54 #define PY_SSIZE_T_MAX INT_MAX
55 #define PY_SSIZE_T_MIN INT_MIN
57 #define SSIZE_T_F "%d"
59 #define SSIZE_T_F "%zd"
/* Fallbacks for platforms lacking C99 INFINITY/NAN; NAN here relies on
 * IEEE-754 inf-inf producing a NaN. */
66 #define INFINITY HUGE_VAL
70 #define NAN (HUGE_VAL-HUGE_VAL)
74 #define Py_IS_NAN(X) ((X) != (X))
/* Advance the decoder cursor past ASCII whitespace in place. */
77 #define skipSpaces(d) while (*((d)->ptr) && isspace(*((d)->ptr))) (d)->ptr++
/* NOTE(review): fragment of a helper that stringifies an object and encodes
 * it as UTF-8 bytes (presumably used to build error messages — confirm
 * against the full source; lines 85-86 are missing from this extract). */
83 PyObject *tmp_str, *
result;
84 tmp_str = PyObject_Str(
obj);
87 result = PyUnicode_AsUTF8String(tmp_str);
/* NOTE(review): fragment of the "null" literal decoder. Bounds-check the
 * remaining input before strncmp so the compare never reads past end. */
108 left = jsondata->
end-jsondata->
ptr;
110 if (left >= 4 && strncmp(jsondata->
ptr,
"null", 4) == 0) {
/* NOTE(review): fragment of the boolean literal decoder — same
 * remaining-length guard pattern as the null decoder, for "true"/"false". */
126 left = jsondata->
end-jsondata->
ptr;
128 if (left >= 4 && strncmp(jsondata->
ptr,
"true", 4) == 0) {
132 }
else if (left >= 5 && strncmp(jsondata->
ptr,
"false", 5) == 0) {
/* NOTE(review): fragment of the JSON string decoder. Large interior gaps
 * (original lines 148-151, 154-166, 170-183, 185-191, etc. are missing), so
 * the scanning loop that sets `escaping`/`has_unicode` is not visible here.
 * Visible logic: three decode paths are chosen by flags collected during the
 * scan — unicode-escape decode, bytes escape decode, or a raw byte copy. */
147 int c, escaping, has_unicode, string_escape;
152 escaping = has_unicode = string_escape =
False;
153 ptr = jsondata->
ptr+1;
167 }
else if (
c ==
'"') {
169 }
else if (!isascii(
c)) {
184 string_escape =
True;
/* Length of the string body, excluding both quote characters. */
192 len = ptr-jsondata->
ptr-1;
195 object = PyUnicode_DecodeUnicodeEscape(jsondata->
ptr+1, len, NULL);
196 else if (string_escape)
197 object = PyBytes_DecodeEscape(jsondata->
ptr+1, len, NULL, 0, NULL);
199 object = PyBytes_FromStringAndSize(jsondata->
ptr+1, len);
/* On decode failure, fetch the pending exception and rewrap it with the
 * UnicodeDecodeError's "reason" attribute for a friendlier message. */
201 if (
object == NULL) {
202 PyObject *
type, *value, *tb, *reason;
204 PyErr_Fetch(&
type, &value, &tb);
212 if (PyErr_GivenExceptionMatches(
type, PyExc_UnicodeDecodeError)) {
215 reason = PyObject_GetAttrString(value,
"reason");
217 if (reason && PyUnicode_Check(reason)) {
218 PyObject * bytes_reason = PyUnicode_AsUTF8String(reason);
220 reason = bytes_reason;
225 reason ? PyBytes_AsString(reason) :
"bad format");
/* Advance the cursor past the closing quote. */
239 jsondata->
ptr = ptr+1;
/* NOTE(review): fragment of the Infinity decoder — accepts the non-standard
 * JSON extensions "Infinity", "+Infinity" and "-Infinity". */
249 left = jsondata->
end-jsondata->
ptr;
251 if (left >= 8 && strncmp(jsondata->
ptr,
"Infinity", 8) == 0) {
253 object = PyFloat_FromDouble(
INFINITY);
255 }
else if (left >= 9 && strncmp(jsondata->
ptr,
"+Infinity", 9) == 0) {
257 object = PyFloat_FromDouble(
INFINITY);
259 }
else if (left >= 9 && strncmp(jsondata->
ptr,
"-Infinity", 9) == 0) {
261 object = PyFloat_FromDouble(-
INFINITY);
/* NOTE(review): fragment of the NaN decoder — accepts the non-standard
 * "NaN" token and produces a Python float NaN. */
276 left = jsondata->
end-jsondata->
ptr;
278 if (left >= 3 && strncmp(jsondata->
ptr,
"NaN", 3) == 0) {
280 object = PyFloat_FromDouble(
NAN);
/* NOTE(review): fragment of the number decoder. The character-scanning loop
 * (original lines 298-331) that sets `is_float` is missing here; the visible
 * tail slices the scanned span into a bytes object, then parses it either as
 * a float or as a base-10 long. */
292 int c, is_float, should_stop;
297 is_float = should_stop =
False;
332 str = PyBytes_FromStringAndSize(jsondata->
ptr, ptr-jsondata->
ptr);
337 object = PyFloat_FromString(
str);
339 object = PyLong_FromString(PyBytes_AS_STRING(
str), NULL, 10);
344 if (
object == NULL) {
/* NOTE(review): fragment of the array decoder — builds a PyList; `start`
 * remembers the '[' position (presumably for error reporting — confirm in
 * the full source). `expect_item && items > 0` at a ']' detects a trailing
 * comma. */
358 int c, expect_item, items,
result;
361 object = PyList_New(0);
363 start = jsondata->
ptr;
377 }
else if (
c ==
']') {
378 if (expect_item && items > 0) {
388 }
else if (
c ==
',') {
/* NOTE(review): fragment of the object decoder — builds a PyDict. The
 * visible pieces show: trailing-comma detection at '}', the mandatory ':'
 * between key and value, and insertion via PyDict_SetItem. */
421 PyObject *
object, *key, *value;
422 int c, expect_key, items,
result;
425 object = PyDict_New();
429 start = jsondata->
ptr;
442 }
else if (
c ==
'}') {
443 if (expect_key && items > 0) {
453 }
else if (
c ==
',') {
480 if (*jsondata->
ptr !=
':') {
498 result = PyDict_SetItem(
object, key, value);
/* NOTE(review): fragment of the top-level decode dispatcher — switches on
 * the next input character; the '+'/'-' case peeks one character ahead for
 * 'I' to distinguish signed Infinity from a signed number. The case labels
 * themselves are among the missing lines. */
519 switch (*jsondata->
ptr) {
555 if (*(jsondata->
ptr+1) ==
'I') {
/* NOTE(review): fragment of the bytes-string encoder. Worst case each byte
 * expands to a 6-char \u00xx escape, plus 2 quotes — hence newsize =
 * 2 + 6*len. An overflow guard precedes allocation, the loop escapes quote,
 * backslash, the usual control characters, and any byte outside the
 * printable ASCII range, and the result is shrunk to the bytes written. */
594 PyBytesObject *
op = (PyBytesObject *)
string;
595 size_t newsize = 2+6*Py_SIZE(
op);
599 PyErr_SetString(PyExc_OverflowError,
"string is too large to make repr");
602 v = PyBytes_FromStringAndSize((
char *)NULL, newsize);
613 p = PyBytes_AS_STRING(v);
615 for (i = 0; i < Py_SIZE(
op); i++) {
/* Invariant: enough room for the largest escape (6 chars) plus the quote. */
618 assert(newsize-(p-PyBytes_AS_STRING(v)) >= 7);
620 if (
c == quote ||
c ==
'\\')
621 *p++ =
'\\', *p++ =
c;
623 *p++ =
'\\', *p++ =
't';
625 *p++ =
'\\', *p++ =
'n';
627 *p++ =
'\\', *p++ =
'r';
629 *p++ =
'\\', *p++ =
'f';
631 *p++ =
'\\', *p++ =
'b';
632 else if (c < ' ' || c >= 0x7f) {
/* Non-printable byte: emit as \u00xx (masked to one byte). */
636 sprintf(p,
"\\u%04x",
c&0xff);
642 assert(newsize-(p-PyBytes_AS_STRING(v)) >= 1);
/* Trim the over-allocated buffer down to the actual encoded length. */
645 _PyBytes_Resize(&v, (
int) (p-PyBytes_AS_STRING(v)));
/* NOTE(review): fragment of the unicode encoder. Uses the PEP 393 flexible
 * representation (PyUnicode_KIND/DATA/READ), writes into a bytearray work
 * buffer, and emits \Uxxxxxxxx for code points >= 0x10000, \uxxxx for other
 * non-ASCII/control characters, short escapes for the common controls. */
662 static const char *hexdigit =
"0123456789abcdef";
665 int kind = PyUnicode_KIND(unicode);
666 void *
data = PyUnicode_DATA(unicode);
667 size = PyUnicode_GET_LENGTH(unicode);
670 PyErr_SetString(PyExc_OverflowError,
"unicode object is too large to make repr");
674 repr = PyByteArray_FromStringAndSize(NULL,
681 p = PyByteArray_AS_STRING(repr);
/* The quote character is whatever was stored in the buffer's first byte. */
686 const Py_UCS4 quote = PyByteArray_AS_STRING(repr)[0];
689 Py_UCS4 ch = PyUnicode_READ(kind,
data, pos);
693 if ((ch == quote || ch ==
'\\')) {
700 else if (ch >= 0x10000) {
/* Astral code point: 8-digit \U escape, one nibble at a time. */
703 *p++ = hexdigit[(ch>>28)&0x0000000F];
704 *p++ = hexdigit[(ch>>24)&0x0000000F];
705 *p++ = hexdigit[(ch>>20)&0x0000000F];
706 *p++ = hexdigit[(ch>>16)&0x0000000F];
707 *p++ = hexdigit[(ch>>12)&0x0000000F];
708 *p++ = hexdigit[(ch>>8)&0x0000000F];
709 *p++ = hexdigit[(ch>>4)&0x0000000F];
710 *p++ = hexdigit[ch&0x0000000F];
/* BMP code point: 4-digit \u escape. */
717 *p++ = hexdigit[(ch>>12)&0x000F];
718 *p++ = hexdigit[(ch>>8)&0x000F];
719 *p++ = hexdigit[(ch>>4)&0x000F];
720 *p++ = hexdigit[ch&0x000F];
722 }
else if (ch ==
'\t') {
725 }
else if (ch ==
'\n') {
728 }
else if (ch ==
'\r') {
731 }
else if (ch ==
'\f') {
734 }
else if (ch ==
'\b') {
740 else if (ch < ' ' || ch >= 0x7F) {
745 *p++ = hexdigit[(ch>>4)&0x000F];
746 *p++ = hexdigit[ch&0x000F];
/* Closing quote, then snapshot the written span into an immutable bytes. */
754 *p++ = PyByteArray_AS_STRING(repr)[0];
758 PyObject *
result = PyBytes_FromStringAndSize(PyByteArray_AS_STRING(repr),
759 p - PyByteArray_AS_STRING(repr));
/* NOTE(review): fragment of the tuple encoder. Encodes each element into a
 * tuple of byte strings, prepends "[" to the first piece, appends "]" to
 * the last, then joins the pieces with ", ". */
776 PyObject *pieces, *
result = NULL;
777 PyTupleObject *v = (PyTupleObject *)tuple;
781 return PyBytes_FromString(
"[]");
783 pieces = PyTuple_New(n);
788 for (i = 0; i < n; ++i) {
792 PyTuple_SET_ITEM(pieces, i, s);
797 s = PyBytes_FromString(
"[");
800 temp = PyTuple_GET_ITEM(pieces, 0);
/* ConcatAndDel steals/releases `temp`; s becomes "[" + first piece. */
801 PyBytes_ConcatAndDel(&s,
temp);
802 PyTuple_SET_ITEM(pieces, 0, s);
806 s = PyBytes_FromString(
"]");
809 temp = PyTuple_GET_ITEM(pieces, n-1);
810 PyBytes_ConcatAndDel(&
temp, s);
811 PyTuple_SET_ITEM(pieces, n-1,
temp);
816 s = PyBytes_FromString(
", ");
819 result = _PyBytes_Join(s, pieces);
/* NOTE(review): fragment of the list encoder. Py_ReprEnter/Py_ReprLeave
 * guard against self-referential lists (raises JSON_EncodeError instead of
 * recursing forever); otherwise same piece-join strategy as the tuple
 * encoder, but accumulating into a PyList. */
839 PyObject *pieces = NULL, *
result = NULL;
840 PyListObject *v = (PyListObject *)
list;
842 i = Py_ReprEnter((PyObject *)v);
845 PyErr_SetString(
JSON_EncodeError,
"a list with references to itself is not JSON encodable");
850 if (Py_SIZE(v) == 0) {
851 result = PyBytes_FromString(
"[]");
855 pieces = PyList_New(0);
861 for (i = 0; i < Py_SIZE(v); ++i) {
866 status = PyList_Append(pieces, s);
873 assert(PyList_GET_SIZE(pieces) > 0);
874 s = PyBytes_FromString(
"[");
877 temp = PyList_GET_ITEM(pieces, 0);
878 PyBytes_ConcatAndDel(&s,
temp);
879 PyList_SET_ITEM(pieces, 0, s);
883 s = PyBytes_FromString(
"]");
886 temp = PyList_GET_ITEM(pieces, PyList_GET_SIZE(pieces)-1);
887 PyBytes_ConcatAndDel(&
temp, s);
888 PyList_SET_ITEM(pieces, PyList_GET_SIZE(pieces)-1,
temp);
893 s = PyBytes_FromString(
", ");
896 result = _PyBytes_Join(s, pieces);
/* Must be balanced with the Py_ReprEnter above on every exit path. */
901 Py_ReprLeave((PyObject *)v);
/* NOTE(review): fragment of the dict encoder. Same recursion guard and
 * piece-join strategy as the list encoder; additionally enforces that keys
 * are bytes/unicode and joins each key to its value with ": ". */
917 PyObject *s, *
temp, *colon = NULL;
918 PyObject *pieces = NULL, *
result = NULL;
919 PyObject *key, *value;
920 PyDictObject *mp = (PyDictObject *)
dict;
922 i = Py_ReprEnter((PyObject *)mp);
926 "itself is not JSON encodable");
931 if (mp->ma_used == 0) {
932 result = PyBytes_FromString(
"{}");
936 pieces = PyList_New(0);
940 colon = PyBytes_FromString(
": ");
947 while (PyDict_Next((PyObject *)mp, &i, &key, &value)) {
950 if (!PyBytes_Check(key) && !PyUnicode_Check(key)) {
951 PyErr_SetString(
JSON_EncodeError,
"JSON encodable dictionaries must have string/unicode keys");
958 PyBytes_Concat(&s, colon);
963 status = PyList_Append(pieces, s);
970 assert(PyList_GET_SIZE(pieces) > 0);
971 s = PyBytes_FromString(
"{");
974 temp = PyList_GET_ITEM(pieces, 0);
975 PyBytes_ConcatAndDel(&s,
temp);
976 PyList_SET_ITEM(pieces, 0, s);
980 s = PyBytes_FromString(
"}");
983 temp = PyList_GET_ITEM(pieces, PyList_GET_SIZE(pieces)-1);
984 PyBytes_ConcatAndDel(&
temp, s);
985 PyList_SET_ITEM(pieces, PyList_GET_SIZE(pieces)-1,
temp);
990 s = PyBytes_FromString(
", ");
993 result = _PyBytes_Join(s, pieces);
999 Py_ReprLeave((PyObject *)mp);
/* NOTE(review): fragment of the top-level encode dispatcher — a type-check
 * chain mapping Python objects to JSON text. Note: identity comparison
 * against Py_True/Py_False/Py_None precedes the type checks, and special
 * float values map to the non-standard tokens NaN/Infinity/-Infinity. */
1004 if (
object == Py_True) {
1005 return PyBytes_FromString(
"true");
1006 }
else if (
object == Py_False) {
1007 return PyBytes_FromString(
"false");
1008 }
else if (
object == Py_None) {
1009 return PyBytes_FromString(
"null");
1010 }
else if (PyBytes_Check(
object)) {
1012 }
else if (PyUnicode_Check(
object)) {
1014 }
else if (PyFloat_Check(
object)) {
1015 double val = PyFloat_AS_DOUBLE(
object);
1017 return PyBytes_FromString(
"NaN");
1018 }
else if (Py_IS_INFINITY(val)) {
1020 return PyBytes_FromString(
"Infinity");
1022 return PyBytes_FromString(
"-Infinity");
1028 else if (PyLong_Check(
object)) {
1031 else if (PyList_Check(
object)) {
1033 }
else if (PyTuple_Check(
object)) {
1035 }
else if (PyDict_Check(
object)) {
/* NOTE(review): fragment of the module-level decode(string, all_unicode)
 * entry point. Unicode input is flattened via raw-unicode-escape (both the
 * legacy PyUnicode_EncodeRawUnicodeEscape path and the modern
 * PyUnicode_AsRawUnicodeEscapeString path are visible — presumably selected
 * by a Python-version #if among the missing lines). CAUTION: `end` is set
 * with strlen(), so embedded NUL bytes would truncate the input. After
 * decoding, leftover characters before `end` are treated as an error. */
1053 static char *kwlist[] = { (
char*)
"json", (
char*)
"all_unicode", NULL };
1054 int all_unicode =
True;
1059 if (!PyArg_ParseTupleAndKeywords(
args, kwargs,
"O|i:decode", kwlist, &
string, &all_unicode))
1062 if (PyUnicode_Check(
string)) {
1066 str = PyUnicode_EncodeRawUnicodeEscape(PyUnicode_AS_UNICODE(
string),
1067 PyUnicode_GET_SIZE(
string));
1070 str = PyUnicode_AsRawUnicodeEscapeString(
string);
1080 if (PyBytes_AsStringAndSize(
str, &(jsondata.
str), NULL) == -1) {
1085 jsondata.
ptr = jsondata.
str;
1086 jsondata.
end = jsondata.
str+strlen(jsondata.
str);
1091 if (
object != NULL) {
1093 if (jsondata.
ptr < jsondata.
end) {
/* NOTE(review): fragment of the PyMethodDef table and module docstring.
 * decode() takes METH_VARARGS|METH_KEYWORDS to support the all_unicode
 * keyword; the table is NULL-terminated as required. */
1110 PyDoc_STR(
"encode(object) -> generate the JSON representation for object.") },
1113 {
"decode", (PyCFunction)
JSON_decode, METH_VARARGS|METH_KEYWORDS,
1114 PyDoc_STR(
"decode(string, all_unicode=True) -> parse the JSON representation into\n"
1115 "python objects. The optional argument `all_unicode', specifies how to\n"
1116 "convert the strings in the JSON representation into python objects.\n"
1117 "If it is False (default on Python 2.x), it will return strings/bytes\n"
1118 "everywhere possible and unicode objects only where necessary, else\n"
1119 "it will return unicode objects everywhere (this is slower, but default\n"
1122 { NULL, NULL, 0, NULL }
1125 PyDoc_STRVAR(module_doc,
"Fast JSON encoder/decoder module.");
/* NOTE(review): fragment of module initialization — version constant,
 * creation of the base cjson.Error exception, and the start of the
 * PyModuleDef struct; pops the diagnostic state pushed at the top of the
 * file. */
1127 #define MODULE_VERSION "1.0.5"
1130 JSON_Error = PyErr_NewException(
"cjson.Error", NULL, NULL);
1154 PyModuleDef_HEAD_INIT,
1180 #pragma GCC diagnostic pop