Skip to content

Commit d026be3

Browse files
committed
apply suggested change
1 parent ee1a7f6 commit d026be3

File tree

1 file changed

+13
-56
lines changed

1 file changed

+13
-56
lines changed

Modules/_json.c

Lines changed: 13 additions & 56 deletions
Original file line numberDiff line numberDiff line change
@@ -48,10 +48,11 @@ typedef struct _PyEncoderObject {
4848
PyObject *indent;
4949
PyObject *key_separator;
5050
PyObject *item_separator;
51-
char sort_keys;
52-
char skipkeys;
53-
int allow_nan;
54-
int (*fast_encode)(PyUnicodeWriter *, PyObject*);
51+
bool sort_keys;
52+
bool skipkeys;
53+
bool allow_nan;
54+
bool fast_encode;
55+
bool ensure_ascii; /* used only when fast_encode == true */
5556
} PyEncoderObject;
5657

5758
#define PyEncoderObject_CAST(op) ((PyEncoderObject *)(op))
@@ -303,55 +304,9 @@ escape_unicode(PyObject *pystr)
303304
return rval;
304305
}
305306

306-
// Take a PyUnicode pystr and write an ASCII-only escaped string to writer.
307-
// Same as ascii_escape_unicode(), but writes to a PyUnicodeWriter instead of
308-
// returning a Unicode object.
309-
static int
310-
write_escaped_ascii(PyUnicodeWriter *writer, PyObject *pystr)
311-
{
312-
Py_ssize_t i;
313-
Py_ssize_t input_chars;
314-
Py_ssize_t chars;
315-
Py_ssize_t copy_len = 0;
316-
const void *input;
317-
int kind;
318-
int ret;
319-
unsigned char buf[12];
320-
321-
input_chars = PyUnicode_GET_LENGTH(pystr);
322-
input = PyUnicode_DATA(pystr);
323-
kind = PyUnicode_KIND(pystr);
324-
325-
ret = PyUnicodeWriter_WriteChar(writer, '"');
326-
if (ret) return ret;
327-
328-
for (i = 0; i < input_chars; i++) {
329-
Py_UCS4 c = PyUnicode_READ(kind, input, i);
330-
if (S_CHAR(c)) {
331-
copy_len++;
332-
}
333-
else {
334-
ret = PyUnicodeWriter_WriteSubstring(writer, pystr, i-copy_len, i);
335-
if (ret) return ret;
336-
copy_len = 0;
337-
338-
chars = ascii_escape_unichar(c, buf, 0);
339-
ret = PyUnicodeWriter_WriteUTF8(writer, (const char*)buf, chars);
340-
if (ret) return ret;
341-
}
342-
}
343-
344-
ret = PyUnicodeWriter_WriteSubstring(writer, pystr, i-copy_len, i);
345-
if (ret) return ret;
346-
347-
return PyUnicodeWriter_WriteChar(writer, '"');
348-
}
349-
350307
// Take a PyUnicode pystr and write an escaped string to writer.
351-
// Same as escape_unicode(), but writes to a PyUnicodeWriter instead of
352-
// returning a Unicode object.
353308
static int
354-
write_escaped_unicode(PyUnicodeWriter *writer, PyObject *pystr)
309+
write_escaped_unicode(PyUnicodeWriter *writer, PyObject *pystr, bool ascii_only)
355310
{
356311
Py_ssize_t i;
357312
Py_ssize_t input_chars;
@@ -371,7 +326,7 @@ write_escaped_unicode(PyUnicodeWriter *writer, PyObject *pystr)
371326

372327
for (i = 0; i < input_chars; i++) {
373328
Py_UCS4 c = PyUnicode_READ(kind, input, i);
374-
if (c <= 0x1f || c == '\\' || c == '"') {
329+
if (c <= 0x1f || c == '\\' || c == '"' || (ascii_only && c >= 0x7f)) {
375330
ret = PyUnicodeWriter_WriteSubstring(writer, pystr, i-copy_len, i);
376331
if (ret) return ret;
377332
copy_len = 0;
@@ -1338,15 +1293,17 @@ encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
13381293
s->sort_keys = sort_keys;
13391294
s->skipkeys = skipkeys;
13401295
s->allow_nan = allow_nan;
1341-
s->fast_encode = NULL;
1296+
s->fast_encode = false;
1297+
s->ensure_ascii = false;
13421298

13431299
if (PyCFunction_Check(s->encoder)) {
13441300
PyCFunction f = PyCFunction_GetFunction(s->encoder);
13451301
if (f == py_encode_basestring_ascii){
1346-
s->fast_encode = write_escaped_ascii;
1302+
s->fast_encode = true;
1303+
s->ensure_ascii = true;
13471304
}
13481305
else if (f == py_encode_basestring) {
1349-
s->fast_encode = write_escaped_unicode;
1306+
s->fast_encode = true;
13501307
}
13511308
}
13521309

@@ -1540,7 +1497,7 @@ static int
15401497
encoder_write_string(PyEncoderObject *s, PyUnicodeWriter *writer, PyObject *obj)
15411498
{
15421499
if (s->fast_encode) {
1543-
return s->fast_encode(writer, obj);
1500+
return write_escaped_unicode(writer, obj, s->ensure_ascii);
15441501
}
15451502

15461503
/* Return the JSON representation of a string */

0 commit comments

Comments
 (0)