Do not escape a unicode sequence when escaping JSON text.

Previously, any backslash in text being escaped for JSON was doubled so
that the result was still valid JSON. However, this led to some perverse
results in the case of Unicode sequences. These are now detected and the
initial backslash is no longer escaped. All other backslashes are
still escaped. No validity check is performed; all that is looked for is
\uXXXX where X is a hexadecimal digit.

This is a change from the 9.2 and 9.3 behaviour as noted in the Release
notes.

Per complaint from Teodor Sigaev.
Author: Andrew Dunstan  2014-06-03 16:11:31 -04:00
parent f30015b6d7
commit 0ad1a81632
7 changed files with 77 additions and 7 deletions
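
To make the new rule concrete, here is a minimal, self-contained sketch of the behaviour the commit message describes. It is an illustration only, not the server's escape_json(): the name escape_json_sketch, the stdio-based output, and the omission of control-character escaping are all choices made for this example. A backslash that begins \u followed by four hexadecimal digits is emitted as-is; every other backslash is doubled.

#include <ctype.h>
#include <stdio.h>

/*
 * Illustrative sketch only: write "str" to stdout as a JSON string literal.
 * A backslash that starts a \uXXXX sequence (X = hex digit) is passed
 * through unchanged; any other backslash is doubled.  No further validity
 * check is made, and control-character escaping is omitted for brevity.
 */
static void
escape_json_sketch(const char *str)
{
	const char *p;

	putchar('"');
	for (p = str; *p; p++)
	{
		switch (*p)
		{
			case '"':
				fputs("\\\"", stdout);
				break;
			case '\\':
				if (p[1] == 'u' &&
					isxdigit((unsigned char) p[2]) &&
					isxdigit((unsigned char) p[3]) &&
					isxdigit((unsigned char) p[4]) &&
					isxdigit((unsigned char) p[5]))
					putchar('\\');		/* keep the single backslash */
				else
					fputs("\\\\", stdout);
				break;
			default:
				putchar(*p);
		}
	}
	puts("\"");
}

int
main(void)
{
	escape_json_sketch("\\uabcd");	/* prints "\uabcd"  - passed through   */
	escape_json_sketch("\\abcd");	/* prints "\\abcd"  - still escaped    */
	escape_json_sketch("\\u00zz");	/* prints "\\u00zz" - not 4 hex digits */
	return 0;
}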

View File

@@ -180,6 +180,21 @@
      </para>
     </listitem>
+    <listitem>
+     <para>
+      Unicode escapes in <link linkend="datatype-json"><type>JSON</type></link>
+      text values are no longer rendered with the backslash escaped.
+      (Andrew Dunstan)
+     </para>
+     <para>
+      Previously all backslashes in text values being formed into JSON were
+      escaped. Now a backslash followed by "u" and four hexadecimal digits is
+      not escaped, as this is a legal sequence in a JSON string value, and
+      escaping the backslash led to some perverse results.
+     </para>
+    </listitem>
     <listitem>
      <para>
       Rename <link linkend="SQL-EXPLAIN"><command>EXPLAIN

View File

@@ -2315,7 +2315,26 @@ escape_json(StringInfo buf, const char *str)
 				appendStringInfoString(buf, "\\\"");
 				break;
 			case '\\':
-				appendStringInfoString(buf, "\\\\");
+				/*
+				 * Unicode escapes are passed through as is. There is no
+				 * requirement that they denote a valid character in the
+				 * server encoding - indeed that is a big part of their
+				 * usefulness.
+				 *
+				 * All we require is that they consist of \uXXXX where
+				 * the Xs are hexadecimal digits. It is the responsibility
+				 * of the caller of, say, to_json() to make sure that the
+				 * unicode escape is valid.
+				 *
+				 * In the case of a jsonb string value being escaped, the
+				 * only unicode escape that should be present is \u0000,
+				 * all the other unicode escapes will have been resolved.
+				 */
+				if (p[1] == 'u' && isxdigit(p[2]) && isxdigit(p[3])
+					&& isxdigit(p[4]) && isxdigit(p[5]))
+					appendStringInfoCharMacro(buf, *p);
+				else
+					appendStringInfoString(buf, "\\\\");
 				break;
 			default:
 				if ((unsigned char) *p < ' ')
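
As the comment above notes, other Unicode escapes in a jsonb string value have already been resolved to actual characters by input time, so for jsonb only a stored \u0000 ever reaches this branch; the jsonb regression output further below accordingly changes from "\\u0000" to "\u0000".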

View File

@@ -426,6 +426,20 @@ select to_json(timestamptz '2014-05-28 12:22:35.614298-04');
 (1 row)
 COMMIT;
+-- unicode escape - backslash is not escaped
+select to_json(text '\uabcd');
+ to_json
+----------
+ "\uabcd"
+(1 row)
+-- any other backslash is escaped
+select to_json(text '\abcd');
+ to_json
+----------
+ "\\abcd"
+(1 row)
 --json_agg
 SELECT json_agg(q)
 FROM ( SELECT $$a$$ || x AS b, y AS c,

View File

@@ -426,6 +426,20 @@ select to_json(timestamptz '2014-05-28 12:22:35.614298-04');
 (1 row)
 COMMIT;
+-- unicode escape - backslash is not escaped
+select to_json(text '\uabcd');
+ to_json
+----------
+ "\uabcd"
+(1 row)
+-- any other backslash is escaped
+select to_json(text '\abcd');
+ to_json
+----------
+ "\\abcd"
+(1 row)
 --json_agg
 SELECT json_agg(q)
 FROM ( SELECT $$a$$ || x AS b, y AS c,

View File

@@ -61,9 +61,9 @@ LINE 1: SELECT '"\u000g"'::jsonb;
 DETAIL: "\u" must be followed by four hexadecimal digits.
 CONTEXT: JSON data, line 1: "\u000g...
 SELECT '"\u0000"'::jsonb; -- OK, legal escape
-   jsonb
------------
- "\\u0000"
+  jsonb
+----------
+ "\u0000"
 (1 row)
 -- use octet_length here so we don't get an odd unicode char in the

View File

@@ -61,9 +61,9 @@ LINE 1: SELECT '"\u000g"'::jsonb;
 DETAIL: "\u" must be followed by four hexadecimal digits.
 CONTEXT: JSON data, line 1: "\u000g...
 SELECT '"\u0000"'::jsonb; -- OK, legal escape
-   jsonb
------------
- "\\u0000"
+  jsonb
+----------
+ "\u0000"
 (1 row)
 -- use octet_length here so we don't get an odd unicode char in the

View File

@@ -111,6 +111,14 @@ SET LOCAL TIME ZONE -8;
 select to_json(timestamptz '2014-05-28 12:22:35.614298-04');
 COMMIT;
+-- unicode escape - backslash is not escaped
+select to_json(text '\uabcd');
+-- any other backslash is escaped
+select to_json(text '\abcd');
 --json_agg
 SELECT json_agg(q)