postgresql/src/test/regress/expected/json.out

-- Strings.
SELECT '""'::json; -- OK.
json
------
""
(1 row)
SELECT $$''$$::json; -- ERROR, single quotes are not allowed
ERROR: invalid input syntax for type json
LINE 1: SELECT $$''$$::json;
^
DETAIL: Token "'" is invalid.
CONTEXT: JSON data, line 1: '...
SELECT '"abc"'::json; -- OK
json
-------
"abc"
(1 row)
SELECT '"abc'::json; -- ERROR, quotes not closed
ERROR: invalid input syntax for type json
LINE 1: SELECT '"abc'::json;
^
DETAIL: Token ""abc" is invalid.
CONTEXT: JSON data, line 1: "abc
SELECT '"abc
def"'::json; -- ERROR, unescaped newline in string constant
ERROR: invalid input syntax for type json
LINE 1: SELECT '"abc
^
DETAIL: Character with value 0x0a must be escaped.
CONTEXT: JSON data, line 1: "abc
SELECT '"\n\"\\"'::json; -- OK, legal escapes
json
----------
"\n\"\\"
(1 row)
SELECT '"\v"'::json; -- ERROR, not a valid JSON escape
ERROR: invalid input syntax for type json
LINE 1: SELECT '"\v"'::json;
^
DETAIL: Escape sequence "\v" is invalid.
CONTEXT: JSON data, line 1: "\v...
SELECT '"\u"'::json; -- ERROR, incomplete escape
ERROR: invalid input syntax for type json
LINE 1: SELECT '"\u"'::json;
^
DETAIL: "\u" must be followed by four hexadecimal digits.
CONTEXT: JSON data, line 1: "\u"
SELECT '"\u00"'::json; -- ERROR, incomplete escape
ERROR: invalid input syntax for type json
LINE 1: SELECT '"\u00"'::json;
^
DETAIL: "\u" must be followed by four hexadecimal digits.
CONTEXT: JSON data, line 1: "\u00"
SELECT '"\u000g"'::json; -- ERROR, g is not a hex digit
ERROR: invalid input syntax for type json
LINE 1: SELECT '"\u000g"'::json;
^
DETAIL: "\u" must be followed by four hexadecimal digits.
CONTEXT: JSON data, line 1: "\u000g...
SELECT '"\u0000"'::json; -- OK, legal escape
json
----------
"\u0000"
(1 row)
SELECT '"\uaBcD"'::json; -- OK, uppercase and lower case both OK
json
----------
"\uaBcD"
(1 row)
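-- Illustrative aside (hedged; not part of the canonical expected output):
-- the json type stores the input text verbatim, so escapes are preserved on
-- output and only decoded when a text value is extracted. On a stock build,
-- the annotated results should be:
SELECT '"\u0041"'::json;           -- should echo "\u0041" unchanged
SELECT '"\u0041"'::json #>> '{}';  -- extraction should de-escape it to: A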
-- Numbers.
SELECT '1'::json; -- OK
json
------
1
(1 row)
SELECT '0'::json; -- OK
json
------
0
(1 row)
SELECT '01'::json; -- ERROR, not valid according to JSON spec
ERROR: invalid input syntax for type json
LINE 1: SELECT '01'::json;
^
DETAIL: Token "01" is invalid.
CONTEXT: JSON data, line 1: 01
SELECT '0.1'::json; -- OK
json
------
0.1
(1 row)
SELECT '9223372036854775808'::json; -- OK, even though it's too large for int8
json
---------------------
9223372036854775808
(1 row)
SELECT '1e100'::json; -- OK
json
-------
1e100
(1 row)
SELECT '1.3e100'::json; -- OK
json
---------
1.3e100
(1 row)
SELECT '1f2'::json; -- ERROR
ERROR: invalid input syntax for type json
LINE 1: SELECT '1f2'::json;
^
DETAIL: Token "1f2" is invalid.
CONTEXT: JSON data, line 1: 1f2
SELECT '0.x1'::json; -- ERROR
ERROR: invalid input syntax for type json
LINE 1: SELECT '0.x1'::json;
^
DETAIL: Token "0.x1" is invalid.
CONTEXT: JSON data, line 1: 0.x1
SELECT '1.3ex100'::json; -- ERROR
ERROR: invalid input syntax for type json
LINE 1: SELECT '1.3ex100'::json;
^
DETAIL: Token "1.3ex100" is invalid.
CONTEXT: JSON data, line 1: 1.3ex100
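-- Illustrative aside (hedged): numbers are likewise kept as literal text, so
-- exponent notation survives a round trip; only a cast to a numeric SQL type
-- normalizes the value. Expected behaviour on a stock build:
SELECT '1e100'::json::text;                -- should return the original text 1e100
SELECT ('1e100'::json #>> '{}')::numeric;  -- should expand to 1 followed by 100 zeroes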
-- Arrays.
SELECT '[]'::json; -- OK
json
------
[]
(1 row)
SELECT '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]'::json; -- OK
json
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]
(1 row)
SELECT '[1,2]'::json; -- OK
json
-------
[1,2]
(1 row)
SELECT '[1,2,]'::json; -- ERROR, trailing comma
ERROR: invalid input syntax for type json
LINE 1: SELECT '[1,2,]'::json;
^
DETAIL: Expected JSON value, but found "]".
CONTEXT: JSON data, line 1: [1,2,]
SELECT '[1,2'::json; -- ERROR, no closing bracket
ERROR: invalid input syntax for type json
LINE 1: SELECT '[1,2'::json;
^
DETAIL: The input string ended unexpectedly.
CONTEXT: JSON data, line 1: [1,2
SELECT '[1,[2]'::json; -- ERROR, no closing bracket
ERROR: invalid input syntax for type json
LINE 1: SELECT '[1,[2]'::json;
^
DETAIL: The input string ended unexpectedly.
CONTEXT: JSON data, line 1: [1,[2]
-- Objects.
SELECT '{}'::json; -- OK
json
------
{}
(1 row)
SELECT '{"abc"}'::json; -- ERROR, no value
ERROR: invalid input syntax for type json
LINE 1: SELECT '{"abc"}'::json;
^
DETAIL: Expected ":", but found "}".
CONTEXT: JSON data, line 1: {"abc"}
SELECT '{"abc":1}'::json; -- OK
json
-----------
{"abc":1}
(1 row)
SELECT '{1:"abc"}'::json; -- ERROR, keys must be strings
ERROR: invalid input syntax for type json
LINE 1: SELECT '{1:"abc"}'::json;
^
DETAIL: Expected string or "}", but found "1".
CONTEXT: JSON data, line 1: {1...
SELECT '{"abc",1}'::json; -- ERROR, wrong separator
ERROR: invalid input syntax for type json
LINE 1: SELECT '{"abc",1}'::json;
^
DETAIL: Expected ":", but found ",".
CONTEXT: JSON data, line 1: {"abc",...
SELECT '{"abc"=1}'::json; -- ERROR, totally wrong separator
ERROR: invalid input syntax for type json
LINE 1: SELECT '{"abc"=1}'::json;
^
DETAIL: Token "=" is invalid.
CONTEXT: JSON data, line 1: {"abc"=...
SELECT '{"abc"::1}'::json; -- ERROR, another wrong separator
ERROR: invalid input syntax for type json
LINE 1: SELECT '{"abc"::1}'::json;
^
DETAIL: Expected JSON value, but found ":".
CONTEXT: JSON data, line 1: {"abc"::...
SELECT '{"abc":1,"def":2,"ghi":[3,4],"hij":{"klm":5,"nop":[6]}}'::json; -- OK
json
---------------------------------------------------------
{"abc":1,"def":2,"ghi":[3,4],"hij":{"klm":5,"nop":[6]}}
(1 row)
SELECT '{"abc":1:2}'::json; -- ERROR, colon in wrong spot
ERROR: invalid input syntax for type json
LINE 1: SELECT '{"abc":1:2}'::json;
^
DETAIL: Expected "," or "}", but found ":".
CONTEXT: JSON data, line 1: {"abc":1:...
SELECT '{"abc":1,3}'::json; -- ERROR, no value
ERROR: invalid input syntax for type json
LINE 1: SELECT '{"abc":1,3}'::json;
^
DETAIL: Expected string, but found "3".
CONTEXT: JSON data, line 1: {"abc":1,3...
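-- Illustrative aside (hedged): because json stores input verbatim, duplicate
-- object keys are accepted and preserved, unlike jsonb, which keeps only the
-- last value for a given key:
SELECT '{"a":1,"a":2}'::json;   -- should be accepted and echoed as-is
SELECT '{"a":1,"a":2}'::jsonb;  -- jsonb should collapse this to {"a": 2}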
-- Miscellaneous stuff.
SELECT 'true'::json; -- OK
json
------
true
(1 row)
SELECT 'false'::json; -- OK
json
-------
false
(1 row)
SELECT 'null'::json; -- OK
json
------
null
(1 row)
SELECT ' true '::json; -- OK, even with extra whitespace
json
--------
true
(1 row)
SELECT 'true false'::json; -- ERROR, too many values
ERROR: invalid input syntax for type json
LINE 1: SELECT 'true false'::json;
^
DETAIL: Expected end of input, but found "false".
CONTEXT: JSON data, line 1: true false
SELECT 'true, false'::json; -- ERROR, too many values
ERROR: invalid input syntax for type json
LINE 1: SELECT 'true, false'::json;
^
DETAIL: Expected end of input, but found ",".
CONTEXT: JSON data, line 1: true,...
SELECT 'truf'::json; -- ERROR, not a keyword
ERROR: invalid input syntax for type json
LINE 1: SELECT 'truf'::json;
^
DETAIL: Token "truf" is invalid.
CONTEXT: JSON data, line 1: truf
SELECT 'trues'::json; -- ERROR, not a keyword
ERROR: invalid input syntax for type json
LINE 1: SELECT 'trues'::json;
^
DETAIL: Token "trues" is invalid.
CONTEXT: JSON data, line 1: trues
SELECT ''::json; -- ERROR, no value
ERROR: invalid input syntax for type json
LINE 1: SELECT ''::json;
^
DETAIL: The input string ended unexpectedly.
CONTEXT: JSON data, line 1:
SELECT ' '::json; -- ERROR, no value
ERROR: invalid input syntax for type json
LINE 1: SELECT ' '::json;
^
DETAIL: The input string ended unexpectedly.
CONTEXT: JSON data, line 1:
--constructors
-- array_to_json
SELECT array_to_json(array(select 1 as a));
array_to_json
---------------
[1]
(1 row)
SELECT array_to_json(array_agg(q),false) from (select x as b, x * 2 as c from generate_series(1,3) x) q;
array_to_json
---------------------------------------------
[{"b":1,"c":2},{"b":2,"c":4},{"b":3,"c":6}]
(1 row)
SELECT array_to_json(array_agg(q),true) from (select x as b, x * 2 as c from generate_series(1,3) x) q;
array_to_json
-----------------
[{"b":1,"c":2},+
{"b":2,"c":4},+
{"b":3,"c":6}]
(1 row)
SELECT array_to_json(array_agg(q),false)
  FROM ( SELECT $$a$$ || x AS b, y AS c,
         ARRAY[ROW(x.*,ARRAY[1,2,3]),
               ROW(y.*,ARRAY[4,5,6])] AS z
         FROM generate_series(1,2) x,
              generate_series(4,5) y) q;
array_to_json
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
[{"b":"a1","c":4,"z":[{"f1":1,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]},{"b":"a1","c":5,"z":[{"f1":1,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]},{"b":"a2","c":4,"z":[{"f1":2,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]},{"b":"a2","c":5,"z":[{"f1":2,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}]
(1 row)
SELECT array_to_json(array_agg(x),false) from generate_series(5,10) x;
array_to_json
----------------
[5,6,7,8,9,10]
(1 row)
SELECT array_to_json('{{1,5},{99,100}}'::int[]);
array_to_json
------------------
[[1,5],[99,100]]
(1 row)
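-- Illustrative aside (hedged): the generic to_json function should cover the
-- same ground for array inputs:
SELECT to_json('{{1,5},{99,100}}'::int[]);  -- should match array_to_json: [[1,5],[99,100]]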
-- row_to_json
SELECT row_to_json(row(1,'foo'));
row_to_json
---------------------
{"f1":1,"f2":"foo"}
(1 row)
SELECT row_to_json(q)
  FROM (SELECT $$a$$ || x AS b,
               y AS c,
               ARRAY[ROW(x.*,ARRAY[1,2,3]),
                     ROW(y.*,ARRAY[4,5,6])] AS z
        FROM generate_series(1,2) x,
             generate_series(4,5) y) q;
row_to_json
--------------------------------------------------------------------
{"b":"a1","c":4,"z":[{"f1":1,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]}
{"b":"a1","c":5,"z":[{"f1":1,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}
{"b":"a2","c":4,"z":[{"f1":2,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]}
{"b":"a2","c":5,"z":[{"f1":2,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}
(4 rows)
SELECT row_to_json(q,true)
  FROM (SELECT $$a$$ || x AS b,
               y AS c,
               ARRAY[ROW(x.*,ARRAY[1,2,3]),
                     ROW(y.*,ARRAY[4,5,6])] AS z
        FROM generate_series(1,2) x,
             generate_series(4,5) y) q;
row_to_json
-----------------------------------------------------
{"b":"a1", +
"c":4, +
"z":[{"f1":1,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]}
{"b":"a1", +
"c":5, +
"z":[{"f1":1,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}
{"b":"a2", +
"c":4, +
"z":[{"f1":2,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]}
{"b":"a2", +
"c":5, +
"z":[{"f1":2,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}
(4 rows)
CREATE TEMP TABLE rows AS
  SELECT x, 'txt' || x as y
  FROM generate_series(1,3) AS x;
SELECT row_to_json(q,true)
FROM rows q;
row_to_json
--------------
{"x":1, +
"y":"txt1"}
{"x":2, +
"y":"txt2"}
{"x":3, +
"y":"txt3"}
(3 rows)
SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);
row_to_json
-----------------------
{"f1":[5,6,7,8,9,10]}
(1 row)
-- to_json, timestamps
select to_json(timestamp '2014-05-28 12:22:35.614298');
to_json
------------------------------
"2014-05-28T12:22:35.614298"
(1 row)
BEGIN;
SET LOCAL TIME ZONE 10.5;
select to_json(timestamptz '2014-05-28 12:22:35.614298-04');
to_json
------------------------------------
"2014-05-29T02:52:35.614298+10:30"
(1 row)
SET LOCAL TIME ZONE -8;
select to_json(timestamptz '2014-05-28 12:22:35.614298-04');
to_json
------------------------------------
"2014-05-28T08:22:35.614298-08:00"
(1 row)
COMMIT;
select to_json(date '2014-05-28');
to_json
--------------
"2014-05-28"
(1 row)
select to_json(date 'Infinity');
to_json
------------
"infinity"
(1 row)
select to_json(timestamp 'Infinity');
to_json
------------
"infinity"
(1 row)
select to_json(timestamptz 'Infinity');
to_json
------------
"infinity"
(1 row)
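-- Illustrative aside (hedged): to_json renders datetimes in ISO 8601 form
-- regardless of the DateStyle setting, so the results above should be stable
-- under a different output style:
BEGIN;
SET LOCAL DateStyle = 'German, DMY';
select to_json(date '2014-05-28');  -- should still yield "2014-05-28"
COMMIT;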
--json_agg
SELECT json_agg(q)
  FROM ( SELECT $$a$$ || x AS b, y AS c,
         ARRAY[ROW(x.*,ARRAY[1,2,3]),
               ROW(y.*,ARRAY[4,5,6])] AS z
         FROM generate_series(1,2) x,
              generate_series(4,5) y) q;
json_agg
-----------------------------------------------------------------------
[{"b":"a1","c":4,"z":[{"f1":1,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]}, +
{"b":"a1","c":5,"z":[{"f1":1,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}, +
{"b":"a2","c":4,"z":[{"f1":2,"f2":[1,2,3]},{"f1":4,"f2":[4,5,6]}]}, +
{"b":"a2","c":5,"z":[{"f1":2,"f2":[1,2,3]},{"f1":5,"f2":[4,5,6]}]}]
(1 row)
SELECT json_agg(q ORDER BY x, y)
FROM rows q;
json_agg
-----------------------
[{"x":1,"y":"txt1"}, +
{"x":2,"y":"txt2"}, +
{"x":3,"y":"txt3"}]
(1 row)
UPDATE rows SET x = NULL WHERE x = 1;
SELECT json_agg(q ORDER BY x NULLS FIRST, y)
FROM rows q;
json_agg
--------------------------
[{"x":null,"y":"txt1"}, +
{"x":2,"y":"txt2"}, +
{"x":3,"y":"txt3"}]
(1 row)
-- non-numeric output
SELECT row_to_json(q)
FROM (SELECT 'NaN'::float8 AS "float8field") q;
row_to_json
-----------------------
{"float8field":"NaN"}
(1 row)
SELECT row_to_json(q)
FROM (SELECT 'Infinity'::float8 AS "float8field") q;
row_to_json
----------------------------
{"float8field":"Infinity"}
(1 row)
SELECT row_to_json(q)
FROM (SELECT '-Infinity'::float8 AS "float8field") q;
row_to_json
-----------------------------
{"float8field":"-Infinity"}
(1 row)
-- json input
SELECT row_to_json(q)
FROM (SELECT '{"a":1,"b": [2,3,4,"d","e","f"],"c":{"p":1,"q":2}}'::json AS "jsonfield") q;
row_to_json
------------------------------------------------------------------
{"jsonfield":{"a":1,"b": [2,3,4,"d","e","f"],"c":{"p":1,"q":2}}}
(1 row)
-- json extraction functions
CREATE TEMP TABLE test_json (
       json_type text,
       test_json json
);
INSERT INTO test_json VALUES
('scalar','"a scalar"'),
('array','["zero", "one","two",null,"four","five", [1,2,3],{"f1":9}]'),
('object','{"field1":"val1","field2":"val2","field3":null, "field4": 4, "field5": [1,2,3], "field6": {"f1":9}}');
SELECT test_json -> 'x'
FROM test_json
WHERE json_type = 'scalar';
?column?
----------
(1 row)
SELECT test_json -> 'x'
FROM test_json
WHERE json_type = 'array';
?column?
----------
(1 row)
SELECT test_json -> 'x'
FROM test_json
WHERE json_type = 'object';
?column?
----------
(1 row)
SELECT test_json->'field2'
FROM test_json
WHERE json_type = 'object';
?column?
----------
"val2"
(1 row)
SELECT test_json->>'field2'
FROM test_json
WHERE json_type = 'object';
?column?
----------
val2
(1 row)
SELECT test_json -> 2
FROM test_json
WHERE json_type = 'scalar';
?column?
----------
(1 row)
SELECT test_json -> 2
FROM test_json
WHERE json_type = 'array';
?column?
----------
"two"
(1 row)
SELECT test_json -> -1
FROM test_json
WHERE json_type = 'array';
?column?
----------
{"f1":9}
(1 row)
SELECT test_json -> 2
FROM test_json
WHERE json_type = 'object';
?column?
----------
(1 row)
SELECT test_json->>2
FROM test_json
WHERE json_type = 'array';
?column?
----------
two
(1 row)
SELECT test_json ->> 6 FROM test_json WHERE json_type = 'array';
?column?
----------
[1,2,3]
(1 row)
SELECT test_json ->> 7 FROM test_json WHERE json_type = 'array';
?column?
----------
{"f1":9}
(1 row)
SELECT test_json ->> 'field4' FROM test_json WHERE json_type = 'object';
?column?
----------
4
(1 row)
SELECT test_json ->> 'field5' FROM test_json WHERE json_type = 'object';
?column?
----------
[1,2,3]
(1 row)
SELECT test_json ->> 'field6' FROM test_json WHERE json_type = 'object';
?column?
----------
{"f1":9}
(1 row)
SELECT json_object_keys(test_json)
FROM test_json
WHERE json_type = 'scalar';
ERROR: cannot call json_object_keys on a scalar
SELECT json_object_keys(test_json)
FROM test_json
WHERE json_type = 'array';
ERROR: cannot call json_object_keys on an array
SELECT json_object_keys(test_json)
FROM test_json
WHERE json_type = 'object';
json_object_keys
------------------
field1
field2
field3
field4
field5
field6
(6 rows)
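-- Illustrative aside (hedged): json_object_keys is a set-returning function,
-- so it can be applied laterally, one output row per key; filtering inside
-- the subquery keeps it away from the scalar and array rows, which would
-- raise the errors shown above:
select t.k
from (select test_json as js from test_json where json_type = 'object') s,
     json_object_keys(s.js) as t(k);  -- should list field1 .. field6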
-- test extending object_keys resultset - initial resultset size is 256
select count(*) from
  (select json_object_keys(json_object(array_agg(g)))
   from (select unnest(array['f'||n,n::text])as g
         from generate_series(1,300) as n) x ) y;
count
-------
300
(1 row)
-- nulls
select (test_json->'field3') is null as expect_false
from test_json
where json_type = 'object';
expect_false
--------------
f
(1 row)
select (test_json->>'field3') is null as expect_true
from test_json
where json_type = 'object';
expect_true
-------------
t
(1 row)
select (test_json->3) is null as expect_false
from test_json
where json_type = 'array';
expect_false
--------------
f
(1 row)
select (test_json->>3) is null as expect_true
from test_json
where json_type = 'array';
expect_true
-------------
t
(1 row)
-- corner cases
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json -> null::text;
?column?
----------
(1 row)
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json -> null::int;
?column?
----------
(1 row)
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json -> 1;
?column?
----------
(1 row)
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json -> -1;
?column?
----------
(1 row)
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json -> 'z';
?column?
----------
(1 row)
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json -> '';
?column?
----------
(1 row)
select '[{"b": "c"}, {"b": "cc"}]'::json -> 1;
?column?
-------------
{"b": "cc"}
(1 row)
select '[{"b": "c"}, {"b": "cc"}]'::json -> 3;
?column?
----------
(1 row)
select '[{"b": "c"}, {"b": "cc"}]'::json -> 'z';
?column?
----------
(1 row)
select '{"a": "c", "b": null}'::json -> 'b';
?column?
----------
null
(1 row)
select '"foo"'::json -> 1;
?column?
----------
(1 row)
select '"foo"'::json -> 'z';
?column?
----------
(1 row)
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json ->> null::text;
?column?
----------
(1 row)
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json ->> null::int;
?column?
----------
(1 row)
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json ->> 1;
?column?
----------
(1 row)
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json ->> 'z';
?column?
----------
(1 row)
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json ->> '';
?column?
----------
(1 row)
select '[{"b": "c"}, {"b": "cc"}]'::json ->> 1;
?column?
-------------
{"b": "cc"}
(1 row)
select '[{"b": "c"}, {"b": "cc"}]'::json ->> 3;
?column?
----------
(1 row)
select '[{"b": "c"}, {"b": "cc"}]'::json ->> 'z';
?column?
----------
(1 row)
select '{"a": "c", "b": null}'::json ->> 'b';
?column?
----------
(1 row)
select '"foo"'::json ->> 1;
?column?
----------
(1 row)
select '"foo"'::json ->> 'z';
?column?
----------
(1 row)
-- array length
SELECT json_array_length('[1,2,3,{"f1":1,"f2":[5,6]},4]');
json_array_length
-------------------
5
(1 row)
SELECT json_array_length('[]');
json_array_length
-------------------
0
(1 row)
SELECT json_array_length('{"f1":1,"f2":[5,6]}');
ERROR: cannot get array length of a non-array
SELECT json_array_length('4');
ERROR: cannot get array length of a scalar
-- each
select json_each('{"f1":[1,2,3],"f2":{"f3":1},"f4":null}');
json_each
-------------------
(f1,"[1,2,3]")
(f2,"{""f3"":1}")
(f4,null)
(3 rows)
select * from json_each('{"f1":[1,2,3],"f2":{"f3":1},"f4":null,"f5":99,"f6":"stringy"}') q;
key | value
-----+-----------
f1 | [1,2,3]
f2 | {"f3":1}
f4 | null
f5 | 99
f6 | "stringy"
(5 rows)
select json_each_text('{"f1":[1,2,3],"f2":{"f3":1},"f4":null,"f5":"null"}');
json_each_text
-------------------
(f1,"[1,2,3]")
(f2,"{""f3"":1}")
(f4,)
(f5,null)
(4 rows)
select * from json_each_text('{"f1":[1,2,3],"f2":{"f3":1},"f4":null,"f5":99,"f6":"stringy"}') q;
key | value
-----+----------
f1 | [1,2,3]
f2 | {"f3":1}
f4 |
f5 | 99
f6 | stringy
(5 rows)
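-- Illustrative aside (hedged): since json_each returns (key text, value json)
-- pairs, they can be filtered with ordinary SQL predicates:
select key from json_each('{"f1":[1,2,3],"f2":{"f3":1}}')
where json_typeof(value) = 'array';  -- should return just: f1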
-- extract_path, extract_path_as_text
select json_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
json_extract_path
-------------------
"stringy"
(1 row)
select json_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
json_extract_path
-------------------
{"f3":1}
(1 row)
select json_extract_path('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',0::text);
json_extract_path
-------------------
"f3"
(1 row)
select json_extract_path('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',1::text);
json_extract_path
-------------------
1
(1 row)
select json_extract_path_text('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
json_extract_path_text
------------------------
stringy
(1 row)
select json_extract_path_text('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
json_extract_path_text
------------------------
{"f3":1}
(1 row)
select json_extract_path_text('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',0::text);
json_extract_path_text
------------------------
f3
(1 row)
select json_extract_path_text('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',1::text);
json_extract_path_text
------------------------
1
(1 row)
-- extract_path nulls
select json_extract_path('{"f2":{"f3":1},"f4":{"f5":null,"f6":"stringy"}}','f4','f5') is null as expect_false;
expect_false
--------------
f
(1 row)
select json_extract_path_text('{"f2":{"f3":1},"f4":{"f5":null,"f6":"stringy"}}','f4','f5') is null as expect_true;
expect_true
-------------
t
(1 row)
select json_extract_path('{"f2":{"f3":1},"f4":[0,1,2,null]}','f4','3') is null as expect_false;
expect_false
--------------
f
(1 row)
select json_extract_path_text('{"f2":{"f3":1},"f4":[0,1,2,null]}','f4','3') is null as expect_true;
expect_true
-------------
t
(1 row)
-- extract_path operators
select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::json#>array['f4','f6'];
?column?
-----------
"stringy"
(1 row)
select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::json#>array['f2'];
?column?
----------
{"f3":1}
(1 row)
select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::json#>array['f2','0'];
?column?
----------
"f3"
(1 row)
select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::json#>array['f2','1'];
?column?
----------
1
(1 row)
select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::json#>>array['f4','f6'];
?column?
----------
stringy
(1 row)
select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::json#>>array['f2'];
?column?
----------
{"f3":1}
(1 row)
select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::json#>>array['f2','0'];
?column?
----------
f3
(1 row)
select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::json#>>array['f2','1'];
?column?
----------
1
(1 row)
-- corner cases for same
select '{"a": {"b":{"c": "foo"}}}'::json #> '{}';
?column?
---------------------------
{"a": {"b":{"c": "foo"}}}
(1 row)
select '[1,2,3]'::json #> '{}';
?column?
----------
[1,2,3]
(1 row)
select '"foo"'::json #> '{}';
?column?
----------
"foo"
(1 row)
select '42'::json #> '{}';
?column?
----------
42
(1 row)
select 'null'::json #> '{}';
?column?
----------
null
(1 row)
select '{"a": {"b":{"c": "foo"}}}'::json #> array['a'];
?column?
--------------------
{"b":{"c": "foo"}}
(1 row)
select '{"a": {"b":{"c": "foo"}}}'::json #> array['a', null];
?column?
----------
(1 row)
select '{"a": {"b":{"c": "foo"}}}'::json #> array['a', ''];
?column?
----------
(1 row)
select '{"a": {"b":{"c": "foo"}}}'::json #> array['a','b'];
?column?
--------------
{"c": "foo"}
(1 row)
select '{"a": {"b":{"c": "foo"}}}'::json #> array['a','b','c'];
?column?
----------
"foo"
(1 row)
select '{"a": {"b":{"c": "foo"}}}'::json #> array['a','b','c','d'];
?column?
----------
(1 row)
select '{"a": {"b":{"c": "foo"}}}'::json #> array['a','z','c'];
?column?
----------
(1 row)
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json #> array['a','1','b'];
?column?
----------
"cc"
(1 row)
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json #> array['a','z','b'];
?column?
----------
(1 row)
select '[{"b": "c"}, {"b": "cc"}]'::json #> array['1','b'];
?column?
----------
"cc"
(1 row)
select '[{"b": "c"}, {"b": "cc"}]'::json #> array['z','b'];
?column?
----------
(1 row)
select '[{"b": "c"}, {"b": null}]'::json #> array['1','b'];
?column?
----------
null
(1 row)
select '"foo"'::json #> array['z'];
?column?
----------
(1 row)
select '42'::json #> array['f2'];
?column?
----------
(1 row)
select '42'::json #> array['0'];
?column?
----------
(1 row)
select '{"a": {"b":{"c": "foo"}}}'::json #>> '{}';
?column?
---------------------------
{"a": {"b":{"c": "foo"}}}
(1 row)
select '[1,2,3]'::json #>> '{}';
?column?
----------
[1,2,3]
(1 row)
select '"foo"'::json #>> '{}';
?column?
----------
foo
(1 row)
select '42'::json #>> '{}';
?column?
----------
42
(1 row)
select 'null'::json #>> '{}';
?column?
----------
(1 row)
select '{"a": {"b":{"c": "foo"}}}'::json #>> array['a'];
?column?
--------------------
{"b":{"c": "foo"}}
(1 row)
select '{"a": {"b":{"c": "foo"}}}'::json #>> array['a', null];
?column?
----------
(1 row)
select '{"a": {"b":{"c": "foo"}}}'::json #>> array['a', ''];
?column?
----------
(1 row)
select '{"a": {"b":{"c": "foo"}}}'::json #>> array['a','b'];
?column?
--------------
{"c": "foo"}
(1 row)
select '{"a": {"b":{"c": "foo"}}}'::json #>> array['a','b','c'];
?column?
----------
foo
(1 row)
select '{"a": {"b":{"c": "foo"}}}'::json #>> array['a','b','c','d'];
?column?
----------
(1 row)
select '{"a": {"b":{"c": "foo"}}}'::json #>> array['a','z','c'];
?column?
----------
(1 row)
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json #>> array['a','1','b'];
?column?
----------
cc
(1 row)
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json #>> array['a','z','b'];
?column?
----------
(1 row)
select '[{"b": "c"}, {"b": "cc"}]'::json #>> array['1','b'];
?column?
----------
cc
(1 row)
select '[{"b": "c"}, {"b": "cc"}]'::json #>> array['z','b'];
?column?
----------
(1 row)
select '[{"b": "c"}, {"b": null}]'::json #>> array['1','b'];
?column?
----------
(1 row)
select '"foo"'::json #>> array['z'];
?column?
----------
(1 row)
select '42'::json #>> array['f2'];
?column?
----------
(1 row)
select '42'::json #>> array['0'];
?column?
----------
(1 row)
-- array_elements
select json_array_elements('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8,9]},false,"stringy"]');
json_array_elements
-----------------------
1
true
[1,[2,3]]
null
{"f1":1,"f2":[7,8,9]}
false
"stringy"
(7 rows)
select * from json_array_elements('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8,9]},false,"stringy"]') q;
value
-----------------------
1
true
[1,[2,3]]
null
{"f1":1,"f2":[7,8,9]}
false
"stringy"
(7 rows)
select json_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8,9]},false,"stringy"]');
json_array_elements_text
--------------------------
1
true
[1,[2,3]]

{"f1":1,"f2":[7,8,9]}
false
stringy
(7 rows)
select * from json_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8,9]},false,"stringy"]') q;
value
-----------------------
1
true
[1,[2,3]]

{"f1":1,"f2":[7,8,9]}
false
stringy
(7 rows)
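-- Illustrative aside (hedged): json_array_elements_text is the usual route
-- from a json array to typed SQL values, since its output is plain text:
select sum(elem::int) from json_array_elements_text('[1,2,3]') elem;
-- should total 6; elem is text, so ::int is an ordinary SQL cast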
-- populate_record
create type jpop as (a text, b int, c timestamp);
select * from json_populate_record(null::jpop,'{"a":"blurfl","x":43.2}') q;
a | b | c
--------+---+---
blurfl | |
(1 row)
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":"blurfl","x":43.2}') q;
a | b | c
--------+---+--------------------------
blurfl | 3 | Mon Dec 31 15:30:56 2012
(1 row)
select * from json_populate_record(null::jpop,'{"a":"blurfl","x":43.2}') q;
a | b | c
--------+---+---
blurfl | |
(1 row)
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":"blurfl","x":43.2}') q;
a | b | c
--------+---+--------------------------
blurfl | 3 | Mon Dec 31 15:30:56 2012
(1 row)
select * from json_populate_record(null::jpop,'{"a":[100,200,false],"x":43.2}') q;
a | b | c
-----------------+---+---
[100,200,false] | |
(1 row)
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":[100,200,false],"x":43.2}') q;
a | b | c
-----------------+---+--------------------------
[100,200,false] | 3 | Mon Dec 31 15:30:56 2012
(1 row)
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"c":[100,200,false],"x":43.2}') q;
ERROR: invalid input syntax for type timestamp: "[100,200,false]"
-- populate_recordset
select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
--------+---+--------------------------
blurfl | |
| 3 | Fri Jan 20 10:42:53 2012
(2 rows)
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
--------+----+--------------------------
blurfl | 99 |
def | 3 | Fri Jan 20 10:42:53 2012
(2 rows)
select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
--------+---+--------------------------
blurfl | |
| 3 | Fri Jan 20 10:42:53 2012
(2 rows)
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
--------+----+--------------------------
blurfl | 99 |
def | 3 | Fri Jan 20 10:42:53 2012
(2 rows)
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
---------------+----+--------------------------
[100,200,300] | 99 |
{"z":true} | 3 | Fri Jan 20 10:42:53 2012
(2 rows)
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"c":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;
ERROR: invalid input syntax for type timestamp: "[100,200,300]"
create type jpop2 as (a int, b json, c int, d int);
select * from json_populate_recordset(null::jpop2, '[{"a":2,"c":3,"b":{"z":4},"d":6}]') q;
a | b | c | d
---+---------+---+---
2 | {"z":4} | 3 | 6
(1 row)
select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
--------+---+--------------------------
blurfl | |
| 3 | Fri Jan 20 10:42:53 2012
(2 rows)
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
--------+----+--------------------------
blurfl | 99 |
def | 3 | Fri Jan 20 10:42:53 2012
(2 rows)
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;
a | b | c
---------------+----+--------------------------
[100,200,300] | 99 |
{"z":true} | 3 | Fri Jan 20 10:42:53 2012
(2 rows)
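-- Illustrative aside (hedged): json_populate_recordset can also be applied
-- laterally to unpack a json value carried in another FROM item:
select p.*
from (values ('[{"a":"x"},{"b":5}]'::json)) v(js),
     json_populate_recordset(null::jpop, v.js) p;
-- should produce two jpop rows: a = 'x' in the first, b = 5 in the second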
-- handling of unicode surrogate pairs
select json '{ "a": "\ud83d\ude04\ud83d\udc36" }' -> 'a' as correct_in_utf8;
correct_in_utf8
----------------------------
"\ud83d\ude04\ud83d\udc36"
(1 row)
select json '{ "a": "\ud83d\ud83d" }' -> 'a'; -- 2 high surrogates in a row
ERROR: invalid input syntax for type json
DETAIL: Unicode high surrogate must not follow a high surrogate.
CONTEXT: JSON data, line 1: { "a":...
select json '{ "a": "\ude04\ud83d" }' -> 'a'; -- surrogates in wrong order
ERROR: invalid input syntax for type json
DETAIL: Unicode low surrogate must follow a high surrogate.
CONTEXT: JSON data, line 1: { "a":...
select json '{ "a": "\ud83dX" }' -> 'a'; -- orphan high surrogate
ERROR: invalid input syntax for type json
DETAIL: Unicode low surrogate must follow a high surrogate.
CONTEXT: JSON data, line 1: { "a":...
select json '{ "a": "\ude04X" }' -> 'a'; -- orphan low surrogate
ERROR: invalid input syntax for type json
DETAIL: Unicode low surrogate must follow a high surrogate.
CONTEXT: JSON data, line 1: { "a":...
--handling of simple unicode escapes
select json '{ "a": "the Copyright \u00a9 sign" }' as correct_in_utf8;
correct_in_utf8
---------------------------------------
{ "a": "the Copyright \u00a9 sign" }
(1 row)
select json '{ "a": "dollar \u0024 character" }' as correct_everywhere;
correct_everywhere
-------------------------------------
{ "a": "dollar \u0024 character" }
(1 row)
select json '{ "a": "dollar \\u0024 character" }' as not_an_escape;
not_an_escape
--------------------------------------
{ "a": "dollar \\u0024 character" }
(1 row)
select json '{ "a": "null \u0000 escape" }' as not_unescaped;
not_unescaped
--------------------------------
{ "a": "null \u0000 escape" }
(1 row)
select json '{ "a": "null \\u0000 escape" }' as not_an_escape;
not_an_escape
---------------------------------
{ "a": "null \\u0000 escape" }
(1 row)
select json '{ "a": "the Copyright \u00a9 sign" }' ->> 'a' as correct_in_utf8;
correct_in_utf8
----------------------
the Copyright © sign
(1 row)
select json '{ "a": "dollar \u0024 character" }' ->> 'a' as correct_everywhere;
correct_everywhere
--------------------
dollar $ character
(1 row)
select json '{ "a": "dollar \\u0024 character" }' ->> 'a' as not_an_escape;
not_an_escape
-------------------------
dollar \u0024 character
(1 row)
select json '{ "a": "null \u0000 escape" }' ->> 'a' as fails;
ERROR: unsupported Unicode escape sequence
DETAIL: \u0000 cannot be converted to text.
CONTEXT: JSON data, line 1: { "a":...
select json '{ "a": "null \\u0000 escape" }' ->> 'a' as not_an_escape;
not_an_escape
--------------------
null \u0000 escape
(1 row)
--json_typeof() function
select value, json_typeof(value)
from (values (json '123.4'),
(json '-1'),
(json '"foo"'),
(json 'true'),
(json 'false'),
(json 'null'),
(json '[1, 2, 3]'),
(json '[]'),
(json '{"x":"foo", "y":123}'),
(json '{}'),
(NULL::json))
as data(value);
value | json_typeof
----------------------+-------------
123.4 | number
-1 | number
"foo" | string
true | boolean
false | boolean
null | null
[1, 2, 3] | array
[] | array
{"x":"foo", "y":123} | object
{} | object
|
(11 rows)
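-- Illustrative aside (hedged): json_typeof is useful for guarding calls that
-- reject scalars, such as json_array_length above:
select json_typeof(js) as type,
       case when json_typeof(js) = 'array' then json_array_length(js) end as len
from (values (json '[1,2,3]'), (json '"scalar"')) as t(js);
-- should yield len = 3 for the array and NULL for the scalar, with no error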
-- json_build_array, json_build_object, json_object_agg
SELECT json_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
json_build_array
-----------------------------------------------------------------------
["a", 1, "b", 1.2, "c", true, "d", null, "e", {"x": 3, "y": [1,2,3]}]
(1 row)
SELECT json_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');
json_build_object
----------------------------------------------------------------------------
{"a" : 1, "b" : 1.2, "c" : true, "d" : null, "e" : {"x": 3, "y": [1,2,3]}}
(1 row)
SELECT json_build_object(
       'a', json_build_object('b',false,'c',99),
       'd', json_build_object('e',array[9,8,7]::int[],
                              'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));
json_build_object
-------------------------------------------------------------------------------------------------
{"a" : {"b" : false, "c" : 99}, "d" : {"e" : [9,8,7], "f" : {"relkind":"r","name":"pg_class"}}}
(1 row)
-- empty objects/arrays
SELECT json_build_array();
json_build_array
------------------
[]
(1 row)
SELECT json_build_object();
json_build_object
-------------------
{}
(1 row)
-- make sure keys are quoted
SELECT json_build_object(1,2);
json_build_object
-------------------
{"1" : 2}
(1 row)
-- keys must be scalar and not null
SELECT json_build_object(null,2);
ERROR: argument 1 cannot be null
HINT: Object keys should be text.
SELECT json_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
ERROR: key value must be scalar, not array, composite, or json
SELECT json_build_object(json '{"a":1,"b":2}', 3);
ERROR: key value must be scalar, not array, composite, or json
SELECT json_build_object('{1,2,3}'::int[], 3);
ERROR: key value must be scalar, not array, composite, or json
CREATE TEMP TABLE foo (serial_num int, name text, type text);
INSERT INTO foo VALUES (847001,'t15','GE1043');
INSERT INTO foo VALUES (847002,'t16','GE1043');
INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');
SELECT json_build_object('turbines',json_object_agg(serial_num,json_build_object('name',name,'type',type)))
FROM foo;
json_build_object
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------
{"turbines" : { "847001" : {"name" : "t15", "type" : "GE1043"}, "847002" : {"name" : "t16", "type" : "GE1043"}, "847003" : {"name" : "sub-alpha", "type" : "GESS90"} }}
(1 row)
SELECT json_object_agg(name, type) FROM foo;
json_object_agg
----------------------------------------------------------------
{ "t15" : "GE1043", "t16" : "GE1043", "sub-alpha" : "GESS90" }
(1 row)
INSERT INTO foo VALUES (999999, NULL, 'bar');
SELECT json_object_agg(name, type) FROM foo;
ERROR: field name must not be null
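-- Illustrative aside (hedged): the null-key failure can be sidestepped with
-- an aggregate FILTER clause (available since PostgreSQL 9.4):
SELECT json_object_agg(name, type) FILTER (WHERE name IS NOT NULL) FROM foo;
-- should reproduce the three-pair object shown above, skipping the null row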
-- json_object
-- one dimension
SELECT json_object('{a,1,b,2,3,NULL,"d e f","a b c"}');
json_object
-------------------------------------------------------
{"a" : "1", "b" : "2", "3" : null, "d e f" : "a b c"}
(1 row)
-- same but with two dimensions
SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
json_object
-------------------------------------------------------
{"a" : "1", "b" : "2", "3" : null, "d e f" : "a b c"}
(1 row)
-- odd number error
SELECT json_object('{a,b,c}');
ERROR: array must have even number of elements
-- one column error
SELECT json_object('{{a},{b}}');
ERROR: array must have two columns
-- too many columns error
SELECT json_object('{{a,b,c},{b,c,d}}');
ERROR: array must have two columns
-- too many dimensions error
SELECT json_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');
ERROR: wrong number of array subscripts
--two argument form of json_object
select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');
json_object
------------------------------------------------------
{"a" : "1", "b" : "2", "c" : "3", "d e f" : "a b c"}
(1 row)
-- too many dimensions
SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');
ERROR: wrong number of array subscripts
-- mismatched dimensions
select json_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
ERROR: mismatched array dimensions
select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');
ERROR: mismatched array dimensions
-- null key error
select json_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');
ERROR: null value not allowed for object key
-- empty key is allowed
select json_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');
json_object
-----------------------------------------------------
{"a" : "1", "b" : "2", "" : "3", "d e f" : "a b c"}
(1 row)
-- json_to_record and json_to_recordset
select * from json_to_record('{"a":1,"b":"foo","c":"bar"}')
    as x(a int, b text, d text);
a | b | d
---+-----+---
1 | foo |
(1 row)
select * from json_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar","c":true}]')
    as x(a int, b text, c boolean);
a | b | c
---+-----+---
1 | foo |
2 | bar | t
(2 rows)
select * from json_to_recordset('[{"a":1,"b":{"d":"foo"},"c":true},{"a":2,"c":false,"b":{"d":"bar"}}]')
    as x(a int, b json, c boolean);
a | b | c
---+-------------+---
1 | {"d":"foo"} | t
2 | {"d":"bar"} | f
(2 rows)
-- json_strip_nulls
select json_strip_nulls(null);
json_strip_nulls
------------------
(1 row)
select json_strip_nulls('1');
json_strip_nulls
------------------
1
(1 row)
select json_strip_nulls('"a string"');
json_strip_nulls
------------------
"a string"
(1 row)
select json_strip_nulls('null');
json_strip_nulls
------------------
null
(1 row)
select json_strip_nulls('[1,2,null,3,4]');
json_strip_nulls
------------------
[1,2,null,3,4]
(1 row)
select json_strip_nulls('{"a":1,"b":null,"c":[2,null,3],"d":{"e":4,"f":null}}');
json_strip_nulls
------------------------------------
{"a":1,"c":[2,null,3],"d":{"e":4}}
(1 row)
select json_strip_nulls('[1,{"a":1,"b":null,"c":2},3]');
json_strip_nulls
---------------------
[1,{"a":1,"c":2},3]
(1 row)
-- an empty object is not null and should not be stripped
select json_strip_nulls('{"a": {"b": null, "c": null}, "d": {} }');
json_strip_nulls
------------------
{"a":{},"d":{}}
(1 row)
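-- Illustrative aside (hedged): since json_strip_nulls prunes only null-valued
-- object fields, it pairs naturally with row_to_json to drop empty columns:
select json_strip_nulls(row_to_json(q)) from (select 1 as a, null::int as b) q;
-- should yield {"a":1}, with the null column removed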