-- Strings.
SELECT '""'::json;                   -- OK.
SELECT $$''$$::json;                 -- ERROR, single quotes are not allowed
SELECT '"abc"'::json;                -- OK
SELECT '"abc'::json;                 -- ERROR, quotes not closed
SELECT '"abc
def"'::json;                         -- ERROR, unescaped newline in string constant
SELECT '"\n\"\\"'::json;             -- OK, legal escapes
SELECT '"\v"'::json;                 -- ERROR, not a valid JSON escape
SELECT '"\u"'::json;                 -- ERROR, incomplete escape
SELECT '"\u00"'::json;               -- ERROR, incomplete escape
SELECT '"\u000g"'::json;             -- ERROR, g is not a hex digit
SELECT '"\u0000"'::json;             -- OK, legal escape
SELECT '"\uaBcD"'::json;             -- OK, uppercase and lower case both OK
-- Numbers.
SELECT '1'::json;                    -- OK
SELECT '0'::json;                    -- OK
SELECT '01'::json;                   -- ERROR, not valid according to JSON spec
SELECT '0.1'::json;                  -- OK
SELECT '9223372036854775808'::json;  -- OK, even though it's too large for int8
SELECT '1e100'::json;                -- OK
SELECT '1.3e100'::json;              -- OK
SELECT '1f2'::json;                  -- ERROR
SELECT '0.x1'::json;                 -- ERROR
SELECT '1.3ex100'::json;             -- ERROR
-- Arrays.
SELECT '[]'::json;                   -- OK
SELECT '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]'::json;  -- OK
SELECT '[1,2]'::json;                -- OK
SELECT '[1,2,]'::json;               -- ERROR, trailing comma
SELECT '[1,2'::json;                 -- ERROR, no closing bracket
SELECT '[1,[2]'::json;               -- ERROR, no closing bracket
-- Objects.
SELECT '{}'::json;                   -- OK
SELECT '{"abc"}'::json;              -- ERROR, no value
SELECT '{"abc":1}'::json;            -- OK
SELECT '{1:"abc"}'::json;            -- ERROR, keys must be strings
SELECT '{"abc",1}'::json;            -- ERROR, wrong separator
SELECT '{"abc"=1}'::json;            -- ERROR, totally wrong separator
SELECT '{"abc"::1}'::json;           -- ERROR, another wrong separator
SELECT '{"abc":1,"def":2,"ghi":[3,4],"hij":{"klm":5,"nop":[6]}}'::json; -- OK
SELECT '{"abc":1:2}'::json;          -- ERROR, colon in wrong spot
SELECT '{"abc":1,3}'::json;          -- ERROR, no value
-- Miscellaneous stuff.
SELECT 'true'::json;                 -- OK
SELECT 'false'::json;                -- OK
SELECT 'null'::json;                 -- OK
SELECT ' true '::json;               -- OK, even with extra whitespace
SELECT 'true false'::json;           -- ERROR, too many values
SELECT 'true, false'::json;          -- ERROR, too many values
SELECT 'truf'::json;                 -- ERROR, not a keyword
SELECT 'trues'::json;                -- ERROR, not a keyword
SELECT ''::json;                     -- ERROR, no value
SELECT '   '::json;                  -- ERROR, no value
-- constructors
-- array_to_json
SELECT array_to_json(array(select 1 as a));
SELECT array_to_json(array_agg(q),false) from (select x as b, x * 2 as c from generate_series(1,3) x) q;
SELECT array_to_json(array_agg(q),true) from (select x as b, x * 2 as c from generate_series(1,3) x) q;
SELECT array_to_json(array_agg(q),false)
  FROM ( SELECT $$a$$ || x AS b, y AS c,
               ARRAY[ROW(x.*,ARRAY[1,2,3]),
               ROW(y.*,ARRAY[4,5,6])] AS z
         FROM generate_series(1,2) x,
              generate_series(4,5) y) q;
SELECT array_to_json(array_agg(x),false) from generate_series(5,10) x;
SELECT array_to_json('{{1,5},{99,100}}'::int[]);
-- row_to_json
SELECT row_to_json(row(1,'foo'));
SELECT row_to_json(q)
FROM (SELECT $$a$$ || x AS b,
         y AS c,
         ARRAY[ROW(x.*,ARRAY[1,2,3]),
               ROW(y.*,ARRAY[4,5,6])] AS z
      FROM generate_series(1,2) x,
           generate_series(4,5) y) q;
SELECT row_to_json(q,true)
FROM (SELECT $$a$$ || x AS b,
         y AS c,
         ARRAY[ROW(x.*,ARRAY[1,2,3]),
               ROW(y.*,ARRAY[4,5,6])] AS z
      FROM generate_series(1,2) x,
           generate_series(4,5) y) q;
CREATE TEMP TABLE rows AS
SELECT x, 'txt' || x as y
FROM generate_series(1,3) AS x;
SELECT row_to_json(q,true)
FROM rows q;
SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);
-- to_json, timestamps
select to_json(timestamp '2014-05-28 12:22:35.614298');
BEGIN;
SET LOCAL TIME ZONE 10.5;
select to_json(timestamptz '2014-05-28 12:22:35.614298-04');
SET LOCAL TIME ZONE -8;
select to_json(timestamptz '2014-05-28 12:22:35.614298-04');
COMMIT;

select to_json(date '2014-05-28');
select to_json(date 'Infinity');
select to_json(timestamp 'Infinity');
select to_json(timestamptz 'Infinity');

-- json_agg
SELECT json_agg(q)
  FROM ( SELECT $$a$$ || x AS b, y AS c,
               ARRAY[ROW(x.*,ARRAY[1,2,3]),
               ROW(y.*,ARRAY[4,5,6])] AS z
         FROM generate_series(1,2) x,
              generate_series(4,5) y) q;
SELECT json_agg(q)
  FROM rows q;

-- non-numeric output
SELECT row_to_json(q)
FROM (SELECT 'NaN'::float8 AS "float8field") q;
SELECT row_to_json(q)
FROM (SELECT 'Infinity'::float8 AS "float8field") q;
SELECT row_to_json(q)
FROM (SELECT '-Infinity'::float8 AS "float8field") q;

-- json input
SELECT row_to_json(q)
FROM (SELECT '{"a":1,"b": [2,3,4,"d","e","f"],"c":{"p":1,"q":2}}'::json AS "jsonfield") q;
-- json extraction functions
CREATE TEMP TABLE test_json (
       json_type text,
       test_json json
);

INSERT INTO test_json VALUES
('scalar','"a scalar"'),
('array','["zero", "one","two",null,"four","five", [1,2,3],{"f1":9}]'),
('object','{"field1":"val1","field2":"val2","field3":null, "field4": 4, "field5": [1,2,3], "field6": {"f1":9}}');

SELECT test_json -> 'x'
FROM test_json
WHERE json_type = 'scalar';

SELECT test_json -> 'x'
FROM test_json
WHERE json_type = 'array';

SELECT test_json -> 'x'
FROM test_json
WHERE json_type = 'object';

SELECT test_json -> 'field2'
FROM test_json
WHERE json_type = 'object';

SELECT test_json ->> 'field2'
FROM test_json
WHERE json_type = 'object';

SELECT test_json -> 2
FROM test_json
WHERE json_type = 'scalar';

SELECT test_json -> 2
FROM test_json
WHERE json_type = 'array';

SELECT test_json -> 2
FROM test_json
WHERE json_type = 'object';

SELECT test_json ->> 2
FROM test_json
WHERE json_type = 'array';

SELECT test_json ->> 6 FROM test_json WHERE json_type = 'array';
SELECT test_json ->> 7 FROM test_json WHERE json_type = 'array';

SELECT test_json ->> 'field4' FROM test_json WHERE json_type = 'object';
SELECT test_json ->> 'field5' FROM test_json WHERE json_type = 'object';
SELECT test_json ->> 'field6' FROM test_json WHERE json_type = 'object';
SELECT json_object_keys(test_json)
FROM test_json
WHERE json_type = 'scalar';

SELECT json_object_keys(test_json)
FROM test_json
WHERE json_type = 'array';

SELECT json_object_keys(test_json)
FROM test_json
WHERE json_type = 'object';

-- test extending object_keys resultset - initial resultset size is 256
select count(*) from
    (select json_object_keys(json_object(array_agg(g)))
     from (select unnest(array['f' || n, n::text]) as g
           from generate_series(1,300) as n) x) y;

-- nulls
select (test_json->'field3') is null as expect_false
from test_json
where json_type = 'object';

select (test_json->>'field3') is null as expect_true
from test_json
where json_type = 'object';

select (test_json->3) is null as expect_false
from test_json
where json_type = 'array';

select (test_json->>3) is null as expect_true
from test_json
where json_type = 'array';
-- corner cases
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json -> null::text;
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json -> null::int;
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json -> 1;
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json -> 'z';
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json -> '';
select '[{"b": "c"}, {"b": "cc"}]'::json -> 1;
select '[{"b": "c"}, {"b": "cc"}]'::json -> 3;
select '[{"b": "c"}, {"b": "cc"}]'::json -> 'z';
select '{"a": "c", "b": null}'::json -> 'b';
select '"foo"'::json -> 1;
select '"foo"'::json -> 'z';

select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json ->> null::text;
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json ->> null::int;
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json ->> 1;
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json ->> 'z';
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json ->> '';
select '[{"b": "c"}, {"b": "cc"}]'::json ->> 1;
select '[{"b": "c"}, {"b": "cc"}]'::json ->> 3;
select '[{"b": "c"}, {"b": "cc"}]'::json ->> 'z';
select '{"a": "c", "b": null}'::json ->> 'b';
select '"foo"'::json ->> 1;
select '"foo"'::json ->> 'z';
-- array length
SELECT json_array_length('[1,2,3,{"f1":1,"f2":[5,6]},4]');
SELECT json_array_length('[]');
SELECT json_array_length('{"f1":1,"f2":[5,6]}');
SELECT json_array_length('4');

-- each
select json_each('{"f1":[1,2,3],"f2":{"f3":1},"f4":null}');
select * from json_each('{"f1":[1,2,3],"f2":{"f3":1},"f4":null,"f5":99,"f6":"stringy"}') q;

select json_each_text('{"f1":[1,2,3],"f2":{"f3":1},"f4":null,"f5":"null"}');
select * from json_each_text('{"f1":[1,2,3],"f2":{"f3":1},"f4":null,"f5":99,"f6":"stringy"}') q;

-- extract_path, extract_path_as_text
select json_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
select json_extract_path('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
select json_extract_path('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',0::text);
select json_extract_path('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',1::text);
select json_extract_path_text('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f4','f6');
select json_extract_path_text('{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}','f2');
select json_extract_path_text('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',0::text);
select json_extract_path_text('{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}','f2',1::text);

-- extract_path nulls
select json_extract_path('{"f2":{"f3":1},"f4":{"f5":null,"f6":"stringy"}}','f4','f5') is null as expect_false;
select json_extract_path_text('{"f2":{"f3":1},"f4":{"f5":null,"f6":"stringy"}}','f4','f5') is null as expect_true;
select json_extract_path('{"f2":{"f3":1},"f4":[0,1,2,null]}','f4','3') is null as expect_false;
select json_extract_path_text('{"f2":{"f3":1},"f4":[0,1,2,null]}','f4','3') is null as expect_true;

-- extract_path operators
select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::json#>array['f4','f6'];
select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::json#>array['f2'];
select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::json#>array['f2','0'];
select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::json#>array['f2','1'];

select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::json#>>array['f4','f6'];
select '{"f2":{"f3":1},"f4":{"f5":99,"f6":"stringy"}}'::json#>>array['f2'];
select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::json#>>array['f2','0'];
select '{"f2":["f3",1],"f4":{"f5":99,"f6":"stringy"}}'::json#>>array['f2','1'];
-- corner cases for same
select '{"a": {"b":{"c": "foo"}}}'::json #> '{}';
select '[1,2,3]'::json #> '{}';
select '"foo"'::json #> '{}';
select '42'::json #> '{}';
select 'null'::json #> '{}';
select '{"a": {"b":{"c": "foo"}}}'::json #> array['a'];
select '{"a": {"b":{"c": "foo"}}}'::json #> array['a', null];
select '{"a": {"b":{"c": "foo"}}}'::json #> array['a', ''];
select '{"a": {"b":{"c": "foo"}}}'::json #> array['a','b'];
select '{"a": {"b":{"c": "foo"}}}'::json #> array['a','b','c'];
select '{"a": {"b":{"c": "foo"}}}'::json #> array['a','b','c','d'];
select '{"a": {"b":{"c": "foo"}}}'::json #> array['a','z','c'];
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json #> array['a','1','b'];
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json #> array['a','z','b'];
select '[{"b": "c"}, {"b": "cc"}]'::json #> array['1','b'];
select '[{"b": "c"}, {"b": "cc"}]'::json #> array['z','b'];
select '[{"b": "c"}, {"b": null}]'::json #> array['1','b'];
select '"foo"'::json #> array['z'];
select '42'::json #> array['f2'];
select '42'::json #> array['0'];

select '{"a": {"b":{"c": "foo"}}}'::json #>> '{}';
select '[1,2,3]'::json #>> '{}';
select '"foo"'::json #>> '{}';
select '42'::json #>> '{}';
select 'null'::json #>> '{}';
select '{"a": {"b":{"c": "foo"}}}'::json #>> array['a'];
select '{"a": {"b":{"c": "foo"}}}'::json #>> array['a', null];
select '{"a": {"b":{"c": "foo"}}}'::json #>> array['a', ''];
select '{"a": {"b":{"c": "foo"}}}'::json #>> array['a','b'];
select '{"a": {"b":{"c": "foo"}}}'::json #>> array['a','b','c'];
select '{"a": {"b":{"c": "foo"}}}'::json #>> array['a','b','c','d'];
select '{"a": {"b":{"c": "foo"}}}'::json #>> array['a','z','c'];
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json #>> array['a','1','b'];
select '{"a": [{"b": "c"}, {"b": "cc"}]}'::json #>> array['a','z','b'];
select '[{"b": "c"}, {"b": "cc"}]'::json #>> array['1','b'];
select '[{"b": "c"}, {"b": "cc"}]'::json #>> array['z','b'];
select '[{"b": "c"}, {"b": null}]'::json #>> array['1','b'];
select '"foo"'::json #>> array['z'];
select '42'::json #>> array['f2'];
select '42'::json #>> array['0'];
-- array_elements
select json_array_elements('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8,9]},false,"stringy"]');
select * from json_array_elements('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8,9]},false,"stringy"]') q;
select json_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8,9]},false,"stringy"]');
select * from json_array_elements_text('[1,true,[1,[2,3]],null,{"f1":1,"f2":[7,8,9]},false,"stringy"]') q;

-- populate_record
create type jpop as (a text, b int, c timestamp);

select * from json_populate_record(null::jpop,'{"a":"blurfl","x":43.2}') q;
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":"blurfl","x":43.2}') q;

select * from json_populate_record(null::jpop,'{"a":"blurfl","x":43.2}') q;
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":"blurfl","x":43.2}') q;

select * from json_populate_record(null::jpop,'{"a":[100,200,false],"x":43.2}') q;
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"a":[100,200,false],"x":43.2}') q;
select * from json_populate_record(row('x',3,'2012-12-31 15:30:56')::jpop,'{"c":[100,200,false],"x":43.2}') q;

-- populate_recordset
select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"c":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;

create type jpop2 as (a int, b json, c int, d int);
select * from json_populate_recordset(null::jpop2, '[{"a":2,"c":3,"b":{"z":4},"d":6}]') q;

select * from json_populate_recordset(null::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":"blurfl","x":43.2},{"b":3,"c":"2012-01-20 10:42:53"}]') q;
select * from json_populate_recordset(row('def',99,null)::jpop,'[{"a":[100,200,300],"x":43.2},{"a":{"z":true},"b":3,"c":"2012-01-20 10:42:53"}]') q;
-- handling of unicode surrogate pairs
select json '{ "a":  "\ud83d\ude04\ud83d\udc36" }' -> 'a' as correct_in_utf8;
select json '{ "a":  "\ud83d\ud83d" }' -> 'a'; -- 2 high surrogates in a row
select json '{ "a":  "\ude04\ud83d" }' -> 'a'; -- surrogates in wrong order
select json '{ "a":  "\ud83dX" }' -> 'a'; -- orphan high surrogate
select json '{ "a":  "\ude04X" }' -> 'a'; -- orphan low surrogate

-- handling of simple unicode escapes
select json '{ "a":  "the Copyright \u00a9 sign" }' as correct_in_utf8;
select json '{ "a":  "dollar \u0024 character" }' as correct_everywhere;
select json '{ "a":  "dollar \\u0024 character" }' as not_an_escape;
select json '{ "a":  "null \u0000 escape" }' as not_unescaped;
select json '{ "a":  "null \\u0000 escape" }' as not_an_escape;

select json '{ "a":  "the Copyright \u00a9 sign" }' ->> 'a' as correct_in_utf8;
select json '{ "a":  "dollar \u0024 character" }' ->> 'a' as correct_everywhere;
select json '{ "a":  "dollar \\u0024 character" }' ->> 'a' as not_an_escape;
select json '{ "a":  "null \u0000 escape" }' ->> 'a' as fails;
select json '{ "a":  "null \\u0000 escape" }' ->> 'a' as not_an_escape;
-- json_typeof() function
select value, json_typeof(value)
  from (values (json '123.4'),
               (json '-1'),
               (json '"foo"'),
               (json 'true'),
               (json 'false'),
               (json 'null'),
               (json '[1, 2, 3]'),
               (json '[]'),
               (json '{"x":"foo", "y":123}'),
               (json '{}'),
               (NULL::json))
      as data(value);
-- json_build_array, json_build_object, json_object_agg
SELECT json_build_array('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');

SELECT json_build_object('a',1,'b',1.2,'c',true,'d',null,'e',json '{"x": 3, "y": [1,2,3]}');

SELECT json_build_object(
       'a', json_build_object('b',false,'c',99),
       'd', json_build_object('e',array[9,8,7]::int[],
           'f', (select row_to_json(r) from ( select relkind, oid::regclass as name from pg_class where relname = 'pg_class') r)));

-- empty objects/arrays
SELECT json_build_array();
SELECT json_build_object();

-- make sure keys are quoted
SELECT json_build_object(1,2);

-- keys must be scalar and not null
SELECT json_build_object(null,2);
SELECT json_build_object(r,2) FROM (SELECT 1 AS a, 2 AS b) r;
SELECT json_build_object(json '{"a":1,"b":2}', 3);
SELECT json_build_object('{1,2,3}'::int[], 3);

CREATE TEMP TABLE foo (serial_num int, name text, type text);
INSERT INTO foo VALUES (847001,'t15','GE1043');
INSERT INTO foo VALUES (847002,'t16','GE1043');
INSERT INTO foo VALUES (847003,'sub-alpha','GESS90');

SELECT json_build_object('turbines',json_object_agg(serial_num,json_build_object('name',name,'type',type)))
FROM foo;

-- json_object

-- one dimension
SELECT json_object('{a,1,b,2,3,NULL,"d e f","a b c"}');

-- same but with two dimensions
SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');

-- odd number error
SELECT json_object('{a,b,c}');

-- one column error
SELECT json_object('{{a},{b}}');

-- too many columns error
SELECT json_object('{{a,b,c},{b,c,d}}');

-- too many dimensions error
SELECT json_object('{{{a,b},{c,d}},{{b,c},{d,e}}}');

-- two argument form of json_object
select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c"}');

-- too many dimensions
SELECT json_object('{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}', '{{a,1},{b,2},{3,NULL},{"d e f","a b c"}}');

-- mismatched dimensions
select json_object('{a,b,c,"d e f",g}','{1,2,3,"a b c"}');
select json_object('{a,b,c,"d e f"}','{1,2,3,"a b c",g}');

-- null key error
select json_object('{a,b,NULL,"d e f"}','{1,2,3,"a b c"}');

-- empty key is allowed
select json_object('{a,b,"","d e f"}','{1,2,3,"a b c"}');

-- json_to_record and json_to_recordset
select * from json_to_record('{"a":1,"b":"foo","c":"bar"}')
    as x(a int, b text, d text);

select * from json_to_recordset('[{"a":1,"b":"foo","d":false},{"a":2,"b":"bar","c":true}]')
    as x(a int, b text, c boolean);

select * from json_to_recordset('[{"a":1,"b":{"d":"foo"},"c":true},{"a":2,"c":false,"b":{"d":"bar"}}]')
    as x(a int, b json, c boolean);
-- json_strip_nulls
select json_strip_nulls(null);
select json_strip_nulls('1');
select json_strip_nulls('"a string"');
select json_strip_nulls('null');
select json_strip_nulls('[1,2,null,3,4]');
select json_strip_nulls('{"a":1,"b":null,"c":[2,null,3],"d":{"e":4,"f":null}}');
select json_strip_nulls('[1,{"a":1,"b":null,"c":2},3]');

-- an empty object is not null and should not be stripped
select json_strip_nulls('{"a": {"b": null, "c": null}, "d": {} }');