Rearrange core regression tests to reduce cross-script dependencies.

The idea behind this patch is to make it possible to run individual
test scripts without running the entire core test suite.  Making all
the scripts completely independent would involve a massive rewrite,
and would probably be worse for coverage of things like concurrent DDL.
So this patch just does what seems practical with limited changes.

The net effect is that any test script can be run after running
only a limited set of earlier scripts:
* all scripts depend on test_setup
* many scripts depend on create_index
* other dependencies are few in number, and are documented in
  the parallel_schedule file (see the illustrative snippet below).
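As a rough illustration of how such a dependency is recorded, here is a
hypothetical parallel_schedule fragment; the script names and grouping are
invented for illustration and are not the actual schedule contents:

# foo_test reads tables filled by test_setup, so it runs in a later group
test: test_setup
test: foo_test bar_test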

To accomplish this, I chose a small number of commonly-used tables
and moved their creation and filling into test_setup.  Later scripts
are expected not to modify these tables' data contents, for fear of
affecting other scripts' results.  Also, our former habit of declaring
all C functions in one place is now gone in favor of declaring them
where they're used, if that's just one script, or in test_setup if
necessary.
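Sketched below is the per-script shape this produces, pieced together from
the hunks that follow; the example_heap table and its data file are
placeholders, not real regression objects. A script pulls directory paths
from the environment, loads its own data, declares a C function beside its
sole user, and takes a temporary copy of any shared table it must modify:

-- directory paths and dlsuffix are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
-- private test data (hypothetical table and data file)
CREATE TABLE example_heap (seqno int4, random int4);
\set filename :abs_srcdir '/data/example.data'
COPY example_heap FROM :'filename';
ANALYZE example_heap;
-- a C function needed only here is declared here, not in a central script
CREATE FUNCTION test_atomic_ops() RETURNS bool AS :'regresslib' LANGUAGE C;
-- shared tables from test_setup are never modified in place; work on a copy
CREATE TEMP TABLE point_tbl AS SELECT * FROM public.point_tbl;
INSERT INTO point_tbl(f1) VALUES (NULL);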

There's more that could be done to remove some of the remaining
inter-script dependencies, but significantly more-invasive changes
would be needed, and at least for now it doesn't seem worth it.

Discussion: https://postgr.es/m/1114748.1640383217@sss.pgh.pa.us
Tom Lane 2022-02-08 15:30:38 -05:00
parent ba15f16107
commit cc50080a82
105 changed files with 2391 additions and 2764 deletions

View File

@ -1,8 +1,18 @@
--
-- AGGREGATES
--
-- directory paths are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
-- avoid bit-exact output here because operations may not be bit-exact.
SET extra_float_digits = 0;
-- prepare some test data
CREATE TABLE aggtest (
a int2,
b float4
);
\set filename :abs_srcdir '/data/agg.data'
COPY aggtest FROM :'filename';
ANALYZE aggtest;
SELECT avg(four) AS avg_1 FROM onek;
avg_1
--------------------

View File

@ -1,6 +1,14 @@
--
-- Test for ALTER some_object {RENAME TO, OWNER TO, SET SCHEMA}
--
-- directory paths and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION test_opclass_options_func(internal)
RETURNS void
AS :'regresslib', 'test_opclass_options_func'
LANGUAGE C;
-- Clean up in case a prior regression run failed
SET client_min_messages TO 'warning';
DROP ROLE IF EXISTS regress_alter_generic_user1;

View File

@ -1,6 +1,8 @@
--
-- ARRAYS
--
-- directory paths are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
CREATE TABLE arrtest (
a int2[],
b int4[][][],
@ -10,6 +12,14 @@ CREATE TABLE arrtest (
f char(5)[],
g varchar(5)[]
);
CREATE TABLE array_op_test (
seqno int4,
i int4[],
t text[]
);
\set filename :abs_srcdir '/data/array.data'
COPY array_op_test FROM :'filename';
ANALYZE array_op_test;
--
-- only the 'e' array is 0-based, the others are 1-based.
--
@ -307,6 +317,9 @@ INSERT INTO arrtest_s VALUES(NULL, NULL);
UPDATE arrtest_s SET a[:] = '{11, 12, 13, 14, 15}'; -- fail, no good with null
ERROR: array slice subscript must provide both boundaries
DETAIL: When assigning to a slice of an empty array value, slice boundaries must be fully specified.
-- we want to work with a point_tbl that includes a null
CREATE TEMP TABLE point_tbl AS SELECT * FROM public.point_tbl;
INSERT INTO POINT_TBL(f1) VALUES (NULL);
-- check with fixed-length-array type, such as point
SELECT f1[0:1] FROM POINT_TBL;
ERROR: slices of fixed-length arrays not implemented

View File

@ -1,5 +1,49 @@
--
-- BTREE_INDEX
--
-- directory paths are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
CREATE TABLE bt_i4_heap (
seqno int4,
random int4
);
CREATE TABLE bt_name_heap (
seqno name,
random int4
);
CREATE TABLE bt_txt_heap (
seqno text,
random int4
);
CREATE TABLE bt_f8_heap (
seqno float8,
random int4
);
\set filename :abs_srcdir '/data/desc.data'
COPY bt_i4_heap FROM :'filename';
\set filename :abs_srcdir '/data/hash.data'
COPY bt_name_heap FROM :'filename';
\set filename :abs_srcdir '/data/desc.data'
COPY bt_txt_heap FROM :'filename';
\set filename :abs_srcdir '/data/hash.data'
COPY bt_f8_heap FROM :'filename';
ANALYZE bt_i4_heap;
ANALYZE bt_name_heap;
ANALYZE bt_txt_heap;
ANALYZE bt_f8_heap;
--
-- BTREE ascending/descending cases
--
-- we load int4/text from pure descending data (each key is a new
-- low key) and name/f8 from pure ascending data (each key is a new
-- high key). we had a bug where new low keys would sometimes be
-- "lost".
--
CREATE INDEX bt_i4_index ON bt_i4_heap USING btree (seqno int4_ops);
CREATE INDEX bt_name_index ON bt_name_heap USING btree (seqno name_ops);
CREATE INDEX bt_txt_index ON bt_txt_heap USING btree (seqno text_ops);
CREATE INDEX bt_f8_index ON bt_f8_heap USING btree (seqno float8_ops);
--
-- test retrieval of min/max keys for each index
--
SELECT b.*

View File

@ -11,8 +11,9 @@ SELECT char 'c' = char 'c' AS true;
--
-- Build a table for testing
-- (This temporarily hides the table created in test_setup.sql)
--
CREATE TABLE CHAR_TBL(f1 char);
CREATE TEMP TABLE CHAR_TBL(f1 char);
INSERT INTO CHAR_TBL (f1) VALUES ('a');
INSERT INTO CHAR_TBL (f1) VALUES ('A');
-- any of the following three input formats are acceptable
@ -104,13 +105,11 @@ DROP TABLE CHAR_TBL;
--
-- Now test longer arrays of char
--
CREATE TABLE CHAR_TBL(f1 char(4));
INSERT INTO CHAR_TBL (f1) VALUES ('a');
INSERT INTO CHAR_TBL (f1) VALUES ('ab');
INSERT INTO CHAR_TBL (f1) VALUES ('abcd');
-- This char_tbl was already created and filled in test_setup.sql.
-- Here we just try to insert bad values.
--
INSERT INTO CHAR_TBL (f1) VALUES ('abcde');
ERROR: value too long for type character(4)
INSERT INTO CHAR_TBL (f1) VALUES ('abcd ');
SELECT * FROM CHAR_TBL;
f1
------

View File

@ -1,6 +1,13 @@
--
-- create user defined conversion
--
-- directory paths and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION test_enc_conversion(bytea, name, name, bool, validlen OUT int, result OUT bytea)
AS :'regresslib', 'test_enc_conversion'
LANGUAGE C STRICT;
CREATE USER regress_conversion_user WITH NOCREATEDB NOCREATEROLE;
SET SESSION AUTHORIZATION regress_conversion_user;
CREATE CONVERSION myconv FOR 'LATIN1' TO 'UTF8' FROM iso8859_1_to_utf8;

View File

@ -4,84 +4,6 @@
-- directory paths are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
\getenv abs_builddir PG_ABS_BUILDDIR
-- CLASS POPULATION
-- (any resemblance to real life is purely coincidental)
--
\set filename :abs_srcdir '/data/agg.data'
COPY aggtest FROM :'filename';
\set filename :abs_srcdir '/data/onek.data'
COPY onek FROM :'filename';
\set filename :abs_builddir '/results/onek.data'
COPY onek TO :'filename';
DELETE FROM onek;
COPY onek FROM :'filename';
\set filename :abs_srcdir '/data/tenk.data'
COPY tenk1 FROM :'filename';
\set filename :abs_srcdir '/data/rect.data'
COPY slow_emp4000 FROM :'filename';
\set filename :abs_srcdir '/data/person.data'
COPY person FROM :'filename';
\set filename :abs_srcdir '/data/emp.data'
COPY emp FROM :'filename';
\set filename :abs_srcdir '/data/student.data'
COPY student FROM :'filename';
\set filename :abs_srcdir '/data/stud_emp.data'
COPY stud_emp FROM :'filename';
\set filename :abs_srcdir '/data/streets.data'
COPY road FROM :'filename';
\set filename :abs_srcdir '/data/real_city.data'
COPY real_city FROM :'filename';
\set filename :abs_srcdir '/data/hash.data'
COPY hash_i4_heap FROM :'filename';
COPY hash_name_heap FROM :'filename';
COPY hash_txt_heap FROM :'filename';
COPY hash_f8_heap FROM :'filename';
\set filename :abs_srcdir '/data/tsearch.data'
COPY test_tsvector FROM :'filename';
\set filename :abs_srcdir '/data/jsonb.data'
COPY testjsonb FROM :'filename';
-- the data in this file has a lot of duplicates in the index key
-- fields, leading to long bucket chains and lots of table expansion.
-- this is therefore a stress test of the bucket overflow code (unlike
-- the data in hash.data, which has unique index keys).
--
-- \set filename :abs_srcdir '/data/hashovfl.data'
-- COPY hash_ovfl_heap FROM :'filename';
\set filename :abs_srcdir '/data/desc.data'
COPY bt_i4_heap FROM :'filename';
\set filename :abs_srcdir '/data/hash.data'
COPY bt_name_heap FROM :'filename';
\set filename :abs_srcdir '/data/desc.data'
COPY bt_txt_heap FROM :'filename';
\set filename :abs_srcdir '/data/hash.data'
COPY bt_f8_heap FROM :'filename';
\set filename :abs_srcdir '/data/array.data'
COPY array_op_test FROM :'filename';
\set filename :abs_srcdir '/data/array.data'
COPY array_index_op_test FROM :'filename';
-- analyze all the data we just loaded, to ensure plan consistency
-- in later tests
ANALYZE aggtest;
ANALYZE onek;
ANALYZE tenk1;
ANALYZE slow_emp4000;
ANALYZE person;
ANALYZE emp;
ANALYZE student;
ANALYZE stud_emp;
ANALYZE road;
ANALYZE real_city;
ANALYZE hash_i4_heap;
ANALYZE hash_name_heap;
ANALYZE hash_txt_heap;
ANALYZE hash_f8_heap;
ANALYZE test_tsvector;
ANALYZE bt_i4_heap;
ANALYZE bt_name_heap;
ANALYZE bt_txt_heap;
ANALYZE bt_f8_heap;
ANALYZE array_op_test;
ANALYZE array_index_op_test;
--- test copying in CSV mode with various styles
--- of embedded line ending characters
create temp table copytest (

View File

@ -1,88 +1,20 @@
--
-- CREATE_FUNCTION_0
--
-- This script used to create C functions for other scripts to use.
-- But to get rid of the ordering dependencies that caused, such
-- functions are now made either in test_setup.sql or in the specific
-- test script that needs them. All that remains here is error cases.
-- directory path and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set autoinclib :libdir '/autoinc' :dlsuffix
\set refintlib :libdir '/refint' :dlsuffix
\set regresslib :libdir '/regress' :dlsuffix
-- Create a bunch of C functions that will be used by later tests:
CREATE FUNCTION check_primary_key ()
RETURNS trigger
AS :'refintlib'
LANGUAGE C;
CREATE FUNCTION check_foreign_key ()
RETURNS trigger
AS :'refintlib'
LANGUAGE C;
CREATE FUNCTION autoinc ()
RETURNS trigger
AS :'autoinclib'
LANGUAGE C;
CREATE FUNCTION trigger_return_old ()
RETURNS trigger
AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION ttdummy ()
RETURNS trigger
AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION set_ttdummy (int4)
RETURNS int4
AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION make_tuple_indirect (record)
RETURNS record
AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION test_atomic_ops()
RETURNS bool
AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION test_fdw_handler()
RETURNS fdw_handler
AS :'regresslib', 'test_fdw_handler'
LANGUAGE C;
CREATE FUNCTION test_support_func(internal)
RETURNS internal
AS :'regresslib', 'test_support_func'
LANGUAGE C STRICT;
CREATE FUNCTION test_opclass_options_func(internal)
RETURNS void
AS :'regresslib', 'test_opclass_options_func'
LANGUAGE C;
CREATE FUNCTION test_enc_conversion(bytea, name, name, bool, validlen OUT int, result OUT bytea)
AS :'regresslib', 'test_enc_conversion'
LANGUAGE C STRICT;
CREATE FUNCTION binary_coercible(oid, oid)
RETURNS bool
AS :'regresslib', 'binary_coercible'
LANGUAGE C STRICT STABLE PARALLEL SAFE;
--
-- Check LOAD command. (The alternative of implicitly loading the library
-- is checked in many other test scripts.)
--
LOAD :'regresslib';
-- Things that shouldn't work:
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'SELECT ''not an integer'';';
ERROR: return type mismatch in function declared to return integer
DETAIL: Actual return type is text.
CONTEXT: SQL function "test1"
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'not even SQL';
ERROR: syntax error at or near "not"
LINE 2: AS 'not even SQL';
^
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'SELECT 1, 2, 3;';
ERROR: return type mismatch in function declared to return integer
DETAIL: Final statement must return exactly one column.
CONTEXT: SQL function "test1"
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'SELECT $2;';
ERROR: there is no parameter $2
LINE 2: AS 'SELECT $2;';
^
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'a', 'b';
ERROR: only one AS item needed for language "sql"
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE C
AS 'nosuchfile';
ERROR: could not access file "nosuchfile": No such file or directory

View File

@ -1,34 +0,0 @@
--
-- CREATE_FUNCTION_1
--
-- directory path and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
-- Create C functions needed by create_type.sql
CREATE FUNCTION widget_in(cstring)
RETURNS widget
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
NOTICE: type "widget" is not yet defined
DETAIL: Creating a shell type definition.
CREATE FUNCTION widget_out(widget)
RETURNS cstring
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
NOTICE: argument type widget is only a shell
CREATE FUNCTION int44in(cstring)
RETURNS city_budget
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
NOTICE: type "city_budget" is not yet defined
DETAIL: Creating a shell type definition.
CREATE FUNCTION int44out(city_budget)
RETURNS cstring
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
NOTICE: argument type city_budget is only a shell
CREATE FUNCTION test_canonicalize_path(text)
RETURNS text
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;

View File

@ -1,73 +0,0 @@
--
-- CREATE_FUNCTION_2
--
-- directory path and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION hobbies(person)
RETURNS setof hobbies_r
AS 'select * from hobbies_r where person = $1.name'
LANGUAGE SQL;
CREATE FUNCTION hobby_construct(text, text)
RETURNS hobbies_r
AS 'select $1 as name, $2 as hobby'
LANGUAGE SQL;
CREATE FUNCTION hobby_construct_named(name text, hobby text)
RETURNS hobbies_r
AS 'select name, hobby'
LANGUAGE SQL;
CREATE FUNCTION hobbies_by_name(hobbies_r.name%TYPE)
RETURNS hobbies_r.person%TYPE
AS 'select person from hobbies_r where name = $1'
LANGUAGE SQL;
NOTICE: type reference hobbies_r.name%TYPE converted to text
NOTICE: type reference hobbies_r.person%TYPE converted to text
CREATE FUNCTION equipment(hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where hobby = $1.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named(hobby hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where equipment_r.hobby = equipment_named.hobby.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_1a(hobby hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where hobby = equipment_named_ambiguous_1a.hobby.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_1b(hobby hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where equipment_r.hobby = hobby.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_1c(hobby hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where hobby = hobby.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_2a(hobby text)
RETURNS setof equipment_r
AS 'select * from equipment_r where hobby = equipment_named_ambiguous_2a.hobby'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_2b(hobby text)
RETURNS setof equipment_r
AS 'select * from equipment_r where equipment_r.hobby = hobby'
LANGUAGE SQL;
CREATE FUNCTION pt_in_widget(point, widget)
RETURNS bool
AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION overpaid(emp)
RETURNS bool
AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION interpt_pp(path, path)
RETURNS point
AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION reverse_name(name)
RETURNS name
AS :'regresslib'
LANGUAGE C STRICT;
--
-- Function dynamic loading
--
LOAD :'regresslib';

View File

@ -666,6 +666,30 @@ SELECT * FROM voidtest5(3);
-----------
(0 rows)
-- Things that shouldn't work:
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'SELECT ''not an integer'';';
ERROR: return type mismatch in function declared to return integer
DETAIL: Actual return type is text.
CONTEXT: SQL function "test1"
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'not even SQL';
ERROR: syntax error at or near "not"
LINE 2: AS 'not even SQL';
^
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'SELECT 1, 2, 3;';
ERROR: return type mismatch in function declared to return integer
DETAIL: Final statement must return exactly one column.
CONTEXT: SQL function "test1"
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'SELECT $2;';
ERROR: there is no parameter $2
LINE 2: AS 'SELECT $2;';
^
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'a', 'b';
ERROR: only one AS item needed for language "sql"
-- Cleanup
DROP SCHEMA temp_func_test CASCADE;
NOTICE: drop cascades to 29 other objects

View File

@ -2,6 +2,8 @@
-- CREATE_INDEX
-- Create ancillary data structures (i.e. indices)
--
-- directory paths are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
--
-- BTREE
--
@ -31,18 +33,6 @@ ERROR: relation "six_wrong" does not exist
COMMENT ON INDEX six IS 'good index';
COMMENT ON INDEX six IS NULL;
--
-- BTREE ascending/descending cases
--
-- we load int4/text from pure descending data (each key is a new
-- low key) and name/f8 from pure ascending data (each key is a new
-- high key). we had a bug where new low keys would sometimes be
-- "lost".
--
CREATE INDEX bt_i4_index ON bt_i4_heap USING btree (seqno int4_ops);
CREATE INDEX bt_name_index ON bt_name_heap USING btree (seqno name_ops);
CREATE INDEX bt_txt_index ON bt_txt_heap USING btree (seqno text_ops);
CREATE INDEX bt_f8_index ON bt_f8_heap USING btree (seqno float8_ops);
--
-- BTREE partial indices
--
CREATE INDEX onek2_u1_prtl ON onek2 USING btree(unique1 int4_ops)
@ -54,9 +44,20 @@ CREATE INDEX onek2_stu1_prtl ON onek2 USING btree(stringu1 name_ops)
--
-- GiST (rtree-equivalent opclasses only)
--
CREATE TABLE slow_emp4000 (
home_base box
);
CREATE TABLE fast_emp4000 (
home_base box
);
\set filename :abs_srcdir '/data/rect.data'
COPY slow_emp4000 FROM :'filename';
INSERT INTO fast_emp4000 SELECT * FROM slow_emp4000;
ANALYZE slow_emp4000;
ANALYZE fast_emp4000;
CREATE INDEX grect2ind ON fast_emp4000 USING gist (home_base);
CREATE INDEX gpolygonind ON polygon_tbl USING gist (f1);
CREATE INDEX gcircleind ON circle_tbl USING gist (f1);
-- we want to work with a point_tbl that includes a null
CREATE TEMP TABLE point_tbl AS SELECT * FROM public.point_tbl;
INSERT INTO POINT_TBL(f1) VALUES (NULL);
CREATE INDEX gpointind ON point_tbl USING gist (f1);
CREATE TEMP TABLE gpolygon_tbl AS
@ -95,23 +96,6 @@ SELECT count(*) FROM fast_emp4000 WHERE home_base IS NULL;
278
(1 row)
SELECT * FROM polygon_tbl WHERE f1 @> '((1,1),(2,2),(2,1))'::polygon
ORDER BY (poly_center(f1))[0];
f1
---------------------
((2,0),(2,4),(0,0))
(1 row)
SELECT * FROM circle_tbl WHERE f1 && circle(point(1,-2), 1)
ORDER BY area(f1);
f1
---------------
<(1,2),3>
<(1,3),5>
<(1,2),100>
<(100,1),115>
(4 rows)
SELECT count(*) FROM gpolygon_tbl WHERE f1 && '(1000,1000,0,0)'::polygon;
count
-------
@ -310,45 +294,6 @@ SELECT count(*) FROM fast_emp4000 WHERE home_base IS NULL;
278
(1 row)
EXPLAIN (COSTS OFF)
SELECT * FROM polygon_tbl WHERE f1 @> '((1,1),(2,2),(2,1))'::polygon
ORDER BY (poly_center(f1))[0];
QUERY PLAN
------------------------------------------------------------
Sort
Sort Key: ((poly_center(f1))[0])
-> Index Scan using gpolygonind on polygon_tbl
Index Cond: (f1 @> '((1,1),(2,2),(2,1))'::polygon)
(4 rows)
SELECT * FROM polygon_tbl WHERE f1 @> '((1,1),(2,2),(2,1))'::polygon
ORDER BY (poly_center(f1))[0];
f1
---------------------
((2,0),(2,4),(0,0))
(1 row)
EXPLAIN (COSTS OFF)
SELECT * FROM circle_tbl WHERE f1 && circle(point(1,-2), 1)
ORDER BY area(f1);
QUERY PLAN
--------------------------------------------------
Sort
Sort Key: (area(f1))
-> Index Scan using gcircleind on circle_tbl
Index Cond: (f1 && '<(1,-2),1>'::circle)
(4 rows)
SELECT * FROM circle_tbl WHERE f1 && circle(point(1,-2), 1)
ORDER BY area(f1);
f1
---------------
<(1,2),3>
<(1,3),5>
<(1,2),100>
<(100,1),115>
(4 rows)
EXPLAIN (COSTS OFF)
SELECT count(*) FROM gpolygon_tbl WHERE f1 && '(1000,1000,0,0)'::polygon;
QUERY PLAN
@ -677,6 +622,36 @@ RESET enable_bitmapscan;
--
-- Note: GIN currently supports only bitmap scans, not plain indexscans
--
CREATE TABLE array_index_op_test (
seqno int4,
i int4[],
t text[]
);
\set filename :abs_srcdir '/data/array.data'
COPY array_index_op_test FROM :'filename';
ANALYZE array_index_op_test;
SELECT * FROM array_index_op_test WHERE i = '{NULL}' ORDER BY seqno;
seqno | i | t
-------+--------+--------
102 | {NULL} | {NULL}
(1 row)
SELECT * FROM array_index_op_test WHERE i @> '{NULL}' ORDER BY seqno;
seqno | i | t
-------+---+---
(0 rows)
SELECT * FROM array_index_op_test WHERE i && '{NULL}' ORDER BY seqno;
seqno | i | t
-------+---+---
(0 rows)
SELECT * FROM array_index_op_test WHERE i <@ '{NULL}' ORDER BY seqno;
seqno | i | t
-------+----+----
101 | {} | {}
(1 row)
SET enable_seqscan = OFF;
SET enable_indexscan = OFF;
SET enable_bitmapscan = ON;
@ -904,28 +879,6 @@ SELECT * FROM array_index_op_test WHERE i <@ '{}' ORDER BY seqno;
101 | {} | {}
(1 row)
SELECT * FROM array_op_test WHERE i = '{NULL}' ORDER BY seqno;
seqno | i | t
-------+--------+--------
102 | {NULL} | {NULL}
(1 row)
SELECT * FROM array_op_test WHERE i @> '{NULL}' ORDER BY seqno;
seqno | i | t
-------+---+---
(0 rows)
SELECT * FROM array_op_test WHERE i && '{NULL}' ORDER BY seqno;
seqno | i | t
-------+---+---
(0 rows)
SELECT * FROM array_op_test WHERE i <@ '{NULL}' ORDER BY seqno;
seqno | i | t
-------+----+----
101 | {} | {}
(1 row)
CREATE INDEX textarrayidx ON array_index_op_test USING gin (t);
explain (costs off)
SELECT * FROM array_index_op_test WHERE t @> '{AAAAAAAA72908}' ORDER BY seqno;
@ -1195,18 +1148,6 @@ SELECT * FROM array_index_op_test WHERE t = '{}' ORDER BY seqno;
101 | {} | {}
(1 row)
SELECT * FROM array_op_test WHERE i = '{NULL}' ORDER BY seqno;
seqno | i | t
-------+--------+--------
102 | {NULL} | {NULL}
(1 row)
SELECT * FROM array_op_test WHERE i <@ '{NULL}' ORDER BY seqno;
seqno | i | t
-------+----+----
101 | {} | {}
(1 row)
RESET enable_seqscan;
RESET enable_indexscan;
RESET enable_bitmapscan;
@ -1240,10 +1181,6 @@ Options: fastupdate=on, gin_pending_list_limit=128
--
-- HASH
--
CREATE INDEX hash_i4_index ON hash_i4_heap USING hash (random int4_ops);
CREATE INDEX hash_name_index ON hash_name_heap USING hash (random name_ops);
CREATE INDEX hash_txt_index ON hash_txt_heap USING hash (random text_ops);
CREATE INDEX hash_f8_index ON hash_f8_heap USING hash (random float8_ops) WITH (fillfactor=60);
CREATE UNLOGGED TABLE unlogged_hash_table (id int4);
CREATE INDEX unlogged_hash_index ON unlogged_hash_table USING hash (id int4_ops);
DROP TABLE unlogged_hash_table;
@ -1418,13 +1355,6 @@ ALTER TABLE covering_index_heap ADD CONSTRAINT covering_pkey PRIMARY KEY USING I
covering_pkey;
DROP TABLE covering_index_heap;
--
-- Also try building functional, expressional, and partial indexes on
-- tables that already contain data.
--
create unique index hash_f8_index_1 on hash_f8_heap(abs(random));
create unique index hash_f8_index_2 on hash_f8_heap((seqno + 1), random);
create unique index hash_f8_index_3 on hash_f8_heap(random) where seqno > 1000;
--
-- Try some concurrent index builds
--
-- Unfortunately this only tests about half the code paths because there are
@ -1949,7 +1879,6 @@ SELECT count(*) FROM dupindexcols
--
-- Check ordering of =ANY indexqual results (bug in 9.2.0)
--
vacuum tenk1; -- ensure we get consistent plans here
explain (costs off)
SELECT unique1 FROM tenk1
WHERE unique1 IN (1,42,7)

View File

@ -1,53 +1,34 @@
--
-- CREATE_MISC
--
-- CLASS POPULATION
-- (any resemblance to real life is purely coincidental)
--
INSERT INTO tenk2 SELECT * FROM tenk1;
CREATE TABLE onek2 AS SELECT * FROM onek;
INSERT INTO fast_emp4000 SELECT * FROM slow_emp4000;
SELECT *
INTO TABLE Bprime
FROM tenk1
WHERE unique2 < 1000;
INSERT INTO hobbies_r (name, person)
SELECT 'posthacking', p.name
FROM person* p
WHERE p.name = 'mike' or p.name = 'jeff';
INSERT INTO hobbies_r (name, person)
SELECT 'basketball', p.name
FROM person p
WHERE p.name = 'joe' or p.name = 'sally';
INSERT INTO hobbies_r (name) VALUES ('skywalking');
INSERT INTO equipment_r (name, hobby) VALUES ('advil', 'posthacking');
INSERT INTO equipment_r (name, hobby) VALUES ('peet''s coffee', 'posthacking');
INSERT INTO equipment_r (name, hobby) VALUES ('hightops', 'basketball');
INSERT INTO equipment_r (name, hobby) VALUES ('guts', 'skywalking');
INSERT INTO city VALUES
('Podunk', '(1,2),(3,4)', '100,127,1000'),
('Gotham', '(1000,34),(1100,334)', '123456,127,-1000,6789');
TABLE city;
name | location | budget
--------+----------------------+-----------------------
Podunk | (3,4),(1,2) | 100,127,1000,0
Gotham | (1100,334),(1000,34) | 123456,127,-1000,6789
(2 rows)
SELECT *
INTO TABLE ramp
FROM road
WHERE name ~ '.*Ramp';
INSERT INTO ihighway
SELECT *
FROM road
WHERE name ~ 'I- .*';
INSERT INTO shighway
SELECT *
FROM road
WHERE name ~ 'State Hwy.*';
UPDATE shighway
SET surface = 'asphalt';
-- a is the type root
-- b and c inherit from a (one-level single inheritance)
-- d inherits from b and c (two-level multiple inheritance)
-- e inherits from c (two-level single inheritance)
-- f inherits from e (three-level single inheritance)
--
CREATE TABLE a_star (
class char,
a int4
);
CREATE TABLE b_star (
b text
) INHERITS (a_star);
CREATE TABLE c_star (
c name
) INHERITS (a_star);
CREATE TABLE d_star (
d float8
) INHERITS (b_star, c_star);
NOTICE: merging multiple inherited definitions of column "class"
NOTICE: merging multiple inherited definitions of column "a"
CREATE TABLE e_star (
e int2
) INHERITS (c_star);
CREATE TABLE f_star (
f polygon
) INHERITS (e_star);
INSERT INTO a_star (class, a) VALUES ('a', 1);
INSERT INTO a_star (class, a) VALUES ('a', 2);
INSERT INTO a_star (class) VALUES ('a');
@ -138,14 +119,369 @@ ANALYZE d_star;
ANALYZE e_star;
ANALYZE f_star;
--
-- for internal portal (cursor) tests
-- inheritance stress test
--
CREATE TABLE iportaltest (
i int4,
d float4,
p polygon
);
INSERT INTO iportaltest (i, d, p)
VALUES (1, 3.567, '(3.0,1.0),(4.0,2.0)'::polygon);
INSERT INTO iportaltest (i, d, p)
VALUES (2, 89.05, '(4.0,2.0),(3.0,1.0)'::polygon);
SELECT * FROM a_star*;
class | a
-------+----
a | 1
a | 2
a |
b | 3
b | 4
b |
b |
c | 5
c | 6
c |
c |
d | 7
d | 8
d | 9
d | 10
d |
d | 11
d | 12
d | 13
d |
d |
d |
d | 14
d |
d |
d |
d |
e | 15
e | 16
e | 17
e |
e | 18
e |
e |
f | 19
f | 20
f | 21
f | 22
f |
f | 24
f | 25
f | 26
f |
f |
f |
f | 27
f |
f |
f |
f |
(50 rows)
SELECT *
FROM b_star* x
WHERE x.b = text 'bumble' or x.a < 3;
class | a | b
-------+---+--------
b | | bumble
(1 row)
SELECT class, a
FROM c_star* x
WHERE x.c ~ text 'hi';
class | a
-------+----
c | 5
c |
d | 7
d | 8
d | 10
d |
d | 12
d |
d |
d |
e | 15
e | 16
e |
e |
f | 19
f | 20
f | 21
f |
f | 24
f |
f |
f |
(22 rows)
SELECT class, b, c
FROM d_star* x
WHERE x.a < 100;
class | b | c
-------+---------+------------
d | grumble | hi sunita
d | stumble | hi koko
d | rumble |
d | | hi kristin
d | fumble |
d | | hi avi
d | |
d | |
(8 rows)
SELECT class, c FROM e_star* x WHERE x.c NOTNULL;
class | c
-------+-------------
e | hi carol
e | hi bob
e | hi michelle
e | hi elisa
f | hi claire
f | hi mike
f | hi marcel
f | hi keith
f | hi marc
f | hi allison
f | hi jeff
f | hi carl
(12 rows)
SELECT * FROM f_star* x WHERE x.c ISNULL;
class | a | c | e | f
-------+----+---+-----+-------------------------------------------
f | 22 | | -7 | ((111,555),(222,666),(333,777),(444,888))
f | 25 | | -9 |
f | 26 | | | ((11111,33333),(22222,44444))
f | | | -11 | ((1111111,3333333),(2222222,4444444))
f | 27 | | |
f | | | -12 |
f | | | | ((11111111,33333333),(22222222,44444444))
f | | | |
(8 rows)
-- grouping and aggregation on inherited sets have been busted in the past...
SELECT sum(a) FROM a_star*;
sum
-----
355
(1 row)
SELECT class, sum(a) FROM a_star* GROUP BY class ORDER BY class;
class | sum
-------+-----
a | 3
b | 7
c | 11
d | 84
e | 66
f | 184
(6 rows)
ALTER TABLE f_star RENAME COLUMN f TO ff;
ALTER TABLE e_star* RENAME COLUMN e TO ee;
ALTER TABLE d_star* RENAME COLUMN d TO dd;
ALTER TABLE c_star* RENAME COLUMN c TO cc;
ALTER TABLE b_star* RENAME COLUMN b TO bb;
ALTER TABLE a_star* RENAME COLUMN a TO aa;
SELECT class, aa
FROM a_star* x
WHERE aa ISNULL;
class | aa
-------+----
a |
b |
b |
c |
c |
d |
d |
d |
d |
d |
d |
d |
d |
e |
e |
e |
f |
f |
f |
f |
f |
f |
f |
f |
(24 rows)
-- As of Postgres 7.1, ALTER implicitly recurses,
-- so this should be same as ALTER a_star*
ALTER TABLE a_star RENAME COLUMN aa TO foo;
SELECT class, foo
FROM a_star* x
WHERE x.foo >= 2;
class | foo
-------+-----
a | 2
b | 3
b | 4
c | 5
c | 6
d | 7
d | 8
d | 9
d | 10
d | 11
d | 12
d | 13
d | 14
e | 15
e | 16
e | 17
e | 18
f | 19
f | 20
f | 21
f | 22
f | 24
f | 25
f | 26
f | 27
(25 rows)
ALTER TABLE a_star RENAME COLUMN foo TO aa;
SELECT *
from a_star*
WHERE aa < 1000;
class | aa
-------+----
a | 1
a | 2
b | 3
b | 4
c | 5
c | 6
d | 7
d | 8
d | 9
d | 10
d | 11
d | 12
d | 13
d | 14
e | 15
e | 16
e | 17
e | 18
f | 19
f | 20
f | 21
f | 22
f | 24
f | 25
f | 26
f | 27
(26 rows)
ALTER TABLE f_star ADD COLUMN f int4;
UPDATE f_star SET f = 10;
ALTER TABLE e_star* ADD COLUMN e int4;
--UPDATE e_star* SET e = 42;
SELECT * FROM e_star*;
class | aa | cc | ee | e
-------+----+-------------+-----+---
e | 15 | hi carol | -1 |
e | 16 | hi bob | |
e | 17 | | -2 |
e | | hi michelle | -3 |
e | 18 | | |
e | | hi elisa | |
e | | | -4 |
f | 19 | hi claire | -5 |
f | 20 | hi mike | -6 |
f | 21 | hi marcel | |
f | 22 | | -7 |
f | | hi keith | -8 |
f | 24 | hi marc | |
f | 25 | | -9 |
f | 26 | | |
f | | hi allison | -10 |
f | | hi jeff | |
f | | | -11 |
f | 27 | | |
f | | hi carl | |
f | | | -12 |
f | | | |
f | | | |
(23 rows)
ALTER TABLE a_star* ADD COLUMN a text;
NOTICE: merging definition of column "a" for child "d_star"
-- That ALTER TABLE should have added TOAST tables.
SELECT relname, reltoastrelid <> 0 AS has_toast_table
FROM pg_class
WHERE oid::regclass IN ('a_star', 'c_star')
ORDER BY 1;
relname | has_toast_table
---------+-----------------
a_star | t
c_star | t
(2 rows)
--UPDATE b_star*
-- SET a = text 'gazpacho'
-- WHERE aa > 4;
SELECT class, aa, a FROM a_star*;
class | aa | a
-------+----+---
a | 1 |
a | 2 |
a | |
b | 3 |
b | 4 |
b | |
b | |
c | 5 |
c | 6 |
c | |
c | |
d | 7 |
d | 8 |
d | 9 |
d | 10 |
d | |
d | 11 |
d | 12 |
d | 13 |
d | |
d | |
d | |
d | 14 |
d | |
d | |
d | |
d | |
e | 15 |
e | 16 |
e | 17 |
e | |
e | 18 |
e | |
e | |
f | 19 |
f | 20 |
f | 21 |
f | 22 |
f | |
f | 24 |
f | 25 |
f | 26 |
f | |
f | |
f | |
f | 27 |
f | |
f | |
f | |
f | |
(50 rows)

View File

@ -7,13 +7,6 @@ CREATE OPERATOR ## (
function = path_inter,
commutator = ##
);
CREATE OPERATOR <% (
leftarg = point,
rightarg = widget,
procedure = pt_in_widget,
commutator = >% ,
negator = >=%
);
CREATE OPERATOR @#@ (
rightarg = int8, -- prefix
procedure = factorial
@ -25,11 +18,10 @@ CREATE OPERATOR #%# (
ERROR: operator right argument type must be specified
DETAIL: Postfix operators are not supported.
-- Test operator created above
SELECT point '(1,2)' <% widget '(0,0,3)' AS t,
point '(1,2)' <% widget '(0,0,1)' AS f;
t | f
---+---
t | f
SELECT @#@ 24;
?column?
--------------------------
620448401733239439360000
(1 row)
-- Test comments

View File

@ -1,204 +1,7 @@
--
-- CREATE_TABLE
--
--
-- CLASS DEFINITIONS
--
CREATE TABLE hobbies_r (
name text,
person text
);
CREATE TABLE equipment_r (
name text,
hobby text
);
CREATE TABLE onek (
unique1 int4,
unique2 int4,
two int4,
four int4,
ten int4,
twenty int4,
hundred int4,
thousand int4,
twothousand int4,
fivethous int4,
tenthous int4,
odd int4,
even int4,
stringu1 name,
stringu2 name,
string4 name
);
CREATE TABLE tenk1 (
unique1 int4,
unique2 int4,
two int4,
four int4,
ten int4,
twenty int4,
hundred int4,
thousand int4,
twothousand int4,
fivethous int4,
tenthous int4,
odd int4,
even int4,
stringu1 name,
stringu2 name,
string4 name
);
CREATE TABLE tenk2 (
unique1 int4,
unique2 int4,
two int4,
four int4,
ten int4,
twenty int4,
hundred int4,
thousand int4,
twothousand int4,
fivethous int4,
tenthous int4,
odd int4,
even int4,
stringu1 name,
stringu2 name,
string4 name
);
CREATE TABLE person (
name text,
age int4,
location point
);
CREATE TABLE emp (
salary int4,
manager name
) INHERITS (person);
CREATE TABLE student (
gpa float8
) INHERITS (person);
CREATE TABLE stud_emp (
percent int4
) INHERITS (emp, student);
NOTICE: merging multiple inherited definitions of column "name"
NOTICE: merging multiple inherited definitions of column "age"
NOTICE: merging multiple inherited definitions of column "location"
CREATE TABLE city (
name name,
location box,
budget city_budget
);
CREATE TABLE dept (
dname name,
mgrname text
);
CREATE TABLE slow_emp4000 (
home_base box
);
CREATE TABLE fast_emp4000 (
home_base box
);
CREATE TABLE road (
name text,
thepath path
);
CREATE TABLE ihighway () INHERITS (road);
CREATE TABLE shighway (
surface text
) INHERITS (road);
CREATE TABLE real_city (
pop int4,
cname text,
outline path
);
--
-- test the "star" operators a bit more thoroughly -- this time,
-- throw in lots of NULL fields...
--
-- a is the type root
-- b and c inherit from a (one-level single inheritance)
-- d inherits from b and c (two-level multiple inheritance)
-- e inherits from c (two-level single inheritance)
-- f inherits from e (three-level single inheritance)
--
CREATE TABLE a_star (
class char,
a int4
);
CREATE TABLE b_star (
b text
) INHERITS (a_star);
CREATE TABLE c_star (
c name
) INHERITS (a_star);
CREATE TABLE d_star (
d float8
) INHERITS (b_star, c_star);
NOTICE: merging multiple inherited definitions of column "class"
NOTICE: merging multiple inherited definitions of column "a"
CREATE TABLE e_star (
e int2
) INHERITS (c_star);
CREATE TABLE f_star (
f polygon
) INHERITS (e_star);
CREATE TABLE aggtest (
a int2,
b float4
);
CREATE TABLE hash_i4_heap (
seqno int4,
random int4
);
CREATE TABLE hash_name_heap (
seqno int4,
random name
);
CREATE TABLE hash_txt_heap (
seqno int4,
random text
);
CREATE TABLE hash_f8_heap (
seqno int4,
random float8
);
-- don't include the hash_ovfl_heap stuff in the distribution
-- the data set is too large for what it's worth
--
-- CREATE TABLE hash_ovfl_heap (
-- x int4,
-- y int4
-- );
CREATE TABLE bt_i4_heap (
seqno int4,
random int4
);
CREATE TABLE bt_name_heap (
seqno name,
random int4
);
CREATE TABLE bt_txt_heap (
seqno text,
random int4
);
CREATE TABLE bt_f8_heap (
seqno float8,
random int4
);
CREATE TABLE array_op_test (
seqno int4,
i int4[],
t text[]
);
CREATE TABLE array_index_op_test (
seqno int4,
i int4[],
t text[]
);
CREATE TABLE testjsonb (
j jsonb
);
-- Error cases
CREATE TABLE unknowntab (
u unknown -- fail
);
@ -207,14 +10,6 @@ CREATE TYPE unknown_comptype AS (
u unknown -- fail
);
ERROR: column "u" has pseudo-type unknown
CREATE TABLE IF NOT EXISTS test_tsvector(
t text,
a tsvector
);
CREATE TABLE IF NOT EXISTS test_tsvector(
t text
);
NOTICE: relation "test_tsvector" already exists, skipping
-- invalid: non-lowercase quoted reloptions identifiers
CREATE TABLE tas_case WITH ("Fillfactor" = 10) AS SELECT 1 a;
ERROR: unrecognized parameter "Fillfactor"

View File

@ -1,11 +1,36 @@
--
-- CREATE_TYPE
--
-- directory path and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
--
-- Note: widget_in/out were created in create_function_1, without any
-- prior shell-type creation. These commands therefore complete a test
-- of the "old style" approach of making the functions first.
-- Test the "old style" approach of making the I/O functions first,
-- with no explicit shell type creation.
--
CREATE FUNCTION widget_in(cstring)
RETURNS widget
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
NOTICE: type "widget" is not yet defined
DETAIL: Creating a shell type definition.
CREATE FUNCTION widget_out(widget)
RETURNS cstring
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
NOTICE: argument type widget is only a shell
CREATE FUNCTION int44in(cstring)
RETURNS city_budget
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
NOTICE: type "city_budget" is not yet defined
DETAIL: Creating a shell type definition.
CREATE FUNCTION int44out(city_budget)
RETURNS cstring
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
NOTICE: argument type city_budget is only a shell
CREATE TYPE widget (
internallength = 24,
input = widget_in,
@ -224,6 +249,41 @@ select format_type('bpchar'::regtype, -1);
bpchar
(1 row)
-- Test creation of an operator over a user-defined type
CREATE FUNCTION pt_in_widget(point, widget)
RETURNS bool
AS :'regresslib'
LANGUAGE C STRICT;
CREATE OPERATOR <% (
leftarg = point,
rightarg = widget,
procedure = pt_in_widget,
commutator = >% ,
negator = >=%
);
SELECT point '(1,2)' <% widget '(0,0,3)' AS t,
point '(1,2)' <% widget '(0,0,1)' AS f;
t | f
---+---
t | f
(1 row)
-- exercise city_budget type
CREATE TABLE city (
name name,
location box,
budget city_budget
);
INSERT INTO city VALUES
('Podunk', '(1,2),(3,4)', '100,127,1000'),
('Gotham', '(1000,34),(1100,334)', '123456,127,-1000,6789');
TABLE city;
name | location | budget
--------+----------------------+-----------------------
Podunk | (3,4),(1,2) | 100,127,1000,0
Gotham | (1100,334),(1000,34) | 123456,127,-1000,6789
(2 rows)
--
-- Test CREATE/ALTER TYPE using a type that's compatible with varchar,
-- so we can re-use those support functions

View File

@ -3,15 +3,36 @@
-- Virtual class definitions
-- (this also tests the query rewrite system)
--
-- directory paths and dlsuffix are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION interpt_pp(path, path)
RETURNS point
AS :'regresslib'
LANGUAGE C STRICT;
CREATE TABLE real_city (
pop int4,
cname text,
outline path
);
\set filename :abs_srcdir '/data/real_city.data'
COPY real_city FROM :'filename';
ANALYZE real_city;
SELECT *
INTO TABLE ramp
FROM ONLY road
WHERE name ~ '.*Ramp';
CREATE VIEW street AS
SELECT r.name, r.thepath, c.cname AS cname
FROM ONLY road r, real_city c
WHERE c.outline ## r.thepath;
WHERE c.outline ?# r.thepath;
CREATE VIEW iexit AS
SELECT ih.name, ih.thepath,
interpt_pp(ih.thepath, r.thepath) AS exit
FROM ihighway ih, ramp r
WHERE ih.thepath ## r.thepath;
WHERE ih.thepath ?# r.thepath;
CREATE VIEW toyemp AS
SELECT name, age, location, 12*salary AS annualsal
FROM emp;

View File

@ -92,8 +92,8 @@ ERROR: relation "nonesuch" does not exist
alter table nonesuch rename to stud_emp;
ERROR: relation "nonesuch" does not exist
-- conflict
alter table stud_emp rename to aggtest;
ERROR: relation "aggtest" already exists
alter table stud_emp rename to student;
ERROR: relation "student" already exists
-- self-conflict
alter table stud_emp rename to stud_emp;
ERROR: relation "stud_emp" already exists

View File

@ -2,7 +2,7 @@
-- expression evaluation tests that don't fit into a more specific file
--
--
-- Tests for SQLVAlueFunction
-- Tests for SQLValueFunction
--
-- current_date (always matches because of transactional behaviour)
SELECT date(now())::text = current_date::text;
@ -87,77 +87,6 @@ SELECT current_schema;
(1 row)
RESET search_path;
--
-- Tests for BETWEEN
--
explain (costs off)
select count(*) from date_tbl
where f1 between '1997-01-01' and '1998-01-01';
QUERY PLAN
-----------------------------------------------------------------------------
Aggregate
-> Seq Scan on date_tbl
Filter: ((f1 >= '01-01-1997'::date) AND (f1 <= '01-01-1998'::date))
(3 rows)
select count(*) from date_tbl
where f1 between '1997-01-01' and '1998-01-01';
count
-------
3
(1 row)
explain (costs off)
select count(*) from date_tbl
where f1 not between '1997-01-01' and '1998-01-01';
QUERY PLAN
--------------------------------------------------------------------------
Aggregate
-> Seq Scan on date_tbl
Filter: ((f1 < '01-01-1997'::date) OR (f1 > '01-01-1998'::date))
(3 rows)
select count(*) from date_tbl
where f1 not between '1997-01-01' and '1998-01-01';
count
-------
13
(1 row)
explain (costs off)
select count(*) from date_tbl
where f1 between symmetric '1997-01-01' and '1998-01-01';
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------------------------------
Aggregate
-> Seq Scan on date_tbl
Filter: (((f1 >= '01-01-1997'::date) AND (f1 <= '01-01-1998'::date)) OR ((f1 >= '01-01-1998'::date) AND (f1 <= '01-01-1997'::date)))
(3 rows)
select count(*) from date_tbl
where f1 between symmetric '1997-01-01' and '1998-01-01';
count
-------
3
(1 row)
explain (costs off)
select count(*) from date_tbl
where f1 not between symmetric '1997-01-01' and '1998-01-01';
QUERY PLAN
-----------------------------------------------------------------------------------------------------------------------------------------
Aggregate
-> Seq Scan on date_tbl
Filter: (((f1 < '01-01-1997'::date) OR (f1 > '01-01-1998'::date)) AND ((f1 < '01-01-1998'::date) OR (f1 > '01-01-1997'::date)))
(3 rows)
select count(*) from date_tbl
where f1 not between symmetric '1997-01-01' and '1998-01-01';
count
-------
13
(1 row)
--
-- Test parsing of a no-op cast to a type with unspecified typmod
--

View File

@ -1,7 +1,11 @@
--
-- FLOAT8
--
CREATE TABLE FLOAT8_TBL(f1 float8);
--
-- Build a table for testing
-- (This temporarily hides the table created in test_setup.sql)
--
CREATE TEMP TABLE FLOAT8_TBL(f1 float8);
INSERT INTO FLOAT8_TBL(f1) VALUES (' 0.0 ');
INSERT INTO FLOAT8_TBL(f1) VALUES ('1004.30 ');
INSERT INTO FLOAT8_TBL(f1) VALUES (' -34.84');
@ -779,14 +783,8 @@ INSERT INTO FLOAT8_TBL(f1) VALUES ('-10e-400');
ERROR: "-10e-400" is out of range for type double precision
LINE 1: INSERT INTO FLOAT8_TBL(f1) VALUES ('-10e-400');
^
-- maintain external table consistency across platforms
-- delete all values and reinsert well-behaved ones
DELETE FROM FLOAT8_TBL;
INSERT INTO FLOAT8_TBL(f1) VALUES ('0.0');
INSERT INTO FLOAT8_TBL(f1) VALUES ('-34.84');
INSERT INTO FLOAT8_TBL(f1) VALUES ('-1004.30');
INSERT INTO FLOAT8_TBL(f1) VALUES ('-1.2345678901234e+200');
INSERT INTO FLOAT8_TBL(f1) VALUES ('-1.2345678901234e-200');
DROP TABLE FLOAT8_TBL;
-- Check the float8 values exported for use by other tests
SELECT * FROM FLOAT8_TBL;
f1
-----------------------

View File

@ -1,6 +1,14 @@
--
-- Test foreign-data wrapper and server management.
--
-- directory paths and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION test_fdw_handler()
RETURNS fdw_handler
AS :'regresslib', 'test_fdw_handler'
LANGUAGE C;
-- Clean up in case a prior regression run failed
-- Suppress NOTICE messages when roles don't exist
SET client_min_messages TO 'warning';

View File

@ -5236,3 +5236,63 @@ SELECT c.f1, p.f1, c.f1 <-> p.f1 FROM CIRCLE_TBL c, POLYGON_TBL p;
<(3,5),NaN> | ((0,1),(0,1)) | NaN
(56 rows)
-- Check index behavior for circles
CREATE INDEX gcircleind ON circle_tbl USING gist (f1);
SELECT * FROM circle_tbl WHERE f1 && circle(point(1,-2), 1)
ORDER BY area(f1);
f1
---------------
<(1,2),3>
<(1,3),5>
<(1,2),100>
<(100,1),115>
(4 rows)
EXPLAIN (COSTS OFF)
SELECT * FROM circle_tbl WHERE f1 && circle(point(1,-2), 1)
ORDER BY area(f1);
QUERY PLAN
----------------------------------------------
Sort
Sort Key: (area(f1))
-> Seq Scan on circle_tbl
Filter: (f1 && '<(1,-2),1>'::circle)
(4 rows)
SELECT * FROM circle_tbl WHERE f1 && circle(point(1,-2), 1)
ORDER BY area(f1);
f1
---------------
<(1,2),3>
<(1,3),5>
<(1,2),100>
<(100,1),115>
(4 rows)
-- Check index behavior for polygons
CREATE INDEX gpolygonind ON polygon_tbl USING gist (f1);
SELECT * FROM polygon_tbl WHERE f1 @> '((1,1),(2,2),(2,1))'::polygon
ORDER BY (poly_center(f1))[0];
f1
---------------------
((2,0),(2,4),(0,0))
(1 row)
EXPLAIN (COSTS OFF)
SELECT * FROM polygon_tbl WHERE f1 @> '((1,1),(2,2),(2,1))'::polygon
ORDER BY (poly_center(f1))[0];
QUERY PLAN
--------------------------------------------------------
Sort
Sort Key: ((poly_center(f1))[0])
-> Seq Scan on polygon_tbl
Filter: (f1 @> '((1,1),(2,2),(2,1))'::polygon)
(4 rows)
SELECT * FROM polygon_tbl WHERE f1 @> '((1,1),(2,2),(2,1))'::polygon
ORDER BY (poly_center(f1))[0];
f1
---------------------
((2,0),(2,4),(0,0))
(1 row)

View File

@ -1,5 +1,54 @@
--
-- HASH_INDEX
--
-- directory paths are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
CREATE TABLE hash_i4_heap (
seqno int4,
random int4
);
CREATE TABLE hash_name_heap (
seqno int4,
random name
);
CREATE TABLE hash_txt_heap (
seqno int4,
random text
);
CREATE TABLE hash_f8_heap (
seqno int4,
random float8
);
\set filename :abs_srcdir '/data/hash.data'
COPY hash_i4_heap FROM :'filename';
COPY hash_name_heap FROM :'filename';
COPY hash_txt_heap FROM :'filename';
COPY hash_f8_heap FROM :'filename';
-- the data in this file has a lot of duplicates in the index key
-- fields, leading to long bucket chains and lots of table expansion.
-- this is therefore a stress test of the bucket overflow code (unlike
-- the data in hash.data, which has unique index keys).
--
-- \set filename :abs_srcdir '/data/hashovfl.data'
-- COPY hash_ovfl_heap FROM :'filename';
ANALYZE hash_i4_heap;
ANALYZE hash_name_heap;
ANALYZE hash_txt_heap;
ANALYZE hash_f8_heap;
CREATE INDEX hash_i4_index ON hash_i4_heap USING hash (random int4_ops);
CREATE INDEX hash_name_index ON hash_name_heap USING hash (random name_ops);
CREATE INDEX hash_txt_index ON hash_txt_heap USING hash (random text_ops);
CREATE INDEX hash_f8_index ON hash_f8_heap USING hash (random float8_ops)
WITH (fillfactor=60);
--
-- Also try building functional, expressional, and partial indexes on
-- tables that already contain data.
--
create unique index hash_f8_index_1 on hash_f8_heap(abs(random));
create unique index hash_f8_index_2 on hash_f8_heap((seqno + 1), random);
create unique index hash_f8_index_3 on hash_f8_heap(random) where seqno > 1000;
--
-- hash index
-- grep 843938989 hash.data
--
SELECT * FROM hash_i4_heap

View File

@ -2147,6 +2147,77 @@ SELECT '2020-10-05'::timestamptz >= '4714-11-24 BC'::timestamp as t;
(1 row)
RESET TimeZone;
--
-- Tests for BETWEEN
--
explain (costs off)
select count(*) from date_tbl
where f1 between '1997-01-01' and '1998-01-01';
QUERY PLAN
-----------------------------------------------------------------------------
Aggregate
-> Seq Scan on date_tbl
Filter: ((f1 >= '01-01-1997'::date) AND (f1 <= '01-01-1998'::date))
(3 rows)
select count(*) from date_tbl
where f1 between '1997-01-01' and '1998-01-01';
count
-------
3
(1 row)
explain (costs off)
select count(*) from date_tbl
where f1 not between '1997-01-01' and '1998-01-01';
QUERY PLAN
--------------------------------------------------------------------------
Aggregate
-> Seq Scan on date_tbl
Filter: ((f1 < '01-01-1997'::date) OR (f1 > '01-01-1998'::date))
(3 rows)
select count(*) from date_tbl
where f1 not between '1997-01-01' and '1998-01-01';
count
-------
13
(1 row)
explain (costs off)
select count(*) from date_tbl
where f1 between symmetric '1997-01-01' and '1998-01-01';
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------------------------------
Aggregate
-> Seq Scan on date_tbl
Filter: (((f1 >= '01-01-1997'::date) AND (f1 <= '01-01-1998'::date)) OR ((f1 >= '01-01-1998'::date) AND (f1 <= '01-01-1997'::date)))
(3 rows)
select count(*) from date_tbl
where f1 between symmetric '1997-01-01' and '1998-01-01';
count
-------
3
(1 row)
explain (costs off)
select count(*) from date_tbl
where f1 not between symmetric '1997-01-01' and '1998-01-01';
QUERY PLAN
-----------------------------------------------------------------------------------------------------------------------------------------
Aggregate
-> Seq Scan on date_tbl
Filter: (((f1 < '01-01-1997'::date) OR (f1 > '01-01-1998'::date)) AND ((f1 < '01-01-1998'::date) OR (f1 > '01-01-1997'::date)))
(3 rows)
select count(*) from date_tbl
where f1 not between symmetric '1997-01-01' and '1998-01-01';
count
-------
13
(1 row)
--
-- Formats
--

View File

@ -1,6 +1,14 @@
--
-- Tests for external toast datums
--
-- directory paths and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION make_tuple_indirect (record)
RETURNS record
AS :'regresslib'
LANGUAGE C STRICT;
-- Other compression algorithms may cause the compressed data to be stored
-- inline. pglz guarantees that the data is externalized, so stick to it.
SET default_toast_compression = 'pglz';

View File

@ -408,28 +408,6 @@ select tableoid::regclass::text, a, min(b) as min_b, max(b) as max_b from list_p
(9 rows)
-- direct partition inserts should check hash partition bound constraint
-- Use hand-rolled hash functions and operator classes to get predictable
-- result on different machines. The hash function for int4 simply returns
-- the sum of the values passed to it and the one for text returns the length
-- of the non-empty string value passed to it or 0.
create or replace function part_hashint4_noop(value int4, seed int8)
returns int8 as $$
select value + seed;
$$ language sql immutable;
create operator class part_test_int4_ops
for type int4
using hash as
operator 1 =,
function 2 part_hashint4_noop(int4, int8);
create or replace function part_hashtext_length(value text, seed int8)
RETURNS int8 AS $$
select length(coalesce(value, ''))::int8
$$ language sql immutable;
create operator class part_test_text_ops
for type text
using hash as
operator 1 =,
function 2 part_hashtext_length(text, int8);
create table hash_parted (
a int
) partition by hash (a part_test_int4_ops);

View File

@ -1,18 +1,12 @@
--
-- INT2
--
CREATE TABLE INT2_TBL(f1 int2);
INSERT INTO INT2_TBL(f1) VALUES ('0 ');
INSERT INTO INT2_TBL(f1) VALUES (' 1234 ');
INSERT INTO INT2_TBL(f1) VALUES (' -1234');
-- int2_tbl was already created and filled in test_setup.sql.
-- Here we just try to insert bad values.
INSERT INTO INT2_TBL(f1) VALUES ('34.5');
ERROR: invalid input syntax for type smallint: "34.5"
LINE 1: INSERT INTO INT2_TBL(f1) VALUES ('34.5');
^
-- largest and smallest values
INSERT INTO INT2_TBL(f1) VALUES ('32767');
INSERT INTO INT2_TBL(f1) VALUES ('-32767');
-- bad input values -- should give errors
INSERT INTO INT2_TBL(f1) VALUES ('100000');
ERROR: value "100000" is out of range for type smallint
LINE 1: INSERT INTO INT2_TBL(f1) VALUES ('100000');

View File

@ -1,18 +1,12 @@
--
-- INT4
--
CREATE TABLE INT4_TBL(f1 int4);
INSERT INTO INT4_TBL(f1) VALUES (' 0 ');
INSERT INTO INT4_TBL(f1) VALUES ('123456 ');
INSERT INTO INT4_TBL(f1) VALUES (' -123456');
-- int4_tbl was already created and filled in test_setup.sql.
-- Here we just try to insert bad values.
INSERT INTO INT4_TBL(f1) VALUES ('34.5');
ERROR: invalid input syntax for type integer: "34.5"
LINE 1: INSERT INTO INT4_TBL(f1) VALUES ('34.5');
^
-- largest and smallest values
INSERT INTO INT4_TBL(f1) VALUES ('2147483647');
INSERT INTO INT4_TBL(f1) VALUES ('-2147483647');
-- bad input values -- should give errors
INSERT INTO INT4_TBL(f1) VALUES ('1000000000000');
ERROR: value "1000000000000" is out of range for type integer
LINE 1: INSERT INTO INT4_TBL(f1) VALUES ('1000000000000');

View File

@ -2,13 +2,8 @@
-- INT8
-- Test int8 64-bit integers.
--
CREATE TABLE INT8_TBL(q1 int8, q2 int8);
INSERT INTO INT8_TBL VALUES(' 123 ',' 456');
INSERT INTO INT8_TBL VALUES('123 ','4567890123456789');
INSERT INTO INT8_TBL VALUES('4567890123456789','123');
INSERT INTO INT8_TBL VALUES(+4567890123456789,'4567890123456789');
INSERT INTO INT8_TBL VALUES('+4567890123456789','-4567890123456789');
-- bad inputs
-- int8_tbl was already created and filled in test_setup.sql.
-- Here we just try to insert bad values.
INSERT INTO INT8_TBL(q1) VALUES (' ');
ERROR: invalid input syntax for type bigint: " "
LINE 1: INSERT INTO INT8_TBL(q1) VALUES (' ');

View File

@ -2261,7 +2261,7 @@ rollback;
--
explain (costs off)
select aa, bb, unique1, unique1
from tenk1 right join b on aa = unique1
from tenk1 right join b_star on aa = unique1
where bb < bb and bb is null;
QUERY PLAN
--------------------------
@ -2270,7 +2270,7 @@ select aa, bb, unique1, unique1
(2 rows)
select aa, bb, unique1, unique1
from tenk1 right join b on aa = unique1
from tenk1 right join b_star on aa = unique1
where bb < bb and bb is null;
aa | bb | unique1 | unique1
----+----+---------+---------

View File

@ -1,3 +1,10 @@
-- directory paths are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
CREATE TABLE testjsonb (
j jsonb
);
\set filename :abs_srcdir '/data/jsonb.data'
COPY testjsonb FROM :'filename';
-- Strings.
SELECT '""'::jsonb; -- OK.
jsonb

View File

@ -1,6 +1,10 @@
--
-- Test the LOCK statement
--
-- directory paths and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
-- Setup
CREATE SCHEMA lock_schema1;
SET search_path = lock_schema1;
@ -170,6 +174,10 @@ DROP SCHEMA lock_schema1 CASCADE;
DROP ROLE regress_rol_lock1;
-- atomic ops tests
RESET search_path;
CREATE FUNCTION test_atomic_ops()
RETURNS bool
AS :'regresslib'
LANGUAGE C;
SELECT test_atomic_ops();
test_atomic_ops
-----------------

View File

@ -1,9 +1,20 @@
--
-- MISC
--
-- directory paths are passed to us in environment variables
-- directory paths and dlsuffix are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
\getenv abs_builddir PG_ABS_BUILDDIR
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION overpaid(emp)
RETURNS bool
AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION reverse_name(name)
RETURNS name
AS :'regresslib'
LANGUAGE C STRICT;
--
-- BTREE
--
@ -25,6 +36,9 @@ UPDATE onek
-- systems. This non-func update stuff needs to be examined
-- more closely. - jolly (2/22/96)
--
SELECT two, stringu1, ten, string4
INTO TABLE tmp
FROM onek;
UPDATE tmp
SET stringu1 = reverse_name(onek.stringu1)
FROM onek
@ -46,29 +60,23 @@ DROP TABLE tmp;
--
\set filename :abs_builddir '/results/onek.data'
COPY onek TO :'filename';
DELETE FROM onek;
COPY onek FROM :'filename';
SELECT unique1 FROM onek WHERE unique1 < 2 ORDER BY unique1;
unique1
---------
0
1
(2 rows)
CREATE TEMP TABLE onek_copy (LIKE onek);
COPY onek_copy FROM :'filename';
SELECT * FROM onek EXCEPT ALL SELECT * FROM onek_copy;
unique1 | unique2 | two | four | ten | twenty | hundred | thousand | twothousand | fivethous | tenthous | odd | even | stringu1 | stringu2 | string4
---------+---------+-----+------+-----+--------+---------+----------+-------------+-----------+----------+-----+------+----------+----------+---------
(0 rows)
DELETE FROM onek2;
COPY onek2 FROM :'filename';
SELECT unique1 FROM onek2 WHERE unique1 < 2 ORDER BY unique1;
unique1
---------
0
1
(2 rows)
SELECT * FROM onek_copy EXCEPT ALL SELECT * FROM onek;
unique1 | unique2 | two | four | ten | twenty | hundred | thousand | twothousand | fivethous | tenthous | odd | even | stringu1 | stringu2 | string4
---------+---------+-----+------+-----+--------+---------+----------+-------------+-----------+----------+-----+------+----------+----------+---------
(0 rows)
\set filename :abs_builddir '/results/stud_emp.data'
COPY BINARY stud_emp TO :'filename';
DELETE FROM stud_emp;
COPY BINARY stud_emp FROM :'filename';
SELECT * FROM stud_emp;
CREATE TEMP TABLE stud_emp_copy (LIKE stud_emp);
COPY BINARY stud_emp_copy FROM :'filename';
SELECT * FROM stud_emp_copy;
name | age | location | salary | manager | gpa | percent
-------+-----+------------+--------+---------+-----+---------
jeff | 23 | (8,7.7) | 600 | sharon | 3.5 |
@ -76,386 +84,79 @@ SELECT * FROM stud_emp;
linda | 19 | (0.9,6.1) | 100 | | 2.9 |
(3 rows)
-- COPY aggtest FROM stdin;
-- 56 7.8
-- 100 99.097
-- 0 0.09561
-- 42 324.78
-- .
-- COPY aggtest TO stdout;
--
-- inheritance stress test
--
SELECT * FROM a_star*;
class | a
-------+----
a | 1
a | 2
a |
b | 3
b | 4
b |
b |
c | 5
c | 6
c |
c |
d | 7
d | 8
d | 9
d | 10
d |
d | 11
d | 12
d | 13
d |
d |
d |
d | 14
d |
d |
d |
d |
e | 15
e | 16
e | 17
e |
e | 18
e |
e |
f | 19
f | 20
f | 21
f | 22
f |
f | 24
f | 25
f | 26
f |
f |
f |
f | 27
f |
f |
f |
f |
(50 rows)
SELECT *
FROM b_star* x
WHERE x.b = text 'bumble' or x.a < 3;
class | a | b
-------+---+--------
b | | bumble
(1 row)
SELECT class, a
FROM c_star* x
WHERE x.c ~ text 'hi';
class | a
-------+----
c | 5
c |
d | 7
d | 8
d | 10
d |
d | 12
d |
d |
d |
e | 15
e | 16
e |
e |
f | 19
f | 20
f | 21
f |
f | 24
f |
f |
f |
(22 rows)
SELECT class, b, c
FROM d_star* x
WHERE x.a < 100;
class | b | c
-------+---------+------------
d | grumble | hi sunita
d | stumble | hi koko
d | rumble |
d | | hi kristin
d | fumble |
d | | hi avi
d | |
d | |
(8 rows)
SELECT class, c FROM e_star* x WHERE x.c NOTNULL;
class | c
-------+-------------
e | hi carol
e | hi bob
e | hi michelle
e | hi elisa
f | hi claire
f | hi mike
f | hi marcel
f | hi keith
f | hi marc
f | hi allison
f | hi jeff
f | hi carl
(12 rows)
SELECT * FROM f_star* x WHERE x.c ISNULL;
class | a | c | e | f
-------+----+---+-----+-------------------------------------------
f | 22 | | -7 | ((111,555),(222,666),(333,777),(444,888))
f | 25 | | -9 |
f | 26 | | | ((11111,33333),(22222,44444))
f | | | -11 | ((1111111,3333333),(2222222,4444444))
f | 27 | | |
f | | | -12 |
f | | | | ((11111111,33333333),(22222222,44444444))
f | | | |
(8 rows)
-- grouping and aggregation on inherited sets have been busted in the past...
SELECT sum(a) FROM a_star*;
sum
-----
355
(1 row)
SELECT class, sum(a) FROM a_star* GROUP BY class ORDER BY class;
class | sum
-------+-----
a | 3
b | 7
c | 11
d | 84
e | 66
f | 184
(6 rows)
ALTER TABLE f_star RENAME COLUMN f TO ff;
ALTER TABLE e_star* RENAME COLUMN e TO ee;
ALTER TABLE d_star* RENAME COLUMN d TO dd;
ALTER TABLE c_star* RENAME COLUMN c TO cc;
ALTER TABLE b_star* RENAME COLUMN b TO bb;
ALTER TABLE a_star* RENAME COLUMN a TO aa;
SELECT class, aa
FROM a_star* x
WHERE aa ISNULL;
class | aa
-------+----
a |
b |
b |
c |
c |
d |
d |
d |
d |
d |
d |
d |
d |
e |
e |
e |
f |
f |
f |
f |
f |
f |
f |
f |
(24 rows)
-- As of Postgres 7.1, ALTER implicitly recurses,
-- so this should be same as ALTER a_star*
ALTER TABLE a_star RENAME COLUMN aa TO foo;
SELECT class, foo
FROM a_star* x
WHERE x.foo >= 2;
class | foo
-------+-----
a | 2
b | 3
b | 4
c | 5
c | 6
d | 7
d | 8
d | 9
d | 10
d | 11
d | 12
d | 13
d | 14
e | 15
e | 16
e | 17
e | 18
f | 19
f | 20
f | 21
f | 22
f | 24
f | 25
f | 26
f | 27
(25 rows)
ALTER TABLE a_star RENAME COLUMN foo TO aa;
SELECT *
from a_star*
WHERE aa < 1000;
class | aa
-------+----
a | 1
a | 2
b | 3
b | 4
c | 5
c | 6
d | 7
d | 8
d | 9
d | 10
d | 11
d | 12
d | 13
d | 14
e | 15
e | 16
e | 17
e | 18
f | 19
f | 20
f | 21
f | 22
f | 24
f | 25
f | 26
f | 27
(26 rows)
ALTER TABLE f_star ADD COLUMN f int4;
UPDATE f_star SET f = 10;
ALTER TABLE e_star* ADD COLUMN e int4;
--UPDATE e_star* SET e = 42;
SELECT * FROM e_star*;
class | aa | cc | ee | e
-------+----+-------------+-----+---
e | 15 | hi carol | -1 |
e | 16 | hi bob | |
e | 17 | | -2 |
e | | hi michelle | -3 |
e | 18 | | |
e | | hi elisa | |
e | | | -4 |
f | 19 | hi claire | -5 |
f | 20 | hi mike | -6 |
f | 21 | hi marcel | |
f | 22 | | -7 |
f | | hi keith | -8 |
f | 24 | hi marc | |
f | 25 | | -9 |
f | 26 | | |
f | | hi allison | -10 |
f | | hi jeff | |
f | | | -11 |
f | 27 | | |
f | | hi carl | |
f | | | -12 |
f | | | |
f | | | |
(23 rows)
ALTER TABLE a_star* ADD COLUMN a text;
NOTICE: merging definition of column "a" for child "d_star"
-- That ALTER TABLE should have added TOAST tables.
SELECT relname, reltoastrelid <> 0 AS has_toast_table
FROM pg_class
WHERE oid::regclass IN ('a_star', 'c_star')
ORDER BY 1;
relname | has_toast_table
---------+-----------------
a_star | t
c_star | t
(2 rows)
--UPDATE b_star*
-- SET a = text 'gazpacho'
-- WHERE aa > 4;
SELECT class, aa, a FROM a_star*;
class | aa | a
-------+----+---
a | 1 |
a | 2 |
a | |
b | 3 |
b | 4 |
b | |
b | |
c | 5 |
c | 6 |
c | |
c | |
d | 7 |
d | 8 |
d | 9 |
d | 10 |
d | |
d | 11 |
d | 12 |
d | 13 |
d | |
d | |
d | |
d | 14 |
d | |
d | |
d | |
d | |
e | 15 |
e | 16 |
e | 17 |
e | |
e | 18 |
e | |
e | |
f | 19 |
f | 20 |
f | 21 |
f | 22 |
f | |
f | 24 |
f | 25 |
f | 26 |
f | |
f | |
f | |
f | 27 |
f | |
f | |
f | |
f | |
(50 rows)
--
-- versions
-- test data for postquel functions
--
CREATE TABLE hobbies_r (
name text,
person text
);
CREATE TABLE equipment_r (
name text,
hobby text
);
INSERT INTO hobbies_r (name, person)
SELECT 'posthacking', p.name
FROM person* p
WHERE p.name = 'mike' or p.name = 'jeff';
INSERT INTO hobbies_r (name, person)
SELECT 'basketball', p.name
FROM person p
WHERE p.name = 'joe' or p.name = 'sally';
INSERT INTO hobbies_r (name) VALUES ('skywalking');
INSERT INTO equipment_r (name, hobby) VALUES ('advil', 'posthacking');
INSERT INTO equipment_r (name, hobby) VALUES ('peet''s coffee', 'posthacking');
INSERT INTO equipment_r (name, hobby) VALUES ('hightops', 'basketball');
INSERT INTO equipment_r (name, hobby) VALUES ('guts', 'skywalking');
--
-- postquel functions
--
CREATE FUNCTION hobbies(person)
RETURNS setof hobbies_r
AS 'select * from hobbies_r where person = $1.name'
LANGUAGE SQL;
CREATE FUNCTION hobby_construct(text, text)
RETURNS hobbies_r
AS 'select $1 as name, $2 as hobby'
LANGUAGE SQL;
CREATE FUNCTION hobby_construct_named(name text, hobby text)
RETURNS hobbies_r
AS 'select name, hobby'
LANGUAGE SQL;
CREATE FUNCTION hobbies_by_name(hobbies_r.name%TYPE)
RETURNS hobbies_r.person%TYPE
AS 'select person from hobbies_r where name = $1'
LANGUAGE SQL;
NOTICE: type reference hobbies_r.name%TYPE converted to text
NOTICE: type reference hobbies_r.person%TYPE converted to text
CREATE FUNCTION equipment(hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where hobby = $1.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named(hobby hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where equipment_r.hobby = equipment_named.hobby.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_1a(hobby hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where hobby = equipment_named_ambiguous_1a.hobby.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_1b(hobby hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where equipment_r.hobby = hobby.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_1c(hobby hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where hobby = hobby.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_2a(hobby text)
RETURNS setof equipment_r
AS 'select * from equipment_r where hobby = equipment_named_ambiguous_2a.hobby'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_2b(hobby text)
RETURNS setof equipment_r
AS 'select * from equipment_r where equipment_r.hobby = hobby'
LANGUAGE SQL;
--
-- mike does post_hacking,
-- joe and sally play basketball, and

View File

@ -1,3 +1,7 @@
-- directory paths and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
--
-- num_nulls()
--
@ -136,6 +140,10 @@ HINT: No function matches the given name and argument types. You might need to
--
-- canonicalize_path()
--
CREATE FUNCTION test_canonicalize_path(text)
RETURNS text
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
SELECT test_canonicalize_path('/');
test_canonicalize_path
------------------------
@ -456,6 +464,10 @@ WHERE my_int_eq(a.unique2, 42);
(6 rows)
-- With support function that knows it's int4eq, we get a different plan
CREATE FUNCTION test_support_func(internal)
RETURNS internal
AS :'regresslib', 'test_support_func'
LANGUAGE C STRICT;
ALTER FUNCTION my_int_eq(int, int) SUPPORT test_support_func;
EXPLAIN (COSTS OFF)
SELECT * FROM tenk1 a JOIN tenk1 b ON a.unique1 = b.unique1

View File

@ -3,22 +3,12 @@
--
-- avoid bit-exact output here because operations may not be bit-exact.
SET extra_float_digits = 0;
CREATE TABLE POINT_TBL(f1 point);
INSERT INTO POINT_TBL(f1) VALUES ('(0.0,0.0)');
INSERT INTO POINT_TBL(f1) VALUES ('(-10.0,0.0)');
INSERT INTO POINT_TBL(f1) VALUES ('(-3.0,4.0)');
INSERT INTO POINT_TBL(f1) VALUES ('(5.1, 34.5)');
INSERT INTO POINT_TBL(f1) VALUES ('(-5.0,-12.0)');
INSERT INTO POINT_TBL(f1) VALUES ('(1e-300,-1e-300)'); -- To underflow
INSERT INTO POINT_TBL(f1) VALUES ('(1e+300,Inf)'); -- To overflow
INSERT INTO POINT_TBL(f1) VALUES ('(Inf,1e+300)'); -- Transposed
INSERT INTO POINT_TBL(f1) VALUES (' ( Nan , NaN ) ');
-- bad format points
-- point_tbl was already created and filled in test_setup.sql.
-- Here we just try to insert bad values.
INSERT INTO POINT_TBL(f1) VALUES ('asdfasdf');
ERROR: invalid input syntax for type point: "asdfasdf"
LINE 1: INSERT INTO POINT_TBL(f1) VALUES ('asdfasdf');
^
INSERT INTO POINT_TBL(f1) VALUES ('10.0,10.0');
INSERT INTO POINT_TBL(f1) VALUES ('(10.0 10.0)');
ERROR: invalid input syntax for type point: "(10.0 10.0)"
LINE 1: INSERT INTO POINT_TBL(f1) VALUES ('(10.0 10.0)');

View File

@ -1,7 +1,7 @@
-- Tests for range data types.
create type textrange as range (subtype=text, collation="C");
--
-- test input parser
-- (type textrange was already made in test_setup.sql)
--
-- negative tests; should fail
select ''::textrange;
@ -1482,12 +1482,11 @@ LINE 1: select '[2010-01-01 01:00:00 -08, 2010-01-01 02:00:00 -05)':...
set timezone to default;
--
-- Test user-defined range of floats
-- (type float8range was already made in test_setup.sql)
--
--should fail
create type float8range as range (subtype=float8, subtype_diff=float4mi);
create type bogus_float8range as range (subtype=float8, subtype_diff=float4mi);
ERROR: function float4mi(double precision, double precision) does not exist
--should succeed
create type float8range as range (subtype=float8, subtype_diff=float8mi);
select '[123.001, 5.e9)'::float8range @> 888.882::float8;
?column?
----------

View File

@ -1283,30 +1283,8 @@ drop table cchild;
-- temporarily disable fancy output, so view changes create less diff noise
\a\t
SELECT viewname, definition FROM pg_views
WHERE schemaname IN ('pg_catalog', 'public')
WHERE schemaname = 'pg_catalog'
ORDER BY viewname;
iexit| SELECT ih.name,
ih.thepath,
interpt_pp(ih.thepath, r.thepath) AS exit
FROM ihighway ih,
ramp r
WHERE (ih.thepath ## r.thepath);
key_dependent_view| SELECT view_base_table.key,
view_base_table.data
FROM view_base_table
GROUP BY view_base_table.key;
key_dependent_view_no_cols| SELECT
FROM view_base_table
GROUP BY view_base_table.key
HAVING (length((view_base_table.data)::text) > 0);
mvtest_tv| SELECT mvtest_t.type,
sum(mvtest_t.amt) AS totamt
FROM mvtest_t
GROUP BY mvtest_t.type;
mvtest_tvv| SELECT sum(mvtest_tv.totamt) AS grandtot
FROM mvtest_tv;
mvtest_tvvmv| SELECT mvtest_tvvm.grandtot
FROM mvtest_tvvm;
pg_available_extension_versions| SELECT e.name,
e.version,
(x.extname IS NOT NULL) AS installed,
@ -2592,204 +2570,14 @@ pg_views| SELECT n.nspname AS schemaname,
FROM (pg_class c
LEFT JOIN pg_namespace n ON ((n.oid = c.relnamespace)))
WHERE (c.relkind = 'v'::"char");
rtest_v1| SELECT rtest_t1.a,
rtest_t1.b
FROM rtest_t1;
rtest_vcomp| SELECT x.part,
(x.size * y.factor) AS size_in_cm
FROM rtest_comp x,
rtest_unitfact y
WHERE (x.unit = y.unit);
rtest_vview1| SELECT x.a,
x.b
FROM rtest_view1 x
WHERE (0 < ( SELECT count(*) AS count
FROM rtest_view2 y
WHERE (y.a = x.a)));
rtest_vview2| SELECT rtest_view1.a,
rtest_view1.b
FROM rtest_view1
WHERE rtest_view1.v;
rtest_vview3| SELECT x.a,
x.b
FROM rtest_vview2 x
WHERE (0 < ( SELECT count(*) AS count
FROM rtest_view2 y
WHERE (y.a = x.a)));
rtest_vview4| SELECT x.a,
x.b,
count(y.a) AS refcount
FROM rtest_view1 x,
rtest_view2 y
WHERE (x.a = y.a)
GROUP BY x.a, x.b;
rtest_vview5| SELECT rtest_view1.a,
rtest_view1.b,
rtest_viewfunc1(rtest_view1.a) AS refcount
FROM rtest_view1;
shoe| SELECT sh.shoename,
sh.sh_avail,
sh.slcolor,
sh.slminlen,
(sh.slminlen * un.un_fact) AS slminlen_cm,
sh.slmaxlen,
(sh.slmaxlen * un.un_fact) AS slmaxlen_cm,
sh.slunit
FROM shoe_data sh,
unit un
WHERE (sh.slunit = un.un_name);
shoe_ready| SELECT rsh.shoename,
rsh.sh_avail,
rsl.sl_name,
rsl.sl_avail,
int4smaller(rsh.sh_avail, rsl.sl_avail) AS total_avail
FROM shoe rsh,
shoelace rsl
WHERE ((rsl.sl_color = rsh.slcolor) AND (rsl.sl_len_cm >= rsh.slminlen_cm) AND (rsl.sl_len_cm <= rsh.slmaxlen_cm));
shoelace| SELECT s.sl_name,
s.sl_avail,
s.sl_color,
s.sl_len,
s.sl_unit,
(s.sl_len * u.un_fact) AS sl_len_cm
FROM shoelace_data s,
unit u
WHERE (s.sl_unit = u.un_name);
shoelace_candelete| SELECT shoelace_obsolete.sl_name,
shoelace_obsolete.sl_avail,
shoelace_obsolete.sl_color,
shoelace_obsolete.sl_len,
shoelace_obsolete.sl_unit,
shoelace_obsolete.sl_len_cm
FROM shoelace_obsolete
WHERE (shoelace_obsolete.sl_avail = 0);
shoelace_obsolete| SELECT shoelace.sl_name,
shoelace.sl_avail,
shoelace.sl_color,
shoelace.sl_len,
shoelace.sl_unit,
shoelace.sl_len_cm
FROM shoelace
WHERE (NOT (EXISTS ( SELECT shoe.shoename
FROM shoe
WHERE (shoe.slcolor = shoelace.sl_color))));
street| SELECT r.name,
r.thepath,
c.cname
FROM ONLY road r,
real_city c
WHERE (c.outline ## r.thepath);
test_tablesample_v1| SELECT test_tablesample.id
FROM test_tablesample TABLESAMPLE system ((10 * 2)) REPEATABLE (2);
test_tablesample_v2| SELECT test_tablesample.id
FROM test_tablesample TABLESAMPLE system (99);
toyemp| SELECT emp.name,
emp.age,
emp.location,
(12 * emp.salary) AS annualsal
FROM emp;
SELECT tablename, rulename, definition FROM pg_rules
WHERE schemaname IN ('pg_catalog', 'public')
WHERE schemaname = 'pg_catalog'
ORDER BY tablename, rulename;
pg_settings|pg_settings_n|CREATE RULE pg_settings_n AS
ON UPDATE TO pg_catalog.pg_settings DO INSTEAD NOTHING;
pg_settings|pg_settings_u|CREATE RULE pg_settings_u AS
ON UPDATE TO pg_catalog.pg_settings
WHERE (new.name = old.name) DO SELECT set_config(old.name, new.setting, false) AS set_config;
rtest_emp|rtest_emp_del|CREATE RULE rtest_emp_del AS
ON DELETE TO public.rtest_emp DO INSERT INTO rtest_emplog (ename, who, action, newsal, oldsal)
VALUES (old.ename, CURRENT_USER, 'fired'::bpchar, '$0.00'::money, old.salary);
rtest_emp|rtest_emp_ins|CREATE RULE rtest_emp_ins AS
ON INSERT TO public.rtest_emp DO INSERT INTO rtest_emplog (ename, who, action, newsal, oldsal)
VALUES (new.ename, CURRENT_USER, 'hired'::bpchar, new.salary, '$0.00'::money);
rtest_emp|rtest_emp_upd|CREATE RULE rtest_emp_upd AS
ON UPDATE TO public.rtest_emp
WHERE (new.salary <> old.salary) DO INSERT INTO rtest_emplog (ename, who, action, newsal, oldsal)
VALUES (new.ename, CURRENT_USER, 'honored'::bpchar, new.salary, old.salary);
rtest_nothn1|rtest_nothn_r1|CREATE RULE rtest_nothn_r1 AS
ON INSERT TO public.rtest_nothn1
WHERE ((new.a >= 10) AND (new.a < 20)) DO INSTEAD NOTHING;
rtest_nothn1|rtest_nothn_r2|CREATE RULE rtest_nothn_r2 AS
ON INSERT TO public.rtest_nothn1
WHERE ((new.a >= 30) AND (new.a < 40)) DO INSTEAD NOTHING;
rtest_nothn2|rtest_nothn_r3|CREATE RULE rtest_nothn_r3 AS
ON INSERT TO public.rtest_nothn2
WHERE (new.a >= 100) DO INSTEAD INSERT INTO rtest_nothn3 (a, b)
VALUES (new.a, new.b);
rtest_nothn2|rtest_nothn_r4|CREATE RULE rtest_nothn_r4 AS
ON INSERT TO public.rtest_nothn2 DO INSTEAD NOTHING;
rtest_order1|rtest_order_r1|CREATE RULE rtest_order_r1 AS
ON INSERT TO public.rtest_order1 DO INSTEAD INSERT INTO rtest_order2 (a, b, c)
VALUES (new.a, nextval('rtest_seq'::regclass), 'rule 1 - this should run 1st'::text);
rtest_order1|rtest_order_r2|CREATE RULE rtest_order_r2 AS
ON INSERT TO public.rtest_order1 DO INSERT INTO rtest_order2 (a, b, c)
VALUES (new.a, nextval('rtest_seq'::regclass), 'rule 2 - this should run 2nd'::text);
rtest_order1|rtest_order_r3|CREATE RULE rtest_order_r3 AS
ON INSERT TO public.rtest_order1 DO INSTEAD INSERT INTO rtest_order2 (a, b, c)
VALUES (new.a, nextval('rtest_seq'::regclass), 'rule 3 - this should run 3rd'::text);
rtest_order1|rtest_order_r4|CREATE RULE rtest_order_r4 AS
ON INSERT TO public.rtest_order1
WHERE (new.a < 100) DO INSTEAD INSERT INTO rtest_order2 (a, b, c)
VALUES (new.a, nextval('rtest_seq'::regclass), 'rule 4 - this should run 4th'::text);
rtest_person|rtest_pers_del|CREATE RULE rtest_pers_del AS
ON DELETE TO public.rtest_person DO DELETE FROM rtest_admin
WHERE (rtest_admin.pname = old.pname);
rtest_person|rtest_pers_upd|CREATE RULE rtest_pers_upd AS
ON UPDATE TO public.rtest_person DO UPDATE rtest_admin SET pname = new.pname
WHERE (rtest_admin.pname = old.pname);
rtest_system|rtest_sys_del|CREATE RULE rtest_sys_del AS
ON DELETE TO public.rtest_system DO ( DELETE FROM rtest_interface
WHERE (rtest_interface.sysname = old.sysname);
DELETE FROM rtest_admin
WHERE (rtest_admin.sysname = old.sysname);
);
rtest_system|rtest_sys_upd|CREATE RULE rtest_sys_upd AS
ON UPDATE TO public.rtest_system DO ( UPDATE rtest_interface SET sysname = new.sysname
WHERE (rtest_interface.sysname = old.sysname);
UPDATE rtest_admin SET sysname = new.sysname
WHERE (rtest_admin.sysname = old.sysname);
);
rtest_t4|rtest_t4_ins1|CREATE RULE rtest_t4_ins1 AS
ON INSERT TO public.rtest_t4
WHERE ((new.a >= 10) AND (new.a < 20)) DO INSTEAD INSERT INTO rtest_t5 (a, b)
VALUES (new.a, new.b);
rtest_t4|rtest_t4_ins2|CREATE RULE rtest_t4_ins2 AS
ON INSERT TO public.rtest_t4
WHERE ((new.a >= 20) AND (new.a < 30)) DO INSERT INTO rtest_t6 (a, b)
VALUES (new.a, new.b);
rtest_t5|rtest_t5_ins|CREATE RULE rtest_t5_ins AS
ON INSERT TO public.rtest_t5
WHERE (new.a > 15) DO INSERT INTO rtest_t7 (a, b)
VALUES (new.a, new.b);
rtest_t6|rtest_t6_ins|CREATE RULE rtest_t6_ins AS
ON INSERT TO public.rtest_t6
WHERE (new.a > 25) DO INSTEAD INSERT INTO rtest_t8 (a, b)
VALUES (new.a, new.b);
rtest_v1|rtest_v1_del|CREATE RULE rtest_v1_del AS
ON DELETE TO public.rtest_v1 DO INSTEAD DELETE FROM rtest_t1
WHERE (rtest_t1.a = old.a);
rtest_v1|rtest_v1_ins|CREATE RULE rtest_v1_ins AS
ON INSERT TO public.rtest_v1 DO INSTEAD INSERT INTO rtest_t1 (a, b)
VALUES (new.a, new.b);
rtest_v1|rtest_v1_upd|CREATE RULE rtest_v1_upd AS
ON UPDATE TO public.rtest_v1 DO INSTEAD UPDATE rtest_t1 SET a = new.a, b = new.b
WHERE (rtest_t1.a = old.a);
shoelace|shoelace_del|CREATE RULE shoelace_del AS
ON DELETE TO public.shoelace DO INSTEAD DELETE FROM shoelace_data
WHERE (shoelace_data.sl_name = old.sl_name);
shoelace|shoelace_ins|CREATE RULE shoelace_ins AS
ON INSERT TO public.shoelace DO INSTEAD INSERT INTO shoelace_data (sl_name, sl_avail, sl_color, sl_len, sl_unit)
VALUES (new.sl_name, new.sl_avail, new.sl_color, new.sl_len, new.sl_unit);
shoelace|shoelace_upd|CREATE RULE shoelace_upd AS
ON UPDATE TO public.shoelace DO INSTEAD UPDATE shoelace_data SET sl_name = new.sl_name, sl_avail = new.sl_avail, sl_color = new.sl_color, sl_len = new.sl_len, sl_unit = new.sl_unit
WHERE (shoelace_data.sl_name = old.sl_name);
shoelace_data|log_shoelace|CREATE RULE log_shoelace AS
ON UPDATE TO public.shoelace_data
WHERE (new.sl_avail <> old.sl_avail) DO INSERT INTO shoelace_log (sl_name, sl_avail, log_who, log_when)
VALUES (new.sl_name, new.sl_avail, 'Al Bundy'::name, 'Thu Jan 01 00:00:00 1970'::timestamp without time zone);
shoelace_ok|shoelace_ok_ins|CREATE RULE shoelace_ok_ins AS
ON INSERT TO public.shoelace_ok DO INSTEAD UPDATE shoelace SET sl_avail = (shoelace.sl_avail + new.ok_quant)
WHERE (shoelace.sl_name = new.ok_name);
-- restore normal output mode
\a\t
--

View File

@ -1,229 +1,6 @@
VACUUM;
--
-- sanity check, if we don't have indices the test will take years to
-- complete. But skip TOAST relations (since they will have varying
-- names depending on the current OID counter) as well as temp tables
-- of other backends (to avoid timing-dependent behavior).
--
-- temporarily disable fancy output, so catalog changes create less diff noise
\a\t
SELECT relname, relhasindex
FROM pg_class c LEFT JOIN pg_namespace n ON n.oid = relnamespace
WHERE relkind IN ('r', 'p') AND (nspname ~ '^pg_temp_') IS NOT TRUE
ORDER BY relname;
a|f
a_star|f
aggtest|f
array_index_op_test|t
array_op_test|f
b|f
b_star|f
bit_defaults|f
box_tbl|f
bprime|f
bt_f8_heap|t
bt_i4_heap|t
bt_name_heap|t
bt_txt_heap|t
c|f
c_star|f
char_tbl|f
check2_tbl|f
check_tbl|f
circle_tbl|t
city|f
copy_tbl|f
d|f
d_star|f
date_tbl|f
default_tbl|f
defaultexpr_tbl|f
dept|f
dupindexcols|t
e_star|f
emp|f
equipment_r|f
extra_wide_table|f
f_star|f
fast_emp4000|t
float4_tbl|f
float8_tbl|f
func_index_heap|t
hash_f8_heap|t
hash_i4_heap|t
hash_name_heap|t
hash_txt_heap|t
hobbies_r|f
ihighway|t
inet_tbl|f
insert_tbl|f
int2_tbl|f
int4_tbl|f
int8_tbl|f
interval_tbl|f
invalid_check_con|f
invalid_check_con_child|f
iportaltest|f
kd_point_tbl|t
line_tbl|f
log_table|f
lseg_tbl|f
main_table|f
mlparted|f
mlparted1|f
mlparted11|f
mlparted12|f
mlparted2|f
mlparted3|f
mlparted4|f
mlparted_def|f
mlparted_def1|f
mlparted_def2|f
mlparted_defd|f
money_data|f
num_data|f
num_exp_add|t
num_exp_div|t
num_exp_ln|t
num_exp_log10|t
num_exp_mul|t
num_exp_power_10_ln|t
num_exp_sqrt|t
num_exp_sub|t
num_input_test|f
num_result|f
num_typemod_test|f
nummultirange_test|t
numrange_test|t
onek|t
onek2|t
path_tbl|f
person|f
persons|f
persons2|t
persons3|t
pg_aggregate|t
pg_am|t
pg_amop|t
pg_amproc|t
pg_attrdef|t
pg_attribute|t
pg_auth_members|t
pg_authid|t
pg_cast|t
pg_class|t
pg_collation|t
pg_constraint|t
pg_conversion|t
pg_database|t
pg_db_role_setting|t
pg_default_acl|t
pg_depend|t
pg_description|t
pg_enum|t
pg_event_trigger|t
pg_extension|t
pg_foreign_data_wrapper|t
pg_foreign_server|t
pg_foreign_table|t
pg_index|t
pg_inherits|t
pg_init_privs|t
pg_language|t
pg_largeobject|t
pg_largeobject_metadata|t
pg_namespace|t
pg_opclass|t
pg_operator|t
pg_opfamily|t
pg_partitioned_table|t
pg_policy|t
pg_proc|t
pg_publication|t
pg_publication_namespace|t
pg_publication_rel|t
pg_range|t
pg_replication_origin|t
pg_rewrite|t
pg_seclabel|t
pg_sequence|t
pg_shdepend|t
pg_shdescription|t
pg_shseclabel|t
pg_statistic|t
pg_statistic_ext|t
pg_statistic_ext_data|t
pg_subscription|t
pg_subscription_rel|t
pg_tablespace|t
pg_transform|t
pg_trigger|t
pg_ts_config|t
pg_ts_config_map|t
pg_ts_dict|t
pg_ts_parser|t
pg_ts_template|t
pg_type|t
pg_user_mapping|t
point_tbl|t
polygon_tbl|t
quad_box_tbl|t
quad_box_tbl_ord_seq1|f
quad_box_tbl_ord_seq2|f
quad_point_tbl|t
quad_poly_tbl|t
radix_text_tbl|t
ramp|f
real_city|f
reservations|f
road|t
shighway|t
slow_emp4000|f
sql_features|f
sql_implementation_info|f
sql_parts|f
sql_sizing|f
stud_emp|f
student|f
tab_core_types|f
tableam_parted_a_heap2|f
tableam_parted_b_heap2|f
tableam_parted_c_heap2|f
tableam_parted_d_heap2|f
tableam_parted_heap2|f
tableam_tbl_heap2|f
tableam_tblas_heap2|f
tbl_include_box|t
tbl_include_box_pk|f
tbl_include_pk|t
tbl_include_reg|t
tbl_include_unique1|t
tbl_include_unique2|f
tenk1|t
tenk2|t
test_range_excl|t
test_range_gist|t
test_range_spgist|t
test_tsvector|f
testjsonb|f
text_tbl|f
textrange_test|t
time_tbl|f
timestamp_tbl|f
timestamptz_tbl|f
timetz_tbl|f
tmp|f
trigger_parted|t
trigger_parted_p1|t
trigger_parted_p1_1|t
trigger_parted_p2|t
trigger_parted_p2_2|t
varchar_tbl|f
view_base_table|t
-- restore normal output mode
\a\t
--
-- another sanity check: every system catalog that has OIDs should have
-- Sanity check: every system catalog that has OIDs should have
-- a unique index on OID. This ensures that the OIDs will be unique,
-- even after the OID counter wraps around.
-- We exclude non-system tables from the check by looking at nspname.

View File

@ -292,9 +292,6 @@ SELECT onek2.unique1, onek2.stringu1 FROM onek2
RESET enable_seqscan;
RESET enable_bitmapscan;
RESET enable_sort;
SELECT two, stringu1, ten, string4
INTO TABLE tmp
FROM onek;
--
-- awk '{print $1,$2;}' person.data |
-- awk '{if(NF!=2){print $3,$2;}else{print;}}' - emp.data |

View File

@ -4,7 +4,7 @@
--
-- awk '{print $3;}' onek.data | sort -n | uniq
--
SELECT DISTINCT two FROM tmp ORDER BY 1;
SELECT DISTINCT two FROM onek ORDER BY 1;
two
-----
0
@ -14,7 +14,7 @@ SELECT DISTINCT two FROM tmp ORDER BY 1;
--
-- awk '{print $5;}' onek.data | sort -n | uniq
--
SELECT DISTINCT ten FROM tmp ORDER BY 1;
SELECT DISTINCT ten FROM onek ORDER BY 1;
ten
-----
0
@ -32,7 +32,7 @@ SELECT DISTINCT ten FROM tmp ORDER BY 1;
--
-- awk '{print $16;}' onek.data | sort -d | uniq
--
SELECT DISTINCT string4 FROM tmp ORDER BY 1;
SELECT DISTINCT string4 FROM onek ORDER BY 1;
string4
---------
AAAAxx
@ -46,7 +46,7 @@ SELECT DISTINCT string4 FROM tmp ORDER BY 1;
-- sort +0n -1 +1d -2 +2n -3
--
SELECT DISTINCT two, string4, ten
FROM tmp
FROM onek
ORDER BY two using <, string4 using <, ten using <;
two | string4 | ten
-----+---------+-----

View File

@ -2,7 +2,7 @@
-- SELECT_DISTINCT_ON
--
SELECT DISTINCT ON (string4) string4, two, ten
FROM tmp
FROM onek
ORDER BY string4 using <, two using >, ten using <;
string4 | two | ten
---------+-----+-----
@ -14,13 +14,13 @@ SELECT DISTINCT ON (string4) string4, two, ten
-- this will fail due to conflict of ordering requirements
SELECT DISTINCT ON (string4, ten) string4, two, ten
FROM tmp
FROM onek
ORDER BY string4 using <, two using <, ten using <;
ERROR: SELECT DISTINCT ON expressions must match initial ORDER BY expressions
LINE 1: SELECT DISTINCT ON (string4, ten) string4, two, ten
^
SELECT DISTINCT ON (string4, ten) string4, ten, two
FROM tmp
FROM onek
ORDER BY string4 using <, ten using >, two using <;
string4 | ten | two
---------+-----+-----

View File

@ -162,9 +162,9 @@ DROP TABLE easi, easi2;
--
-- Disallowed uses of SELECT ... INTO. All should fail
--
DECLARE foo CURSOR FOR SELECT 1 INTO b;
DECLARE foo CURSOR FOR SELECT 1 INTO int4_tbl;
ERROR: SELECT ... INTO is not allowed here
LINE 1: DECLARE foo CURSOR FOR SELECT 1 INTO b;
LINE 1: DECLARE foo CURSOR FOR SELECT 1 INTO int4_tbl;
^
COPY (SELECT 1 INTO frak UNION SELECT 2) TO 'blob';
ERROR: COPY (SELECT INTO) is not supported
@ -172,12 +172,12 @@ SELECT * FROM (SELECT 1 INTO f) bar;
ERROR: SELECT ... INTO is not allowed here
LINE 1: SELECT * FROM (SELECT 1 INTO f) bar;
^
CREATE VIEW foo AS SELECT 1 INTO b;
CREATE VIEW foo AS SELECT 1 INTO int4_tbl;
ERROR: views must not contain SELECT INTO
INSERT INTO b SELECT 1 INTO f;
INSERT INTO int4_tbl SELECT 1 INTO f;
ERROR: SELECT ... INTO is not allowed here
LINE 1: INSERT INTO b SELECT 1 INTO f;
^
LINE 1: INSERT INTO int4_tbl SELECT 1 INTO f;
^
-- Test CREATE TABLE AS ... IF NOT EXISTS
CREATE TABLE ctas_ine_tbl AS SELECT 1;
CREATE TABLE ctas_ine_tbl AS SELECT 1 / 0; -- error

View File

@ -1,3 +1,219 @@
--
-- TEST_SETUP --- prepare environment expected by regression test scripts
--
-- directory paths and dlsuffix are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
--
-- Postgres formerly made the public schema read/write by default,
-- and most of the core regression tests still expect that.
--
GRANT ALL ON SCHEMA public TO public;
--
-- These tables have traditionally been referenced by many tests,
-- so create and populate them. Insert only non-error values here.
-- (Some subsequent tests try to insert erroneous values. That's okay
-- because the table won't actually change. Do not change the contents
-- of these tables in later tests, as it may affect other tests.)
--
CREATE TABLE CHAR_TBL(f1 char(4));
INSERT INTO CHAR_TBL (f1) VALUES
('a'),
('ab'),
('abcd'),
('abcd ');
VACUUM CHAR_TBL;
CREATE TABLE FLOAT8_TBL(f1 float8);
INSERT INTO FLOAT8_TBL(f1) VALUES
('0.0'),
('-34.84'),
('-1004.30'),
('-1.2345678901234e+200'),
('-1.2345678901234e-200');
VACUUM FLOAT8_TBL;
CREATE TABLE INT2_TBL(f1 int2);
INSERT INTO INT2_TBL(f1) VALUES
('0 '),
(' 1234 '),
(' -1234'),
('32767'), -- largest and smallest values
('-32767');
VACUUM INT2_TBL;
CREATE TABLE INT4_TBL(f1 int4);
INSERT INTO INT4_TBL(f1) VALUES
(' 0 '),
('123456 '),
(' -123456'),
('2147483647'), -- largest and smallest values
('-2147483647');
VACUUM INT4_TBL;
CREATE TABLE INT8_TBL(q1 int8, q2 int8);
INSERT INTO INT8_TBL VALUES
(' 123 ',' 456'),
('123 ','4567890123456789'),
('4567890123456789','123'),
(+4567890123456789,'4567890123456789'),
('+4567890123456789','-4567890123456789');
VACUUM INT8_TBL;
CREATE TABLE POINT_TBL(f1 point);
INSERT INTO POINT_TBL(f1) VALUES
('(0.0,0.0)'),
('(-10.0,0.0)'),
('(-3.0,4.0)'),
('(5.1, 34.5)'),
('(-5.0,-12.0)'),
('(1e-300,-1e-300)'), -- To underflow
('(1e+300,Inf)'), -- To overflow
('(Inf,1e+300)'), -- Transposed
(' ( Nan , NaN ) '),
('10.0,10.0');
-- We intentionally don't vacuum point_tbl here; geometry depends on that
CREATE TABLE TEXT_TBL (f1 text);
INSERT INTO TEXT_TBL VALUES
('doh!'),
('hi de ho neighbor');
VACUUM TEXT_TBL;
CREATE TABLE VARCHAR_TBL(f1 varchar(4));
INSERT INTO VARCHAR_TBL (f1) VALUES
('a'),
('ab'),
('abcd'),
('abcd ');
VACUUM VARCHAR_TBL;
CREATE TABLE onek (
unique1 int4,
unique2 int4,
two int4,
four int4,
ten int4,
twenty int4,
hundred int4,
thousand int4,
twothousand int4,
fivethous int4,
tenthous int4,
odd int4,
even int4,
stringu1 name,
stringu2 name,
string4 name
);
\set filename :abs_srcdir '/data/onek.data'
COPY onek FROM :'filename';
VACUUM ANALYZE onek;
CREATE TABLE onek2 AS SELECT * FROM onek;
VACUUM ANALYZE onek2;
CREATE TABLE tenk1 (
unique1 int4,
unique2 int4,
two int4,
four int4,
ten int4,
twenty int4,
hundred int4,
thousand int4,
twothousand int4,
fivethous int4,
tenthous int4,
odd int4,
even int4,
stringu1 name,
stringu2 name,
string4 name
);
\set filename :abs_srcdir '/data/tenk.data'
COPY tenk1 FROM :'filename';
VACUUM ANALYZE tenk1;
CREATE TABLE tenk2 AS SELECT * FROM tenk1;
VACUUM ANALYZE tenk2;
CREATE TABLE person (
name text,
age int4,
location point
);
\set filename :abs_srcdir '/data/person.data'
COPY person FROM :'filename';
VACUUM ANALYZE person;
CREATE TABLE emp (
salary int4,
manager name
) INHERITS (person);
\set filename :abs_srcdir '/data/emp.data'
COPY emp FROM :'filename';
VACUUM ANALYZE emp;
CREATE TABLE student (
gpa float8
) INHERITS (person);
\set filename :abs_srcdir '/data/student.data'
COPY student FROM :'filename';
VACUUM ANALYZE student;
CREATE TABLE stud_emp (
percent int4
) INHERITS (emp, student);
NOTICE: merging multiple inherited definitions of column "name"
NOTICE: merging multiple inherited definitions of column "age"
NOTICE: merging multiple inherited definitions of column "location"
\set filename :abs_srcdir '/data/stud_emp.data'
COPY stud_emp FROM :'filename';
VACUUM ANALYZE stud_emp;
CREATE TABLE road (
name text,
thepath path
);
\set filename :abs_srcdir '/data/streets.data'
COPY road FROM :'filename';
VACUUM ANALYZE road;
CREATE TABLE ihighway () INHERITS (road);
INSERT INTO ihighway
SELECT *
FROM ONLY road
WHERE name ~ 'I- .*';
VACUUM ANALYZE ihighway;
CREATE TABLE shighway (
surface text
) INHERITS (road);
INSERT INTO shighway
SELECT *, 'asphalt'
FROM ONLY road
WHERE name ~ 'State Hwy.*';
VACUUM ANALYZE shighway;
--
-- We must have some enum type in the database for opr_sanity and type_sanity.
--
create type stoplight as enum ('red', 'yellow', 'green');
--
-- Also create some non-built-in range types.
--
create type float8range as range (subtype = float8, subtype_diff = float8mi);
create type textrange as range (subtype = text, collation = "C");
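-- Illustration only, not part of this script: these range types behave
-- like the built-in ones; e.g. under the "C" collation chosen above,
--   SELECT 'abc'::text <@ textrange('a', 'b');
-- returns true, because 'a' <= 'abc' < 'b' in byte order.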
--
-- Create some C functions that will be used by various tests.
--
CREATE FUNCTION binary_coercible(oid, oid)
RETURNS bool
AS :'regresslib', 'binary_coercible'
LANGUAGE C STRICT STABLE PARALLEL SAFE;
CREATE FUNCTION ttdummy ()
RETURNS trigger
AS :'regresslib'
LANGUAGE C;
-- Use hand-rolled hash functions and operator classes to get predictable
-- result on different machines. The hash function for int4 simply returns
-- the sum of the values passed to it and the one for text returns the length
-- of the non-empty string value passed to it or 0.
create function part_hashint4_noop(value int4, seed int8)
returns int8 as $$
select value + seed;
$$ language sql strict immutable parallel safe;
create operator class part_test_int4_ops for type int4 using hash as
operator 1 =,
function 2 part_hashint4_noop(int4, int8);
create function part_hashtext_length(value text, seed int8)
returns int8 as $$
select length(coalesce(value, ''))::int8
$$ language sql strict immutable parallel safe;
create operator class part_test_text_ops for type text using hash as
operator 1 =,
function 2 part_hashtext_length(text, int8);
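-- Illustration only, not part of this script: later partitioning tests
-- attach these opclasses to hash-partitioned tables so that row-to-partition
-- routing is reproducible across machines.  A minimal sketch, using
-- hypothetical table names:
CREATE TABLE part_demo (a int) PARTITION BY HASH (a part_test_int4_ops);
CREATE TABLE part_demo_p0 PARTITION OF part_demo
  FOR VALUES WITH (MODULUS 2, REMAINDER 0);
CREATE TABLE part_demo_p1 PARTITION OF part_demo
  FOR VALUES WITH (MODULUS 2, REMAINDER 1);
INSERT INTO part_demo SELECT generate_series(1, 10);
-- The no-op hash (value + seed) makes the resulting distribution predictable:
SELECT tableoid::regclass, count(*) FROM part_demo GROUP BY 1 ORDER BY 1;
DROP TABLE part_demo;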

View File

@ -13,9 +13,7 @@ SELECT text 'this is a text string' = text 'this is a text strin' AS false;
f
(1 row)
CREATE TABLE TEXT_TBL (f1 text);
INSERT INTO TEXT_TBL VALUES ('doh!');
INSERT INTO TEXT_TBL VALUES ('hi de ho neighbor');
-- text_tbl was already created and filled in test_setup.sql.
SELECT * FROM TEXT_TBL;
f1
-------------------

View File

@ -2,9 +2,12 @@
-- TRANSACTIONS
--
BEGIN;
SELECT *
INTO TABLE xacttest
FROM aggtest;
CREATE TABLE xacttest (a smallint, b real);
INSERT INTO xacttest VALUES
(56, 7.8),
(100, 99.097),
(0, 0.09561),
(42, 324.78);
INSERT INTO xacttest (a, b) VALUES (777, 777.777);
END;
-- should retrieve one value--
@ -16,9 +19,9 @@ SELECT a FROM xacttest WHERE a > 100;
BEGIN;
CREATE TABLE disappear (a int4);
DELETE FROM aggtest;
DELETE FROM xacttest;
-- should be empty
SELECT * FROM aggtest;
SELECT * FROM xacttest;
a | b
---+---
(0 rows)
@ -31,14 +34,15 @@ SELECT oid FROM pg_class WHERE relname = 'disappear';
(0 rows)
-- should have members again
SELECT * FROM aggtest;
SELECT * FROM xacttest;
a | b
-----+---------
56 | 7.8
100 | 99.097
0 | 0.09561
42 | 324.78
(4 rows)
777 | 777.777
(5 rows)
-- Read-only tests
CREATE TABLE writetest (a int);

View File

@ -1,6 +1,32 @@
--
-- TRIGGERS
--
-- directory paths and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set autoinclib :libdir '/autoinc' :dlsuffix
\set refintlib :libdir '/refint' :dlsuffix
\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION autoinc ()
RETURNS trigger
AS :'autoinclib'
LANGUAGE C;
CREATE FUNCTION check_primary_key ()
RETURNS trigger
AS :'refintlib'
LANGUAGE C;
CREATE FUNCTION check_foreign_key ()
RETURNS trigger
AS :'refintlib'
LANGUAGE C;
CREATE FUNCTION trigger_return_old ()
RETURNS trigger
AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION set_ttdummy (int4)
RETURNS int4
AS :'regresslib'
LANGUAGE C STRICT;
create table pkeys (pkey1 int4 not null, pkey2 text not null);
create table fkeys (fkey1 int4, fkey2 text, fkey3 int);
create table fkeys2 (fkey21 int4, fkey22 text, pkey23 int not null);

View File

@ -1,3 +1,5 @@
-- directory paths are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
--
-- Sanity checks for text search catalogs
--
@ -55,6 +57,14 @@ WHERE
-------+-------+--------+--------------+----------+---------
(0 rows)
-- Load some test data
CREATE TABLE test_tsvector(
t text,
a tsvector
);
\set filename :abs_srcdir '/data/tsearch.data'
COPY test_tsvector FROM :'filename';
ANALYZE test_tsvector;
-- test basic text search behavior without indexes, then with
SELECT count(*) FROM test_tsvector WHERE a @@ 'wr|qh';
count

View File

@ -731,28 +731,23 @@ CREATE TABLE tab_core_types AS SELECT
'n'::information_schema.sql_identifier,
'now'::information_schema.time_stamp,
'YES'::information_schema.yes_or_no,
'venus'::planets,
'i16'::insenum,
'(1,2)'::int4range, '{(1,2)}'::int4multirange,
'(3,4)'::int8range, '{(3,4)}'::int8multirange,
'(1,2)'::float8range, '{(1,2)}'::float8multirange,
'(3,4)'::numrange, '{(3,4)}'::nummultirange,
'(a,b)'::textrange, '{(a,b)}'::textmultirange,
'(12.34, 56.78)'::cashrange, '{(12.34, 56.78)}'::cashmultirange,
'(2020-01-02, 2021-02-03)'::daterange,
'{(2020-01-02, 2021-02-03)}'::datemultirange,
'(2020-01-02 03:04:05, 2021-02-03 06:07:08)'::tsrange,
'{(2020-01-02 03:04:05, 2021-02-03 06:07:08)}'::tsmultirange,
'(2020-01-02 03:04:05, 2021-02-03 06:07:08)'::tstzrange,
'{(2020-01-02 03:04:05, 2021-02-03 06:07:08)}'::tstzmultirange,
arrayrange(ARRAY[1,2], ARRAY[2,1]),
arraymultirange(arrayrange(ARRAY[1,2], ARRAY[2,1]));
'{(2020-01-02 03:04:05, 2021-02-03 06:07:08)}'::tstzmultirange;
-- Sanity check on the previous table, checking that all core types are
-- included in this table.
SELECT oid, typname, typtype, typelem, typarray, typarray
SELECT oid, typname, typtype, typelem, typarray
FROM pg_type t
WHERE typtype NOT IN ('p', 'c') AND
-- reg* types cannot be pg_upgraded, so discard them.
WHERE oid < 16384 AND
-- Exclude pseudotypes and composite types.
typtype NOT IN ('p', 'c') AND
-- These reg* types cannot be pg_upgraded, so discard them.
oid != ALL(ARRAY['regproc', 'regprocedure', 'regoper',
'regoperator', 'regconfig', 'regdictionary',
'regnamespace', 'regcollation']::regtype[]) AND
@ -772,7 +767,7 @@ SELECT oid, typname, typtype, typelem, typarray, typarray
WHERE a.atttypid=t.oid AND
a.attnum > 0 AND
a.attrelid='tab_core_types'::regclass);
oid | typname | typtype | typelem | typarray | typarray
-----+---------+---------+---------+----------+----------
oid | typname | typtype | typelem | typarray
-----+---------+---------+---------+----------
(0 rows)

View File

@ -1,7 +1,11 @@
--
-- VARCHAR
--
CREATE TABLE VARCHAR_TBL(f1 varchar(1));
--
-- Build a table for testing
-- (This temporarily hides the table created in test_setup.sql)
--
CREATE TEMP TABLE VARCHAR_TBL(f1 varchar(1));
INSERT INTO VARCHAR_TBL (f1) VALUES ('a');
INSERT INTO VARCHAR_TBL (f1) VALUES ('A');
-- any of the following three input formats are acceptable
@ -93,13 +97,11 @@ DROP TABLE VARCHAR_TBL;
--
-- Now test longer arrays of char
--
CREATE TABLE VARCHAR_TBL(f1 varchar(4));
INSERT INTO VARCHAR_TBL (f1) VALUES ('a');
INSERT INTO VARCHAR_TBL (f1) VALUES ('ab');
INSERT INTO VARCHAR_TBL (f1) VALUES ('abcd');
-- This varchar_tbl was already created and filled in test_setup.sql.
-- Here we just try to insert bad values.
--
INSERT INTO VARCHAR_TBL (f1) VALUES ('abcde');
ERROR: value too long for type character varying(4)
INSERT INTO VARCHAR_TBL (f1) VALUES ('abcd ');
SELECT * FROM VARCHAR_TBL;
f1
------

View File

@ -1,7 +1,11 @@
--
-- VARCHAR
--
CREATE TABLE VARCHAR_TBL(f1 varchar(1));
--
-- Build a table for testing
-- (This temporarily hides the table created in test_setup.sql)
--
CREATE TEMP TABLE VARCHAR_TBL(f1 varchar(1));
INSERT INTO VARCHAR_TBL (f1) VALUES ('a');
INSERT INTO VARCHAR_TBL (f1) VALUES ('A');
-- any of the following three input formats are acceptable
@ -93,13 +97,11 @@ DROP TABLE VARCHAR_TBL;
--
-- Now test longer arrays of char
--
CREATE TABLE VARCHAR_TBL(f1 varchar(4));
INSERT INTO VARCHAR_TBL (f1) VALUES ('a');
INSERT INTO VARCHAR_TBL (f1) VALUES ('ab');
INSERT INTO VARCHAR_TBL (f1) VALUES ('abcd');
-- This varchar_tbl was already created and filled in test_setup.sql.
-- Here we just try to insert bad values.
--
INSERT INTO VARCHAR_TBL (f1) VALUES ('abcde');
ERROR: value too long for type character varying(4)
INSERT INTO VARCHAR_TBL (f1) VALUES ('abcd ');
SELECT * FROM VARCHAR_TBL;
f1
------

View File

@ -1,7 +1,11 @@
--
-- VARCHAR
--
CREATE TABLE VARCHAR_TBL(f1 varchar(1));
--
-- Build a table for testing
-- (This temporarily hides the table created in test_setup.sql)
--
CREATE TEMP TABLE VARCHAR_TBL(f1 varchar(1));
INSERT INTO VARCHAR_TBL (f1) VALUES ('a');
INSERT INTO VARCHAR_TBL (f1) VALUES ('A');
-- any of the following three input formats are acceptable
@ -93,13 +97,11 @@ DROP TABLE VARCHAR_TBL;
--
-- Now test longer arrays of char
--
CREATE TABLE VARCHAR_TBL(f1 varchar(4));
INSERT INTO VARCHAR_TBL (f1) VALUES ('a');
INSERT INTO VARCHAR_TBL (f1) VALUES ('ab');
INSERT INTO VARCHAR_TBL (f1) VALUES ('abcd');
-- This varchar_tbl was already created and filled in test_setup.sql.
-- Here we just try to insert bad values.
--
INSERT INTO VARCHAR_TBL (f1) VALUES ('abcde');
ERROR: value too long for type character varying(4)
INSERT INTO VARCHAR_TBL (f1) VALUES ('abcd ');
SELECT * FROM VARCHAR_TBL;
f1
------

View File

@ -3043,36 +3043,42 @@ SELECT * FROM parent;
-- check EXPLAIN VERBOSE for a wCTE with RETURNING
EXPLAIN (VERBOSE, COSTS OFF)
WITH wcte AS ( INSERT INTO int8_tbl VALUES ( 42, 47 ) RETURNING q2 )
DELETE FROM a USING wcte WHERE aa = q2;
QUERY PLAN
------------------------------------------------------------
Delete on public.a
Delete on public.a a_1
Delete on public.b a_2
Delete on public.c a_3
Delete on public.d a_4
DELETE FROM a_star USING wcte WHERE aa = q2;
QUERY PLAN
---------------------------------------------------------------------------
Delete on public.a_star
Delete on public.a_star a_star_1
Delete on public.b_star a_star_2
Delete on public.c_star a_star_3
Delete on public.d_star a_star_4
Delete on public.e_star a_star_5
Delete on public.f_star a_star_6
CTE wcte
-> Insert on public.int8_tbl
Output: int8_tbl.q2
-> Result
Output: '42'::bigint, '47'::bigint
-> Hash Join
Output: wcte.*, a.tableoid, a.ctid
Hash Cond: (a.aa = wcte.q2)
Output: wcte.*, a_star.tableoid, a_star.ctid
Hash Cond: (a_star.aa = wcte.q2)
-> Append
-> Seq Scan on public.a a_1
Output: a_1.aa, a_1.tableoid, a_1.ctid
-> Seq Scan on public.b a_2
Output: a_2.aa, a_2.tableoid, a_2.ctid
-> Seq Scan on public.c a_3
Output: a_3.aa, a_3.tableoid, a_3.ctid
-> Seq Scan on public.d a_4
Output: a_4.aa, a_4.tableoid, a_4.ctid
-> Seq Scan on public.a_star a_star_1
Output: a_star_1.aa, a_star_1.tableoid, a_star_1.ctid
-> Seq Scan on public.b_star a_star_2
Output: a_star_2.aa, a_star_2.tableoid, a_star_2.ctid
-> Seq Scan on public.c_star a_star_3
Output: a_star_3.aa, a_star_3.tableoid, a_star_3.ctid
-> Seq Scan on public.d_star a_star_4
Output: a_star_4.aa, a_star_4.tableoid, a_star_4.ctid
-> Seq Scan on public.e_star a_star_5
Output: a_star_5.aa, a_star_5.tableoid, a_star_5.ctid
-> Seq Scan on public.f_star a_star_6
Output: a_star_6.aa, a_star_6.tableoid, a_star_6.ctid
-> Hash
Output: wcte.*, wcte.q2
-> CTE Scan on wcte
Output: wcte.*, wcte.q2
(26 rows)
(32 rows)
-- error cases
-- data-modifying WITH tries to use its own output

View File

@ -1,6 +1,9 @@
# ----------
# src/test/regress/parallel_schedule
#
# Most test scripts can be run after running just test_setup and possibly
# create_index. Exceptions to this rule are documented below.
#
# By convention, we put no more than twenty tests in any one parallel group;
# this limits the number of connections needed to run the tests.
# ----------
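# Illustration only, not part of this file: given the dependencies documented
# below, an individual script can be run with just its prerequisites, for
# example (assuming the check-tests target in src/test/regress):
#   make check-tests TESTS="test_setup create_index btree_index"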
@ -20,42 +23,31 @@ test: boolean char name varchar text int2 int4 int8 oid float4 float8 bit numeri
# ----------
# The second group of parallel tests
# strings depends on char, varchar and text
# numerology depends on int2, int4, int8, float4, float8
# multirangetypes depends on rangetypes
# multirangetypes shouldn't run concurrently with type_sanity
# ----------
test: strings numerology point lseg line box path polygon circle date time timetz timestamp timestamptz interval inet macaddr macaddr8 multirangetypes create_function_0
test: strings numerology point lseg line box path polygon circle date time timetz timestamp timestamptz interval inet macaddr macaddr8 multirangetypes
# ----------
# Another group of parallel tests
# geometry depends on point, lseg, box, path, polygon and circle
# horology depends on interval, timetz, timestamp, timestamptz
# opr_sanity depends on create_function_0
# geometry depends on point, lseg, line, box, path, polygon, circle
# horology depends on date, time, timetz, timestamp, timestamptz, interval
# ----------
test: geometry horology tstypes regex type_sanity opr_sanity misc_sanity comments expressions unicode xid mvcc
# ----------
# These four each depend on the previous one
# ----------
test: create_function_1
test: create_type
test: create_table
test: create_function_2
# ----------
# Load huge amounts of data
# We should split the data files into single files and then
# execute two copy tests parallel, to check that copy itself
# execute two copy tests in parallel, to check that copy itself
# is concurrent safe.
# ----------
test: copy copyselect copydml insert insert_conflict
# ----------
# More groups of parallel tests
# Note: many of the tests in later groups depend on create_index
# ----------
test: create_misc create_operator create_procedure
# These depend on create_misc and create_operator
test: create_function_0 create_misc create_operator create_procedure create_table create_type
test: create_index create_index_spgist create_view index_including index_including_gist
# ----------
@ -69,11 +61,14 @@ test: create_aggregate create_function_3 create_cast constraints triggers select
# ----------
test: sanity_check
# Note: the ignore: line does not skip random, just mark it as ignorable
ignore: random
# ----------
# Another group of parallel tests
# Note: the ignore: line does not run random, just mark it as ignorable
# aggregates depends on create_aggregate
# join depends on create_misc
# ----------
ignore: random
test: select_into select_distinct select_distinct_on select_implicit select_having subselect union case join aggregates transactions random portals arrays btree_index hash_index update delete namespace prepared_xacts
# ----------
@ -88,15 +83,18 @@ test: brin_bloom brin_multi
# ----------
# Another group of parallel tests
# psql depends on create_am
# amutils depends on geometry, create_index_spgist, hash_index, brin
# ----------
test: create_table_like alter_generic alter_operator misc async dbsize misc_functions sysviews tsrf tid tidscan tidrangescan collate.icu.utf8 incremental_sort create_role
# rules cannot run concurrently with any test that creates
# a view or rule in the public schema
# collate.*.utf8 tests cannot be run in parallel with each other
test: rules psql psql_crosstab amutils stats_ext collate.linux.utf8
# run by itself so it can run parallel workers
# ----------
# Run these alone so they don't run out of parallel workers
# select_parallel depends on create_misc
# ----------
test: select_parallel
test: write_parallel
test: vacuum_parallel
@ -106,6 +104,7 @@ test: publication subscription
# ----------
# Another group of parallel tests
# select_views depends on create_view
# ----------
test: select_views portals_p2 foreign_key cluster dependency guc bitmapops combocid tsearch tsdicts foreign_data window xmlmap functional_deps advisory_lock indirect_toast equivclass
@ -116,6 +115,7 @@ test: json jsonb json_encoding jsonpath jsonpath_encoding jsonb_jsonpath
# ----------
# Another group of parallel tests
# with depends on create_misc
# NB: temp.sql does a reconnect which transiently uses 2 connections,
# so keep this parallel group to at most 19 tests
# ----------
@ -126,9 +126,10 @@ test: plancache limit plpgsql copy2 temp domain rangefuncs prepare conversion tr
# ----------
test: partition_join partition_prune reloptions hash_part indexing partition_aggregate partition_info tuplesort explain compression memoize
# event triggers cannot run concurrently with any test that runs DDL
# event_trigger cannot run concurrently with any test that runs DDL
# oidjoins is read-only, though, and should run late for best coverage
test: event_trigger oidjoins
# this test also uses event triggers, so likewise run it by itself
test: fast_default

View File

@ -2,9 +2,24 @@
-- AGGREGATES
--
-- directory paths are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
-- avoid bit-exact output here because operations may not be bit-exact.
SET extra_float_digits = 0;
-- prepare some test data
CREATE TABLE aggtest (
a int2,
b float4
);
\set filename :abs_srcdir '/data/agg.data'
COPY aggtest FROM :'filename';
ANALYZE aggtest;
SELECT avg(four) AS avg_1 FROM onek;
SELECT avg(a) AS avg_32 FROM aggtest WHERE a < 100;

View File

@ -2,6 +2,17 @@
-- Test for ALTER some_object {RENAME TO, OWNER TO, SET SCHEMA}
--
-- directory paths and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION test_opclass_options_func(internal)
RETURNS void
AS :'regresslib', 'test_opclass_options_func'
LANGUAGE C;
-- Clean up in case a prior regression run failed
SET client_min_messages TO 'warning';

View File

@ -2,6 +2,9 @@
-- ARRAYS
--
-- directory paths are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
CREATE TABLE arrtest (
a int2[],
b int4[][][],
@ -12,6 +15,16 @@ CREATE TABLE arrtest (
g varchar(5)[]
);
CREATE TABLE array_op_test (
seqno int4,
i int4[],
t text[]
);
\set filename :abs_srcdir '/data/array.data'
COPY array_op_test FROM :'filename';
ANALYZE array_op_test;
--
-- only the 'e' array is 0-based, the others are 1-based.
--
@ -153,6 +166,10 @@ UPDATE arrtest_s SET a[:] = '{23, 24, 25}'; -- fail, too small
INSERT INTO arrtest_s VALUES(NULL, NULL);
UPDATE arrtest_s SET a[:] = '{11, 12, 13, 14, 15}'; -- fail, no good with null
-- we want to work with a point_tbl that includes a null
CREATE TEMP TABLE point_tbl AS SELECT * FROM public.point_tbl;
INSERT INTO POINT_TBL(f1) VALUES (NULL);
-- check with fixed-length-array type, such as point
SELECT f1[0:1] FROM POINT_TBL;
SELECT f1[0:] FROM POINT_TBL;

View File

@ -1,5 +1,64 @@
--
-- BTREE_INDEX
--
-- directory paths are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
CREATE TABLE bt_i4_heap (
seqno int4,
random int4
);
CREATE TABLE bt_name_heap (
seqno name,
random int4
);
CREATE TABLE bt_txt_heap (
seqno text,
random int4
);
CREATE TABLE bt_f8_heap (
seqno float8,
random int4
);
\set filename :abs_srcdir '/data/desc.data'
COPY bt_i4_heap FROM :'filename';
\set filename :abs_srcdir '/data/hash.data'
COPY bt_name_heap FROM :'filename';
\set filename :abs_srcdir '/data/desc.data'
COPY bt_txt_heap FROM :'filename';
\set filename :abs_srcdir '/data/hash.data'
COPY bt_f8_heap FROM :'filename';
ANALYZE bt_i4_heap;
ANALYZE bt_name_heap;
ANALYZE bt_txt_heap;
ANALYZE bt_f8_heap;
--
-- BTREE ascending/descending cases
--
-- we load int4/text from pure descending data (each key is a new
-- low key) and name/f8 from pure ascending data (each key is a new
-- high key). we had a bug where new low keys would sometimes be
-- "lost".
--
CREATE INDEX bt_i4_index ON bt_i4_heap USING btree (seqno int4_ops);
CREATE INDEX bt_name_index ON bt_name_heap USING btree (seqno name_ops);
CREATE INDEX bt_txt_index ON bt_txt_heap USING btree (seqno text_ops);
CREATE INDEX bt_f8_index ON bt_f8_heap USING btree (seqno float8_ops);
--
-- test retrieval of min/max keys for each index
--

View File

@ -9,9 +9,10 @@ SELECT char 'c' = char 'c' AS true;
--
-- Build a table for testing
-- (This temporarily hides the table created in test_setup.sql)
--
CREATE TABLE CHAR_TBL(f1 char);
CREATE TEMP TABLE CHAR_TBL(f1 char);
INSERT INTO CHAR_TBL (f1) VALUES ('a');
@ -63,13 +64,10 @@ DROP TABLE CHAR_TBL;
--
-- Now test longer arrays of char
--
-- This char_tbl was already created and filled in test_setup.sql.
-- Here we just try to insert bad values.
--
CREATE TABLE CHAR_TBL(f1 char(4));
INSERT INTO CHAR_TBL (f1) VALUES ('a');
INSERT INTO CHAR_TBL (f1) VALUES ('ab');
INSERT INTO CHAR_TBL (f1) VALUES ('abcd');
INSERT INTO CHAR_TBL (f1) VALUES ('abcde');
INSERT INTO CHAR_TBL (f1) VALUES ('abcd ');
SELECT * FROM CHAR_TBL;

View File

@ -1,6 +1,17 @@
--
-- create user defined conversion
--
-- directory paths and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION test_enc_conversion(bytea, name, name, bool, validlen OUT int, result OUT bytea)
AS :'regresslib', 'test_enc_conversion'
LANGUAGE C STRICT;
CREATE USER regress_conversion_user WITH NOCREATEDB NOCREATEROLE;
SET SESSION AUTHORIZATION regress_conversion_user;
CREATE CONVERSION myconv FOR 'LATIN1' TO 'UTF8' FROM iso8859_1_to_utf8;

View File

@ -6,112 +6,6 @@
\getenv abs_srcdir PG_ABS_SRCDIR
\getenv abs_builddir PG_ABS_BUILDDIR
-- CLASS POPULATION
-- (any resemblance to real life is purely coincidental)
--
\set filename :abs_srcdir '/data/agg.data'
COPY aggtest FROM :'filename';
\set filename :abs_srcdir '/data/onek.data'
COPY onek FROM :'filename';
\set filename :abs_builddir '/results/onek.data'
COPY onek TO :'filename';
DELETE FROM onek;
COPY onek FROM :'filename';
\set filename :abs_srcdir '/data/tenk.data'
COPY tenk1 FROM :'filename';
\set filename :abs_srcdir '/data/rect.data'
COPY slow_emp4000 FROM :'filename';
\set filename :abs_srcdir '/data/person.data'
COPY person FROM :'filename';
\set filename :abs_srcdir '/data/emp.data'
COPY emp FROM :'filename';
\set filename :abs_srcdir '/data/student.data'
COPY student FROM :'filename';
\set filename :abs_srcdir '/data/stud_emp.data'
COPY stud_emp FROM :'filename';
\set filename :abs_srcdir '/data/streets.data'
COPY road FROM :'filename';
\set filename :abs_srcdir '/data/real_city.data'
COPY real_city FROM :'filename';
\set filename :abs_srcdir '/data/hash.data'
COPY hash_i4_heap FROM :'filename';
COPY hash_name_heap FROM :'filename';
COPY hash_txt_heap FROM :'filename';
COPY hash_f8_heap FROM :'filename';
\set filename :abs_srcdir '/data/tsearch.data'
COPY test_tsvector FROM :'filename';
\set filename :abs_srcdir '/data/jsonb.data'
COPY testjsonb FROM :'filename';
-- the data in this file has a lot of duplicates in the index key
-- fields, leading to long bucket chains and lots of table expansion.
-- this is therefore a stress test of the bucket overflow code (unlike
-- the data in hash.data, which has unique index keys).
--
-- \set filename :abs_srcdir '/data/hashovfl.data'
-- COPY hash_ovfl_heap FROM :'filename';
\set filename :abs_srcdir '/data/desc.data'
COPY bt_i4_heap FROM :'filename';
\set filename :abs_srcdir '/data/hash.data'
COPY bt_name_heap FROM :'filename';
\set filename :abs_srcdir '/data/desc.data'
COPY bt_txt_heap FROM :'filename';
\set filename :abs_srcdir '/data/hash.data'
COPY bt_f8_heap FROM :'filename';
\set filename :abs_srcdir '/data/array.data'
COPY array_op_test FROM :'filename';
\set filename :abs_srcdir '/data/array.data'
COPY array_index_op_test FROM :'filename';
-- analyze all the data we just loaded, to ensure plan consistency
-- in later tests
ANALYZE aggtest;
ANALYZE onek;
ANALYZE tenk1;
ANALYZE slow_emp4000;
ANALYZE person;
ANALYZE emp;
ANALYZE student;
ANALYZE stud_emp;
ANALYZE road;
ANALYZE real_city;
ANALYZE hash_i4_heap;
ANALYZE hash_name_heap;
ANALYZE hash_txt_heap;
ANALYZE hash_f8_heap;
ANALYZE test_tsvector;
ANALYZE bt_i4_heap;
ANALYZE bt_name_heap;
ANALYZE bt_txt_heap;
ANALYZE bt_f8_heap;
ANALYZE array_op_test;
ANALYZE array_index_op_test;
--- test copying in CSV mode with various styles
--- of embedded line ending characters

View File

@ -1,98 +1,25 @@
--
-- CREATE_FUNCTION_0
--
-- This script used to create C functions for other scripts to use.
-- But to get rid of the ordering dependencies that caused, such
-- functions are now made either in test_setup.sql or in the specific
-- test script that needs them. All that remains here is error cases.
-- directory path and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set autoinclib :libdir '/autoinc' :dlsuffix
\set refintlib :libdir '/refint' :dlsuffix
\set regresslib :libdir '/regress' :dlsuffix
-- Create a bunch of C functions that will be used by later tests:
CREATE FUNCTION check_primary_key ()
RETURNS trigger
AS :'refintlib'
LANGUAGE C;
CREATE FUNCTION check_foreign_key ()
RETURNS trigger
AS :'refintlib'
LANGUAGE C;
CREATE FUNCTION autoinc ()
RETURNS trigger
AS :'autoinclib'
LANGUAGE C;
CREATE FUNCTION trigger_return_old ()
RETURNS trigger
AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION ttdummy ()
RETURNS trigger
AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION set_ttdummy (int4)
RETURNS int4
AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION make_tuple_indirect (record)
RETURNS record
AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION test_atomic_ops()
RETURNS bool
AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION test_fdw_handler()
RETURNS fdw_handler
AS :'regresslib', 'test_fdw_handler'
LANGUAGE C;
CREATE FUNCTION test_support_func(internal)
RETURNS internal
AS :'regresslib', 'test_support_func'
LANGUAGE C STRICT;
CREATE FUNCTION test_opclass_options_func(internal)
RETURNS void
AS :'regresslib', 'test_opclass_options_func'
LANGUAGE C;
CREATE FUNCTION test_enc_conversion(bytea, name, name, bool, validlen OUT int, result OUT bytea)
AS :'regresslib', 'test_enc_conversion'
LANGUAGE C STRICT;
CREATE FUNCTION binary_coercible(oid, oid)
RETURNS bool
AS :'regresslib', 'binary_coercible'
LANGUAGE C STRICT STABLE PARALLEL SAFE;
--
-- Check LOAD command. (The alternative of implicitly loading the library
-- is checked in many other test scripts.)
--
LOAD :'regresslib';
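-- (Illustrative note, not part of this commit: "implicit loading" just means
-- that the first call to any C function from a library pulls the library in,
-- so a script that does, say,
--   SELECT test_atomic_ops();
-- never needs an explicit LOAD beforehand.)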
-- Things that shouldn't work:
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'SELECT ''not an integer'';';
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'not even SQL';
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'SELECT 1, 2, 3;';
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'SELECT $2;';
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'a', 'b';
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE C
AS 'nosuchfile';

View File

@ -1,36 +0,0 @@
--
-- CREATE_FUNCTION_1
--
-- directory path and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
-- Create C functions needed by create_type.sql
CREATE FUNCTION widget_in(cstring)
RETURNS widget
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
CREATE FUNCTION widget_out(widget)
RETURNS cstring
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
CREATE FUNCTION int44in(cstring)
RETURNS city_budget
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
CREATE FUNCTION int44out(city_budget)
RETURNS cstring
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
CREATE FUNCTION test_canonicalize_path(text)
RETURNS text
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;

View File

@ -1,96 +0,0 @@
--
-- CREATE_FUNCTION_2
--
-- directory path and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION hobbies(person)
RETURNS setof hobbies_r
AS 'select * from hobbies_r where person = $1.name'
LANGUAGE SQL;
CREATE FUNCTION hobby_construct(text, text)
RETURNS hobbies_r
AS 'select $1 as name, $2 as hobby'
LANGUAGE SQL;
CREATE FUNCTION hobby_construct_named(name text, hobby text)
RETURNS hobbies_r
AS 'select name, hobby'
LANGUAGE SQL;
CREATE FUNCTION hobbies_by_name(hobbies_r.name%TYPE)
RETURNS hobbies_r.person%TYPE
AS 'select person from hobbies_r where name = $1'
LANGUAGE SQL;
CREATE FUNCTION equipment(hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where hobby = $1.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named(hobby hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where equipment_r.hobby = equipment_named.hobby.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_1a(hobby hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where hobby = equipment_named_ambiguous_1a.hobby.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_1b(hobby hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where equipment_r.hobby = hobby.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_1c(hobby hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where hobby = hobby.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_2a(hobby text)
RETURNS setof equipment_r
AS 'select * from equipment_r where hobby = equipment_named_ambiguous_2a.hobby'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_2b(hobby text)
RETURNS setof equipment_r
AS 'select * from equipment_r where equipment_r.hobby = hobby'
LANGUAGE SQL;
CREATE FUNCTION pt_in_widget(point, widget)
RETURNS bool
AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION overpaid(emp)
RETURNS bool
AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION interpt_pp(path, path)
RETURNS point
AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION reverse_name(name)
RETURNS name
AS :'regresslib'
LANGUAGE C STRICT;
--
-- Function dynamic loading
--
LOAD :'regresslib';

View File

@ -385,6 +385,23 @@ CREATE FUNCTION voidtest5(a int) RETURNS SETOF VOID LANGUAGE SQL AS
$$ SELECT generate_series(1, a) $$ STABLE;
SELECT * FROM voidtest5(3);
-- Things that shouldn't work:
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'SELECT ''not an integer'';';
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'not even SQL';
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'SELECT 1, 2, 3;';
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'SELECT $2;';
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
AS 'a', 'b';
-- Cleanup
DROP SCHEMA temp_func_test CASCADE;
DROP USER regress_unpriv_user;

View File

@ -3,6 +3,9 @@
-- Create ancillary data structures (i.e. indices)
--
-- directory paths are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
--
-- BTREE
--
@ -43,22 +46,6 @@ COMMENT ON INDEX six_wrong IS 'bad index';
COMMENT ON INDEX six IS 'good index';
COMMENT ON INDEX six IS NULL;
--
-- BTREE ascending/descending cases
--
-- we load int4/text from pure descending data (each key is a new
-- low key) and name/f8 from pure ascending data (each key is a new
-- high key). we had a bug where new low keys would sometimes be
-- "lost".
--
CREATE INDEX bt_i4_index ON bt_i4_heap USING btree (seqno int4_ops);
CREATE INDEX bt_name_index ON bt_name_heap USING btree (seqno name_ops);
CREATE INDEX bt_txt_index ON bt_txt_heap USING btree (seqno text_ops);
CREATE INDEX bt_f8_index ON bt_f8_heap USING btree (seqno float8_ops);
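-- (Illustrative note, not part of this commit: a "lost" low key would be a row
-- reachable by a sequential scan but not through the index; a rough
-- cross-check is to compare
--   SELECT count(*) FROM bt_i4_heap;
-- with the same count computed under SET enable_seqscan = off using an
-- indexable qual on seqno.)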
--
-- BTREE partial indices
--
@ -74,12 +61,27 @@ CREATE INDEX onek2_stu1_prtl ON onek2 USING btree(stringu1 name_ops)
--
-- GiST (rtree-equivalent opclasses only)
--
CREATE TABLE slow_emp4000 (
home_base box
);
CREATE TABLE fast_emp4000 (
home_base box
);
\set filename :abs_srcdir '/data/rect.data'
COPY slow_emp4000 FROM :'filename';
INSERT INTO fast_emp4000 SELECT * FROM slow_emp4000;
ANALYZE slow_emp4000;
ANALYZE fast_emp4000;
CREATE INDEX grect2ind ON fast_emp4000 USING gist (home_base);
CREATE INDEX gpolygonind ON polygon_tbl USING gist (f1);
CREATE INDEX gcircleind ON circle_tbl USING gist (f1);
-- we want to work with a point_tbl that includes a null
CREATE TEMP TABLE point_tbl AS SELECT * FROM public.point_tbl;
INSERT INTO POINT_TBL(f1) VALUES (NULL);
CREATE INDEX gpointind ON point_tbl USING gist (f1);
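-- (Illustrative note, not part of this commit: the NULL row is there so that
-- index scans such as
--   SELECT count(*) FROM point_tbl WHERE f1 IS NULL;
-- exercise the GiST code path for null entries once seqscans are disabled.)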
@ -114,12 +116,6 @@ SELECT count(*) FROM fast_emp4000 WHERE home_base && '(1000,1000,0,0)'::box;
SELECT count(*) FROM fast_emp4000 WHERE home_base IS NULL;
SELECT * FROM polygon_tbl WHERE f1 @> '((1,1),(2,2),(2,1))'::polygon
ORDER BY (poly_center(f1))[0];
SELECT * FROM circle_tbl WHERE f1 && circle(point(1,-2), 1)
ORDER BY area(f1);
SELECT count(*) FROM gpolygon_tbl WHERE f1 && '(1000,1000,0,0)'::polygon;
SELECT count(*) FROM gcircle_tbl WHERE f1 && '<(500,500),500>'::circle;
@ -175,18 +171,6 @@ EXPLAIN (COSTS OFF)
SELECT count(*) FROM fast_emp4000 WHERE home_base IS NULL;
SELECT count(*) FROM fast_emp4000 WHERE home_base IS NULL;
EXPLAIN (COSTS OFF)
SELECT * FROM polygon_tbl WHERE f1 @> '((1,1),(2,2),(2,1))'::polygon
ORDER BY (poly_center(f1))[0];
SELECT * FROM polygon_tbl WHERE f1 @> '((1,1),(2,2),(2,1))'::polygon
ORDER BY (poly_center(f1))[0];
EXPLAIN (COSTS OFF)
SELECT * FROM circle_tbl WHERE f1 && circle(point(1,-2), 1)
ORDER BY area(f1);
SELECT * FROM circle_tbl WHERE f1 && circle(point(1,-2), 1)
ORDER BY area(f1);
EXPLAIN (COSTS OFF)
SELECT count(*) FROM gpolygon_tbl WHERE f1 && '(1000,1000,0,0)'::polygon;
SELECT count(*) FROM gpolygon_tbl WHERE f1 && '(1000,1000,0,0)'::polygon;
@ -274,6 +258,21 @@ RESET enable_bitmapscan;
-- Note: GIN currently supports only bitmap scans, not plain indexscans
--
CREATE TABLE array_index_op_test (
seqno int4,
i int4[],
t text[]
);
\set filename :abs_srcdir '/data/array.data'
COPY array_index_op_test FROM :'filename';
ANALYZE array_index_op_test;
SELECT * FROM array_index_op_test WHERE i = '{NULL}' ORDER BY seqno;
SELECT * FROM array_index_op_test WHERE i @> '{NULL}' ORDER BY seqno;
SELECT * FROM array_index_op_test WHERE i && '{NULL}' ORDER BY seqno;
SELECT * FROM array_index_op_test WHERE i <@ '{NULL}' ORDER BY seqno;
SET enable_seqscan = OFF;
SET enable_indexscan = OFF;
SET enable_bitmapscan = ON;
@ -295,10 +294,6 @@ SELECT * FROM array_index_op_test WHERE i = '{}' ORDER BY seqno;
SELECT * FROM array_index_op_test WHERE i @> '{}' ORDER BY seqno;
SELECT * FROM array_index_op_test WHERE i && '{}' ORDER BY seqno;
SELECT * FROM array_index_op_test WHERE i <@ '{}' ORDER BY seqno;
SELECT * FROM array_op_test WHERE i = '{NULL}' ORDER BY seqno;
SELECT * FROM array_op_test WHERE i @> '{NULL}' ORDER BY seqno;
SELECT * FROM array_op_test WHERE i && '{NULL}' ORDER BY seqno;
SELECT * FROM array_op_test WHERE i <@ '{NULL}' ORDER BY seqno;
CREATE INDEX textarrayidx ON array_index_op_test USING gin (t);
@ -331,8 +326,6 @@ SELECT * FROM array_index_op_test WHERE t && '{AAAAAAA80240}' ORDER BY seqno;
SELECT * FROM array_index_op_test WHERE i @> '{32}' AND t && '{AAAAAAA80240}' ORDER BY seqno;
SELECT * FROM array_index_op_test WHERE i && '{32}' AND t @> '{AAAAAAA80240}' ORDER BY seqno;
SELECT * FROM array_index_op_test WHERE t = '{}' ORDER BY seqno;
SELECT * FROM array_op_test WHERE i = '{NULL}' ORDER BY seqno;
SELECT * FROM array_op_test WHERE i <@ '{NULL}' ORDER BY seqno;
RESET enable_seqscan;
RESET enable_indexscan;
@ -362,14 +355,6 @@ CREATE INDEX gin_relopts_test ON array_index_op_test USING gin (i)
--
-- HASH
--
CREATE INDEX hash_i4_index ON hash_i4_heap USING hash (random int4_ops);
CREATE INDEX hash_name_index ON hash_name_heap USING hash (random name_ops);
CREATE INDEX hash_txt_index ON hash_txt_heap USING hash (random text_ops);
CREATE INDEX hash_f8_index ON hash_f8_heap USING hash (random float8_ops) WITH (fillfactor=60);
CREATE UNLOGGED TABLE unlogged_hash_table (id int4);
CREATE INDEX unlogged_hash_index ON unlogged_hash_table USING hash (id int4_ops);
DROP TABLE unlogged_hash_table;
@ -485,15 +470,6 @@ ALTER TABLE covering_index_heap ADD CONSTRAINT covering_pkey PRIMARY KEY USING I
covering_pkey;
DROP TABLE covering_index_heap;
--
-- Also try building functional, expressional, and partial indexes on
-- tables that already contain data.
--
create unique index hash_f8_index_1 on hash_f8_heap(abs(random));
create unique index hash_f8_index_2 on hash_f8_heap((seqno + 1), random);
create unique index hash_f8_index_3 on hash_f8_heap(random) where seqno > 1000;
--
-- Try some concurrent index builds
--
@ -770,8 +746,6 @@ SELECT count(*) FROM dupindexcols
-- Check ordering of =ANY indexqual results (bug in 9.2.0)
--
vacuum tenk1; -- ensure we get consistent plans here
explain (costs off)
SELECT unique1 FROM tenk1
WHERE unique1 IN (1,42,7)

View File

@ -2,63 +2,37 @@
-- CREATE_MISC
--
-- CLASS POPULATION
-- (any resemblance to real life is purely coincidental)
--
-- a is the type root
-- b and c inherit from a (one-level single inheritance)
-- d inherits from b and c (two-level multiple inheritance)
-- e inherits from c (two-level single inheritance)
-- f inherits from e (three-level single inheritance)
--
CREATE TABLE a_star (
class char,
a int4
);
INSERT INTO tenk2 SELECT * FROM tenk1;
CREATE TABLE b_star (
b text
) INHERITS (a_star);
CREATE TABLE onek2 AS SELECT * FROM onek;
CREATE TABLE c_star (
c name
) INHERITS (a_star);
INSERT INTO fast_emp4000 SELECT * FROM slow_emp4000;
CREATE TABLE d_star (
d float8
) INHERITS (b_star, c_star);
SELECT *
INTO TABLE Bprime
FROM tenk1
WHERE unique2 < 1000;
CREATE TABLE e_star (
e int2
) INHERITS (c_star);
INSERT INTO hobbies_r (name, person)
SELECT 'posthacking', p.name
FROM person* p
WHERE p.name = 'mike' or p.name = 'jeff';
INSERT INTO hobbies_r (name, person)
SELECT 'basketball', p.name
FROM person p
WHERE p.name = 'joe' or p.name = 'sally';
INSERT INTO hobbies_r (name) VALUES ('skywalking');
INSERT INTO equipment_r (name, hobby) VALUES ('advil', 'posthacking');
INSERT INTO equipment_r (name, hobby) VALUES ('peet''s coffee', 'posthacking');
INSERT INTO equipment_r (name, hobby) VALUES ('hightops', 'basketball');
INSERT INTO equipment_r (name, hobby) VALUES ('guts', 'skywalking');
INSERT INTO city VALUES
('Podunk', '(1,2),(3,4)', '100,127,1000'),
('Gotham', '(1000,34),(1100,334)', '123456,127,-1000,6789');
TABLE city;
SELECT *
INTO TABLE ramp
FROM road
WHERE name ~ '.*Ramp';
INSERT INTO ihighway
SELECT *
FROM road
WHERE name ~ 'I- .*';
INSERT INTO shighway
SELECT *
FROM road
WHERE name ~ 'State Hwy.*';
UPDATE shighway
SET surface = 'asphalt';
CREATE TABLE f_star (
f polygon
) INHERITS (e_star);
INSERT INTO a_star (class, a) VALUES ('a', 1);
@ -200,18 +174,85 @@ ANALYZE d_star;
ANALYZE e_star;
ANALYZE f_star;
--
-- for internal portal (cursor) tests
-- inheritance stress test
--
CREATE TABLE iportaltest (
i int4,
d float4,
p polygon
);
SELECT * FROM a_star*;
INSERT INTO iportaltest (i, d, p)
VALUES (1, 3.567, '(3.0,1.0),(4.0,2.0)'::polygon);
SELECT *
FROM b_star* x
WHERE x.b = text 'bumble' or x.a < 3;
INSERT INTO iportaltest (i, d, p)
VALUES (2, 89.05, '(4.0,2.0),(3.0,1.0)'::polygon);
SELECT class, a
FROM c_star* x
WHERE x.c ~ text 'hi';
SELECT class, b, c
FROM d_star* x
WHERE x.a < 100;
SELECT class, c FROM e_star* x WHERE x.c NOTNULL;
SELECT * FROM f_star* x WHERE x.c ISNULL;
-- grouping and aggregation on inherited sets have been busted in the past...
SELECT sum(a) FROM a_star*;
SELECT class, sum(a) FROM a_star* GROUP BY class ORDER BY class;
ALTER TABLE f_star RENAME COLUMN f TO ff;
ALTER TABLE e_star* RENAME COLUMN e TO ee;
ALTER TABLE d_star* RENAME COLUMN d TO dd;
ALTER TABLE c_star* RENAME COLUMN c TO cc;
ALTER TABLE b_star* RENAME COLUMN b TO bb;
ALTER TABLE a_star* RENAME COLUMN a TO aa;
SELECT class, aa
FROM a_star* x
WHERE aa ISNULL;
-- As of Postgres 7.1, ALTER implicitly recurses,
-- so this should be same as ALTER a_star*
ALTER TABLE a_star RENAME COLUMN aa TO foo;
SELECT class, foo
FROM a_star* x
WHERE x.foo >= 2;
ALTER TABLE a_star RENAME COLUMN foo TO aa;
SELECT *
from a_star*
WHERE aa < 1000;
ALTER TABLE f_star ADD COLUMN f int4;
UPDATE f_star SET f = 10;
ALTER TABLE e_star* ADD COLUMN e int4;
--UPDATE e_star* SET e = 42;
SELECT * FROM e_star*;
ALTER TABLE a_star* ADD COLUMN a text;
-- That ALTER TABLE should have added TOAST tables.
SELECT relname, reltoastrelid <> 0 AS has_toast_table
FROM pg_class
WHERE oid::regclass IN ('a_star', 'c_star')
ORDER BY 1;
--UPDATE b_star*
-- SET a = text 'gazpacho'
-- WHERE aa > 4;
SELECT class, aa, a FROM a_star*;

View File

@ -9,14 +9,6 @@ CREATE OPERATOR ## (
commutator = ##
);
CREATE OPERATOR <% (
leftarg = point,
rightarg = widget,
procedure = pt_in_widget,
commutator = >% ,
negator = >=%
);
CREATE OPERATOR @#@ (
rightarg = int8, -- prefix
procedure = factorial
@ -28,8 +20,7 @@ CREATE OPERATOR #%# (
);
-- Test operator created above
SELECT point '(1,2)' <% widget '(0,0,3)' AS t,
point '(1,2)' <% widget '(0,0,1)' AS f;
SELECT @#@ 24;
-- Test comments
COMMENT ON OPERATOR ###### (NONE, int4) IS 'bad prefix';

View File

@ -2,240 +2,7 @@
-- CREATE_TABLE
--
--
-- CLASS DEFINITIONS
--
CREATE TABLE hobbies_r (
name text,
person text
);
CREATE TABLE equipment_r (
name text,
hobby text
);
CREATE TABLE onek (
unique1 int4,
unique2 int4,
two int4,
four int4,
ten int4,
twenty int4,
hundred int4,
thousand int4,
twothousand int4,
fivethous int4,
tenthous int4,
odd int4,
even int4,
stringu1 name,
stringu2 name,
string4 name
);
CREATE TABLE tenk1 (
unique1 int4,
unique2 int4,
two int4,
four int4,
ten int4,
twenty int4,
hundred int4,
thousand int4,
twothousand int4,
fivethous int4,
tenthous int4,
odd int4,
even int4,
stringu1 name,
stringu2 name,
string4 name
);
CREATE TABLE tenk2 (
unique1 int4,
unique2 int4,
two int4,
four int4,
ten int4,
twenty int4,
hundred int4,
thousand int4,
twothousand int4,
fivethous int4,
tenthous int4,
odd int4,
even int4,
stringu1 name,
stringu2 name,
string4 name
);
CREATE TABLE person (
name text,
age int4,
location point
);
CREATE TABLE emp (
salary int4,
manager name
) INHERITS (person);
CREATE TABLE student (
gpa float8
) INHERITS (person);
CREATE TABLE stud_emp (
percent int4
) INHERITS (emp, student);
CREATE TABLE city (
name name,
location box,
budget city_budget
);
CREATE TABLE dept (
dname name,
mgrname text
);
CREATE TABLE slow_emp4000 (
home_base box
);
CREATE TABLE fast_emp4000 (
home_base box
);
CREATE TABLE road (
name text,
thepath path
);
CREATE TABLE ihighway () INHERITS (road);
CREATE TABLE shighway (
surface text
) INHERITS (road);
CREATE TABLE real_city (
pop int4,
cname text,
outline path
);
--
-- test the "star" operators a bit more thoroughly -- this time,
-- throw in lots of NULL fields...
--
-- a is the type root
-- b and c inherit from a (one-level single inheritance)
-- d inherits from b and c (two-level multiple inheritance)
-- e inherits from c (two-level single inheritance)
-- f inherits from e (three-level single inheritance)
--
CREATE TABLE a_star (
class char,
a int4
);
CREATE TABLE b_star (
b text
) INHERITS (a_star);
CREATE TABLE c_star (
c name
) INHERITS (a_star);
CREATE TABLE d_star (
d float8
) INHERITS (b_star, c_star);
CREATE TABLE e_star (
e int2
) INHERITS (c_star);
CREATE TABLE f_star (
f polygon
) INHERITS (e_star);
CREATE TABLE aggtest (
a int2,
b float4
);
CREATE TABLE hash_i4_heap (
seqno int4,
random int4
);
CREATE TABLE hash_name_heap (
seqno int4,
random name
);
CREATE TABLE hash_txt_heap (
seqno int4,
random text
);
CREATE TABLE hash_f8_heap (
seqno int4,
random float8
);
-- don't include the hash_ovfl_heap stuff in the distribution
-- the data set is too large for what it's worth
--
-- CREATE TABLE hash_ovfl_heap (
-- x int4,
-- y int4
-- );
CREATE TABLE bt_i4_heap (
seqno int4,
random int4
);
CREATE TABLE bt_name_heap (
seqno name,
random int4
);
CREATE TABLE bt_txt_heap (
seqno text,
random int4
);
CREATE TABLE bt_f8_heap (
seqno float8,
random int4
);
CREATE TABLE array_op_test (
seqno int4,
i int4[],
t text[]
);
CREATE TABLE array_index_op_test (
seqno int4,
i int4[],
t text[]
);
CREATE TABLE testjsonb (
j jsonb
);
-- Error cases
CREATE TABLE unknowntab (
u unknown -- fail
);
@ -244,15 +11,6 @@ CREATE TYPE unknown_comptype AS (
u unknown -- fail
);
CREATE TABLE IF NOT EXISTS test_tsvector(
t text,
a tsvector
);
CREATE TABLE IF NOT EXISTS test_tsvector(
t text
);
-- invalid: non-lowercase quoted reloptions identifiers
CREATE TABLE tas_case WITH ("Fillfactor" = 10) AS SELECT 1 a;

View File

@ -2,11 +2,36 @@
-- CREATE_TYPE
--
-- directory path and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
--
-- Note: widget_in/out were created in create_function_1, without any
-- prior shell-type creation. These commands therefore complete a test
-- of the "old style" approach of making the functions first.
-- Test the "old style" approach of making the I/O functions first,
-- with no explicit shell type creation.
--
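-- (Illustrative note, not part of this commit: the "new style" counterpart
-- would declare an explicit shell type before the I/O functions, e.g.
--   CREATE TYPE widget;                          -- shell type
--   CREATE FUNCTION widget_in(cstring) RETURNS widget ...;
--   CREATE TYPE widget (input = widget_in, output = widget_out, ...);
-- the commands below deliberately skip that first step.)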
CREATE FUNCTION widget_in(cstring)
RETURNS widget
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
CREATE FUNCTION widget_out(widget)
RETURNS cstring
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
CREATE FUNCTION int44in(cstring)
RETURNS city_budget
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
CREATE FUNCTION int44out(city_budget)
RETURNS cstring
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
CREATE TYPE widget (
internallength = 24,
input = widget_in,
@ -167,6 +192,37 @@ select format_type('bpchar'::regtype, null);
-- this behavior difference is intentional
select format_type('bpchar'::regtype, -1);
-- Test creation of an operator over a user-defined type
CREATE FUNCTION pt_in_widget(point, widget)
RETURNS bool
AS :'regresslib'
LANGUAGE C STRICT;
CREATE OPERATOR <% (
leftarg = point,
rightarg = widget,
procedure = pt_in_widget,
commutator = >% ,
negator = >=%
);
SELECT point '(1,2)' <% widget '(0,0,3)' AS t,
point '(1,2)' <% widget '(0,0,1)' AS f;
-- exercise city_budget type
CREATE TABLE city (
name name,
location box,
budget city_budget
);
INSERT INTO city VALUES
('Podunk', '(1,2),(3,4)', '100,127,1000'),
('Gotham', '(1000,34),(1100,334)', '123456,127,-1000,6789');
TABLE city;
--
-- Test CREATE/ALTER TYPE using a type that's compatible with varchar,
-- so we can re-use those support functions

View File

@ -4,16 +4,43 @@
-- (this also tests the query rewrite system)
--
-- directory paths and dlsuffix are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION interpt_pp(path, path)
RETURNS point
AS :'regresslib'
LANGUAGE C STRICT;
CREATE TABLE real_city (
pop int4,
cname text,
outline path
);
\set filename :abs_srcdir '/data/real_city.data'
COPY real_city FROM :'filename';
ANALYZE real_city;
SELECT *
INTO TABLE ramp
FROM ONLY road
WHERE name ~ '.*Ramp';
CREATE VIEW street AS
SELECT r.name, r.thepath, c.cname AS cname
FROM ONLY road r, real_city c
WHERE c.outline ## r.thepath;
WHERE c.outline ?# r.thepath;
CREATE VIEW iexit AS
SELECT ih.name, ih.thepath,
interpt_pp(ih.thepath, r.thepath) AS exit
FROM ihighway ih, ramp r
WHERE ih.thepath ## r.thepath;
WHERE ih.thepath ?# r.thepath;
CREATE VIEW toyemp AS
SELECT name, age, location, 12*salary AS annualsal

View File

@ -77,7 +77,7 @@ alter table nonesuch rename to newnonesuch;
alter table nonesuch rename to stud_emp;
-- conflict
alter table stud_emp rename to aggtest;
alter table stud_emp rename to student;
-- self-conflict
alter table stud_emp rename to stud_emp;

View File

@ -3,7 +3,7 @@
--
--
-- Tests for SQLVAlueFunction
-- Tests for SQLValueFunction
--
@ -38,35 +38,6 @@ SELECT current_schema;
RESET search_path;
--
-- Tests for BETWEEN
--
explain (costs off)
select count(*) from date_tbl
where f1 between '1997-01-01' and '1998-01-01';
select count(*) from date_tbl
where f1 between '1997-01-01' and '1998-01-01';
explain (costs off)
select count(*) from date_tbl
where f1 not between '1997-01-01' and '1998-01-01';
select count(*) from date_tbl
where f1 not between '1997-01-01' and '1998-01-01';
explain (costs off)
select count(*) from date_tbl
where f1 between symmetric '1997-01-01' and '1998-01-01';
select count(*) from date_tbl
where f1 between symmetric '1997-01-01' and '1998-01-01';
explain (costs off)
select count(*) from date_tbl
where f1 not between symmetric '1997-01-01' and '1998-01-01';
select count(*) from date_tbl
where f1 not between symmetric '1997-01-01' and '1998-01-01';
--
-- Test parsing of a no-op cast to a type with unspecified typmod
--

View File

@ -2,7 +2,12 @@
-- FLOAT8
--
CREATE TABLE FLOAT8_TBL(f1 float8);
--
-- Build a table for testing
-- (This temporarily hides the table created in test_setup.sql)
--
CREATE TEMP TABLE FLOAT8_TBL(f1 float8);
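-- (Illustrative note, not part of this commit: the temp table lives in this
-- session's pg_temp schema, which is searched ahead of public, so unqualified
-- FLOAT8_TBL below means the temp copy; the shared table remains reachable as
--   SELECT count(*) FROM public.float8_tbl;
-- for the rest of the session.)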
INSERT INTO FLOAT8_TBL(f1) VALUES (' 0.0 ');
INSERT INTO FLOAT8_TBL(f1) VALUES ('1004.30 ');
@ -229,20 +234,9 @@ INSERT INTO FLOAT8_TBL(f1) VALUES ('10e-400');
INSERT INTO FLOAT8_TBL(f1) VALUES ('-10e-400');
-- maintain external table consistency across platforms
-- delete all values and reinsert well-behaved ones
DROP TABLE FLOAT8_TBL;
DELETE FROM FLOAT8_TBL;
INSERT INTO FLOAT8_TBL(f1) VALUES ('0.0');
INSERT INTO FLOAT8_TBL(f1) VALUES ('-34.84');
INSERT INTO FLOAT8_TBL(f1) VALUES ('-1004.30');
INSERT INTO FLOAT8_TBL(f1) VALUES ('-1.2345678901234e+200');
INSERT INTO FLOAT8_TBL(f1) VALUES ('-1.2345678901234e-200');
-- Check the float8 values exported for use by other tests
SELECT * FROM FLOAT8_TBL;

View File

@ -2,6 +2,17 @@
-- Test foreign-data wrapper and server management.
--
-- directory paths and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION test_fdw_handler()
RETURNS fdw_handler
AS :'regresslib', 'test_fdw_handler'
LANGUAGE C;
-- Clean up in case a prior regression run failed
-- Suppress NOTICE messages when roles don't exist

View File

@ -497,3 +497,29 @@ SELECT c.f1, p.f1, c.f1 / p.f1 FROM CIRCLE_TBL c, POINT_TBL p WHERE p.f1 ~= '(0,
-- Distance to polygon
SELECT c.f1, p.f1, c.f1 <-> p.f1 FROM CIRCLE_TBL c, POLYGON_TBL p;
-- Check index behavior for circles
CREATE INDEX gcircleind ON circle_tbl USING gist (f1);
SELECT * FROM circle_tbl WHERE f1 && circle(point(1,-2), 1)
ORDER BY area(f1);
EXPLAIN (COSTS OFF)
SELECT * FROM circle_tbl WHERE f1 && circle(point(1,-2), 1)
ORDER BY area(f1);
SELECT * FROM circle_tbl WHERE f1 && circle(point(1,-2), 1)
ORDER BY area(f1);
-- Check index behavior for polygons
CREATE INDEX gpolygonind ON polygon_tbl USING gist (f1);
SELECT * FROM polygon_tbl WHERE f1 @> '((1,1),(2,2),(2,1))'::polygon
ORDER BY (poly_center(f1))[0];
EXPLAIN (COSTS OFF)
SELECT * FROM polygon_tbl WHERE f1 @> '((1,1),(2,2),(2,1))'::polygon
ORDER BY (poly_center(f1))[0];
SELECT * FROM polygon_tbl WHERE f1 @> '((1,1),(2,2),(2,1))'::polygon
ORDER BY (poly_center(f1))[0];

View File

@ -1,8 +1,70 @@
--
-- HASH_INDEX
-- grep 843938989 hash.data
--
-- directory paths are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
CREATE TABLE hash_i4_heap (
seqno int4,
random int4
);
CREATE TABLE hash_name_heap (
seqno int4,
random name
);
CREATE TABLE hash_txt_heap (
seqno int4,
random text
);
CREATE TABLE hash_f8_heap (
seqno int4,
random float8
);
\set filename :abs_srcdir '/data/hash.data'
COPY hash_i4_heap FROM :'filename';
COPY hash_name_heap FROM :'filename';
COPY hash_txt_heap FROM :'filename';
COPY hash_f8_heap FROM :'filename';
-- the data in this file has a lot of duplicates in the index key
-- fields, leading to long bucket chains and lots of table expansion.
-- this is therefore a stress test of the bucket overflow code (unlike
-- the data in hash.data, which has unique index keys).
--
-- \set filename :abs_srcdir '/data/hashovfl.data'
-- COPY hash_ovfl_heap FROM :'filename';
ANALYZE hash_i4_heap;
ANALYZE hash_name_heap;
ANALYZE hash_txt_heap;
ANALYZE hash_f8_heap;
CREATE INDEX hash_i4_index ON hash_i4_heap USING hash (random int4_ops);
CREATE INDEX hash_name_index ON hash_name_heap USING hash (random name_ops);
CREATE INDEX hash_txt_index ON hash_txt_heap USING hash (random text_ops);
CREATE INDEX hash_f8_index ON hash_f8_heap USING hash (random float8_ops)
WITH (fillfactor=60);
--
-- Also try building functional, expressional, and partial indexes on
-- tables that already contain data.
--
create unique index hash_f8_index_1 on hash_f8_heap(abs(random));
create unique index hash_f8_index_2 on hash_f8_heap((seqno + 1), random);
create unique index hash_f8_index_3 on hash_f8_heap(random) where seqno > 1000;
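-- (Illustrative note, not part of this commit: one way to confirm the planner
-- can actually use these is, for example,
--   SET enable_seqscan = off;
--   EXPLAIN (COSTS OFF) SELECT * FROM hash_f8_heap WHERE abs(random) = 3;
--   RESET enable_seqscan;
-- which should use hash_f8_index_1 rather than a sequential scan.)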
--
-- hash index
-- grep 843938989 hash.data
--
SELECT * FROM hash_i4_heap
WHERE hash_i4_heap.random = 843938989;

View File

@ -306,6 +306,34 @@ SELECT '2020-10-05'::timestamptz >= '4714-11-24 BC'::timestamp as t;
RESET TimeZone;
--
-- Tests for BETWEEN
--
explain (costs off)
select count(*) from date_tbl
where f1 between '1997-01-01' and '1998-01-01';
select count(*) from date_tbl
where f1 between '1997-01-01' and '1998-01-01';
explain (costs off)
select count(*) from date_tbl
where f1 not between '1997-01-01' and '1998-01-01';
select count(*) from date_tbl
where f1 not between '1997-01-01' and '1998-01-01';
explain (costs off)
select count(*) from date_tbl
where f1 between symmetric '1997-01-01' and '1998-01-01';
select count(*) from date_tbl
where f1 between symmetric '1997-01-01' and '1998-01-01';
explain (costs off)
select count(*) from date_tbl
where f1 not between symmetric '1997-01-01' and '1998-01-01';
select count(*) from date_tbl
where f1 not between symmetric '1997-01-01' and '1998-01-01';
--
-- Formats
--

View File

@ -2,6 +2,17 @@
-- Tests for external toast datums
--
-- directory paths and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION make_tuple_indirect (record)
RETURNS record
AS :'regresslib'
LANGUAGE C STRICT;
-- Other compression algorithms may cause the compressed data to be stored
-- inline. pglz guarantees that the data is externalized, so stick to it.
SET default_toast_compression = 'pglz';
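-- (Illustrative note, not part of this commit: pg_column_compression() can be
-- used to verify which method a stored datum ended up with, e.g.
--   SELECT pg_column_compression(col) FROM some_table LIMIT 1;
-- where some_table/col stand for any table with a compressed varlena column.)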

View File

@ -250,33 +250,6 @@ select tableoid::regclass::text, a, min(b) as min_b, max(b) as max_b from list_p
-- direct partition inserts should check hash partition bound constraint
-- Use hand-rolled hash functions and operator classes to get predictable
-- result on different machines. The hash function for int4 simply returns
-- the sum of the values passed to it and the one for text returns the length
-- of the non-empty string value passed to it or 0.
create or replace function part_hashint4_noop(value int4, seed int8)
returns int8 as $$
select value + seed;
$$ language sql immutable;
create operator class part_test_int4_ops
for type int4
using hash as
operator 1 =,
function 2 part_hashint4_noop(int4, int8);
create or replace function part_hashtext_length(value text, seed int8)
RETURNS int8 AS $$
select length(coalesce(value, ''))::int8
$$ language sql immutable;
create operator class part_test_text_ops
for type text
using hash as
operator 1 =,
function 2 part_hashtext_length(text, int8);
create table hash_parted (
a int
) partition by hash (a part_test_int4_ops);

View File

@ -2,22 +2,10 @@
-- INT2
--
CREATE TABLE INT2_TBL(f1 int2);
INSERT INTO INT2_TBL(f1) VALUES ('0 ');
INSERT INTO INT2_TBL(f1) VALUES (' 1234 ');
INSERT INTO INT2_TBL(f1) VALUES (' -1234');
-- int2_tbl was already created and filled in test_setup.sql.
-- Here we just try to insert bad values.
INSERT INTO INT2_TBL(f1) VALUES ('34.5');
-- largest and smallest values
INSERT INTO INT2_TBL(f1) VALUES ('32767');
INSERT INTO INT2_TBL(f1) VALUES ('-32767');
-- bad input values -- should give errors
INSERT INTO INT2_TBL(f1) VALUES ('100000');
INSERT INTO INT2_TBL(f1) VALUES ('asdf');
INSERT INTO INT2_TBL(f1) VALUES (' ');

View File

@ -2,22 +2,10 @@
-- INT4
--
CREATE TABLE INT4_TBL(f1 int4);
INSERT INTO INT4_TBL(f1) VALUES (' 0 ');
INSERT INTO INT4_TBL(f1) VALUES ('123456 ');
INSERT INTO INT4_TBL(f1) VALUES (' -123456');
-- int4_tbl was already created and filled in test_setup.sql.
-- Here we just try to insert bad values.
INSERT INTO INT4_TBL(f1) VALUES ('34.5');
-- largest and smallest values
INSERT INTO INT4_TBL(f1) VALUES ('2147483647');
INSERT INTO INT4_TBL(f1) VALUES ('-2147483647');
-- bad input values -- should give errors
INSERT INTO INT4_TBL(f1) VALUES ('1000000000000');
INSERT INTO INT4_TBL(f1) VALUES ('asdf');
INSERT INTO INT4_TBL(f1) VALUES (' ');

View File

@ -2,15 +2,10 @@
-- INT8
-- Test int8 64-bit integers.
--
CREATE TABLE INT8_TBL(q1 int8, q2 int8);
INSERT INTO INT8_TBL VALUES(' 123 ',' 456');
INSERT INTO INT8_TBL VALUES('123 ','4567890123456789');
INSERT INTO INT8_TBL VALUES('4567890123456789','123');
INSERT INTO INT8_TBL VALUES(+4567890123456789,'4567890123456789');
INSERT INTO INT8_TBL VALUES('+4567890123456789','-4567890123456789');
-- int8_tbl was already created and filled in test_setup.sql.
-- Here we just try to insert bad values.
-- bad inputs
INSERT INTO INT8_TBL(q1) VALUES (' ');
INSERT INTO INT8_TBL(q1) VALUES ('xxx');
INSERT INTO INT8_TBL(q1) VALUES ('3908203590239580293850293850329485');

View File

@ -391,11 +391,11 @@ rollback;
--
explain (costs off)
select aa, bb, unique1, unique1
from tenk1 right join b on aa = unique1
from tenk1 right join b_star on aa = unique1
where bb < bb and bb is null;
select aa, bb, unique1, unique1
from tenk1 right join b on aa = unique1
from tenk1 right join b_star on aa = unique1
where bb < bb and bb is null;
--

View File

@ -1,3 +1,13 @@
-- directory paths are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
CREATE TABLE testjsonb (
j jsonb
);
\set filename :abs_srcdir '/data/jsonb.data'
COPY testjsonb FROM :'filename';
-- Strings.
SELECT '""'::jsonb; -- OK.
SELECT $$''$$::jsonb; -- ERROR, single quotes are not allowed

View File

@ -2,6 +2,12 @@
-- Test the LOCK statement
--
-- directory paths and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
-- Setup
CREATE SCHEMA lock_schema1;
SET search_path = lock_schema1;
@ -136,4 +142,10 @@ DROP ROLE regress_rol_lock1;
-- atomic ops tests
RESET search_path;
CREATE FUNCTION test_atomic_ops()
RETURNS bool
AS :'regresslib'
LANGUAGE C;
SELECT test_atomic_ops();

View File

@ -2,9 +2,23 @@
-- MISC
--
-- directory paths are passed to us in environment variables
-- directory paths and dlsuffix are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
\getenv abs_builddir PG_ABS_BUILDDIR
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION overpaid(emp)
RETURNS bool
AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION reverse_name(name)
RETURNS name
AS :'regresslib'
LANGUAGE C STRICT;
--
-- BTREE
@ -31,6 +45,10 @@ UPDATE onek
-- systems. This non-func update stuff needs to be examined
-- more closely. - jolly (2/22/96)
--
SELECT two, stringu1, ten, string4
INTO TABLE tmp
FROM onek;
UPDATE tmp
SET stringu1 = reverse_name(onek.stringu1)
FROM onek
@ -58,127 +76,116 @@ DROP TABLE tmp;
\set filename :abs_builddir '/results/onek.data'
COPY onek TO :'filename';
DELETE FROM onek;
CREATE TEMP TABLE onek_copy (LIKE onek);
COPY onek FROM :'filename';
COPY onek_copy FROM :'filename';
SELECT unique1 FROM onek WHERE unique1 < 2 ORDER BY unique1;
SELECT * FROM onek EXCEPT ALL SELECT * FROM onek_copy;
DELETE FROM onek2;
COPY onek2 FROM :'filename';
SELECT unique1 FROM onek2 WHERE unique1 < 2 ORDER BY unique1;
SELECT * FROM onek_copy EXCEPT ALL SELECT * FROM onek;
\set filename :abs_builddir '/results/stud_emp.data'
COPY BINARY stud_emp TO :'filename';
DELETE FROM stud_emp;
CREATE TEMP TABLE stud_emp_copy (LIKE stud_emp);
COPY BINARY stud_emp FROM :'filename';
SELECT * FROM stud_emp;
-- COPY aggtest FROM stdin;
-- 56 7.8
-- 100 99.097
-- 0 0.09561
-- 42 324.78
-- .
-- COPY aggtest TO stdout;
COPY BINARY stud_emp_copy FROM :'filename';
SELECT * FROM stud_emp_copy;
--
-- inheritance stress test
-- test data for postquel functions
--
SELECT * FROM a_star*;
SELECT *
FROM b_star* x
WHERE x.b = text 'bumble' or x.a < 3;
CREATE TABLE hobbies_r (
name text,
person text
);
SELECT class, a
FROM c_star* x
WHERE x.c ~ text 'hi';
CREATE TABLE equipment_r (
name text,
hobby text
);
SELECT class, b, c
FROM d_star* x
WHERE x.a < 100;
INSERT INTO hobbies_r (name, person)
SELECT 'posthacking', p.name
FROM person* p
WHERE p.name = 'mike' or p.name = 'jeff';
SELECT class, c FROM e_star* x WHERE x.c NOTNULL;
INSERT INTO hobbies_r (name, person)
SELECT 'basketball', p.name
FROM person p
WHERE p.name = 'joe' or p.name = 'sally';
SELECT * FROM f_star* x WHERE x.c ISNULL;
INSERT INTO hobbies_r (name) VALUES ('skywalking');
-- grouping and aggregation on inherited sets have been busted in the past...
INSERT INTO equipment_r (name, hobby) VALUES ('advil', 'posthacking');
SELECT sum(a) FROM a_star*;
INSERT INTO equipment_r (name, hobby) VALUES ('peet''s coffee', 'posthacking');
SELECT class, sum(a) FROM a_star* GROUP BY class ORDER BY class;
INSERT INTO equipment_r (name, hobby) VALUES ('hightops', 'basketball');
ALTER TABLE f_star RENAME COLUMN f TO ff;
ALTER TABLE e_star* RENAME COLUMN e TO ee;
ALTER TABLE d_star* RENAME COLUMN d TO dd;
ALTER TABLE c_star* RENAME COLUMN c TO cc;
ALTER TABLE b_star* RENAME COLUMN b TO bb;
ALTER TABLE a_star* RENAME COLUMN a TO aa;
SELECT class, aa
FROM a_star* x
WHERE aa ISNULL;
-- As of Postgres 7.1, ALTER implicitly recurses,
-- so this should be same as ALTER a_star*
ALTER TABLE a_star RENAME COLUMN aa TO foo;
SELECT class, foo
FROM a_star* x
WHERE x.foo >= 2;
ALTER TABLE a_star RENAME COLUMN foo TO aa;
SELECT *
from a_star*
WHERE aa < 1000;
ALTER TABLE f_star ADD COLUMN f int4;
UPDATE f_star SET f = 10;
ALTER TABLE e_star* ADD COLUMN e int4;
--UPDATE e_star* SET e = 42;
SELECT * FROM e_star*;
ALTER TABLE a_star* ADD COLUMN a text;
-- That ALTER TABLE should have added TOAST tables.
SELECT relname, reltoastrelid <> 0 AS has_toast_table
FROM pg_class
WHERE oid::regclass IN ('a_star', 'c_star')
ORDER BY 1;
--UPDATE b_star*
-- SET a = text 'gazpacho'
-- WHERE aa > 4;
SELECT class, aa, a FROM a_star*;
--
-- versions
--
INSERT INTO equipment_r (name, hobby) VALUES ('guts', 'skywalking');
--
-- postquel functions
--
CREATE FUNCTION hobbies(person)
RETURNS setof hobbies_r
AS 'select * from hobbies_r where person = $1.name'
LANGUAGE SQL;
CREATE FUNCTION hobby_construct(text, text)
RETURNS hobbies_r
AS 'select $1 as name, $2 as hobby'
LANGUAGE SQL;
CREATE FUNCTION hobby_construct_named(name text, hobby text)
RETURNS hobbies_r
AS 'select name, hobby'
LANGUAGE SQL;
CREATE FUNCTION hobbies_by_name(hobbies_r.name%TYPE)
RETURNS hobbies_r.person%TYPE
AS 'select person from hobbies_r where name = $1'
LANGUAGE SQL;
CREATE FUNCTION equipment(hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where hobby = $1.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named(hobby hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where equipment_r.hobby = equipment_named.hobby.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_1a(hobby hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where hobby = equipment_named_ambiguous_1a.hobby.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_1b(hobby hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where equipment_r.hobby = hobby.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_1c(hobby hobbies_r)
RETURNS setof equipment_r
AS 'select * from equipment_r where hobby = hobby.name'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_2a(hobby text)
RETURNS setof equipment_r
AS 'select * from equipment_r where hobby = equipment_named_ambiguous_2a.hobby'
LANGUAGE SQL;
CREATE FUNCTION equipment_named_ambiguous_2b(hobby text)
RETURNS setof equipment_r
AS 'select * from equipment_r where equipment_r.hobby = hobby'
LANGUAGE SQL;
--
-- mike does post_hacking,
-- joe and sally play basketball, and

View File

@ -1,3 +1,9 @@
-- directory paths and dlsuffix are passed to us in environment variables
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
--
-- num_nulls()
--
@ -34,6 +40,11 @@ SELECT num_nulls();
-- canonicalize_path()
--
CREATE FUNCTION test_canonicalize_path(text)
RETURNS text
AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
SELECT test_canonicalize_path('/');
SELECT test_canonicalize_path('/./abc/def/');
SELECT test_canonicalize_path('/./../abc/def');
@ -156,6 +167,11 @@ SELECT * FROM tenk1 a JOIN tenk1 b ON a.unique1 = b.unique1
WHERE my_int_eq(a.unique2, 42);
-- With support function that knows it's int4eq, we get a different plan
CREATE FUNCTION test_support_func(internal)
RETURNS internal
AS :'regresslib', 'test_support_func'
LANGUAGE C STRICT;
ALTER FUNCTION my_int_eq(int, int) SUPPORT test_support_func;
EXPLAIN (COSTS OFF)

View File

@ -5,31 +5,11 @@
-- avoid bit-exact output here because operations may not be bit-exact.
SET extra_float_digits = 0;
CREATE TABLE POINT_TBL(f1 point);
-- point_tbl was already created and filled in test_setup.sql.
-- Here we just try to insert bad values.
INSERT INTO POINT_TBL(f1) VALUES ('(0.0,0.0)');
INSERT INTO POINT_TBL(f1) VALUES ('(-10.0,0.0)');
INSERT INTO POINT_TBL(f1) VALUES ('(-3.0,4.0)');
INSERT INTO POINT_TBL(f1) VALUES ('(5.1, 34.5)');
INSERT INTO POINT_TBL(f1) VALUES ('(-5.0,-12.0)');
INSERT INTO POINT_TBL(f1) VALUES ('(1e-300,-1e-300)'); -- To underflow
INSERT INTO POINT_TBL(f1) VALUES ('(1e+300,Inf)'); -- To overflow
INSERT INTO POINT_TBL(f1) VALUES ('(Inf,1e+300)'); -- Transposed
INSERT INTO POINT_TBL(f1) VALUES (' ( Nan , NaN ) ');
-- bad format points
INSERT INTO POINT_TBL(f1) VALUES ('asdfasdf');
INSERT INTO POINT_TBL(f1) VALUES ('10.0,10.0');
INSERT INTO POINT_TBL(f1) VALUES ('(10.0 10.0)');
INSERT INTO POINT_TBL(f1) VALUES ('(10.0, 10.0) x');

View File

@ -1,9 +1,8 @@
-- Tests for range data types.
create type textrange as range (subtype=text, collation="C");
--
-- test input parser
-- (type textrange was already made in test_setup.sql)
--
-- negative tests; should fail
@ -424,13 +423,12 @@ set timezone to default;
--
-- Test user-defined range of floats
-- (type float8range was already made in test_setup.sql)
--
--should fail
create type float8range as range (subtype=float8, subtype_diff=float4mi);
create type bogus_float8range as range (subtype=float8, subtype_diff=float4mi);
--should succeed
create type float8range as range (subtype=float8, subtype_diff=float8mi);
select '[123.001, 5.e9)'::float8range @> 888.882::float8;
create table float8range_test(f8r float8range, i int);
insert into float8range_test values(float8range(-100.00007, '1.111113e9'), 42);

View File

@ -776,11 +776,11 @@ drop table cchild;
\a\t
SELECT viewname, definition FROM pg_views
WHERE schemaname IN ('pg_catalog', 'public')
WHERE schemaname = 'pg_catalog'
ORDER BY viewname;
SELECT tablename, rulename, definition FROM pg_rules
WHERE schemaname IN ('pg_catalog', 'public')
WHERE schemaname = 'pg_catalog'
ORDER BY tablename, rulename;
-- restore normal output mode

View File

@ -1,25 +1,7 @@
VACUUM;
--
-- sanity check, if we don't have indices the test will take years to
-- complete. But skip TOAST relations (since they will have varying
-- names depending on the current OID counter) as well as temp tables
-- of other backends (to avoid timing-dependent behavior).
--
-- temporarily disable fancy output, so catalog changes create less diff noise
\a\t
SELECT relname, relhasindex
FROM pg_class c LEFT JOIN pg_namespace n ON n.oid = relnamespace
WHERE relkind IN ('r', 'p') AND (nspname ~ '^pg_temp_') IS NOT TRUE
ORDER BY relname;
-- restore normal output mode
\a\t
--
-- another sanity check: every system catalog that has OIDs should have
-- Sanity check: every system catalog that has OIDs should have
-- a unique index on OID. This ensures that the OIDs will be unique,
-- even after the OID counter wraps around.
-- We exclude non-system tables from the check by looking at nspname.
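-- (Illustrative sketch, not part of this commit: in spirit the check is
-- something like
--   SELECT c.relname
--   FROM pg_class c JOIN pg_namespace n ON n.oid = c.relnamespace
--   WHERE n.nspname = 'pg_catalog' AND c.relkind = 'r'
--     AND EXISTS (SELECT 1 FROM pg_attribute a
--                 WHERE a.attrelid = c.oid AND a.attname = 'oid')
--     AND NOT EXISTS (SELECT 1 FROM pg_index i
--                     WHERE i.indrelid = c.oid AND i.indisunique);
-- i.e. it should return no rows; the real query is stricter about which
-- index qualifies.)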

View File

@ -90,11 +90,6 @@ RESET enable_seqscan;
RESET enable_bitmapscan;
RESET enable_sort;
SELECT two, stringu1, ten, string4
INTO TABLE tmp
FROM onek;
--
-- awk '{print $1,$2;}' person.data |
-- awk '{if(NF!=2){print $3,$2;}else{print;}}' - emp.data |

View File

@ -5,24 +5,24 @@
--
-- awk '{print $3;}' onek.data | sort -n | uniq
--
SELECT DISTINCT two FROM tmp ORDER BY 1;
SELECT DISTINCT two FROM onek ORDER BY 1;
--
-- awk '{print $5;}' onek.data | sort -n | uniq
--
SELECT DISTINCT ten FROM tmp ORDER BY 1;
SELECT DISTINCT ten FROM onek ORDER BY 1;
--
-- awk '{print $16;}' onek.data | sort -d | uniq
--
SELECT DISTINCT string4 FROM tmp ORDER BY 1;
SELECT DISTINCT string4 FROM onek ORDER BY 1;
--
-- awk '{print $3,$16,$5;}' onek.data | sort -d | uniq |
-- sort +0n -1 +1d -2 +2n -3
--
SELECT DISTINCT two, string4, ten
FROM tmp
FROM onek
ORDER BY two using <, string4 using <, ten using <;
--

View File

@ -3,16 +3,16 @@
--
SELECT DISTINCT ON (string4) string4, two, ten
FROM tmp
FROM onek
ORDER BY string4 using <, two using >, ten using <;
-- this will fail due to conflict of ordering requirements
SELECT DISTINCT ON (string4, ten) string4, two, ten
FROM tmp
FROM onek
ORDER BY string4 using <, two using <, ten using <;
SELECT DISTINCT ON (string4, ten) string4, ten, two
FROM tmp
FROM onek
ORDER BY string4 using <, ten using >, two using <;
-- bug #5049: early 8.4.x chokes on volatile DISTINCT ON clauses

View File

@ -110,11 +110,11 @@ DROP TABLE easi, easi2;
--
-- Disallowed uses of SELECT ... INTO. All should fail
--
DECLARE foo CURSOR FOR SELECT 1 INTO b;
DECLARE foo CURSOR FOR SELECT 1 INTO int4_tbl;
COPY (SELECT 1 INTO frak UNION SELECT 2) TO 'blob';
SELECT * FROM (SELECT 1 INTO f) bar;
CREATE VIEW foo AS SELECT 1 INTO b;
INSERT INTO b SELECT 1 INTO f;
CREATE VIEW foo AS SELECT 1 INTO int4_tbl;
INSERT INTO int4_tbl SELECT 1 INTO f;
-- Test CREATE TABLE AS ... IF NOT EXISTS
CREATE TABLE ctas_ine_tbl AS SELECT 1;

View File

@ -1,3 +1,269 @@
--
-- TEST_SETUP --- prepare environment expected by regression test scripts
--
-- directory paths and dlsuffix are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
\getenv libdir PG_LIBDIR
\getenv dlsuffix PG_DLSUFFIX
\set regresslib :libdir '/regress' :dlsuffix
--
-- Postgres formerly made the public schema read/write by default,
-- and most of the core regression tests still expect that.
--
GRANT ALL ON SCHEMA public TO public;
--
-- These tables have traditionally been referenced by many tests,
-- so create and populate them. Insert only non-error values here.
-- (Some subsequent tests try to insert erroneous values. That's okay
-- because the table won't actually change. Do not change the contents
-- of these tables in later tests, as it may affect other tests.)
--
CREATE TABLE CHAR_TBL(f1 char(4));
INSERT INTO CHAR_TBL (f1) VALUES
('a'),
('ab'),
('abcd'),
('abcd ');
VACUUM CHAR_TBL;
CREATE TABLE FLOAT8_TBL(f1 float8);
INSERT INTO FLOAT8_TBL(f1) VALUES
('0.0'),
('-34.84'),
('-1004.30'),
('-1.2345678901234e+200'),
('-1.2345678901234e-200');
VACUUM FLOAT8_TBL;
CREATE TABLE INT2_TBL(f1 int2);
INSERT INTO INT2_TBL(f1) VALUES
('0 '),
(' 1234 '),
(' -1234'),
('32767'), -- largest and smallest values
('-32767');
VACUUM INT2_TBL;
CREATE TABLE INT4_TBL(f1 int4);
INSERT INTO INT4_TBL(f1) VALUES
(' 0 '),
('123456 '),
(' -123456'),
('2147483647'), -- largest and smallest values
('-2147483647');
VACUUM INT4_TBL;
CREATE TABLE INT8_TBL(q1 int8, q2 int8);
INSERT INTO INT8_TBL VALUES
(' 123 ',' 456'),
('123 ','4567890123456789'),
('4567890123456789','123'),
(+4567890123456789,'4567890123456789'),
('+4567890123456789','-4567890123456789');
VACUUM INT8_TBL;
CREATE TABLE POINT_TBL(f1 point);
INSERT INTO POINT_TBL(f1) VALUES
('(0.0,0.0)'),
('(-10.0,0.0)'),
('(-3.0,4.0)'),
('(5.1, 34.5)'),
('(-5.0,-12.0)'),
('(1e-300,-1e-300)'), -- To underflow
('(1e+300,Inf)'), -- To overflow
('(Inf,1e+300)'), -- Transposed
(' ( Nan , NaN ) '),
('10.0,10.0');
-- We intentionally don't vacuum point_tbl here; geometry depends on that
CREATE TABLE TEXT_TBL (f1 text);
INSERT INTO TEXT_TBL VALUES
('doh!'),
('hi de ho neighbor');
VACUUM TEXT_TBL;
CREATE TABLE VARCHAR_TBL(f1 varchar(4));
INSERT INTO VARCHAR_TBL (f1) VALUES
('a'),
('ab'),
('abcd'),
('abcd ');
VACUUM VARCHAR_TBL;
CREATE TABLE onek (
unique1 int4,
unique2 int4,
two int4,
four int4,
ten int4,
twenty int4,
hundred int4,
thousand int4,
twothousand int4,
fivethous int4,
tenthous int4,
odd int4,
even int4,
stringu1 name,
stringu2 name,
string4 name
);
\set filename :abs_srcdir '/data/onek.data'
COPY onek FROM :'filename';
VACUUM ANALYZE onek;
CREATE TABLE onek2 AS SELECT * FROM onek;
VACUUM ANALYZE onek2;
CREATE TABLE tenk1 (
unique1 int4,
unique2 int4,
two int4,
four int4,
ten int4,
twenty int4,
hundred int4,
thousand int4,
twothousand int4,
fivethous int4,
tenthous int4,
odd int4,
even int4,
stringu1 name,
stringu2 name,
string4 name
);
\set filename :abs_srcdir '/data/tenk.data'
COPY tenk1 FROM :'filename';
VACUUM ANALYZE tenk1;
CREATE TABLE tenk2 AS SELECT * FROM tenk1;
VACUUM ANALYZE tenk2;
CREATE TABLE person (
name text,
age int4,
location point
);
\set filename :abs_srcdir '/data/person.data'
COPY person FROM :'filename';
VACUUM ANALYZE person;
CREATE TABLE emp (
salary int4,
manager name
) INHERITS (person);
\set filename :abs_srcdir '/data/emp.data'
COPY emp FROM :'filename';
VACUUM ANALYZE emp;
CREATE TABLE student (
gpa float8
) INHERITS (person);
\set filename :abs_srcdir '/data/student.data'
COPY student FROM :'filename';
VACUUM ANALYZE student;
CREATE TABLE stud_emp (
percent int4
) INHERITS (emp, student);
\set filename :abs_srcdir '/data/stud_emp.data'
COPY stud_emp FROM :'filename';
VACUUM ANALYZE stud_emp;
CREATE TABLE road (
name text,
thepath path
);
\set filename :abs_srcdir '/data/streets.data'
COPY road FROM :'filename';
VACUUM ANALYZE road;
CREATE TABLE ihighway () INHERITS (road);
INSERT INTO ihighway
SELECT *
FROM ONLY road
WHERE name ~ 'I- .*';
VACUUM ANALYZE ihighway;
CREATE TABLE shighway (
surface text
) INHERITS (road);
INSERT INTO shighway
SELECT *, 'asphalt'
FROM ONLY road
WHERE name ~ 'State Hwy.*';
VACUUM ANALYZE shighway;
--
-- We must have some enum type in the database for opr_sanity and type_sanity.
--
create type stoplight as enum ('red', 'yellow', 'green');
--
-- Also create some non-built-in range types.
--
create type float8range as range (subtype = float8, subtype_diff = float8mi);
create type textrange as range (subtype = text, collation = "C");
--
-- Create some C functions that will be used by various tests.
--
CREATE FUNCTION binary_coercible(oid, oid)
RETURNS bool
AS :'regresslib', 'binary_coercible'
LANGUAGE C STRICT STABLE PARALLEL SAFE;
CREATE FUNCTION ttdummy ()
RETURNS trigger
AS :'regresslib'
LANGUAGE C;
-- Use hand-rolled hash functions and operator classes to get predictable
-- result on different machines. The hash function for int4 simply returns
-- the sum of the values passed to it and the one for text returns the length
-- of the non-empty string value passed to it or 0.
create function part_hashint4_noop(value int4, seed int8)
returns int8 as $$
select value + seed;
$$ language sql strict immutable parallel safe;
create operator class part_test_int4_ops for type int4 using hash as
operator 1 =,
function 2 part_hashint4_noop(int4, int8);
create function part_hashtext_length(value text, seed int8)
returns int8 as $$
select length(coalesce(value, ''))::int8
$$ language sql strict immutable parallel safe;
create operator class part_test_text_ops for type text using hash as
operator 1 =,
function 2 part_hashtext_length(text, int8);
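-- (Illustrative note, not part of this commit: these opclasses are used along
-- the lines of
--   CREATE TABLE hp (a int) PARTITION BY HASH (a part_test_int4_ops);
--   CREATE TABLE hp0 PARTITION OF hp FOR VALUES WITH (MODULUS 2, REMAINDER 0);
--   CREATE TABLE hp1 PARTITION OF hp FOR VALUES WITH (MODULUS 2, REMAINDER 1);
-- so that which partition a given row lands in is the same on every machine.)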

View File

@ -6,11 +6,7 @@ SELECT text 'this is a text string' = text 'this is a text string' AS true;
SELECT text 'this is a text string' = text 'this is a text strin' AS false;
CREATE TABLE TEXT_TBL (f1 text);
INSERT INTO TEXT_TBL VALUES ('doh!');
INSERT INTO TEXT_TBL VALUES ('hi de ho neighbor');
-- text_tbl was already created and filled in test_setup.sql.
SELECT * FROM TEXT_TBL;
-- As of 8.3 we have removed most implicit casts to text, so that for example

View File

@ -4,10 +4,12 @@
BEGIN;
SELECT *
INTO TABLE xacttest
FROM aggtest;
CREATE TABLE xacttest (a smallint, b real);
INSERT INTO xacttest VALUES
(56, 7.8),
(100, 99.097),
(0, 0.09561),
(42, 324.78);
INSERT INTO xacttest (a, b) VALUES (777, 777.777);
END;
@ -20,10 +22,10 @@ BEGIN;
CREATE TABLE disappear (a int4);
DELETE FROM aggtest;
DELETE FROM xacttest;
-- should be empty
SELECT * FROM aggtest;
SELECT * FROM xacttest;
ABORT;
@ -31,7 +33,7 @@ ABORT;
SELECT oid FROM pg_class WHERE relname = 'disappear';
-- should have members again
SELECT * FROM aggtest;
SELECT * FROM xacttest;
-- Read-only tests

Some files were not shown because too many files have changed in this diff