Restrict vertical tightness to parentheses in Perl code

The vertical tightness settings collapse vertical whitespace between
opening and closing brackets (parentheses, square brackets and braces).
This can make data structures in particular harder to read, and is not
very consistent with our style in non-Perl code. This patch restricts
that setting to parentheses only, and reformats all the Perl code
accordingly. Not applying the setting to parentheses at all has some
unfortunate effects, so the consensus is to keep it for parentheses and
drop it for the other bracket types.
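
For illustration, the options involved look roughly like this (a sketch
using the standard perltidy flags, not a verbatim excerpt of this
commit's perltidyrc change): the general vertical-tightness flags act on
every container type, while the paren-specific variants confine the
behaviour to parentheses.

    # General form: tightens parentheses, square brackets and braces alike
    --vertical-tightness=2
    --vertical-tightness-closing=2

    # Paren-only form: the same tightening, restricted to parentheses
    --paren-vertical-tightness=2
    --paren-vertical-tightness-closing=2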

The diff for this patch does highlight some places where structures
should have trailing commas. They can be added manually, as there is no
automatic tool to do so.
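
As an example of the resulting layout (an excerpt reconstructed from the
first hunk below, with alignment approximated), a hash constructor goes
from being held tight against its braces to having the braces on their
own lines; the last element is where a trailing comma would have to be
added by hand:

    # Before: braces held tight against the first and last elements
    push @{ $catalog{indexing} },
      { is_unique  => $1 ? 1 : 0,
        index_name => $2,
        index_oid  => $3,
        index_decl => $4 };

    # After: braces opened up; a trailing comma after index_decl must be
    # added manually if desired
    push @{ $catalog{indexing} },
      {
        is_unique  => $1 ? 1 : 0,
        index_name => $2,
        index_oid  => $3,
        index_decl => $4
      };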

Discussion: https://postgr.es/m/a2f2b87c-56be-c070-bfc0-36288b4b41c1@2ndQuadrant.com
Andrew Dunstan 2018-05-09 10:14:46 -04:00
parent 286bb240e1
commit 35361ee788
46 changed files with 1815 additions and 851 deletions


@ -95,10 +95,12 @@ sub ParseHeader
elsif (/^DECLARE_(UNIQUE_)?INDEX\(\s*(\w+),\s*(\d+),\s*(.+)\)/) elsif (/^DECLARE_(UNIQUE_)?INDEX\(\s*(\w+),\s*(\d+),\s*(.+)\)/)
{ {
push @{ $catalog{indexing} }, push @{ $catalog{indexing} },
{ is_unique => $1 ? 1 : 0, {
is_unique => $1 ? 1 : 0,
index_name => $2, index_name => $2,
index_oid => $3, index_oid => $3,
index_decl => $4 }; index_decl => $4
};
} }
elsif (/^CATALOG\((\w+),(\d+),(\w+)\)/) elsif (/^CATALOG\((\w+),(\d+),(\w+)\)/)
{ {


@ -97,11 +97,13 @@ foreach my $row (@{ $catalog_data{pg_proc} })
next if $bki_values{prolang} ne $INTERNALlanguageId; next if $bki_values{prolang} ne $INTERNALlanguageId;
push @fmgr, push @fmgr,
{ oid => $bki_values{oid}, {
oid => $bki_values{oid},
strict => $bki_values{proisstrict}, strict => $bki_values{proisstrict},
retset => $bki_values{proretset}, retset => $bki_values{proretset},
nargs => $bki_values{pronargs}, nargs => $bki_values{pronargs},
prosrc => $bki_values{prosrc}, }; prosrc => $bki_values{prosrc},
};
} }
# Emit headers for both files # Emit headers for both files


@ -48,12 +48,14 @@ foreach my $i (@$cp950txt)
&& $code <= 0xf9dc) && $code <= 0xf9dc)
{ {
push @$all, push @$all,
{ code => $code, {
code => $code,
ucs => $ucs, ucs => $ucs,
comment => $i->{comment}, comment => $i->{comment},
direction => BOTH, direction => BOTH,
f => $i->{f}, f => $i->{f},
l => $i->{l} }; l => $i->{l}
};
} }
} }


@ -70,11 +70,13 @@ while (<$in>)
} }
push @mapping, push @mapping,
{ ucs => $ucs, {
ucs => $ucs,
code => $code, code => $code,
direction => BOTH, direction => BOTH,
f => $in_file, f => $in_file,
l => $. }; l => $.
};
} }
close($in); close($in);


@ -33,13 +33,15 @@ while (my $line = <$in>)
my $ucs2 = hex($u2); my $ucs2 = hex($u2);
push @all, push @all,
{ direction => BOTH, {
direction => BOTH,
ucs => $ucs1, ucs => $ucs1,
ucs_second => $ucs2, ucs_second => $ucs2,
code => $code, code => $code,
comment => $rest, comment => $rest,
f => $in_file, f => $in_file,
l => $. }; l => $.
};
} }
elsif ($line =~ /^0x(.*)[ \t]*U\+(.*)[ \t]*#(.*)$/) elsif ($line =~ /^0x(.*)[ \t]*U\+(.*)[ \t]*#(.*)$/)
{ {
@ -52,12 +54,14 @@ while (my $line = <$in>)
next if ($code < 0x80 && $ucs < 0x80); next if ($code < 0x80 && $ucs < 0x80);
push @all, push @all,
{ direction => BOTH, {
direction => BOTH,
ucs => $ucs, ucs => $ucs,
code => $code, code => $code,
comment => $rest, comment => $rest,
f => $in_file, f => $in_file,
l => $. }; l => $.
};
} }
} }
close($in); close($in);


@ -115,352 +115,524 @@ foreach my $i (@mapping)
} }
push @mapping, ( push @mapping, (
{ direction => BOTH, {
direction => BOTH,
ucs => 0x4efc, ucs => 0x4efc,
code => 0x8ff4af, code => 0x8ff4af,
comment => '# CJK(4EFC)' }, comment => '# CJK(4EFC)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x50f4, ucs => 0x50f4,
code => 0x8ff4b0, code => 0x8ff4b0,
comment => '# CJK(50F4)' }, comment => '# CJK(50F4)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x51EC, ucs => 0x51EC,
code => 0x8ff4b1, code => 0x8ff4b1,
comment => '# CJK(51EC)' }, comment => '# CJK(51EC)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x5307, ucs => 0x5307,
code => 0x8ff4b2, code => 0x8ff4b2,
comment => '# CJK(5307)' }, comment => '# CJK(5307)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x5324, ucs => 0x5324,
code => 0x8ff4b3, code => 0x8ff4b3,
comment => '# CJK(5324)' }, comment => '# CJK(5324)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x548A, ucs => 0x548A,
code => 0x8ff4b5, code => 0x8ff4b5,
comment => '# CJK(548A)' }, comment => '# CJK(548A)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x5759, ucs => 0x5759,
code => 0x8ff4b6, code => 0x8ff4b6,
comment => '# CJK(5759)' }, comment => '# CJK(5759)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x589E, ucs => 0x589E,
code => 0x8ff4b9, code => 0x8ff4b9,
comment => '# CJK(589E)' }, comment => '# CJK(589E)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x5BEC, ucs => 0x5BEC,
code => 0x8ff4ba, code => 0x8ff4ba,
comment => '# CJK(5BEC)' }, comment => '# CJK(5BEC)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x5CF5, ucs => 0x5CF5,
code => 0x8ff4bb, code => 0x8ff4bb,
comment => '# CJK(5CF5)' }, comment => '# CJK(5CF5)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x5D53, ucs => 0x5D53,
code => 0x8ff4bc, code => 0x8ff4bc,
comment => '# CJK(5D53)' }, comment => '# CJK(5D53)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x5FB7, ucs => 0x5FB7,
code => 0x8ff4be, code => 0x8ff4be,
comment => '# CJK(5FB7)' }, comment => '# CJK(5FB7)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x6085, ucs => 0x6085,
code => 0x8ff4bf, code => 0x8ff4bf,
comment => '# CJK(6085)' }, comment => '# CJK(6085)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x6120, ucs => 0x6120,
code => 0x8ff4c0, code => 0x8ff4c0,
comment => '# CJK(6120)' }, comment => '# CJK(6120)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x654E, ucs => 0x654E,
code => 0x8ff4c1, code => 0x8ff4c1,
comment => '# CJK(654E)' }, comment => '# CJK(654E)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x663B, ucs => 0x663B,
code => 0x8ff4c2, code => 0x8ff4c2,
comment => '# CJK(663B)' }, comment => '# CJK(663B)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x6665, ucs => 0x6665,
code => 0x8ff4c3, code => 0x8ff4c3,
comment => '# CJK(6665)' }, comment => '# CJK(6665)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x6801, ucs => 0x6801,
code => 0x8ff4c6, code => 0x8ff4c6,
comment => '# CJK(6801)' }, comment => '# CJK(6801)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x6A6B, ucs => 0x6A6B,
code => 0x8ff4c9, code => 0x8ff4c9,
comment => '# CJK(6A6B)' }, comment => '# CJK(6A6B)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x6AE2, ucs => 0x6AE2,
code => 0x8ff4ca, code => 0x8ff4ca,
comment => '# CJK(6AE2)' }, comment => '# CJK(6AE2)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x6DF2, ucs => 0x6DF2,
code => 0x8ff4cc, code => 0x8ff4cc,
comment => '# CJK(6DF2)' }, comment => '# CJK(6DF2)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x6DF8, ucs => 0x6DF8,
code => 0x8ff4cb, code => 0x8ff4cb,
comment => '# CJK(6DF8)' }, comment => '# CJK(6DF8)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x7028, ucs => 0x7028,
code => 0x8ff4cd, code => 0x8ff4cd,
comment => '# CJK(7028)' }, comment => '# CJK(7028)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x70BB, ucs => 0x70BB,
code => 0x8ff4ae, code => 0x8ff4ae,
comment => '# CJK(70BB)' }, comment => '# CJK(70BB)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x7501, ucs => 0x7501,
code => 0x8ff4d0, code => 0x8ff4d0,
comment => '# CJK(7501)' }, comment => '# CJK(7501)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x7682, ucs => 0x7682,
code => 0x8ff4d1, code => 0x8ff4d1,
comment => '# CJK(7682)' }, comment => '# CJK(7682)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x769E, ucs => 0x769E,
code => 0x8ff4d2, code => 0x8ff4d2,
comment => '# CJK(769E)' }, comment => '# CJK(769E)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x7930, ucs => 0x7930,
code => 0x8ff4d4, code => 0x8ff4d4,
comment => '# CJK(7930)' }, comment => '# CJK(7930)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x7AE7, ucs => 0x7AE7,
code => 0x8ff4d9, code => 0x8ff4d9,
comment => '# CJK(7AE7)' }, comment => '# CJK(7AE7)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x7DA0, ucs => 0x7DA0,
code => 0x8ff4dc, code => 0x8ff4dc,
comment => '# CJK(7DA0)' }, comment => '# CJK(7DA0)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x7DD6, ucs => 0x7DD6,
code => 0x8ff4dd, code => 0x8ff4dd,
comment => '# CJK(7DD6)' }, comment => '# CJK(7DD6)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x8362, ucs => 0x8362,
code => 0x8ff4df, code => 0x8ff4df,
comment => '# CJK(8362)' }, comment => '# CJK(8362)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x85B0, ucs => 0x85B0,
code => 0x8ff4e1, code => 0x8ff4e1,
comment => '# CJK(85B0)' }, comment => '# CJK(85B0)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x8807, ucs => 0x8807,
code => 0x8ff4e4, code => 0x8ff4e4,
comment => '# CJK(8807)' }, comment => '# CJK(8807)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x8B7F, ucs => 0x8B7F,
code => 0x8ff4e6, code => 0x8ff4e6,
comment => '# CJK(8B7F)' }, comment => '# CJK(8B7F)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x8CF4, ucs => 0x8CF4,
code => 0x8ff4e7, code => 0x8ff4e7,
comment => '# CJK(8CF4)' }, comment => '# CJK(8CF4)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x8D76, ucs => 0x8D76,
code => 0x8ff4e8, code => 0x8ff4e8,
comment => '# CJK(8D76)' }, comment => '# CJK(8D76)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x90DE, ucs => 0x90DE,
code => 0x8ff4ec, code => 0x8ff4ec,
comment => '# CJK(90DE)' }, comment => '# CJK(90DE)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x9115, ucs => 0x9115,
code => 0x8ff4ee, code => 0x8ff4ee,
comment => '# CJK(9115)' }, comment => '# CJK(9115)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x9592, ucs => 0x9592,
code => 0x8ff4f1, code => 0x8ff4f1,
comment => '# CJK(9592)' }, comment => '# CJK(9592)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x973B, ucs => 0x973B,
code => 0x8ff4f4, code => 0x8ff4f4,
comment => '# CJK(973B)' }, comment => '# CJK(973B)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x974D, ucs => 0x974D,
code => 0x8ff4f5, code => 0x8ff4f5,
comment => '# CJK(974D)' }, comment => '# CJK(974D)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x9751, ucs => 0x9751,
code => 0x8ff4f6, code => 0x8ff4f6,
comment => '# CJK(9751)' }, comment => '# CJK(9751)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x999E, ucs => 0x999E,
code => 0x8ff4fa, code => 0x8ff4fa,
comment => '# CJK(999E)' }, comment => '# CJK(999E)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x9AD9, ucs => 0x9AD9,
code => 0x8ff4fb, code => 0x8ff4fb,
comment => '# CJK(9AD9)' }, comment => '# CJK(9AD9)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x9B72, ucs => 0x9B72,
code => 0x8ff4fc, code => 0x8ff4fc,
comment => '# CJK(9B72)' }, comment => '# CJK(9B72)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x9ED1, ucs => 0x9ED1,
code => 0x8ff4fe, code => 0x8ff4fe,
comment => '# CJK(9ED1)' }, comment => '# CJK(9ED1)'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xF929, ucs => 0xF929,
code => 0x8ff4c5, code => 0x8ff4c5,
comment => '# CJK COMPATIBILITY IDEOGRAPH-F929' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-F929'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xF9DC, ucs => 0xF9DC,
code => 0x8ff4f2, code => 0x8ff4f2,
comment => '# CJK COMPATIBILITY IDEOGRAPH-F9DC' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-F9DC'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA0E, ucs => 0xFA0E,
code => 0x8ff4b4, code => 0x8ff4b4,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA0E' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA0E'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA0F, ucs => 0xFA0F,
code => 0x8ff4b7, code => 0x8ff4b7,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA0F' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA0F'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA10, ucs => 0xFA10,
code => 0x8ff4b8, code => 0x8ff4b8,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA10' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA10'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA11, ucs => 0xFA11,
code => 0x8ff4bd, code => 0x8ff4bd,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA11' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA11'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA12, ucs => 0xFA12,
code => 0x8ff4c4, code => 0x8ff4c4,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA12' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA12'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA13, ucs => 0xFA13,
code => 0x8ff4c7, code => 0x8ff4c7,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA13' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA13'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA14, ucs => 0xFA14,
code => 0x8ff4c8, code => 0x8ff4c8,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA14' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA14'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA15, ucs => 0xFA15,
code => 0x8ff4ce, code => 0x8ff4ce,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA15' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA15'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA16, ucs => 0xFA16,
code => 0x8ff4cf, code => 0x8ff4cf,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA16' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA16'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA17, ucs => 0xFA17,
code => 0x8ff4d3, code => 0x8ff4d3,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA17' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA17'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA18, ucs => 0xFA18,
code => 0x8ff4d5, code => 0x8ff4d5,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA18' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA18'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA19, ucs => 0xFA19,
code => 0x8ff4d6, code => 0x8ff4d6,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA19' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA19'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA1A, ucs => 0xFA1A,
code => 0x8ff4d7, code => 0x8ff4d7,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA1A' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA1A'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA1B, ucs => 0xFA1B,
code => 0x8ff4d8, code => 0x8ff4d8,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA1B' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA1B'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA1C, ucs => 0xFA1C,
code => 0x8ff4da, code => 0x8ff4da,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA1C' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA1C'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA1D, ucs => 0xFA1D,
code => 0x8ff4db, code => 0x8ff4db,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA1D' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA1D'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA1E, ucs => 0xFA1E,
code => 0x8ff4de, code => 0x8ff4de,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA1E' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA1E'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA1F, ucs => 0xFA1F,
code => 0x8ff4e0, code => 0x8ff4e0,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA1F' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA1F'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA20, ucs => 0xFA20,
code => 0x8ff4e2, code => 0x8ff4e2,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA20' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA20'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA21, ucs => 0xFA21,
code => 0x8ff4e3, code => 0x8ff4e3,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA21' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA21'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA22, ucs => 0xFA22,
code => 0x8ff4e5, code => 0x8ff4e5,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA22' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA22'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA23, ucs => 0xFA23,
code => 0x8ff4e9, code => 0x8ff4e9,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA23' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA23'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA24, ucs => 0xFA24,
code => 0x8ff4ea, code => 0x8ff4ea,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA24' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA24'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA25, ucs => 0xFA25,
code => 0x8ff4eb, code => 0x8ff4eb,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA25' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA25'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA26, ucs => 0xFA26,
code => 0x8ff4ed, code => 0x8ff4ed,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA26' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA26'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA27, ucs => 0xFA27,
code => 0x8ff4ef, code => 0x8ff4ef,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA27' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA27'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA28, ucs => 0xFA28,
code => 0x8ff4f0, code => 0x8ff4f0,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA28' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA28'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA29, ucs => 0xFA29,
code => 0x8ff4f3, code => 0x8ff4f3,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA29' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA29'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA2A, ucs => 0xFA2A,
code => 0x8ff4f7, code => 0x8ff4f7,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA2A' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA2A'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA2B, ucs => 0xFA2B,
code => 0x8ff4f8, code => 0x8ff4f8,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA2B' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA2B'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA2C, ucs => 0xFA2C,
code => 0x8ff4f9, code => 0x8ff4f9,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA2C' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA2C'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFA2D, ucs => 0xFA2D,
code => 0x8ff4fd, code => 0x8ff4fd,
comment => '# CJK COMPATIBILITY IDEOGRAPH-FA2D' }, comment => '# CJK COMPATIBILITY IDEOGRAPH-FA2D'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFF07, ucs => 0xFF07,
code => 0x8ff4a9, code => 0x8ff4a9,
comment => '# FULLWIDTH APOSTROPHE' }, comment => '# FULLWIDTH APOSTROPHE'
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0xFFE4, ucs => 0xFFE4,
code => 0x8fa2c3, code => 0x8fa2c3,
comment => '# FULLWIDTH BROKEN BAR' }, comment => '# FULLWIDTH BROKEN BAR'
},
# additional conversions for EUC_JP -> UTF-8 conversion # additional conversions for EUC_JP -> UTF-8 conversion
{ direction => TO_UNICODE, {
direction => TO_UNICODE,
ucs => 0x2116, ucs => 0x2116,
code => 0x8ff4ac, code => 0x8ff4ac,
comment => '# NUMERO SIGN' }, comment => '# NUMERO SIGN'
{ direction => TO_UNICODE, },
{
direction => TO_UNICODE,
ucs => 0x2121, ucs => 0x2121,
code => 0x8ff4ad, code => 0x8ff4ad,
comment => '# TELEPHONE SIGN' }, comment => '# TELEPHONE SIGN'
{ direction => TO_UNICODE, },
{
direction => TO_UNICODE,
ucs => 0x3231, ucs => 0x3231,
code => 0x8ff4ab, code => 0x8ff4ab,
comment => '# PARENTHESIZED IDEOGRAPH STOCK' }); comment => '# PARENTHESIZED IDEOGRAPH STOCK'
});
print_conversion_tables($this_script, "EUC_JP", \@mapping); print_conversion_tables($this_script, "EUC_JP", \@mapping);


@ -32,23 +32,29 @@ foreach my $i (@$mapping)
# Some extra characters that are not in KSX1001.TXT # Some extra characters that are not in KSX1001.TXT
push @$mapping, push @$mapping,
( { direction => BOTH, ( {
direction => BOTH,
ucs => 0x20AC, ucs => 0x20AC,
code => 0xa2e6, code => 0xa2e6,
comment => '# EURO SIGN', comment => '# EURO SIGN',
f => $this_script, f => $this_script,
l => __LINE__ }, l => __LINE__
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x00AE, ucs => 0x00AE,
code => 0xa2e7, code => 0xa2e7,
comment => '# REGISTERED SIGN', comment => '# REGISTERED SIGN',
f => $this_script, f => $this_script,
l => __LINE__ }, l => __LINE__
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x327E, ucs => 0x327E,
code => 0xa2e8, code => 0xa2e8,
comment => '# CIRCLED HANGUL IEUNG U', comment => '# CIRCLED HANGUL IEUNG U',
f => $this_script, f => $this_script,
l => __LINE__ }); l => __LINE__
});
print_conversion_tables($this_script, "EUC_KR", $mapping); print_conversion_tables($this_script, "EUC_KR", $mapping);


@ -53,12 +53,14 @@ foreach my $i (@$mapping)
if ($origcode >= 0x12121 && $origcode <= 0x20000) if ($origcode >= 0x12121 && $origcode <= 0x20000)
{ {
push @extras, push @extras,
{ ucs => $i->{ucs}, {
ucs => $i->{ucs},
code => ($i->{code} + 0x8ea10000), code => ($i->{code} + 0x8ea10000),
rest => $i->{rest}, rest => $i->{rest},
direction => TO_UNICODE, direction => TO_UNICODE,
f => $i->{f}, f => $i->{f},
l => $i->{l} }; l => $i->{l}
};
} }
} }


@ -36,11 +36,13 @@ while (<$in>)
if ($code >= 0x80 && $ucs >= 0x0080) if ($code >= 0x80 && $ucs >= 0x0080)
{ {
push @mapping, push @mapping,
{ ucs => $ucs, {
ucs => $ucs,
code => $code, code => $code,
direction => BOTH, direction => BOTH,
f => $in_file, f => $in_file,
l => $. }; l => $.
};
} }
} }
close($in); close($in);


@ -26,23 +26,29 @@ my $mapping = &read_source("JOHAB.TXT");
# Some extra characters that are not in JOHAB.TXT # Some extra characters that are not in JOHAB.TXT
push @$mapping, push @$mapping,
( { direction => BOTH, ( {
direction => BOTH,
ucs => 0x20AC, ucs => 0x20AC,
code => 0xd9e6, code => 0xd9e6,
comment => '# EURO SIGN', comment => '# EURO SIGN',
f => $this_script, f => $this_script,
l => __LINE__ }, l => __LINE__
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x00AE, ucs => 0x00AE,
code => 0xd9e7, code => 0xd9e7,
comment => '# REGISTERED SIGN', comment => '# REGISTERED SIGN',
f => $this_script, f => $this_script,
l => __LINE__ }, l => __LINE__
{ direction => BOTH, },
{
direction => BOTH,
ucs => 0x327E, ucs => 0x327E,
code => 0xd9e8, code => 0xd9e8,
comment => '# CIRCLED HANGUL IEUNG U', comment => '# CIRCLED HANGUL IEUNG U',
f => $this_script, f => $this_script,
l => __LINE__ }); l => __LINE__
});
print_conversion_tables($this_script, "JOHAB", $mapping); print_conversion_tables($this_script, "JOHAB", $mapping);


@ -33,13 +33,15 @@ while (my $line = <$in>)
my $ucs2 = hex($u2); my $ucs2 = hex($u2);
push @mapping, push @mapping,
{ code => $code, {
code => $code,
ucs => $ucs1, ucs => $ucs1,
ucs_second => $ucs2, ucs_second => $ucs2,
comment => $rest, comment => $rest,
direction => BOTH, direction => BOTH,
f => $in_file, f => $in_file,
l => $. }; l => $.
};
} }
elsif ($line =~ /^0x(.*)[ \t]*U\+(.*)[ \t]*#(.*)$/) elsif ($line =~ /^0x(.*)[ \t]*U\+(.*)[ \t]*#(.*)$/)
{ {
@ -68,12 +70,14 @@ while (my $line = <$in>)
} }
push @mapping, push @mapping,
{ code => $code, {
code => $code,
ucs => $ucs, ucs => $ucs,
comment => $rest, comment => $rest,
direction => $direction, direction => $direction,
f => $in_file, f => $in_file,
l => $. }; l => $.
};
} }
} }
close($in); close($in);


@ -36,53 +36,69 @@ foreach my $i (@$mapping)
# Add these UTF8->SJIS pairs to the table. # Add these UTF8->SJIS pairs to the table.
push @$mapping, push @$mapping,
( { direction => FROM_UNICODE, ( {
direction => FROM_UNICODE,
ucs => 0x00a2, ucs => 0x00a2,
code => 0x8191, code => 0x8191,
comment => '# CENT SIGN', comment => '# CENT SIGN',
f => $this_script, f => $this_script,
l => __LINE__ }, l => __LINE__
{ direction => FROM_UNICODE, },
{
direction => FROM_UNICODE,
ucs => 0x00a3, ucs => 0x00a3,
code => 0x8192, code => 0x8192,
comment => '# POUND SIGN', comment => '# POUND SIGN',
f => $this_script, f => $this_script,
l => __LINE__ }, l => __LINE__
{ direction => FROM_UNICODE, },
{
direction => FROM_UNICODE,
ucs => 0x00a5, ucs => 0x00a5,
code => 0x5c, code => 0x5c,
comment => '# YEN SIGN', comment => '# YEN SIGN',
f => $this_script, f => $this_script,
l => __LINE__ }, l => __LINE__
{ direction => FROM_UNICODE, },
{
direction => FROM_UNICODE,
ucs => 0x00ac, ucs => 0x00ac,
code => 0x81ca, code => 0x81ca,
comment => '# NOT SIGN', comment => '# NOT SIGN',
f => $this_script, f => $this_script,
l => __LINE__ }, l => __LINE__
{ direction => FROM_UNICODE, },
{
direction => FROM_UNICODE,
ucs => 0x2016, ucs => 0x2016,
code => 0x8161, code => 0x8161,
comment => '# DOUBLE VERTICAL LINE', comment => '# DOUBLE VERTICAL LINE',
f => $this_script, f => $this_script,
l => __LINE__ }, l => __LINE__
{ direction => FROM_UNICODE, },
{
direction => FROM_UNICODE,
ucs => 0x203e, ucs => 0x203e,
code => 0x7e, code => 0x7e,
comment => '# OVERLINE', comment => '# OVERLINE',
f => $this_script, f => $this_script,
l => __LINE__ }, l => __LINE__
{ direction => FROM_UNICODE, },
{
direction => FROM_UNICODE,
ucs => 0x2212, ucs => 0x2212,
code => 0x817c, code => 0x817c,
comment => '# MINUS SIGN', comment => '# MINUS SIGN',
f => $this_script, f => $this_script,
l => __LINE__ }, l => __LINE__
{ direction => FROM_UNICODE, },
{
direction => FROM_UNICODE,
ucs => 0x301c, ucs => 0x301c,
code => 0x8160, code => 0x8160,
comment => '# WAVE DASH', comment => '# WAVE DASH',
f => $this_script, f => $this_script,
l => __LINE__ }); l => __LINE__
});
print_conversion_tables($this_script, "SJIS", $mapping); print_conversion_tables($this_script, "SJIS", $mapping);


@ -39,22 +39,26 @@ while (<$in>)
if ($code >= 0x80 && $ucs >= 0x0080) if ($code >= 0x80 && $ucs >= 0x0080)
{ {
push @mapping, push @mapping,
{ ucs => $ucs, {
ucs => $ucs,
code => $code, code => $code,
direction => BOTH, direction => BOTH,
f => $in_file, f => $in_file,
l => $. }; l => $.
};
} }
} }
close($in); close($in);
# One extra character that's not in the source file. # One extra character that's not in the source file.
push @mapping, push @mapping,
{ direction => BOTH, {
direction => BOTH,
code => 0xa2e8, code => 0xa2e8,
ucs => 0x327e, ucs => 0x327e,
comment => 'CIRCLED HANGUL IEUNG U', comment => 'CIRCLED HANGUL IEUNG U',
f => $this_script, f => $this_script,
l => __LINE__ }; l => __LINE__
};
print_conversion_tables($this_script, "UHC", \@mapping); print_conversion_tables($this_script, "UHC", \@mapping);


@ -18,7 +18,8 @@ use constant {
NONE => 0, NONE => 0,
TO_UNICODE => 1, TO_UNICODE => 1,
FROM_UNICODE => 2, FROM_UNICODE => 2,
BOTH => 3 }; BOTH => 3
};
####################################################################### #######################################################################
# read_source - common routine to read source file # read_source - common routine to read source file
@ -56,7 +57,8 @@ sub read_source
comment => $4, comment => $4,
direction => BOTH, direction => BOTH,
f => $fname, f => $fname,
l => $. }; l => $.
};
# Ignore pure ASCII mappings. PostgreSQL character conversion code # Ignore pure ASCII mappings. PostgreSQL character conversion code
# never even passes these to the conversion code. # never even passes these to the conversion code.
@ -370,9 +372,11 @@ sub print_radix_table
} }
unshift @segments, unshift @segments,
{ header => "Dummy map, for invalid values", {
header => "Dummy map, for invalid values",
min_idx => 0, min_idx => 0,
max_idx => $widest_range }; max_idx => $widest_range
};
### ###
### Eliminate overlapping zeros ### Eliminate overlapping zeros
@ -655,12 +659,14 @@ sub build_segments_recurse
if ($level == $depth) if ($level == $depth)
{ {
push @segments, push @segments,
{ header => $header . ", leaf: ${path}xx", {
header => $header . ", leaf: ${path}xx",
label => $label, label => $label,
level => $level, level => $level,
depth => $depth, depth => $depth,
path => $path, path => $path,
values => $map }; values => $map
};
} }
else else
{ {
@ -678,12 +684,14 @@ sub build_segments_recurse
} }
push @segments, push @segments,
{ header => $header . ", byte #$level: ${path}xx", {
header => $header . ", byte #$level: ${path}xx",
label => $label, label => $label,
level => $level, level => $level,
depth => $depth, depth => $depth,
path => $path, path => $path,
values => \%children }; values => \%children
};
} }
return @segments; return @segments;
} }
@ -776,7 +784,8 @@ sub make_charmap_combined
code => $c->{code}, code => $c->{code},
comment => $c->{comment}, comment => $c->{comment},
f => $c->{f}, f => $c->{f},
l => $c->{l} }; l => $c->{l}
};
push @combined, $entry; push @combined, $entry;
} }
} }


@ -73,8 +73,10 @@ sub run_check
create_files(); create_files();
command_ok( command_ok(
[ 'pg_archivecleanup', '-x', '.gz', $tempdir, [
$walfiles[2] . $suffix ], 'pg_archivecleanup', '-x', '.gz', $tempdir,
$walfiles[2] . $suffix
],
"$test_name: runs"); "$test_name: runs");
ok(!-f "$tempdir/$walfiles[0]", ok(!-f "$tempdir/$walfiles[0]",


@ -159,8 +159,10 @@ isnt(slurp_file("$tempdir/backup/backup_label"),
rmtree("$tempdir/backup"); rmtree("$tempdir/backup");
$node->command_ok( $node->command_ok(
[ 'pg_basebackup', '-D', "$tempdir/backup2", '--waldir', [
"$tempdir/xlog2" ], 'pg_basebackup', '-D', "$tempdir/backup2", '--waldir',
"$tempdir/xlog2"
],
'separate xlog directory'); 'separate xlog directory');
ok(-f "$tempdir/backup2/PG_VERSION", 'backup was created'); ok(-f "$tempdir/backup2/PG_VERSION", 'backup was created');
ok(-d "$tempdir/xlog2/", 'xlog directory was created'); ok(-d "$tempdir/xlog2/", 'xlog directory was created');
@ -179,8 +181,10 @@ $node->command_fails(
[ 'pg_basebackup', '-D', "$tempdir/backup_foo", '-Fp', "-T/foo=" ], [ 'pg_basebackup', '-D', "$tempdir/backup_foo", '-Fp', "-T/foo=" ],
'-T with empty new directory fails'); '-T with empty new directory fails');
$node->command_fails( $node->command_fails(
[ 'pg_basebackup', '-D', "$tempdir/backup_foo", '-Fp', [
"-T/foo=/bar=/baz" ], 'pg_basebackup', '-D', "$tempdir/backup_foo", '-Fp',
"-T/foo=/bar=/baz"
],
'-T with multiple = fails'); '-T with multiple = fails');
$node->command_fails( $node->command_fails(
[ 'pg_basebackup', '-D', "$tempdir/backup_foo", '-Fp', "-Tfoo=/bar" ], [ 'pg_basebackup', '-D', "$tempdir/backup_foo", '-Fp', "-Tfoo=/bar" ],
@ -279,8 +283,10 @@ SKIP:
'plain format with tablespaces fails without tablespace mapping'); 'plain format with tablespaces fails without tablespace mapping');
$node->command_ok( $node->command_ok(
[ 'pg_basebackup', '-D', "$tempdir/backup1", '-Fp', [
"-T$shorter_tempdir/tblspc1=$tempdir/tbackup/tblspc1" ], 'pg_basebackup', '-D', "$tempdir/backup1", '-Fp',
"-T$shorter_tempdir/tblspc1=$tempdir/tbackup/tblspc1"
],
'plain format with tablespaces succeeds with tablespace mapping'); 'plain format with tablespaces succeeds with tablespace mapping');
ok(-d "$tempdir/tbackup/tblspc1", 'tablespace was relocated'); ok(-d "$tempdir/tbackup/tblspc1", 'tablespace was relocated');
opendir(my $dh, "$pgdata/pg_tblspc") or die; opendir(my $dh, "$pgdata/pg_tblspc") or die;
@ -330,8 +336,10 @@ SKIP:
$node->safe_psql('postgres', $node->safe_psql('postgres',
"CREATE TABLESPACE tblspc2 LOCATION '$shorter_tempdir/tbl=spc2';"); "CREATE TABLESPACE tblspc2 LOCATION '$shorter_tempdir/tbl=spc2';");
$node->command_ok( $node->command_ok(
[ 'pg_basebackup', '-D', "$tempdir/backup3", '-Fp', [
"-T$shorter_tempdir/tbl\\=spc2=$tempdir/tbackup/tbl\\=spc2" ], 'pg_basebackup', '-D', "$tempdir/backup3", '-Fp',
"-T$shorter_tempdir/tbl\\=spc2=$tempdir/tbackup/tbl\\=spc2"
],
'mapping tablespace with = sign in path'); 'mapping tablespace with = sign in path');
ok(-d "$tempdir/tbackup/tbl=spc2", ok(-d "$tempdir/tbackup/tbl=spc2",
'tablespace with = sign was relocated'); 'tablespace with = sign was relocated');
@ -389,17 +397,21 @@ $node->command_ok(
ok(-f "$tempdir/backupxst/pg_wal.tar", "tar file was created"); ok(-f "$tempdir/backupxst/pg_wal.tar", "tar file was created");
rmtree("$tempdir/backupxst"); rmtree("$tempdir/backupxst");
$node->command_ok( $node->command_ok(
[ 'pg_basebackup', '-D', [
'pg_basebackup', '-D',
"$tempdir/backupnoslot", '-X', "$tempdir/backupnoslot", '-X',
'stream', '--no-slot' ], 'stream', '--no-slot'
],
'pg_basebackup -X stream runs with --no-slot'); 'pg_basebackup -X stream runs with --no-slot');
rmtree("$tempdir/backupnoslot"); rmtree("$tempdir/backupnoslot");
$node->command_fails( $node->command_fails(
[ 'pg_basebackup', '-D', [
'pg_basebackup', '-D',
"$tempdir/backupxs_sl_fail", '-X', "$tempdir/backupxs_sl_fail", '-X',
'stream', '-S', 'stream', '-S',
'slot0' ], 'slot0'
],
'pg_basebackup fails with nonexistent replication slot'); 'pg_basebackup fails with nonexistent replication slot');
$node->command_fails( $node->command_fails(
@ -407,10 +419,12 @@ $node->command_fails(
'pg_basebackup -C fails without slot name'); 'pg_basebackup -C fails without slot name');
$node->command_fails( $node->command_fails(
[ 'pg_basebackup', '-D', [
'pg_basebackup', '-D',
"$tempdir/backupxs_slot", '-C', "$tempdir/backupxs_slot", '-C',
'-S', 'slot0', '-S', 'slot0',
'--no-slot' ], '--no-slot'
],
'pg_basebackup fails with -C -S --no-slot'); 'pg_basebackup fails with -C -S --no-slot');
$node->command_ok( $node->command_ok(
@ -446,8 +460,10 @@ $node->command_fails(
[ 'pg_basebackup', '-D', "$tempdir/fail", '-S', 'slot1', '-X', 'none' ], [ 'pg_basebackup', '-D', "$tempdir/fail", '-S', 'slot1', '-X', 'none' ],
'pg_basebackup with replication slot fails without WAL streaming'); 'pg_basebackup with replication slot fails without WAL streaming');
$node->command_ok( $node->command_ok(
[ 'pg_basebackup', '-D', "$tempdir/backupxs_sl", '-X', [
'stream', '-S', 'slot1' ], 'pg_basebackup', '-D', "$tempdir/backupxs_sl", '-X',
'stream', '-S', 'slot1'
],
'pg_basebackup -X stream with replication slot runs'); 'pg_basebackup -X stream with replication slot runs');
$lsn = $node->safe_psql('postgres', $lsn = $node->safe_psql('postgres',
q{SELECT restart_lsn FROM pg_replication_slots WHERE slot_name = 'slot1'} q{SELECT restart_lsn FROM pg_replication_slots WHERE slot_name = 'slot1'}
@ -456,8 +472,10 @@ like($lsn, qr!^0/[0-9A-Z]{7,8}$!, 'restart LSN of slot has advanced');
rmtree("$tempdir/backupxs_sl"); rmtree("$tempdir/backupxs_sl");
$node->command_ok( $node->command_ok(
[ 'pg_basebackup', '-D', "$tempdir/backupxs_sl_R", '-X', [
'stream', '-S', 'slot1', '-R' ], 'pg_basebackup', '-D', "$tempdir/backupxs_sl_R", '-X',
'stream', '-S', 'slot1', '-R'
],
'pg_basebackup with replication slot and -R runs'); 'pg_basebackup with replication slot and -R runs');
like( like(
slurp_file("$tempdir/backupxs_sl_R/recovery.conf"), slurp_file("$tempdir/backupxs_sl_R/recovery.conf"),


@ -57,8 +57,10 @@ $primary->psql('postgres',
# Stream up to the given position. # Stream up to the given position.
$primary->command_ok( $primary->command_ok(
[ 'pg_receivewal', '-D', $stream_dir, '--verbose', [
'--endpos', $nextlsn, '--synchronous', '--no-loop' ], 'pg_receivewal', '-D', $stream_dir, '--verbose',
'--endpos', $nextlsn, '--synchronous', '--no-loop'
],
'streaming some WAL with --synchronous'); 'streaming some WAL with --synchronous');
# Permissions on WAL files should be default # Permissions on WAL files should be default


@ -29,15 +29,19 @@ $node->command_fails([ 'pg_recvlogical', '-S', 'test' ],
$node->command_fails([ 'pg_recvlogical', '-S', 'test', '-d', 'postgres' ], $node->command_fails([ 'pg_recvlogical', '-S', 'test', '-d', 'postgres' ],
'pg_recvlogical needs an action'); 'pg_recvlogical needs an action');
$node->command_fails( $node->command_fails(
[ 'pg_recvlogical', '-S', [
'pg_recvlogical', '-S',
'test', '-d', 'test', '-d',
$node->connstr('postgres'), '--start' ], $node->connstr('postgres'), '--start'
],
'no destination file'); 'no destination file');
$node->command_ok( $node->command_ok(
[ 'pg_recvlogical', '-S', [
'pg_recvlogical', '-S',
'test', '-d', 'test', '-d',
$node->connstr('postgres'), '--create-slot' ], $node->connstr('postgres'), '--create-slot'
],
'slot created'); 'slot created');
my $slot = $node->slot('test'); my $slot = $node->slot('test');
@ -51,6 +55,8 @@ my $nextlsn =
chomp($nextlsn); chomp($nextlsn);
$node->command_ok( $node->command_ok(
[ 'pg_recvlogical', '-S', 'test', '-d', $node->connstr('postgres'), [
'--start', '--endpos', "$nextlsn", '--no-loop', '-f', '-' ], 'pg_recvlogical', '-S', 'test', '-d', $node->connstr('postgres'),
'--start', '--endpos', "$nextlsn", '--no-loop', '-f', '-'
],
'replayed a transaction'); 'replayed a transaction');


@ -33,7 +33,9 @@ close $fh;
command_checks_all( command_checks_all(
[ 'pg_controldata', $node->data_dir ], [ 'pg_controldata', $node->data_dir ],
0, 0,
[ qr/WARNING: Calculated CRC checksum does not match value stored in file/, [
qr/WARNING: invalid WAL segment size/ ], qr/WARNING: Calculated CRC checksum does not match value stored in file/,
qr/WARNING: invalid WAL segment size/
],
[qr/^$/], [qr/^$/],
'pg_controldata with corrupted pg_control'); 'pg_controldata with corrupted pg_control');


@ -36,7 +36,8 @@ else
close $conf; close $conf;
my $ctlcmd = [ my $ctlcmd = [
'pg_ctl', 'start', '-D', "$tempdir/data", '-l', 'pg_ctl', 'start', '-D', "$tempdir/data", '-l',
"$TestLib::log_path/001_start_stop_server.log" ]; "$TestLib::log_path/001_start_stop_server.log"
];
if ($Config{osname} ne 'msys') if ($Config{osname} ne 'msys')
{ {
command_like($ctlcmd, qr/done.*server started/s, 'pg_ctl start'); command_like($ctlcmd, qr/done.*server started/s, 'pg_ctl start');

File diff suppressed because it is too large.


@ -34,9 +34,11 @@ $node->init(extra => [ '--locale=C', '--encoding=LATIN1' ]);
# prep pg_hba.conf and pg_ident.conf # prep pg_hba.conf and pg_ident.conf
$node->run_log( $node->run_log(
[ $ENV{PG_REGRESS}, '--config-auth', [
$ENV{PG_REGRESS}, '--config-auth',
$node->data_dir, '--create-role', $node->data_dir, '--create-role',
"$dbname1,$dbname2,$dbname3,$dbname4" ]); "$dbname1,$dbname2,$dbname3,$dbname4"
]);
$node->start; $node->start;
my $backupdir = $node->backup_dir; my $backupdir = $node->backup_dir;
@ -54,24 +56,32 @@ foreach my $dbname ($dbname1, $dbname2, $dbname3, $dbname4, 'CamelCase')
# For these tests, pg_dumpall -r is used because it produces a short # For these tests, pg_dumpall -r is used because it produces a short
# dump. # dump.
$node->command_ok( $node->command_ok(
[ 'pg_dumpall', '-r', '-f', $discard, '--dbname', [
'pg_dumpall', '-r', '-f', $discard, '--dbname',
$node->connstr($dbname1), $node->connstr($dbname1),
'-U', $dbname4 ], '-U', $dbname4
],
'pg_dumpall with long ASCII name 1'); 'pg_dumpall with long ASCII name 1');
$node->command_ok( $node->command_ok(
[ 'pg_dumpall', '--no-sync', '-r', '-f', $discard, '--dbname', [
'pg_dumpall', '--no-sync', '-r', '-f', $discard, '--dbname',
$node->connstr($dbname2), $node->connstr($dbname2),
'-U', $dbname3 ], '-U', $dbname3
],
'pg_dumpall with long ASCII name 2'); 'pg_dumpall with long ASCII name 2');
$node->command_ok( $node->command_ok(
[ 'pg_dumpall', '--no-sync', '-r', '-f', $discard, '--dbname', [
'pg_dumpall', '--no-sync', '-r', '-f', $discard, '--dbname',
$node->connstr($dbname3), $node->connstr($dbname3),
'-U', $dbname2 ], '-U', $dbname2
],
'pg_dumpall with long ASCII name 3'); 'pg_dumpall with long ASCII name 3');
$node->command_ok( $node->command_ok(
[ 'pg_dumpall', '--no-sync', '-r', '-f', $discard, '--dbname', [
'pg_dumpall', '--no-sync', '-r', '-f', $discard, '--dbname',
$node->connstr($dbname4), $node->connstr($dbname4),
'-U', $dbname1 ], '-U', $dbname1
],
'pg_dumpall with long ASCII name 4'); 'pg_dumpall with long ASCII name 4');
$node->command_ok( $node->command_ok(
[ 'pg_dumpall', '--no-sync', '-r', '-l', 'dbname=template1' ], [ 'pg_dumpall', '--no-sync', '-r', '-l', 'dbname=template1' ],
@ -91,8 +101,10 @@ $node->safe_psql($dbname1, 'CREATE TABLE t0()');
# XXX no printed message when this fails, just SIGPIPE termination # XXX no printed message when this fails, just SIGPIPE termination
$node->command_ok( $node->command_ok(
[ 'pg_dump', '-Fd', '--no-sync', '-j2', '-f', $dirfmt, '-U', $dbname1, [
$node->connstr($dbname1) ], 'pg_dump', '-Fd', '--no-sync', '-j2', '-f', $dirfmt, '-U', $dbname1,
$node->connstr($dbname1)
],
'parallel dump'); 'parallel dump');
# recreate $dbname1 for restore test # recreate $dbname1 for restore test
@ -106,9 +118,11 @@ $node->command_ok(
$node->run_log([ 'dropdb', $dbname1 ]); $node->run_log([ 'dropdb', $dbname1 ]);
$node->command_ok( $node->command_ok(
[ 'pg_restore', '-C', '-v', '-d', [
'pg_restore', '-C', '-v', '-d',
'template1', '-j2', '-U', $dbname1, 'template1', '-j2', '-U', $dbname1,
$dirfmt ], $dirfmt
],
'parallel restore with create'); 'parallel restore with create');
@ -127,9 +141,11 @@ my $envar_node = get_new_node('destination_envar');
$envar_node->init( $envar_node->init(
extra => [ '-U', $bootstrap_super, '--locale=C', '--encoding=LATIN1' ]); extra => [ '-U', $bootstrap_super, '--locale=C', '--encoding=LATIN1' ]);
$envar_node->run_log( $envar_node->run_log(
[ $ENV{PG_REGRESS}, '--config-auth', [
$ENV{PG_REGRESS}, '--config-auth',
$envar_node->data_dir, '--create-role', $envar_node->data_dir, '--create-role',
"$bootstrap_super,$restore_super" ]); "$bootstrap_super,$restore_super"
]);
$envar_node->start; $envar_node->start;
# make superuser for restore # make superuser for restore
@ -157,16 +173,20 @@ my $cmdline_node = get_new_node('destination_cmdline');
$cmdline_node->init( $cmdline_node->init(
extra => [ '-U', $bootstrap_super, '--locale=C', '--encoding=LATIN1' ]); extra => [ '-U', $bootstrap_super, '--locale=C', '--encoding=LATIN1' ]);
$cmdline_node->run_log( $cmdline_node->run_log(
[ $ENV{PG_REGRESS}, '--config-auth', [
$ENV{PG_REGRESS}, '--config-auth',
$cmdline_node->data_dir, '--create-role', $cmdline_node->data_dir, '--create-role',
"$bootstrap_super,$restore_super" ]); "$bootstrap_super,$restore_super"
]);
$cmdline_node->start; $cmdline_node->start;
$cmdline_node->run_log( $cmdline_node->run_log(
[ 'createuser', '-U', $bootstrap_super, '-s', $restore_super ]); [ 'createuser', '-U', $bootstrap_super, '-s', $restore_super ]);
{ {
$result = run_log( $result = run_log(
[ 'psql', '-p', $cmdline_node->port, '-U', [
$restore_super, '-X', '-f', $plain ], 'psql', '-p', $cmdline_node->port, '-U',
$restore_super, '-X', '-f', $plain
],
'2>', '2>',
\$stderr); \$stderr);
} }


@ -31,7 +31,8 @@ command_checks_all(
[ 'pg_resetwal', '-n', $node->data_dir ], [ 'pg_resetwal', '-n', $node->data_dir ],
0, 0,
[qr/pg_control version number/], [qr/pg_control version number/],
[ qr/pg_resetwal: pg_control exists but is broken or wrong version; ignoring it/ [
qr/pg_resetwal: pg_control exists but is broken or wrong version; ignoring it/
], ],
'processes corrupted pg_control all zeroes'); 'processes corrupted pg_control all zeroes');
@ -46,6 +47,7 @@ command_checks_all(
[ 'pg_resetwal', '-n', $node->data_dir ], [ 'pg_resetwal', '-n', $node->data_dir ],
0, 0,
[qr/pg_control version number/], [qr/pg_control version number/],
[ qr/\Qpg_resetwal: pg_control specifies invalid WAL segment size (0 bytes); proceed with caution\E/ [
qr/\Qpg_resetwal: pg_control specifies invalid WAL segment size (0 bytes); proceed with caution\E/
], ],
'processes zero WAL segment size'); 'processes zero WAL segment size');


@ -92,7 +92,8 @@ sub check_query
my $result = run [ my $result = run [
'psql', '-q', '-A', '-t', '--no-psqlrc', '-d', 'psql', '-q', '-A', '-t', '--no-psqlrc', '-d',
$node_master->connstr('postgres'), $node_master->connstr('postgres'),
'-c', $query ], '-c', $query
],
'>', \$stdout, '2>', \$stderr; '>', \$stdout, '2>', \$stderr;
# We don't use ok() for the exit code and stderr, because we want this # We don't use ok() for the exit code and stderr, because we want this
@ -214,10 +215,12 @@ sub run_pg_rewind
# Stop the master and be ready to perform the rewind # Stop the master and be ready to perform the rewind
$node_standby->stop; $node_standby->stop;
command_ok( command_ok(
[ 'pg_rewind', [
'pg_rewind',
"--debug", "--debug",
"--source-pgdata=$standby_pgdata", "--source-pgdata=$standby_pgdata",
"--target-pgdata=$master_pgdata" ], "--target-pgdata=$master_pgdata"
],
'pg_rewind local'); 'pg_rewind local');
} }
elsif ($test_mode eq "remote") elsif ($test_mode eq "remote")
@ -225,9 +228,11 @@ sub run_pg_rewind
# Do rewind using a remote connection as source # Do rewind using a remote connection as source
command_ok( command_ok(
[ 'pg_rewind', "--debug", [
'pg_rewind', "--debug",
"--source-server", $standby_connstr, "--source-server", $standby_connstr,
"--target-pgdata=$master_pgdata" ], "--target-pgdata=$master_pgdata"
],
'pg_rewind remote'); 'pg_rewind remote');
} }
else else


@ -66,7 +66,8 @@ sub run_test
@paths = sort @paths; @paths = sort @paths;
is_deeply( is_deeply(
\@paths, \@paths,
[ "$test_master_datadir/tst_both_dir", [
"$test_master_datadir/tst_both_dir",
"$test_master_datadir/tst_both_dir/both_file1", "$test_master_datadir/tst_both_dir/both_file1",
"$test_master_datadir/tst_both_dir/both_file2", "$test_master_datadir/tst_both_dir/both_file2",
"$test_master_datadir/tst_both_dir/both_subdir", "$test_master_datadir/tst_both_dir/both_subdir",


@ -59,8 +59,10 @@ pgbench(
[qr{processed: 125/125}], [qr{processed: 125/125}],
[qr{^$}], [qr{^$}],
'concurrency OID generation', 'concurrency OID generation',
{ '001_pgbench_concurrent_oid_generation' => {
'INSERT INTO oid_tbl SELECT FROM generate_series(1,1000);' }); '001_pgbench_concurrent_oid_generation' =>
'INSERT INTO oid_tbl SELECT FROM generate_series(1,1000);'
});
# cleanup # cleanup
$node->safe_psql('postgres', 'DROP TABLE oid_tbl;'); $node->safe_psql('postgres', 'DROP TABLE oid_tbl;');
@ -70,8 +72,10 @@ pgbench(
'no-such-database', 'no-such-database',
1, 1,
[qr{^$}], [qr{^$}],
[ qr{connection to database "no-such-database" failed}, [
qr{FATAL: database "no-such-database" does not exist} ], qr{connection to database "no-such-database" failed},
qr{FATAL: database "no-such-database" does not exist}
],
'no such database'); 'no such database');
pgbench( pgbench(
@ -83,8 +87,10 @@ pgbench(
pgbench( pgbench(
'-i', 0, '-i', 0,
[qr{^$}], [qr{^$}],
[ qr{creating tables}, qr{vacuuming}, [
qr{creating primary keys}, qr{done\.} ], qr{creating tables}, qr{vacuuming},
qr{creating primary keys}, qr{done\.}
],
'pgbench scale 1 initialization',); 'pgbench scale 1 initialization',);
# Again, with all possible options # Again, with all possible options
@ -92,12 +98,14 @@ pgbench(
'--initialize --init-steps=dtpvg --scale=1 --unlogged-tables --fillfactor=98 --foreign-keys --quiet --tablespace=pg_default --index-tablespace=pg_default', '--initialize --init-steps=dtpvg --scale=1 --unlogged-tables --fillfactor=98 --foreign-keys --quiet --tablespace=pg_default --index-tablespace=pg_default',
0, 0,
[qr{^$}i], [qr{^$}i],
[ qr{dropping old tables}, [
qr{dropping old tables},
qr{creating tables}, qr{creating tables},
qr{vacuuming}, qr{vacuuming},
qr{creating primary keys}, qr{creating primary keys},
qr{creating foreign keys}, qr{creating foreign keys},
qr{done\.} ], qr{done\.}
],
'pgbench scale 1 initialization'); 'pgbench scale 1 initialization');
# Test interaction of --init-steps with legacy step-selection options # Test interaction of --init-steps with legacy step-selection options
@ -105,12 +113,14 @@ pgbench(
'--initialize --init-steps=dtpvgvv --no-vacuum --foreign-keys --unlogged-tables', '--initialize --init-steps=dtpvgvv --no-vacuum --foreign-keys --unlogged-tables',
0, 0,
[qr{^$}], [qr{^$}],
[ qr{dropping old tables}, [
qr{dropping old tables},
qr{creating tables}, qr{creating tables},
qr{creating primary keys}, qr{creating primary keys},
qr{.* of .* tuples \(.*\) done}, qr{.* of .* tuples \(.*\) done},
qr{creating foreign keys}, qr{creating foreign keys},
qr{done\.} ], qr{done\.}
],
'pgbench --init-steps'); 'pgbench --init-steps');
# Run all builtin scripts, for a few transactions each # Run all builtin scripts, for a few transactions each
@ -118,34 +128,42 @@ pgbench(
'--transactions=5 -Dfoo=bla --client=2 --protocol=simple --builtin=t' '--transactions=5 -Dfoo=bla --client=2 --protocol=simple --builtin=t'
. ' --connect -n -v -n', . ' --connect -n -v -n',
0, 0,
[ qr{builtin: TPC-B}, [
qr{builtin: TPC-B},
qr{clients: 2\b}, qr{clients: 2\b},
qr{processed: 10/10}, qr{processed: 10/10},
qr{mode: simple} ], qr{mode: simple}
],
[qr{^$}], [qr{^$}],
'pgbench tpcb-like'); 'pgbench tpcb-like');
pgbench( pgbench(
'--transactions=20 --client=5 -M extended --builtin=si -C --no-vacuum -s 1', '--transactions=20 --client=5 -M extended --builtin=si -C --no-vacuum -s 1',
0, 0,
[ qr{builtin: simple update}, [
qr{builtin: simple update},
qr{clients: 5\b}, qr{clients: 5\b},
qr{threads: 1\b}, qr{threads: 1\b},
qr{processed: 100/100}, qr{processed: 100/100},
qr{mode: extended} ], qr{mode: extended}
],
[qr{scale option ignored}], [qr{scale option ignored}],
'pgbench simple update'); 'pgbench simple update');
pgbench( pgbench(
'-t 100 -c 7 -M prepared -b se --debug', '-t 100 -c 7 -M prepared -b se --debug',
0, 0,
[ qr{builtin: select only}, [
qr{builtin: select only},
qr{clients: 7\b}, qr{clients: 7\b},
qr{threads: 1\b}, qr{threads: 1\b},
qr{processed: 700/700}, qr{processed: 700/700},
qr{mode: prepared} ], qr{mode: prepared}
[ qr{vacuum}, qr{client 0}, qr{client 1}, qr{sending}, ],
qr{receiving}, qr{executing} ], [
qr{vacuum}, qr{client 0}, qr{client 1}, qr{sending},
qr{receiving}, qr{executing}
],
'pgbench select only'); 'pgbench select only');
# check if threads are supported # check if threads are supported
@ -161,16 +179,19 @@ my $nthreads = 2;
pgbench( pgbench(
"-t 100 -c 1 -j $nthreads -M prepared -n", "-t 100 -c 1 -j $nthreads -M prepared -n",
0, 0,
[ qr{type: multiple scripts}, [
qr{type: multiple scripts},
qr{mode: prepared}, qr{mode: prepared},
qr{script 1: .*/001_pgbench_custom_script_1}, qr{script 1: .*/001_pgbench_custom_script_1},
qr{weight: 2}, qr{weight: 2},
qr{script 2: .*/001_pgbench_custom_script_2}, qr{script 2: .*/001_pgbench_custom_script_2},
qr{weight: 1}, qr{weight: 1},
qr{processed: 100/100} ], qr{processed: 100/100}
],
[qr{^$}], [qr{^$}],
'pgbench custom scripts', 'pgbench custom scripts',
{ '001_pgbench_custom_script_1@1' => q{-- select only {
'001_pgbench_custom_script_1@1' => q{-- select only
\set aid random(1, :scale * 100000) \set aid random(1, :scale * 100000)
SELECT abalance::INTEGER AS balance SELECT abalance::INTEGER AS balance
FROM pgbench_accounts FROM pgbench_accounts
@ -182,41 +203,50 @@ BEGIN;
-- cast are needed for typing under -M prepared -- cast are needed for typing under -M prepared
SELECT :foo::INT + :scale::INT * :client_id::INT AS bla; SELECT :foo::INT + :scale::INT * :client_id::INT AS bla;
COMMIT; COMMIT;
} }); }
});
pgbench( pgbench(
'-n -t 10 -c 1 -M simple', '-n -t 10 -c 1 -M simple',
0, 0,
[ qr{type: .*/001_pgbench_custom_script_3}, [
qr{type: .*/001_pgbench_custom_script_3},
qr{processed: 10/10}, qr{processed: 10/10},
qr{mode: simple} ], qr{mode: simple}
],
[qr{^$}], [qr{^$}],
'pgbench custom script', 'pgbench custom script',
{ '001_pgbench_custom_script_3' => q{-- select only variant {
'001_pgbench_custom_script_3' => q{-- select only variant
\set aid random(1, :scale * 100000) \set aid random(1, :scale * 100000)
BEGIN; BEGIN;
SELECT abalance::INTEGER AS balance SELECT abalance::INTEGER AS balance
FROM pgbench_accounts FROM pgbench_accounts
WHERE aid=:aid; WHERE aid=:aid;
COMMIT; COMMIT;
} }); }
});
pgbench( pgbench(
'-n -t 10 -c 2 -M extended', '-n -t 10 -c 2 -M extended',
0, 0,
[ qr{type: .*/001_pgbench_custom_script_4}, [
qr{type: .*/001_pgbench_custom_script_4},
qr{processed: 20/20}, qr{processed: 20/20},
qr{mode: extended} ], qr{mode: extended}
],
[qr{^$}], [qr{^$}],
'pgbench custom script', 'pgbench custom script',
{ '001_pgbench_custom_script_4' => q{-- select only variant {
'001_pgbench_custom_script_4' => q{-- select only variant
\set aid random(1, :scale * 100000) \set aid random(1, :scale * 100000)
BEGIN; BEGIN;
SELECT abalance::INTEGER AS balance SELECT abalance::INTEGER AS balance
FROM pgbench_accounts FROM pgbench_accounts
WHERE aid=:aid; WHERE aid=:aid;
COMMIT; COMMIT;
} }); }
});
# test expressions # test expressions
# command 1..3 and 23 depend on random seed which is used to call srandom. # command 1..3 and 23 depend on random seed which is used to call srandom.
@ -224,7 +254,8 @@ pgbench(
'--random-seed=5432 -t 1 -Dfoo=-10.1 -Dbla=false -Di=+3 -Dminint=-9223372036854775808 -Dn=null -Dt=t -Df=of -Dd=1.0', '--random-seed=5432 -t 1 -Dfoo=-10.1 -Dbla=false -Di=+3 -Dminint=-9223372036854775808 -Dn=null -Dt=t -Df=of -Dd=1.0',
0, 0,
[ qr{type: .*/001_pgbench_expressions}, qr{processed: 1/1} ], [ qr{type: .*/001_pgbench_expressions}, qr{processed: 1/1} ],
[ qr{setting random seed to 5432\b}, [
qr{setting random seed to 5432\b},
# After explicit seeding, the four * random checks (1-3,20) should be # After explicit seeding, the four * random checks (1-3,20) should be
# deterministic, but not necessarily portable. # deterministic, but not necessarily portable.
@ -289,7 +320,8 @@ pgbench(
qr{command=98.: int 5432\b}, # :random_seed qr{command=98.: int 5432\b}, # :random_seed
], ],
'pgbench expressions', 'pgbench expressions',
{ '001_pgbench_expressions' => q{-- integer functions {
'001_pgbench_expressions' => q{-- integer functions
\set i1 debug(random(10, 19)) \set i1 debug(random(10, 19))
\set i2 debug(random_exponential(100, 199, 10.0)) \set i2 debug(random_exponential(100, 199, 10.0))
\set i3 debug(random_gaussian(1000, 1999, 10.0)) \set i3 debug(random_gaussian(1000, 1999, 10.0))
@ -411,7 +443,8 @@ SELECT :v0, :v1, :v2, :v3;
\set sc debug(:scale) \set sc debug(:scale)
\set ci debug(:client_id) \set ci debug(:client_id)
\set rs debug(:random_seed) \set rs debug(:random_seed)
} }); }
});
# random determinism when seeded # random determinism when seeded
$node->safe_psql('postgres', $node->safe_psql('postgres',
@ -428,7 +461,8 @@ for my $i (1, 2)
[qr{processed: 1/1}], [qr{processed: 1/1}],
[qr{setting random seed to $seed\b}], [qr{setting random seed to $seed\b}],
"random seeded with $seed", "random seeded with $seed",
{ "001_pgbench_random_seed_$i" => q{-- test random functions {
"001_pgbench_random_seed_$i" => q{-- test random functions
\set ur random(1000, 1999) \set ur random(1000, 1999)
\set er random_exponential(2000, 2999, 2.0) \set er random_exponential(2000, 2999, 2.0)
\set gr random_gaussian(3000, 3999, 3.0) \set gr random_gaussian(3000, 3999, 3.0)
@ -438,7 +472,8 @@ INSERT INTO seeded_random(seed, rand, val) VALUES
(:random_seed, 'exponential', :er), (:random_seed, 'exponential', :er),
(:random_seed, 'gaussian', :gr), (:random_seed, 'gaussian', :gr),
(:random_seed, 'zipfian', :zr); (:random_seed, 'zipfian', :zr);
} }); }
});
} }
# check that all runs generated the same 4 values # check that all runs generated the same 4 values
@ -462,12 +497,15 @@ $node->safe_psql('postgres', 'DROP TABLE seeded_random;');
# backslash commands # backslash commands
pgbench( pgbench(
'-t 1', 0, '-t 1', 0,
[ qr{type: .*/001_pgbench_backslash_commands}, [
qr{type: .*/001_pgbench_backslash_commands},
qr{processed: 1/1}, qr{processed: 1/1},
qr{shell-echo-output} ], qr{shell-echo-output}
],
[qr{command=8.: int 2\b}], [qr{command=8.: int 2\b}],
'pgbench backslash commands', 'pgbench backslash commands',
{ '001_pgbench_backslash_commands' => q{-- run set {
'001_pgbench_backslash_commands' => q{-- run set
\set zero 0 \set zero 0
\set one 1.0 \set one 1.0
-- sleep -- sleep
@ -482,36 +520,48 @@ pgbench(
\set n debug(:two) \set n debug(:two)
-- shell -- shell
\shell echo shell-echo-output \shell echo shell-echo-output
} }); }
});
# trigger many expression errors # trigger many expression errors
my @errors = ( my @errors = (
# [ test name, script number, status, stderr match ] # [ test name, script number, status, stderr match ]
# SQL # SQL
[ 'sql syntax error', [
'sql syntax error',
0, 0,
[ qr{ERROR: syntax error}, [
qr{prepared statement .* does not exist} ], qr{ERROR: syntax error},
qr{prepared statement .* does not exist}
],
q{-- SQL syntax error q{-- SQL syntax error
SELECT 1 + ; SELECT 1 + ;
} ], }
[ 'sql too many args', 1, [qr{statement has too many arguments.*\b9\b}], ],
[
'sql too many args', 1, [qr{statement has too many arguments.*\b9\b}],
q{-- MAX_ARGS=10 for prepared q{-- MAX_ARGS=10 for prepared
\set i 0 \set i 0
SELECT LEAST(:i, :i, :i, :i, :i, :i, :i, :i, :i, :i, :i); SELECT LEAST(:i, :i, :i, :i, :i, :i, :i, :i, :i, :i, :i);
} ], }
],
# SHELL # SHELL
[ 'shell bad command', 0, [
[qr{\(shell\) .* meta-command failed}], q{\shell no-such-command} ], 'shell bad command', 0,
[ 'shell undefined variable', 0, [qr{\(shell\) .* meta-command failed}], q{\shell no-such-command}
],
[
'shell undefined variable', 0,
[qr{undefined variable ":nosuchvariable"}], [qr{undefined variable ":nosuchvariable"}],
q{-- undefined variable in shell q{-- undefined variable in shell
\shell echo ::foo :nosuchvariable \shell echo ::foo :nosuchvariable
} ], }
],
[ 'shell missing command', 1, [qr{missing command }], q{\shell} ], [ 'shell missing command', 1, [qr{missing command }], q{\shell} ],
[ 'shell too many args', 1, [qr{too many arguments in command "shell"}], [
'shell too many args', 1, [qr{too many arguments in command "shell"}],
q{-- 257 arguments to \shell q{-- 257 arguments to \shell
\shell echo \ \shell echo \
0 1 2 3 4 5 6 7 8 9 A B C D E F \ 0 1 2 3 4 5 6 7 8 9 A B C D E F \
@ -530,95 +580,154 @@ SELECT LEAST(:i, :i, :i, :i, :i, :i, :i, :i, :i, :i, :i);
0 1 2 3 4 5 6 7 8 9 A B C D E F \ 0 1 2 3 4 5 6 7 8 9 A B C D E F \
0 1 2 3 4 5 6 7 8 9 A B C D E F \ 0 1 2 3 4 5 6 7 8 9 A B C D E F \
0 1 2 3 4 5 6 7 8 9 A B C D E F 0 1 2 3 4 5 6 7 8 9 A B C D E F
} ], }
],
# SET # SET
[ 'set syntax error', 1, [
[qr{syntax error in command "set"}], q{\set i 1 +} ], 'set syntax error', 1,
[ 'set no such function', 1, [qr{syntax error in command "set"}], q{\set i 1 +}
[qr{unexpected function name}], q{\set i noSuchFunction()} ], ],
[ 'set invalid variable name', 0, [
[qr{invalid variable name}], q{\set . 1} ], 'set no such function', 1,
[ 'set int overflow', 0, [qr{unexpected function name}], q{\set i noSuchFunction()}
[qr{double to int overflow for 100}], q{\set i int(1E32)} ], ],
[
'set invalid variable name', 0,
[qr{invalid variable name}], q{\set . 1}
],
[
'set int overflow', 0,
[qr{double to int overflow for 100}], q{\set i int(1E32)}
],
[ 'set division by zero', 0, [qr{division by zero}], q{\set i 1/0} ], [ 'set division by zero', 0, [qr{division by zero}], q{\set i 1/0} ],
[ 'set bigint out of range', 0, [
[qr{bigint out of range}], q{\set i 9223372036854775808 / -1} ], 'set bigint out of range', 0,
[ 'set undefined variable', [qr{bigint out of range}], q{\set i 9223372036854775808 / -1}
],
[
'set undefined variable',
0, 0,
[qr{undefined variable "nosuchvariable"}], [qr{undefined variable "nosuchvariable"}],
q{\set i :nosuchvariable} ], q{\set i :nosuchvariable}
],
[ 'set unexpected char', 1, [qr{unexpected character .;.}], q{\set i ;} ], [ 'set unexpected char', 1, [qr{unexpected character .;.}], q{\set i ;} ],
[ 'set too many args', [
'set too many args',
0, 0,
[qr{too many function arguments}], [qr{too many function arguments}],
q{\set i least(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)} ], q{\set i least(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)}
[ 'set empty random range', 0, ],
[qr{empty range given to random}], q{\set i random(5,3)} ], [
[ 'set random range too large', 'set empty random range', 0,
[qr{empty range given to random}], q{\set i random(5,3)}
],
[
'set random range too large',
0, 0,
[qr{random range is too large}], [qr{random range is too large}],
q{\set i random(-9223372036854775808, 9223372036854775807)} ], q{\set i random(-9223372036854775808, 9223372036854775807)}
[ 'set gaussian param too small', ],
[
'set gaussian param too small',
0, 0,
[qr{gaussian param.* at least 2}], [qr{gaussian param.* at least 2}],
q{\set i random_gaussian(0, 10, 1.0)} ], q{\set i random_gaussian(0, 10, 1.0)}
[ 'set exponential param greater 0', ],
[
'set exponential param greater 0',
0, 0,
[qr{exponential parameter must be greater }], [qr{exponential parameter must be greater }],
q{\set i random_exponential(0, 10, 0.0)} ], q{\set i random_exponential(0, 10, 0.0)}
[ 'set zipfian param to 1', ],
[
'set zipfian param to 1',
0, 0,
[qr{zipfian parameter must be in range \(0, 1\) U \(1, \d+\]}], [qr{zipfian parameter must be in range \(0, 1\) U \(1, \d+\]}],
q{\set i random_zipfian(0, 10, 1)} ], q{\set i random_zipfian(0, 10, 1)}
[ 'set zipfian param too large', ],
[
'set zipfian param too large',
0, 0,
[qr{zipfian parameter must be in range \(0, 1\) U \(1, \d+\]}], [qr{zipfian parameter must be in range \(0, 1\) U \(1, \d+\]}],
q{\set i random_zipfian(0, 10, 1000000)} ], q{\set i random_zipfian(0, 10, 1000000)}
[ 'set non numeric value', 0, ],
[qr{malformed variable "foo" value: "bla"}], q{\set i :foo + 1} ], [
'set non numeric value', 0,
[qr{malformed variable "foo" value: "bla"}], q{\set i :foo + 1}
],
[ 'set no expression', 1, [qr{syntax error}], q{\set i} ], [ 'set no expression', 1, [qr{syntax error}], q{\set i} ],
[ 'set missing argument', 1, [qr{missing argument}i], q{\set} ], [ 'set missing argument', 1, [qr{missing argument}i], q{\set} ],
[ 'set not a bool', 0, [
[qr{cannot coerce double to boolean}], q{\set b NOT 0.0} ], 'set not a bool', 0,
[ 'set not an int', 0, [qr{cannot coerce double to boolean}], q{\set b NOT 0.0}
[qr{cannot coerce boolean to int}], q{\set i TRUE + 2} ], ],
[ 'set not a double', 0, [
[qr{cannot coerce boolean to double}], q{\set d ln(TRUE)} ], 'set not an int', 0,
[ 'set case error', [qr{cannot coerce boolean to int}], q{\set i TRUE + 2}
],
[
'set not a double', 0,
[qr{cannot coerce boolean to double}], q{\set d ln(TRUE)}
],
[
'set case error',
1, 1,
[qr{syntax error in command "set"}], [qr{syntax error in command "set"}],
q{\set i CASE TRUE THEN 1 ELSE 0 END} ], q{\set i CASE TRUE THEN 1 ELSE 0 END}
[ 'set random error', 0, ],
[qr{cannot coerce boolean to int}], q{\set b random(FALSE, TRUE)} ], [
[ 'set number of args mismatch', 1, 'set random error', 0,
[qr{unexpected number of arguments}], q{\set d ln(1.0, 2.0))} ], [qr{cannot coerce boolean to int}], q{\set b random(FALSE, TRUE)}
[ 'set at least one arg', 1, ],
[qr{at least one argument expected}], q{\set i greatest())} ], [
'set number of args mismatch', 1,
[qr{unexpected number of arguments}], q{\set d ln(1.0, 2.0))}
],
[
'set at least one arg', 1,
[qr{at least one argument expected}], q{\set i greatest())}
],
# SETSHELL # SETSHELL
[ 'setshell not an int', 0, [
[qr{command must return an integer}], q{\setshell i echo -n one} ], 'setshell not an int', 0,
[qr{command must return an integer}], q{\setshell i echo -n one}
],
[ 'setshell missing arg', 1, [qr{missing argument }], q{\setshell var} ], [ 'setshell missing arg', 1, [qr{missing argument }], q{\setshell var} ],
[ 'setshell no such command', 0, [
[qr{could not read result }], q{\setshell var no-such-command} ], 'setshell no such command', 0,
[qr{could not read result }], q{\setshell var no-such-command}
],
# SLEEP # SLEEP
[ 'sleep undefined variable', 0, [
[qr{sleep: undefined variable}], q{\sleep :nosuchvariable} ], 'sleep undefined variable', 0,
[ 'sleep too many args', 1, [qr{sleep: undefined variable}], q{\sleep :nosuchvariable}
[qr{too many arguments}], q{\sleep too many args} ], ],
[ 'sleep missing arg', 1, [
[ qr{missing argument}, qr{\\sleep} ], q{\sleep} ], 'sleep too many args', 1,
[ 'sleep unknown unit', 1, [qr{too many arguments}], q{\sleep too many args}
[qr{unrecognized time unit}], q{\sleep 1 week} ], ],
[
'sleep missing arg', 1,
[ qr{missing argument}, qr{\\sleep} ], q{\sleep}
],
[
'sleep unknown unit', 1,
[qr{unrecognized time unit}], q{\sleep 1 week}
],
# MISC # MISC
[ 'misc invalid backslash command', 1, [
[qr{invalid command .* "nosuchcommand"}], q{\nosuchcommand} ], 'misc invalid backslash command', 1,
[qr{invalid command .* "nosuchcommand"}], q{\nosuchcommand}
],
[ 'misc empty script', 1, [qr{empty command list for script}], q{} ], [ 'misc empty script', 1, [qr{empty command list for script}], q{} ],
[ 'bad boolean', 0, [
[qr{malformed variable.*trueXXX}], q{\set b :badtrue or true} ],); 'bad boolean', 0,
[qr{malformed variable.*trueXXX}], q{\set b :badtrue or true}
],);
for my $e (@errors) for my $e (@errors)
@ -641,7 +750,8 @@ pgbench(
[ qr{processed: 1/1}, qr{zipfian cache array overflowed 1 time\(s\)} ], [ qr{processed: 1/1}, qr{zipfian cache array overflowed 1 time\(s\)} ],
[qr{^}], [qr{^}],
'pgbench zipfian array overflow on random_zipfian', 'pgbench zipfian array overflow on random_zipfian',
{ '001_pgbench_random_zipfian' => q{ {
'001_pgbench_random_zipfian' => q{
\set i random_zipfian(1, 100, 0.5) \set i random_zipfian(1, 100, 0.5)
\set i random_zipfian(2, 100, 0.5) \set i random_zipfian(2, 100, 0.5)
\set i random_zipfian(3, 100, 0.5) \set i random_zipfian(3, 100, 0.5)
@ -658,7 +768,8 @@ pgbench(
\set i random_zipfian(14, 100, 0.5) \set i random_zipfian(14, 100, 0.5)
\set i random_zipfian(15, 100, 0.5) \set i random_zipfian(15, 100, 0.5)
\set i random_zipfian(16, 100, 0.5) \set i random_zipfian(16, 100, 0.5)
} }); }
});
# throttling # throttling
pgbench( pgbench(
@ -673,9 +784,11 @@ pgbench(
# given the expected rate and the 2 ms tx duration, at most one is executed # given the expected rate and the 2 ms tx duration, at most one is executed
'-t 10 --rate=100000 --latency-limit=1 -n -r', '-t 10 --rate=100000 --latency-limit=1 -n -r',
0, 0,
[ qr{processed: [01]/10}, [
qr{processed: [01]/10},
qr{type: .*/001_pgbench_sleep}, qr{type: .*/001_pgbench_sleep},
qr{above the 1.0 ms latency limit: [01]/} ], qr{above the 1.0 ms latency limit: [01]/}
],
[qr{^$}i], [qr{^$}i],
'pgbench late throttling', 'pgbench late throttling',
{ '001_pgbench_sleep' => q{\sleep 2ms} }); { '001_pgbench_sleep' => q{\sleep 2ms} });
View File
@ -57,81 +57,126 @@ sub pgbench_scripts
my @options = ( my @options = (
# name, options, stderr checks # name, options, stderr checks
[ 'bad option', [
'bad option',
'-h home -p 5432 -U calvin -d --bad-option', '-h home -p 5432 -U calvin -d --bad-option',
[ qr{(unrecognized|illegal) option}, qr{--help.*more information} ] ], [ qr{(unrecognized|illegal) option}, qr{--help.*more information} ]
[ 'no file', ],
[
'no file',
'-f no-such-file', '-f no-such-file',
[qr{could not open file "no-such-file":}] ], [qr{could not open file "no-such-file":}]
[ 'no builtin', ],
[
'no builtin',
'-b no-such-builtin', '-b no-such-builtin',
[qr{no builtin script .* "no-such-builtin"}] ], [qr{no builtin script .* "no-such-builtin"}]
[ 'invalid weight', ],
[
'invalid weight',
'--builtin=select-only@one', '--builtin=select-only@one',
[qr{invalid weight specification: \@one}] ], [qr{invalid weight specification: \@one}]
[ 'invalid weight', ],
[
'invalid weight',
'-b select-only@-1', '-b select-only@-1',
[qr{weight spec.* out of range .*: -1}] ], [qr{weight spec.* out of range .*: -1}]
],
[ 'too many scripts', '-S ' x 129, [qr{at most 128 SQL scripts}] ], [ 'too many scripts', '-S ' x 129, [qr{at most 128 SQL scripts}] ],
[ 'bad #clients', '-c three', [qr{invalid number of clients: "three"}] ], [ 'bad #clients', '-c three', [qr{invalid number of clients: "three"}] ],
[ 'bad #threads', '-j eleven', [qr{invalid number of threads: "eleven"}] [
'bad #threads', '-j eleven', [qr{invalid number of threads: "eleven"}]
], ],
[ 'bad scale', '-i -s two', [qr{invalid scaling factor: "two"}] ], [ 'bad scale', '-i -s two', [qr{invalid scaling factor: "two"}] ],
[ 'invalid #transactions', [
'invalid #transactions',
'-t zil', '-t zil',
[qr{invalid number of transactions: "zil"}] ], [qr{invalid number of transactions: "zil"}]
],
[ 'invalid duration', '-T ten', [qr{invalid duration: "ten"}] ], [ 'invalid duration', '-T ten', [qr{invalid duration: "ten"}] ],
[ '-t XOR -T', [
'-t XOR -T',
'-N -l --aggregate-interval=5 --log-prefix=notused -t 1000 -T 1', '-N -l --aggregate-interval=5 --log-prefix=notused -t 1000 -T 1',
[qr{specify either }] ], [qr{specify either }]
[ '-T XOR -t', ],
[
'-T XOR -t',
'-P 1 --progress-timestamp -l --sampling-rate=0.001 -T 10 -t 1000', '-P 1 --progress-timestamp -l --sampling-rate=0.001 -T 10 -t 1000',
[qr{specify either }] ], [qr{specify either }]
],
[ 'bad variable', '--define foobla', [qr{invalid variable definition}] ], [ 'bad variable', '--define foobla', [qr{invalid variable definition}] ],
[ 'invalid fillfactor', '-F 1', [qr{invalid fillfactor}] ], [ 'invalid fillfactor', '-F 1', [qr{invalid fillfactor}] ],
[ 'invalid query mode', '-M no-such-mode', [qr{invalid query mode}] ], [ 'invalid query mode', '-M no-such-mode', [qr{invalid query mode}] ],
[ 'invalid progress', '--progress=0', [
[qr{invalid thread progress delay}] ], 'invalid progress', '--progress=0',
[qr{invalid thread progress delay}]
],
[ 'invalid rate', '--rate=0.0', [qr{invalid rate limit}] ], [ 'invalid rate', '--rate=0.0', [qr{invalid rate limit}] ],
[ 'invalid latency', '--latency-limit=0.0', [qr{invalid latency limit}] ], [ 'invalid latency', '--latency-limit=0.0', [qr{invalid latency limit}] ],
[ 'invalid sampling rate', '--sampling-rate=0', [
[qr{invalid sampling rate}] ], 'invalid sampling rate', '--sampling-rate=0',
[ 'invalid aggregate interval', '--aggregate-interval=-3', [qr{invalid sampling rate}]
[qr{invalid .* seconds for}] ], ],
[ 'weight zero', [
'invalid aggregate interval', '--aggregate-interval=-3',
[qr{invalid .* seconds for}]
],
[
'weight zero',
'-b se@0 -b si@0 -b tpcb@0', '-b se@0 -b si@0 -b tpcb@0',
[qr{weight must not be zero}] ], [qr{weight must not be zero}]
],
[ 'init vs run', '-i -S', [qr{cannot be used in initialization}] ], [ 'init vs run', '-i -S', [qr{cannot be used in initialization}] ],
[ 'run vs init', '-S -F 90', [qr{cannot be used in benchmarking}] ], [ 'run vs init', '-S -F 90', [qr{cannot be used in benchmarking}] ],
[ 'ambiguous builtin', '-b s', [qr{ambiguous}] ], [ 'ambiguous builtin', '-b s', [qr{ambiguous}] ],
[ '--progress-timestamp => --progress', '--progress-timestamp', [
[qr{allowed only under}] ], '--progress-timestamp => --progress', '--progress-timestamp',
[ '-I without init option', [qr{allowed only under}]
],
[
'-I without init option',
'-I dtg', '-I dtg',
[qr{cannot be used in benchmarking mode}] ], [qr{cannot be used in benchmarking mode}]
[ 'invalid init step', ],
[
'invalid init step',
'-i -I dta', '-i -I dta',
[ qr{unrecognized initialization step}, qr{allowed steps are} ] ], [ qr{unrecognized initialization step}, qr{allowed steps are} ]
[ 'bad random seed', ],
[
'bad random seed',
'--random-seed=one', '--random-seed=one',
[ qr{unrecognized random seed option "one": expecting an unsigned integer, "time" or "rand"}, [
qr{error while setting random seed from --random-seed option} ] ], qr{unrecognized random seed option "one": expecting an unsigned integer, "time" or "rand"},
qr{error while setting random seed from --random-seed option}
]
],
# loging sub-options # loging sub-options
[ 'sampling => log', '--sampling-rate=0.01', [
[qr{log sampling .* only when}] ], 'sampling => log', '--sampling-rate=0.01',
[ 'sampling XOR aggregate', [qr{log sampling .* only when}]
],
[
'sampling XOR aggregate',
'-l --sampling-rate=0.1 --aggregate-interval=3', '-l --sampling-rate=0.1 --aggregate-interval=3',
[qr{sampling .* aggregation .* cannot be used at the same time}] ], [qr{sampling .* aggregation .* cannot be used at the same time}]
[ 'aggregate => log', '--aggregate-interval=3', ],
[qr{aggregation .* only when}] ], [
'aggregate => log', '--aggregate-interval=3',
[qr{aggregation .* only when}]
],
[ 'log-prefix => log', '--log-prefix=x', [qr{prefix .* only when}] ], [ 'log-prefix => log', '--log-prefix=x', [qr{prefix .* only when}] ],
[ 'duration & aggregation', [
'duration & aggregation',
'-l -T 1 --aggregate-interval=3', '-l -T 1 --aggregate-interval=3',
[qr{aggr.* not be higher}] ], [qr{aggr.* not be higher}]
[ 'duration % aggregation', ],
[
'duration % aggregation',
'-l -T 5 --aggregate-interval=3', '-l -T 5 --aggregate-interval=3',
[qr{multiple}] ],); [qr{multiple}]
],);
for my $o (@options) for my $o (@options)
{ {
@ -143,11 +188,13 @@ for my $o (@options)
# Help # Help
pgbench( pgbench(
'--help', 0, '--help', 0,
[ qr{benchmarking tool for PostgreSQL}, [
qr{benchmarking tool for PostgreSQL},
qr{Usage}, qr{Usage},
qr{Initialization options:}, qr{Initialization options:},
qr{Common options:}, qr{Common options:},
qr{Report bugs to} ], qr{Report bugs to}
],
[qr{^$}], [qr{^$}],
'pgbench help'); 'pgbench help');
@ -159,43 +206,65 @@ pgbench(
'-b list', '-b list',
0, 0,
[qr{^$}], [qr{^$}],
[ qr{Available builtin scripts:}, qr{tpcb-like}, [
qr{simple-update}, qr{select-only} ], qr{Available builtin scripts:}, qr{tpcb-like},
qr{simple-update}, qr{select-only}
],
'pgbench builtin list'); 'pgbench builtin list');
my @script_tests = ( my @script_tests = (
# name, err, { file => contents } # name, err, { file => contents }
[ 'missing endif', [
'missing endif',
[qr{\\if without matching \\endif}], [qr{\\if without matching \\endif}],
{ 'if-noendif.sql' => '\if 1' } ], { 'if-noendif.sql' => '\if 1' }
[ 'missing if on elif', ],
[
'missing if on elif',
[qr{\\elif without matching \\if}], [qr{\\elif without matching \\if}],
{ 'elif-noif.sql' => '\elif 1' } ], { 'elif-noif.sql' => '\elif 1' }
[ 'missing if on else', ],
[
'missing if on else',
[qr{\\else without matching \\if}], [qr{\\else without matching \\if}],
{ 'else-noif.sql' => '\else' } ], { 'else-noif.sql' => '\else' }
[ 'missing if on endif', ],
[
'missing if on endif',
[qr{\\endif without matching \\if}], [qr{\\endif without matching \\if}],
{ 'endif-noif.sql' => '\endif' } ], { 'endif-noif.sql' => '\endif' }
[ 'elif after else', ],
[
'elif after else',
[qr{\\elif after \\else}], [qr{\\elif after \\else}],
{ 'else-elif.sql' => "\\if 1\n\\else\n\\elif 0\n\\endif" } ], { 'else-elif.sql' => "\\if 1\n\\else\n\\elif 0\n\\endif" }
[ 'else after else', ],
[
'else after else',
[qr{\\else after \\else}], [qr{\\else after \\else}],
{ 'else-else.sql' => "\\if 1\n\\else\n\\else\n\\endif" } ], { 'else-else.sql' => "\\if 1\n\\else\n\\else\n\\endif" }
[ 'if syntax error', ],
[
'if syntax error',
[qr{syntax error in command "if"}], [qr{syntax error in command "if"}],
{ 'if-bad.sql' => "\\if\n\\endif\n" } ], { 'if-bad.sql' => "\\if\n\\endif\n" }
[ 'elif syntax error', ],
[
'elif syntax error',
[qr{syntax error in command "elif"}], [qr{syntax error in command "elif"}],
{ 'elif-bad.sql' => "\\if 0\n\\elif +\n\\endif\n" } ], { 'elif-bad.sql' => "\\if 0\n\\elif +\n\\endif\n" }
[ 'else syntax error', ],
[
'else syntax error',
[qr{unexpected argument in command "else"}], [qr{unexpected argument in command "else"}],
{ 'else-bad.sql' => "\\if 0\n\\else BAD\n\\endif\n" } ], { 'else-bad.sql' => "\\if 0\n\\else BAD\n\\endif\n" }
[ 'endif syntax error', ],
[
'endif syntax error',
[qr{unexpected argument in command "endif"}], [qr{unexpected argument in command "endif"}],
{ 'endif-bad.sql' => "\\if 0\n\\endif BAD\n" } ],); { 'endif-bad.sql' => "\\if 0\n\\endif BAD\n" }
],);
for my $t (@script_tests) for my $t (@script_tests)
{ {
View File
@ -149,7 +149,8 @@ foreach my $file (sort readdir DIR)
cmddesc => $cmddesc, cmddesc => $cmddesc,
cmdsynopsis => $cmdsynopsis, cmdsynopsis => $cmdsynopsis,
params => \@params, params => \@params,
nl_count => $nl_count }; nl_count => $nl_count
};
$maxlen = $maxlen =
($maxlen >= length $cmdname) ? $maxlen : length $cmdname; ($maxlen >= length $cmdname) ? $maxlen : length $cmdname;
} }
View File
@ -161,7 +161,8 @@ sub test_access
'SELECT 1', 'SELECT 1',
extra_params => [ extra_params => [
'-d', $node->connstr('postgres') . ' host=localhost', '-d', $node->connstr('postgres') . ' host=localhost',
'-U', $role ]); '-U', $role
]);
is($res, $expected_res, $test_name); is($res, $expected_res, $test_name);
} }
View File
@ -43,12 +43,16 @@ my %pgdump_runs = (
dump_cmd => [ dump_cmd => [
'pg_dump', '--no-sync', 'pg_dump', '--no-sync',
"--file=$tempdir/binary_upgrade.sql", '--schema-only', "--file=$tempdir/binary_upgrade.sql", '--schema-only',
'--binary-upgrade', '--dbname=postgres', ], }, '--binary-upgrade', '--dbname=postgres',
],
},
clean => { clean => {
dump_cmd => [ dump_cmd => [
'pg_dump', "--file=$tempdir/clean.sql", 'pg_dump', "--file=$tempdir/clean.sql",
'-c', '--no-sync', '-c', '--no-sync',
'--dbname=postgres', ], }, '--dbname=postgres',
],
},
clean_if_exists => { clean_if_exists => {
dump_cmd => [ dump_cmd => [
'pg_dump', 'pg_dump',
@ -57,7 +61,9 @@ my %pgdump_runs = (
'-c', '-c',
'--if-exists', '--if-exists',
'--encoding=UTF8', # no-op, just tests that option is accepted '--encoding=UTF8', # no-op, just tests that option is accepted
'postgres', ], }, 'postgres',
],
},
createdb => { createdb => {
dump_cmd => [ dump_cmd => [
'pg_dump', 'pg_dump',
@ -65,7 +71,9 @@ my %pgdump_runs = (
"--file=$tempdir/createdb.sql", "--file=$tempdir/createdb.sql",
'-C', '-C',
'-R', # no-op, just for testing '-R', # no-op, just for testing
'postgres', ], }, 'postgres',
],
},
data_only => { data_only => {
dump_cmd => [ dump_cmd => [
'pg_dump', 'pg_dump',
@ -73,7 +81,9 @@ my %pgdump_runs = (
"--file=$tempdir/data_only.sql", "--file=$tempdir/data_only.sql",
'-a', '-a',
'-v', # no-op, just make sure it works '-v', # no-op, just make sure it works
'postgres', ], }, 'postgres',
],
},
defaults => { defaults => {
dump_cmd => [ 'pg_dump', '-f', "$tempdir/defaults.sql", 'postgres', ], dump_cmd => [ 'pg_dump', '-f', "$tempdir/defaults.sql", 'postgres', ],
}, },
@ -81,70 +91,96 @@ my %pgdump_runs = (
test_key => 'defaults', test_key => 'defaults',
dump_cmd => [ dump_cmd => [
'pg_dump', '--no-sync', '-Fc', '-Z6', 'pg_dump', '--no-sync', '-Fc', '-Z6',
"--file=$tempdir/defaults_custom_format.dump", 'postgres', ], "--file=$tempdir/defaults_custom_format.dump", 'postgres',
],
restore_cmd => [ restore_cmd => [
'pg_restore', 'pg_restore',
"--file=$tempdir/defaults_custom_format.sql", "--file=$tempdir/defaults_custom_format.sql",
"$tempdir/defaults_custom_format.dump", ], }, "$tempdir/defaults_custom_format.dump",
],
},
defaults_dir_format => { defaults_dir_format => {
test_key => 'defaults', test_key => 'defaults',
dump_cmd => [ dump_cmd => [
'pg_dump', '--no-sync', '-Fd', 'pg_dump', '--no-sync', '-Fd',
"--file=$tempdir/defaults_dir_format", 'postgres', ], "--file=$tempdir/defaults_dir_format", 'postgres',
],
restore_cmd => [ restore_cmd => [
'pg_restore', 'pg_restore',
"--file=$tempdir/defaults_dir_format.sql", "--file=$tempdir/defaults_dir_format.sql",
"$tempdir/defaults_dir_format", ], }, "$tempdir/defaults_dir_format",
],
},
defaults_parallel => { defaults_parallel => {
test_key => 'defaults', test_key => 'defaults',
dump_cmd => [ dump_cmd => [
'pg_dump', '--no-sync', '-Fd', '-j2', 'pg_dump', '--no-sync', '-Fd', '-j2',
"--file=$tempdir/defaults_parallel", 'postgres', ], "--file=$tempdir/defaults_parallel", 'postgres',
],
restore_cmd => [ restore_cmd => [
'pg_restore', 'pg_restore',
"--file=$tempdir/defaults_parallel.sql", "--file=$tempdir/defaults_parallel.sql",
"$tempdir/defaults_parallel", ], }, "$tempdir/defaults_parallel",
],
},
defaults_tar_format => { defaults_tar_format => {
test_key => 'defaults', test_key => 'defaults',
dump_cmd => [ dump_cmd => [
'pg_dump', '--no-sync', '-Ft', 'pg_dump', '--no-sync', '-Ft',
"--file=$tempdir/defaults_tar_format.tar", 'postgres', ], "--file=$tempdir/defaults_tar_format.tar", 'postgres',
],
restore_cmd => [ restore_cmd => [
'pg_restore', 'pg_restore',
"--file=$tempdir/defaults_tar_format.sql", "--file=$tempdir/defaults_tar_format.sql",
"$tempdir/defaults_tar_format.tar", ], }, "$tempdir/defaults_tar_format.tar",
],
},
pg_dumpall_globals => { pg_dumpall_globals => {
dump_cmd => [ dump_cmd => [
'pg_dumpall', '--no-sync', 'pg_dumpall', '--no-sync',
"--file=$tempdir/pg_dumpall_globals.sql", '-g', ], }, "--file=$tempdir/pg_dumpall_globals.sql", '-g',
],
},
no_privs => { no_privs => {
dump_cmd => [ dump_cmd => [
'pg_dump', '--no-sync', 'pg_dump', '--no-sync',
"--file=$tempdir/no_privs.sql", '-x', "--file=$tempdir/no_privs.sql", '-x',
'postgres', ], }, 'postgres',
],
},
no_owner => { no_owner => {
dump_cmd => [ dump_cmd => [
'pg_dump', '--no-sync', 'pg_dump', '--no-sync',
"--file=$tempdir/no_owner.sql", '-O', "--file=$tempdir/no_owner.sql", '-O',
'postgres', ], }, 'postgres',
],
},
schema_only => { schema_only => {
dump_cmd => [ dump_cmd => [
'pg_dump', '--no-sync', "--file=$tempdir/schema_only.sql", 'pg_dump', '--no-sync', "--file=$tempdir/schema_only.sql",
'-s', 'postgres', ], }, '-s', 'postgres',
],
},
section_pre_data => { section_pre_data => {
dump_cmd => [ dump_cmd => [
'pg_dump', '--no-sync', 'pg_dump', '--no-sync',
"--file=$tempdir/section_pre_data.sql", '--section=pre-data', "--file=$tempdir/section_pre_data.sql", '--section=pre-data',
'postgres', ], }, 'postgres',
],
},
section_data => { section_data => {
dump_cmd => [ dump_cmd => [
'pg_dump', '--no-sync', 'pg_dump', '--no-sync',
"--file=$tempdir/section_data.sql", '--section=data', "--file=$tempdir/section_data.sql", '--section=data',
'postgres', ], }, 'postgres',
],
},
section_post_data => { section_post_data => {
dump_cmd => [ dump_cmd => [
'pg_dump', '--no-sync', "--file=$tempdir/section_post_data.sql", 'pg_dump', '--no-sync', "--file=$tempdir/section_post_data.sql",
'--section=post-data', 'postgres', ], },); '--section=post-data', 'postgres',
],
},);
############################################################### ###############################################################
# Definition of the tests to run. # Definition of the tests to run.
@ -196,7 +232,8 @@ my %tests = (
\n\s+\Qcol1 integer NOT NULL,\E \n\s+\Qcol1 integer NOT NULL,\E
\n\s+\Qcol2 integer\E \n\s+\Qcol2 integer\E
\n\);\n/xm, \n\);\n/xm,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'CREATE EXTENSION test_pg_dump' => { 'CREATE EXTENSION test_pg_dump' => {
create_order => 2, create_order => 2,
@ -207,14 +244,17 @@ my %tests = (
like => { like => {
%full_runs, %full_runs,
schema_only => 1, schema_only => 1,
section_pre_data => 1, }, section_pre_data => 1,
unlike => { binary_upgrade => 1, }, }, },
unlike => { binary_upgrade => 1, },
},
'CREATE ROLE regress_dump_test_role' => { 'CREATE ROLE regress_dump_test_role' => {
create_order => 1, create_order => 1,
create_sql => 'CREATE ROLE regress_dump_test_role;', create_sql => 'CREATE ROLE regress_dump_test_role;',
regexp => qr/^CREATE ROLE regress_dump_test_role;\n/m, regexp => qr/^CREATE ROLE regress_dump_test_role;\n/m,
like => { pg_dumpall_globals => 1, }, }, like => { pg_dumpall_globals => 1, },
},
'CREATE SEQUENCE regress_pg_dump_table_col1_seq' => { 'CREATE SEQUENCE regress_pg_dump_table_col1_seq' => {
regexp => qr/^ regexp => qr/^
@ -226,7 +266,8 @@ my %tests = (
\n\s+\QNO MAXVALUE\E \n\s+\QNO MAXVALUE\E
\n\s+\QCACHE 1;\E \n\s+\QCACHE 1;\E
\n/xm, \n/xm,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'CREATE TABLE regress_pg_dump_table_added' => { 'CREATE TABLE regress_pg_dump_table_added' => {
create_order => 7, create_order => 7,
@ -237,7 +278,8 @@ my %tests = (
\n\s+\Qcol1 integer NOT NULL,\E \n\s+\Qcol1 integer NOT NULL,\E
\n\s+\Qcol2 integer\E \n\s+\Qcol2 integer\E
\n\);\n/xm, \n\);\n/xm,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'CREATE SEQUENCE regress_pg_dump_seq' => { 'CREATE SEQUENCE regress_pg_dump_seq' => {
regexp => qr/^ regexp => qr/^
@ -248,7 +290,8 @@ my %tests = (
\n\s+\QNO MAXVALUE\E \n\s+\QNO MAXVALUE\E
\n\s+\QCACHE 1;\E \n\s+\QCACHE 1;\E
\n/xm, \n/xm,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'SETVAL SEQUENCE regress_seq_dumpable' => { 'SETVAL SEQUENCE regress_seq_dumpable' => {
create_order => 6, create_order => 6,
@ -259,7 +302,9 @@ my %tests = (
like => { like => {
%full_runs, %full_runs,
data_only => 1, data_only => 1,
section_data => 1, }, }, section_data => 1,
},
},
'CREATE TABLE regress_pg_dump_table' => { 'CREATE TABLE regress_pg_dump_table' => {
regexp => qr/^ regexp => qr/^
@ -267,13 +312,15 @@ my %tests = (
\n\s+\Qcol1 integer NOT NULL,\E \n\s+\Qcol1 integer NOT NULL,\E
\n\s+\Qcol2 integer\E \n\s+\Qcol2 integer\E
\n\);\n/xm, \n\);\n/xm,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'CREATE ACCESS METHOD regress_test_am' => { 'CREATE ACCESS METHOD regress_test_am' => {
regexp => qr/^ regexp => qr/^
\QCREATE ACCESS METHOD regress_test_am TYPE INDEX HANDLER bthandler;\E \QCREATE ACCESS METHOD regress_test_am TYPE INDEX HANDLER bthandler;\E
\n/xm, \n/xm,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'COMMENT ON EXTENSION test_pg_dump' => { 'COMMENT ON EXTENSION test_pg_dump' => {
regexp => qr/^ regexp => qr/^
@ -283,7 +330,9 @@ my %tests = (
like => { like => {
%full_runs, %full_runs,
schema_only => 1, schema_only => 1,
section_pre_data => 1, }, }, section_pre_data => 1,
},
},
'GRANT SELECT regress_pg_dump_table_added pre-ALTER EXTENSION' => { 'GRANT SELECT regress_pg_dump_table_added pre-ALTER EXTENSION' => {
create_order => 8, create_order => 8,
@ -292,7 +341,8 @@ my %tests = (
regexp => qr/^ regexp => qr/^
\QGRANT SELECT ON TABLE public.regress_pg_dump_table_added TO regress_dump_test_role;\E \QGRANT SELECT ON TABLE public.regress_pg_dump_table_added TO regress_dump_test_role;\E
\n/xm, \n/xm,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'REVOKE SELECT regress_pg_dump_table_added post-ALTER EXTENSION' => { 'REVOKE SELECT regress_pg_dump_table_added post-ALTER EXTENSION' => {
create_order => 10, create_order => 10,
@ -304,8 +354,10 @@ my %tests = (
like => { like => {
%full_runs, %full_runs,
schema_only => 1, schema_only => 1,
section_pre_data => 1, }, section_pre_data => 1,
unlike => { no_privs => 1, }, }, },
unlike => { no_privs => 1, },
},
'GRANT SELECT ON TABLE regress_pg_dump_table' => { 'GRANT SELECT ON TABLE regress_pg_dump_table' => {
regexp => qr/^ regexp => qr/^
@ -313,7 +365,8 @@ my %tests = (
\QGRANT SELECT ON TABLE public.regress_pg_dump_table TO regress_dump_test_role;\E\n \QGRANT SELECT ON TABLE public.regress_pg_dump_table TO regress_dump_test_role;\E\n
\QSELECT pg_catalog.binary_upgrade_set_record_init_privs(false);\E \QSELECT pg_catalog.binary_upgrade_set_record_init_privs(false);\E
\n/xms, \n/xms,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'GRANT SELECT(col1) ON regress_pg_dump_table' => { 'GRANT SELECT(col1) ON regress_pg_dump_table' => {
regexp => qr/^ regexp => qr/^
@ -321,7 +374,8 @@ my %tests = (
\QGRANT SELECT(col1) ON TABLE public.regress_pg_dump_table TO PUBLIC;\E\n \QGRANT SELECT(col1) ON TABLE public.regress_pg_dump_table TO PUBLIC;\E\n
\QSELECT pg_catalog.binary_upgrade_set_record_init_privs(false);\E \QSELECT pg_catalog.binary_upgrade_set_record_init_privs(false);\E
\n/xms, \n/xms,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'GRANT SELECT(col2) ON regress_pg_dump_table TO regress_dump_test_role' 'GRANT SELECT(col2) ON regress_pg_dump_table TO regress_dump_test_role'
=> { => {
@ -334,8 +388,10 @@ my %tests = (
like => { like => {
%full_runs, %full_runs,
schema_only => 1, schema_only => 1,
section_pre_data => 1, }, section_pre_data => 1,
unlike => { no_privs => 1, }, }, },
unlike => { no_privs => 1, },
},
'GRANT USAGE ON regress_pg_dump_table_col1_seq TO regress_dump_test_role' 'GRANT USAGE ON regress_pg_dump_table_col1_seq TO regress_dump_test_role'
=> { => {
@ -348,14 +404,17 @@ my %tests = (
like => { like => {
%full_runs, %full_runs,
schema_only => 1, schema_only => 1,
section_pre_data => 1, }, section_pre_data => 1,
unlike => { no_privs => 1, }, }, },
unlike => { no_privs => 1, },
},
'GRANT USAGE ON regress_pg_dump_seq TO regress_dump_test_role' => { 'GRANT USAGE ON regress_pg_dump_seq TO regress_dump_test_role' => {
regexp => qr/^ regexp => qr/^
\QGRANT USAGE ON SEQUENCE public.regress_pg_dump_seq TO regress_dump_test_role;\E \QGRANT USAGE ON SEQUENCE public.regress_pg_dump_seq TO regress_dump_test_role;\E
\n/xm, \n/xm,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'REVOKE SELECT(col1) ON regress_pg_dump_table' => { 'REVOKE SELECT(col1) ON regress_pg_dump_table' => {
create_order => 3, create_order => 3,
@ -367,8 +426,10 @@ my %tests = (
like => { like => {
%full_runs, %full_runs,
schema_only => 1, schema_only => 1,
section_pre_data => 1, }, section_pre_data => 1,
unlike => { no_privs => 1, }, }, },
unlike => { no_privs => 1, },
},
# Objects included in extension part of a schema created by this extension */ # Objects included in extension part of a schema created by this extension */
'CREATE TABLE regress_pg_dump_schema.test_table' => { 'CREATE TABLE regress_pg_dump_schema.test_table' => {
@ -377,7 +438,8 @@ my %tests = (
\n\s+\Qcol1 integer,\E \n\s+\Qcol1 integer,\E
\n\s+\Qcol2 integer\E \n\s+\Qcol2 integer\E
\n\);\n/xm, \n\);\n/xm,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'GRANT SELECT ON regress_pg_dump_schema.test_table' => { 'GRANT SELECT ON regress_pg_dump_schema.test_table' => {
regexp => qr/^ regexp => qr/^
@ -385,7 +447,8 @@ my %tests = (
\QGRANT SELECT ON TABLE regress_pg_dump_schema.test_table TO regress_dump_test_role;\E\n \QGRANT SELECT ON TABLE regress_pg_dump_schema.test_table TO regress_dump_test_role;\E\n
\QSELECT pg_catalog.binary_upgrade_set_record_init_privs(false);\E \QSELECT pg_catalog.binary_upgrade_set_record_init_privs(false);\E
\n/xms, \n/xms,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'CREATE SEQUENCE regress_pg_dump_schema.test_seq' => { 'CREATE SEQUENCE regress_pg_dump_schema.test_seq' => {
regexp => qr/^ regexp => qr/^
@ -396,7 +459,8 @@ my %tests = (
\n\s+\QNO MAXVALUE\E \n\s+\QNO MAXVALUE\E
\n\s+\QCACHE 1;\E \n\s+\QCACHE 1;\E
\n/xm, \n/xm,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'GRANT USAGE ON regress_pg_dump_schema.test_seq' => { 'GRANT USAGE ON regress_pg_dump_schema.test_seq' => {
regexp => qr/^ regexp => qr/^
@ -404,14 +468,16 @@ my %tests = (
\QGRANT USAGE ON SEQUENCE regress_pg_dump_schema.test_seq TO regress_dump_test_role;\E\n \QGRANT USAGE ON SEQUENCE regress_pg_dump_schema.test_seq TO regress_dump_test_role;\E\n
\QSELECT pg_catalog.binary_upgrade_set_record_init_privs(false);\E \QSELECT pg_catalog.binary_upgrade_set_record_init_privs(false);\E
\n/xms, \n/xms,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'CREATE TYPE regress_pg_dump_schema.test_type' => { 'CREATE TYPE regress_pg_dump_schema.test_type' => {
regexp => qr/^ regexp => qr/^
\QCREATE TYPE regress_pg_dump_schema.test_type AS (\E \QCREATE TYPE regress_pg_dump_schema.test_type AS (\E
\n\s+\Qcol1 integer\E \n\s+\Qcol1 integer\E
\n\);\n/xm, \n\);\n/xm,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'GRANT USAGE ON regress_pg_dump_schema.test_type' => { 'GRANT USAGE ON regress_pg_dump_schema.test_type' => {
regexp => qr/^ regexp => qr/^
@ -419,14 +485,16 @@ my %tests = (
\QGRANT ALL ON TYPE regress_pg_dump_schema.test_type TO regress_dump_test_role;\E\n \QGRANT ALL ON TYPE regress_pg_dump_schema.test_type TO regress_dump_test_role;\E\n
\QSELECT pg_catalog.binary_upgrade_set_record_init_privs(false);\E \QSELECT pg_catalog.binary_upgrade_set_record_init_privs(false);\E
\n/xms, \n/xms,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'CREATE FUNCTION regress_pg_dump_schema.test_func' => { 'CREATE FUNCTION regress_pg_dump_schema.test_func' => {
regexp => qr/^ regexp => qr/^
\QCREATE FUNCTION regress_pg_dump_schema.test_func() RETURNS integer\E \QCREATE FUNCTION regress_pg_dump_schema.test_func() RETURNS integer\E
\n\s+\QLANGUAGE sql\E \n\s+\QLANGUAGE sql\E
\n/xm, \n/xm,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'GRANT ALL ON regress_pg_dump_schema.test_func' => { 'GRANT ALL ON regress_pg_dump_schema.test_func' => {
regexp => qr/^ regexp => qr/^
@ -434,7 +502,8 @@ my %tests = (
\QGRANT ALL ON FUNCTION regress_pg_dump_schema.test_func() TO regress_dump_test_role;\E\n \QGRANT ALL ON FUNCTION regress_pg_dump_schema.test_func() TO regress_dump_test_role;\E\n
\QSELECT pg_catalog.binary_upgrade_set_record_init_privs(false);\E \QSELECT pg_catalog.binary_upgrade_set_record_init_privs(false);\E
\n/xms, \n/xms,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'CREATE AGGREGATE regress_pg_dump_schema.test_agg' => { 'CREATE AGGREGATE regress_pg_dump_schema.test_agg' => {
regexp => qr/^ regexp => qr/^
@ -442,7 +511,8 @@ my %tests = (
\n\s+\QSFUNC = int2_sum,\E \n\s+\QSFUNC = int2_sum,\E
\n\s+\QSTYPE = bigint\E \n\s+\QSTYPE = bigint\E
\n\);\n/xm, \n\);\n/xm,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
'GRANT ALL ON regress_pg_dump_schema.test_agg' => { 'GRANT ALL ON regress_pg_dump_schema.test_agg' => {
regexp => qr/^ regexp => qr/^
@ -450,7 +520,8 @@ my %tests = (
\QGRANT ALL ON FUNCTION regress_pg_dump_schema.test_agg(smallint) TO regress_dump_test_role;\E\n \QGRANT ALL ON FUNCTION regress_pg_dump_schema.test_agg(smallint) TO regress_dump_test_role;\E\n
\QSELECT pg_catalog.binary_upgrade_set_record_init_privs(false);\E \QSELECT pg_catalog.binary_upgrade_set_record_init_privs(false);\E
\n/xms, \n/xms,
like => { binary_upgrade => 1, }, }, like => { binary_upgrade => 1, },
},
# Objects not included in extension, part of schema created by extension # Objects not included in extension, part of schema created by extension
'CREATE TABLE regress_pg_dump_schema.external_tab' => { 'CREATE TABLE regress_pg_dump_schema.external_tab' => {
@ -464,7 +535,9 @@ my %tests = (
like => { like => {
%full_runs, %full_runs,
schema_only => 1, schema_only => 1,
section_pre_data => 1, }, },); section_pre_data => 1,
},
},);
######################################### #########################################
# Create a PG instance to test actually dumping from # Create a PG instance to test actually dumping from
View File
@ -155,7 +155,8 @@ sub new
_host => $pghost, _host => $pghost,
_basedir => "$TestLib::tmp_check/t_${testname}_${name}_data", _basedir => "$TestLib::tmp_check/t_${testname}_${name}_data",
_name => $name, _name => $name,
_logfile => "$TestLib::log_path/${testname}_${name}.log" }; _logfile => "$TestLib::log_path/${testname}_${name}.log"
};
bless $self, $class; bless $self, $class;
mkdir $self->{_basedir} mkdir $self->{_basedir}
View File
@ -256,7 +256,8 @@ sub check_mode_recursive
my $result = 1; my $result = 1;
find( find(
{ follow_fast => 1, {
follow_fast => 1,
wanted => sub { wanted => sub {
my $file_stat = stat($File::Find::name); my $file_stat = stat($File::Find::name);
@ -322,7 +323,8 @@ sub chmod_recursive
my ($dir, $dir_mode, $file_mode) = @_; my ($dir, $dir_mode, $file_mode) = @_;
find( find(
{ follow_fast => 1, {
follow_fast => 1,
wanted => sub { wanted => sub {
my $file_stat = stat($File::Find::name); my $file_stat = stat($File::Find::name);
View File
@ -112,8 +112,10 @@ SKIP:
skip "Test fails on Windows perl", 2 if $Config{osname} eq 'MSWin32'; skip "Test fails on Windows perl", 2 if $Config{osname} eq 'MSWin32';
my $pg_recvlogical = IPC::Run::start( my $pg_recvlogical = IPC::Run::start(
[ 'pg_recvlogical', '-d', $node_master->connstr('otherdb'), [
'-S', 'otherdb_slot', '-f', '-', '--start' ]); 'pg_recvlogical', '-d', $node_master->connstr('otherdb'),
'-S', 'otherdb_slot', '-f', '-', '--start'
]);
$node_master->poll_query_until('otherdb', $node_master->poll_query_until('otherdb',
"SELECT EXISTS (SELECT 1 FROM pg_replication_slots WHERE slot_name = 'otherdb_slot' AND active_pid IS NOT NULL)" "SELECT EXISTS (SELECT 1 FROM pg_replication_slots WHERE slot_name = 'otherdb_slot' AND active_pid IS NOT NULL)"
) or die "slot never became active"; ) or die "slot never became active";
View File
@ -29,8 +29,10 @@ my ($stdin, $stdout, $stderr) = ('', '', '');
# an xact to be in-progress when we crash and we need to know # an xact to be in-progress when we crash and we need to know
# its xid. # its xid.
my $tx = IPC::Run::start( my $tx = IPC::Run::start(
[ 'psql', '-X', '-qAt', '-v', 'ON_ERROR_STOP=1', '-f', '-', '-d', [
$node->connstr('postgres') ], 'psql', '-X', '-qAt', '-v', 'ON_ERROR_STOP=1', '-f', '-', '-d',
$node->connstr('postgres')
],
'<', '<',
\$stdin, \$stdin,
'>', '>',
View File
@ -39,8 +39,10 @@ $node->safe_psql(
# Run psql, keeping session alive, so we have an alive backend to kill. # Run psql, keeping session alive, so we have an alive backend to kill.
my ($killme_stdin, $killme_stdout, $killme_stderr) = ('', '', ''); my ($killme_stdin, $killme_stdout, $killme_stderr) = ('', '', '');
my $killme = IPC::Run::start( my $killme = IPC::Run::start(
[ 'psql', '-X', '-qAt', '-v', 'ON_ERROR_STOP=1', '-f', '-', '-d', [
$node->connstr('postgres') ], 'psql', '-X', '-qAt', '-v', 'ON_ERROR_STOP=1', '-f', '-', '-d',
$node->connstr('postgres')
],
'<', '<',
\$killme_stdin, \$killme_stdin,
'>', '>',
@ -52,8 +54,10 @@ my $killme = IPC::Run::start(
# Need a second psql to check if crash-restart happened. # Need a second psql to check if crash-restart happened.
my ($monitor_stdin, $monitor_stdout, $monitor_stderr) = ('', '', ''); my ($monitor_stdin, $monitor_stdout, $monitor_stderr) = ('', '', '');
my $monitor = IPC::Run::start( my $monitor = IPC::Run::start(
[ 'psql', '-X', '-qAt', '-v', 'ON_ERROR_STOP=1', '-f', '-', '-d', [
$node->connstr('postgres') ], 'psql', '-X', '-qAt', '-v', 'ON_ERROR_STOP=1', '-f', '-', '-d',
$node->connstr('postgres')
],
'<', '<',
\$monitor_stdin, \$monitor_stdin,
'>', '>',
View File
@ -43,7 +43,8 @@ sub test_connect_ok
my $cmd = [ my $cmd = [
'psql', '-X', '-A', '-t', '-c', 'psql', '-X', '-A', '-t', '-c',
"SELECT \$\$connected with $connstr\$\$", "SELECT \$\$connected with $connstr\$\$",
'-d', "$common_connstr $connstr" ]; '-d', "$common_connstr $connstr"
];
command_ok($cmd, $test_name); command_ok($cmd, $test_name);
} }
@ -55,7 +56,8 @@ sub test_connect_fails
my $cmd = [ my $cmd = [
'psql', '-X', '-A', '-t', '-c', 'psql', '-X', '-A', '-t', '-c',
"SELECT \$\$connected with $connstr\$\$", "SELECT \$\$connected with $connstr\$\$",
'-d', "$common_connstr $connstr" ]; '-d', "$common_connstr $connstr"
];
command_fails_like($cmd, $expected_stderr, $test_name); command_fails_like($cmd, $expected_stderr, $test_name);
} }
View File
@ -317,7 +317,8 @@ sub push_commit
'message' => $c->{'message'}, 'message' => $c->{'message'},
'commit' => $c->{'commit'}, 'commit' => $c->{'commit'},
'commits' => [], 'commits' => [],
'timestamp' => $ts }; 'timestamp' => $ts
};
push @{ $all_commits{$ht} }, $cc; push @{ $all_commits{$ht} }, $cc;
} }
@ -326,7 +327,8 @@ sub push_commit
'branch' => $c->{'branch'}, 'branch' => $c->{'branch'},
'commit' => $c->{'commit'}, 'commit' => $c->{'commit'},
'date' => $c->{'date'}, 'date' => $c->{'date'},
'last_tag' => $c->{'last_tag'} }; 'last_tag' => $c->{'last_tag'}
};
push @{ $cc->{'commits'} }, $smallc; push @{ $cc->{'commits'} }, $smallc;
push @{ $all_commits_by_branch{ $c->{'branch'} } }, $cc; push @{ $all_commits_by_branch{ $c->{'branch'} } }, $cc;
$cc->{'branch_position'}{ $c->{'branch'} } = $cc->{'branch_position'}{ $c->{'branch'} } =
View File
@ -95,7 +95,8 @@ sub Install
my @top_dir = ("src"); my @top_dir = ("src");
@top_dir = ("src\\bin", "src\\interfaces") if ($insttype eq "client"); @top_dir = ("src\\bin", "src\\interfaces") if ($insttype eq "client");
File::Find::find( File::Find::find(
{ wanted => sub { {
wanted => sub {
/^.*\.sample\z/s /^.*\.sample\z/s
&& push(@$sample_files, $File::Find::name); && push(@$sample_files, $File::Find::name);
@ -155,7 +156,8 @@ sub Install
push @pldirs, "src/pl/plpython" if $config->{python}; push @pldirs, "src/pl/plpython" if $config->{python};
push @pldirs, "src/pl/tcl" if $config->{tcl}; push @pldirs, "src/pl/tcl" if $config->{tcl};
File::Find::find( File::Find::find(
{ wanted => sub { {
wanted => sub {
/^(.*--.*\.sql|.*\.control)\z/s /^(.*--.*\.sql|.*\.control)\z/s
&& push(@$pl_extension_files, $File::Find::name); && push(@$pl_extension_files, $File::Find::name);
@ -686,7 +688,8 @@ sub GenerateNLSFiles
EnsureDirectories($target, "share/locale"); EnsureDirectories($target, "share/locale");
my @flist; my @flist;
File::Find::find( File::Find::find(
{ wanted => sub { {
wanted => sub {
/^nls\.mk\z/s /^nls\.mk\z/s
&& !push(@flist, $File::Find::name); && !push(@flist, $File::Find::name);
} }
View File
@ -65,17 +65,21 @@ EOF
$self->WriteItemDefinitionGroup( $self->WriteItemDefinitionGroup(
$f, 'Debug', $f, 'Debug',
{ defs => "_DEBUG;DEBUG=1", {
defs => "_DEBUG;DEBUG=1",
opt => 'Disabled', opt => 'Disabled',
strpool => 'false', strpool => 'false',
runtime => 'MultiThreadedDebugDLL' }); runtime => 'MultiThreadedDebugDLL'
});
$self->WriteItemDefinitionGroup( $self->WriteItemDefinitionGroup(
$f, $f,
'Release', 'Release',
{ defs => "", {
defs => "",
opt => 'Full', opt => 'Full',
strpool => 'true', strpool => 'true',
runtime => 'MultiThreadedDLL' }); runtime => 'MultiThreadedDLL'
});
} }
sub AddDefine sub AddDefine
View File
@ -39,7 +39,8 @@ my $contrib_extralibs = undef;
my $contrib_extraincludes = { 'dblink' => ['src/backend'] }; my $contrib_extraincludes = { 'dblink' => ['src/backend'] };
my $contrib_extrasource = { my $contrib_extrasource = {
'cube' => [ 'contrib/cube/cubescan.l', 'contrib/cube/cubeparse.y' ], 'cube' => [ 'contrib/cube/cubescan.l', 'contrib/cube/cubeparse.y' ],
'seg' => [ 'contrib/seg/segscan.l', 'contrib/seg/segparse.y' ], }; 'seg' => [ 'contrib/seg/segscan.l', 'contrib/seg/segparse.y' ],
};
my @contrib_excludes = ( my @contrib_excludes = (
'commit_ts', 'hstore_plperl', 'commit_ts', 'hstore_plperl',
'hstore_plpython', 'intagg', 'hstore_plpython', 'intagg',
@ -64,14 +65,17 @@ my $frontend_extralibs = {
'initdb' => ['ws2_32.lib'], 'initdb' => ['ws2_32.lib'],
'pg_restore' => ['ws2_32.lib'], 'pg_restore' => ['ws2_32.lib'],
'pgbench' => ['ws2_32.lib'], 'pgbench' => ['ws2_32.lib'],
'psql' => ['ws2_32.lib'] }; 'psql' => ['ws2_32.lib']
};
my $frontend_extraincludes = { my $frontend_extraincludes = {
'initdb' => ['src/timezone'], 'initdb' => ['src/timezone'],
'psql' => ['src/backend'] }; 'psql' => ['src/backend']
};
my $frontend_extrasource = { my $frontend_extrasource = {
'psql' => ['src/bin/psql/psqlscanslash.l'], 'psql' => ['src/bin/psql/psqlscanslash.l'],
'pgbench' => 'pgbench' =>
[ 'src/bin/pgbench/exprscan.l', 'src/bin/pgbench/exprparse.y' ] }; [ 'src/bin/pgbench/exprscan.l', 'src/bin/pgbench/exprparse.y' ]
};
my @frontend_excludes = ( my @frontend_excludes = (
'pgevent', 'pg_basebackup', 'pg_rewind', 'pg_dump', 'pgevent', 'pg_basebackup', 'pg_rewind', 'pg_dump',
'pg_waldump', 'scripts'); 'pg_waldump', 'scripts');
View File
@ -16,7 +16,8 @@ sub _new
my $good_types = { my $good_types = {
lib => 1, lib => 1,
exe => 1, exe => 1,
dll => 1, }; dll => 1,
};
confess("Bad project type: $type\n") unless exists $good_types->{$type}; confess("Bad project type: $type\n") unless exists $good_types->{$type};
my $self = { my $self = {
name => $name, name => $name,
@ -32,7 +33,8 @@ sub _new
solution => $solution, solution => $solution,
disablewarnings => '4018;4244;4273;4102;4090;4267', disablewarnings => '4018;4244;4273;4102;4090;4267',
disablelinkerwarnings => '', disablelinkerwarnings => '',
platform => $solution->{platform}, }; platform => $solution->{platform},
};
bless($self, $classname); bless($self, $classname);
return $self; return $self;
View File
@ -22,7 +22,8 @@ sub _new
VisualStudioVersion => undef, VisualStudioVersion => undef,
MinimumVisualStudioVersion => undef, MinimumVisualStudioVersion => undef,
vcver => undef, vcver => undef,
platform => undef, }; platform => undef,
};
bless($self, $classname); bless($self, $classname);
$self->DeterminePlatform(); $self->DeterminePlatform();
View File
@ -35,19 +35,23 @@ EOF
$self->WriteConfiguration( $self->WriteConfiguration(
$f, 'Debug', $f, 'Debug',
{ defs => "_DEBUG;DEBUG=1", {
defs => "_DEBUG;DEBUG=1",
wholeopt => 0, wholeopt => 0,
opt => 0, opt => 0,
strpool => 'false', strpool => 'false',
runtime => 3 }); runtime => 3
});
$self->WriteConfiguration( $self->WriteConfiguration(
$f, $f,
'Release', 'Release',
{ defs => "", {
defs => "",
wholeopt => 0, wholeopt => 0,
opt => 3, opt => 3,
strpool => 'true', strpool => 'true',
runtime => 2 }); runtime => 2
});
print $f <<EOF; print $f <<EOF;
</Configurations> </Configurations>
EOF EOF
View File
@ -11,5 +11,5 @@
--opening-brace-on-new-line --opening-brace-on-new-line
--output-line-ending=unix --output-line-ending=unix
--paren-tightness=2 --paren-tightness=2
--vertical-tightness=2 --paren-vertical-tightness=2
--vertical-tightness-closing=2 --paren-vertical-tightness-closing=2
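As an aside (not part of the patch): a minimal sketch of the layout difference the two renamed perltidy options make, mirroring the hash-ref pushes reformatted in the hunks above. The array, variable and key names below are invented for the illustration.

    use strict;
    use warnings;

    # Hypothetical example, not taken from the patch.
    # Old layout, with --vertical-tightness=2 also applied to braces:
    #
    #   push @zones,
    #     { std     => $std_name,
    #       dlt     => $dlt_name,
    #       display => $display_name };
    #
    # New layout, with vertical tightness limited to parentheses via
    # --paren-vertical-tightness=2 and --paren-vertical-tightness-closing=2,
    # so the braces open and close on their own lines:
    my @zones;
    my ($std_name, $dlt_name, $display_name) = ('EST', 'EDT', 'US Eastern');
    push @zones,
      {
          std     => $std_name,
          dlt     => $dlt_name,
          display => $display_name
      };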
View File
@ -389,7 +389,8 @@ sub build_clean
# get the list of files under code base, if it's set # get the list of files under code base, if it's set
File::Find::find( File::Find::find(
{ wanted => sub { {
wanted => sub {
my ($dev, $ino, $mode, $nlink, $uid, $gid); my ($dev, $ino, $mode, $nlink, $uid, $gid);
(($dev, $ino, $mode, $nlink, $uid, $gid) = lstat($_)) (($dev, $ino, $mode, $nlink, $uid, $gid) = lstat($_))
&& -f _ && -f _
View File
@ -47,9 +47,11 @@ foreach my $keyname (@subkeys)
die "Incomplete timezone data for $keyname!\n" die "Incomplete timezone data for $keyname!\n"
unless ($vals{Std} && $vals{Dlt} && $vals{Display}); unless ($vals{Std} && $vals{Dlt} && $vals{Display});
push @system_zones, push @system_zones,
{ 'std' => $vals{Std}->[2], {
'std' => $vals{Std}->[2],
'dlt' => $vals{Dlt}->[2], 'dlt' => $vals{Dlt}->[2],
'display' => clean_displayname($vals{Display}->[2]), }; 'display' => clean_displayname($vals{Display}->[2]),
};
} }
$basekey->Close(); $basekey->Close();
@ -75,10 +77,12 @@ while ($pgtz =~
m/{\s+"([^"]+)",\s+"([^"]+)",\s+"([^"]+)",?\s+},\s+\/\*(.+?)\*\//gs) m/{\s+"([^"]+)",\s+"([^"]+)",\s+"([^"]+)",?\s+},\s+\/\*(.+?)\*\//gs)
{ {
push @file_zones, push @file_zones,
{ 'std' => $1, {
'std' => $1,
'dlt' => $2, 'dlt' => $2,
'match' => $3, 'match' => $3,
'display' => clean_displayname($4), }; 'display' => clean_displayname($4),
};
} }
# #