Diffstat (limited to 'src')
-rw-r--r--  src/bin/pg_dump/meson.build                |   1
-rw-r--r--  src/bin/pg_dump/t/002_pg_dump.pl           | 408
-rw-r--r--  src/bin/pg_dump/t/006_pg_dump_compress.pl  | 611
3 files changed, 612 insertions(+), 408 deletions(-)
diff --git a/src/bin/pg_dump/meson.build b/src/bin/pg_dump/meson.build
index a2233b0a1b4..f3c669f484e 100644
--- a/src/bin/pg_dump/meson.build
+++ b/src/bin/pg_dump/meson.build
@@ -102,6 +102,7 @@ tests += {
't/003_pg_dump_with_server.pl',
't/004_pg_dump_parallel.pl',
't/005_pg_dump_filterfile.pl',
+ 't/006_pg_dump_compress.pl',
't/010_dump_connstr.pl',
],
},
diff --git a/src/bin/pg_dump/t/002_pg_dump.pl b/src/bin/pg_dump/t/002_pg_dump.pl
index 6be6888b977..b789cd2e863 100644
--- a/src/bin/pg_dump/t/002_pg_dump.pl
+++ b/src/bin/pg_dump/t/002_pg_dump.pl
@@ -20,22 +20,12 @@ my $tempdir = PostgreSQL::Test::Utils::tempdir;
# test_key indicates that a given run should simply use the same
# set of like/unlike tests as another run, and which run that is.
#
-# compile_option indicates if the commands run depend on a compilation
-# option, if any. This can be used to control if tests should be
-# skipped when a build dependency is not satisfied.
-#
# dump_cmd is the pg_dump command to run, which is an array of
# the full command and arguments to run. Note that this is run
# using $node->command_ok(), so the port does not need to be
# specified and is pulled from $PGPORT, which is set by the
# PostgreSQL::Test::Cluster system.
#
-# compress_cmd is the utility command for (de)compression, if any.
-# Note that this should generally be used on pg_dump's output
-# either to generate a text file to run the through the tests, or
-# to test pg_restore's ability to parse manually compressed files
-# that otherwise pg_dump does not compress on its own (e.g. *.toc).
-#
# glob_patterns is an optional array consisting of strings compilable
# with glob() to check the files generated after a dump.
#
@@ -55,8 +45,6 @@ my $tempdir = PostgreSQL::Test::Utils::tempdir;
my $supports_icu = ($ENV{with_icu} eq 'yes');
my $supports_gzip = check_pg_config("#define HAVE_LIBZ 1");
-my $supports_lz4 = check_pg_config("#define USE_LZ4 1");
-my $supports_zstd = check_pg_config("#define USE_ZSTD 1");
my %pgdump_runs = (
binary_upgrade => {
@@ -81,256 +69,6 @@ my %pgdump_runs = (
],
},
- # Do not use --no-sync to give test coverage for data sync.
- compression_gzip_custom => {
- test_key => 'compression',
- compile_option => 'gzip',
- dump_cmd => [
- 'pg_dump',
- '--format' => 'custom',
- '--compress' => '1',
- '--file' => "$tempdir/compression_gzip_custom.dump",
- '--statistics',
- 'postgres',
- ],
- restore_cmd => [
- 'pg_restore',
- '--file' => "$tempdir/compression_gzip_custom.sql",
- '--statistics',
- "$tempdir/compression_gzip_custom.dump",
- ],
- command_like => {
- command => [
- 'pg_restore', '--list',
- "$tempdir/compression_gzip_custom.dump",
- ],
- expected => qr/Compression: gzip/,
- name => 'data content is gzip-compressed'
- },
- },
-
- # Do not use --no-sync to give test coverage for data sync.
- compression_gzip_dir => {
- test_key => 'compression',
- compile_option => 'gzip',
- dump_cmd => [
- 'pg_dump',
- '--jobs' => '2',
- '--format' => 'directory',
- '--compress' => 'gzip:1',
- '--file' => "$tempdir/compression_gzip_dir",
- '--statistics',
- 'postgres',
- ],
- # Give coverage for manually compressed blobs.toc files during
- # restore.
- compress_cmd => {
- program => $ENV{'GZIP_PROGRAM'},
- args => [ '-f', "$tempdir/compression_gzip_dir/blobs_*.toc", ],
- },
- # Verify that only data files were compressed
- glob_patterns => [
- "$tempdir/compression_gzip_dir/toc.dat",
- "$tempdir/compression_gzip_dir/*.dat.gz",
- ],
- restore_cmd => [
- 'pg_restore',
- '--jobs' => '2',
- '--file' => "$tempdir/compression_gzip_dir.sql",
- '--statistics',
- "$tempdir/compression_gzip_dir",
- ],
- },
-
- compression_gzip_plain => {
- test_key => 'compression',
- compile_option => 'gzip',
- dump_cmd => [
- 'pg_dump',
- '--format' => 'plain',
- '--compress' => '1',
- '--file' => "$tempdir/compression_gzip_plain.sql.gz",
- '--statistics',
- 'postgres',
- ],
- # Decompress the generated file to run through the tests.
- compress_cmd => {
- program => $ENV{'GZIP_PROGRAM'},
- args => [ '-d', "$tempdir/compression_gzip_plain.sql.gz", ],
- },
- },
-
- # Do not use --no-sync to give test coverage for data sync.
- compression_lz4_custom => {
- test_key => 'compression',
- compile_option => 'lz4',
- dump_cmd => [
- 'pg_dump',
- '--format' => 'custom',
- '--compress' => 'lz4',
- '--file' => "$tempdir/compression_lz4_custom.dump",
- '--statistics',
- 'postgres',
- ],
- restore_cmd => [
- 'pg_restore',
- '--file' => "$tempdir/compression_lz4_custom.sql",
- '--statistics',
- "$tempdir/compression_lz4_custom.dump",
- ],
- command_like => {
- command => [
- 'pg_restore', '--list',
- "$tempdir/compression_lz4_custom.dump",
- ],
- expected => qr/Compression: lz4/,
- name => 'data content is lz4 compressed'
- },
- },
-
- # Do not use --no-sync to give test coverage for data sync.
- compression_lz4_dir => {
- test_key => 'compression',
- compile_option => 'lz4',
- dump_cmd => [
- 'pg_dump',
- '--jobs' => '2',
- '--format' => 'directory',
- '--compress' => 'lz4:1',
- '--file' => "$tempdir/compression_lz4_dir",
- '--statistics',
- 'postgres',
- ],
- # Give coverage for manually compressed blobs.toc files during
- # restore.
- compress_cmd => {
- program => $ENV{'LZ4'},
- args => [
- '-z', '-f', '-m', '--rm',
- "$tempdir/compression_lz4_dir/blobs_*.toc",
- ],
- },
- # Verify that data files were compressed
- glob_patterns => [
- "$tempdir/compression_lz4_dir/toc.dat",
- "$tempdir/compression_lz4_dir/*.dat.lz4",
- ],
- restore_cmd => [
- 'pg_restore',
- '--jobs' => '2',
- '--file' => "$tempdir/compression_lz4_dir.sql",
- '--statistics',
- "$tempdir/compression_lz4_dir",
- ],
- },
-
- compression_lz4_plain => {
- test_key => 'compression',
- compile_option => 'lz4',
- dump_cmd => [
- 'pg_dump',
- '--format' => 'plain',
- '--compress' => 'lz4',
- '--file' => "$tempdir/compression_lz4_plain.sql.lz4",
- '--statistics',
- 'postgres',
- ],
- # Decompress the generated file to run through the tests.
- compress_cmd => {
- program => $ENV{'LZ4'},
- args => [
- '-d', '-f',
- "$tempdir/compression_lz4_plain.sql.lz4",
- "$tempdir/compression_lz4_plain.sql",
- ],
- },
- },
-
- compression_zstd_custom => {
- test_key => 'compression',
- compile_option => 'zstd',
- dump_cmd => [
- 'pg_dump',
- '--format' => 'custom',
- '--compress' => 'zstd',
- '--file' => "$tempdir/compression_zstd_custom.dump",
- '--statistics',
- 'postgres',
- ],
- restore_cmd => [
- 'pg_restore',
- '--file' => "$tempdir/compression_zstd_custom.sql",
- '--statistics',
- "$tempdir/compression_zstd_custom.dump",
- ],
- command_like => {
- command => [
- 'pg_restore', '--list',
- "$tempdir/compression_zstd_custom.dump",
- ],
- expected => qr/Compression: zstd/,
- name => 'data content is zstd compressed'
- },
- },
-
- compression_zstd_dir => {
- test_key => 'compression',
- compile_option => 'zstd',
- dump_cmd => [
- 'pg_dump',
- '--jobs' => '2',
- '--format' => 'directory',
- '--compress' => 'zstd:1',
- '--file' => "$tempdir/compression_zstd_dir",
- '--statistics',
- 'postgres',
- ],
- # Give coverage for manually compressed blobs.toc files during
- # restore.
- compress_cmd => {
- program => $ENV{'ZSTD'},
- args => [
- '-z', '-f',
- '--rm', "$tempdir/compression_zstd_dir/blobs_*.toc",
- ],
- },
- # Verify that data files were compressed
- glob_patterns => [
- "$tempdir/compression_zstd_dir/toc.dat",
- "$tempdir/compression_zstd_dir/*.dat.zst",
- ],
- restore_cmd => [
- 'pg_restore',
- '--jobs' => '2',
- '--file' => "$tempdir/compression_zstd_dir.sql",
- '--statistics',
- "$tempdir/compression_zstd_dir",
- ],
- },
-
- # Exercise long mode for test coverage
- compression_zstd_plain => {
- test_key => 'compression',
- compile_option => 'zstd',
- dump_cmd => [
- 'pg_dump',
- '--format' => 'plain',
- '--compress' => 'zstd:long',
- '--file' => "$tempdir/compression_zstd_plain.sql.zst",
- '--statistics',
- 'postgres',
- ],
- # Decompress the generated file to run through the tests.
- compress_cmd => {
- program => $ENV{'ZSTD'},
- args => [
- '-d', '-f',
- "$tempdir/compression_zstd_plain.sql.zst", "-o",
- "$tempdir/compression_zstd_plain.sql",
- ],
- },
- },
-
clean => {
dump_cmd => [
'pg_dump', '--no-sync',
@@ -891,10 +629,6 @@ my %pgdump_runs = (
# of the pg_dump runs happening. This is what "seeds" the
# system with objects to be dumped out.
#
-# There can be a flag called 'lz4', which can be set if the test
-# case depends on LZ4. Tests marked with this flag are skipped if
-# the build used does not support LZ4.
-#
# Building of this hash takes a bit of time as all of the regexps
# included in it are compiled. This greatly improves performance
# as the regexps are used for each run the test applies to.
@@ -911,7 +645,6 @@ my %full_runs = (
binary_upgrade => 1,
clean => 1,
clean_if_exists => 1,
- compression => 1,
createdb => 1,
defaults => 1,
exclude_dump_test_schema => 1,
@@ -3210,31 +2943,6 @@ my %tests = (
},
},
- 'CREATE MATERIALIZED VIEW matview_compression' => {
- create_order => 20,
- create_sql => 'CREATE MATERIALIZED VIEW
- dump_test.matview_compression (col2) AS
- SELECT col2 FROM dump_test.test_table;
- ALTER MATERIALIZED VIEW dump_test.matview_compression
- ALTER COLUMN col2 SET COMPRESSION lz4;',
- regexp => qr/^
- \QCREATE MATERIALIZED VIEW dump_test.matview_compression AS\E
- \n\s+\QSELECT col2\E
- \n\s+\QFROM dump_test.test_table\E
- \n\s+\QWITH NO DATA;\E
- .*
- \QALTER TABLE ONLY dump_test.matview_compression ALTER COLUMN col2 SET COMPRESSION lz4;\E\n
- /xms,
- lz4 => 1,
- like =>
- { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
- unlike => {
- exclude_dump_test_schema => 1,
- no_toast_compression => 1,
- only_dump_measurement => 1,
- },
- },
-
'Check ordering of a matview that depends on a primary key' => {
create_order => 42,
create_sql => '
@@ -3691,51 +3399,6 @@ my %tests = (
},
},
- 'CREATE TABLE test_compression_method' => {
- create_order => 110,
- create_sql => 'CREATE TABLE dump_test.test_compression_method (
- col1 text
- );',
- regexp => qr/^
- \QCREATE TABLE dump_test.test_compression_method (\E\n
- \s+\Qcol1 text\E\n
- \Q);\E
- /xm,
- like => {
- %full_runs, %dump_test_schema_runs, section_pre_data => 1,
- },
- unlike => {
- exclude_dump_test_schema => 1,
- only_dump_measurement => 1,
- },
- },
-
- # Insert enough data to surpass DEFAULT_IO_BUFFER_SIZE during
- # (de)compression operations
- 'COPY test_compression_method' => {
- create_order => 111,
- create_sql => 'INSERT INTO dump_test.test_compression_method (col1) '
- . 'SELECT string_agg(a::text, \'\') FROM generate_series(1,4096) a;',
- regexp => qr/^
- \QCOPY dump_test.test_compression_method (col1) FROM stdin;\E
- \n(?:\d{15277}\n){1}\\\.\n
- /xm,
- like => {
- %full_runs,
- data_only => 1,
- no_schema => 1,
- section_data => 1,
- only_dump_test_schema => 1,
- test_schema_plus_large_objects => 1,
- },
- unlike => {
- binary_upgrade => 1,
- exclude_dump_test_schema => 1,
- schema_only => 1,
- schema_only_with_statistics => 1,
- },
- },
-
'CREATE TABLE fk_reference_test_table' => {
create_order => 21,
create_sql => 'CREATE TABLE dump_test.fk_reference_test_table (
@@ -3774,30 +3437,6 @@ my %tests = (
},
},
- 'CREATE TABLE test_compression' => {
- create_order => 3,
- create_sql => 'CREATE TABLE dump_test.test_compression (
- col1 int,
- col2 text COMPRESSION lz4
- );',
- regexp => qr/^
- \QCREATE TABLE dump_test.test_compression (\E\n
- \s+\Qcol1 integer,\E\n
- \s+\Qcol2 text\E\n
- \);\n
- .*
- \QALTER TABLE ONLY dump_test.test_compression ALTER COLUMN col2 SET COMPRESSION lz4;\E\n
- /xms,
- lz4 => 1,
- like =>
- { %full_runs, %dump_test_schema_runs, section_pre_data => 1, },
- unlike => {
- exclude_dump_test_schema => 1,
- no_toast_compression => 1,
- only_dump_measurement => 1,
- },
- },
-
'CREATE TABLE measurement PARTITIONED BY' => {
create_order => 90,
create_sql => 'CREATE TABLE dump_test.measurement (
@@ -5280,13 +4919,6 @@ foreach my $test (
next;
}
- # Skip tests specific to LZ4 if this build does not support
- # this option.
- if (!$supports_lz4 && defined($tests{$test}->{lz4}))
- {
- next;
- }
-
# Normalize command ending: strip all line endings, add
# semicolon if missing, add two newlines.
my $create_sql = $tests{$test}->{create_sql};
@@ -5463,42 +5095,9 @@ foreach my $run (sort keys %pgdump_runs)
my $test_key = $run;
my $run_db = 'postgres';
- # Skip command-level tests for gzip/lz4/zstd if the tool is not supported
- if ($pgdump_runs{$run}->{compile_option}
- && (($pgdump_runs{$run}->{compile_option} eq 'gzip'
- && !$supports_gzip)
- || ($pgdump_runs{$run}->{compile_option} eq 'lz4'
- && !$supports_lz4)
- || ($pgdump_runs{$run}->{compile_option} eq 'zstd'
- && !$supports_zstd)))
- {
- note
- "$run: skipped due to no $pgdump_runs{$run}->{compile_option} support";
- next;
- }
-
$node->command_ok(\@{ $pgdump_runs{$run}->{dump_cmd} },
"$run: pg_dump runs");
- if ($pgdump_runs{$run}->{compress_cmd})
- {
- my ($compress_cmd) = $pgdump_runs{$run}->{compress_cmd};
- my $compress_program = $compress_cmd->{program};
-
- # Skip the rest of the test if the compression program is
- # not defined.
- next if (!defined($compress_program) || $compress_program eq '');
-
- # Arguments may require globbing.
- my @full_compress_cmd = ($compress_program);
- foreach my $arg (@{ $compress_cmd->{args} })
- {
- push @full_compress_cmd, glob($arg);
- }
-
- command_ok(\@full_compress_cmd, "$run: compression commands");
- }
-
if ($pgdump_runs{$run}->{glob_patterns})
{
my $glob_patterns = $pgdump_runs{$run}->{glob_patterns};
@@ -5579,13 +5178,6 @@ foreach my $run (sort keys %pgdump_runs)
next;
}
- # Skip tests specific to LZ4 if this build does not support
- # this option.
- if (!$supports_lz4 && defined($tests{$test}->{lz4}))
- {
- next;
- }
-
if ($run_db ne $test_db)
{
next;
diff --git a/src/bin/pg_dump/t/006_pg_dump_compress.pl b/src/bin/pg_dump/t/006_pg_dump_compress.pl
new file mode 100644
index 00000000000..3737132645b
--- /dev/null
+++ b/src/bin/pg_dump/t/006_pg_dump_compress.pl
@@ -0,0 +1,611 @@
+
+# Copyright (c) 2021-2025, PostgreSQL Global Development Group
+
+###############################################################
+# This test script uses essentially the same structure as
+# 002_pg_dump.pl, but is specialized to deal with compression
+# concerns. As such, some of the test cases here are large
+# and would contribute undue amounts of runtime if they were
+# included in 002_pg_dump.pl.
+###############################################################
+
+use strict;
+use warnings FATAL => 'all';
+
+use PostgreSQL::Test::Cluster;
+use PostgreSQL::Test::Utils;
+use Test::More;
+
+my $tempdir = PostgreSQL::Test::Utils::tempdir;
+
+###############################################################
+# Definition of the pg_dump runs to make.
+#
+# In addition to the facilities explained in 002_pg_dump.pl,
+# these entries can include:
+#
+# compile_option indicates if the test depends on a compilation
+# option, if any. This can be used to control if tests should be
+# skipped when a build dependency is not satisfied.
+#
+# compress_cmd is the utility command for (de)compression, if any.
+# Note that this should generally be used on pg_dump's output
+# either to generate a text file to run through the tests, or
+# to test pg_restore's ability to parse manually compressed files
+# that otherwise pg_dump does not compress on its own (e.g. *.toc).
+
+my $supports_gzip = check_pg_config("#define HAVE_LIBZ 1");
+my $supports_lz4 = check_pg_config("#define USE_LZ4 1");
+my $supports_zstd = check_pg_config("#define USE_ZSTD 1");
+
+my %pgdump_runs = (
+ compression_gzip_custom => {
+ test_key => 'compression',
+ compile_option => 'gzip',
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ '--format' => 'custom',
+ '--compress' => '1',
+ '--file' => "$tempdir/compression_gzip_custom.dump",
+ '--statistics',
+ 'postgres',
+ ],
+ restore_cmd => [
+ 'pg_restore',
+ '--file' => "$tempdir/compression_gzip_custom.sql",
+ '--statistics',
+ "$tempdir/compression_gzip_custom.dump",
+ ],
+ command_like => {
+ command => [
+ 'pg_restore', '--list',
+ "$tempdir/compression_gzip_custom.dump",
+ ],
+ expected => qr/Compression: gzip/,
+ name => 'data content is gzip-compressed'
+ },
+ },
+
+ compression_gzip_dir => {
+ test_key => 'compression',
+ compile_option => 'gzip',
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ '--jobs' => '2',
+ '--format' => 'directory',
+ '--compress' => 'gzip:1',
+ '--file' => "$tempdir/compression_gzip_dir",
+ '--statistics',
+ 'postgres',
+ ],
+ # Give coverage for manually compressed blobs.toc files during
+ # restore.
+ compress_cmd => {
+ program => $ENV{'GZIP_PROGRAM'},
+ args => [ '-f', "$tempdir/compression_gzip_dir/blobs_*.toc", ],
+ },
+ # Verify that only data files were compressed
+ glob_patterns => [
+ "$tempdir/compression_gzip_dir/toc.dat",
+ "$tempdir/compression_gzip_dir/*.dat.gz",
+ ],
+ restore_cmd => [
+ 'pg_restore',
+ '--jobs' => '2',
+ '--file' => "$tempdir/compression_gzip_dir.sql",
+ '--statistics',
+ "$tempdir/compression_gzip_dir",
+ ],
+ },
+
+ compression_gzip_plain => {
+ test_key => 'compression',
+ compile_option => 'gzip',
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ '--format' => 'plain',
+ '--compress' => '1',
+ '--file' => "$tempdir/compression_gzip_plain.sql.gz",
+ '--statistics',
+ 'postgres',
+ ],
+ # Decompress the generated file to run through the tests.
+ compress_cmd => {
+ program => $ENV{'GZIP_PROGRAM'},
+ args => [ '-d', "$tempdir/compression_gzip_plain.sql.gz", ],
+ },
+ },
+
+ compression_lz4_custom => {
+ test_key => 'compression',
+ compile_option => 'lz4',
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ '--format' => 'custom',
+ '--compress' => 'lz4',
+ '--file' => "$tempdir/compression_lz4_custom.dump",
+ '--statistics',
+ 'postgres',
+ ],
+ restore_cmd => [
+ 'pg_restore',
+ '--file' => "$tempdir/compression_lz4_custom.sql",
+ '--statistics',
+ "$tempdir/compression_lz4_custom.dump",
+ ],
+ command_like => {
+ command => [
+ 'pg_restore', '--list',
+ "$tempdir/compression_lz4_custom.dump",
+ ],
+ expected => qr/Compression: lz4/,
+ name => 'data content is lz4 compressed'
+ },
+ },
+
+ compression_lz4_dir => {
+ test_key => 'compression',
+ compile_option => 'lz4',
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ '--jobs' => '2',
+ '--format' => 'directory',
+ '--compress' => 'lz4:1',
+ '--file' => "$tempdir/compression_lz4_dir",
+ '--statistics',
+ 'postgres',
+ ],
+ # Give coverage for manually compressed blobs.toc files during
+ # restore.
+ compress_cmd => {
+ program => $ENV{'LZ4'},
+ args => [
+ '-z', '-f', '-m', '--rm',
+ "$tempdir/compression_lz4_dir/blobs_*.toc",
+ ],
+ },
+ # Verify that data files were compressed
+ glob_patterns => [
+ "$tempdir/compression_lz4_dir/toc.dat",
+ "$tempdir/compression_lz4_dir/*.dat.lz4",
+ ],
+ restore_cmd => [
+ 'pg_restore',
+ '--jobs' => '2',
+ '--file' => "$tempdir/compression_lz4_dir.sql",
+ '--statistics',
+ "$tempdir/compression_lz4_dir",
+ ],
+ },
+
+ compression_lz4_plain => {
+ test_key => 'compression',
+ compile_option => 'lz4',
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ '--format' => 'plain',
+ '--compress' => 'lz4',
+ '--file' => "$tempdir/compression_lz4_plain.sql.lz4",
+ '--statistics',
+ 'postgres',
+ ],
+ # Decompress the generated file to run through the tests.
+ compress_cmd => {
+ program => $ENV{'LZ4'},
+ args => [
+ '-d', '-f',
+ "$tempdir/compression_lz4_plain.sql.lz4",
+ "$tempdir/compression_lz4_plain.sql",
+ ],
+ },
+ },
+
+ compression_zstd_custom => {
+ test_key => 'compression',
+ compile_option => 'zstd',
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ '--format' => 'custom',
+ '--compress' => 'zstd',
+ '--file' => "$tempdir/compression_zstd_custom.dump",
+ '--statistics',
+ 'postgres',
+ ],
+ restore_cmd => [
+ 'pg_restore',
+ '--file' => "$tempdir/compression_zstd_custom.sql",
+ '--statistics',
+ "$tempdir/compression_zstd_custom.dump",
+ ],
+ command_like => {
+ command => [
+ 'pg_restore', '--list',
+ "$tempdir/compression_zstd_custom.dump",
+ ],
+ expected => qr/Compression: zstd/,
+ name => 'data content is zstd compressed'
+ },
+ },
+
+ compression_zstd_dir => {
+ test_key => 'compression',
+ compile_option => 'zstd',
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ '--jobs' => '2',
+ '--format' => 'directory',
+ '--compress' => 'zstd:1',
+ '--file' => "$tempdir/compression_zstd_dir",
+ '--statistics',
+ 'postgres',
+ ],
+ # Give coverage for manually compressed blobs.toc files during
+ # restore.
+ compress_cmd => {
+ program => $ENV{'ZSTD'},
+ args => [
+ '-z', '-f',
+ '--rm', "$tempdir/compression_zstd_dir/blobs_*.toc",
+ ],
+ },
+ # Verify that data files were compressed
+ glob_patterns => [
+ "$tempdir/compression_zstd_dir/toc.dat",
+ "$tempdir/compression_zstd_dir/*.dat.zst",
+ ],
+ restore_cmd => [
+ 'pg_restore',
+ '--jobs' => '2',
+ '--file' => "$tempdir/compression_zstd_dir.sql",
+ '--statistics',
+ "$tempdir/compression_zstd_dir",
+ ],
+ },
+
+ # Exercise long mode for test coverage
+ compression_zstd_plain => {
+ test_key => 'compression',
+ compile_option => 'zstd',
+ dump_cmd => [
+ 'pg_dump', '--no-sync',
+ '--format' => 'plain',
+ '--compress' => 'zstd:long',
+ '--file' => "$tempdir/compression_zstd_plain.sql.zst",
+ '--statistics',
+ 'postgres',
+ ],
+ # Decompress the generated file to run through the tests.
+ compress_cmd => {
+ program => $ENV{'ZSTD'},
+ args => [
+ '-d', '-f',
+ "$tempdir/compression_zstd_plain.sql.zst", "-o",
+ "$tempdir/compression_zstd_plain.sql",
+ ],
+ },
+ },);
+
+###############################################################
+# Definition of the tests to run.
+#
+# In addition to the facilities explained in 002_pg_dump.pl,
+# these entries can include:
+#
+# compile_option indicates if the test depends on a compilation
+# option, if any. This can be used to control if tests should be
+# skipped when a build dependency is not satisfied.
+
+# Tests which are considered 'full' dumps by pg_dump, but there
+# are flags used to exclude specific items (ACLs, LOs, etc).
+my %full_runs = (compression => 1,);
+
+# This is where the actual tests are defined.
+my %tests = (
+ 'CREATE MATERIALIZED VIEW matview_compression_lz4' => {
+ create_order => 20,
+ create_sql => 'CREATE MATERIALIZED VIEW
+ matview_compression_lz4 (col2) AS
+ SELECT repeat(\'xyzzy\', 10000);
+ ALTER MATERIALIZED VIEW matview_compression_lz4
+ ALTER COLUMN col2 SET COMPRESSION lz4;',
+ regexp => qr/^
+ \QCREATE MATERIALIZED VIEW public.matview_compression_lz4 AS\E
+ \n\s+\QSELECT repeat('xyzzy'::text, 10000) AS col2\E
+ \n\s+\QWITH NO DATA;\E
+ .*
+ \QALTER TABLE ONLY public.matview_compression_lz4 ALTER COLUMN col2 SET COMPRESSION lz4;\E\n
+ /xms,
+ compile_option => 'lz4',
+ like => {%full_runs},
+ },
+
+ 'CREATE TABLE test_compression_method' => {
+ create_order => 110,
+ create_sql => 'CREATE TABLE test_compression_method (
+ col1 text
+ );',
+ regexp => qr/^
+ \QCREATE TABLE public.test_compression_method (\E\n
+ \s+\Qcol1 text\E\n
+ \Q);\E
+ /xm,
+ like => { %full_runs, },
+ },
+
+ # Insert enough data to surpass DEFAULT_IO_BUFFER_SIZE during
+ # (de)compression operations
+ 'COPY test_compression_method' => {
+ create_order => 111,
+ create_sql => 'INSERT INTO test_compression_method (col1) '
+ . 'SELECT string_agg(a::text, \'\') FROM generate_series(1,4096) a;',
+ regexp => qr/^
+ \QCOPY public.test_compression_method (col1) FROM stdin;\E
+ \n(?:\d{15277}\n){1}\\\.\n
+ /xm,
+ like => { %full_runs, },
+ },
+
+ 'CREATE TABLE test_compression' => {
+ create_order => 3,
+ create_sql => 'CREATE TABLE test_compression (
+ col1 int,
+ col2 text COMPRESSION lz4
+ );',
+ regexp => qr/^
+ \QCREATE TABLE public.test_compression (\E\n
+ \s+\Qcol1 integer,\E\n
+ \s+\Qcol2 text\E\n
+ \);\n
+ .*
+ \QALTER TABLE ONLY public.test_compression ALTER COLUMN col2 SET COMPRESSION lz4;\E\n
+ /xms,
+ compile_option => 'lz4',
+ like => {%full_runs},
+ },
+
+ # Create a large object so we can test compression of blobs.toc
+ 'LO create (using lo_from_bytea)' => {
+ create_order => 50,
+ create_sql =>
+ 'SELECT pg_catalog.lo_from_bytea(0, \'\\x310a320a330a340a350a360a370a380a390a\');',
+ regexp => qr/^SELECT pg_catalog\.lo_create\('\d+'\);/m,
+ like => { %full_runs, },
+ },
+
+ 'LO load (using lo_from_bytea)' => {
+ regexp => qr/^
+ \QSELECT pg_catalog.lo_open\E \('\d+',\ \d+\);\n
+ \QSELECT pg_catalog.lowrite(0, \E
+ \Q'\x310a320a330a340a350a360a370a380a390a');\E\n
+ \QSELECT pg_catalog.lo_close(0);\E
+ /xm,
+ like => { %full_runs, },
+ },);
+
+#########################################
+# Create a PG instance to test actually dumping from
+
+my $node = PostgreSQL::Test::Cluster->new('main');
+$node->init;
+$node->start;
+
+my $port = $node->port;
+
+#########################################
+# Set up schemas, tables, etc, to be dumped.
+
+# Build up the create statements
+my %create_sql = ();
+
+foreach my $test (
+ sort {
+ if ($tests{$a}->{create_order} and $tests{$b}->{create_order})
+ {
+ $tests{$a}->{create_order} <=> $tests{$b}->{create_order};
+ }
+ elsif ($tests{$a}->{create_order})
+ {
+ -1;
+ }
+ elsif ($tests{$b}->{create_order})
+ {
+ 1;
+ }
+ else
+ {
+ 0;
+ }
+ } keys %tests)
+{
+ my $test_db = 'postgres';
+
+ if (defined($tests{$test}->{database}))
+ {
+ $test_db = $tests{$test}->{database};
+ }
+
+ # Skip tests that require an unsupported compile option
+ if ($tests{$test}->{compile_option}
+ && (($tests{$test}->{compile_option} eq 'gzip' && !$supports_gzip)
+ || ($tests{$test}->{compile_option} eq 'lz4'
+ && !$supports_lz4)
+ || ($tests{$test}->{compile_option} eq 'zstd'
+ && !$supports_zstd)))
+ {
+ next;
+ }
+
+ if ($tests{$test}->{create_sql})
+ {
+ # Normalize command ending: strip all line endings, add
+ # semicolon if missing, add two newlines.
+ my $create_sql = $tests{$test}->{create_sql};
+ chomp $create_sql;
+ $create_sql .= ';' unless substr($create_sql, -1) eq ';';
+ $create_sql{$test_db} .= $create_sql . "\n\n";
+ }
+}
+
+# Send the combined set of commands to psql
+foreach my $db (sort keys %create_sql)
+{
+ $node->safe_psql($db, $create_sql{$db});
+}
+
+#########################################
+# Run all runs
+
+foreach my $run (sort keys %pgdump_runs)
+{
+ my $test_key = $run;
+ my $run_db = 'postgres';
+
+ # Skip runs that require an unsupported compile option
+ if ($pgdump_runs{$run}->{compile_option}
+ && (($pgdump_runs{$run}->{compile_option} eq 'gzip'
+ && !$supports_gzip)
+ || ($pgdump_runs{$run}->{compile_option} eq 'lz4'
+ && !$supports_lz4)
+ || ($pgdump_runs{$run}->{compile_option} eq 'zstd'
+ && !$supports_zstd)))
+ {
+ note
+ "$run: skipped due to no $pgdump_runs{$run}->{compile_option} support";
+ next;
+ }
+
+ $node->command_ok(\@{ $pgdump_runs{$run}->{dump_cmd} },
+ "$run: pg_dump runs");
+
+ if ($pgdump_runs{$run}->{compress_cmd})
+ {
+ my ($compress_cmd) = $pgdump_runs{$run}->{compress_cmd};
+ my $compress_program = $compress_cmd->{program};
+
+ # Skip the rest of the test if the compression program is
+ # not defined.
+ next if (!defined($compress_program) || $compress_program eq '');
+
+ # Arguments may require globbing.
+ my @full_compress_cmd = ($compress_program);
+ foreach my $arg (@{ $compress_cmd->{args} })
+ {
+ push @full_compress_cmd, glob($arg);
+ }
+
+ command_ok(\@full_compress_cmd, "$run: compression commands");
+ }
+
+ if ($pgdump_runs{$run}->{glob_patterns})
+ {
+ my $glob_patterns = $pgdump_runs{$run}->{glob_patterns};
+ foreach my $glob_pattern (@{$glob_patterns})
+ {
+ my @glob_output = glob($glob_pattern);
+ is(scalar(@glob_output) > 0,
+ 1, "$run: glob check for $glob_pattern");
+ }
+ }
+
+ if ($pgdump_runs{$run}->{command_like})
+ {
+ my $cmd_like = $pgdump_runs{$run}->{command_like};
+ $node->command_like(
+ \@{ $cmd_like->{command} },
+ $cmd_like->{expected},
+ "$run: " . $cmd_like->{name});
+ }
+
+ if ($pgdump_runs{$run}->{restore_cmd})
+ {
+ $node->command_ok(\@{ $pgdump_runs{$run}->{restore_cmd} },
+ "$run: pg_restore runs");
+ }
+
+ if ($pgdump_runs{$run}->{test_key})
+ {
+ $test_key = $pgdump_runs{$run}->{test_key};
+ }
+
+ my $output_file = slurp_file("$tempdir/${run}.sql");
+
+ #########################################
+ # Run all tests where this run is included
+ # as either a 'like' or 'unlike' test.
+
+ foreach my $test (sort keys %tests)
+ {
+ my $test_db = 'postgres';
+
+ if (defined($pgdump_runs{$run}->{database}))
+ {
+ $run_db = $pgdump_runs{$run}->{database};
+ }
+
+ if (defined($tests{$test}->{database}))
+ {
+ $test_db = $tests{$test}->{database};
+ }
+
+ # Check for proper test definitions
+ #
+ # Either "all_runs" should be set or there should be a "like" list,
+ # even if it is empty. (This makes the test more self-documenting.)
+ if ( !defined($tests{$test}->{all_runs})
+ && !defined($tests{$test}->{like}))
+ {
+ die "missing \"like\" in test \"$test\"";
+ }
+ # Check for useless entries in "unlike" list. Runs that are
+ # not listed in "like" don't need to be excluded in "unlike".
+ if ($tests{$test}->{unlike}->{$test_key}
+ && !defined($tests{$test}->{like}->{$test_key}))
+ {
+ die "useless \"unlike\" entry \"$test_key\" in test \"$test\"";
+ }
+
+ # Skip tests that require an unsupported compile option
+ if ($tests{$test}->{compile_option}
+ && (($tests{$test}->{compile_option} eq 'gzip' && !$supports_gzip)
+ || ($tests{$test}->{compile_option} eq 'lz4'
+ && !$supports_lz4)
+ || ($tests{$test}->{compile_option} eq 'zstd'
+ && !$supports_zstd)))
+ {
+ next;
+ }
+
+ if ($run_db ne $test_db)
+ {
+ next;
+ }
+
+ # Run the test if all_runs is set or if listed as a like, unless it is
+ # specifically noted as an unlike (generally due to an explicit
+ # exclusion or similar).
+ if (($tests{$test}->{like}->{$test_key} || $tests{$test}->{all_runs})
+ && !defined($tests{$test}->{unlike}->{$test_key}))
+ {
+ if (!ok($output_file =~ $tests{$test}->{regexp},
+ "$run: should dump $test"))
+ {
+ diag("Review $run results in $tempdir");
+ }
+ }
+ else
+ {
+ if (!ok($output_file !~ $tests{$test}->{regexp},
+ "$run: should not dump $test"))
+ {
+ diag("Review $run results in $tempdir");
+ }
+ }
+ }
+}
+
+#########################################
+# Stop the database instance, which will be removed at the end of the tests.
+
+$node->stop('fast');
+
+done_testing();
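
Note (not part of the patch): the \d{15277} in the 'COPY test_compression_method' regexp is the exact length of the single value produced by string_agg over generate_series(1,4096). A minimal Perl sketch of that arithmetic, for readers checking the constant:

    #!/usr/bin/perl
    use strict;
    use warnings;

    # Concatenating the decimal forms of 1..4096 yields:
    #   1..9       ->    9 x 1 =     9 digits
    #   10..99     ->   90 x 2 =   180 digits
    #   100..999   ->  900 x 3 =  2700 digits
    #   1000..4096 -> 3097 x 4 = 12388 digits
    # for a total of 15277 digits, matching \d{15277} in the COPY regexp.
    my $len = 0;
    $len += length($_) for (1 .. 4096);
    print "$len\n";    # prints 15277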