Location via proxy:   [ UP ]  
[Report a bug]   [Manage cookies]                
Skip to content

Commit 1a05c1d

Browse files
committed
Advance input pointer when LZ4 compressing data
LZ4File_write() did not advance the input pointer on subsequent invocations of LZ4F_compressUpdate(). As a result, the generated compressed output would be a compressed version of the same input chunk. Tests failed to catch this error because the data would comfortably fit within the default buffer size, as a single chunk. Tests have been added to provide adequate coverage of multi-chunk compression. WriteDataToArchiveLZ4(), which also uses LZ4F_compressUpdate(), did not suffer from this omission. Author: Georgios Kokolatos <gkokolatos@pm.me> Reported-by: Michael Paquier <michael@paquier.xyz> Discussion: https://postgr.es/m/ZFhCyn4Gm2eu60rB%40paquier.xyz
1 parent 3c18d90 commit 1a05c1d

File tree

3 files changed

+55
-1
lines changed

3 files changed

+55
-1
lines changed

src/bin/pg_dump/compress_io.h

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,13 @@
1717

1818
#include "pg_backup_archiver.h"
1919

20-
/* Default size used for IO buffers */
20+
/*
21+
* Default size used for IO buffers
22+
*
23+
* When changing this value, it's necessary to check the relevant test cases
24+
* still exercise all the branches. This applies especially if the value is
25+
* increased, in which case the overflow buffer may not be needed.
26+
*/
2127
#define DEFAULT_IO_BUFFER_SIZE 4096
2228

2329
extern char *supports_compression(const pg_compress_specification compression_spec);

src/bin/pg_dump/compress_lz4.c

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -588,6 +588,8 @@ LZ4Stream_write(const void *ptr, size_t size, CompressFileHandle *CFH)
588588
errno = (errno) ? errno : ENOSPC;
589589
return false;
590590
}
591+
592+
ptr = ((const char *) ptr) + chunk;
591593
}
592594

593595
return true;

src/bin/pg_dump/t/002_pg_dump.pl

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3108,6 +3108,52 @@
31083108
},
31093109
},
31103110
3111+
'CREATE TABLE test_compression_method' => {
3112+
create_order => 110,
3113+
create_sql => 'CREATE TABLE dump_test.test_compression_method (
3114+
col1 text
3115+
);',
3116+
regexp => qr/^
3117+
\QCREATE TABLE dump_test.test_compression_method (\E\n
3118+
\s+\Qcol1 text\E\n
3119+
\Q);\E
3120+
/xm,
3121+
like => {
3122+
%full_runs,
3123+
%dump_test_schema_runs,
3124+
section_pre_data => 1,
3125+
},
3126+
unlike => {
3127+
exclude_dump_test_schema => 1,
3128+
only_dump_measurement => 1,
3129+
},
3130+
},
3131+
3132+
# Insert enough data to surpass DEFAULT_IO_BUFFER_SIZE during
3133+
# (de)compression operations
3134+
'COPY test_compression_method' => {
3135+
create_order => 111,
3136+
create_sql => 'INSERT INTO dump_test.test_compression_method (col1) '
3137+
. 'SELECT string_agg(a::text, \'\') FROM generate_series(1,4096) a;',
3138+
regexp => qr/^
3139+
\QCOPY dump_test.test_compression_method (col1) FROM stdin;\E
3140+
\n(?:\d{15277}\n){1}\\\.\n
3141+
/xm,
3142+
like => {
3143+
%full_runs,
3144+
data_only => 1,
3145+
section_data => 1,
3146+
only_dump_test_schema => 1,
3147+
test_schema_plus_large_objects => 1,
3148+
},
3149+
unlike => {
3150+
binary_upgrade => 1,
3151+
exclude_dump_test_schema => 1,
3152+
schema_only => 1,
3153+
only_dump_measurement => 1,
3154+
},
3155+
},
3156+
31113157
'CREATE TABLE fk_reference_test_table' => {
31123158
create_order => 21,
31133159
create_sql => 'CREATE TABLE dump_test.fk_reference_test_table (

0 commit comments

Comments
 (0)