@@ -66,7 +66,7 @@ typedef struct check_network_data
#define token_matches(t, k)  (strcmp(t->string, k) == 0)

/*
- * A single string token lexed from the HBA config file, together with whether
+ * A single string token lexed from a config file, together with whether
 * the token had been quoted.
 */
typedef struct HbaToken
@@ -75,6 +75,19 @@ typedef struct HbaToken
    bool        quoted;
} HbaToken;

+/*
+ * TokenizedLine represents one line lexed from a config file.
+ * Each item in the "fields" list is a sub-list of HbaTokens.
+ * We don't emit a TokenizedLine for empty or all-comment lines,
+ * so "fields" is never NIL (nor are any of its sub-lists).
+ */
+typedef struct TokenizedLine
+{
+    List       *fields;         /* List of lists of HbaTokens */
+    int         line_num;       /* Line number */
+    char       *raw_line;       /* Raw line text */
+} TokenizedLine;
+
/*
 * pre-parsed content of HBA config file: list of HbaLine structs.
 * parsed_hba_context is the memory context where it lives.
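To make the nesting concrete, here is a minimal sketch (illustrative only, not part of the patch) of how a consumer could walk one TokenizedLine. The helper name and the DEBUG1 logging are assumptions; the List API (foreach/lfirst) and the HbaToken fields (string, quoted) come from the surrounding code.

/* Hypothetical helper: dump the fields and tokens of one TokenizedLine. */
static void
print_tokenized_line(TokenizedLine *tok_line)
{
    ListCell   *field;

    elog(DEBUG1, "line %d: %s", tok_line->line_num, tok_line->raw_line);
    foreach(field, tok_line->fields)
    {
        List       *tokens = (List *) lfirst(field);
        ListCell   *tc;

        foreach(tc, tokens)
        {
            HbaToken   *tok = (HbaToken *) lfirst(tc);

            elog(DEBUG1, "  token \"%s\"%s", tok->string,
                 tok->quoted ? " (quoted)" : "");
        }
    }
}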
@@ -95,7 +108,7 @@ static MemoryContext parsed_ident_context = NULL;
static MemoryContext tokenize_file(const char *filename, FILE *file,
-              List **lines, List **line_nums, List **raw_lines);
+              List **tok_lines);
static List *tokenize_inc_file(List *tokens, const char *outer_filename,
               const char *inc_filename);
static bool parse_hba_auth_opt(char *name, char *val, HbaLine *hbaline,
@@ -305,7 +318,6 @@ tokenize_inc_file(List *tokens,
    char       *inc_fullname;
    FILE       *inc_file;
    List       *inc_lines;
-   List       *inc_line_nums;
    ListCell   *inc_line;
    MemoryContext linecxt;
@@ -337,17 +349,18 @@ tokenize_inc_file(List *tokens,
    }

    /* There is possible recursion here if the file contains @ */
-   linecxt = tokenize_file(inc_fullname, inc_file, &inc_lines, &inc_line_nums, NULL);
+   linecxt = tokenize_file(inc_fullname, inc_file, &inc_lines);

    FreeFile(inc_file);
    pfree(inc_fullname);

+   /* Copy all tokens found in the file and append to the tokens list */
    foreach(inc_line, inc_lines)
    {
-       List       *inc_fields = lfirst(inc_line);
+       TokenizedLine *tok_line = (TokenizedLine *) lfirst(inc_line);
        ListCell   *inc_field;

-       foreach(inc_field, inc_fields)
+       foreach(inc_field, tok_line->fields)
        {
            List       *inc_tokens = lfirst(inc_field);
            ListCell   *inc_token;
@@ -366,23 +379,18 @@ tokenize_inc_file(List *tokens,
}

/*
- * Tokenize the given file, storing the resulting data into three Lists: a
- * List of lines, a List of line numbers, and a List of raw line contents.
+ * Tokenize the given file.
 *
- * The list of lines is a triple-nested List structure. Each line is a List of
- * fields, and each field is a List of HbaTokens.
+ * The output is a list of TokenizedLine structs; see struct definition above.
 *
 * filename must be the absolute path to the target file.
 *
 * Return value is a memory context which contains all memory allocated by
- * this function.
+ * this function (it's a child of caller's context).
 */
static MemoryContext
-tokenize_file(const char *filename, FILE *file,
-             List **lines, List **line_nums, List **raw_lines)
+tokenize_file(const char *filename, FILE *file, List **tok_lines)
{
-   List       *current_line = NIL;
-   List       *current_field = NIL;
    int         line_number = 1;
    MemoryContext linecxt;
    MemoryContext oldcxt;
@@ -392,12 +400,13 @@ tokenize_file(const char *filename, FILE *file,
                                     ALLOCSET_SMALL_SIZES);
    oldcxt = MemoryContextSwitchTo(linecxt);

-   *lines = *line_nums = NIL;
+   *tok_lines = NIL;

    while (!feof(file) && !ferror(file))
    {
        char        rawline[MAX_LINE];
        char       *lineptr;
+       List       *current_line = NIL;

        if (!fgets(rawline, sizeof(rawline), file))
            break;
@@ -414,32 +423,30 @@ tokenize_file(const char *filename, FILE *file,
        while (lineptr >= rawline && (*lineptr == '\n' || *lineptr == '\r'))
            *lineptr-- = '\0';

+       /* Parse fields */
        lineptr = rawline;
-       while (strlen(lineptr) > 0)
+       while (*lineptr)
        {
+           List       *current_field;
+
            current_field = next_field_expand(filename, &lineptr);
+           /* add field to line, unless we are at EOL or comment start */
+           if (current_field != NIL)
+               current_line = lappend(current_line, current_field);
+       }

-           /* add tokens to list, unless we are at EOL or comment start */
-           if (list_length(current_field) > 0)
-           {
-               if (current_line == NIL)
-               {
-                   /* make a new line List, record its line number */
-                   current_line = lappend(current_line, current_field);
-                   *lines = lappend(*lines, current_line);
-                   *line_nums = lappend_int(*line_nums, line_number);
-                   if (raw_lines)
-                       *raw_lines = lappend(*raw_lines, pstrdup(rawline));
-               }
-               else
-               {
-                   /* append tokens to current line's list */
-                   current_line = lappend(current_line, current_field);
-               }
-           }
+       /* Reached EOL; emit line to TokenizedLine list unless it's boring */
+       if (current_line != NIL)
+       {
+           TokenizedLine *tok_line;
+
+           tok_line = (TokenizedLine *) palloc(sizeof(TokenizedLine));
+           tok_line->fields = current_line;
+           tok_line->line_num = line_number;
+           tok_line->raw_line = pstrdup(rawline);
+           *tok_lines = lappend(*tok_lines, tok_line);
        }
-       /* we are at real or logical EOL, so force a new line List */
-       current_line = NIL;
+
        line_number++;
    }
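The shape of the result is easiest to see on a sample line. The following is an illustrative comment, not part of the patch; the sample line and the claim about next_field_expand's comma handling are assumptions based on the surrounding code.

/*
 * Illustration only: given a pg_hba.conf line such as
 *
 *     host  all  alice,bob  127.0.0.1/32  trust
 *
 * the loop above emits one TokenizedLine whose line_num and raw_line record
 * the source line, and whose "fields" list holds five sub-lists of HbaToken:
 *
 *     ["host"]  ["all"]  ["alice", "bob"]  ["127.0.0.1/32"]  ["trust"]
 *
 * assuming next_field_expand splits the comma-separated user field into two
 * tokens within the one field.
 */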
@@ -789,7 +796,7 @@ check_same_host_or_net(SockAddr *raddr, IPCompareMethod method)
        ereport(LOG, \
                (errcode(ERRCODE_CONFIG_FILE_ERROR), \
                 errmsg("missing entry in file \"%s\" at end of line %d", \
-                       IdentFileName, line_number))); \
+                       IdentFileName, line_num))); \
        return NULL; \
    } \
} while (0);
@@ -800,26 +807,26 @@ check_same_host_or_net(SockAddr *raddr, IPCompareMethod method)
                (errcode(ERRCODE_CONFIG_FILE_ERROR), \
                 errmsg("multiple values in ident field"), \
                 errcontext("line %d of configuration file \"%s\"", \
-                           line_number, IdentFileName))); \
+                           line_num, IdentFileName))); \
        return NULL; \
    } \
} while (0);


/*
 * Parse one tokenised line from the hba config file and store the result in a
- * HbaLine structure, or NULL if parsing fails.
+ * HbaLine structure.
 *
- * The tokenised line is a List of fields, each field being a List of
- * HbaTokens.
+ * Return NULL if parsing fails.
 *
 * Note: this function leaks memory when an error occurs. Caller is expected
 * to have set a memory context that will be reset if this function returns
 * NULL.
 */
static HbaLine *
-parse_hba_line(List *line, int line_num, char *raw_line)
+parse_hba_line(TokenizedLine *tok_line)
{
+   int         line_num = tok_line->line_num;
    char       *str;
    struct addrinfo *gai_result;
    struct addrinfo hints;
@@ -834,10 +841,11 @@ parse_hba_line(List *line, int line_num, char *raw_line)
    parsedline = palloc0(sizeof(HbaLine));
    parsedline->linenumber = line_num;
-   parsedline->rawline = pstrdup(raw_line);
+   parsedline->rawline = pstrdup(tok_line->raw_line);

    /* Check the record type. */
-   field = list_head(line);
+   Assert(tok_line->fields != NIL);
+   field = list_head(tok_line->fields);
    tokens = lfirst(field);
    if (tokens->length > 1)
    {
@@ -1769,11 +1777,7 @@ load_hba(void)
{
    FILE       *file;
    List       *hba_lines = NIL;
-   List       *hba_line_nums = NIL;
-   List       *hba_raw_lines = NIL;
-   ListCell   *line,
-              *line_num,
-              *raw_line;
+   ListCell   *line;
    List       *new_parsed_lines = NIL;
    bool        ok = true;
    MemoryContext linecxt;
@@ -1790,7 +1794,7 @@ load_hba(void)
        return false;
    }

-   linecxt = tokenize_file(HbaFileName, file, &hba_lines, &hba_line_nums, &hba_raw_lines);
+   linecxt = tokenize_file(HbaFileName, file, &hba_lines);
    FreeFile(file);

    /* Now parse all the lines */
@@ -1799,11 +1803,12 @@ load_hba(void)
                                       "hba parser context",
                                       ALLOCSET_SMALL_SIZES);
    oldcxt = MemoryContextSwitchTo(hbacxt);
-   forthree(line, hba_lines, line_num, hba_line_nums, raw_line, hba_raw_lines)
+   foreach(line, hba_lines)
    {
+       TokenizedLine *tok_line = (TokenizedLine *) lfirst(line);
        HbaLine    *newline;

-       if ((newline = parse_hba_line(lfirst(line), lfirst_int(line_num), lfirst(raw_line))) == NULL)
+       if ((newline = parse_hba_line(tok_line)) == NULL)
        {
            /*
             * Parse error in the file, so indicate there's a problem. NB: a
@@ -1861,9 +1866,9 @@ load_hba(void)
/*
 * Parse one tokenised line from the ident config file and store the result in
- * an IdentLine structure, or NULL if parsing fails.
+ * an IdentLine structure.
 *
- * The tokenised line is a nested List of fields and tokens.
+ * Return NULL if parsing fails.
 *
 * If ident_user is a regular expression (ie. begins with a slash), it is
 * compiled and stored in IdentLine structure.
@@ -1873,18 +1878,19 @@ load_hba(void)
 * NULL.
 */
static IdentLine *
-parse_ident_line(List *line, int line_number)
+parse_ident_line(TokenizedLine *tok_line)
{
+   int         line_num = tok_line->line_num;
    ListCell   *field;
    List       *tokens;
    HbaToken   *token;
    IdentLine  *parsedline;

-   Assert(line != NIL);
-   field = list_head(line);
+   Assert(tok_line->fields != NIL);
+   field = list_head(tok_line->fields);

    parsedline = palloc0(sizeof(IdentLine));
-   parsedline->linenumber = line_number;
+   parsedline->linenumber = line_num;

    /* Get the map token (must exist) */
    tokens = lfirst(field);
@@ -2144,9 +2150,7 @@ load_ident(void)
{
    FILE       *file;
    List       *ident_lines = NIL;
-   List       *ident_line_nums = NIL;
    ListCell   *line_cell,
-              *num_cell,
               *parsed_line_cell;
    List       *new_parsed_lines = NIL;
    bool        ok = true;
@@ -2166,7 +2170,7 @@ load_ident(void)
        return false;
    }

-   linecxt = tokenize_file(IdentFileName, file, &ident_lines, &ident_line_nums, NULL);
+   linecxt = tokenize_file(IdentFileName, file, &ident_lines);
    FreeFile(file);

    /* Now parse all the lines */
@@ -2175,9 +2179,11 @@ load_ident(void)
                                          "ident parser context",
                                          ALLOCSET_SMALL_SIZES);
    oldcxt = MemoryContextSwitchTo(ident_context);
-   forboth(line_cell, ident_lines, num_cell, ident_line_nums)
+   foreach(line_cell, ident_lines)
    {
-       if ((newline = parse_ident_line(lfirst(line_cell), lfirst_int(num_cell))) == NULL)
+       TokenizedLine *tok_line = (TokenizedLine *) lfirst(line_cell);
+
+       if ((newline = parse_ident_line(tok_line)) == NULL)
        {
            /*
             * Parse error in the file, so indicate there's a problem. Free