Commit 1e5a5d03 authored by Tom Lane

Simplify some long-obsolete code in hba.c's next_token().

next_token() oddly set its buffer space consumption limit to one before
the last char position in the buffer, not the last as you'd expect.
The reason is there was once an ugly kluge to mark keywords by appending
a newline to them, potentially requiring one more byte.  Commit e5e2fc84
removed that kluge, but failed to notice that the length limit could be
increased.

Also, remove some vestigial handling of newline characters in the buffer.
That was left over from when this function read the file directly using
getc().  Commit 7f49a67f changed it to read from a buffer, from which
tokenize_file had already removed the only possible occurrence of newline,
but did not simplify this function in consequence.

Also, ensure that we don't return with *lineptr set to someplace past the
terminating '\0'; that would be catastrophic if a caller were to ask for
another token from the same line.  This is just latent since no callers
actually do call again after a "false" return; but considering that it was
actually costing us extra code to do it wrong, we might as well make it
bulletproof.

Noted while reviewing pg_hba_file_rules patch.
parent de16ab72
...@@ -197,42 +197,32 @@ next_token(char **lineptr, char *buf, int bufsz, ...@@ -197,42 +197,32 @@ next_token(char **lineptr, char *buf, int bufsz,
{ {
int c; int c;
char *start_buf = buf; char *start_buf = buf;
char *end_buf = buf + (bufsz - 2); char *end_buf = buf + (bufsz - 1);
bool in_quote = false; bool in_quote = false;
bool was_quote = false; bool was_quote = false;
bool saw_quote = false; bool saw_quote = false;
/* end_buf reserves two bytes to ensure we can append \n and \0 */
Assert(end_buf > start_buf); Assert(end_buf > start_buf);
*initial_quote = false; *initial_quote = false;
*terminating_comma = false; *terminating_comma = false;
/* Move over initial whitespace and commas */ /* Move over any whitespace and commas preceding the next token */
while ((c = (*(*lineptr)++)) != '\0' && (pg_isblank(c) || c == ',')) while ((c = (*(*lineptr)++)) != '\0' && (pg_isblank(c) || c == ','))
; ;
if (c == '\0' || c == '\n')
{
*buf = '\0';
return false;
}
/* /*
* Build a token in buf of next characters up to EOF, EOL, unquoted comma, * Build a token in buf of next characters up to EOL, unquoted comma, or
* or unquoted whitespace. * unquoted whitespace.
*/ */
while (c != '\0' && c != '\n' && while (c != '\0' &&
(!pg_isblank(c) || in_quote)) (!pg_isblank(c) || in_quote))
{ {
/* skip comments to EOL */ /* skip comments to EOL */
if (c == '#' && !in_quote) if (c == '#' && !in_quote)
{ {
while ((c = (*(*lineptr)++)) != '\0' && c != '\n') while ((c = (*(*lineptr)++)) != '\0')
; ;
/* If only comment, consume EOL too; return EOL */
if (c != '\0' && buf == start_buf)
(*lineptr)++;
break; break;
} }
...@@ -245,12 +235,14 @@ next_token(char **lineptr, char *buf, int bufsz, ...@@ -245,12 +235,14 @@ next_token(char **lineptr, char *buf, int bufsz,
start_buf))); start_buf)));
*err_msg = "authentication file token too long"; *err_msg = "authentication file token too long";
/* Discard remainder of line */ /* Discard remainder of line */
while ((c = (*(*lineptr)++)) != '\0' && c != '\n') while ((c = (*(*lineptr)++)) != '\0')
; ;
/* Un-eat the '\0', in case we're called again */
(*lineptr)--;
return false; return false;
} }
/* we do not pass back the comma in the token */ /* we do not pass back a terminating comma in the token */
if (c == ',' && !in_quote) if (c == ',' && !in_quote)
{ {
*terminating_comma = true; *terminating_comma = true;
...@@ -278,8 +270,8 @@ next_token(char **lineptr, char *buf, int bufsz, ...@@ -278,8 +270,8 @@ next_token(char **lineptr, char *buf, int bufsz,
} }
/* /*
* Put back the char right after the token (critical in case it is EOL, * Un-eat the char right after the token (critical in case it is '\0',
* since we need to detect end-of-line at next call). * else next call will read past end of string).
*/ */
(*lineptr)--; (*lineptr)--;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment