Eliminate fixed token-length limit in hba.c.

Historically, hba.c limited tokens in the authentication configuration
files (pg_hba.conf and pg_ident.conf) to less than 256 bytes.  We have
seen a few reports of this limit causing problems; notably, for
moderately-complex LDAP configurations.  Let's get rid of the fixed
limit by using a StringInfo instead of a fixed-size buffer.
This actually takes less code than before, since we can get rid of
a nontrivial error recovery stanza.  It's doubtless a hair slower,
but parsing the content of the HBA files should in no way be
performance-critical.
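
For readers unfamiliar with the idiom, here is a minimal sketch of the before/after
pattern (not the hba.c code itself): a fixed char[] must be bounds-checked on every
append, whereas a StringInfo buffer is grown automatically by appendStringInfoChar().
The helper and caller names below are made up for illustration; only the StringInfo
calls are the real API.

#include "postgres.h"
#include "lib/stringinfo.h"

/* Hypothetical helper: copy one space-delimited word, with no length cap */
static void
collect_word(const char **src, StringInfo buf)
{
	resetStringInfo(buf);		/* reuse the caller's buffer for each word */
	while (**src != '\0' && **src != ' ')
		appendStringInfoChar(buf, *(*src)++);	/* grows as needed */
}

/* Hypothetical caller showing the allocate/reuse/free lifecycle */
static void
collect_all_words(const char *line)
{
	StringInfoData buf;

	initStringInfo(&buf);
	while (*line != '\0')
	{
		collect_word(&line, &buf);
		if (buf.len > 0)
			elog(DEBUG1, "word: %s", buf.data);	/* buf.data is NUL-terminated */
		if (*line == ' ')
			line++;
	}
	pfree(buf.data);
}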

Although this is a pretty straightforward patch, it doesn't seem
worth the risk to back-patch given the small number of complaints
to date.  In released branches, we'll just raise MAX_TOKEN to
ameliorate the problem.

Discussion: https://postgr.es/m/1588937.1690221208@sss.pgh.pa.us
Tom Lane 2023-07-27 11:56:35 -04:00
parent c1308ce2d9
commit de3f0e3fe0
1 changed file with 26 additions and 48 deletions

src/backend/libpq/hba.c

@@ -56,8 +56,6 @@
 #endif
 
-#define MAX_TOKEN	256
-
 /* callback data for check_network_callback */
 typedef struct check_network_data
 {
@@ -161,8 +159,8 @@ pg_isblank(const char c)
  * commas, beginning of line, and end of line.  Blank means space or tab.
  *
  * Tokens can be delimited by double quotes (this allows the inclusion of
- * blanks or '#', but not newlines).  As in SQL, write two double-quotes
- * to represent a double quote.
+ * commas, blanks, and '#', but not newlines).  As in SQL, write two
+ * double-quotes to represent a double quote.
  *
  * Comments (started by an unquoted '#') are skipped, i.e. the remainder
  * of the line is ignored.
@@ -171,8 +169,8 @@ pg_isblank(const char c)
  * Thus, if a continuation occurs within quoted text or a comment, the
  * quoted text or comment is considered to continue to the next line.)
  *
- * The token, if any, is returned at *buf (a buffer of size bufsz), and
- * *lineptr is advanced past the token.
+ * The token, if any, is returned into buf (replacing any previous
+ * contents), and *lineptr is advanced past the token.
  *
  * Also, we set *initial_quote to indicate whether there was quoting before
  * the first character. (We use that to prevent "@x" from being treated
@@ -180,30 +178,25 @@ pg_isblank(const char c)
  * we want to allow that to support embedded spaces in file paths.)
  *
  * We set *terminating_comma to indicate whether the token is terminated by a
- * comma (which is not returned).
+ * comma (which is not returned, nor advanced over).
  *
- * In event of an error, log a message at ereport level elevel, and also
- * set *err_msg to a string describing the error.  Currently the only
- * possible error is token too long for buf.
+ * The only possible error condition is lack of terminating quote, but we
+ * currently do not detect that, but just return the rest of the line.
  *
- * If successful: store null-terminated token at *buf and return true.
- * If no more tokens on line: set *buf = '\0' and return false.
- * If error: fill buf with truncated or misformatted token and return false.
+ * If successful: store dequoted token in buf and return true.
+ * If no more tokens on line: set buf to empty and return false.
  */
 static bool
-next_token(char **lineptr, char *buf, int bufsz,
-		   bool *initial_quote, bool *terminating_comma,
-		   int elevel, char **err_msg)
+next_token(char **lineptr, StringInfo buf,
+		   bool *initial_quote, bool *terminating_comma)
 {
 	int			c;
-	char	   *start_buf = buf;
-	char	   *end_buf = buf + (bufsz - 1);
 	bool		in_quote = false;
 	bool		was_quote = false;
 	bool		saw_quote = false;
 
-	Assert(end_buf > start_buf);
-
+	/* Initialize output parameters */
+	resetStringInfo(buf);
 	*initial_quote = false;
 	*terminating_comma = false;
@@ -226,22 +219,6 @@ next_token(char **lineptr, char *buf, int bufsz,
 			break;
 		}
 
-		if (buf >= end_buf)
-		{
-			*buf = '\0';
-			ereport(elevel,
-					(errcode(ERRCODE_CONFIG_FILE_ERROR),
-					 errmsg("authentication file token too long, skipping: \"%s\"",
-							start_buf)));
-			*err_msg = "authentication file token too long";
-			/* Discard remainder of line */
-			while ((c = (*(*lineptr)++)) != '\0')
-				;
-			/* Un-eat the '\0', in case we're called again */
-			(*lineptr)--;
-			return false;
-		}
-
 		/* we do not pass back a terminating comma in the token */
 		if (c == ',' && !in_quote)
 		{
@@ -250,7 +227,7 @@
 		}
 
 		if (c != '"' || was_quote)
-			*buf++ = c;
+			appendStringInfoChar(buf, c);
 
 		/* Literal double-quote is two double-quotes */
 		if (in_quote && c == '"')
@@ -262,7 +239,7 @@
 		{
 			in_quote = !in_quote;
 			saw_quote = true;
-			if (buf == start_buf)
+			if (buf->len == 0)
 				*initial_quote = true;
 		}
@@ -275,9 +252,7 @@
 	 */
 	(*lineptr)--;
 
-	*buf = '\0';
-
-	return (saw_quote || buf > start_buf);
+	return (saw_quote || buf->len > 0);
 }
 
 /*
@@ -409,21 +384,22 @@ static List *
 next_field_expand(const char *filename, char **lineptr,
 				  int elevel, int depth, char **err_msg)
 {
-	char		buf[MAX_TOKEN];
+	StringInfoData buf;
 	bool		trailing_comma;
 	bool		initial_quote;
 	List	   *tokens = NIL;
 
+	initStringInfo(&buf);
+
 	do
 	{
-		if (!next_token(lineptr, buf, sizeof(buf),
-						&initial_quote, &trailing_comma,
-						elevel, err_msg))
+		if (!next_token(lineptr, &buf,
+						&initial_quote, &trailing_comma))
 			break;
 
 		/* Is this referencing a file? */
-		if (!initial_quote && buf[0] == '@' && buf[1] != '\0')
-			tokens = tokenize_expand_file(tokens, filename, buf + 1,
+		if (!initial_quote && buf.len > 1 && buf.data[0] == '@')
+			tokens = tokenize_expand_file(tokens, filename, buf.data + 1,
 										  elevel, depth + 1, err_msg);
 		else
 		{
@@ -434,11 +410,13 @@ next_field_expand(const char *filename, char **lineptr,
 			 * for the list of tokens.
 			 */
 			oldcxt = MemoryContextSwitchTo(tokenize_context);
-			tokens = lappend(tokens, make_auth_token(buf, initial_quote));
+			tokens = lappend(tokens, make_auth_token(buf.data, initial_quote));
 			MemoryContextSwitchTo(oldcxt);
 		}
 	} while (trailing_comma && (*err_msg == NULL));
 
+	pfree(buf.data);
+
 	return tokens;
 }
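
A usage note on the lifetime contract the new code relies on: next_token() resets the
one StringInfoData on every call, so buf.data is only valid until the next call or the
final pfree(); anything that must outlive the loop, such as the auth tokens above, has
to be copied out first (which make_auth_token() does, into the tokenize_context). The
sketch below is a simplified, hypothetical caller assumed to live alongside next_token()
in hba.c; it ignores comma handling and @file expansion.

/*
 * Hypothetical caller illustrating the reuse-and-copy contract; "line" is
 * assumed to be a NUL-terminated configuration line.
 */
static List *
collect_tokens(char *line)
{
	char	   *lineptr = line;
	StringInfoData buf;
	bool		initial_quote;
	bool		trailing_comma;
	List	   *result = NIL;

	initStringInfo(&buf);
	while (next_token(&lineptr, &buf, &initial_quote, &trailing_comma))
	{
		/* buf.data is clobbered by the next call, so copy the token out */
		result = lappend(result, pstrdup(buf.data));
	}
	pfree(buf.data);
	return result;
}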