Further -Wshadow=compatible-local warning fixes

These should have been included in 421892a19, as these shadowed-variable
warnings can also be fixed by narrowing the scope of the shadowed variable
so that its declaration moves into an inner scope.
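
For illustration only (this sketch is not part of the commit, and the
function and variable names are invented), here is the shape of the warning
and of the scope-narrowing fix when building with gcc's
-Wshadow=compatible-local:

	/* Hypothetical sketch, not from the PostgreSQL tree. */

	/* Before: the loop-local "tmp" shadows the compatible outer "tmp". */
	static int
	sum_abs_before(const int *vals, int nvals)
	{
		int			tmp;		/* only ever needed inside the loop below */
		int			total = 0;
		int			i;

		for (i = 0; i < nvals; i++)
		{
			int			tmp = vals[i];	/* warning: declaration of 'tmp'
										 * shadows a previous local */

			total += (tmp < 0) ? -tmp : tmp;
		}
		return total;
	}

	/* After: declare "tmp" and the loop counter in the innermost scope
	 * that needs them; no local now shadows another. */
	static int
	sum_abs_after(const int *vals, int nvals)
	{
		int			total = 0;

		for (int i = 0; i < nvals; i++)
		{
			int			tmp = vals[i];

			total += (tmp < 0) ? -tmp : tmp;
		}
		return total;
	}

Narrowing the scope rather than renaming removes the shadowing without
touching any of the code that actually uses the variable.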

This is part of the same effort as f01592f91.

By my count, this takes the warning count from 114 down to 106.

Author: David Rowley and Justin Pryzby
Discussion: https://postgr.es/m/CAApHDvrwLGBP%2BYw9vriayyf%3DXR4uPWP5jr6cQhP9au_kaDUhbA%40mail.gmail.com
Committed by David Rowley on 2022-08-24 22:04:28 +12:00
parent 161355ee6d
commit f959bf9a5b
8 changed files with 15 additions and 12 deletions

src/backend/access/spgist/spgdoinsert.c

@@ -395,7 +395,6 @@ moveLeafs(Relation index, SpGistState *state,
 				size;
 	Buffer		nbuf;
 	Page		npage;
-	SpGistLeafTuple it;
 	OffsetNumber r = InvalidOffsetNumber,
 				startOffset = InvalidOffsetNumber;
 	bool		replaceDead = false;
@@ -467,6 +466,8 @@ moveLeafs(Relation index, SpGistState *state,
 	{
 		for (i = 0; i < nDelete; i++)
 		{
+			SpGistLeafTuple it;
+
 			it = (SpGistLeafTuple) PageGetItem(current->page,
 											   PageGetItemId(current->page, toDelete[i]));
 			Assert(it->tupstate == SPGIST_LIVE);

src/backend/commands/trigger.c

@@ -1149,7 +1149,6 @@ CreateTriggerFiringOn(CreateTrigStmt *stmt, const char *queryString,
 		PartitionDesc partdesc = RelationGetPartitionDesc(rel, true);
 		List	   *idxs = NIL;
 		List	   *childTbls = NIL;
-		ListCell   *l;
 		int			i;
 		MemoryContext oldcxt,
 					perChildCxt;
@@ -1181,6 +1180,7 @@ CreateTriggerFiringOn(CreateTrigStmt *stmt, const char *queryString,
 		for (i = 0; i < partdesc->nparts; i++)
 		{
 			Oid			indexOnChild = InvalidOid;
+			ListCell   *l;
 			ListCell   *l2;
 			CreateTrigStmt *childStmt;
 			Relation	childTbl;

src/backend/executor/nodeHash.c

@@ -1080,7 +1080,6 @@ static void
 ExecParallelHashIncreaseNumBatches(HashJoinTable hashtable)
 {
 	ParallelHashJoinState *pstate = hashtable->parallel_state;
-	int			i;
 
 	Assert(BarrierPhase(&pstate->build_barrier) == PHJ_BUILD_HASHING_INNER);
 
@@ -1244,7 +1243,7 @@ ExecParallelHashIncreaseNumBatches(HashJoinTable hashtable)
 				ExecParallelHashTableSetCurrentBatch(hashtable, 0);
 
 				/* Are any of the new generation of batches exhausted? */
-				for (i = 0; i < hashtable->nbatch; ++i)
+				for (int i = 0; i < hashtable->nbatch; ++i)
 				{
 					ParallelHashJoinBatch *batch = hashtable->batches[i].shared;
 

src/backend/optimizer/plan/planner.c

@@ -1981,7 +1981,6 @@ preprocess_grouping_sets(PlannerInfo *root)
 	Query	   *parse = root->parse;
 	List	   *sets;
 	int			maxref = 0;
-	ListCell   *lc;
 	ListCell   *lc_set;
 	grouping_sets_data *gd = palloc0(sizeof(grouping_sets_data));
 
@@ -2024,6 +2023,7 @@ preprocess_grouping_sets(PlannerInfo *root)
 	if (!bms_is_empty(gd->unsortable_refs))
 	{
 		List	   *sortable_sets = NIL;
+		ListCell   *lc;
 
 		foreach(lc, parse->groupingSets)
 		{

src/backend/tsearch/ts_typanalyze.c

@@ -161,7 +161,6 @@ compute_tsvector_stats(VacAttrStats *stats,
 	int			vector_no,
 				lexeme_no;
 	LexemeHashKey hash_key;
-	TrackItem  *item;
 
 	/*
 	 * We want statistics_target * 10 lexemes in the MCELEM array. This
@@ -240,6 +239,7 @@ compute_tsvector_stats(VacAttrStats *stats,
 		curentryptr = ARRPTR(vector);
 		for (j = 0; j < vector->size; j++)
 		{
+			TrackItem  *item;
 			bool		found;
 
 			/*
@@ -296,6 +296,7 @@ compute_tsvector_stats(VacAttrStats *stats,
 		int			nonnull_cnt = samplerows - null_cnt;
 		int			i;
 		TrackItem **sort_table;
+		TrackItem  *item;
 		int			track_len;
 		int			cutoff_freq;
 		int			minfreq,

src/backend/utils/adt/levenshtein.c

@@ -81,8 +81,7 @@ varstr_levenshtein(const char *source, int slen,
 	int		   *prev;
 	int		   *curr;
 	int		   *s_char_len = NULL;
-	int			i,
-				j;
+	int			j;
 	const char *y;
 
 	/*
@@ -217,7 +216,7 @@ varstr_levenshtein(const char *source, int slen,
 	 * To transform the first i characters of s into the first 0 characters of
 	 * t, we must perform i deletions.
 	 */
-	for (i = START_COLUMN; i < STOP_COLUMN; i++)
+	for (int i = START_COLUMN; i < STOP_COLUMN; i++)
 		prev[i] = i * del_c;
 
 	/* Loop through rows of the notional array */
@@ -226,6 +225,7 @@ varstr_levenshtein(const char *source, int slen,
 		int		   *temp;
 		const char *x = source;
 		int			y_char_len = n != tlen + 1 ? pg_mblen(y) : 1;
+		int			i;
 
 #ifdef LEVENSHTEIN_LESS_EQUAL

src/backend/utils/adt/rangetypes_gist.c

@@ -1322,8 +1322,7 @@ range_gist_double_sorting_split(TypeCacheEntry *typcache,
 	ConsiderSplitContext context;
 	OffsetNumber i,
 				maxoff;
-	RangeType  *range,
-			   *left_range = NULL,
+	RangeType  *left_range = NULL,
 			   *right_range = NULL;
 	int			common_entries_count;
 	NonEmptyRange *by_lower,
@@ -1518,6 +1517,7 @@ range_gist_double_sorting_split(TypeCacheEntry *typcache,
 	 */
 	for (i = FirstOffsetNumber; i <= maxoff; i = OffsetNumberNext(i))
 	{
+		RangeType  *range;
 		RangeBound	lower,
 					upper;
 		bool		empty;
@@ -1593,6 +1593,7 @@ range_gist_double_sorting_split(TypeCacheEntry *typcache,
 	 */
 	for (i = 0; i < common_entries_count; i++)
 	{
+		RangeType  *range;
 		int			idx = common_entries[i].index;
 
 		range = DatumGetRangeTypeP(entryvec->vector[idx].key);

src/backend/utils/adt/ruleutils.c

@@ -1615,7 +1615,6 @@ pg_get_statisticsobj_worker(Oid statextid, bool columns_only, bool missing_ok)
 	ArrayType  *arr;
 	char	   *enabled;
 	Datum		datum;
-	bool		isnull;
 	bool		ndistinct_enabled;
 	bool		dependencies_enabled;
 	bool		mcv_enabled;
@@ -1668,6 +1667,8 @@ pg_get_statisticsobj_worker(Oid statextid, bool columns_only, bool missing_ok)
 
 	if (!columns_only)
 	{
+		bool		isnull;
+
 		nsp = get_namespace_name_or_temp(statextrec->stxnamespace);
 		appendStringInfo(&buf, "CREATE STATISTICS %s",
 						 quote_qualified_identifier(nsp,