In the recent changes to make the planner account better for cache
effects in a nestloop inner indexscan, I had only dealt with plain index
scans and the index portion of bitmap scans.  But there will be cache
benefits for the heap accesses of bitmap scans too, so fix
cost_bitmap_heap_scan() to account for that.

Tom Lane 2006-07-22 15:41:56 +00:00
parent b0dc1fbbc5
commit 98359c3e3f
5 changed files with 62 additions and 32 deletions
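
For illustration only (this block is not part of the commit): a minimal standalone sketch of the per-scan heap-page estimate that the patched cost_bitmap_heap_scan() makes. The helper names, the numbers, and the simplified pages_fetched_estimate() stand-in are invented here; the real code, in the costsize.c hunk below, uses index_pages_fetched() for the repeated-scan case.

#include <stdio.h>

/*
 * Hypothetical stand-in for PostgreSQL's index_pages_fetched(): only the
 * "T <= b" (no re-reads) branch of the Mackert-Lohman approximation,
 * clamped to the table size.
 */
static double
pages_fetched_estimate(double tuples_fetched, double T)
{
	double		pages = (2.0 * T * tuples_fetched) / (2.0 * T + tuples_fetched);

	return (pages > T) ? T : pages;
}

/*
 * Heap pages charged to ONE bitmap heap scan, given that the scan will be
 * repeated num_scans times as the inner side of a nestloop join.
 */
static double
heap_pages_per_scan(double tuples_fetched, double T, double num_scans)
{
	if (num_scans > 1)
	{
		/*
		 * Estimate the pages fetched by all the scans together, then
		 * pro-rate for one scan; repeated scans mostly hit cached pages.
		 */
		return pages_fetched_estimate(tuples_fetched * num_scans, T) / num_scans;
	}

	/* Single scan: plain T <= b case, as in the pre-patch code */
	return pages_fetched_estimate(tuples_fetched, T);
}

int
main(void)
{
	double		T = 1000.0;		/* heap pages in the table */
	double		tuples = 100.0; /* tuples fetched per scan */

	printf("plain bitmap scan:         %.1f heap pages\n",
		   heap_pages_per_scan(tuples, T, 1.0));
	printf("inner of 100-row nestloop: %.1f heap pages per scan\n",
		   heap_pages_per_scan(tuples, T, 100.0));
	return 0;
}

With these made-up numbers the estimate drops from roughly 95 heap pages for a standalone scan to roughly 10 pages per scan when the scan is repeated 100 times, since the repeated scans are assumed to find most pages already cached. That is the same benefit the planner already credited to plain index scans and to the index portion of bitmap scans, but not yet to their heap accesses.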

src/backend/optimizer/path/costsize.c

@@ -54,7 +54,7 @@
* Portions Copyright (c) 1994, Regents of the University of California
*
* IDENTIFICATION
* $PostgreSQL: pgsql/src/backend/optimizer/path/costsize.c,v 1.162 2006/07/14 14:52:20 momjian Exp $
* $PostgreSQL: pgsql/src/backend/optimizer/path/costsize.c,v 1.163 2006/07/22 15:41:55 tgl Exp $
*
*-------------------------------------------------------------------------
*/
@@ -437,13 +437,17 @@ index_pages_fetched(double tuples_fetched, BlockNumber pages,
*
* 'baserel' is the relation to be scanned
* 'bitmapqual' is a tree of IndexPaths, BitmapAndPaths, and BitmapOrPaths
* 'outer_rel' is the outer relation when we are considering using the bitmap
* scan as the inside of a nestloop join (hence, some of the indexQuals
* are join clauses, and we should expect repeated scans of the table);
* NULL for a plain bitmap scan
*
* Note: we take no explicit notice here of whether this is a join inner path.
* If it is, the component IndexPaths should have been costed accordingly.
* Note: if this is a join inner path, the component IndexPaths in bitmapqual
* should have been costed accordingly.
*/
void
cost_bitmap_heap_scan(Path *path, PlannerInfo *root, RelOptInfo *baserel,
Path *bitmapqual)
Path *bitmapqual, RelOptInfo *outer_rel)
{
Cost startup_cost = 0;
Cost run_cost = 0;
@@ -472,14 +476,36 @@ cost_bitmap_heap_scan(Path *path, PlannerInfo *root, RelOptInfo *baserel,
startup_cost += indexTotalCost;
/*
* The number of heap pages that need to be fetched is the same as the
* Mackert and Lohman formula for the case T <= b (ie, no re-reads
* needed).
* Estimate number of main-table pages fetched.
*/
tuples_fetched = clamp_row_est(indexSelectivity * baserel->tuples);
T = (baserel->pages > 1) ? (double) baserel->pages : 1.0;
pages_fetched = (2.0 * T * tuples_fetched) / (2.0 * T + tuples_fetched);
if (outer_rel != NULL && outer_rel->rows > 1)
{
/*
* For repeated bitmap scans, scale up the number of tuples fetched
* in the Mackert and Lohman formula by the number of scans, so
* that we estimate the number of pages fetched by all the scans.
* Then pro-rate for one scan.
*/
double num_scans = outer_rel->rows;
pages_fetched = index_pages_fetched(tuples_fetched * num_scans,
baserel->pages,
0 /* XXX total index size? */);
pages_fetched /= num_scans;
}
else
{
/*
* For a single scan, the number of heap pages that need to be fetched
* is the same as the Mackert and Lohman formula for the case T <= b
* (ie, no re-reads needed).
*/
pages_fetched = (2.0 * T * tuples_fetched) / (2.0 * T + tuples_fetched);
}
if (pages_fetched >= T)
pages_fetched = T;
else

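For reference, since the comments in the hunk above lean on it without stating it: the Mackert and Lohman approximation behind index_pages_fetched() (quoted here from memory of the comment in costsize.c, not from this diff) estimates the number of page fetches PF for Ns tuple fetches as

PF = min(2TNs/(2T+Ns), T)               when T <= b
     2TNs/(2T+Ns)                       when T > b and Ns <= 2Tb/(2T-b)
     b + (Ns - 2Tb/(2T-b))*(T-b)/T      when T > b and Ns > 2Tb/(2T-b)

where T is the number of pages in the table, N the number of tuples, s the selectivity (so Ns is the number of tuples fetched), and b the number of buffer pages assumed available. The single-scan branch above is the first case, with the clamp to T applied a few lines further down; the repeated-scan branch pushes tuples_fetched * num_scans through the full formula and then divides the result by num_scans.
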
src/backend/optimizer/path/indxpath.c

@@ -9,7 +9,7 @@
*
*
* IDENTIFICATION
* $PostgreSQL: pgsql/src/backend/optimizer/path/indxpath.c,v 1.210 2006/07/13 17:47:01 momjian Exp $
* $PostgreSQL: pgsql/src/backend/optimizer/path/indxpath.c,v 1.211 2006/07/22 15:41:55 tgl Exp $
*
*-------------------------------------------------------------------------
*/
@@ -53,9 +53,11 @@ static List *find_usable_indexes(PlannerInfo *root, RelOptInfo *rel,
static List *find_saop_paths(PlannerInfo *root, RelOptInfo *rel,
List *clauses, List *outer_clauses,
bool istoplevel, RelOptInfo *outer_rel);
static Path *choose_bitmap_and(PlannerInfo *root, RelOptInfo *rel, List *paths);
static Path *choose_bitmap_and(PlannerInfo *root, RelOptInfo *rel,
List *paths, RelOptInfo *outer_rel);
static int bitmap_path_comparator(const void *a, const void *b);
static Cost bitmap_and_cost_est(PlannerInfo *root, RelOptInfo *rel, List *paths);
static Cost bitmap_and_cost_est(PlannerInfo *root, RelOptInfo *rel,
List *paths, RelOptInfo *outer_rel);
static List *pull_indexpath_quals(Path *bitmapqual);
static bool lists_intersect_ptr(List *list1, List *list2);
static bool match_clause_to_indexcol(IndexOptInfo *index,
@@ -210,8 +212,8 @@ create_index_paths(PlannerInfo *root, RelOptInfo *rel)
Path *bitmapqual;
BitmapHeapPath *bpath;
bitmapqual = choose_bitmap_and(root, rel, bitindexpaths);
bpath = create_bitmap_heap_path(root, rel, bitmapqual, false);
bitmapqual = choose_bitmap_and(root, rel, bitindexpaths, NULL);
bpath = create_bitmap_heap_path(root, rel, bitmapqual, NULL);
add_path(rel, (Path *) bpath);
}
}
@@ -536,7 +538,7 @@ generate_bitmap_or_paths(PlannerInfo *root, RelOptInfo *rel,
* OK, pick the most promising AND combination, and add it to
* pathlist.
*/
bitmapqual = choose_bitmap_and(root, rel, indlist);
bitmapqual = choose_bitmap_and(root, rel, indlist, outer_rel);
pathlist = lappend(pathlist, bitmapqual);
}
@@ -567,7 +569,8 @@ generate_bitmap_or_paths(PlannerInfo *root, RelOptInfo *rel,
* combining multiple inputs.
*/
static Path *
choose_bitmap_and(PlannerInfo *root, RelOptInfo *rel, List *paths)
choose_bitmap_and(PlannerInfo *root, RelOptInfo *rel,
List *paths, RelOptInfo *outer_rel)
{
int npaths = list_length(paths);
Path **patharray;
@@ -629,7 +632,7 @@ choose_bitmap_and(PlannerInfo *root, RelOptInfo *rel, List *paths)
qsort(patharray, npaths, sizeof(Path *), bitmap_path_comparator);
paths = list_make1(patharray[0]);
costsofar = bitmap_and_cost_est(root, rel, paths);
costsofar = bitmap_and_cost_est(root, rel, paths, outer_rel);
qualsofar = pull_indexpath_quals(patharray[0]);
lastcell = list_head(paths); /* for quick deletions */
@@ -644,7 +647,7 @@ choose_bitmap_and(PlannerInfo *root, RelOptInfo *rel, List *paths)
continue; /* consider it redundant */
/* tentatively add newpath to paths, so we can estimate cost */
paths = lappend(paths, newpath);
newcost = bitmap_and_cost_est(root, rel, paths);
newcost = bitmap_and_cost_est(root, rel, paths, outer_rel);
if (newcost < costsofar)
{
/* keep newpath in paths, update subsidiary variables */
@@ -702,7 +705,8 @@ bitmap_path_comparator(const void *a, const void *b)
* inputs.
*/
static Cost
bitmap_and_cost_est(PlannerInfo *root, RelOptInfo *rel, List *paths)
bitmap_and_cost_est(PlannerInfo *root, RelOptInfo *rel,
List *paths, RelOptInfo *outer_rel)
{
BitmapAndPath apath;
Path bpath;
@@ -714,7 +718,7 @@ bitmap_and_cost_est(PlannerInfo *root, RelOptInfo *rel, List *paths)
cost_bitmap_and_node(&apath, root);
/* Now we can do cost_bitmap_heap_scan */
cost_bitmap_heap_scan(&bpath, root, rel, (Path *) &apath);
cost_bitmap_heap_scan(&bpath, root, rel, (Path *) &apath, outer_rel);
return bpath.total_cost;
}
@@ -1486,8 +1490,8 @@ best_inner_indexscan(PlannerInfo *root, RelOptInfo *rel,
Path *bitmapqual;
BitmapHeapPath *bpath;
bitmapqual = choose_bitmap_and(root, rel, bitindexpaths);
bpath = create_bitmap_heap_path(root, rel, bitmapqual, true);
bitmapqual = choose_bitmap_and(root, rel, bitindexpaths, outer_rel);
bpath = create_bitmap_heap_path(root, rel, bitmapqual, outer_rel);
indexpaths = lappend(indexpaths, bpath);
}

src/backend/optimizer/util/pathnode.c

@@ -8,7 +8,7 @@
*
*
* IDENTIFICATION
* $PostgreSQL: pgsql/src/backend/optimizer/util/pathnode.c,v 1.130 2006/07/14 14:52:21 momjian Exp $
* $PostgreSQL: pgsql/src/backend/optimizer/util/pathnode.c,v 1.131 2006/07/22 15:41:55 tgl Exp $
*
*-------------------------------------------------------------------------
*/
@@ -513,14 +513,14 @@ create_index_path(PlannerInfo *root,
*
* 'bitmapqual' is a tree of IndexPath, BitmapAndPath, and BitmapOrPath nodes.
*
* If this is a join inner indexscan path, the component IndexPaths should
* have been costed accordingly, and TRUE should be passed for isjoininner.
* If this is a join inner indexscan path, 'outer_rel' is the outer relation,
* and all the component IndexPaths should have been costed accordingly.
*/
BitmapHeapPath *
create_bitmap_heap_path(PlannerInfo *root,
RelOptInfo *rel,
Path *bitmapqual,
bool isjoininner)
RelOptInfo *outer_rel)
{
BitmapHeapPath *pathnode = makeNode(BitmapHeapPath);
@@ -529,9 +529,9 @@ create_bitmap_heap_path(PlannerInfo *root,
pathnode->path.pathkeys = NIL; /* always unordered */
pathnode->bitmapqual = bitmapqual;
pathnode->isjoininner = isjoininner;
pathnode->isjoininner = (outer_rel != NULL);
if (isjoininner)
if (pathnode->isjoininner)
{
/*
* We must compute the estimated number of output rows for the
@@ -560,7 +560,7 @@ create_bitmap_heap_path(PlannerInfo *root,
pathnode->rows = rel->rows;
}
cost_bitmap_heap_scan(&pathnode->path, root, rel, bitmapqual);
cost_bitmap_heap_scan(&pathnode->path, root, rel, bitmapqual, outer_rel);
return pathnode;
}

src/include/optimizer/cost.h

@@ -7,7 +7,7 @@
* Portions Copyright (c) 1996-2006, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
* $PostgreSQL: pgsql/src/include/optimizer/cost.h,v 1.76 2006/06/06 17:59:58 tgl Exp $
* $PostgreSQL: pgsql/src/include/optimizer/cost.h,v 1.77 2006/07/22 15:41:56 tgl Exp $
*
*-------------------------------------------------------------------------
*/
@@ -61,7 +61,7 @@ extern void cost_seqscan(Path *path, PlannerInfo *root, RelOptInfo *baserel);
extern void cost_index(IndexPath *path, PlannerInfo *root, IndexOptInfo *index,
List *indexQuals, RelOptInfo *outer_rel);
extern void cost_bitmap_heap_scan(Path *path, PlannerInfo *root, RelOptInfo *baserel,
Path *bitmapqual);
Path *bitmapqual, RelOptInfo *outer_rel);
extern void cost_bitmap_and_node(BitmapAndPath *path, PlannerInfo *root);
extern void cost_bitmap_or_node(BitmapOrPath *path, PlannerInfo *root);
extern void cost_bitmap_tree_node(Path *path, Cost *cost, Selectivity *selec);

src/include/optimizer/pathnode.h

@@ -7,7 +7,7 @@
* Portions Copyright (c) 1996-2006, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
* $PostgreSQL: pgsql/src/include/optimizer/pathnode.h,v 1.69 2006/07/01 18:38:33 tgl Exp $
* $PostgreSQL: pgsql/src/include/optimizer/pathnode.h,v 1.70 2006/07/22 15:41:56 tgl Exp $
*
*-------------------------------------------------------------------------
*/
@@ -37,7 +37,7 @@ extern IndexPath *create_index_path(PlannerInfo *root,
extern BitmapHeapPath *create_bitmap_heap_path(PlannerInfo *root,
RelOptInfo *rel,
Path *bitmapqual,
bool isjoininner);
RelOptInfo *outer_rel);
extern BitmapAndPath *create_bitmap_and_path(PlannerInfo *root,
RelOptInfo *rel,
List *bitmapquals);