sql: simplify generic query plans
All fully-optimized memos that can be reused without re-optimization are
now stored in `PreparedStatement.GenericMemo`. Prior to this commit,
some fully-optimized memos were stored in `PreparedStatement.BaseMemo`.

Release note: None
mgartner committed Oct 3, 2024
1 parent faf3310 commit 9b6f0f9
Showing 2 changed files with 59 additions and 63 deletions.
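The commit message above boils down to one storage rule. The following self-contained sketch (hypothetical stand-in types and helper, not the actual pkg/sql code) illustrates it: a memo that is already fully optimized when the statement is prepared is stored as an "ideal generic memo", while a merely normalized memo stays in BaseMemo as the starting point for custom plans.

package main

import "fmt"

// Minimal stand-ins for memo.Memo and PreparedStatement, only to make the new
// storage rule concrete; the real types live in pkg/sql and pkg/sql/opt/memo.
type Memo struct{ optimized bool }

func (m *Memo) IsOptimized() bool { return m.optimized }

type PreparedStatement struct {
	BaseMemo         *Memo
	GenericMemo      *Memo
	IdealGenericPlan bool
}

// storePreparedMemo mirrors the rule introduced by this commit: a fully
// optimized memo becomes an ideal generic memo; otherwise the normalized memo
// is kept as the base memo used to build custom plans later.
func storePreparedMemo(prep *PreparedStatement, m *Memo) {
	if m.IsOptimized() {
		prep.GenericMemo = m
		prep.IdealGenericPlan = true
	} else {
		prep.BaseMemo = m
	}
}

func main() {
	prep := &PreparedStatement{}
	storePreparedMemo(prep, &Memo{optimized: true})
	fmt.Println(prep.GenericMemo != nil, prep.IdealGenericPlan) // true true
}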
105 changes: 53 additions & 52 deletions pkg/sql/plan_opt.go
@@ -149,7 +149,14 @@ func (p *planner) prepareUsingOptimizer(
stmt.Prepared.StatementNoConstants = pm.StatementNoConstants
stmt.Prepared.Columns = pm.Columns
stmt.Prepared.Types = pm.Types
stmt.Prepared.BaseMemo = cachedData.Memo
if cachedData.Memo.IsOptimized() {
// A cached, fully optimized memo is an "ideal generic
// memo".
stmt.Prepared.GenericMemo = cachedData.Memo
stmt.Prepared.IdealGenericPlan = true
} else {
stmt.Prepared.BaseMemo = cachedData.Memo
}
return opc.flags, nil
}
opc.log(ctx, "query cache hit but memo is stale (prepare)")
@@ -163,7 +170,7 @@ func (p *planner) prepareUsingOptimizer(
}

// Build the memo. Do not attempt to build a generic plan at PREPARE-time.
memo, _, err := opc.buildReusableMemo(ctx, false /* buildGeneric */)
memo, _, err := opc.buildReusableMemo(ctx, false /* allowNonIdealGeneric */)
if err != nil {
return 0, err
}
@@ -213,7 +220,14 @@ func (p *planner) prepareUsingOptimizer(
stmt.Prepared.Columns = resultCols
stmt.Prepared.Types = p.semaCtx.Placeholders.Types
if opc.allowMemoReuse {
stmt.Prepared.BaseMemo = memo
if memo.IsOptimized() {
// A memo fully optimized at prepare time is an "ideal generic
// memo".
stmt.Prepared.GenericMemo = memo
stmt.Prepared.IdealGenericPlan = true
} else {
stmt.Prepared.BaseMemo = memo
}
if opc.useCache {
// execPrepare sets the PrepareMetadata.InferredTypes field after this
// point. However, once the PrepareMetadata goes into the cache, it
@@ -419,13 +433,13 @@ const (
// 1. The statement does not contain placeholders nor fold-able stable
// operators.
// 2. Or, the placeholder fast path is used.
// 3. Or, buildGeneric is true and the plan is fully optimized as best as
// possible in the presence of placeholders.
// 3. Or, allowNonIdealGeneric is true and the plan is fully optimized as best
// as possible in the presence of placeholders.
//
// The returned memo is fully detached from the planner and can be used with
// reuseMemo independently and concurrently by multiple threads.
func (opc *optPlanningCtx) buildReusableMemo(
ctx context.Context, buildGeneric bool,
ctx context.Context, allowNonIdealGeneric bool,
) (*memo.Memo, memoType, error) {
p := opc.p

@@ -506,7 +520,7 @@ func (opc *optPlanningCtx) buildReusableMemo(
opc.log(ctx, "placeholder fast path")
opc.flags.Set(planFlagOptimized)
return opc.optimizer.DetachMemo(ctx), memoTypeIdealGeneric, nil
} else if buildGeneric {
} else if allowNonIdealGeneric {
// Build a generic query plan if the placeholder fast path failed and a
// generic plan was requested.
opc.log(ctx, "optimizing (generic)")
@@ -579,11 +593,15 @@ func (opc *optPlanningCtx) incPlanTypeTelemetry(cachedMemo *memo.Memo) {
// useGenericPlan returns true if a generic query plan should be used instead of
// a custom plan.
func (opc *optPlanningCtx) useGenericPlan() bool {
prep := opc.p.stmt.Prepared
// Always use an ideal generic plan.
if prep.IdealGenericPlan {
return true
}
switch opc.p.SessionData().PlanCacheMode {
case sessiondatapb.PlanCacheModeForceGeneric:
return true
case sessiondatapb.PlanCacheModeAuto:
prep := opc.p.stmt.Prepared
// We need to build CustomPlanThreshold custom plans before considering
// a generic plan.
if prep.Costs.NumCustom() < CustomPlanThreshold {
@@ -607,7 +625,7 @@ func (opc *optPlanningCtx) useGenericPlan() bool {
// from, baseMemo or genericMemo. It returns nil if both memos are stale. It
// selects baseMemo or genericMemo based on the following rules, in order:
//
// 1. If baseMemo is fully optimized and not stale, it is returned as-is.
// 1. If the generic memo is ideal, it is returned as-is.
// 2. If plan_cache_mode=force_generic_plan is true then genericMemo is
// returned as-is if it is not stale.
// 3. If plan_cache_mode=auto, there have been at least 5 custom plans
@@ -620,54 +638,37 @@ func (opc *optPlanningCtx) useGenericPlan() bool {
// stale.
// 5. Otherwise, nil is returned and the caller is responsible for building a
// new memo.
//
// The logic is structured to avoid unnecessary (*memo.Memo).IsStale calls,
// since they can be expensive.
func (opc *optPlanningCtx) chooseValidPreparedMemo(
ctx context.Context, baseMemo *memo.Memo, genericMemo *memo.Memo,
) (*memo.Memo, error) {
// First check for a fully optimized, non-stale, base memo.
if baseMemo != nil && baseMemo.IsOptimized() {
isStale, err := baseMemo.IsStale(ctx, opc.p.EvalContext(), opc.catalog)
if err != nil {
return nil, err
} else if !isStale {
return baseMemo, nil
}
}

func (opc *optPlanningCtx) chooseValidPreparedMemo(ctx context.Context) (*memo.Memo, error) {
prep := opc.p.stmt.Prepared
reuseGeneric := opc.useGenericPlan()

// Next check for a non-stale, generic memo.
if reuseGeneric && genericMemo != nil {
isStale, err := genericMemo.IsStale(ctx, opc.p.EvalContext(), opc.catalog)
if opc.useGenericPlan() {
if prep.GenericMemo == nil {
// A generic plan does not yet exist.
return nil, nil
}
isStale, err := prep.GenericMemo.IsStale(ctx, opc.p.EvalContext(), opc.catalog)
if err != nil {
return nil, err
} else if !isStale {
return genericMemo, nil
} else {
// Clear the generic cost if the memo is stale. DDL or new stats
// could drastically change the cost of generic and custom plans, so
// we should re-consider which to use.
prep.Costs.ClearGeneric()
return prep.GenericMemo, nil
}
// Clear the generic cost if the memo is stale. DDL or new stats
// could drastically change the cost of generic and custom plans, so
// we should re-consider which to use.
prep.Costs.ClearGeneric()
return nil, nil
}

// Next, check for a non-stale, normalized memo, if a generic memo should
// not be reused.
if !reuseGeneric && baseMemo != nil && !baseMemo.IsOptimized() {
isStale, err := baseMemo.IsStale(ctx, opc.p.EvalContext(), opc.catalog)
if prep.BaseMemo != nil {
isStale, err := prep.BaseMemo.IsStale(ctx, opc.p.EvalContext(), opc.catalog)
if err != nil {
return nil, err
} else if !isStale {
return baseMemo, nil
} else {
// Clear the custom costs if the memo is stale. DDL or new stats
// could drastically change the cost of generic and custom plans, so
// we should re-consider which to use.
prep.Costs.ClearCustom()
return prep.BaseMemo, nil
}
// Clear the custom costs if the memo is stale. DDL or new stats
// could drastically change the cost of generic and custom plans, so
// we should re-consider which to use.
prep.Costs.ClearCustom()
}

// A valid memo was not found.
@@ -705,7 +706,7 @@ func (opc *optPlanningCtx) fetchPreparedMemo(ctx context.Context) (_ *memo.Memo,

// If the statement was previously prepared, check for a reusable memo.
// First check for a valid (non-stale) memo.
validMemo, err := opc.chooseValidPreparedMemo(ctx, prep.BaseMemo, prep.GenericMemo)
validMemo, err := opc.chooseValidPreparedMemo(ctx)
if err != nil {
return nil, err
}
@@ -728,10 +729,10 @@ func (opc *optPlanningCtx) fetchPreparedMemo(ctx context.Context) (_ *memo.Memo,
if opc.allowMemoReuse {
switch typ {
case memoTypeIdealGeneric:
// If we have an "ideal" generic memo, store it as a base memo. It will
// always be used regardless of plan_cache_mode, so there is no need to
// set GenericCost.
prep.BaseMemo = newMemo
// An "ideal" generic memo will always be used regardless of
// plan_cache_mode, so there is no need to set GenericCost.
prep.GenericMemo = newMemo
prep.IdealGenericPlan = true
case memoTypeGeneric:
prep.GenericMemo = newMemo
prep.Costs.SetGeneric(newMemo.RootExpr().(memo.RelExpr).Cost())
@@ -787,7 +788,7 @@ func (opc *optPlanningCtx) buildExecMemo(ctx context.Context) (_ *memo.Memo, _ e
return nil, err
} else if isStale {
opc.log(ctx, "query cache hit but needed update")
cachedData.Memo, _, err = opc.buildReusableMemo(ctx, false /* buildGeneric */)
cachedData.Memo, _, err = opc.buildReusableMemo(ctx, false /* allowNonIdealGeneric */)
if err != nil {
return nil, err
}
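The plan_opt.go changes above simplify memo selection: chooseValidPreparedMemo no longer receives the memos as arguments and no longer special-cases a fully optimized BaseMemo, because such memos now live in GenericMemo. A rough sketch of the new flow, reusing the stand-in types from the earlier sketch and stubbing out the staleness check (the real code also clears the cached plan costs when a memo turns out to be stale):

// chooseMemoSketch approximates the new chooseValidPreparedMemo: when a
// generic plan should be used, only GenericMemo is consulted; otherwise only
// BaseMemo is. Returning nil tells the caller to build a new memo.
func chooseMemoSketch(prep *PreparedStatement, useGeneric bool, isStale func(*Memo) bool) *Memo {
	if useGeneric {
		if prep.GenericMemo == nil || isStale(prep.GenericMemo) {
			return nil // no usable generic memo; caller builds one
		}
		return prep.GenericMemo
	}
	if prep.BaseMemo != nil && !isStale(prep.BaseMemo) {
		return prep.BaseMemo
	}
	return nil // caller builds a new memo
}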
17 changes: 6 additions & 11 deletions pkg/sql/prepared_stmt.go
@@ -52,23 +52,18 @@ type PreparedStatement struct {

// BaseMemo is the memoized data structure constructed by the cost-based
// optimizer during prepare of a SQL statement.
//
// It may be a fully-optimized memo if it contains an "ideal generic plan"
// that is guaranteed to be optimal across all executions of the prepared
// statement. Ideal generic plans are generated when the statement has no
// placeholders nor fold-able stable expressions, or when the placeholder
// fast-path is utilized.
//
// If it is not an ideal generic plan, it is an unoptimized, normalized
// memo that is used as a starting point for optimization of custom plans.
BaseMemo *memo.Memo

// GenericMemo, if present, is a fully-optimized memo that can be executed
// as-is.
// TODO(mgartner): Put all fully-optimized plans in the GenericMemo field to
// reduce confusion.
GenericMemo *memo.Memo

// IdealGenericPlan is true if GenericMemo is guaranteed to be optimal
// across all executions of the prepared statement. Ideal generic plans are
// generated when the statement has no placeholders nor fold-able stable
// expressions, or when the placeholder fast-path is utilized.
IdealGenericPlan bool

// Costs tracks the costs of previously optimized custom and generic plans.
Costs planCosts

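The new IdealGenericPlan field feeds directly into the decision of whether to reuse a generic plan. A hedged sketch of that decision follows; the plan_cache_mode values are simplified and the auto-mode cost comparison is an assumption, since the diff only shows that at least CustomPlanThreshold custom plans must be tried first.

type PlanCacheMode int

const (
	PlanCacheModeForceCustom PlanCacheMode = iota
	PlanCacheModeForceGeneric
	PlanCacheModeAuto
)

// CustomPlanThreshold mirrors the constant referenced in useGenericPlan: in
// auto mode, this many custom plans are built before a generic plan is
// considered.
const CustomPlanThreshold = 5

// useGenericPlanSketch approximates the decision after this commit: an ideal
// generic plan always wins; force_generic_plan always picks the generic plan;
// auto mode waits for CustomPlanThreshold custom plans and then compares
// costs (the exact comparison is assumed here, not shown in the diff).
func useGenericPlanSketch(idealGeneric bool, mode PlanCacheMode, numCustom int, avgCustomCost, genericCost float64) bool {
	if idealGeneric {
		return true
	}
	switch mode {
	case PlanCacheModeForceGeneric:
		return true
	case PlanCacheModeAuto:
		if numCustom < CustomPlanThreshold {
			return false
		}
		return genericCost < avgCustomCost
	}
	return false
}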
