diff --git a/.unreleased/pr_7271 b/.unreleased/pr_7271 new file mode 100644 index 00000000000..813c8a2c138 --- /dev/null +++ b/.unreleased/pr_7271 @@ -0,0 +1 @@ +Implements: #7271 Push down ORDER BY in real time continuous aggregate queries diff --git a/src/cross_module_fn.c b/src/cross_module_fn.c index 886f58aa5de..3b874ad12c9 100644 --- a/src/cross_module_fn.c +++ b/src/cross_module_fn.c @@ -261,7 +261,7 @@ ts_tsl_loaded(PG_FUNCTION_ARGS) } static void -preprocess_query_tsl_default_fn_community(Query *parse) +preprocess_query_tsl_default_fn_community(Query *parse, int *cursor_opts) { /* No op in community licensed code */ } diff --git a/src/cross_module_fn.h b/src/cross_module_fn.h index 08548d4cf7f..dc61c852a3c 100644 --- a/src/cross_module_fn.h +++ b/src/cross_module_fn.h @@ -152,7 +152,7 @@ typedef struct CrossModuleFunctions PGFunction chunk_unfreeze_chunk; PGFunction recompress_chunk_segmentwise; PGFunction get_compressed_chunk_index_for_recompression; - void (*preprocess_query_tsl)(Query *parse); + void (*preprocess_query_tsl)(Query *parse, int *cursor_opts); } CrossModuleFunctions; extern TSDLLEXPORT CrossModuleFunctions *ts_cm_functions; diff --git a/src/guc.c b/src/guc.c index 24321f2a8c5..3a75819fe72 100644 --- a/src/guc.c +++ b/src/guc.c @@ -66,6 +66,7 @@ bool ts_guc_enable_qual_propagation = true; bool ts_guc_enable_cagg_reorder_groupby = true; bool ts_guc_enable_now_constify = true; bool ts_guc_enable_foreign_key_propagation = true; +TSDLLEXPORT bool ts_guc_enable_cagg_sort_pushdown = true; TSDLLEXPORT bool ts_guc_enable_cagg_watermark_constify = true; TSDLLEXPORT int ts_guc_cagg_max_individual_materializations = 10; bool ts_guc_enable_osm_reads = true; @@ -562,6 +563,17 @@ _guc_init(void) NULL, NULL); + DefineCustomBoolVariable(MAKE_EXTOPTION("enable_cagg_sort_pushdown"), + "Enable sort pushdown for continuous aggregates", + "Enable pushdown of ORDER BY clause for continuous aggregates", + &ts_guc_enable_cagg_sort_pushdown, + true, + PGC_USERSET, + 
0, + NULL, + NULL, + NULL); + DefineCustomBoolVariable(MAKE_EXTOPTION("enable_cagg_watermark_constify"), "Enable cagg watermark constify", "Enable constifying cagg watermark for real-time caggs", diff --git a/src/guc.h b/src/guc.h index 059cc5f465d..4e789ce6931 100644 --- a/src/guc.h +++ b/src/guc.h @@ -27,8 +27,9 @@ extern bool ts_guc_enable_cagg_reorder_groupby; extern TSDLLEXPORT int ts_guc_cagg_max_individual_materializations; extern bool ts_guc_enable_now_constify; extern bool ts_guc_enable_foreign_key_propagation; -extern TSDLLEXPORT bool ts_guc_enable_cagg_watermark_constify; extern bool ts_guc_enable_osm_reads; +extern TSDLLEXPORT bool ts_guc_enable_cagg_sort_pushdown; +extern TSDLLEXPORT bool ts_guc_enable_cagg_watermark_constify; extern TSDLLEXPORT bool ts_guc_enable_dml_decompression; extern TSDLLEXPORT bool ts_guc_enable_dml_decompression_tuple_filtering; extern TSDLLEXPORT bool ts_guc_enable_compressed_direct_batch_delete; diff --git a/src/planner/planner.c b/src/planner/planner.c index 29f1a7ebabc..4fb7ed08f37 100644 --- a/src/planner/planner.c +++ b/src/planner/planner.c @@ -653,7 +653,7 @@ timescaledb_planner(Query *parse, const char *query_string, int cursor_opts, preprocess_query((Node *) parse, &context); if (ts_guc_enable_optimizations) - ts_cm_functions->preprocess_query_tsl(parse); + ts_cm_functions->preprocess_query_tsl(parse, &cursor_opts); } if (prev_planner_hook != NULL) diff --git a/tsl/src/continuous_aggs/planner.c b/tsl/src/continuous_aggs/planner.c index 02f51c345c1..a1ae715bc6b 100644 --- a/tsl/src/continuous_aggs/planner.c +++ b/tsl/src/continuous_aggs/planner.c @@ -355,3 +355,101 @@ constify_cagg_watermark(Query *parse) if (context.valid_query) replace_watermark_with_const(&context); } + +/* + * Push down ORDER BY into subqueries of UNION for realtime + * continuous aggregates when sorting by time. 
+ */ +void +cagg_sort_pushdown(Query *parse, int *cursor_opts) +{ + ListCell *lc; + + /* Nothing to do if we have no valid sort clause */ + if (list_length(parse->rtable) != 1 || list_length(parse->sortClause) != 1 || + !OidIsValid(linitial_node(SortGroupClause, parse->sortClause)->sortop)) + return; + + Cache *cache = ts_hypertable_cache_pin(); + + foreach (lc, parse->rtable) + { + RangeTblEntry *rte = lfirst(lc); + + /* + * Realtime cagg view will have 2 rtable entries, one for the materialized data and one for + * the not yet materialized data. + */ + if (rte->rtekind != RTE_SUBQUERY || rte->relkind != RELKIND_VIEW || + list_length(rte->subquery->rtable) != 2) + continue; + + ContinuousAgg *cagg = ts_continuous_agg_find_by_relid(rte->relid); + + /* + * This optimization only applies to realtime caggs. + */ + if (!cagg || !cagg->data.finalized || cagg->data.materialized_only) + continue; + + Hypertable *ht = ts_hypertable_cache_get_entry_by_id(cache, cagg->data.mat_hypertable_id); + Dimension const *dim = hyperspace_get_open_dimension(ht->space, 0); + + /* We should only encounter hypertables with an open dimension */ + if (!dim) + continue; + + SortGroupClause *sort = linitial_node(SortGroupClause, parse->sortClause); + TargetEntry *tle = get_sortgroupref_tle(sort->tleSortGroupRef, parse->targetList); + + /* + * We only pushdown ORDER BY when it's single column + * ORDER BY on the time column. 
+ */ + AttrNumber time_col = dim->column_attno; + if (!IsA(tle->expr, Var) || castNode(Var, tle->expr)->varattno != time_col) + continue; + + RangeTblEntry *mat_rte = linitial_node(RangeTblEntry, rte->subquery->rtable); + RangeTblEntry *rt_rte = lsecond_node(RangeTblEntry, rte->subquery->rtable); + + mat_rte->subquery->sortClause = list_copy(parse->sortClause); + rt_rte->subquery->sortClause = list_copy(parse->sortClause); + + TargetEntry *mat_tle = list_nth(mat_rte->subquery->targetList, time_col - 1); + TargetEntry *rt_tle = list_nth(rt_rte->subquery->targetList, time_col - 1); + linitial_node(SortGroupClause, mat_rte->subquery->sortClause)->tleSortGroupRef = + mat_tle->ressortgroupref; + linitial_node(SortGroupClause, rt_rte->subquery->sortClause)->tleSortGroupRef = + rt_tle->ressortgroupref; + + SortGroupClause *cagg_group; + cagg_group = list_nth(rt_rte->subquery->groupClause, rt_tle->ressortgroupref - 1); + cagg_group->sortop = sort->sortop; + cagg_group->nulls_first = sort->nulls_first; + + Oid placeholder; + int16 strategy; + get_ordering_op_properties(sort->sortop, &placeholder, &placeholder, &strategy); + + /* + * If this is DESC order and the sortop is the commutator of the cagg_group sortop, + * we can align the sortop of the cagg_group with the sortop of the sort clause, which + * will allow us to have the GroupAggregate node to produce the correct order and avoid + * having to resort. + */ + if (strategy == BTGreaterStrategyNumber) + { + rte->subquery->rtable = list_make2(rt_rte, mat_rte); + } + + /* + * We have to prevent parallelism when we do this optimization because + * the subplans of the Append have to be processed sequentially. 
+ */ + *cursor_opts = *cursor_opts & ~CURSOR_OPT_PARALLEL_OK; + parse->sortClause = NIL; + rte->subquery->sortClause = NIL; + } + ts_cache_release(cache); +} diff --git a/tsl/src/continuous_aggs/planner.h b/tsl/src/continuous_aggs/planner.h index 58e072b079b..578c0bd6915 100644 --- a/tsl/src/continuous_aggs/planner.h +++ b/tsl/src/continuous_aggs/planner.h @@ -10,5 +10,6 @@ #include "planner/planner.h" void constify_cagg_watermark(Query *parse); +void cagg_sort_pushdown(Query *parse, int *cursor_opts); #endif diff --git a/tsl/src/planner.c b/tsl/src/planner.c index 578f876d75d..22733cef32b 100644 --- a/tsl/src/planner.c +++ b/tsl/src/planner.c @@ -211,7 +211,7 @@ tsl_set_rel_pathlist(PlannerInfo *root, RelOptInfo *rel, Index rti, RangeTblEntr * Run preprocess query optimizations */ void -tsl_preprocess_query(Query *parse) +tsl_preprocess_query(Query *parse, int *cursor_opts) { Assert(parse != NULL); @@ -220,6 +220,12 @@ tsl_preprocess_query(Query *parse) { constify_cagg_watermark(parse); } + + /* Push down ORDER BY for realtime cagg */ + if (ts_guc_enable_cagg_sort_pushdown) + { + cagg_sort_pushdown(parse, cursor_opts); + } } /* diff --git a/tsl/src/planner.h b/tsl/src/planner.h index f88d6c45fa6..acfdb5f9073 100644 --- a/tsl/src/planner.h +++ b/tsl/src/planner.h @@ -16,5 +16,5 @@ void tsl_create_upper_paths_hook(PlannerInfo *, UpperRelationKind, RelOptInfo *, void tsl_set_rel_pathlist_query(PlannerInfo *, RelOptInfo *, Index, RangeTblEntry *, Hypertable *); void tsl_set_rel_pathlist_dml(PlannerInfo *, RelOptInfo *, Index, RangeTblEntry *, Hypertable *); void tsl_set_rel_pathlist(PlannerInfo *root, RelOptInfo *rel, Index rti, RangeTblEntry *rte); -void tsl_preprocess_query(Query *parse); +void tsl_preprocess_query(Query *parse, int *cursor_opts); void tsl_postprocess_plan(PlannedStmt *stmt); diff --git a/tsl/test/expected/cagg_planning.out b/tsl/test/expected/cagg_planning.out new file mode 100644 index 00000000000..b1efd54d0b8 --- /dev/null +++ 
b/tsl/test/expected/cagg_planning.out @@ -0,0 +1,383 @@ +-- This file and its contents are licensed under the Timescale License. +-- Please see the included NOTICE for copyright information and +-- LICENSE-TIMESCALE for a copy of the license. +\set PREFIX 'EXPLAIN (analyze, costs off, timing off, summary off)' +\set TEST_BASE_NAME cagg_planning +SELECT format('include/%s_load.sql', :'TEST_BASE_NAME') AS "TEST_LOAD_NAME", + format('include/%s_query.sql', :'TEST_BASE_NAME') AS "TEST_QUERY_NAME", + format('%s/results/%s_results_baseline.out', :'TEST_OUTPUT_DIR', :'TEST_BASE_NAME') AS "TEST_RESULTS_BASELINE", + format('%s/results/%s_results_optimized.out', :'TEST_OUTPUT_DIR', :'TEST_BASE_NAME') AS "TEST_RESULTS_OPTIMIZED" \gset +SELECT format('\! diff -u --label Baseline --label Optimized %s %s', :'TEST_RESULTS_BASELINE', :'TEST_RESULTS_OPTIMIZED') AS "DIFF_CMD" \gset +SET timezone TO PST8PDT; +CREATE TABLE metrics(time timestamptz, device text, metric text, value float); +SELECT create_hypertable('metrics', 'time'); +NOTICE: adding not-null constraint to column "time" + create_hypertable +---------------------- + (1,public,metrics,t) +(1 row) + +-- insert initial data to be in materialized part of cagg +INSERT INTO metrics SELECT '2020-01-01'::timestamptz + format('%s day', i::text)::interval, 'device 1', 'metric 1', i FROM generate_series(0, 9, 1) g(i); +-- cagg with grouping only by time column +CREATE MATERIALIZED VIEW cagg1 WITH (timescaledb.continuous,timescaledb.materialized_only=false) +AS SELECT time_bucket('3 day', time), avg(value) FROM metrics GROUP BY 1; +NOTICE: refreshing continuous aggregate "cagg1" +CREATE MATERIALIZED VIEW cagg1_ordered_asc WITH (timescaledb.continuous,timescaledb.materialized_only=false) +AS SELECT time_bucket('3 day', time), avg(value) FROM metrics GROUP BY 1 ORDER BY 1; +NOTICE: refreshing continuous aggregate "cagg1_ordered_asc" +CREATE MATERIALIZED VIEW cagg1_ordered_desc WITH 
(timescaledb.continuous,timescaledb.materialized_only=false) +AS SELECT time_bucket('3 day', time), avg(value) FROM metrics GROUP BY 1 ORDER BY 1 DESC; +NOTICE: refreshing continuous aggregate "cagg1_ordered_desc" +-- cagg with grouping by device and time column +CREATE MATERIALIZED VIEW cagg2 WITH (timescaledb.continuous,timescaledb.materialized_only=false) +AS SELECT device, time_bucket('3 day', time), avg(value) FROM metrics GROUP BY device, 2; +NOTICE: refreshing continuous aggregate "cagg2" +-- cagg with first/last +CREATE MATERIALIZED VIEW cagg3 WITH (timescaledb.continuous,timescaledb.materialized_only=false) +AS SELECT time_bucket('3 day', time), first(value, time), last(value, time), array_agg(value ORDER BY value) AS array_asc, array_agg(value ORDER BY value DESC) AS array_desc FROM metrics GROUP BY device, 1; +NOTICE: refreshing continuous aggregate "cagg3" +-- insert more data to be in real-time part of cagg +INSERT INTO metrics SELECT '2020-01-01'::timestamptz + format('%s day', i::text)::interval, 'device 1', 'metric 1', i FROM generate_series(10, 16, 1) g(i); +\ir :TEST_QUERY_NAME +-- This file and its contents are licensed under the Timescale License. +-- Please see the included NOTICE for copyright information and +-- LICENSE-TIMESCALE for a copy of the license. 
+SHOW timescaledb.enable_cagg_sort_pushdown; + timescaledb.enable_cagg_sort_pushdown +--------------------------------------- + on +(1 row) + +:PREFIX SELECT 'query 01' AS label, * FROM cagg1 ORDER BY time_bucket; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------- + Result (actual rows=6 loops=1) + -> Append (actual rows=6 loops=1) + -> Custom Scan (ChunkAppend) on _materialized_hypertable_2 (actual rows=4 loops=1) + Order: _materialized_hypertable_2.time_bucket + -> Index Scan Backward using _hyper_2_4_chunk__materialized_hypertable_2_time_bucket_idx on _hyper_2_4_chunk (actual rows=3 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_2_5_chunk__materialized_hypertable_2_time_bucket_idx on _hyper_2_5_chunk (actual rows=1 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Finalize GroupAggregate (actual rows=2 loops=1) + Group Key: (time_bucket('@ 3 days'::interval, metrics."time")) + -> Custom Scan (ChunkAppend) on metrics (actual rows=3 loops=1) + Order: time_bucket('@ 3 days'::interval, metrics."time") + -> Partial GroupAggregate (actual rows=2 loops=1) + Group Key: time_bucket('@ 3 days'::interval, _hyper_1_3_chunk."time") + -> Index Scan Backward using _hyper_1_3_chunk_metrics_time_idx on _hyper_1_3_chunk (actual rows=4 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Partial GroupAggregate (actual rows=1 loops=1) + Group Key: time_bucket('@ 3 days'::interval, _hyper_1_14_chunk."time") + -> Index Scan Backward using _hyper_1_14_chunk_metrics_time_idx on _hyper_1_14_chunk (actual rows=2 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) +(20 rows) + +:PREFIX SELECT 'query 02' AS label, * FROM cagg1 ORDER BY time_bucket DESC; + QUERY 
PLAN +-------------------------------------------------------------------------------------------------------------------------------------------- + Result (actual rows=6 loops=1) + -> Append (actual rows=6 loops=1) + -> Finalize GroupAggregate (actual rows=2 loops=1) + Group Key: (time_bucket('@ 3 days'::interval, metrics."time")) + -> Custom Scan (ChunkAppend) on metrics (actual rows=3 loops=1) + Order: time_bucket('@ 3 days'::interval, metrics."time") DESC + -> Partial GroupAggregate (actual rows=1 loops=1) + Group Key: time_bucket('@ 3 days'::interval, _hyper_1_14_chunk."time") + -> Index Scan using _hyper_1_14_chunk_metrics_time_idx on _hyper_1_14_chunk (actual rows=2 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Partial GroupAggregate (actual rows=2 loops=1) + Group Key: time_bucket('@ 3 days'::interval, _hyper_1_3_chunk."time") + -> Index Scan using _hyper_1_3_chunk_metrics_time_idx on _hyper_1_3_chunk (actual rows=4 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Custom Scan (ChunkAppend) on _materialized_hypertable_2 (actual rows=4 loops=1) + Order: _materialized_hypertable_2.time_bucket DESC + -> Index Scan using _hyper_2_5_chunk__materialized_hypertable_2_time_bucket_idx on _hyper_2_5_chunk (actual rows=1 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_time_bucket_idx on _hyper_2_4_chunk (actual rows=3 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) +(20 rows) + +:PREFIX SELECT 'query 03' AS label, * FROM cagg1_ordered_asc ORDER BY time_bucket; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------- + Result (actual rows=6 loops=1) + -> Append (actual rows=6 loops=1) + -> Custom Scan 
(ChunkAppend) on _materialized_hypertable_3 (actual rows=4 loops=1) + Order: _materialized_hypertable_3.time_bucket + -> Index Scan Backward using _hyper_3_6_chunk__materialized_hypertable_3_time_bucket_idx on _hyper_3_6_chunk (actual rows=3 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_3_7_chunk__materialized_hypertable_3_time_bucket_idx on _hyper_3_7_chunk (actual rows=1 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Finalize GroupAggregate (actual rows=2 loops=1) + Group Key: (time_bucket('@ 3 days'::interval, metrics."time")) + -> Custom Scan (ChunkAppend) on metrics (actual rows=3 loops=1) + Order: time_bucket('@ 3 days'::interval, metrics."time") + -> Partial GroupAggregate (actual rows=2 loops=1) + Group Key: time_bucket('@ 3 days'::interval, _hyper_1_3_chunk."time") + -> Index Scan Backward using _hyper_1_3_chunk_metrics_time_idx on _hyper_1_3_chunk (actual rows=4 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Partial GroupAggregate (actual rows=1 loops=1) + Group Key: time_bucket('@ 3 days'::interval, _hyper_1_14_chunk."time") + -> Index Scan Backward using _hyper_1_14_chunk_metrics_time_idx on _hyper_1_14_chunk (actual rows=2 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) +(20 rows) + +:PREFIX SELECT 'query 04' AS label, * FROM cagg1_ordered_asc ORDER BY time_bucket DESC; + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------- + Result (actual rows=6 loops=1) + -> Append (actual rows=6 loops=1) + -> Finalize GroupAggregate (actual rows=2 loops=1) + Group Key: (time_bucket('@ 3 days'::interval, metrics."time")) + -> Custom Scan (ChunkAppend) on metrics (actual rows=3 loops=1) + Order: time_bucket('@ 3 days'::interval, 
metrics."time") DESC + -> Partial GroupAggregate (actual rows=1 loops=1) + Group Key: time_bucket('@ 3 days'::interval, _hyper_1_14_chunk."time") + -> Index Scan using _hyper_1_14_chunk_metrics_time_idx on _hyper_1_14_chunk (actual rows=2 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Partial GroupAggregate (actual rows=2 loops=1) + Group Key: time_bucket('@ 3 days'::interval, _hyper_1_3_chunk."time") + -> Index Scan using _hyper_1_3_chunk_metrics_time_idx on _hyper_1_3_chunk (actual rows=4 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Custom Scan (ChunkAppend) on _materialized_hypertable_3 (actual rows=4 loops=1) + Order: _materialized_hypertable_3.time_bucket DESC + -> Index Scan using _hyper_3_7_chunk__materialized_hypertable_3_time_bucket_idx on _hyper_3_7_chunk (actual rows=1 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_time_bucket_idx on _hyper_3_6_chunk (actual rows=3 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) +(20 rows) + +:PREFIX SELECT 'query 05' AS label, * FROM cagg1_ordered_desc ORDER BY time_bucket; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------- + Result (actual rows=6 loops=1) + -> Append (actual rows=6 loops=1) + -> Custom Scan (ChunkAppend) on _materialized_hypertable_4 (actual rows=4 loops=1) + Order: _materialized_hypertable_4.time_bucket + -> Index Scan Backward using _hyper_4_9_chunk__materialized_hypertable_4_time_bucket_idx on _hyper_4_9_chunk (actual rows=3 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_4_8_chunk__materialized_hypertable_4_time_bucket_idx on _hyper_4_8_chunk 
(actual rows=1 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Finalize GroupAggregate (actual rows=2 loops=1) + Group Key: (time_bucket('@ 3 days'::interval, metrics."time")) + -> Custom Scan (ChunkAppend) on metrics (actual rows=3 loops=1) + Order: time_bucket('@ 3 days'::interval, metrics."time") + -> Partial GroupAggregate (actual rows=2 loops=1) + Group Key: time_bucket('@ 3 days'::interval, _hyper_1_3_chunk."time") + -> Index Scan Backward using _hyper_1_3_chunk_metrics_time_idx on _hyper_1_3_chunk (actual rows=4 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Partial GroupAggregate (actual rows=1 loops=1) + Group Key: time_bucket('@ 3 days'::interval, _hyper_1_14_chunk."time") + -> Index Scan Backward using _hyper_1_14_chunk_metrics_time_idx on _hyper_1_14_chunk (actual rows=2 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) +(20 rows) + +:PREFIX SELECT 'query 06' AS label, * FROM cagg1_ordered_desc ORDER BY time_bucket DESC; + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------------- + Result (actual rows=6 loops=1) + -> Append (actual rows=6 loops=1) + -> Finalize GroupAggregate (actual rows=2 loops=1) + Group Key: (time_bucket('@ 3 days'::interval, metrics."time")) + -> Custom Scan (ChunkAppend) on metrics (actual rows=3 loops=1) + Order: time_bucket('@ 3 days'::interval, metrics."time") DESC + -> Partial GroupAggregate (actual rows=1 loops=1) + Group Key: time_bucket('@ 3 days'::interval, _hyper_1_14_chunk."time") + -> Index Scan using _hyper_1_14_chunk_metrics_time_idx on _hyper_1_14_chunk (actual rows=2 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Partial GroupAggregate (actual rows=2 loops=1) + Group Key: time_bucket('@ 3 days'::interval, _hyper_1_3_chunk."time") + 
-> Index Scan using _hyper_1_3_chunk_metrics_time_idx on _hyper_1_3_chunk (actual rows=4 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Custom Scan (ChunkAppend) on _materialized_hypertable_4 (actual rows=4 loops=1) + Order: _materialized_hypertable_4.time_bucket DESC + -> Index Scan using _hyper_4_8_chunk__materialized_hypertable_4_time_bucket_idx on _hyper_4_8_chunk (actual rows=1 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan using _hyper_4_9_chunk__materialized_hypertable_4_time_bucket_idx on _hyper_4_9_chunk (actual rows=3 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) +(20 rows) + +:PREFIX SELECT 'query 07' AS label, * FROM cagg2 ORDER BY time_bucket; + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------- + Result (actual rows=6 loops=1) + -> Append (actual rows=6 loops=1) + -> Custom Scan (ChunkAppend) on _materialized_hypertable_5 (actual rows=4 loops=1) + Order: _materialized_hypertable_5.time_bucket + -> Index Scan Backward using _hyper_5_10_chunk__materialized_hypertable_5_time_bucket_idx on _hyper_5_10_chunk (actual rows=3 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_5_11_chunk__materialized_hypertable_5_time_bucket_idx on _hyper_5_11_chunk (actual rows=1 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Sort (actual rows=2 loops=1) + Sort Key: (time_bucket('@ 3 days'::interval, _hyper_1_3_chunk."time")) + Sort Method: quicksort + -> Finalize HashAggregate (actual rows=2 loops=1) + Group Key: (time_bucket('@ 3 days'::interval, _hyper_1_3_chunk."time")), _hyper_1_3_chunk.device + Batches: 1 + -> Append (actual rows=3 loops=1) + -> Partial 
HashAggregate (actual rows=2 loops=1) + Group Key: time_bucket('@ 3 days'::interval, _hyper_1_3_chunk."time"), _hyper_1_3_chunk.device + Batches: 1 + -> Index Scan using _hyper_1_3_chunk_metrics_time_idx on _hyper_1_3_chunk (actual rows=4 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Partial HashAggregate (actual rows=1 loops=1) + Group Key: time_bucket('@ 3 days'::interval, _hyper_1_14_chunk."time"), _hyper_1_14_chunk.device + Batches: 1 + -> Index Scan using _hyper_1_14_chunk_metrics_time_idx on _hyper_1_14_chunk (actual rows=2 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) +(25 rows) + +:PREFIX SELECT 'query 08' AS label, * FROM cagg2 ORDER BY time_bucket DESC; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------- + Result (actual rows=6 loops=1) + -> Append (actual rows=6 loops=1) + -> Sort (actual rows=2 loops=1) + Sort Key: (time_bucket('@ 3 days'::interval, _hyper_1_14_chunk."time")) DESC + Sort Method: quicksort + -> Finalize HashAggregate (actual rows=2 loops=1) + Group Key: (time_bucket('@ 3 days'::interval, _hyper_1_14_chunk."time")), _hyper_1_14_chunk.device + Batches: 1 + -> Append (actual rows=3 loops=1) + -> Partial HashAggregate (actual rows=1 loops=1) + Group Key: time_bucket('@ 3 days'::interval, _hyper_1_14_chunk."time"), _hyper_1_14_chunk.device + Batches: 1 + -> Index Scan using _hyper_1_14_chunk_metrics_time_idx on _hyper_1_14_chunk (actual rows=2 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Partial HashAggregate (actual rows=2 loops=1) + Group Key: time_bucket('@ 3 days'::interval, _hyper_1_3_chunk."time"), _hyper_1_3_chunk.device + Batches: 1 + -> Index Scan using _hyper_1_3_chunk_metrics_time_idx on _hyper_1_3_chunk (actual rows=4 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 
PST'::timestamp with time zone) + -> Custom Scan (ChunkAppend) on _materialized_hypertable_5 (actual rows=4 loops=1) + Order: _materialized_hypertable_5.time_bucket DESC + -> Index Scan using _hyper_5_11_chunk__materialized_hypertable_5_time_bucket_idx on _hyper_5_11_chunk (actual rows=1 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan using _hyper_5_10_chunk__materialized_hypertable_5_time_bucket_idx on _hyper_5_10_chunk (actual rows=3 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) +(25 rows) + +:PREFIX SELECT 'query 07' AS label, * FROM cagg3 ORDER BY time_bucket; + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------- + Result (actual rows=6 loops=1) + -> Append (actual rows=6 loops=1) + -> Custom Scan (ChunkAppend) on _materialized_hypertable_6 (actual rows=4 loops=1) + Order: _materialized_hypertable_6.time_bucket + -> Index Scan Backward using _hyper_6_12_chunk__materialized_hypertable_6_time_bucket_idx on _hyper_6_12_chunk (actual rows=3 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_6_13_chunk__materialized_hypertable_6_time_bucket_idx on _hyper_6_13_chunk (actual rows=1 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Subquery Scan on "*SELECT* 2" (actual rows=2 loops=1) + -> GroupAggregate (actual rows=2 loops=1) + Group Key: (time_bucket('@ 3 days'::interval, _hyper_1_3_chunk."time")), _hyper_1_3_chunk.device + -> Sort (actual rows=6 loops=1) + Sort Key: (time_bucket('@ 3 days'::interval, _hyper_1_3_chunk."time")), _hyper_1_3_chunk.device, _hyper_1_3_chunk.value + Sort Method: quicksort + -> Result (actual rows=6 loops=1) + -> Append (actual rows=6 loops=1) + -> Index Scan using 
_hyper_1_3_chunk_metrics_time_idx on _hyper_1_3_chunk (actual rows=4 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan using _hyper_1_14_chunk_metrics_time_idx on _hyper_1_14_chunk (actual rows=2 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) +(20 rows) + +:PREFIX SELECT 'query 08' AS label, * FROM cagg3 ORDER BY time_bucket DESC; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------- + Result (actual rows=6 loops=1) + -> Append (actual rows=6 loops=1) + -> Subquery Scan on "*SELECT* 2" (actual rows=2 loops=1) + -> GroupAggregate (actual rows=2 loops=1) + Group Key: (time_bucket('@ 3 days'::interval, _hyper_1_14_chunk."time")), _hyper_1_14_chunk.device + -> Sort (actual rows=6 loops=1) + Sort Key: (time_bucket('@ 3 days'::interval, _hyper_1_14_chunk."time")) DESC, _hyper_1_14_chunk.device, _hyper_1_14_chunk.value + Sort Method: quicksort + -> Result (actual rows=6 loops=1) + -> Append (actual rows=6 loops=1) + -> Index Scan using _hyper_1_14_chunk_metrics_time_idx on _hyper_1_14_chunk (actual rows=2 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan using _hyper_1_3_chunk_metrics_time_idx on _hyper_1_3_chunk (actual rows=4 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Custom Scan (ChunkAppend) on _materialized_hypertable_6 (actual rows=4 loops=1) + Order: _materialized_hypertable_6.time_bucket DESC + -> Index Scan using _hyper_6_13_chunk__materialized_hypertable_6_time_bucket_idx on _hyper_6_13_chunk (actual rows=1 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan using _hyper_6_12_chunk__materialized_hypertable_6_time_bucket_idx on _hyper_6_12_chunk (actual rows=3 loops=1) + Index 
Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) +(20 rows) + +-- not optimized atm +:PREFIX SELECT 'query 101' AS label, * FROM cagg2 ORDER BY time_bucket::date; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------- + Sort (actual rows=6 loops=1) + Sort Key: ((_hyper_5_10_chunk.time_bucket)::date) + Sort Method: quicksort + -> Result (actual rows=6 loops=1) + -> Append (actual rows=6 loops=1) + -> Append (actual rows=4 loops=1) + -> Index Scan using _hyper_5_10_chunk__materialized_hypertable_5_time_bucket_idx on _hyper_5_10_chunk (actual rows=3 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan using _hyper_5_11_chunk__materialized_hypertable_5_time_bucket_idx on _hyper_5_11_chunk (actual rows=1 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Finalize HashAggregate (actual rows=2 loops=1) + Group Key: _hyper_1_3_chunk.device, (time_bucket('@ 3 days'::interval, _hyper_1_3_chunk."time")) + Batches: 1 + -> Append (actual rows=3 loops=1) + -> Partial HashAggregate (actual rows=2 loops=1) + Group Key: _hyper_1_3_chunk.device, time_bucket('@ 3 days'::interval, _hyper_1_3_chunk."time") + Batches: 1 + -> Index Scan using _hyper_1_3_chunk_metrics_time_idx on _hyper_1_3_chunk (actual rows=4 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Partial HashAggregate (actual rows=1 loops=1) + Group Key: _hyper_1_14_chunk.device, time_bucket('@ 3 days'::interval, _hyper_1_14_chunk."time") + Batches: 1 + -> Index Scan using _hyper_1_14_chunk_metrics_time_idx on _hyper_1_14_chunk (actual rows=2 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) +(24 rows) + +:PREFIX SELECT 'query 102' AS label, * FROM cagg2 ORDER BY time_bucket::date DESC; + QUERY 
PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------- + Sort (actual rows=6 loops=1) + Sort Key: ((_hyper_5_10_chunk.time_bucket)::date) DESC + Sort Method: quicksort + -> Result (actual rows=6 loops=1) + -> Append (actual rows=6 loops=1) + -> Append (actual rows=4 loops=1) + -> Index Scan using _hyper_5_10_chunk__materialized_hypertable_5_time_bucket_idx on _hyper_5_10_chunk (actual rows=3 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan using _hyper_5_11_chunk__materialized_hypertable_5_time_bucket_idx on _hyper_5_11_chunk (actual rows=1 loops=1) + Index Cond: (time_bucket < 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Finalize HashAggregate (actual rows=2 loops=1) + Group Key: _hyper_1_3_chunk.device, (time_bucket('@ 3 days'::interval, _hyper_1_3_chunk."time")) + Batches: 1 + -> Append (actual rows=3 loops=1) + -> Partial HashAggregate (actual rows=2 loops=1) + Group Key: _hyper_1_3_chunk.device, time_bucket('@ 3 days'::interval, _hyper_1_3_chunk."time") + Batches: 1 + -> Index Scan using _hyper_1_3_chunk_metrics_time_idx on _hyper_1_3_chunk (actual rows=4 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) + -> Partial HashAggregate (actual rows=1 loops=1) + Group Key: _hyper_1_14_chunk.device, time_bucket('@ 3 days'::interval, _hyper_1_14_chunk."time") + Batches: 1 + -> Index Scan using _hyper_1_14_chunk_metrics_time_idx on _hyper_1_14_chunk (actual rows=2 loops=1) + Index Cond: ("time" >= 'Sat Jan 11 16:00:00 2020 PST'::timestamp with time zone) +(24 rows) + +\set ECHO none +-- diff baseline and optimized results +:DIFF_CMD +--- Baseline ++++ Optimized +@@ -1,6 +1,6 @@ + timescaledb.enable_cagg_sort_pushdown + --------------------------------------- +- off ++ on + (1 row) + + label | time_bucket | avg diff --git 
a/tsl/test/expected/cagg_query.out b/tsl/test/expected/cagg_query.out index c24bc6548d9..577b529221a 100644 --- a/tsl/test/expected/cagg_query.out +++ b/tsl/test/expected/cagg_query.out @@ -241,28 +241,25 @@ select * from mat_m1 order by location, timec asc; :EXPLAIN select * from mat_m1 where timec > '2018-10-01' order by timec desc; - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Sort Key: _hyper_2_4_chunk.timec DESC - -> Append - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), min(_hyper_1_2_chunk.location), sum(_hyper_1_2_chunk.temperature), sum(_hyper_1_2_chunk.humidity) - Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location - -> Sort - Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location - -> Result - Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on 
_timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: ((_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(19 rows) + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Append + -> GroupAggregate + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), min(_hyper_1_2_chunk.location), sum(_hyper_1_2_chunk.temperature), sum(_hyper_1_2_chunk.humidity) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location + -> Sort + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)) DESC, _hyper_1_2_chunk.location + -> Result + Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity + -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk + Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity + Index Cond: ((_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time 
zone) + -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk + Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh + Index Cond: ((_hyper_2_4_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) +(16 rows) -- outer sort is used by mat_m1 for grouping. But doesn't avoid a sort after the join --- :EXPLAIN @@ -299,28 +296,25 @@ select l.locid, mat_m1.* from mat_m1 , location_tab l where timec > '2018-10-01' :EXPLAIN select * from mat_m2 where timec > '2018-10-01' order by timec desc; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Sort Key: _hyper_3_6_chunk.timec DESC - -> Append - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), first(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), last(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), max(_hyper_1_2_chunk.temperature), 
min(_hyper_1_2_chunk.temperature) - Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location - -> Sort - Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature - Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location - -> Result - Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(19 rows) + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Append + -> GroupAggregate + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), first(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), last(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), max(_hyper_1_2_chunk.temperature), min(_hyper_1_2_chunk.temperature) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location + -> Sort + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.humidity, 
_hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)) DESC, _hyper_1_2_chunk.location + -> Result + Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature + -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk + Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature + Index Cond: ((_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) + -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk + Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp + Index Cond: ((_hyper_3_6_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) +(16 rows) :EXPLAIN select * from (select * from mat_m2 where timec > '2018-10-01' order by timec desc ) as q limit 1; diff --git a/tsl/test/expected/cagg_query_using_merge.out b/tsl/test/expected/cagg_query_using_merge.out index ed6af859ff9..9a7a44dc7e4 100644 --- a/tsl/test/expected/cagg_query_using_merge.out +++ b/tsl/test/expected/cagg_query_using_merge.out @@ -243,28 +243,25 @@ select * from mat_m1 order by location, timec asc; :EXPLAIN select * from mat_m1 where timec > '2018-10-01' order by timec desc; - QUERY PLAN 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Sort Key: _hyper_2_4_chunk.timec DESC - -> Append - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), min(_hyper_1_2_chunk.location), sum(_hyper_1_2_chunk.temperature), sum(_hyper_1_2_chunk.humidity) - Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location - -> Sort - Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location - -> Result - Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: ((_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - 
Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(19 rows) + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Append + -> GroupAggregate + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), min(_hyper_1_2_chunk.location), sum(_hyper_1_2_chunk.temperature), sum(_hyper_1_2_chunk.humidity) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location + -> Sort + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)) DESC, _hyper_1_2_chunk.location + -> Result + Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity + -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk + Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity + Index Cond: ((_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) + -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk + Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh + Index Cond: ((_hyper_2_4_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) 
AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) +(16 rows) -- outer sort is used by mat_m1 for grouping. But doesn't avoid a sort after the join --- :EXPLAIN @@ -301,28 +298,25 @@ select l.locid, mat_m1.* from mat_m1 , location_tab l where timec > '2018-10-01' :EXPLAIN select * from mat_m2 where timec > '2018-10-01' order by timec desc; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Sort Key: _hyper_3_6_chunk.timec DESC - -> Append - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), first(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), last(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), max(_hyper_1_2_chunk.temperature), min(_hyper_1_2_chunk.temperature) - Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location - -> Sort - Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature - Sort Key: (time_bucket('@ 1 
day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location - -> Result - Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(19 rows) + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + Append + -> GroupAggregate + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), first(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), last(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), max(_hyper_1_2_chunk.temperature), min(_hyper_1_2_chunk.temperature) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location + -> Sort + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)) DESC, _hyper_1_2_chunk.location + -> Result + Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature + -> 
Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk + Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature + Index Cond: ((_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) + -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk + Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp + Index Cond: ((_hyper_3_6_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) +(16 rows) :EXPLAIN select * from (select * from mat_m2 where timec > '2018-10-01' order by timec desc ) as q limit 1; diff --git a/tsl/test/expected/cagg_union_view-16.out b/tsl/test/expected/cagg_union_view-16.out index a15e46bd53a..436a465c123 100644 --- a/tsl/test/expected/cagg_union_view-16.out +++ b/tsl/test/expected/cagg_union_view-16.out @@ -592,39 +592,34 @@ ORDER by 1; -- plan output :PREFIX SELECT * FROM mat_m1 ORDER BY 1; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------ - Sort (actual rows=3 loops=1) - Sort Key: _hyper_9_15_chunk.time_bucket - Sort Method: quicksort - -> Append (actual rows=3 loops=1) - -> Index Scan using _hyper_9_15_chunk__materialized_hypertable_9_time_bucket_idx on _hyper_9_15_chunk (actual rows=1 loops=1) - Index Cond: (time_bucket < 25) - -> Finalize GroupAggregate (actual rows=2 loops=1) - Group Key: (time_bucket(5, _hyper_7_11_chunk.a)) - 
Filter: ((sum(_hyper_7_11_chunk.c) > 50) AND ((avg(_hyper_7_11_chunk.b))::integer > 12)) - Rows Removed by Filter: 1 - -> Sort (actual rows=3 loops=1) - Sort Key: (time_bucket(5, _hyper_7_11_chunk.a)) - Sort Method: quicksort - -> Append (actual rows=3 loops=1) - -> Partial GroupAggregate (actual rows=1 loops=1) - Group Key: time_bucket(5, _hyper_7_11_chunk.a) - -> Index Scan Backward using _hyper_7_11_chunk_ht_intdata_a_idx on _hyper_7_11_chunk (actual rows=2 loops=1) - Index Cond: (a >= 25) - Filter: ((b < 16) AND (c > 20)) - -> Partial GroupAggregate (actual rows=1 loops=1) - Group Key: time_bucket(5, _hyper_7_13_chunk.a) - -> Index Scan Backward using _hyper_7_13_chunk_ht_intdata_a_idx on _hyper_7_13_chunk (actual rows=3 loops=1) - Index Cond: (a >= 25) - Filter: ((b < 16) AND (c > 20)) - -> Partial GroupAggregate (actual rows=1 loops=1) - Group Key: time_bucket(5, _hyper_7_14_chunk.a) - -> Index Scan Backward using _hyper_7_14_chunk_ht_intdata_a_idx on _hyper_7_14_chunk (actual rows=1 loops=1) - Index Cond: (a >= 25) - Filter: ((b < 16) AND (c > 20)) - Rows Removed by Filter: 2 -(30 rows) + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Index Scan Backward using _hyper_9_15_chunk__materialized_hypertable_9_time_bucket_idx on _hyper_9_15_chunk (actual rows=1 loops=1) + Index Cond: (time_bucket < 25) + -> Finalize GroupAggregate (actual rows=2 loops=1) + Group Key: (time_bucket(5, ht_intdata.a)) + Filter: ((sum(ht_intdata.c) > 50) AND ((avg(ht_intdata.b))::integer > 12)) + Rows Removed by Filter: 1 + -> Custom Scan (ChunkAppend) on ht_intdata (actual rows=3 loops=1) + Order: time_bucket(5, ht_intdata.a) + -> Partial GroupAggregate (actual rows=1 loops=1) + Group Key: time_bucket(5, _hyper_7_11_chunk.a) + -> Index Scan Backward using _hyper_7_11_chunk_ht_intdata_a_idx on _hyper_7_11_chunk (actual rows=2 loops=1) + Index 
Cond: (a >= 25) + Filter: ((b < 16) AND (c > 20)) + -> Partial GroupAggregate (actual rows=1 loops=1) + Group Key: time_bucket(5, _hyper_7_13_chunk.a) + -> Index Scan Backward using _hyper_7_13_chunk_ht_intdata_a_idx on _hyper_7_13_chunk (actual rows=3 loops=1) + Index Cond: (a >= 25) + Filter: ((b < 16) AND (c > 20)) + -> Partial GroupAggregate (actual rows=1 loops=1) + Group Key: time_bucket(5, _hyper_7_14_chunk.a) + -> Index Scan Backward using _hyper_7_14_chunk_ht_intdata_a_idx on _hyper_7_14_chunk (actual rows=1 loops=1) + Index Cond: (a >= 25) + Filter: ((b < 16) AND (c > 20)) + Rows Removed by Filter: 2 +(25 rows) -- Test caggs with different time types CREATE TABLE smallint_table (time smallint, value int); diff --git a/tsl/test/expected/cagg_union_view-17.out b/tsl/test/expected/cagg_union_view-17.out index a15e46bd53a..436a465c123 100644 --- a/tsl/test/expected/cagg_union_view-17.out +++ b/tsl/test/expected/cagg_union_view-17.out @@ -592,39 +592,34 @@ ORDER by 1; -- plan output :PREFIX SELECT * FROM mat_m1 ORDER BY 1; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------ - Sort (actual rows=3 loops=1) - Sort Key: _hyper_9_15_chunk.time_bucket - Sort Method: quicksort - -> Append (actual rows=3 loops=1) - -> Index Scan using _hyper_9_15_chunk__materialized_hypertable_9_time_bucket_idx on _hyper_9_15_chunk (actual rows=1 loops=1) - Index Cond: (time_bucket < 25) - -> Finalize GroupAggregate (actual rows=2 loops=1) - Group Key: (time_bucket(5, _hyper_7_11_chunk.a)) - Filter: ((sum(_hyper_7_11_chunk.c) > 50) AND ((avg(_hyper_7_11_chunk.b))::integer > 12)) - Rows Removed by Filter: 1 - -> Sort (actual rows=3 loops=1) - Sort Key: (time_bucket(5, _hyper_7_11_chunk.a)) - Sort Method: quicksort - -> Append (actual rows=3 loops=1) - -> Partial GroupAggregate (actual rows=1 loops=1) - Group Key: time_bucket(5, _hyper_7_11_chunk.a) - -> Index Scan Backward using 
_hyper_7_11_chunk_ht_intdata_a_idx on _hyper_7_11_chunk (actual rows=2 loops=1) - Index Cond: (a >= 25) - Filter: ((b < 16) AND (c > 20)) - -> Partial GroupAggregate (actual rows=1 loops=1) - Group Key: time_bucket(5, _hyper_7_13_chunk.a) - -> Index Scan Backward using _hyper_7_13_chunk_ht_intdata_a_idx on _hyper_7_13_chunk (actual rows=3 loops=1) - Index Cond: (a >= 25) - Filter: ((b < 16) AND (c > 20)) - -> Partial GroupAggregate (actual rows=1 loops=1) - Group Key: time_bucket(5, _hyper_7_14_chunk.a) - -> Index Scan Backward using _hyper_7_14_chunk_ht_intdata_a_idx on _hyper_7_14_chunk (actual rows=1 loops=1) - Index Cond: (a >= 25) - Filter: ((b < 16) AND (c > 20)) - Rows Removed by Filter: 2 -(30 rows) + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Index Scan Backward using _hyper_9_15_chunk__materialized_hypertable_9_time_bucket_idx on _hyper_9_15_chunk (actual rows=1 loops=1) + Index Cond: (time_bucket < 25) + -> Finalize GroupAggregate (actual rows=2 loops=1) + Group Key: (time_bucket(5, ht_intdata.a)) + Filter: ((sum(ht_intdata.c) > 50) AND ((avg(ht_intdata.b))::integer > 12)) + Rows Removed by Filter: 1 + -> Custom Scan (ChunkAppend) on ht_intdata (actual rows=3 loops=1) + Order: time_bucket(5, ht_intdata.a) + -> Partial GroupAggregate (actual rows=1 loops=1) + Group Key: time_bucket(5, _hyper_7_11_chunk.a) + -> Index Scan Backward using _hyper_7_11_chunk_ht_intdata_a_idx on _hyper_7_11_chunk (actual rows=2 loops=1) + Index Cond: (a >= 25) + Filter: ((b < 16) AND (c > 20)) + -> Partial GroupAggregate (actual rows=1 loops=1) + Group Key: time_bucket(5, _hyper_7_13_chunk.a) + -> Index Scan Backward using _hyper_7_13_chunk_ht_intdata_a_idx on _hyper_7_13_chunk (actual rows=3 loops=1) + Index Cond: (a >= 25) + Filter: ((b < 16) AND (c > 20)) + -> Partial GroupAggregate (actual rows=1 loops=1) + Group Key: 
time_bucket(5, _hyper_7_14_chunk.a) + -> Index Scan Backward using _hyper_7_14_chunk_ht_intdata_a_idx on _hyper_7_14_chunk (actual rows=1 loops=1) + Index Cond: (a >= 25) + Filter: ((b < 16) AND (c > 20)) + Rows Removed by Filter: 2 +(25 rows) -- Test caggs with different time types CREATE TABLE smallint_table (time smallint, value int); diff --git a/tsl/test/sql/CMakeLists.txt b/tsl/test/sql/CMakeLists.txt index 79c29c4ab94..20da9852895 100644 --- a/tsl/test/sql/CMakeLists.txt +++ b/tsl/test/sql/CMakeLists.txt @@ -10,6 +10,7 @@ set(TEST_FILES cagg_deprecated_bucket_ng.sql cagg_errors.sql cagg_invalidation.sql + cagg_planning.sql cagg_policy.sql cagg_query.sql cagg_refresh.sql diff --git a/tsl/test/sql/cagg_planning.sql b/tsl/test/sql/cagg_planning.sql new file mode 100644 index 00000000000..22c40abef90 --- /dev/null +++ b/tsl/test/sql/cagg_planning.sql @@ -0,0 +1,63 @@ +-- This file and its contents are licensed under the Timescale License. +-- Please see the included NOTICE for copyright information and +-- LICENSE-TIMESCALE for a copy of the license. + +\set PREFIX 'EXPLAIN (analyze, costs off, timing off, summary off)' + +\set TEST_BASE_NAME cagg_planning +SELECT format('include/%s_load.sql', :'TEST_BASE_NAME') AS "TEST_LOAD_NAME", + format('include/%s_query.sql', :'TEST_BASE_NAME') AS "TEST_QUERY_NAME", + format('%s/results/%s_results_baseline.out', :'TEST_OUTPUT_DIR', :'TEST_BASE_NAME') AS "TEST_RESULTS_BASELINE", + format('%s/results/%s_results_optimized.out', :'TEST_OUTPUT_DIR', :'TEST_BASE_NAME') AS "TEST_RESULTS_OPTIMIZED" \gset + +SELECT format('\! 
diff -u --label Baseline --label Optimized %s %s', :'TEST_RESULTS_BASELINE', :'TEST_RESULTS_OPTIMIZED') AS "DIFF_CMD" \gset + +SET timezone TO PST8PDT; + +CREATE TABLE metrics(time timestamptz, device text, metric text, value float); +SELECT create_hypertable('metrics', 'time'); + +-- insert initial data to be in materialized part of cagg +INSERT INTO metrics SELECT '2020-01-01'::timestamptz + format('%s day', i::text)::interval, 'device 1', 'metric 1', i FROM generate_series(0, 9, 1) g(i); + +-- cagg with grouping only by time column +CREATE MATERIALIZED VIEW cagg1 WITH (timescaledb.continuous,timescaledb.materialized_only=false) +AS SELECT time_bucket('3 day', time), avg(value) FROM metrics GROUP BY 1; + +CREATE MATERIALIZED VIEW cagg1_ordered_asc WITH (timescaledb.continuous,timescaledb.materialized_only=false) +AS SELECT time_bucket('3 day', time), avg(value) FROM metrics GROUP BY 1 ORDER BY 1; + +CREATE MATERIALIZED VIEW cagg1_ordered_desc WITH (timescaledb.continuous,timescaledb.materialized_only=false) +AS SELECT time_bucket('3 day', time), avg(value) FROM metrics GROUP BY 1 ORDER BY 1 DESC; + +-- cagg with grouping by device and time column +CREATE MATERIALIZED VIEW cagg2 WITH (timescaledb.continuous,timescaledb.materialized_only=false) +AS SELECT device, time_bucket('3 day', time), avg(value) FROM metrics GROUP BY device, 2; + +-- cagg with first/last +CREATE MATERIALIZED VIEW cagg3 WITH (timescaledb.continuous,timescaledb.materialized_only=false) +AS SELECT time_bucket('3 day', time), first(value, time), last(value, time), array_agg(value ORDER BY value) AS array_asc, array_agg(value ORDER BY value DESC) AS array_desc FROM metrics GROUP BY device, 1; + +-- insert more data to be in real-time part of cagg +INSERT INTO metrics SELECT '2020-01-01'::timestamptz + format('%s day', i::text)::interval, 'device 1', 'metric 1', i FROM generate_series(10, 16, 1) g(i); + +\ir :TEST_QUERY_NAME + +\set ECHO none +\set PREFIX '' + +SET 
timescaledb.enable_cagg_sort_pushdown TO off; +\o :TEST_RESULTS_BASELINE +\ir :TEST_QUERY_NAME +\o + +RESET timescaledb.enable_cagg_sort_pushdown; +\o :TEST_RESULTS_OPTIMIZED +\ir :TEST_QUERY_NAME +\o + +\set ECHO all + +-- diff baseline and optimized results +:DIFF_CMD + diff --git a/tsl/test/sql/include/cagg_planning_query.sql b/tsl/test/sql/include/cagg_planning_query.sql new file mode 100644 index 00000000000..9fca01421b2 --- /dev/null +++ b/tsl/test/sql/include/cagg_planning_query.sql @@ -0,0 +1,25 @@ +-- This file and its contents are licensed under the Timescale License. +-- Please see the included NOTICE for copyright information and +-- LICENSE-TIMESCALE for a copy of the license. + +SHOW timescaledb.enable_cagg_sort_pushdown; + +:PREFIX SELECT 'query 01' AS label, * FROM cagg1 ORDER BY time_bucket; +:PREFIX SELECT 'query 02' AS label, * FROM cagg1 ORDER BY time_bucket DESC; + +:PREFIX SELECT 'query 03' AS label, * FROM cagg1_ordered_asc ORDER BY time_bucket; +:PREFIX SELECT 'query 04' AS label, * FROM cagg1_ordered_asc ORDER BY time_bucket DESC; + +:PREFIX SELECT 'query 05' AS label, * FROM cagg1_ordered_desc ORDER BY time_bucket; +:PREFIX SELECT 'query 06' AS label, * FROM cagg1_ordered_desc ORDER BY time_bucket DESC; + +:PREFIX SELECT 'query 07' AS label, * FROM cagg2 ORDER BY time_bucket; +:PREFIX SELECT 'query 08' AS label, * FROM cagg2 ORDER BY time_bucket DESC; + +:PREFIX SELECT 'query 07' AS label, * FROM cagg3 ORDER BY time_bucket; +:PREFIX SELECT 'query 08' AS label, * FROM cagg3 ORDER BY time_bucket DESC; + +-- not optimized atm +:PREFIX SELECT 'query 101' AS label, * FROM cagg2 ORDER BY time_bucket::date; +:PREFIX SELECT 'query 102' AS label, * FROM cagg2 ORDER BY time_bucket::date DESC; +