Skip to content

Commit

Permalink
[BugFix] Disable date_trunc equivalent replace if binary type is LE (#53229)
Browse files Browse the repository at this point in the history

Signed-off-by: shuming.li <[email protected]>
(cherry picked from commit 3e89c4e)
  • Loading branch information
LiShuMing authored and mergify[bot] committed Nov 27, 2024
1 parent 60ae012 commit 39b0fe2
Show file tree
Hide file tree
Showing 3 changed files with 144 additions and 10 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -34,13 +34,18 @@ public DateTruncEquivalent() {}

/**
* TODO: we can support this later.
* Change date_trunc('month', col) to col = '2023-12-01' will get a wrong result.
* MV : select date_trunc('day', col) as dt from t
* Query : select date_trunc('month', col) from t where date_trunc('month', col) = '2023-11-01'
* Change date_trunc('month', dt) to col = '2023-12-01' will get a wrong result.
* MV : select date_trunc('day', dt) as dt from t
* Query1 : select date_trunc('month', dt) from t where dt = '2023-11-01'
* -- cannot be rewritten, rewrite result will be wrong
* Rewritten: select date_trunc('month', dt) from t where date_trunc('month', dt) = '2023-11-01'
*
* Query2 : select date_trunc('month', dt) from t where dt between '2023-11-01' and '2023-12-01'
* -- cannot be rewritten, dt='2023-12-01' doesn't match with date_trunc('month', dt)= '2023-11-01'
* Rewritten : select date_trunc('month', dt) from t where date_trunc('month', dt) between '2023-11-01' and '2023-12-01'
*/
// Binary predicate types eligible for the date_trunc equivalent rewrite.
// NOTE(review): EQ is deliberately absent — rewriting equality on a truncated
// column can yield wrong results (see the comment above) — confirm against callers.
// `final` added: this is a constant set and must not be reassignable.
private static final Set<BinaryType> SUPPORTED_BINARY_TYPES = ImmutableSet.of(
BinaryType.GE,
BinaryType.LE,
BinaryType.GT,
BinaryType.LT
);
Expand Down Expand Up @@ -109,6 +114,9 @@ public ScalarOperator rewrite(RewriteEquivalentContext eqContext,
return null;
}
BinaryPredicateOperator predicate = (BinaryPredicateOperator) newInput.clone();
if (!isSupportedBinaryType(predicate.getBinaryType())) {
return null;
}
predicate.setChild(0, replace);
return predicate;
} else if (newInput instanceof CallOperator) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

package com.starrocks.planner;

import com.starrocks.utframe.UtFrameUtils;
import org.junit.BeforeClass;
import org.junit.Test;

Expand Down Expand Up @@ -239,7 +240,7 @@ public void testDateTruncPartitionColumnExpr1() throws Exception {
"PARTITION BY ds \n" +
"DISTRIBUTED BY RANDOM \n" +
"AS SELECT \n" +
"count(DISTINCT `order_id`) AS `order_num`, \n" +
"bitmap_union(to_bitmap(`order_id`)) AS `order_num`, \n" +
"date_trunc('minute', `dt`) AS ds\n" +
"FROM `test_partition_expr_tbl1`\n" +
"group by ds;";
Expand All @@ -254,8 +255,20 @@ public void testDateTruncPartitionColumnExpr1() throws Exception {
"group by ds")
.match("test_partition_expr_mv1");
}
connectContext.getSessionVariable().setEnableMaterializedViewTimeSeriesPushDownRewrite(false);
{
sql("SELECT \n" +
"count(DISTINCT `order_id`) AS `order_num`, \n" +
"date_trunc('minute', `dt`) AS ds \n" +
"FROM `test_partition_expr_tbl1`\n" +
"WHERE `dt` BETWEEN '2023-04-11' AND '2023-04-12'\n" +
"group by ds")
.nonMatch("test_partition_expr_mv1");
}
connectContext.getSessionVariable().setEnableMaterializedViewTimeSeriesPushDownRewrite(true);

{
UtFrameUtils.mockLogicalScanIsEmptyOutputRows(false);
sql("SELECT \n" +
"count(DISTINCT `order_id`) AS `order_num`, \n" +
"date_trunc('minute', `dt`) AS ds \n" +
Expand All @@ -264,7 +277,6 @@ public void testDateTruncPartitionColumnExpr1() throws Exception {
"group by ds")
.match("test_partition_expr_mv1");
}

starRocksAssert.dropMaterializedView("test_partition_expr_mv1");
starRocksAssert.dropTable("test_partition_expr_tbl1");
}
Expand Down Expand Up @@ -293,7 +305,6 @@ public void testDateTruncPartitionColumnExpr2() throws Exception {
"FROM `test_partition_expr_tbl1`\n" +
"group by ds;";
starRocksAssert.withMaterializedView(mv);

{
sql("SELECT \n" +
"count(DISTINCT `order_id`) AS `order_num`, \n" +
Expand Down Expand Up @@ -328,6 +339,121 @@ public void testDateTruncPartitionColumnExpr2() throws Exception {
starRocksAssert.dropTable("test_partition_expr_tbl1");
}

@Test
public void testDateTruncPartitionColumnExpr3() throws Exception {
    // Base table: range-partitioned directly on the `dt` column.
    String createTableSql = "CREATE TABLE `test_partition_expr_tbl1` (\n"
            + " `order_id` bigint(20) NOT NULL,\n"
            + " `dt` datetime NOT NULL,\n"
            + " `value` varchar(256) NULL \n"
            + ") ENGINE=OLAP\n"
            + "DUPLICATE KEY(`order_id`, `dt`)\n"
            + "PARTITION BY RANGE(`dt`)\n"
            + "(\n"
            + "PARTITION p2023041017 VALUES [(\"2023-04-10 17:00:00\"), (\"2023-04-10 18:00:00\")),\n"
            + "PARTITION p2023041021 VALUES [(\"2023-04-10 21:00:00\"), (\"2023-04-10 22:00:00\"))\n"
            + ")\n"
            + "DISTRIBUTED BY HASH(`order_id`)";
    starRocksAssert.withTable(createTableSql);

    // MV: bitmap of order ids grouped by the hourly bucket of `dt`.
    String createMvSql = "CREATE MATERIALIZED VIEW `test_partition_expr_mv1`\n"
            + "PARTITION BY ds \n"
            + "DISTRIBUTED BY RANDOM \n"
            + "AS SELECT \n"
            + "bitmap_union(to_bitmap(`order_id`)) AS `order_num`, \n"
            + "date_trunc('hour', `dt`) AS ds\n"
            + "FROM `test_partition_expr_tbl1`\n"
            + "group by ds;";
    starRocksAssert.withMaterializedView(createMvSql);

    // Common SELECT/FROM prefix shared by the three probe queries below.
    final String queryPrefix = "SELECT \n"
            + "count(DISTINCT `order_id`) AS `order_num`, \n"
            + "date_trunc('day', `dt`) AS ds \n"
            + "FROM `test_partition_expr_tbl1`\n";

    // Equality predicate on date_trunc: the plan must NOT use the MV.
    sql(queryPrefix
            + "WHERE date_trunc('day', `dt`) = '2023-04-01'\n"
            + "group by ds")
            .nonMatch("test_partition_expr_mv1");

    UtFrameUtils.mockLogicalScanIsEmptyOutputRows(false);

    // Range predicate on date_trunc: rewrite to the MV is expected.
    sql(queryPrefix
            + "WHERE date_trunc('day', `dt`) BETWEEN '2023-04-01' AND '2023-05-01'\n"
            + "group by ds")
            .match("test_partition_expr_mv1");

    // Range predicate directly on `dt`: rewrite to the MV is expected.
    sql(queryPrefix
            + "WHERE `dt` BETWEEN '2023-04-11' AND '2023-04-12'\n"
            + "group by ds")
            .match("test_partition_expr_mv1");

    starRocksAssert.dropMaterializedView("test_partition_expr_mv1");
    starRocksAssert.dropTable("test_partition_expr_tbl1");
}

@Test
public void testDateTruncPartitionColumnExpr4() throws Exception {
    // Base table: partitioned by the expression date_trunc('hour', dt)
    // rather than by the raw column.
    String createTableSql = "CREATE TABLE `test_partition_expr_tbl1` (\n"
            + " `order_id` bigint(20) NOT NULL,\n"
            + " `dt` datetime NOT NULL,\n"
            + " `value` varchar(256) NULL \n"
            + ") ENGINE=OLAP\n"
            + "DUPLICATE KEY(`order_id`, `dt`)\n"
            + "PARTITION BY date_trunc('hour', `dt`)\n"
            + "DISTRIBUTED BY HASH(`order_id`)";
    starRocksAssert.withTable(createTableSql);

    // MV: bitmap of order ids grouped by the hourly bucket of `dt`.
    String createMvSql = "CREATE MATERIALIZED VIEW `test_partition_expr_mv1`\n"
            + "PARTITION BY ds \n"
            + "DISTRIBUTED BY RANDOM \n"
            + "AS SELECT \n"
            + "bitmap_union(to_bitmap(`order_id`)) AS `order_num`, \n"
            + "date_trunc('hour', `dt`) AS ds\n"
            + "FROM `test_partition_expr_tbl1`\n"
            + "group by ds;";
    starRocksAssert.withMaterializedView(createMvSql);

    // Common SELECT/FROM prefix shared by the three probe queries below.
    final String queryPrefix = "SELECT \n"
            + "count(DISTINCT `order_id`) AS `order_num`, \n"
            + "date_trunc('day', `dt`) AS ds \n"
            + "FROM `test_partition_expr_tbl1`\n";

    // Equality predicate on date_trunc: the plan must NOT use the MV.
    sql(queryPrefix
            + "WHERE date_trunc('day', `dt`) = '2023-04-01'\n"
            + "group by ds")
            .nonMatch("test_partition_expr_mv1");

    UtFrameUtils.mockLogicalScanIsEmptyOutputRows(false);

    // Range predicate on date_trunc: rewrite to the MV is expected.
    sql(queryPrefix
            + "WHERE date_trunc('day', `dt`) BETWEEN '2023-04-01' AND '2023-05-01'\n"
            + "group by ds")
            .match("test_partition_expr_mv1");

    // Range predicate directly on `dt`: rewrite to the MV is expected.
    sql(queryPrefix
            + "WHERE `dt` BETWEEN '2023-04-11' AND '2023-04-12'\n"
            + "group by ds")
            .match("test_partition_expr_mv1");

    starRocksAssert.dropMaterializedView("test_partition_expr_mv1");
    starRocksAssert.dropTable("test_partition_expr_tbl1");
}

@Test
public void testMvRewriteForColumnReorder() throws Exception {
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -788,7 +788,7 @@ public void testPartialPartitionRewriteWithDateTruncExpr1() throws Exception {

{
String query = "select date_trunc('minute', `k1`) AS ds, sum(v1) " +
" FROM base_tbl1 where `k1` = '2020-02-11' group by ds";
" FROM base_tbl1 where date_trunc('minute', `k1`) = '2020-02-11' group by ds";
String plan = getFragmentPlan(query);
PlanTestBase.assertContains(plan, "test_mv1", "ds = '2020-02-11 00:00:00'");
}
Expand Down Expand Up @@ -872,8 +872,8 @@ public void testPartialPartitionRewriteWithDateTruncExpr2() throws Exception {

{
String query = "select date_trunc('minute', `k1`) AS ds, sum(v1) " +
" FROM base_tbl1 where `k1` = '2020-02-11' group by ds";
String plan = getFragmentPlan(query);
" FROM base_tbl1 where date_trunc('minute', `k1`) = '2020-02-11' group by ds";
String plan = getFragmentPlan(query, "MV");
PlanTestBase.assertContains(plan, "test_mv1", "ds = '2020-02-11 00:00:00'");
}

Expand Down

0 comments on commit 39b0fe2

Please sign in to comment.