Skip to content

Commit

Permalink
Add CURRENT_TIMESTAMP, CURRENT_DATE function support (#41)
Browse files Browse the repository at this point in the history
Co-authored-by: Kai Xu <[email protected]>
  • Loading branch information
kxu1026 and Kai Xu authored Feb 1, 2021
1 parent 29cc116 commit 1b5be21
Show file tree
Hide file tree
Showing 7 changed files with 162 additions and 274 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
package com.linkedin.coral.hive.hive2rel;

import org.apache.calcite.sql.validate.SqlConformance;
import org.apache.calcite.sql.validate.SqlConformanceEnum;
import org.apache.calcite.sql.validate.SqlDelegatingConformance;


Expand All @@ -14,7 +15,7 @@ public class HiveSqlConformance extends SqlDelegatingConformance {
public static SqlConformance HIVE_SQL = new HiveSqlConformance();

private HiveSqlConformance() {
  // Delegate to PRAGMATIC_2003 for any conformance question this class does
  // not override. Qualified via SqlConformanceEnum because the bare constant
  // is not inherited into SqlDelegatingConformance's scope.
  // NOTE: the scraped diff showed both the old `super(PRAGMATIC_2003);` and
  // the new call; only the post-commit line is valid Java and is kept here.
  super(SqlConformanceEnum.PRAGMATIC_2003);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,8 @@ protected R visit(ASTNode node, C ctx) {
case HiveParser.TOK_TABNAME:
return visitTabnameNode(node, ctx);

case HiveParser.KW_CURRENT_DATE:
case HiveParser.KW_CURRENT_TIMESTAMP:
case HiveParser.Identifier:
return visitIdentifier(node, ctx);

Expand All @@ -83,9 +85,6 @@ protected R visit(ASTNode node, C ctx) {
case HiveParser.TOK_INSERT:
return visitInsert(node, ctx);

case HiveParser.TOK_DESTINATION:
return null;

case HiveParser.TOK_SELECTDI:
return visitSelectDistinct(node, ctx);

Expand Down Expand Up @@ -173,8 +172,10 @@ protected R visit(ASTNode node, C ctx) {
case HiveParser.TOK_TABLE_OR_COL:
return visitTableTokOrCol(node, ctx);

case HiveParser.TOK_DESTINATION:
case HiveParser.EOF:
return null;

// add function names here
case HiveParser.TOK_ISNOTNULL:
case HiveParser.TOK_ISNULL:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/**
* Copyright 2017-2020 LinkedIn Corporation. All rights reserved.
* Copyright 2017-2021 LinkedIn Corporation. All rights reserved.
* Licensed under the BSD-2 Clause license.
* See LICENSE in the project root for license information.
*/
Expand Down Expand Up @@ -99,7 +99,10 @@ public Iterator<Object[]> getConvertSql() {
"SELECT a, count(distinct b), count(distinct c) from foo group by a",

// order by
"SELECT * from foo order by a"
"SELECT * from foo order by a",

//NiladicParentheses
"SELECT current_timestamp", "SELECT current_date"

// window
// Not yet implemented
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
@@ -1,16 +1,85 @@
/**
* Copyright 2017-2020 LinkedIn Corporation. All rights reserved.
* Copyright 2017-2021 LinkedIn Corporation. All rights reserved.
* Licensed under the BSD-2 Clause license.
* See LICENSE in the project root for license information.
*/
package com.linkedin.coral.presto.rel2presto;

import org.apache.calcite.rel.RelNode;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import static org.testng.Assert.*;


public class HiveToPrestoConverterTest {

@Test
public void basicSql() {
RelToPrestoConverter relToPrestoConverter;

@BeforeTest
public void beforeClass() throws HiveException, MetaException {
TestUtils.initializeViews();
relToPrestoConverter = new RelToPrestoConverter();
}

@Test(dataProvider = "viewTestCases")
public void testViews(String database, String view, String expectedSql) {
RelNode relNode = TestUtils.convertView(database, view);
String expandedSql = relToPrestoConverter.convert(relNode);
assertTrue(expandedSql.contains(expectedSql));
}

@DataProvider(name = "viewTestCases")
public Object[][] viewTestCasesProvider() {
return new Object[][] { { "test", "fuzzy_union_view", "SELECT \"a\", \"b\"\nFROM ("
+ "SELECT \"a\", \"b\"\nFROM \"test\".\"tablea\"\nUNION ALL\n"
+ "SELECT \"a\", \"b\"\nFROM \"test\".\"tablea\")" },

{ "test", "fuzzy_union_view_with_more_than_two_tables", "SELECT \"a\", \"b\"\nFROM (SELECT *\nFROM ("
+ "SELECT \"a\", \"b\"\nFROM \"test\".\"tablea\"\nUNION ALL\n"
+ "SELECT \"a\", \"b\"\nFROM \"test\".\"tablea\")\nUNION ALL\n"
+ "SELECT \"a\", \"b\"\nFROM \"test\".\"tablea\")" },

{ "test", "fuzzy_union_view_with_alias", "SELECT \"a\", \"b\"\nFROM ("
+ "SELECT \"a\", \"b\"\nFROM \"test\".\"tablea\"\nUNION ALL\n"
+ "SELECT \"a\", \"b\"\nFROM \"test\".\"tablea\")" },

{ "test", "fuzzy_union_view_single_branch_evolved", "SELECT \"a\", \"b\"\nFROM ("
+ "SELECT \"a\", \"b\"\nFROM \"test\".\"tableb\"\nUNION ALL\n"
+ "SELECT \"a\", CAST(row(b.b1) as row(b1 varchar)) AS \"b\"\nFROM \"test\".\"tablec\")" },

{ "test", "fuzzy_union_view_double_branch_evolved_same", "SELECT \"a\", \"b\"\nFROM ("
+ "SELECT \"a\", CAST(row(b.b1) as row(b1 varchar)) AS \"b\"\nFROM \"test\".\"tabled\"\nUNION ALL\n"
+ "SELECT \"a\", CAST(row(b.b1) as row(b1 varchar)) AS \"b\"\nFROM \"test\".\"tablee\")" },

{ "test", "fuzzy_union_view_double_branch_evolved_different", "SELECT \"a\", \"b\"\nFROM ("
+ "SELECT \"a\", CAST(row(b.b1) as row(b1 varchar)) AS \"b\"\nFROM \"test\".\"tablef\"\nUNION ALL\n"
+ "SELECT \"a\", CAST(row(b.b1) as row(b1 varchar)) AS \"b\"\nFROM \"test\".\"tableg\")" },

{ "test", "fuzzy_union_view_more_than_two_branches_evolved", "SELECT \"a\", \"b\"\nFROM (SELECT *\nFROM ("
+ "SELECT \"a\", CAST(row(b.b1) as row(b1 varchar)) AS \"b\"\nFROM \"test\".\"tablef\"\nUNION ALL\n"
+ "SELECT \"a\", CAST(row(b.b1) as row(b1 varchar)) AS \"b\"\nFROM \"test\".\"tableg\")\nUNION ALL\n"
+ "SELECT \"a\", CAST(row(b.b1) as row(b1 varchar)) AS \"b\"\nFROM \"test\".\"tablef\")" },

{ "test", "fuzzy_union_view_map_with_struct_value_evolved", "SELECT \"a\", \"b\"\nFROM ("
+ "SELECT \"a\", TRANSFORM_VALUES(b, (k, v) -> cast(row(v.b1) as row(b1 varchar))) AS \"b\"\nFROM \"test\".\"tableh\"\nUNION ALL\n"
+ "SELECT \"a\", \"b\"\nFROM \"test\".\"tablei\")" },

{ "test", "fuzzy_union_view_array_with_struct_value_evolved", "SELECT \"a\", \"b\"\nFROM ("
+ "SELECT \"a\", TRANSFORM(b, x -> cast(row(x.b1) as row(b1 varchar))) AS \"b\"\nFROM \"test\".\"tablej\"\nUNION ALL\n"
+ "SELECT \"a\", \"b\"\nFROM \"test\".\"tablek\")" },

{ "test", "fuzzy_union_view_deeply_nested_struct_evolved", "" + "SELECT \"a\", \"b\"\nFROM ("
+ "SELECT \"a\", CAST(row(b.b1, cast(row(b.b2.b3, cast(row(b.b2.b4.b5) as row(b5 varchar))) as row(b3 varchar, b4 row(b5 varchar)))) as row(b1 varchar, b2 row(b3 varchar, b4 row(b5 varchar)))) AS \"b\"\nFROM \"test\".\"tablel\"\nUNION ALL\n"
+ "SELECT \"a\", \"b\"\n" + "FROM \"test\".\"tablem\")" },

{ "test", "fuzzy_union_view_deeply_nested_complex_struct_evolved", "" + "SELECT \"a\", \"b\"\nFROM ("
+ "SELECT \"a\", CAST(row(b.b1, transform_values(b.m1, (k, v) -> cast(row(v.b1, transform(v.a1, x -> cast(row(x.b1) as row(b1 varchar)))) as row(b1 varchar, a1 array(row(b1 varchar)))))) as row(b1 varchar, m1 map(varchar, row(b1 varchar, a1 array(row(b1 varchar)))))) AS \"b\"\nFROM \"test\".\"tablen\"\nUNION ALL\n"
+ "SELECT \"a\", \"b\"\n" + "FROM \"test\".\"tableo\")" },

{ "test", "current_date_and_timestamp_view", "SELECT CURRENT_TIMESTAMP, TRIM(CAST(CURRENT_TIMESTAMP AS VARCHAR(65535))) AS \"ct\", CURRENT_DATE, CURRENT_DATE AS \"cd\", \"a\"\nFROM \"test\".\"tablea\"" } };
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -465,4 +465,19 @@ public void testCurrentUser() {
String expected = formatSql("SELECT CURRENT_USER AS \"CURRENT_USER\"\nFROM (VALUES (0)) AS \"t\" (\"ZERO\")");
testConversion(sql, expected);
}

/** Niladic current_timestamp (no parentheses) must convert to Presto SQL. */
@Test
public void testCurrentTimestamp() {
  final String hiveSql = "SELECT current_timestamp";
  final String prestoSql =
      formatSql("SELECT CURRENT_TIMESTAMP AS \"CURRENT_TIMESTAMP\"\nFROM (VALUES (0)) AS \"t\" (\"ZERO\")");
  testConversion(hiveSql, prestoSql);
}

/** Niladic current_date (no parentheses) must convert to Presto SQL. */
@Test
public void testCurrentDate() {
  final String hiveSql = "SELECT current_date";
  final String prestoSql =
      formatSql("SELECT CURRENT_DATE AS \"CURRENT_DATE\"\nFROM (VALUES (0)) AS \"t\" (\"ZERO\")");
  testConversion(hiveSql, prestoSql);
}
}
Loading

0 comments on commit 1b5be21

Please sign in to comment.