From 3744e37dcc4434999cfaacdfb4495f3644d758bf Mon Sep 17 00:00:00 2001 From: Prasad Subramanya Date: Tue, 12 Sep 2017 22:15:21 -0700 Subject: [PATCH] MD-884: Changed storage plugin name from dfs to dfs_test --- ...plate => dfs_test-storage-plugin.template} | 2 +- .../Advanced/complextype/json/complex.json | 2 +- .../5000/100000rows/parquet/widestrings.json | 2 +- .../Advanced/json/json_storage/drill1816.q | 2 +- .../json/json_storage/jsonGenericGroup.json | 2 +- .../data/metadata_caching.json | 2 +- .../partition_pruning/data/q1.q | 2 +- .../partition_pruning/data/q2.q | 2 +- .../partition_pruning/data/q3.q | 2 +- .../partition_pruning/data/q4.q | 2 +- .../plan/metadata_caching.json | 2 +- .../partition_pruning/plan/q1.q | 2 +- .../partition_pruning/plan/q2.q | 2 +- .../partition_pruning/plan/q3.q | 2 +- .../partition_pruning/plan/q4.q | 2 +- .../resources/Advanced/mondrian/mondrian.json | 2 +- .../tpcds_sf1_hive0dot13.json | 2 +- .../hive1_parquet/tpcds_sf1_hive1.json | 2 +- .../tpcds_sf1_hive1dot2.json | 2 +- .../hive2dot1_parquet_withdate/tpcds.json | 2 +- .../tpcds.json | 2 +- .../original/json/tpcds_json_sf1.json | 2 +- .../not_supported/tpcds_parquet_sf1.json | 2 +- .../original/parquet/tpcds_parquet_sf1.json | 2 +- .../tpcds_parquet_sf1.json_createTables | 2 +- .../original/text/tpcds_text_sf1.json | 2 +- .../original/tpcds_parquet_sf100.json | 2 +- .../sanity/tpcds_parquet_sf100.json | 2 +- .../variants/tpcds_parquet_sf100.json | 2 +- .../tpch_sf1/original/json/tpch_sf1_json.json | 2 +- .../original/parquet/tpch_sf1_parquet.json | 2 +- .../tpch_sf1/sanity/json/tpch_sf1_json.json | 2 +- .../sanity/parquet/tpch_sf1_parquet.json | 2 +- .../tpch_sf1/smoke/json/tpch_sf1_json.json | 2 +- .../smoke/parquet/tpch_sf1_parquet.json | 2 +- .../datetime/json/tpch_sf1_json.json | 2 +- .../datetime/parquet/tpch_sf1_parquet.json | 2 +- .../math/json/tpch_sf1_json.json | 2 +- .../math/parquet/tpch_sf1_parquet.json | 2 +- .../strings/json/tpch_sf1_json.json | 2 +- 
.../strings/parquet/tpch_sf1_parquet.json | 2 +- .../tpch/tpch_sf100/limit0/limit0.json | 2 +- .../parquet/tpch_sf100_parquet.json | 2 +- .../Datasources/cross-sources/drill.ddl | 2 +- .../Datasources/ctas/create_tables.sh | 4 +- .../ctas/create_tables_complex_parquet.ddl | 2 +- .../ctas/create_tables_complex_parquet.sh | 4 +- .../ctas/create_tables_parquet.ddl | 28 +++---- .../ctas_existing_partition_pruning.ddl | 10 +-- .../ctas_existing_partition_pruning.sh | 4 +- .../ctas_tpch_multiple_partitions.sh | 4 +- .../ctas_tpch_single_partition1.sh | 4 +- .../Datasources/ctas_auto_partition/setup.sh | 4 +- .../Datasources/droptable/droptable1.ddl | 2 +- .../Datasources/droptable/droptable11.q | 2 +- .../Datasources/droptable/droptable32.ddl | 2 +- .../Datasources/droptable/droptableif1.ddl | 2 +- .../Datasources/droptable/droptableif11.q | 2 +- .../Datasources/droptable/droptableif32.ddl | 2 +- .../Datasources/droptable/droptablesetup.sh | 16 ++-- .../hive_storage/change_metadata.sh | 4 +- .../hive_native/drill_create_parquet.ddl | 2 +- .../impersonation/dfs/noaccessnestedviews.sql | 2 +- .../Datasources/join/crt_tbl_prtntd_tbl.sh | 12 +-- .../join/crt_tbls_partition_by_l.ddl | 22 +++--- .../join/crt_tbls_partition_by_r.ddl | 22 +++--- .../aggregation/bugs/create_bugs_view.sh | 4 +- .../aggregation/bugs/create_bugs_view.sql | 2 +- .../create_count_distinct_views.sh | 4 +- .../create_count_distinct_views.sql | 10 +-- .../group_by_case/create_groupByCase_views.sh | 4 +- .../create_groupByCase_views.sql | 4 +- .../multicolumn/create_multicolumn_views.sh | 4 +- .../multicolumn/create_multicolumn_views.sql | 2 +- .../aggregation/sanity/create_sanity_views.sh | 4 +- .../sanity/create_sanity_views.sql | 6 +- .../aggregation/scalar/create_scalar_views.sh | 4 +- .../scalar/create_scalar_views.sql | 2 +- .../parquet/create_tpcdsVariants_views.sh | 4 +- .../parquet/create_tpcdsVariants_views.sql | 12 +-- .../limit0/filters/create_filters_views.sh | 4 +- 
.../limit0/filters/create_filters_views.sql | 6 +- .../create_implicit_cast_views.sh | 4 +- .../create_implicit_cast_views.sql | 4 +- .../p1tests/create_p1tests_csv_views.sh | 4 +- .../p1tests/create_p1tests_csv_views.sql | 4 +- .../p1tests/create_p1tests_parquet_views.sh | 4 +- .../p1tests/create_p1tests_parquet_views.sql | 4 +- .../limit0/union/create_union_views.sh | 4 +- .../limit0/union/create_union_views.sql | 4 +- .../union_all/create_union_all_views.sh | 4 +- .../union_all/create_union_all_views.sql | 10 +-- .../aggregates/create_wf_aggregates_views.sh | 4 +- .../aggregates/create_wf_aggregates_views.sql | 8 +- .../bugs/create_wf_bugs_views.sh | 4 +- .../bugs/create_wf_bugs_views.sql | 12 +-- .../create_wf_empty_over_clause_views.sh | 4 +- .../create_wf_empty_over_clause_views.sql | 4 +- .../create_wf_multiple_partitions_views.sh | 4 +- .../create_wf_multiple_partitions_views.sql | 14 ++-- .../metadata_caching/addremove_files.sh | 4 +- .../metadata_caching/advanced_gen.ddl | 2 +- .../metadata_caching/advanced_gen.sh | 4 +- .../metadata_caching/ctas_auto.ddl | 2 +- .../metadata_caching/delete_toplevel_cache.sh | 4 +- .../metadata_caching/gen_partitioned_data.ddl | 6 +- .../Datasources/metadata_caching/gendata.ddl | 16 ++-- .../Datasources/metadata_caching/gendata.sh | 2 +- .../refresh_metadata_addremovefiles.ddl | 44 +++++------ .../refresh_metadata_deletecache.ddl | 2 +- .../refresh_metadata_multilevel.ddl | 20 ++--- .../refresh_metadata_multilevel.sh | 4 +- .../metadata_caching/refresh_metadata_tpch.sh | 4 +- .../resources/Datasources/parquet_date/gen.sh | 4 +- .../parquet_date/metadata_cache.ddl | 74 +++++++++---------- .../Datasources/subqueries/create_tables.sh | 4 +- .../Datasources/subqueries/create_views.sh | 4 +- .../Datasources/tpcds/createTablesJson.sh | 4 +- .../Datasources/tpcds/createTablesJson.sql | 50 ++++++------- .../Datasources/tpcds/createTablesParquet.sh | 4 +- .../Datasources/tpcds/createTablesParquet.sql | 50 ++++++------- 
.../Datasources/tpcds/createViewsJson.sh | 4 +- .../Datasources/tpcds/createViewsJson.sql | 50 ++++++------- .../Datasources/tpcds/createViewsParquet.sh | 4 +- .../Datasources/tpcds/createViewsParquet.sql | 50 ++++++------- .../tpcds/createViewsParquet_sf100.sh | 4 +- .../tpcds/createViewsParquet_sf100.sql | 50 ++++++------- .../Datasources/tpcds/createViewsText.sh | 4 +- .../Datasources/tpcds/createViewsText.sql | 50 ++++++------- .../tpcds/refresh_metadata_tpcds.sh | 4 +- .../aggregates/aggregate/aggregate.json | 2 +- .../aggregates/aggregation/bugs/bugs.json | 2 +- .../count_distinct/count_distinct.json | 2 +- .../aggregation/group_by_case/case.json | 2 +- .../group_by_expression.json | 2 +- .../aggregation/multicolumn/multicolumn.json | 2 +- .../aggregates/aggregation/sanity/sanity.json | 2 +- .../aggregates/aggregation/scalar/scalar.json | 2 +- .../tpcds_variants/parquet/aggregate.json | 2 +- .../tpcds_variants/text/aggregate.json | 2 +- .../resources/Functional/amplab/amplab.json | 2 +- .../Functional/case_expr/casexpr.json | 2 +- .../Functional/complex/json/complex.json | 2 +- .../Functional/complex/json/drill-2879.json | 2 +- .../Functional/complex/json/drill-3537a.json | 2 +- .../Functional/complex/json/drill-3537b.json | 2 +- .../Functional/complex/json/drill-4180.json | 2 +- .../Functional/complex/json/drill-4479a.json | 2 +- .../Functional/complex/json/drill-4479b.json | 2 +- .../Functional/complex/json/drill-4664.json | 2 +- .../Functional/complex/parquet/complex.json | 2 +- .../resources/Functional/convert/convert.json | 2 +- .../cross-sources/cross-sources.json | 2 +- .../merge-join/mj_nullable-json-hive-join.q | 2 +- .../merge-join/mj_nullable-json-text-join.q | 4 +- .../mj_nullable-parquet-hive-join.q | 2 +- .../mj_nullable-parquet-hive-leftjoin.q | 2 +- .../mj_nullable-parquet-json-join.q | 4 +- .../mj_nullable-parquet-text-join.q | 4 +- .../mj_nullable-parquet-text-view-join.q | 10 +-- .../merge-join/mj_parquet-hive-join.q | 2 +- 
.../merge-join/mj_parquet-json-join.q | 4 +- .../merge-join/mj_parquet-text-join.q | 4 +- .../cross-sources/nullable-json-hive-join.q | 2 +- .../nullable-json-hive-unionall.q | 2 +- .../nullable-json-hivehbase-join.q | 2 +- .../cross-sources/nullable-json-text-join.q | 4 +- .../nullable-json-text-unionall.q | 4 +- .../nullable-parquet-hive-fulljoin.q | 2 +- .../nullable-parquet-hive-join.q | 2 +- .../nullable-parquet-hive-leftjoin.q | 2 +- .../nullable-parquet-hive-unionall.q | 2 +- .../nullable-parquet-json-join.q | 4 +- .../nullable-parquet-json-unionall.q | 4 +- .../nullable-parquet-text-join.q | 4 +- .../nullable-parquet-text-unionall.q | 4 +- .../nullable-parquet-text-view-join.q | 10 +-- .../parquet-hive-fulljoin_DRILL-2707.q | 2 +- .../cross-sources/parquet-hive-join.q | 2 +- .../cross-sources/parquet-hive-unionall.q | 2 +- .../parquet-json-fulljoin_DRILL-2707.q | 4 +- .../cross-sources/parquet-json-join.q | 4 +- .../cross-sources/parquet-json-unionall.q | 4 +- .../cross-sources/parquet-text-join.q | 4 +- .../cross-sources/parquet-text-unionall.q | 4 +- .../Functional/cross-sources/q_DRILL-2606.q | 2 +- .../ctas/ctas_auto_partition/copy.sh | 2 +- .../csv/data/ctas_auto_partition.json | 2 +- .../data/ctas_auto_partition.json | 2 +- .../plan/ctas_auto_partition.json | 2 +- .../json/data/ctas_auto_partition.json | 2 +- .../parquet/data/ctas_auto_partition.json | 2 +- .../parquet/plan/ctas_auto_partition.json | 2 +- .../general/data/ctas_auto_partition.json | 2 +- .../general/data/drill3947_1.q | 2 +- .../general/data/drill3947_2.q | 2 +- .../general/data/drill3947_3.q | 2 +- .../general/data/drill3947_4.q | 2 +- .../general/data/drill3947_5.q | 2 +- .../general/data/drill3947_6.q | 2 +- .../general/data/drill3947_7.q | 2 +- .../general/data/drill3947_8.q | 2 +- .../general/data/drill3947_9.q | 2 +- .../ctas_auto_partition/general/drill3965_1.q | 2 +- .../ctas_auto_partition/general/drill3965_2.q | 2 +- .../ctas_auto_partition/general/drill3965_3.q | 2 +- 
.../general/plan/ctas_auto_partition.json | 2 +- .../general/plan/drill3947_1.q | 2 +- .../general/plan/drill3947_2.q | 2 +- .../general/plan/drill3947_3.q | 2 +- .../general/plan/drill3947_4.q | 2 +- .../general/plan/drill3947_5.q | 2 +- .../general/plan/drill3947_6.q | 2 +- .../general/plan/drill3947_7.q | 2 +- .../general/plan/drill3947_8.q | 2 +- .../general/plan/drill3947_9.q | 2 +- .../data/ctas_auto_partition.json | 2 +- .../plan/ctas_auto_partition.json | 2 +- .../tpch0.01_single_partition/tpch.json | 2 +- .../tpch0.01_single_partition1/tpch.json | 2 +- .../ctas_flatten/100000rows/ctas_flatten.json | 2 +- .../ctas/ctas_flatten/100000rows/filter0.q | 8 +- .../ctas/ctas_flatten/100000rows/filter13.q | 8 +- .../ctas/ctas_flatten/100000rows/filter14.q | 8 +- .../ctas/ctas_flatten/100000rows/filter15.q | 8 +- .../ctas/ctas_flatten/100000rows/filter16.q | 8 +- .../ctas/ctas_flatten/100000rows/filter19.q | 8 +- .../ctas/ctas_flatten/100000rows/filter2.q | 8 +- .../ctas/ctas_flatten/100000rows/filter20.q | 8 +- .../ctas/ctas_flatten/100000rows/filter21.q | 8 +- .../ctas/ctas_flatten/100000rows/filter24.q | 8 +- .../ctas/ctas_flatten/100000rows/filter25.q | 8 +- .../ctas/ctas_flatten/100000rows/filter3.q | 8 +- .../ctas/ctas_flatten/100000rows/filter4.q | 8 +- .../ctas/ctas_flatten/100000rows/filter5.q | 8 +- .../100000rows/general0_DRILL-2161.q | 8 +- .../100000rows/hash-join/join11.q | 6 +- .../ctas/ctas_flatten/2rows/alltypes1.q | 6 +- .../ctas/ctas_flatten/2rows/alltypes2.q | 6 +- .../ctas/ctas_flatten/2rows/ctas_flatten.json | 2 +- .../ctas/ctas_flatten/2rows/filter0.q | 6 +- .../ctas/ctas_flatten/2rows/filter1.q | 6 +- .../ctas/ctas_flatten/2rows/filter11.q | 6 +- .../ctas/ctas_flatten/2rows/filter12.q | 6 +- .../ctas/ctas_flatten/2rows/filter13.q | 6 +- .../ctas/ctas_flatten/2rows/filter14.q | 6 +- .../ctas/ctas_flatten/2rows/filter15.q | 6 +- .../ctas/ctas_flatten/2rows/filter16.q | 6 +- .../ctas/ctas_flatten/2rows/filter18.q | 6 +- 
.../ctas/ctas_flatten/2rows/filter19.q | 6 +- .../ctas/ctas_flatten/2rows/filter2.q | 6 +- .../ctas/ctas_flatten/2rows/filter20.q | 6 +- .../ctas/ctas_flatten/2rows/filter21.q | 6 +- .../ctas/ctas_flatten/2rows/filter23.q | 6 +- .../ctas/ctas_flatten/2rows/filter24.q | 6 +- .../ctas/ctas_flatten/2rows/filter25.q | 6 +- .../ctas/ctas_flatten/2rows/filter3.q | 6 +- .../ctas/ctas_flatten/2rows/filter4.q | 6 +- .../ctas/ctas_flatten/2rows/filter5.q | 6 +- .../ctas/ctas_flatten/2rows/filter6.q | 6 +- .../ctas/ctas_flatten/2rows/filter7.q | 6 +- .../ctas/ctas_flatten/2rows/filter8.q | 6 +- .../ctas/ctas_flatten/2rows/filter9.q | 6 +- .../ctas/ctas_flatten/2rows/functions1.q | 6 +- .../ctas_flatten/2rows/hash-join/hj_filter.q | 6 +- .../ctas/ctas_flatten/2rows/hash-join/join1.q | 6 +- .../ctas_flatten/2rows/hash-join/join10.q | 6 +- .../ctas_flatten/2rows/hash-join/join11.q | 6 +- .../ctas/ctas_flatten/2rows/hash-join/join6.q | 6 +- .../ctas_flatten/2rows/nested1_DRILL-2254.q | 6 +- .../ctas/ctas_flatten/2rows/orderby0.q | 6 +- .../ctas/ctas_flatten/2rows/orderby1.q | 6 +- .../ctas_flatten/2rows/orderby15_DRILL-2254.q | 6 +- .../ctas_flatten/2rows/orderby16_DRILL-2254.q | 6 +- .../ctas_flatten/2rows/orderby22_DRILL-2254.q | 6 +- .../ctas/ctas_flatten/2rows/orderby4.q | 6 +- .../ctas/ctas_flatten/2rows/orderby5.q | 6 +- .../ctas/ctas_flatten/2rows/orderby7.q | 6 +- .../ctas/ctas_flatten/2rows/q_DRILL-2292.q | 6 +- .../ctas/ctas_flatten/2rows/withinGroupBy9.q | 6 +- .../ctas/ctas_joins_aggregates/README | 2 +- .../ctas/ctas_joins_aggregates/ctas.json | 2 +- .../ctas/ctas_joins_aggregates/drill_2293.sql | 6 +- .../resources/Functional/cttas/cttas.json | 2 +- .../resources/Functional/cttas/cttas_23.sql | 2 +- .../resources/Functional/cttas/cttas_24.sql | 4 +- .../data-shapes/more-columns/morecolumns.json | 2 +- .../5000/1000rows/json/widestrings.json | 27 +++++++ .../5000/1000rows/parquet/widestrings.json | 2 +- .../5000/1000rows/text/widestrings.json | 27 +++++++ 
.../wide-columns/general/widestrings.json | 2 +- .../date_trunc/dt_trunc.json.failing | 2 +- .../Functional/datetime/datetime-basic.json | 2 +- .../resources/Functional/decimal/decimal.json | 2 +- .../Functional/droptable/droptable.json | 2 +- .../Functional/droptable/droptable11.q | 2 +- .../Functional/droptable/droptable12.q | 2 +- .../droptable/droptable13.q_negative | 2 +- .../Functional/droptable/droptable14.q | 2 +- .../Functional/droptable/droptable15.q | 2 +- .../Functional/droptable/droptable25.q | 4 +- .../droptable/droptable28.q_negative | 2 +- .../Functional/droptable/droptable29.q | 2 +- .../Functional/droptable/droptable30.q | 4 +- .../Functional/droptable/droptable31.q | 2 +- .../Functional/droptable/droptable32v.e | 2 +- .../Functional/droptable/droptable4.q | 2 +- .../Functional/droptable/droptable6.q | 2 +- .../droptable/droptable7.q_negative | 2 +- .../droptable/droptable8.q_negative | 2 +- .../droptable/droptable_negative.json | 2 +- .../Functional/droptable/droptableif11.q | 2 +- .../Functional/droptable/droptableif12.q | 2 +- .../Functional/droptable/droptableif14.q | 2 +- .../Functional/droptable/droptableif15.q | 2 +- .../Functional/droptable/droptableif25.q | 4 +- .../droptable/droptableif28.q_negative | 2 +- .../Functional/droptable/droptableif29.q | 2 +- .../Functional/droptable/droptableif30.q | 4 +- .../Functional/droptable/droptableif31.q | 2 +- .../Functional/droptable/droptableif32v.e | 2 +- .../Functional/droptable/droptableif4.q | 2 +- .../Functional/droptable/droptableif6.q | 2 +- .../droptable/droptableif7.q_negative | 2 +- .../droptable/droptableif8.q_negative | 2 +- .../explicit_cast/explicit_cast.json | 2 +- .../Functional/filter/pushdown/data/data.json | 2 +- .../Functional/filter/pushdown/plan/plan.json | 2 +- .../filter/selectivity/negative/plan.json | 2 +- .../filter/selectivity/plan/plan.json | 2 +- .../100000rows/flatten_operators.json | 2 +- .../2rows/flatten_operators.json | 2 +- .../general/flatten_operators.json | 2 
+- .../flatten_operators/general/jira1679.q | 2 +- .../Functional/flatten_operators/general/q3.q | 2 +- .../flatten_operators/negative/drill-2777.sql | 2 +- .../flatten_operators/negative/negative.json | 2 +- .../schema_changes/flatten_operators.json | 2 +- .../hive/hive_functions/drill_hive_chain3.q | 2 +- .../hash-join/data/hive_native.json | 2 +- .../data/nullable-parquet-hive-fulljoin.q | 2 +- .../data/nullable-parquet-hive-join.q | 2 +- .../data/nullable-parquet-hive-leftjoin.q | 2 +- .../data/nullable-parquet-hive-unionall.q | 2 +- .../data/parquet-hive-fulljoin_DRILL-2707.q | 2 +- .../hash-join/data/parquet-hive-join.q | 2 +- .../hash-join/data/parquet-hive-unionall.q | 2 +- .../hash-join/plan/hive_native.json | 2 +- .../plan/nullable-parquet-hive-fulljoin.q | 2 +- .../plan/nullable-parquet-hive-join.q | 2 +- .../plan/nullable-parquet-hive-leftjoin.q | 2 +- .../plan/nullable-parquet-hive-unionall.q | 2 +- .../plan/parquet-hive-fulljoin_DRILL-2707.q | 2 +- .../hash-join/plan/parquet-hive-join.q | 2 +- .../hash-join/plan/parquet-hive-unionall.q | 2 +- .../merge-join/data/hive_native.json | 2 +- .../data/nullable-parquet-hive-join.q | 2 +- .../data/nullable-parquet-hive-leftjoin.q | 2 +- .../data/nullable-parquet-hive-unionall.q | 2 +- .../merge-join/data/parquet-hive-join.q | 2 +- .../merge-join/data/parquet-hive-unionall.q | 2 +- .../merge-join/plan/hive_native.json | 2 +- .../plan/nullable-parquet-hive-join.q | 2 +- .../plan/nullable-parquet-hive-leftjoin.q | 2 +- .../merge-join/plan/parquet-hive-join.q | 2 +- .../operators/data/hive_native.json | 2 +- .../operators/plan/hive_native.json | 2 +- .../compressed/rc_snappy_json_join.q.fail | 2 +- .../compressed/seq_snappy_json_join.q | 2 +- .../compressed/text_bz2_json_join.q | 2 +- .../compressed/text_gz_json_join.q | 2 +- .../hive_storage/fileformats/parquet2.q.fail | 2 +- .../dfs_parquet/hivegeneratedParquet.json | 2 +- .../dfs_parquet/q7.sql.drill4337 | 2 +- .../hive_native/q7.sql.drill4337 | 2 +- 
.../hivegenerated_parquet/hiveplugin/q7.sql | 2 +- .../hive_functions.json | 2 +- .../Functional/identifiers/identifiers.json | 2 +- .../impersonation/dfs/groupusershowfiles.q | 4 +- .../impersonation/dfs/noaccessnestedviews.q | 2 +- .../impersonation/dfs/ownertwotables.q | 4 +- .../impersonation/dfs/secondaryuserctas.q | 4 +- .../dfs/secondaryuserctasparquet.q | 4 +- .../dfs/secondaryusernestedviews.q | 4 +- .../dfs/secondaryusernestedviewscsv.q | 4 +- .../dfs/secondaryuserreplaceview.q | 4 +- .../impersonation/dfs/secondaryuserview.q | 4 +- .../impersonation/dfs/showtabledifferentacl.e | 8 +- .../impersonation/dfs/showtabledifferentacl.q | 4 +- .../impersonation/dfs/superusernestedviews.q | 4 +- .../impersonation/dfs/superusershowfiles.q | 4 +- .../impersonation/dfs/superusershowtables.q | 4 +- .../dfs/unknownusercreatecsvtable.q | 2 +- .../dfs/unknownuserthreetables.json | 2 +- .../dfs/unknownuserthreetables.q | 2 +- .../implicit_cast/implicit_cast.json | 2 +- .../Functional/int96/int96_data.json | 2 +- .../Functional/int96/int96_plan.json | 2 +- .../Functional/int96/q11.sql_drill5389 | 2 +- .../Functional/int96/q8.sql_drill5389 | 2 +- .../data/ctas_auto_partition.json | 2 +- .../plan/ctas_auto_partition.json | 2 +- .../dfs/csv/data/partitionDirectory.json | 2 +- .../dfs/csv/plan/partitionDirectory.json | 2 +- .../dfs/hierarchical/data/1.q | 2 +- .../dfs/hierarchical/data/2.q | 2 +- .../dfs/hierarchical/data/3.q | 2 +- .../dfs/hierarchical/data/4.q | 2 +- .../dfs/hierarchical/data/6.q | 2 +- .../hierarchical/data/partitionDirectory.json | 2 +- .../dfs/hierarchical/plan/1.q | 2 +- .../dfs/hierarchical/plan/2.q | 2 +- .../dfs/hierarchical/plan/3.q | 2 +- .../dfs/hierarchical/plan/4.q | 2 +- .../hierarchical/plan/partitionDirectory.json | 2 +- .../data/partitionDirectory.json | 2 +- .../plan/partitionDirectory.json | 2 +- .../hier_intint/data/partitionDirectory.json | 2 +- .../hier_intint/plan/partitionDirectory.json | 2 +- 
.../joins/explicit_cast/explicit_cast.json | 2 +- .../joins/full_outer/full_outer.json | 2 +- .../joins/implicit_cast/implicit_cast.json | 2 +- .../implicit_cast.json | 2 +- .../Functional/joins/inner/inner.json | 2 +- .../resources/Functional/joins/join/join.json | 2 +- .../joins/left_outer/left_outer.json | 2 +- .../joins/nulleqjoin/nulleqjoin.json | 2 +- .../joins/on_expression/on_expression.json | 2 +- .../order_by/queries/order_by_queries.json | 2 +- .../joins/right_outer/right_outer.json | 2 +- .../joins/subqueries/subqueries.json | 2 +- .../Functional/joins/views/views.json | 2 +- .../json/extended_json/extended_json.json | 2 +- .../json/json_bracketless/json_storage.json | 2 +- .../json_kvgenflatten/flatten/drill2217_1.q | 2 +- .../json_kvgenflatten/flatten/drill2217_2.q | 2 +- .../json_kvgenflatten/flatten/drill2217_3.q | 2 +- .../flatten/flatten_drill3562.q | 2 +- .../flatten/flatten_drill3562_1.q.f | 2 +- .../json/json_kvgenflatten/kvgenFlatten.json | 2 +- .../json/json_storage/jsonGenericGroup.json | 2 +- .../kvgenflatten-nulls.json | 2 +- .../aggregates/aggregate/data/aggregate.json | 2 +- .../aggregates/aggregate/plan/aggregate.json | 2 +- .../aggregation/bugs/data/bugs.json | 2 +- .../aggregation/bugs/plan/bugs.json | 2 +- .../count_distinct/data/count_distinct.json | 2 +- .../count_distinct/plan/count_distinct.json | 2 +- .../aggregation/group_by_case/data/case.json | 2 +- .../aggregation/group_by_case/plan/case.json | 2 +- .../data/group_by_expression.json | 2 +- .../plan/group_by_expression.json | 2 +- .../multicolumn/data/multicolumn.json | 2 +- .../multicolumn/plan/multicolumn.json | 2 +- .../aggregation/sanity/data/sanity.json | 2 +- .../aggregation/sanity/plan/sanity.json | 2 +- .../aggregation/scalar/data/scalar.json | 2 +- .../aggregation/scalar/plan/scalar.json | 2 +- .../parquet/data/aggregate.json | 2 +- .../parquet/plan/aggregate.json | 2 +- .../limit0/convert/data/convert.json | 2 +- .../limit0/convert/plan/convert.json | 2 +- 
.../limit0/datetime/data/datetime-basic.json | 2 +- .../limit0/datetime/plan/datetime-basic.json | 2 +- .../limit0/decimal/data/decimal.json | 2 +- .../Functional/limit0/filters/data/data.json | 2 +- .../Functional/limit0/filters/plan/data.json | 2 +- .../functions/data/limit0_functions.json | 2 +- .../functions/plan/limit0_functions.json | 2 +- .../hive_functions/data/drill_hive_chain3.q | 2 +- .../hive_functions/plan/drill_hive_chain3.q | 2 +- .../hash-join/data/hive_native.json | 2 +- .../nullable-parquet-hive-fulljoin.q_MD-766 | 2 +- .../data/nullable-parquet-hive-join.q_MD-766 | 2 +- .../nullable-parquet-hive-leftjoin.q_MD-766 | 2 +- .../data/nullable-parquet-hive-unionall.q | 2 +- .../parquet-hive-fulljoin_DRILL-2707.q_MD-766 | 2 +- .../hash-join/data/parquet-hive-join.q_MD-766 | 2 +- .../hash-join/data/parquet-hive-unionall.q | 2 +- .../hash-join/plan/hive_native.json | 2 +- .../plan/nullable-parquet-hive-fulljoin.q | 2 +- .../plan/nullable-parquet-hive-join.q | 2 +- .../plan/nullable-parquet-hive-leftjoin.q | 2 +- .../nullable-parquet-hive-unionall.q.fail | 2 +- .../plan/parquet-hive-fulljoin_DRILL-2707.q | 2 +- .../hash-join/plan/parquet-hive-join.q | 2 +- .../plan/parquet-hive-unionall.q.fail | 2 +- .../merge-join/data/hive_native.json | 2 +- .../data/nullable-parquet-hive-join.q_MD-766 | 2 +- .../nullable-parquet-hive-leftjoin.q_Md-766 | 2 +- .../data/nullable-parquet-hive-unionall.q | 2 +- .../data/parquet-hive-join.q_MD-766 | 2 +- .../merge-join/data/parquet-hive-unionall.q | 2 +- .../merge-join/plan/hive_native.json | 2 +- .../plan/nullable-parquet-hive-join.q | 2 +- .../plan/nullable-parquet-hive-leftjoin.q | 2 +- .../merge-join/plan/parquet-hive-join.q | 2 +- .../operators/data/hive_native.json | 2 +- .../operators/plan/hive_native.json | 2 +- .../data/implicit_cast.json | 2 +- .../plan/implicit_cast.json | 2 +- .../limit0/p1tests/parquet/data/p1tests.json | 2 +- .../limit0/p1tests/parquet/plan/p1tests.json | 2 +- 
.../limit0/p1tests/text/data/p1tests.json | 2 +- .../limit0/p1tests/text/plan/p1tests.json | 2 +- .../Functional/limit0/union/data/queries.json | 2 +- .../Functional/limit0/union/plan/queries.json | 2 +- .../prq_union_all/data/prq_union_all.json | 2 +- .../prq_union_all/plan/prq_union_all.json | 2 +- .../limit0/views/data/query35.e_tsv | 2 +- .../Functional/limit0/views/data/views.json | 2 +- .../Functional/limit0/views/plan/views.json | 2 +- .../aggregates/data/queries.json | 2 +- .../aggregates/plan/queries.json | 2 +- .../window_functions/bugs/data/bugs.json | 2 +- .../window_functions/bugs/plan/bugs.json | 2 +- .../data/empty_over_clause.json | 2 +- .../plan/empty_over_clause.json | 2 +- .../data/multiple_partitions.json | 2 +- .../plan/multiple_partitions.json | 2 +- .../data/tpcds_parquet_sf1.json | 2 +- .../plan/tpcds_parquet_sf1.json | 2 +- .../metadata_caching/data/bool_partition1.q | 2 +- .../metadata_caching/data/bool_partition2.q | 2 +- .../metadata_caching/data/date_partition1.q | 2 +- .../metadata_caching/data/date_partition2.q | 2 +- .../metadata_caching/data/error1.e_tsv | 2 +- .../Functional/metadata_caching/data/error1.q | 2 +- .../metadata_caching/data/int_partition1.q | 2 +- .../mc1_addautopartitioned_files1.q_disabled | 2 +- .../mc1_addautopartitioned_files2.q_disabled | 2 +- .../metadata_caching/data/mc1_autopartition.q | 2 +- .../data/mc1_deletetoplevelcache1.q | 2 +- .../data/mc1_deletetoplevelcache2.q | 2 +- .../metadata_caching/data/mc1_dirfilter1.q | 2 +- .../metadata_caching/data/mc1_dirfilter2.q | 2 +- .../metadata_caching/data/mc1_dirfilter3.q | 2 +- .../metadata_caching/data/mc1_dirfilter4.q | 2 +- ...c1_removeautopartitioned_files1.q_disabled | 2 +- .../data/metadata_caching_small.json | 2 +- .../metadata_caching_small.json | 2 +- .../data/metadata_caching_pp.json | 2 +- .../plan/metadata_caching_pp.json | 2 +- .../plan/bool_partition1.q.fail | 2 +- .../plan/bool_partition2.q.fail | 2 +- .../metadata_caching/plan/date_partition1.q | 
2 +- .../plan/date_partition2.q_nondeterministic | 2 +- .../metadata_caching/plan/drill-3892.q | 2 +- .../metadata_caching/plan/int_partition1.q | 2 +- .../metadata_caching/plan/mc1_autopartition.q | 2 +- .../plan/metadata_caching_small.json | 2 +- .../min_max_dir/drill-3474/drill-3474_1.q | 2 +- .../min_max_dir/drill-3474/drill-3474_11.q | 2 +- .../min_max_dir/drill-3474/drill-3474_12.q | 2 +- .../min_max_dir/drill-3474/drill-3474_13.q | 2 +- .../min_max_dir/drill-3474/drill-3474_15.q | 2 +- .../min_max_dir/drill-3474/drill-3474_16.q | 2 +- .../min_max_dir/drill-3474/drill-3474_18.q | 2 +- .../Functional/min_max_dir/drill-3894_1.q | 2 +- .../Functional/min_max_dir/drill-3894_10.q | 2 +- .../Functional/min_max_dir/drill-3894_11.q | 2 +- .../Functional/min_max_dir/drill-3894_12.q | 2 +- .../Functional/min_max_dir/drill-3894_13.q | 2 +- .../Functional/min_max_dir/drill-3894_14.q | 2 +- .../Functional/min_max_dir/drill-3894_15.q | 2 +- .../Functional/min_max_dir/drill-3894_16.q | 2 +- .../Functional/min_max_dir/drill-3894_17.q | 2 +- .../Functional/min_max_dir/drill-3894_18.q | 2 +- .../Functional/min_max_dir/drill-3894_19.q | 2 +- .../Functional/min_max_dir/drill-3894_2.q | 2 +- .../Functional/min_max_dir/drill-3894_20.q | 2 +- .../Functional/min_max_dir/drill-3894_21.q | 2 +- .../Functional/min_max_dir/drill-3894_22.q | 2 +- .../Functional/min_max_dir/drill-3894_23.q | 2 +- .../Functional/min_max_dir/drill-3894_24.q | 2 +- .../Functional/min_max_dir/drill-3894_25.q | 2 +- .../Functional/min_max_dir/drill-3894_26.q | 2 +- .../Functional/min_max_dir/drill-3894_27.q | 2 +- .../Functional/min_max_dir/drill-3894_28.q | 2 +- .../Functional/min_max_dir/drill-3894_29.q | 2 +- .../Functional/min_max_dir/drill-3894_3.q | 2 +- .../Functional/min_max_dir/drill-3894_30.q | 2 +- .../Functional/min_max_dir/drill-3894_31.q | 2 +- .../Functional/min_max_dir/drill-3894_32.q | 2 +- .../Functional/min_max_dir/drill-3894_33.q | 2 +- .../Functional/min_max_dir/drill-3894_34.q | 2 +- 
.../Functional/min_max_dir/drill-3894_35.q | 2 +- .../Functional/min_max_dir/drill-3894_36.q | 2 +- .../Functional/min_max_dir/drill-3894_37.q | 2 +- .../Functional/min_max_dir/drill-3894_4.q | 2 +- .../Functional/min_max_dir/drill-3894_5.q | 2 +- .../Functional/min_max_dir/drill-3894_6.q | 2 +- .../Functional/min_max_dir/drill-3894_7.q | 2 +- .../Functional/min_max_dir/drill-3894_8.q | 2 +- .../Functional/min_max_dir/drill-3894_9.q | 2 +- .../Functional/min_max_dir/min_max_dir.json | 2 +- framework/resources/Functional/misc/misc.json | 2 +- .../Functional/morefiles/morefiles.json | 2 +- .../resources/Functional/orderby/orderby.json | 2 +- .../resources/Functional/p1tests/p1tests.json | 2 +- .../complex_reader/parquetComplexTest.json | 2 +- .../parquet_storage/negative/negative.json | 2 +- .../parquet_autoPrtn/autoPrtnPrqNstd.json | 2 +- .../auto_partition/data/drill4996.q | 2 +- .../auto_partition/data/parquet_date.json | 2 +- .../parquet_date/auto_partition/data/q1.q | 2 +- .../parquet_date/auto_partition/data/q2.q | 2 +- .../parquet_date/auto_partition/data/q3.q | 2 +- .../parquet_date/auto_partition/data/q4.q | 2 +- .../parquet_date/auto_partition/data/q5.q | 2 +- .../parquet_date/auto_partition/data/q6.q | 2 +- .../parquet_date/auto_partition/data/q7.q | 2 +- .../auto_partition/plan/parquet_date.json | 2 +- .../parquet_date/auto_partition/plan/q1.q | 2 +- .../parquet_date/auto_partition/plan/q2.q | 2 +- .../parquet_date/auto_partition/plan/q3.q | 2 +- .../parquet_date/auto_partition/plan/q4.q | 2 +- .../auto_partition/plan/q5.q.drill4999 | 2 +- .../parquet_date/auto_partition/plan/q6.q | 2 +- .../auto_partition/plan/q7.q.drill4999 | 2 +- .../parquet_date/generic/drill5004.q | 2 +- .../parquet_date/generic/mixed1.q | 2 +- .../parquet_date/generic/parquet_date.json | 2 +- .../parquet_storage/parquet_date/generic/q1.q | 2 +- .../parquet_storage/parquet_date/generic/q2.q | 2 +- .../parquet_storage/parquet_date/generic/q3.q | 2 +- 
.../parquet_storage/parquet_date/generic/q4.q | 2 +- .../parquet_storage/parquet_date/generic/q5.q | 2 +- .../parquet_storage/parquet_date/generic/q6.q | 2 +- .../parquet_storage/parquet_date/generic/q7.q | 2 +- .../parquet_storage/parquet_date/generic/q8.q | 2 +- .../parquet_date/generic/spark1.q | 2 +- .../parquet_date/generic/spark2.q | 2 +- .../parquet_date/generic/spark3.q | 2 +- .../parquet_date/generic/spark4.q | 2 +- .../parquet_date/generic/spark5.q | 2 +- .../parquet_date/generic/spark6.q | 2 +- .../parquet_date/generic/spark7.q | 2 +- .../mc_parquet_date/drill1.2gen/q15.q | 2 +- .../mc_parquet_date/drill1.2gen/q16.q | 2 +- .../mc_parquet_date/drill1.2gen/q17.q | 2 +- .../mc_parquet_date/drill1.2gen/q18.q | 2 +- .../mc_parquet_date/drill1.2gen/q19.q | 2 +- .../mc_parquet_date/drill1.2gen/q20.q | 8 +- .../mc_parquet_date/drill1.2gen/q21.q | 4 +- .../mc_parquet_date/drill1.2gen/q22.q | 8 +- .../mc_parquet_date/drill1.2gen/q23.q | 2 +- .../mc_parquet_date/drill1.2gen/q24.q | 2 +- .../mc_parquet_date/drill1.6gen/mc_q15.q | 2 +- .../mc_parquet_date/drill1.6gen/mc_q16.q | 2 +- .../mc_parquet_date/drill1.6gen/mc_q17.q | 2 +- .../mc_parquet_date/drill1.6gen/mc_q18.q | 2 +- .../mc_parquet_date/drill1.6gen/mc_q19.q | 2 +- .../mc_parquet_date/drill1.6gen/mc_q20.q | 8 +- .../mc_parquet_date/drill1.6gen/mc_q21.q | 4 +- .../mc_parquet_date/drill1.6gen/mc_q22.q | 8 +- .../mc_parquet_date/drill1.6gen/mc_q23.q | 2 +- .../mc_parquet_date/drill1.6gen/mc_q24.q | 2 +- .../mc_parquet_date/generic/mixed1.q | 2 +- .../generic/mixed1_partitioned1.q.drill5002 | 2 +- .../generic/mixed1_partitioned2.q | 2 +- .../generic/mixed1_partitioned3.q | 2 +- .../generic/mixed1_partitioned4.q | 4 +- .../generic/mixed1_partitioned5.q | 2 +- .../generic/mixed1_partitioned6.q | 2 +- .../generic/mixed1_partitioned7.q | 2 +- .../generic/mixed1_partitioned8.q | 6 +- .../parquet_date/mc_parquet_date/generic/q1.q | 2 +- .../parquet_date/mc_parquet_date/generic/q2.q | 2 +- 
.../parquet_date/mc_parquet_date/generic/q3.q | 2 +- .../parquet_date/mc_parquet_date/generic/q4.q | 2 +- .../parquet_date/mc_parquet_date/generic/q5.q | 2 +- .../parquet_date/mc_parquet_date/generic/q6.q | 2 +- .../parquet_date/mc_parquet_date/generic/q7.q | 2 +- .../parquet_date/mc_parquet_date/generic/q8.q | 2 +- .../mc_parquet_date/generic/spark1.q | 2 +- .../mc_parquet_date/generic/spark2.q | 2 +- .../mc_parquet_date/generic/spark3.q | 2 +- .../mc_parquet_date/generic/spark4.q | 2 +- .../mc_parquet_date/generic/spark5.q | 2 +- .../mc_parquet_date/generic/spark6.q | 2 +- .../mc_parquet_date/generic/spark7.q | 2 +- .../mc_parquet_date/parquet_date.json | 2 +- .../parquet_generic/DRILL-4759.q | 2 +- .../parquet_generic/drill4048_1.q | 2 +- .../parquet_generic/drill4349.q | 2 +- .../parquet_generic/drill4764_1.q | 2 +- .../parquet_generic/drill4764_2.q | 2 +- .../parquet_generic/drill4764_3.q | 2 +- .../parquet_generic/drill4764_4.q | 2 +- .../parquet_generic/drill4764_5.q | 2 +- .../parquet_generic/drill4764_6.q | 10 +-- .../parquet_generic/parquetReadGroup.json | 2 +- .../parquetInNestedDir.json | 2 +- .../dfs/csv/data/drill4071.q | 2 +- .../dfs/csv/data/partitionDirectory.json | 2 +- .../dfs/csv/plan/drill4071.q | 2 +- .../dfs/csv/plan/partitionDirectory.json | 2 +- .../dfs/hierarchical/data/drill4071.q | 2 +- .../dfs/hierarchical/data/drill4665_0.q | 2 +- .../dfs/hierarchical/data/drill4665_1.q | 2 +- .../dfs/hierarchical/data/drill4665_2.q | 2 +- .../dfs/hierarchical/data/drill4665_3.q | 2 +- .../dfs/hierarchical/data/drill4665_4.q | 2 +- .../dfs/hierarchical/data/drill4665_5.q | 2 +- .../dfs/hierarchical/data/drill4665_6.q | 2 +- .../dfs/hierarchical/data/drill4665_7.q | 2 +- .../hierarchical/data/partitionDirectory.json | 2 +- .../dfs/hierarchical/plan/drill4071.q | 2 +- .../dfs/hierarchical/plan/drill4665_0.q | 2 +- .../dfs/hierarchical/plan/drill4665_1.q | 2 +- .../dfs/hierarchical/plan/drill4665_2.q | 2 +- .../dfs/hierarchical/plan/drill4665_3.q | 2 +- 
.../dfs/hierarchical/plan/drill4665_4.q | 2 +- .../dfs/hierarchical/plan/drill4665_5.q | 2 +- .../dfs/hierarchical/plan/drill4665_6.q | 2 +- .../dfs/hierarchical/plan/drill4665_7.q | 2 +- .../hierarchical/plan/partitionDirectory.json | 2 +- .../dfs/json/data/drill4250_1.q | 2 +- .../dfs/json/data/drill4250_2.q | 2 +- .../dfs/json/data/drill4250_3.q | 2 +- .../dfs/json/data/partitionDirectory.json | 2 +- .../dfs/json/plan/drill4250_1.q | 2 +- .../dfs/json/plan/drill4250_2.q | 2 +- .../dfs/json/plan/drill4250_3.q | 2 +- .../dfs/json/plan/partitionDirectory.json | 2 +- .../dfs/mixed/data/partitionDirectory.json | 2 +- .../dfs/parquet/data/count.q | 2 +- .../dfs/parquet/data/drill4825_1.q | 4 +- .../dfs/parquet/data/drill4825_10.q | 4 +- .../dfs/parquet/data/drill4825_11.q | 4 +- .../dfs/parquet/data/drill4825_2.q | 4 +- .../dfs/parquet/data/drill4825_3.q | 4 +- .../dfs/parquet/data/drill4825_4.q | 4 +- .../dfs/parquet/data/drill4825_5.q | 4 +- .../dfs/parquet/data/drill4825_6.q | 4 +- .../dfs/parquet/data/drill4825_7.q | 4 +- .../dfs/parquet/data/drill4825_8.q | 10 +-- .../dfs/parquet/data/drill4825_9.q | 4 +- .../dfs/parquet/data/partitionDirectory.json | 2 +- .../dfs/parquet/plan/drill4825_1.q | 4 +- .../dfs/parquet/plan/drill4825_10.q | 4 +- .../dfs/parquet/plan/drill4825_2.q | 4 +- .../dfs/parquet/plan/drill4825_3.q | 4 +- .../dfs/parquet/plan/drill4825_4.q | 4 +- .../dfs/parquet/plan/drill4825_5.q | 4 +- .../dfs/parquet/plan/drill4825_6.q | 4 +- .../dfs/parquet/plan/drill4825_7.q | 4 +- .../dfs/parquet/plan/drill4825_8.q | 10 +-- .../dfs/parquet/plan/drill4825_9.q | 4 +- .../dfs/parquet/plan/drill4860.q.fail | 4 +- .../dfs/parquet/plan/partitionDirectory.json | 2 +- .../hive/general/data/general.json | 2 +- .../hive/general/plan/general.json | 2 +- .../data/partitionDirectory.json | 2 +- .../plan/partitionDirectory.json | 2 +- .../join/data/partitionDirectory.json | 2 +- .../join/plan/partitionDirectory.json | 2 +- .../data/partitionString.json | 2 +- 
.../date_partition/data/partitionDate.json | 2 +- .../data/partitionDirectory.json | 2 +- .../data/partitionDirectory.json | 2 +- .../data/partitionInt.json | 2 +- .../plan/partitionInt.json | 2 +- .../data/partitionString.json | 2 +- .../hier_intint/data/partitionDirectory.json | 2 +- .../data/partitionDirectory.json | 2 +- .../int_partition/data/partitionInt.json | 2 +- .../int_partition/plan/partitionInt.json | 2 +- .../parquet/join/data/partitionDirectory.json | 2 +- .../data/partitionString.json | 2 +- .../data/partitionDirectory.json | 2 +- .../plan/partitionDirectory.json | 2 +- .../data/partitionDirectory.json | 2 +- .../plan/partitionDirectory.json | 2 +- .../data/partitionDirectory.json | 2 +- .../data/partitionDirectory.json | 2 +- .../data/partitionDirectory.json | 2 +- .../plan/partitionInt.json | 2 +- .../data/partitionDirectory.json | 2 +- .../plan/partitionDirectory.json | 2 +- .../hier_intint/data/partitionDirectory.json | 2 +- .../hier_intint/plan/partitionDirectory.json | 2 +- .../data/partitionDirectory.json | 2 +- .../plan/partitionDirectory.json | 2 +- .../data/partitionDirectory.json | 2 +- .../text/int_partition/plan/partitionInt.json | 2 +- .../text/join/data/partitionDirectory.json | 2 +- .../text/join/plan/partitionDirectory.json | 2 +- .../data/partitionDirectory.json | 2 +- .../plan/partitionDirectory.json | 2 +- .../Functional/schema-changes/drill4032_1.q | 2 +- .../schema-changes/schem-changes.json | 2 +- .../json/empty_batch_json.json | 2 +- .../text/dfs/empty_batch_text_dfs.json | 2 +- .../Functional/subqueries/bugs/bugs.json | 2 +- .../exists_not_exists/exists_not_exists.json | 2 +- .../subqueries/negative/not_supported.json | 2 +- .../Functional/subqueries/not_in/not_in.json | 2 +- .../scalar_aggregate/scalar_aggregate.json | 2 +- .../subqueries/with_clause/with_clause.json | 2 +- .../table_function/positive/drill-3149_8.q | 2 +- .../positive/table_function.json | 2 +- .../text_storage/negative/textReadGroup.json | 2 +- 
.../text_storage/testcases/drill1831.q | 2 +- .../text_storage/testcases/textReadGroup.json | 2 +- .../tpcds/impala/json/tpcds_sf1_json.json | 2 +- .../impala/parquet/tpcds_parquet_sf1.json | 2 +- .../tpcds/impala/text/tpcds_text_sf1.json | 2 +- .../tpcds/sanity/json/tpcds_sf1_json.json | 2 +- .../sanity/parquet/tpcds_parquet_sf1.json | 2 +- .../tpcds/sanity/text/drill_4872.sql | 6 +- .../tpcds/sanity/text/tpcds_text_sf1.json | 2 +- .../tpcds/variants/json/tpcds_json_sf1.json | 2 +- .../variants/parquet/tpcds_parquet_sf1.json | 2 +- .../tpcds/variants/text/tpcds_text_sf1.json | 2 +- .../tpch/sf0dot01/original/json/tpch.json | 2 +- .../original/noextension/json/tpch.json | 2 +- .../original/noextension/parquet-root/01.q | 2 +- .../noextension/parquet-root/tpch.json | 2 +- .../original/noextension/parquet/01.q | 2 +- .../original/noextension/parquet/tpch.json | 2 +- .../noextension/parquetNull/tpch.json | 2 +- .../original/noextension/text/tpch.json | 2 +- .../tpch/sf0dot01/original/parquet/tpch.json | 2 +- .../original/text/text_decimal/1.json | 2 +- .../original/text/text_decimal/tpch.json | 2 +- .../original/text/text_double/tpch.json | 2 +- .../original/text/text_noviews/tpch.json | 2 +- .../sf0dot01/smoke/parquet/tpch-smoke.json | 2 +- framework/resources/Functional/udfs/udfs.json | 2 +- .../resources/Functional/union/queries.json | 2 +- .../union_all/negative/negative.json | 2 +- .../prq_union_all/prq_union_all.json | 2 +- .../resources/Functional/values/values.json | 2 +- .../Functional/views/drill2461.e_tsv | 2 +- .../resources/Functional/views/query35.e_tsv | 2 +- .../resources/Functional/views/views.json | 2 +- .../window_functions/aggregates/queries.json | 2 +- .../window_functions/bugs/bugs.json | 2 +- .../empty_over_clause/empty_over_clause.json | 2 +- .../window_functions/first_val/queries.json | 2 +- .../frameclause/RBCRACR/queries.json | 2 +- .../frameclause/RBUPACR/queries.json | 2 +- .../frameclause/RBUPAUF/queries.json | 2 +- 
.../RBUPAUF/starInSubQry.q.failing | 2 +- .../window_functions/frameclause/README.txt | 4 +- .../frameclause/defaultFrame/queries.json | 2 +- .../frameclause/multipl_wnwds/queries.json | 2 +- .../frameclause/subQueries/queries.json | 2 +- .../window_functions/lag_func/queries.json | 2 +- .../window_functions/last_val/queries.json | 2 +- .../window_functions/lead_func/queries.json | 2 +- .../window_functions/misc/misc.json | 2 +- .../multiple_partitions.json | 2 +- .../window_functions/negative/negative.json | 2 +- .../window_functions/nestedAggs/nstdagg.json | 2 +- .../window_functions/ntile_func/queries.json | 2 +- .../optimization/data/query.json | 2 +- .../optimization/plan/plan.json | 2 +- .../window_functions/order_by/order_by.json | 2 +- .../partition_by/partition_by.json | 2 +- .../statistical_aggregates.json | 2 +- .../window_functions/tpcds/query47.sql.fail | 10 +-- .../tpcds/tpcds_parquet_sf1.json | 2 +- .../tpcds_variants/tpcds_parquet_sf1.json | 2 +- .../window_functions/views/queries.json | 2 +- 867 files changed, 1496 insertions(+), 1442 deletions(-) rename conf/plugin-templates/{dfs-storage-plugin.template => dfs_test-storage-plugin.template} (99%) create mode 100644 framework/resources/Functional/data-shapes/wide-columns/5000/1000rows/json/widestrings.json create mode 100644 framework/resources/Functional/data-shapes/wide-columns/5000/1000rows/text/widestrings.json diff --git a/conf/plugin-templates/dfs-storage-plugin.template b/conf/plugin-templates/dfs_test-storage-plugin.template similarity index 99% rename from conf/plugin-templates/dfs-storage-plugin.template rename to conf/plugin-templates/dfs_test-storage-plugin.template index 68e6fd631..60c3227d5 100755 --- a/conf/plugin-templates/dfs-storage-plugin.template +++ b/conf/plugin-templates/dfs_test-storage-plugin.template @@ -1,5 +1,5 @@ { - "name" : "dfs", + "name" : "dfs_test", "config" : { "type" : "file", "enabled" : true, diff --git 
a/framework/resources/Advanced/complextype/json/complex.json b/framework/resources/Advanced/complextype/json/complex.json index 3bf811cab..81c25a846 100644 --- a/framework/resources/Advanced/complextype/json/complex.json +++ b/framework/resources/Advanced/complextype/json/complex.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirAdvancedComplexJson", + "schema": "dfs_test.drillTestDirAdvancedComplexJson", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Advanced/data-shapes/wide-columns/5000/100000rows/parquet/widestrings.json b/framework/resources/Advanced/data-shapes/wide-columns/5000/100000rows/parquet/widestrings.json index 0555ce379..2be6b651c 100644 --- a/framework/resources/Advanced/data-shapes/wide-columns/5000/100000rows/parquet/widestrings.json +++ b/framework/resources/Advanced/data-shapes/wide-columns/5000/100000rows/parquet/widestrings.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.wideStringsParquet5000Width100000rows", + "schema": "dfs_test.wideStringsParquet5000Width100000rows", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/json/json_storage/drill1816.q b/framework/resources/Advanced/json/json_storage/drill1816.q index 1bf930172..31d799762 100644 --- a/framework/resources/Advanced/json/json_storage/drill1816.q +++ b/framework/resources/Advanced/json/json_storage/drill1816.q @@ -1 +1 @@ -select data from dfs.`/drill/testdata/json_kvgenflatten/kvgen-complex-large.json`; +select data from dfs_test.`/drill/testdata/json_kvgenflatten/kvgen-complex-large.json`; diff --git a/framework/resources/Advanced/json/json_storage/jsonGenericGroup.json b/framework/resources/Advanced/json/json_storage/jsonGenericGroup.json index 448bbb05d..4b9757837 100644 --- a/framework/resources/Advanced/json/json_storage/jsonGenericGroup.json +++ 
b/framework/resources/Advanced/json/json_storage/jsonGenericGroup.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/metadata_caching/partition_pruning/data/metadata_caching.json b/framework/resources/Advanced/metadata_caching/partition_pruning/data/metadata_caching.json index 13830343c..99f53dfca 100644 --- a/framework/resources/Advanced/metadata_caching/partition_pruning/data/metadata_caching.json +++ b/framework/resources/Advanced/metadata_caching/partition_pruning/data/metadata_caching.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Advanced/metadata_caching/partition_pruning/data/q1.q b/framework/resources/Advanced/metadata_caching/partition_pruning/data/q1.q index 78faa9e06..e9be55ce8 100644 --- a/framework/resources/Advanced/metadata_caching/partition_pruning/data/q1.q +++ b/framework/resources/Advanced/metadata_caching/partition_pruning/data/q1.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem` where dir0=2006 and dir1=12 and dir2=15 order by l_orderkey, l_extendedprice limit 10; +select * from dfs_test.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem` where dir0=2006 and dir1=12 and dir2=15 order by l_orderkey, l_extendedprice limit 10; diff --git a/framework/resources/Advanced/metadata_caching/partition_pruning/data/q2.q b/framework/resources/Advanced/metadata_caching/partition_pruning/data/q2.q index fd37ef7db..981d2a1e2 100644 --- a/framework/resources/Advanced/metadata_caching/partition_pruning/data/q2.q +++ b/framework/resources/Advanced/metadata_caching/partition_pruning/data/q2.q @@ -1 +1 @@ -select * from 
dfs.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem` where dir0=2006 and dir1=12 and dir2=15 and l_discount=0.07 order by l_orderkey, l_extendedprice limit 10; +select * from dfs_test.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem` where dir0=2006 and dir1=12 and dir2=15 and l_discount=0.07 order by l_orderkey, l_extendedprice limit 10; diff --git a/framework/resources/Advanced/metadata_caching/partition_pruning/data/q3.q b/framework/resources/Advanced/metadata_caching/partition_pruning/data/q3.q index ec5026927..bf744f5b4 100644 --- a/framework/resources/Advanced/metadata_caching/partition_pruning/data/q3.q +++ b/framework/resources/Advanced/metadata_caching/partition_pruning/data/q3.q @@ -1 +1 @@ -select L_ORDERKEY, L_PARTKEY, L_SUPPKEY, L_LINENUMBER, L_QUANTITY, L_EXTENDEDPRICE, L_DISCOUNT, L_TAX, L_RETURNFLAG, L_LINESTATUS, L_SHIPDATE, L_COMMITDATE, L_RECEIPTDATE, L_SHIPINSTRUCT, L_SHIPMODE, L_COMMENT, dir0, dir1, dir2 from dfs.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem` where dir0=2006 and dir1=12 and dir2 IN (15, 20, 1, 2, 5) order by l_orderkey, l_extendedprice limit 10; +select L_ORDERKEY, L_PARTKEY, L_SUPPKEY, L_LINENUMBER, L_QUANTITY, L_EXTENDEDPRICE, L_DISCOUNT, L_TAX, L_RETURNFLAG, L_LINESTATUS, L_SHIPDATE, L_COMMITDATE, L_RECEIPTDATE, L_SHIPINSTRUCT, L_SHIPMODE, L_COMMENT, dir0, dir1, dir2 from dfs_test.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem` where dir0=2006 and dir1=12 and dir2 IN (15, 20, 1, 2, 5) order by l_orderkey, l_extendedprice limit 10; diff --git a/framework/resources/Advanced/metadata_caching/partition_pruning/data/q4.q b/framework/resources/Advanced/metadata_caching/partition_pruning/data/q4.q index 06a73968b..9b1aa5bc9 100644 --- a/framework/resources/Advanced/metadata_caching/partition_pruning/data/q4.q +++ b/framework/resources/Advanced/metadata_caching/partition_pruning/data/q4.q @@ -1 +1 @@ -select L_ORDERKEY, L_PARTKEY, L_SUPPKEY, L_LINENUMBER, L_QUANTITY, 
L_EXTENDEDPRICE, L_DISCOUNT, L_TAX, L_RETURNFLAG, L_LINESTATUS, L_SHIPDATE, L_COMMITDATE, L_RECEIPTDATE, L_SHIPINSTRUCT, L_SHIPMODE, L_COMMENT, dir0, dir1, dir2 from dfs.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem` where dir0=2006 and dir1=12 and dir2 IN (15, 20, 1, 2, 5) and l_discount=0.07 order by l_orderkey limit 10; +select L_ORDERKEY, L_PARTKEY, L_SUPPKEY, L_LINENUMBER, L_QUANTITY, L_EXTENDEDPRICE, L_DISCOUNT, L_TAX, L_RETURNFLAG, L_LINESTATUS, L_SHIPDATE, L_COMMITDATE, L_RECEIPTDATE, L_SHIPINSTRUCT, L_SHIPMODE, L_COMMENT, dir0, dir1, dir2 from dfs_test.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem` where dir0=2006 and dir1=12 and dir2 IN (15, 20, 1, 2, 5) and l_discount=0.07 order by l_orderkey limit 10; diff --git a/framework/resources/Advanced/metadata_caching/partition_pruning/plan/metadata_caching.json b/framework/resources/Advanced/metadata_caching/partition_pruning/plan/metadata_caching.json index 41adebe7e..f85c03d22 100644 --- a/framework/resources/Advanced/metadata_caching/partition_pruning/plan/metadata_caching.json +++ b/framework/resources/Advanced/metadata_caching/partition_pruning/plan/metadata_caching.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Advanced/metadata_caching/partition_pruning/plan/q1.q b/framework/resources/Advanced/metadata_caching/partition_pruning/plan/q1.q index d5d55a68a..9f2a32f49 100644 --- a/framework/resources/Advanced/metadata_caching/partition_pruning/plan/q1.q +++ b/framework/resources/Advanced/metadata_caching/partition_pruning/plan/q1.q @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem` where dir0=2006 and dir1=12 and dir2=15; +explain plan for select * from dfs_test.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem` where dir0=2006 and dir1=12 and dir2=15; 
diff --git a/framework/resources/Advanced/metadata_caching/partition_pruning/plan/q2.q b/framework/resources/Advanced/metadata_caching/partition_pruning/plan/q2.q index 93a1de4b8..608a8db08 100644 --- a/framework/resources/Advanced/metadata_caching/partition_pruning/plan/q2.q +++ b/framework/resources/Advanced/metadata_caching/partition_pruning/plan/q2.q @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem` where dir0=2006 and dir1=12 and dir2=15 and l_discount=0.07 order by l_orderkey limit 10; +explain plan for select * from dfs_test.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem` where dir0=2006 and dir1=12 and dir2=15 and l_discount=0.07 order by l_orderkey limit 10; diff --git a/framework/resources/Advanced/metadata_caching/partition_pruning/plan/q3.q b/framework/resources/Advanced/metadata_caching/partition_pruning/plan/q3.q index 6cfd5cdf3..c56d8b22c 100644 --- a/framework/resources/Advanced/metadata_caching/partition_pruning/plan/q3.q +++ b/framework/resources/Advanced/metadata_caching/partition_pruning/plan/q3.q @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem` where dir0=2006 and dir1=12 and dir2 IN (15, 20, 1, 2, 5); +explain plan for select * from dfs_test.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem` where dir0=2006 and dir1=12 and dir2 IN (15, 20, 1, 2, 5); diff --git a/framework/resources/Advanced/metadata_caching/partition_pruning/plan/q4.q b/framework/resources/Advanced/metadata_caching/partition_pruning/plan/q4.q index 064ce70f5..180a28600 100644 --- a/framework/resources/Advanced/metadata_caching/partition_pruning/plan/q4.q +++ b/framework/resources/Advanced/metadata_caching/partition_pruning/plan/q4.q @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem` where dir0=2006 and dir1=12 and dir2 IN (15, 20, 1, 2, 5) and l_discount=0.07 order by 
l_orderkey limit 10; +explain plan for select * from dfs_test.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem` where dir0=2006 and dir1=12 and dir2 IN (15, 20, 1, 2, 5) and l_discount=0.07 order by l_orderkey limit 10; diff --git a/framework/resources/Advanced/mondrian/mondrian.json b/framework/resources/Advanced/mondrian/mondrian.json index c83d947a4..d5ea86c8b 100644 --- a/framework/resources/Advanced/mondrian/mondrian.json +++ b/framework/resources/Advanced/mondrian/mondrian.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirMondrian", + "schema": "dfs_test.drillTestDirMondrian", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/hive0dot13_parquet/tpcds_sf1_hive0dot13.json b/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/hive0dot13_parquet/tpcds_sf1_hive0dot13.json index cfaf339c1..1b5469a12 100755 --- a/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/hive0dot13_parquet/tpcds_sf1_hive0dot13.json +++ b/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/hive0dot13_parquet/tpcds_sf1_hive0dot13.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds1_hive0dot13", + "schema": "dfs_test.tpcds1_hive0dot13", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/hive1_parquet/tpcds_sf1_hive1.json b/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/hive1_parquet/tpcds_sf1_hive1.json index 21b9b36e5..877d0fcdc 100755 --- a/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/hive1_parquet/tpcds_sf1_hive1.json +++ b/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/hive1_parquet/tpcds_sf1_hive1.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds1_hive1", + 
"schema": "dfs_test.tpcds1_hive1", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/hive1dot2_parquet/tpcds_sf1_hive1dot2.json b/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/hive1dot2_parquet/tpcds_sf1_hive1dot2.json index d0896fb6b..a5c8d46fd 100755 --- a/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/hive1dot2_parquet/tpcds_sf1_hive1dot2.json +++ b/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/hive1dot2_parquet/tpcds_sf1_hive1dot2.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds1_hive1dot2", + "schema": "dfs_test.tpcds1_hive1dot2", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/hive2dot1_parquet_withdate/tpcds.json b/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/hive2dot1_parquet_withdate/tpcds.json index fe7f54775..f316a83aa 100755 --- a/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/hive2dot1_parquet_withdate/tpcds.json +++ b/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/hive2dot1_parquet_withdate/tpcds.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds1_hive2dot1_withdate", + "schema": "dfs_test.tpcds1_hive2dot1_withdate", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/metadata_caching_hive1dot2_parquet_withdate/tpcds.json b/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/metadata_caching_hive1dot2_parquet_withdate/tpcds.json index b64945486..79705d226 100755 --- a/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/metadata_caching_hive1dot2_parquet_withdate/tpcds.json +++ 
b/framework/resources/Advanced/tpcds/tpcds_sf1/hive-generated-parquet/metadata_caching_hive1dot2_parquet_withdate/tpcds.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds1_hive1dot2_parquet_withdate", + "schema": "dfs_test.tpcds1_hive1dot2_parquet_withdate", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpcds/tpcds_sf1/original/json/tpcds_json_sf1.json b/framework/resources/Advanced/tpcds/tpcds_sf1/original/json/tpcds_json_sf1.json index 92cc46e8f..73b12ad55 100755 --- a/framework/resources/Advanced/tpcds/tpcds_sf1/original/json/tpcds_json_sf1.json +++ b/framework/resources/Advanced/tpcds/tpcds_sf1/original/json/tpcds_json_sf1.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_sf1_json_views", + "schema": "dfs_test.tpcds_sf1_json_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpcds/tpcds_sf1/original/not_supported/tpcds_parquet_sf1.json b/framework/resources/Advanced/tpcds/tpcds_sf1/original/not_supported/tpcds_parquet_sf1.json index 3815be097..35a701040 100755 --- a/framework/resources/Advanced/tpcds/tpcds_sf1/original/not_supported/tpcds_parquet_sf1.json +++ b/framework/resources/Advanced/tpcds/tpcds_sf1/original/not_supported/tpcds_parquet_sf1.json @@ -7,7 +7,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_sf1_parquet_views", + "schema": "dfs_test.tpcds_sf1_parquet_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpcds/tpcds_sf1/original/parquet/tpcds_parquet_sf1.json b/framework/resources/Advanced/tpcds/tpcds_sf1/original/parquet/tpcds_parquet_sf1.json index 73f96de1a..a21ffc50d 100755 --- a/framework/resources/Advanced/tpcds/tpcds_sf1/original/parquet/tpcds_parquet_sf1.json +++ 
b/framework/resources/Advanced/tpcds/tpcds_sf1/original/parquet/tpcds_parquet_sf1.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_sf1_parquet_views", + "schema": "dfs_test.tpcds_sf1_parquet_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpcds/tpcds_sf1/original/parquet/tpcds_parquet_sf1.json_createTables b/framework/resources/Advanced/tpcds/tpcds_sf1/original/parquet/tpcds_parquet_sf1.json_createTables index 1d7ccae79..55a314660 100755 --- a/framework/resources/Advanced/tpcds/tpcds_sf1/original/parquet/tpcds_parquet_sf1.json_createTables +++ b/framework/resources/Advanced/tpcds/tpcds_sf1/original/parquet/tpcds_parquet_sf1.json_createTables @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_parquet_sf1_v", + "schema": "dfs_test.tpcds_parquet_sf1_v", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpcds/tpcds_sf1/original/text/tpcds_text_sf1.json b/framework/resources/Advanced/tpcds/tpcds_sf1/original/text/tpcds_text_sf1.json index 7489d3b51..641ae3aac 100755 --- a/framework/resources/Advanced/tpcds/tpcds_sf1/original/text/tpcds_text_sf1.json +++ b/framework/resources/Advanced/tpcds/tpcds_sf1/original/text/tpcds_text_sf1.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_sf1_text_views", + "schema": "dfs_test.tpcds_sf1_text_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpcds/tpcds_sf100/original/tpcds_parquet_sf100.json b/framework/resources/Advanced/tpcds/tpcds_sf100/original/tpcds_parquet_sf100.json index 0508553d5..4ccfba1bb 100755 --- a/framework/resources/Advanced/tpcds/tpcds_sf100/original/tpcds_parquet_sf100.json +++ b/framework/resources/Advanced/tpcds/tpcds_sf100/original/tpcds_parquet_sf100.json @@ -8,7 +8,7 @@ "matrices": [ { 
"query-file": ".*.sql", - "schema": "dfs.tpcds_sf100_parquet_views", + "schema": "dfs_test.tpcds_sf100_parquet_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpcds/tpcds_sf100/sanity/tpcds_parquet_sf100.json b/framework/resources/Advanced/tpcds/tpcds_sf100/sanity/tpcds_parquet_sf100.json index 0508553d5..4ccfba1bb 100755 --- a/framework/resources/Advanced/tpcds/tpcds_sf100/sanity/tpcds_parquet_sf100.json +++ b/framework/resources/Advanced/tpcds/tpcds_sf100/sanity/tpcds_parquet_sf100.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_sf100_parquet_views", + "schema": "dfs_test.tpcds_sf100_parquet_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpcds/tpcds_sf100/variants/tpcds_parquet_sf100.json b/framework/resources/Advanced/tpcds/tpcds_sf100/variants/tpcds_parquet_sf100.json index c4072e4a7..592232acc 100755 --- a/framework/resources/Advanced/tpcds/tpcds_sf100/variants/tpcds_parquet_sf100.json +++ b/framework/resources/Advanced/tpcds/tpcds_sf100/variants/tpcds_parquet_sf100.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_sf100_parquet_views", + "schema": "dfs_test.tpcds_sf100_parquet_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpch/tpch_sf1/original/json/tpch_sf1_json.json b/framework/resources/Advanced/tpch/tpch_sf1/original/json/tpch_sf1_json.json index 313d71d56..e5c6214fa 100755 --- a/framework/resources/Advanced/tpch/tpch_sf1/original/json/tpch_sf1_json.json +++ b/framework/resources/Advanced/tpch/tpch_sf1/original/json/tpch_sf1_json.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpch_sf1_json", + "schema": "dfs_test.tpch_sf1_json", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git 
a/framework/resources/Advanced/tpch/tpch_sf1/original/parquet/tpch_sf1_parquet.json b/framework/resources/Advanced/tpch/tpch_sf1/original/parquet/tpch_sf1_parquet.json index cdc2ff090..c6a149624 100755 --- a/framework/resources/Advanced/tpch/tpch_sf1/original/parquet/tpch_sf1_parquet.json +++ b/framework/resources/Advanced/tpch/tpch_sf1/original/parquet/tpch_sf1_parquet.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpch_sf1_parquet", + "schema": "dfs_test.tpch_sf1_parquet", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpch/tpch_sf1/sanity/json/tpch_sf1_json.json b/framework/resources/Advanced/tpch/tpch_sf1/sanity/json/tpch_sf1_json.json index 158c89e41..4f2fbebfd 100755 --- a/framework/resources/Advanced/tpch/tpch_sf1/sanity/json/tpch_sf1_json.json +++ b/framework/resources/Advanced/tpch/tpch_sf1/sanity/json/tpch_sf1_json.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpch_sf1_json", + "schema": "dfs_test.tpch_sf1_json", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpch/tpch_sf1/sanity/parquet/tpch_sf1_parquet.json b/framework/resources/Advanced/tpch/tpch_sf1/sanity/parquet/tpch_sf1_parquet.json index 4c0c2cae1..ceea14ec1 100755 --- a/framework/resources/Advanced/tpch/tpch_sf1/sanity/parquet/tpch_sf1_parquet.json +++ b/framework/resources/Advanced/tpch/tpch_sf1/sanity/parquet/tpch_sf1_parquet.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpch_sf1_parquet", + "schema": "dfs_test.tpch_sf1_parquet", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpch/tpch_sf1/smoke/json/tpch_sf1_json.json b/framework/resources/Advanced/tpch/tpch_sf1/smoke/json/tpch_sf1_json.json index 8fdd8a205..8c0f98a18 100755 --- 
a/framework/resources/Advanced/tpch/tpch_sf1/smoke/json/tpch_sf1_json.json +++ b/framework/resources/Advanced/tpch/tpch_sf1/smoke/json/tpch_sf1_json.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpch_sf1_json", + "schema": "dfs_test.tpch_sf1_json", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpch/tpch_sf1/smoke/parquet/tpch_sf1_parquet.json b/framework/resources/Advanced/tpch/tpch_sf1/smoke/parquet/tpch_sf1_parquet.json index 318c81dbc..8c511f12e 100755 --- a/framework/resources/Advanced/tpch/tpch_sf1/smoke/parquet/tpch_sf1_parquet.json +++ b/framework/resources/Advanced/tpch/tpch_sf1/smoke/parquet/tpch_sf1_parquet.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpch_sf1_parquet", + "schema": "dfs_test.tpch_sf1_parquet", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/datetime/json/tpch_sf1_json.json b/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/datetime/json/tpch_sf1_json.json index 47ae7cd0a..b1547e0b0 100755 --- a/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/datetime/json/tpch_sf1_json.json +++ b/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/datetime/json/tpch_sf1_json.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpch_sf1_json", + "schema": "dfs_test.tpch_sf1_json", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/datetime/parquet/tpch_sf1_parquet.json b/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/datetime/parquet/tpch_sf1_parquet.json index e26baf5e4..738ad5e03 100755 --- a/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/datetime/parquet/tpch_sf1_parquet.json +++ 
b/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/datetime/parquet/tpch_sf1_parquet.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpch_sf1_parquet", + "schema": "dfs_test.tpch_sf1_parquet", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/math/json/tpch_sf1_json.json b/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/math/json/tpch_sf1_json.json index 3d52f50e3..a6f325cee 100755 --- a/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/math/json/tpch_sf1_json.json +++ b/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/math/json/tpch_sf1_json.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpch_sf1_json", + "schema": "dfs_test.tpch_sf1_json", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/math/parquet/tpch_sf1_parquet.json b/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/math/parquet/tpch_sf1_parquet.json index db44eb65b..4e5dce4de 100755 --- a/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/math/parquet/tpch_sf1_parquet.json +++ b/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/math/parquet/tpch_sf1_parquet.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpch_sf1_parquet", + "schema": "dfs_test.tpch_sf1_parquet", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/strings/json/tpch_sf1_json.json b/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/strings/json/tpch_sf1_json.json index 69fcec278..f82d7a4bc 100755 --- a/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/strings/json/tpch_sf1_json.json +++ b/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/strings/json/tpch_sf1_json.json @@ -8,7 +8,7 @@ "matrices": [ 
{ "query-file": ".*.sql", - "schema": "dfs.tpch_sf1_json", + "schema": "dfs_test.tpch_sf1_json", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/strings/parquet/tpch_sf1_parquet.json b/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/strings/parquet/tpch_sf1_parquet.json index 7cc8e23d4..ce03c7cff 100755 --- a/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/strings/parquet/tpch_sf1_parquet.json +++ b/framework/resources/Advanced/tpch/tpch_sf1/sql_functions/strings/parquet/tpch_sf1_parquet.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpch_sf1_parquet", + "schema": "dfs_test.tpch_sf1_parquet", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpch/tpch_sf100/limit0/limit0.json b/framework/resources/Advanced/tpch/tpch_sf100/limit0/limit0.json index 4f9371d18..51db0207f 100644 --- a/framework/resources/Advanced/tpch/tpch_sf100/limit0/limit0.json +++ b/framework/resources/Advanced/tpch/tpch_sf100/limit0/limit0.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirTpch100Parquet", + "schema": "dfs_test.drillTestDirTpch100Parquet", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Advanced/tpch/tpch_sf100/parquet/tpch_sf100_parquet.json b/framework/resources/Advanced/tpch/tpch_sf100/parquet/tpch_sf100_parquet.json index 43938d008..7d9442d08 100644 --- a/framework/resources/Advanced/tpch/tpch_sf100/parquet/tpch_sf100_parquet.json +++ b/framework/resources/Advanced/tpch/tpch_sf100/parquet/tpch_sf100_parquet.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirTpch100Parquet", + "schema": "dfs_test.drillTestDirTpch100Parquet", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git 
a/framework/resources/Datasources/cross-sources/drill.ddl b/framework/resources/Datasources/cross-sources/drill.ddl index be1811f55..96e1eeb6e 100644 --- a/framework/resources/Datasources/cross-sources/drill.ddl +++ b/framework/resources/Datasources/cross-sources/drill.ddl @@ -18,7 +18,7 @@ TBLPROPERTIES ("serialization.null.format"="null"); -create table dfs.`cross-sources`.fewtypes as +create table dfs_test.`cross-sources`.fewtypes as select makerequired(cast(int_col as int)) int_col, makerequired(cast(bigint_col as bigint)) bigint_col, diff --git a/framework/resources/Datasources/ctas/create_tables.sh b/framework/resources/Datasources/ctas/create_tables.sh index dcbaa2d99..8b6506b0d 100755 --- a/framework/resources/Datasources/ctas/create_tables.sh +++ b/framework/resources/Datasources/ctas/create_tables.sh @@ -12,9 +12,9 @@ hadoop fs -mkdir /drill/testdata/ctas/parquet if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.ctas_parquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas/create_tables_parquet.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.ctas_parquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas/create_tables_parquet.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.ctas_parquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas/create_tables_parquet.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.ctas_parquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas/create_tables_parquet.ddl fi # TODO: it seems that sqlline does not exit, if one of the queries failed. 
diff --git a/framework/resources/Datasources/ctas/create_tables_complex_parquet.ddl b/framework/resources/Datasources/ctas/create_tables_complex_parquet.ddl index 0ff26140e..ee5276c79 100644 --- a/framework/resources/Datasources/ctas/create_tables_complex_parquet.ddl +++ b/framework/resources/Datasources/ctas/create_tables_complex_parquet.ddl @@ -14,4 +14,4 @@ CREATE TABLE `complex.json` AS ooof, ooos, oooa - FROM dfs.`/drill/testdata/complex/json/complex.json`; + FROM dfs_test.`/drill/testdata/complex/json/complex.json`; diff --git a/framework/resources/Datasources/ctas/create_tables_complex_parquet.sh b/framework/resources/Datasources/ctas/create_tables_complex_parquet.sh index 362e8b86b..2006aec9c 100755 --- a/framework/resources/Datasources/ctas/create_tables_complex_parquet.sh +++ b/framework/resources/Datasources/ctas/create_tables_complex_parquet.sh @@ -12,9 +12,9 @@ hadoop fs -chmod 777 /drill/testdata/complex/parquet if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.drillTestDirComplexParquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas/create_tables_complex_parquet.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.drillTestDirComplexParquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas/create_tables_complex_parquet.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.drillTestDirComplexParquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas/create_tables_complex_parquet.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.drillTestDirComplexParquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas/create_tables_complex_parquet.ddl fi # TODO: it seems that sqlline does not exit, if one of the queries failed. 
diff --git a/framework/resources/Datasources/ctas/create_tables_parquet.ddl b/framework/resources/Datasources/ctas/create_tables_parquet.ddl index 6814a19a0..99759ce05 100644 --- a/framework/resources/Datasources/ctas/create_tables_parquet.ddl +++ b/framework/resources/Datasources/ctas/create_tables_parquet.ddl @@ -6,7 +6,7 @@ select j1.c_varchar, j1.c_date, j2.c_date from - dfs.`ctas`.`j1` inner join dfs.`ctas`.`j2` ON (j1.c_timestamp = j2.c_timestamp and j1.c_date between '1960-01-03' and '1960-01-15' ) + dfs_test.`ctas`.`j1` inner join dfs_test.`ctas`.`j2` ON (j1.c_timestamp = j2.c_timestamp and j1.c_date between '1960-01-03' and '1960-01-15' ) where j1.c_bigint IS NOT DISTINCT FROM j2.c_bigint and j1.c_timestamp IS NOT NULL @@ -17,7 +17,7 @@ select j1.c_bigint, coalesce(j3.c_bigint, -1000) from - dfs.`ctas`.`j1` left outer join dfs.`ctas`.`j3` ON (j1.c_time = j3.c_time) + dfs_test.`ctas`.`j1` left outer join dfs_test.`ctas`.`j3` ON (j1.c_time = j3.c_time) where j1.c_boolean IS NULL and j1.d9 > 0 @@ -26,13 +26,13 @@ where create table ctas_t3 as select j4.c_varchar || j2.c_varchar as c1 from - dfs.`ctas`.`j4` right outer join dfs.`ctas`.`j2` on (j2.c_integer = j4.c_integer) + dfs_test.`ctas`.`j4` right outer join dfs_test.`ctas`.`j2` on (j2.c_integer = j4.c_integer) ; create table ctas_t4 as select concat(j4.c_varchar, j2.c_varchar) as c1 from - dfs.`ctas`.`j4` right outer join dfs.`ctas`.`j2` on (j2.c_integer = j4.c_integer) + dfs_test.`ctas`.`j4` right outer join dfs_test.`ctas`.`j2` on (j2.c_integer = j4.c_integer) ; create table ctas_t5(count_star, max_j1_c_int, min_j2_c_int, avg_j1_c_int, avg_j2_c_int) as @@ -43,20 +43,20 @@ select avg(j1.c_integer), avg(j2.c_integer) from - dfs.`ctas`.`j1` full outer join dfs.`ctas`.`j2` on (j1.c_date = j2.c_date) + dfs_test.`ctas`.`j1` full outer join dfs_test.`ctas`.`j2` on (j1.c_date = j2.c_date) where j1.c_boolean is false ; -/* create table ctas_t6(c1,c2,c3,c4,c5) as select c_integer, c_bigint, c_date, c_time, 
c_varchar from dfs.`ctas`.`j4` where c_bigint is null; */ +/* create table ctas_t6(c1,c2,c3,c4,c5) as select c_integer, c_bigint, c_date, c_time, c_varchar from dfs_test.`ctas`.`j4` where c_bigint is null; */ -create table ctas_t8(c1) as select distinct c_integer from ( select c_integer from dfs.`ctas`.`j1` union all select c_integer from dfs.`ctas`.`j2`) as xyz; +create table ctas_t8(c1) as select distinct c_integer from ( select c_integer from dfs_test.`ctas`.`j1` union all select c_integer from dfs_test.`ctas`.`j2`) as xyz; create table ctas_t9 as select c_integer as c1, count(c_date) as c2 from - dfs.`ctas`.`j1` + dfs_test.`ctas`.`j1` group by c_integer order by @@ -68,7 +68,7 @@ select count(distinct c_integer), sum(c_integer) from - dfs.`ctas`.`j1` + dfs_test.`ctas`.`j1` group by c_date order by @@ -84,7 +84,7 @@ from d9, c_date, c_timestamp - from dfs.`ctas`.`j3` + from dfs_test.`ctas`.`j3` ) as sq group by c_date @@ -98,7 +98,7 @@ select sum(c_bigint) , count(c_date) from - dfs.`ctas`.`j4` + dfs_test.`ctas`.`j4` group by c_boolean, c_timestamp @@ -117,7 +117,7 @@ from c_date, c_varchar from - dfs.`ctas`.`j1` + dfs_test.`ctas`.`j1` ) as sq group by c_varchar; @@ -127,7 +127,7 @@ select c_date, count(distinct c_time) from - dfs.`ctas`.`j4` + dfs_test.`ctas`.`j4` group by c_date order by @@ -139,7 +139,7 @@ select count(distinct c_varchar) as count_distinct_2, count(distinct c_time) as count_distinct_3 from - dfs.`ctas`.`j1` + dfs_test.`ctas`.`j1` group by c_time ; diff --git a/framework/resources/Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl b/framework/resources/Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl index 77c3fec44..0741f6fa2 100644 --- a/framework/resources/Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl +++ b/framework/resources/Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl @@ -1,5 +1,5 @@ -create table `existing_partition_pruning/lineitempart` partition by (dir0) 
as select * from dfs.`/drill/testdata/partition_pruning/dfs/lineitempart`; -create table `existing_partition_pruning/lineitem` partition by (dir0) as select * from dfs.`/drill/testdata/partition_pruning/dfs/lineitem`; -create table `existing_partition_pruning/lineitem_hierarchical_intstring` partition by (dir0, dir1) as select * from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring`; -create table `existing_partition_pruning/ordersjson` partition by (dir0) as select * from dfs.`/drill/testdata/partition_pruning/dfs/ordersjson`; -create table `existing_partition_pruning/orders` partition by (dir0) as select * from dfs.`/drill/testdata/partition_pruning/dfs/orders`; +create table `existing_partition_pruning/lineitempart` partition by (dir0) as select * from dfs_test.`/drill/testdata/partition_pruning/dfs/lineitempart`; +create table `existing_partition_pruning/lineitem` partition by (dir0) as select * from dfs_test.`/drill/testdata/partition_pruning/dfs/lineitem`; +create table `existing_partition_pruning/lineitem_hierarchical_intstring` partition by (dir0, dir1) as select * from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring`; +create table `existing_partition_pruning/ordersjson` partition by (dir0) as select * from dfs_test.`/drill/testdata/partition_pruning/dfs/ordersjson`; +create table `existing_partition_pruning/orders` partition by (dir0) as select * from dfs_test.`/drill/testdata/partition_pruning/dfs/orders`; diff --git a/framework/resources/Datasources/ctas_auto_partition/ctas_existing_partition_pruning.sh b/framework/resources/Datasources/ctas_auto_partition/ctas_existing_partition_pruning.sh index 978767223..964dfbd4f 100755 --- a/framework/resources/Datasources/ctas_auto_partition/ctas_existing_partition_pruning.sh +++ b/framework/resources/Datasources/ctas_auto_partition/ctas_existing_partition_pruning.sh @@ -7,9 +7,9 @@ hadoop fs -mkdir 
/drill/testdata/ctas_auto_partition/existing_partition_pruning if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas_auto_partition/ctas_existing_partition_pruning.ddl fi # it seems that sqlline does not exit, if one of the queries failed. 
diff --git a/framework/resources/Datasources/ctas_auto_partition/ctas_tpch_multiple_partitions.sh b/framework/resources/Datasources/ctas_auto_partition/ctas_tpch_multiple_partitions.sh index ec3e195f0..336c9eaef 100755 --- a/framework/resources/Datasources/ctas_auto_partition/ctas_tpch_multiple_partitions.sh +++ b/framework/resources/Datasources/ctas_auto_partition/ctas_tpch_multiple_partitions.sh @@ -7,9 +7,9 @@ hadoop fs -mkdir /drill/testdata/ctas_auto_partition/tpch_multiple_partitions if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas_auto_partition/ctas_tpch_multiple_partitions.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas_auto_partition/ctas_tpch_multiple_partitions.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas_auto_partition/ctas_tpch_multiple_partitions.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas_auto_partition/ctas_tpch_multiple_partitions.ddl fi # it seems that sqlline does not exit, if one of the queries failed. 
diff --git a/framework/resources/Datasources/ctas_auto_partition/ctas_tpch_single_partition1.sh b/framework/resources/Datasources/ctas_auto_partition/ctas_tpch_single_partition1.sh index bb3063448..02a8ece1d 100755 --- a/framework/resources/Datasources/ctas_auto_partition/ctas_tpch_single_partition1.sh +++ b/framework/resources/Datasources/ctas_auto_partition/ctas_tpch_single_partition1.sh @@ -7,9 +7,9 @@ hadoop fs -mkdir /drill/testdata/ctas_auto_partition/tpch_single_partition1 if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas_auto_partition/ctas_tpch_single_partition1.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas_auto_partition/ctas_tpch_single_partition1.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas_auto_partition/ctas_tpch_single_partition1.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas_auto_partition/ctas_tpch_single_partition1.ddl fi # it seems that sqlline does not exit, if one of the queries failed. 
diff --git a/framework/resources/Datasources/ctas_auto_partition/setup.sh b/framework/resources/Datasources/ctas_auto_partition/setup.sh index dcf17101b..3b8afe7bd 100755 --- a/framework/resources/Datasources/ctas_auto_partition/setup.sh +++ b/framework/resources/Datasources/ctas_auto_partition/setup.sh @@ -7,9 +7,9 @@ hadoop fs -mkdir /drill/testdata/ctas_auto_partition/tpch_single_partition if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas_auto_partition/create_tables.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas_auto_partition/create_tables.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas_auto_partition/create_tables.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/ctas_auto_partition/create_tables.ddl fi # it seems that sqlline does not exit, if one of the queries failed. 
diff --git a/framework/resources/Datasources/droptable/droptable1.ddl b/framework/resources/Datasources/droptable/droptable1.ddl index 72470cd4e..873acddba 100644 --- a/framework/resources/Datasources/droptable/droptable1.ddl +++ b/framework/resources/Datasources/droptable/droptable1.ddl @@ -1 +1 @@ -refresh table metadata dfs.`/drill/testdata/droptable/droptable1`; +refresh table metadata dfs_test.`/drill/testdata/droptable/droptable1`; diff --git a/framework/resources/Datasources/droptable/droptable11.q b/framework/resources/Datasources/droptable/droptable11.q index e11df1aed..bc1347fcd 100644 --- a/framework/resources/Datasources/droptable/droptable11.q +++ b/framework/resources/Datasources/droptable/droptable11.q @@ -1 +1 @@ -drop table dfs.droptable.droptable11; +drop table dfs_test.droptable.droptable11; diff --git a/framework/resources/Datasources/droptable/droptable32.ddl b/framework/resources/Datasources/droptable/droptable32.ddl index 3c21593a6..e99b60fa2 100644 --- a/framework/resources/Datasources/droptable/droptable32.ddl +++ b/framework/resources/Datasources/droptable/droptable32.ddl @@ -1,2 +1,2 @@ -use dfs.droptable; +use dfs_test.droptable; create or replace view droptable32v as select * from droptable32; diff --git a/framework/resources/Datasources/droptable/droptableif1.ddl b/framework/resources/Datasources/droptable/droptableif1.ddl index df156456f..cf7f93fec 100644 --- a/framework/resources/Datasources/droptable/droptableif1.ddl +++ b/framework/resources/Datasources/droptable/droptableif1.ddl @@ -1 +1 @@ -refresh table metadata dfs.`/drill/testdata/droptable/droptableif1`; +refresh table metadata dfs_test.`/drill/testdata/droptable/droptableif1`; diff --git a/framework/resources/Datasources/droptable/droptableif11.q b/framework/resources/Datasources/droptable/droptableif11.q index 586a6b7dc..b671a48c9 100644 --- a/framework/resources/Datasources/droptable/droptableif11.q +++ b/framework/resources/Datasources/droptable/droptableif11.q @@ -1 +1 
@@ -drop table if exists dfs.droptable.droptableif11; +drop table if exists dfs_test.droptable.droptableif11; diff --git a/framework/resources/Datasources/droptable/droptableif32.ddl b/framework/resources/Datasources/droptable/droptableif32.ddl index ebcc9e67d..df838ae2d 100644 --- a/framework/resources/Datasources/droptable/droptableif32.ddl +++ b/framework/resources/Datasources/droptable/droptableif32.ddl @@ -1,2 +1,2 @@ -use dfs.droptable; +use dfs_test.droptable; create or replace view droptableif32v as select * from droptableif32; diff --git a/framework/resources/Datasources/droptable/droptablesetup.sh b/framework/resources/Datasources/droptable/droptablesetup.sh index bb1d6c309..9ec8f9adc 100755 --- a/framework/resources/Datasources/droptable/droptablesetup.sh +++ b/framework/resources/Datasources/droptable/droptablesetup.sh @@ -4,17 +4,17 @@ source conf/drillTestConfig.properties #droptable1.q if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/droptable/droptable1.ddl + ${DRILL_HOME}/bin/sqlline -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/droptable/droptable1.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/droptable/droptable1.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/droptable/droptable1.ddl fi #droptableif1.q if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/droptable/droptableif1.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u 
"jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/droptable/droptableif1.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/droptable/droptableif1.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/droptable/droptableif1.ddl fi #droptable10.q @@ -40,15 +40,15 @@ hadoop fs -mkdir ${DRILL_TESTDATA}/droptable/droptableif23 #droptable32.q if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/droptable/droptable32.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/droptable/droptable32.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/droptable/droptable32.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/droptable/droptable32.ddl fi #droptableif32.q if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/droptable/droptableif32.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/droptable/droptableif32.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" 
--run=${DRILL_TEST_DATA_DIR}/Datasources/droptable/droptableif32.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/droptable/droptableif32.ddl fi diff --git a/framework/resources/Datasources/hive_storage/change_metadata.sh b/framework/resources/Datasources/hive_storage/change_metadata.sh index 13ead1778..42aa68137 100755 --- a/framework/resources/Datasources/hive_storage/change_metadata.sh +++ b/framework/resources/Datasources/hive_storage/change_metadata.sh @@ -6,9 +6,9 @@ ${DRILL_TEST_DATA_DIR}/Datasources/hive/execHive.sh ${DRILL_TEST_DATA_DIR}/Datas if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/hive_storage/change_metadata_drill.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/hive_storage/change_metadata_drill.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/hive_storage/change_metadata_drill.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.ctasAutoPartition;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/hive_storage/change_metadata_drill.sql fi ${DRILL_TEST_DATA_DIR}/Datasources/hive/execHive.sh ${DRILL_TEST_DATA_DIR}/Datasources/hive_storage/change_metadata2.ddl diff --git a/framework/resources/Datasources/hive_storage/hive_native/drill_create_parquet.ddl b/framework/resources/Datasources/hive_storage/hive_native/drill_create_parquet.ddl index f9e115dc5..ecef7c3fd 100644 --- 
a/framework/resources/Datasources/hive_storage/hive_native/drill_create_parquet.ddl +++ b/framework/resources/Datasources/hive_storage/hive_native/drill_create_parquet.ddl @@ -2,7 +2,7 @@ # THIS SCRIPT IS NOT USED ANYWHERE IN OUR FUNCTIONAL TESTS # THIS IS JUST CHECKED IN SO THAT WE HAVE A REFERENCE AS TO HOW WE GENERATED THE DATA # -create table dfs.drillTestDir.drillgen_fewtypes_null_hive as +create table dfs_test.drillTestDir.drillgen_fewtypes_null_hive as select int_col, bigint_col, diff --git a/framework/resources/Datasources/impersonation/dfs/noaccessnestedviews.sql b/framework/resources/Datasources/impersonation/dfs/noaccessnestedviews.sql index 231fdecfc..3d845723f 100644 --- a/framework/resources/Datasources/impersonation/dfs/noaccessnestedviews.sql +++ b/framework/resources/Datasources/impersonation/dfs/noaccessnestedviews.sql @@ -1,5 +1,5 @@ alter session set `store.format` = 'parquet'; -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; create or replace view user1datanoaccessv1(c1, c2, c3, c4) as select c_row, c_int, c_float4, c_date from data; create or replace view user1datanoaccessv2(c1, c4) as select c1, c4 from user1datanoaccessv1; create or replace view user1datanoaccessb1(c1, c2, c3, c4) as select c_row, c_int, c_float4, c_date from data; diff --git a/framework/resources/Datasources/join/crt_tbl_prtntd_tbl.sh b/framework/resources/Datasources/join/crt_tbl_prtntd_tbl.sh index 8e98c5f63..357c92b65 100755 --- a/framework/resources/Datasources/join/crt_tbl_prtntd_tbl.sh +++ b/framework/resources/Datasources/join/crt_tbl_prtntd_tbl.sh @@ -3,13 +3,13 @@ source conf/drillTestConfig.properties if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.Join;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/join/crt_tbls_partition_by_l.ddl - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.Join;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" 
--run=${DRILL_TEST_DATA_DIR}/Datasources/join/crt_tbls_partition_by_r.ddl - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.Join;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/join/crt_tbl_prtnby_nulls.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.Join;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/join/crt_tbls_partition_by_l.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.Join;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/join/crt_tbls_partition_by_r.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.Join;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/join/crt_tbl_prtnby_nulls.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.Join;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/join/crt_tbls_partition_by_l.ddl - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.Join;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/join/crt_tbls_partition_by_r.ddl - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.Join;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/join/crt_tbl_prtnby_nulls.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.Join;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/join/crt_tbls_partition_by_l.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.Join;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/join/crt_tbls_partition_by_r.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u 
"jdbc:drill:schema=dfs_test.Join;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/join/crt_tbl_prtnby_nulls.ddl fi # it seems that sqlline does not exit, if one of the queries failed. diff --git a/framework/resources/Datasources/join/crt_tbls_partition_by_l.ddl b/framework/resources/Datasources/join/crt_tbls_partition_by_l.ddl index ef5317d62..517acd155 100644 --- a/framework/resources/Datasources/join/crt_tbls_partition_by_l.ddl +++ b/framework/resources/Datasources/join/crt_tbls_partition_by_l.ddl @@ -1,22 +1,22 @@ DROP TABLE IF EXISTS l_tblprtnby_intcl; -CREATE TABLE l_tblprtnby_intcl PARTITION BY( col_int ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`; +CREATE TABLE l_tblprtnby_intcl PARTITION BY( col_int ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_l`; DROP TABLE IF EXISTS l_tblprtnby_chrcl; -CREATE TABLE l_tblprtnby_chrcl PARTITION BY( col_chr ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`; +CREATE TABLE l_tblprtnby_chrcl PARTITION BY( col_chr ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_l`; DROP TABLE IF EXISTS l_tblprtnby_vrchrcl1; -CREATE TABLE l_tblprtnby_vrchrcl1 PARTITION BY( col_vrchr1 ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`; +CREATE TABLE l_tblprtnby_vrchrcl1 PARTITION BY( col_vrchr1 ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_l`; DROP TABLE IF EXISTS l_tblprtnby_vrchrcl2; -CREATE TABLE l_tblprtnby_vrchrcl2 PARTITION BY( col_vrchr2 ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`; +CREATE TABLE l_tblprtnby_vrchrcl2 PARTITION BY( col_vrchr2 ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_l`; DROP TABLE IF EXISTS l_tblprtnby_dtcl; -CREATE TABLE l_tblprtnby_dtcl PARTITION BY( col_dt ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`; +CREATE TABLE l_tblprtnby_dtcl PARTITION BY( col_dt ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_l`; DROP TABLE IF EXISTS l_tblprtnby_timcl; -CREATE TABLE l_tblprtnby_timcl PARTITION 
BY( col_tim ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`; +CREATE TABLE l_tblprtnby_timcl PARTITION BY( col_tim ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_l`; DROP TABLE IF EXISTS l_tblprtnby_tmstmpcl; -CREATE TABLE l_tblprtnby_tmstmpcl PARTITION BY( col_tmstmp ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`; +CREATE TABLE l_tblprtnby_tmstmpcl PARTITION BY( col_tmstmp ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_l`; DROP TABLE IF EXISTS l_tblprtnby_fltcl; -CREATE TABLE l_tblprtnby_fltcl PARTITION BY( col_flt ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`; +CREATE TABLE l_tblprtnby_fltcl PARTITION BY( col_flt ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_l`; DROP TABLE IF EXISTS l_tblprtnby_intrvlyrcl; -CREATE TABLE l_tblprtnby_intrvlyrcl PARTITION BY( col_intrvl_yr ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`; +CREATE TABLE l_tblprtnby_intrvlyrcl PARTITION BY( col_intrvl_yr ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_l`; DROP TABLE IF EXISTS l_tblprtnby_intrvldycl; -CREATE TABLE l_tblprtnby_intrvldycl PARTITION BY( col_intrvl_day ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`; +CREATE TABLE l_tblprtnby_intrvldycl PARTITION BY( col_intrvl_day ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_l`; DROP TABLE IF EXISTS l_tblprtnby_blncl; -CREATE TABLE l_tblprtnby_blncl PARTITION BY( col_bln ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_l`; +CREATE TABLE l_tblprtnby_blncl PARTITION BY( col_bln ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_l`; diff --git a/framework/resources/Datasources/join/crt_tbls_partition_by_r.ddl b/framework/resources/Datasources/join/crt_tbls_partition_by_r.ddl index b63e2f63e..3a89a4067 100644 --- a/framework/resources/Datasources/join/crt_tbls_partition_by_r.ddl +++ b/framework/resources/Datasources/join/crt_tbls_partition_by_r.ddl @@ -1,22 +1,22 @@ DROP TABLE IF EXISTS r_tblprtnby_intcl; -CREATE TABLE 
r_tblprtnby_intcl PARTITION BY( col_int ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`; +CREATE TABLE r_tblprtnby_intcl PARTITION BY( col_int ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_r`; DROP TABLE IF EXISTS r_tblprtnby_chrcl; -CREATE TABLE r_tblprtnby_chrcl PARTITION BY( col_chr ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`; +CREATE TABLE r_tblprtnby_chrcl PARTITION BY( col_chr ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_r`; DROP TABLE IF EXISTS r_tblprtnby_vrchrcl1; -CREATE TABLE r_tblprtnby_vrchrcl1 PARTITION BY( col_vrchr1 ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`; +CREATE TABLE r_tblprtnby_vrchrcl1 PARTITION BY( col_vrchr1 ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_r`; DROP TABLE IF EXISTS r_tblprtnby_vrchrcl2; -CREATE TABLE r_tblprtnby_vrchrcl2 PARTITION BY( col_vrchr2 ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`; +CREATE TABLE r_tblprtnby_vrchrcl2 PARTITION BY( col_vrchr2 ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_r`; DROP TABLE IF EXISTS r_tblprtnby_dtcl; -CREATE TABLE r_tblprtnby_dtcl PARTITION BY( col_dt ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`; +CREATE TABLE r_tblprtnby_dtcl PARTITION BY( col_dt ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_r`; DROP TABLE IF EXISTS r_tblprtnby_timcl; -CREATE TABLE r_tblprtnby_timcl PARTITION BY( col_tim ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`; +CREATE TABLE r_tblprtnby_timcl PARTITION BY( col_tim ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_r`; DROP TABLE IF EXISTS r_tblprtnby_tmstmpcl; -CREATE TABLE r_tblprtnby_tmstmpcl PARTITION BY( col_tmstmp ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`; +CREATE TABLE r_tblprtnby_tmstmpcl PARTITION BY( col_tmstmp ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_r`; DROP TABLE IF EXISTS r_tblprtnby_fltcl; -CREATE TABLE r_tblprtnby_fltcl PARTITION BY( col_flt ) AS SELECT * FROM 
dfs.`/drill/testdata/join/typeall_r`; +CREATE TABLE r_tblprtnby_fltcl PARTITION BY( col_flt ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_r`; DROP TABLE IF EXISTS r_tblprtnby_intrvlyrcl; -CREATE TABLE r_tblprtnby_intrvlyrcl PARTITION BY( col_intrvl_yr ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`; +CREATE TABLE r_tblprtnby_intrvlyrcl PARTITION BY( col_intrvl_yr ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_r`; DROP TABLE IF EXISTS r_tblprtnby_intrvldycl; -CREATE TABLE r_tblprtnby_intrvldycl PARTITION BY( col_intrvl_day ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`; +CREATE TABLE r_tblprtnby_intrvldycl PARTITION BY( col_intrvl_day ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_r`; DROP TABLE IF EXISTS r_tblprtnby_blncl; -CREATE TABLE r_tblprtnby_blncl PARTITION BY( col_bln ) AS SELECT * FROM dfs.`/drill/testdata/join/typeall_r`; +CREATE TABLE r_tblprtnby_blncl PARTITION BY( col_bln ) AS SELECT * FROM dfs_test.`/drill/testdata/join/typeall_r`; diff --git a/framework/resources/Datasources/limit0/aggregates/aggregation/bugs/create_bugs_view.sh b/framework/resources/Datasources/limit0/aggregates/aggregation/bugs/create_bugs_view.sh index a0b63449c..1ec3ef7e5 100755 --- a/framework/resources/Datasources/limit0/aggregates/aggregation/bugs/create_bugs_view.sh +++ b/framework/resources/Datasources/limit0/aggregates/aggregation/bugs/create_bugs_view.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/bugs/create_bugs_view.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/bugs/create_bugs_view.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u 
"jdbc:drill:schema=dfs.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/bugs/create_bugs_view.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/bugs/create_bugs_view.sql fi set +x diff --git a/framework/resources/Datasources/limit0/aggregates/aggregation/bugs/create_bugs_view.sql b/framework/resources/Datasources/limit0/aggregates/aggregation/bugs/create_bugs_view.sql index 88691008a..958e06dc0 100644 --- a/framework/resources/Datasources/limit0/aggregates/aggregation/bugs/create_bugs_view.sql +++ b/framework/resources/Datasources/limit0/aggregates/aggregation/bugs/create_bugs_view.sql @@ -1 +1 @@ -create or replace view `dfs.aggregation`.`drill-1977_v` as select cast(a1 as int) a1, cast(b1 as integer) b1, cast(c1 as boolean) c1, cast(d1 as date) d1 from `dfs.aggregation`.`drill-1977.json`; +create or replace view `dfs_test.aggregation`.`drill-1977_v` as select cast(a1 as int) a1, cast(b1 as integer) b1, cast(c1 as boolean) c1, cast(d1 as date) d1 from `dfs_test.aggregation`.`drill-1977.json`; diff --git a/framework/resources/Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sh b/framework/resources/Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sh index 8b97cf7a2..82981f9e1 100755 --- a/framework/resources/Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sh +++ b/framework/resources/Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" 
--run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sql fi set +x diff --git a/framework/resources/Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sql b/framework/resources/Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sql index 61eb3c9a7..d673736a0 100644 --- a/framework/resources/Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sql +++ b/framework/resources/Datasources/limit0/aggregates/aggregation/count_distinct/create_count_distinct_views.sql @@ -1,9 +1,9 @@ -create or replace view `dfs.aggregation`.alltypes_with_nulls_v as select cast(c_varchar as varchar(10)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_smalldecimal as double) c_smalldecimal,cast(c_bigdecimal as double precision) c_bigdecimal,cast(c_float as float) c_float,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double) d38 from `dfs.aggregation`.alltypes_with_nulls; +create 
or replace view `dfs_test.aggregation`.alltypes_with_nulls_v as select cast(c_varchar as varchar(10)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_smalldecimal as double) c_smalldecimal,cast(c_bigdecimal as double precision) c_bigdecimal,cast(c_float as float) c_float,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double) d38 from `dfs_test.aggregation`.alltypes_with_nulls; -create or replace view `dfs.aggregation`.alltypes_v as select cast(c_varchar as character varying(30)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_smalldecimal as double) c_smalldecimal,cast(c_bigdecimal as double precision) c_bigdecimal,cast(c_float as float) c_float,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double) d38 from `dfs.aggregation`.alltypes; +create or replace view `dfs_test.aggregation`.alltypes_v as select cast(c_varchar as character varying(30)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_smalldecimal as double) c_smalldecimal,cast(c_bigdecimal as double precision) c_bigdecimal,cast(c_float as float) c_float,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double) d38 from `dfs_test.aggregation`.alltypes; -create or replace view `dfs.aggregation`.t1_v as select cast(a1 as int) a1,cast(b1 as char(5)) b1, cast(c1 as date) as c1 from `dfs.aggregation`.t1; +create or replace view `dfs_test.aggregation`.t1_v as select cast(a1 as int) a1,cast(b1 as char(5)) 
b1, cast(c1 as date) as c1 from `dfs_test.aggregation`.t1; -create or replace view `dfs.aggregation`.t2_v as select cast(a2 as int) a2, cast(b2 as character varying(5)) b2,cast(c2 as date) c2 from `dfs.aggregation`.t2; +create or replace view `dfs_test.aggregation`.t2_v as select cast(a2 as int) a2, cast(b2 as character varying(5)) b2,cast(c2 as date) c2 from `dfs_test.aggregation`.t2; -create or replace view `dfs.aggregation`.t3_v as select cast(a3 as integer) a3,cast(b3 as varchar(5)) b3,cast(c3 as date) c3 from `dfs.aggregation`.t3; +create or replace view `dfs_test.aggregation`.t3_v as select cast(a3 as integer) a3,cast(b3 as varchar(5)) b3,cast(c3 as date) c3 from `dfs_test.aggregation`.t3; diff --git a/framework/resources/Datasources/limit0/aggregates/aggregation/group_by_case/create_groupByCase_views.sh b/framework/resources/Datasources/limit0/aggregates/aggregation/group_by_case/create_groupByCase_views.sh index f82eeebe8..a9d1c360c 100755 --- a/framework/resources/Datasources/limit0/aggregates/aggregation/group_by_case/create_groupByCase_views.sh +++ b/framework/resources/Datasources/limit0/aggregates/aggregation/group_by_case/create_groupByCase_views.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/group_by_case/create_groupByCase_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/group_by_case/create_groupByCase_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/group_by_case/create_groupByCase_views.sql + ${DRILL_HOME}/bin/sqlline -n 
${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/group_by_case/create_groupByCase_views.sql fi set +x diff --git a/framework/resources/Datasources/limit0/aggregates/aggregation/group_by_case/create_groupByCase_views.sql b/framework/resources/Datasources/limit0/aggregates/aggregation/group_by_case/create_groupByCase_views.sql index 7d48df467..c672c118e 100644 --- a/framework/resources/Datasources/limit0/aggregates/aggregation/group_by_case/create_groupByCase_views.sql +++ b/framework/resources/Datasources/limit0/aggregates/aggregation/group_by_case/create_groupByCase_views.sql @@ -1,3 +1,3 @@ -create or replace view `dfs.aggregation`.alltypes_with_nulls_v as select cast(c_varchar as varchar(10)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_smalldecimal as double) c_smalldecimal,cast(c_bigdecimal as double precision) c_bigdecimal,cast(c_float as float) c_float,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double) d38 from `dfs.aggregation`.alltypes_with_nulls; +create or replace view `dfs_test.aggregation`.alltypes_with_nulls_v as select cast(c_varchar as varchar(10)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_smalldecimal as double) c_smalldecimal,cast(c_bigdecimal as double precision) c_bigdecimal,cast(c_float as float) c_float,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double) d38 from `dfs_test.aggregation`.alltypes_with_nulls; -create or replace view `dfs.aggregation`.alltypes_v as select cast(c_varchar as 
character varying(30)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_smalldecimal as double) c_smalldecimal,cast(c_bigdecimal as double precision) c_bigdecimal,cast(c_float as float) c_float,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double) d38 from `dfs.aggregation`.alltypes; +create or replace view `dfs_test.aggregation`.alltypes_v as select cast(c_varchar as character varying(30)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_smalldecimal as double) c_smalldecimal,cast(c_bigdecimal as double precision) c_bigdecimal,cast(c_float as float) c_float,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double) d38 from `dfs_test.aggregation`.alltypes; diff --git a/framework/resources/Datasources/limit0/aggregates/aggregation/multicolumn/create_multicolumn_views.sh b/framework/resources/Datasources/limit0/aggregates/aggregation/multicolumn/create_multicolumn_views.sh index fb1832880..a9155a58e 100755 --- a/framework/resources/Datasources/limit0/aggregates/aggregation/multicolumn/create_multicolumn_views.sh +++ b/framework/resources/Datasources/limit0/aggregates/aggregation/multicolumn/create_multicolumn_views.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/multicolumn/create_multicolumn_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" 
--run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/multicolumn/create_multicolumn_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/multicolumn/create_multicolumn_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/multicolumn/create_multicolumn_views.sql fi set +x diff --git a/framework/resources/Datasources/limit0/aggregates/aggregation/multicolumn/create_multicolumn_views.sql b/framework/resources/Datasources/limit0/aggregates/aggregation/multicolumn/create_multicolumn_views.sql index fa49e6751..aa226488d 100644 --- a/framework/resources/Datasources/limit0/aggregates/aggregation/multicolumn/create_multicolumn_views.sql +++ b/framework/resources/Datasources/limit0/aggregates/aggregation/multicolumn/create_multicolumn_views.sql @@ -1 +1 @@ -create or replace view `dfs.aggregation`.alltypes_with_nulls_v as select cast(c_varchar as varchar(10)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_smalldecimal as double) c_smalldecimal,cast(c_bigdecimal as double precision) c_bigdecimal,cast(c_float as float) c_float,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double) d38 from `dfs.aggregation`.alltypes_with_nulls; +create or replace view `dfs_test.aggregation`.alltypes_with_nulls_v as select cast(c_varchar as varchar(10)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_smalldecimal as double) c_smalldecimal,cast(c_bigdecimal as double precision) c_bigdecimal,cast(c_float 
as float) c_float,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double) d38 from `dfs_test.aggregation`.alltypes_with_nulls; diff --git a/framework/resources/Datasources/limit0/aggregates/aggregation/sanity/create_sanity_views.sh b/framework/resources/Datasources/limit0/aggregates/aggregation/sanity/create_sanity_views.sh index 41ec0f846..9c4d4a262 100755 --- a/framework/resources/Datasources/limit0/aggregates/aggregation/sanity/create_sanity_views.sh +++ b/framework/resources/Datasources/limit0/aggregates/aggregation/sanity/create_sanity_views.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/sanity/create_sanity_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/sanity/create_sanity_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/sanity/create_sanity_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/sanity/create_sanity_views.sql fi set +x diff --git a/framework/resources/Datasources/limit0/aggregates/aggregation/sanity/create_sanity_views.sql b/framework/resources/Datasources/limit0/aggregates/aggregation/sanity/create_sanity_views.sql index 3b771322c..8b0e1d2ed 100644 --- 
a/framework/resources/Datasources/limit0/aggregates/aggregation/sanity/create_sanity_views.sql +++ b/framework/resources/Datasources/limit0/aggregates/aggregation/sanity/create_sanity_views.sql @@ -1,5 +1,5 @@ -create or replace view `dfs.aggregation`.t1_v as select cast(a1 as int) a1,cast(b1 as char(5)) b1, cast(c1 as date) as c1 from `dfs.aggregation`.t1; +create or replace view `dfs_test.aggregation`.t1_v as select cast(a1 as int) a1,cast(b1 as char(5)) b1, cast(c1 as date) as c1 from `dfs_test.aggregation`.t1; -create or replace view `dfs.aggregation`.t2_v as select cast(a2 as int) a2, cast(b2 as character varying(5)) b2,cast(c2 as date) c2 from `dfs.aggregation`.t2; +create or replace view `dfs_test.aggregation`.t2_v as select cast(a2 as int) a2, cast(b2 as character varying(5)) b2,cast(c2 as date) c2 from `dfs_test.aggregation`.t2; -create or replace view `dfs.aggregation`.t3_v as select cast(a3 as integer) a3,cast(b3 as varchar(5)) b3,cast(c3 as date) c3 from `dfs.aggregation`.t3; +create or replace view `dfs_test.aggregation`.t3_v as select cast(a3 as integer) a3,cast(b3 as varchar(5)) b3,cast(c3 as date) c3 from `dfs_test.aggregation`.t3; diff --git a/framework/resources/Datasources/limit0/aggregates/aggregation/scalar/create_scalar_views.sh b/framework/resources/Datasources/limit0/aggregates/aggregation/scalar/create_scalar_views.sh index 2d2190be1..8a376dc24 100755 --- a/framework/resources/Datasources/limit0/aggregates/aggregation/scalar/create_scalar_views.sh +++ b/framework/resources/Datasources/limit0/aggregates/aggregation/scalar/create_scalar_views.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/scalar/create_scalar_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" 
--run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/scalar/create_scalar_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/scalar/create_scalar_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.aggregation;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/aggregation/scalar/create_scalar_views.sql fi set +x diff --git a/framework/resources/Datasources/limit0/aggregates/aggregation/scalar/create_scalar_views.sql b/framework/resources/Datasources/limit0/aggregates/aggregation/scalar/create_scalar_views.sql index ee299f048..5f24fa385 100644 --- a/framework/resources/Datasources/limit0/aggregates/aggregation/scalar/create_scalar_views.sql +++ b/framework/resources/Datasources/limit0/aggregates/aggregation/scalar/create_scalar_views.sql @@ -1 +1 @@ -create or replace view `dfs.aggregation`.alltypes_v as select cast(c_varchar as character varying(30)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_smalldecimal as double) c_smalldecimal,cast(c_bigdecimal as double precision) c_bigdecimal,cast(c_float as float) c_float,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double) d38 from `dfs.aggregation`.alltypes; +create or replace view `dfs_test.aggregation`.alltypes_v as select cast(c_varchar as character varying(30)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_smalldecimal as double) c_smalldecimal,cast(c_bigdecimal as double precision) c_bigdecimal,cast(c_float as float) c_float,cast(c_date as date) c_date,cast(c_time as time) 
c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double) d38 from `dfs_test.aggregation`.alltypes; diff --git a/framework/resources/Datasources/limit0/aggregates/tpcds_variants/parquet/create_tpcdsVariants_views.sh b/framework/resources/Datasources/limit0/aggregates/tpcds_variants/parquet/create_tpcdsVariants_views.sh index 3591333ae..959fb66b5 100755 --- a/framework/resources/Datasources/limit0/aggregates/tpcds_variants/parquet/create_tpcdsVariants_views.sh +++ b/framework/resources/Datasources/limit0/aggregates/tpcds_variants/parquet/create_tpcdsVariants_views.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.tpcds_sf1_parquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/tpcds_variants/parquet/create_tpcdsVariants_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.tpcds_sf1_parquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/tpcds_variants/parquet/create_tpcdsVariants_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.tpcds_sf1_parquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/tpcds_variants/parquet/create_tpcdsVariants_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.tpcds_sf1_parquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/aggregates/tpcds_variants/parquet/create_tpcdsVariants_views.sql fi set +x diff --git a/framework/resources/Datasources/limit0/aggregates/tpcds_variants/parquet/create_tpcdsVariants_views.sql b/framework/resources/Datasources/limit0/aggregates/tpcds_variants/parquet/create_tpcdsVariants_views.sql index 
df1bfb418..46862b5d8 100644 --- a/framework/resources/Datasources/limit0/aggregates/tpcds_variants/parquet/create_tpcdsVariants_views.sql +++ b/framework/resources/Datasources/limit0/aggregates/tpcds_variants/parquet/create_tpcdsVariants_views.sql @@ -1,4 +1,4 @@ -create or replace view `dfs.tpcds_sf1_parquet`.store_v as select +create or replace view `dfs_test.tpcds_sf1_parquet`.store_v as select cast( s_store_sk as integer) as s_store_sk, cast( s_store_id as varchar(200)) as s_store_id, cast( s_rec_start_date as date) as s_rec_start_date, @@ -28,9 +28,9 @@ cast( s_zip as varchar(200)) as s_zip, cast( s_country as varchar(200)) as s_country, cast( s_gmt_offset as double) as s_gmt_offset, cast( s_tax_precentage as double) as s_tax_precentage -from dfs.`/drill/testdata/tpcds_sf1/parquet/store`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/store`; -create or replace view `dfs.tpcds_sf1_parquet`.store_sales_v as select +create or replace view `dfs_test.tpcds_sf1_parquet`.store_sales_v as select cast( ss_sold_date_sk as integer) as ss_sold_date_sk, cast( ss_sold_time_sk as integer) as ss_sold_time_sk, cast( ss_item_sk as integer) as ss_item_sk, @@ -54,9 +54,9 @@ cast( ss_coupon_amt as double) as ss_coupon_amt, cast( ss_net_paid as double) as ss_net_paid, cast( ss_net_paid_inc_tax as double) as ss_net_paid_inc_tax, cast( ss_net_profit as double) as ss_net_profit -from dfs.`/drill/testdata/tpcds_sf1/parquet/store_sales`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/store_sales`; -create or replace view `dfs.tpcds_sf1_parquet`.customer_v as select +create or replace view `dfs_test.tpcds_sf1_parquet`.customer_v as select cast(c_customer_sk as integer) as c_customer_sk, cast(c_customer_id as varchar(200)) as c_customer_id, cast(c_current_cdemo_sk as integer) as c_current_cdemo_sk, @@ -75,4 +75,4 @@ cast(c_birth_country as varchar(200)) as c_birth_country, cast(c_login as varchar(200)) as c_login, cast(c_email_address as varchar(200)) as c_email_address, 
cast(c_last_review_date as varchar(200)) as c_last_review_date -from dfs.`/drill/testdata/tpcds_sf1/parquet/customer`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/customer`; diff --git a/framework/resources/Datasources/limit0/filters/create_filters_views.sh b/framework/resources/Datasources/limit0/filters/create_filters_views.sh index b88a6c4a5..35ab23613 100755 --- a/framework/resources/Datasources/limit0/filters/create_filters_views.sh +++ b/framework/resources/Datasources/limit0/filters/create_filters_views.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/filters/create_filters_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/filters/create_filters_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/filters/create_filters_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/filters/create_filters_views.sql fi set +x diff --git a/framework/resources/Datasources/limit0/filters/create_filters_views.sql b/framework/resources/Datasources/limit0/filters/create_filters_views.sql index 8396fc48c..3a1053429 100644 --- a/framework/resources/Datasources/limit0/filters/create_filters_views.sql +++ b/framework/resources/Datasources/limit0/filters/create_filters_views.sql @@ -1,5 +1,5 @@ -create or replace view `dfs.subqueries`.t1_v as select cast(a1 as int) a1,cast(b1 as char(5)) b1, cast(c1 as date) as c1 from `dfs.subqueries`.t1; +create or replace view 
`dfs_test.subqueries`.t1_v as select cast(a1 as int) a1,cast(b1 as char(5)) b1, cast(c1 as date) as c1 from `dfs_test.subqueries`.t1; -create or replace view `dfs.subqueries`.t2_v as select cast(a2 as int) a2, cast(b2 as character varying(5)) b2,cast(c2 as date) c2 from `dfs.subqueries`.t2; +create or replace view `dfs_test.subqueries`.t2_v as select cast(a2 as int) a2, cast(b2 as character varying(5)) b2,cast(c2 as date) c2 from `dfs_test.subqueries`.t2; -create or replace view `dfs.subqueries`.t3_v as select cast(a3 as integer) a3,cast(b3 as varchar(5)) b3,cast(c3 as date) c3 from `dfs.subqueries`.t3; +create or replace view `dfs_test.subqueries`.t3_v as select cast(a3 as integer) a3,cast(b3 as varchar(5)) b3,cast(c3 as date) c3 from `dfs_test.subqueries`.t3; diff --git a/framework/resources/Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sh b/framework/resources/Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sh index 87552b991..cbce72d6d 100755 --- a/framework/resources/Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sh +++ b/framework/resources/Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.joins;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.joins;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.joins;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p 
${PASSWORD} -u "jdbc:drill:schema=dfs_test.joins;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sql fi set +x diff --git a/framework/resources/Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sql b/framework/resources/Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sql index 6e5d306d4..31f28a8a8 100644 --- a/framework/resources/Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sql +++ b/framework/resources/Datasources/limit0/implicit_cast_with_views/create_implicit_cast_views.sql @@ -1,3 +1,3 @@ -create or replace view cast_tbl_1_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time, cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double precision) d38 from `dfs.joins`.cast_tbl_1; +create or replace view cast_tbl_1_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time, cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double precision) d38 from `dfs_test.joins`.cast_tbl_1; -create or replace view cast_tbl_2_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time, cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) 
c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double precision) d38 from `dfs.joins`.cast_tbl_2; +create or replace view cast_tbl_2_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time, cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28, cast(d38 as double precision) d38 from `dfs_test.joins`.cast_tbl_2; diff --git a/framework/resources/Datasources/limit0/p1tests/create_p1tests_csv_views.sh b/framework/resources/Datasources/limit0/p1tests/create_p1tests_csv_views.sh index bcae5f52f..229048d37 100755 --- a/framework/resources/Datasources/limit0/p1tests/create_p1tests_csv_views.sh +++ b/framework/resources/Datasources/limit0/p1tests/create_p1tests_csv_views.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.drillTestDirP1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/p1tests/create_p1tests_csv_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.drillTestDirP1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/p1tests/create_p1tests_csv_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.drillTestDirP1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/p1tests/create_p1tests_csv_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.drillTestDirP1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/p1tests/create_p1tests_csv_views.sql fi set +x diff --git 
a/framework/resources/Datasources/limit0/p1tests/create_p1tests_csv_views.sql b/framework/resources/Datasources/limit0/p1tests/create_p1tests_csv_views.sql index a3990c0d7..7a9899155 100644 --- a/framework/resources/Datasources/limit0/p1tests/create_p1tests_csv_views.sql +++ b/framework/resources/Datasources/limit0/p1tests/create_p1tests_csv_views.sql @@ -1,3 +1,3 @@ -create or replace view `dfs.drillTestDirP1`.voter_csv_v as select case when columns[0]='' then cast(null as int) else cast(columns[0] as int) end as voter_id,case when columns[1]='' then cast(null as character(30)) else cast(columns[1] as character(30)) end as name, case when columns[2]='' then cast(null as integer) else cast(columns[2] as integer) end as age, case when columns[3]='' then cast(null as char(20)) else cast(columns[3] as char(20)) end as registration, case when columns[4]='' then cast(null as double precision) else cast(columns[4] as double precision) end as contributions, case when columns[5]='' then cast(null as integer) else cast(columns[5] as integer) end as voterzone, case when columns[6]='' then cast(null as timestamp) else cast(columns[6] as timestamp) end as create_time, cast(columns[7] as boolean) isVote from `dfs.drillTestDirP1`.`voter.csv`; +create or replace view `dfs_test.drillTestDirP1`.voter_csv_v as select case when columns[0]='' then cast(null as int) else cast(columns[0] as int) end as voter_id,case when columns[1]='' then cast(null as character(30)) else cast(columns[1] as character(30)) end as name, case when columns[2]='' then cast(null as integer) else cast(columns[2] as integer) end as age, case when columns[3]='' then cast(null as char(20)) else cast(columns[3] as char(20)) end as registration, case when columns[4]='' then cast(null as double precision) else cast(columns[4] as double precision) end as contributions, case when columns[5]='' then cast(null as integer) else cast(columns[5] as integer) end as voterzone, case when columns[6]='' then cast(null as 
timestamp) else cast(columns[6] as timestamp) end as create_time, cast(columns[7] as boolean) isVote from `dfs_test.drillTestDirP1`.`voter.csv`; -create or replace view `dfs.drillTestDirP1`.student_csv_v as select cast(columns[0] as integer) student_id, cast(columns[1] as varchar(30)) name, cast(columns[2] as int) age, cast(columns[3] as double) gpa, cast(columns[4] as bigint) studentnum, cast(columns[5] as timestamp) create_time from `dfs.drillTestDirP1`.`student.csv`; +create or replace view `dfs_test.drillTestDirP1`.student_csv_v as select cast(columns[0] as integer) student_id, cast(columns[1] as varchar(30)) name, cast(columns[2] as int) age, cast(columns[3] as double) gpa, cast(columns[4] as bigint) studentnum, cast(columns[5] as timestamp) create_time from `dfs_test.drillTestDirP1`.`student.csv`; diff --git a/framework/resources/Datasources/limit0/p1tests/create_p1tests_parquet_views.sh b/framework/resources/Datasources/limit0/p1tests/create_p1tests_parquet_views.sh index 4c15487f9..663c19b9b 100755 --- a/framework/resources/Datasources/limit0/p1tests/create_p1tests_parquet_views.sh +++ b/framework/resources/Datasources/limit0/p1tests/create_p1tests_parquet_views.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.drillTestDirP1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/p1tests/create_p1tests_parquet_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.drillTestDirP1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/p1tests/create_p1tests_parquet_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.drillTestDirP1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/p1tests/create_p1tests_parquet_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u 
"jdbc:drill:schema=dfs_test.drillTestDirP1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/p1tests/create_p1tests_parquet_views.sql fi set +x diff --git a/framework/resources/Datasources/limit0/p1tests/create_p1tests_parquet_views.sql b/framework/resources/Datasources/limit0/p1tests/create_p1tests_parquet_views.sql index 713167193..20eccdcb4 100644 --- a/framework/resources/Datasources/limit0/p1tests/create_p1tests_parquet_views.sql +++ b/framework/resources/Datasources/limit0/p1tests/create_p1tests_parquet_views.sql @@ -1,3 +1,3 @@ -create or replace view `dfs.drillTestDirP1`.voter_parquet_v as select cast(voter_id as int) voter_id, cast(name as varchar(30)) as name, cast(age as integer) age, cast(registration as character varying(20)) registration, cast(contributions as double) contributions, cast(voterzone as integer) voterzone, cast(create_time as timestamp) create_time,cast(isVote as boolean) isVote from `dfs.drillTestDirP1`.`voter1.parquet`; +create or replace view `dfs_test.drillTestDirP1`.voter_parquet_v as select cast(voter_id as int) voter_id, cast(name as varchar(30)) as name, cast(age as integer) age, cast(registration as character varying(20)) registration, cast(contributions as double) contributions, cast(voterzone as integer) voterzone, cast(create_time as timestamp) create_time,cast(isVote as boolean) isVote from `dfs_test.drillTestDirP1`.`voter1.parquet`; -create or replace view `dfs.drillTestDirP1`.student_parquet_v as select cast(student_id as integer) student_id, cast(name as character(30)) name, cast(age as integer) age, cast(gpa as float) gpa, cast(studentnum as bigint) studentnum, cast(create_time as timestamp) create_time from `dfs.drillTestDirP1`.student; +create or replace view `dfs_test.drillTestDirP1`.student_parquet_v as select cast(student_id as integer) student_id, cast(name as character(30)) name, cast(age as integer) age, cast(gpa as float) gpa, cast(studentnum as bigint) studentnum, 
cast(create_time as timestamp) create_time from `dfs_test.drillTestDirP1`.student; diff --git a/framework/resources/Datasources/limit0/union/create_union_views.sh b/framework/resources/Datasources/limit0/union/create_union_views.sh index 67833b28f..4968b9b93 100755 --- a/framework/resources/Datasources/limit0/union/create_union_views.sh +++ b/framework/resources/Datasources/limit0/union/create_union_views.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.union;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/union/create_union_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.union;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/union/create_union_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.union;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/union/create_union_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.union;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/union/create_union_views.sql fi set +x diff --git a/framework/resources/Datasources/limit0/union/create_union_views.sql b/framework/resources/Datasources/limit0/union/create_union_views.sql index c08dc9aa7..bda770561 100644 --- a/framework/resources/Datasources/limit0/union/create_union_views.sql +++ b/framework/resources/Datasources/limit0/union/create_union_views.sql @@ -1,3 +1,3 @@ -create or replace view `dfs.union`.union_01_v as select cast(c1 as int) c1,cast(c2 as bigint) c2,cast(c3 as char(2)) c3,cast(c4 as varchar(52)) c4,cast(c5 as timestamp) c5,cast(c6 as date) c6,cast(c7 as boolean) c7,cast(c8 as double) c8, cast(c9 as time) c9 from `dfs.union`.`union_01.parquet`; +create or replace view `dfs_test.union`.union_01_v as select 
cast(c1 as int) c1,cast(c2 as bigint) c2,cast(c3 as char(2)) c3,cast(c4 as varchar(52)) c4,cast(c5 as timestamp) c5,cast(c6 as date) c6,cast(c7 as boolean) c7,cast(c8 as double) c8, cast(c9 as time) c9 from `dfs_test.union`.`union_01.parquet`; -create or replace view `dfs.union`.union_02_v as select cast(c1 as int) c1,cast(c2 as bigint) c2,cast(c3 as char(2)) c3,cast(c4 as varchar(52)) c4,cast(c5 as timestamp) c5,cast(c6 as date) c6,cast(c7 as boolean) c7,cast(c8 as double) c8, cast(c9 as time) c9 from `dfs.union`.`union_02.parquet`; +create or replace view `dfs_test.union`.union_02_v as select cast(c1 as int) c1,cast(c2 as bigint) c2,cast(c3 as char(2)) c3,cast(c4 as varchar(52)) c4,cast(c5 as timestamp) c5,cast(c6 as date) c6,cast(c7 as boolean) c7,cast(c8 as double) c8, cast(c9 as time) c9 from `dfs_test.union`.`union_02.parquet`; diff --git a/framework/resources/Datasources/limit0/union_all/create_union_all_views.sh b/framework/resources/Datasources/limit0/union_all/create_union_all_views.sh index 49756be9b..ba3d806b7 100755 --- a/framework/resources/Datasources/limit0/union_all/create_union_all_views.sh +++ b/framework/resources/Datasources/limit0/union_all/create_union_all_views.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.union_all;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/union_all/create_union_all_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.union_all;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/union_all/create_union_all_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.union_all;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/union_all/create_union_all_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u 
"jdbc:drill:schema=dfs_test.union_all;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/union_all/create_union_all_views.sql fi set +x diff --git a/framework/resources/Datasources/limit0/union_all/create_union_all_views.sql b/framework/resources/Datasources/limit0/union_all/create_union_all_views.sql index 5eb4e2ffa..1db81a899 100644 --- a/framework/resources/Datasources/limit0/union_all/create_union_all_views.sql +++ b/framework/resources/Datasources/limit0/union_all/create_union_all_views.sql @@ -1,9 +1,9 @@ -create or replace view `dfs.union_all`.prqUnAll_0_v as select cast(col_int as int) col_int,cast(col_bgint as bigint) col_bgint,cast(col_char as char(8)) col_char,cast(col_vchar as varchar(16)) col_vchar,cast(col_tmstmp as timestamp) col_tmstmp,cast(col_date as date) col_date,cast(col_boln as boolean) col_boln,cast(col_dbl as double) col_dbl from `dfs.union_all`.`prqUnAll_0.parquet`; +create or replace view `dfs_test.union_all`.prqUnAll_0_v as select cast(col_int as int) col_int,cast(col_bgint as bigint) col_bgint,cast(col_char as char(8)) col_char,cast(col_vchar as varchar(16)) col_vchar,cast(col_tmstmp as timestamp) col_tmstmp,cast(col_date as date) col_date,cast(col_boln as boolean) col_boln,cast(col_dbl as double) col_dbl from `dfs_test.union_all`.`prqUnAll_0.parquet`; -create or replace view `dfs.union_all`.prqUnAll_1_v as select cast(col_int as int) col_int,cast(col_bgint as bigint) col_bgint,cast(col_char as char(8)) col_char,cast(col_vchar as varchar(16)) col_vchar,cast(col_tmstmp as timestamp) col_tmstmp,cast(col_date as date) col_date,cast(col_boln as boolean) col_boln,cast(col_dbl as double) col_dbl from `dfs.union_all`.`prqUnAll_1.parquet`; +create or replace view `dfs_test.union_all`.prqUnAll_1_v as select cast(col_int as int) col_int,cast(col_bgint as bigint) col_bgint,cast(col_char as char(8)) col_char,cast(col_vchar as varchar(16)) col_vchar,cast(col_tmstmp as timestamp) col_tmstmp,cast(col_date as date) 
col_date,cast(col_boln as boolean) col_boln,cast(col_dbl as double) col_dbl from `dfs_test.union_all`.`prqUnAll_1.parquet`; -create or replace view `dfs.union_all`.notEmpty_csv_v as select cast(columns[0] as int) col1,cast(columns[1] as character varying(15)) col2, cast(columns[2] as varchar(7)) col3 from `dfs.union_all`.`notEmptyFolder/notEmpty.csv`; +create or replace view `dfs_test.union_all`.notEmpty_csv_v as select cast(columns[0] as int) col1,cast(columns[1] as character varying(15)) col2, cast(columns[2] as varchar(7)) col3 from `dfs_test.union_all`.`notEmptyFolder/notEmpty.csv`; -create or replace view `dfs.union_all`.empty_1_csv_v as select cast(columns[0] as int) key from `dfs.union_all`.`emptyFiles/empty_1.csv`; +create or replace view `dfs_test.union_all`.empty_1_csv_v as select cast(columns[0] as int) key from `dfs_test.union_all`.`emptyFiles/empty_1.csv`; -create or replace view `dfs.union_all`.empty_1_json_v as select cast(key as int) key from `dfs.union_all`.`emptyFiles/empty_1.json`; +create or replace view `dfs_test.union_all`.empty_1_json_v as select cast(key as int) key from `dfs_test.union_all`.`emptyFiles/empty_1.json`; diff --git a/framework/resources/Datasources/limit0/window_functions/aggregates/create_wf_aggregates_views.sh b/framework/resources/Datasources/limit0/window_functions/aggregates/create_wf_aggregates_views.sh index 7005b011c..cccc04db2 100755 --- a/framework/resources/Datasources/limit0/window_functions/aggregates/create_wf_aggregates_views.sh +++ b/framework/resources/Datasources/limit0/window_functions/aggregates/create_wf_aggregates_views.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.window_functions;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/window_functions/aggregates/create_wf_aggregates_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u 
"jdbc:drill:schema=dfs_test.window_functions;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/window_functions/aggregates/create_wf_aggregates_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.window_functions;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/window_functions/aggregates/create_wf_aggregates_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.window_functions;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/window_functions/aggregates/create_wf_aggregates_views.sql fi set +x diff --git a/framework/resources/Datasources/limit0/window_functions/aggregates/create_wf_aggregates_views.sql b/framework/resources/Datasources/limit0/window_functions/aggregates/create_wf_aggregates_views.sql index bfe03ee3c..f768378f4 100644 --- a/framework/resources/Datasources/limit0/window_functions/aggregates/create_wf_aggregates_views.sql +++ b/framework/resources/Datasources/limit0/window_functions/aggregates/create_wf_aggregates_views.sql @@ -1,7 +1,7 @@ -create or replace view `dfs.window_functions`.smlTbl_v as select cast(col_int as int) col_int,cast(col_bgint as bigint) col_bgint,cast(col_char_2 as char(2)) col_char_2,cast(col_vchar_52 as varchar(52)) col_vchar_52,cast(col_tmstmp as timestamp) col_tmstmp, cast(col_dt as date) col_dt, cast(col_booln as boolean), cast(col_dbl as double) col_dbl, cast(col_tm as time) col_tm from `dfs.window_functions`.`smlTbl.parquet`; +create or replace view `dfs_test.window_functions`.smlTbl_v as select cast(col_int as int) col_int,cast(col_bgint as bigint) col_bgint,cast(col_char_2 as char(2)) col_char_2,cast(col_vchar_52 as varchar(52)) col_vchar_52,cast(col_tmstmp as timestamp) col_tmstmp, cast(col_dt as date) col_dt, cast(col_booln as boolean), cast(col_dbl as double) col_dbl, cast(col_tm as time) col_tm from 
`dfs_test.window_functions`.`smlTbl.parquet`; -create or replace view `dfs.window_functions`.tblWnulls_v as select cast(c1 as integer) c1, cast(c2 as char(1)) c2 from `dfs.window_functions`.`tblWnulls.parquet`; +create or replace view `dfs_test.window_functions`.tblWnulls_v as select cast(c1 as integer) c1, cast(c2 as char(1)) c2 from `dfs_test.window_functions`.`tblWnulls.parquet`; -create or replace view `dfs.window_functions`.allTypsUniq_v as select cast(col0 as int) col0,cast(col1 as bigint) col1,cast(col2 as float) col2,cast(col3 as double) col3,cast(col4 as time) col4,cast(col5 as timestamp) col5,cast(col6 as date) col6,cast(col7 as boolean) col7,cast(col8 as char(2)) col8,cast(col9 as character varying(52)) col9 from `dfs.window_functions`.`allTypsUniq.parquet`; +create or replace view `dfs_test.window_functions`.allTypsUniq_v as select cast(col0 as int) col0,cast(col1 as bigint) col1,cast(col2 as float) col2,cast(col3 as double) col3,cast(col4 as time) col4,cast(col5 as timestamp) col5,cast(col6 as date) col6,cast(col7 as boolean) col7,cast(col8 as char(2)) col8,cast(col9 as character varying(52)) col9 from `dfs_test.window_functions`.`allTypsUniq.parquet`; -create or replace view `dfs.window_functions`.fewRowsAllData_v as select cast(col0 as int) col0,cast(col1 as bigint) col1,cast(col2 as char(2)) col2,cast(col3 as varchar(52)) col3,cast(col4 as timestamp) col4,cast(col5 as date) col5,cast(col6 as boolean) col6,cast(col7 as double) col7,cast(col8 as time) col8 from `dfs.window_functions`.`fewRowsAllData.parquet`; +create or replace view `dfs_test.window_functions`.fewRowsAllData_v as select cast(col0 as int) col0,cast(col1 as bigint) col1,cast(col2 as char(2)) col2,cast(col3 as varchar(52)) col3,cast(col4 as timestamp) col4,cast(col5 as date) col5,cast(col6 as boolean) col6,cast(col7 as double) col7,cast(col8 as time) col8 from `dfs_test.window_functions`.`fewRowsAllData.parquet`; diff --git 
a/framework/resources/Datasources/limit0/window_functions/bugs/create_wf_bugs_views.sh b/framework/resources/Datasources/limit0/window_functions/bugs/create_wf_bugs_views.sh index f04b28fe9..4c3b9f43c 100755 --- a/framework/resources/Datasources/limit0/window_functions/bugs/create_wf_bugs_views.sh +++ b/framework/resources/Datasources/limit0/window_functions/bugs/create_wf_bugs_views.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.dfs.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/window_functions/bugs/create_wf_bugs_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/window_functions/bugs/create_wf_bugs_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.dfs.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/window_functions/bugs/create_wf_bugs_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/window_functions/bugs/create_wf_bugs_views.sql fi set +x diff --git a/framework/resources/Datasources/limit0/window_functions/bugs/create_wf_bugs_views.sql b/framework/resources/Datasources/limit0/window_functions/bugs/create_wf_bugs_views.sql index d6b3ac479..19298670a 100644 --- a/framework/resources/Datasources/limit0/window_functions/bugs/create_wf_bugs_views.sql +++ b/framework/resources/Datasources/limit0/window_functions/bugs/create_wf_bugs_views.sql @@ -1,11 +1,11 @@ -create or replace view `dfs.subqueries`.t1_v as select cast(a1 as int) a1,cast(b1 as char(5)) b1, cast(c1 as date) as c1 from `dfs.subqueries`.t1; +create or replace view 
`dfs_test.subqueries`.t1_v as select cast(a1 as int) a1,cast(b1 as char(5)) b1, cast(c1 as date) as c1 from `dfs_test.subqueries`.t1; -create or replace view `dfs.subqueries`.t2_v as select cast(a2 as int) a2, cast(b2 as character varying(5)) b2,cast(c2 as date) c2 from `dfs.subqueries`.t2; +create or replace view `dfs_test.subqueries`.t2_v as select cast(a2 as int) a2, cast(b2 as character varying(5)) b2,cast(c2 as date) c2 from `dfs_test.subqueries`.t2; -create or replace view `dfs.subqueries`.t3_v as select cast(a3 as integer) a3,cast(b3 as varchar(5)) b3,cast(c3 as date) c3 from `dfs.subqueries`.t3; +create or replace view `dfs_test.subqueries`.t3_v as select cast(a3 as integer) a3,cast(b3 as varchar(5)) b3,cast(c3 as date) c3 from `dfs_test.subqueries`.t3; -create or replace view `dfs.subqueries`.j3_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs.subqueries`.j3; +create or replace view `dfs_test.subqueries`.j3_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs_test.subqueries`.j3; -create or replace view `dfs.subqueries`.j1_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) 
c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs.subqueries`.j1; +create or replace view `dfs_test.subqueries`.j1_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs_test.subqueries`.j1; -create or replace view `dfs.subqueries`.j6_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs.subqueries`.j6; +create or replace view `dfs_test.subqueries`.j6_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs_test.subqueries`.j6; diff --git a/framework/resources/Datasources/limit0/window_functions/empty_over_clause/create_wf_empty_over_clause_views.sh 
b/framework/resources/Datasources/limit0/window_functions/empty_over_clause/create_wf_empty_over_clause_views.sh index f5aeea829..36709ff4b 100755 --- a/framework/resources/Datasources/limit0/window_functions/empty_over_clause/create_wf_empty_over_clause_views.sh +++ b/framework/resources/Datasources/limit0/window_functions/empty_over_clause/create_wf_empty_over_clause_views.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.dfs.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/window_functions/empty_over_clause/create_wf_empty_over_clause_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/window_functions/empty_over_clause/create_wf_empty_over_clause_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.dfs.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/window_functions/empty_over_clause/create_wf_empty_over_clause_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/window_functions/empty_over_clause/create_wf_empty_over_clause_views.sql fi set +x diff --git a/framework/resources/Datasources/limit0/window_functions/empty_over_clause/create_wf_empty_over_clause_views.sql b/framework/resources/Datasources/limit0/window_functions/empty_over_clause/create_wf_empty_over_clause_views.sql index 04cba381c..ded4eb3fc 100644 --- a/framework/resources/Datasources/limit0/window_functions/empty_over_clause/create_wf_empty_over_clause_views.sql +++ b/framework/resources/Datasources/limit0/window_functions/empty_over_clause/create_wf_empty_over_clause_views.sql @@ -1,3 
+1,3 @@ -create or replace view `dfs.subqueries`.j1_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs.subqueries`.j1; +create or replace view `dfs_test.subqueries`.j1_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs_test.subqueries`.j1; -create or replace view `dfs.subqueries`.j7_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs.subqueries`.j7; +create or replace view `dfs_test.subqueries`.j7_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from 
`dfs_test.subqueries`.j7; diff --git a/framework/resources/Datasources/limit0/window_functions/multiple_partitions/create_wf_multiple_partitions_views.sh b/framework/resources/Datasources/limit0/window_functions/multiple_partitions/create_wf_multiple_partitions_views.sh index 2e5f397e6..1600203ef 100755 --- a/framework/resources/Datasources/limit0/window_functions/multiple_partitions/create_wf_multiple_partitions_views.sh +++ b/framework/resources/Datasources/limit0/window_functions/multiple_partitions/create_wf_multiple_partitions_views.sh @@ -6,9 +6,9 @@ set -e if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.dfs.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/window_functions/multiple_partitions/create_wf_multiple_partitions_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/window_functions/multiple_partitions/create_wf_multiple_partitions_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.dfs.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/window_functions/multiple_partitions/create_wf_multiple_partitions_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/limit0/window_functions/multiple_partitions/create_wf_multiple_partitions_views.sql fi set +x diff --git a/framework/resources/Datasources/limit0/window_functions/multiple_partitions/create_wf_multiple_partitions_views.sql b/framework/resources/Datasources/limit0/window_functions/multiple_partitions/create_wf_multiple_partitions_views.sql index 350458a4d..cf27b688e 100644 --- 
a/framework/resources/Datasources/limit0/window_functions/multiple_partitions/create_wf_multiple_partitions_views.sql +++ b/framework/resources/Datasources/limit0/window_functions/multiple_partitions/create_wf_multiple_partitions_views.sql @@ -1,13 +1,13 @@ -create or replace view `dfs.subqueries`.t1_v as select cast(a1 as int) a1,cast(b1 as char(5)) b1, cast(c1 as date) as c1 from `dfs.subqueries`.t1; +create or replace view `dfs_test.subqueries`.t1_v as select cast(a1 as int) a1,cast(b1 as char(5)) b1, cast(c1 as date) as c1 from `dfs_test.subqueries`.t1; -create or replace view `dfs.subqueries`.t2_v as select cast(a2 as int) a2, cast(b2 as character varying(5)) b2,cast(c2 as date) c2 from `dfs.subqueries`.t2; +create or replace view `dfs_test.subqueries`.t2_v as select cast(a2 as int) a2, cast(b2 as character varying(5)) b2,cast(c2 as date) c2 from `dfs_test.subqueries`.t2; -create or replace view `dfs.subqueries`.j2_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs.subqueries`.j2; +create or replace view `dfs_test.subqueries`.j2_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs_test.subqueries`.j2; -create or replace view `dfs.subqueries`.j1_v as select cast(c_varchar as varchar(20)) 
c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs.subqueries`.j1; +create or replace view `dfs_test.subqueries`.j1_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs_test.subqueries`.j1; -create or replace view `dfs.subqueries`.j4_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs.subqueries`.j4; +create or replace view `dfs_test.subqueries`.j4_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs_test.subqueries`.j4; -create or replace view `dfs.subqueries`.j6_v as select cast(c_varchar as varchar(20)) 
c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs.subqueries`.j6; +create or replace view `dfs_test.subqueries`.j6_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs_test.subqueries`.j6; -create or replace view `dfs.subqueries`.j7_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs.subqueries`.j7; +create or replace view `dfs_test.subqueries`.j7_v as select cast(c_varchar as varchar(20)) c_varchar,cast(c_integer as int) c_integer,cast(c_bigint as bigint) c_bigint,cast(c_float as float) c_float,cast(c_double as double) c_double,cast(c_date as date) c_date,cast(c_time as time) c_time,cast(c_timestamp as timestamp) c_timestamp,cast(c_boolean as boolean) c_boolean,cast(d9 as double) d9,cast(d18 as double) d18,cast(d28 as double) d28,cast(d38 as double precision) d38 from `dfs_test.subqueries`.j7; diff --git a/framework/resources/Datasources/metadata_caching/addremove_files.sh 
b/framework/resources/Datasources/metadata_caching/addremove_files.sh index 681b80f3a..466d8dfd5 100755 --- a/framework/resources/Datasources/metadata_caching/addremove_files.sh +++ b/framework/resources/Datasources/metadata_caching/addremove_files.sh @@ -54,9 +54,9 @@ ${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/delete_cache.sh /drill/testd if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_addremovefiles.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_addremovefiles.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_addremovefiles.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_addremovefiles.ddl fi hadoop fs -copyFromLocal ${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/data/lineitem1.parquet /drill/testdata/metadata_caching/lineitem_addfiles/lineitem1.parquet diff --git a/framework/resources/Datasources/metadata_caching/advanced_gen.ddl b/framework/resources/Datasources/metadata_caching/advanced_gen.ddl index 8e56909dd..417ff3987 100755 --- a/framework/resources/Datasources/metadata_caching/advanced_gen.ddl +++ b/framework/resources/Datasources/metadata_caching/advanced_gen.ddl @@ -1 +1 @@ -refresh table metadata dfs.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem`; +refresh table metadata dfs_test.`/drill/testdata/tpch100_dir_partitioned_50000files/lineitem`; diff --git 
a/framework/resources/Datasources/metadata_caching/advanced_gen.sh b/framework/resources/Datasources/metadata_caching/advanced_gen.sh index 7a783d46e..b5b04ee23 100755 --- a/framework/resources/Datasources/metadata_caching/advanced_gen.sh +++ b/framework/resources/Datasources/metadata_caching/advanced_gen.sh @@ -3,9 +3,9 @@ source conf/drillTestConfig.properties if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/advanced_gen.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/advanced_gen.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/advanced_gen.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/advanced_gen.ddl fi # it seems that sqlline does not exit, if one of the queries failed. 
diff --git a/framework/resources/Datasources/metadata_caching/ctas_auto.ddl b/framework/resources/Datasources/metadata_caching/ctas_auto.ddl index 603263cb4..d09993c24 100755 --- a/framework/resources/Datasources/metadata_caching/ctas_auto.ddl +++ b/framework/resources/Datasources/metadata_caching/ctas_auto.ddl @@ -1,6 +1,6 @@ alter session set `planner.width.max_per_node` = 4; alter session set `planner.memory.max_query_memory_per_node` = 8147483648; -create table lineitem partition by (l_shipdate, l_receiptdate) as select * from dfs.`/drill/testdata/tpch100/lineitem`; +create table lineitem partition by (l_shipdate, l_receiptdate) as select * from dfs_test.`/drill/testdata/tpch100/lineitem`; diff --git a/framework/resources/Datasources/metadata_caching/delete_toplevel_cache.sh b/framework/resources/Datasources/metadata_caching/delete_toplevel_cache.sh index 13fb6c07d..88b26363c 100755 --- a/framework/resources/Datasources/metadata_caching/delete_toplevel_cache.sh +++ b/framework/resources/Datasources/metadata_caching/delete_toplevel_cache.sh @@ -3,9 +3,9 @@ source conf/drillTestConfig.properties if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_deletecache.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_deletecache.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_deletecache.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" 
--run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_deletecache.ddl fi hadoop fs -test /drill/testdata/metadata_caching/lineitem_deletecache/.drill.parquet_metadata diff --git a/framework/resources/Datasources/metadata_caching/gen_partitioned_data.ddl b/framework/resources/Datasources/metadata_caching/gen_partitioned_data.ddl index 783143564..d2618d7c5 100755 --- a/framework/resources/Datasources/metadata_caching/gen_partitioned_data.ddl +++ b/framework/resources/Datasources/metadata_caching/gen_partitioned_data.ddl @@ -2,6 +2,6 @@ alter session set `planner.width.max_per_node` = 4; alter session set `planner.memory.max_query_memory_per_node` = 8147483648; alter session set `store.partition.hash_distribute` = true; -create table lineitem partition by (l_shipdate, l_receiptdate) as select * from dfs.`/drill/testdata/tpch100/lineitem`; -create table orders partition by (o_clerk) as select * from dfs.`/drill/testdata/tpch100/orders`; -create table supplier partition by (s_nationkey) as select * from dfs.`/drill/testdata/tpch100/supplier`; +create table lineitem partition by (l_shipdate, l_receiptdate) as select * from dfs_test.`/drill/testdata/tpch100/lineitem`; +create table orders partition by (o_clerk) as select * from dfs_test.`/drill/testdata/tpch100/orders`; +create table supplier partition by (s_nationkey) as select * from dfs_test.`/drill/testdata/tpch100/supplier`; diff --git a/framework/resources/Datasources/metadata_caching/gendata.ddl b/framework/resources/Datasources/metadata_caching/gendata.ddl index b230598b3..c90a06457 100755 --- a/framework/resources/Datasources/metadata_caching/gendata.ddl +++ b/framework/resources/Datasources/metadata_caching/gendata.ddl @@ -1,9 +1,9 @@ ALTER SESSION SET `store.parquet.block-size` = 698000; -create table lineitem as select * from dfs.`/drill/testdata/tpch100/lineitem`; -create table orders as select * from dfs.`/drill/testdata/tpch100/orders`; -create table customer as select * from 
dfs.`/drill/testdata/tpch100/customer`; -create table supplier as select * from dfs.`/drill/testdata/tpch100/supplier`; -create table part as select * from dfs.`/drill/testdata/tpch100/part`; -create table partsupp as select * from dfs.`/drill/testdata/tpch100/partsupp`; -create table region as select * from dfs.`/drill/testdata/tpch100/region`; -create table nation as select * from dfs.`/drill/testdata/tpch100/nation`; +create table lineitem as select * from dfs_test.`/drill/testdata/tpch100/lineitem`; +create table orders as select * from dfs_test.`/drill/testdata/tpch100/orders`; +create table customer as select * from dfs_test.`/drill/testdata/tpch100/customer`; +create table supplier as select * from dfs_test.`/drill/testdata/tpch100/supplier`; +create table part as select * from dfs_test.`/drill/testdata/tpch100/part`; +create table partsupp as select * from dfs_test.`/drill/testdata/tpch100/partsupp`; +create table region as select * from dfs_test.`/drill/testdata/tpch100/region`; +create table nation as select * from dfs_test.`/drill/testdata/tpch100/nation`; diff --git a/framework/resources/Datasources/metadata_caching/gendata.sh b/framework/resources/Datasources/metadata_caching/gendata.sh index f68c1aee7..e23834158 100755 --- a/framework/resources/Datasources/metadata_caching/gendata.sh +++ b/framework/resources/Datasources/metadata_caching/gendata.sh @@ -8,7 +8,7 @@ echo $mul size=`expr $var / $mul` echo $size #sed "s/$var/$size/g" "gendata.ddl" > gendata.ddl_temp && mv gendata.ddl_temp gendata.ddl -${DRILL_HOME}/bin/sqlline -n user1 -p mapr -u "jdbc:drill:schema=dfs.tpch100_$1files;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/gendata.ddl +${DRILL_HOME}/bin/sqlline -n user1 -p mapr -u "jdbc:drill:schema=dfs_test.tpch100_$1files;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/gendata.ddl #sed "s/$size/$var/g" "gendata.ddl" > gendata.ddl_temp && mv 
gendata.ddl_temp gendata.ddl # TODO: it seems that sqlline does not exit, if one of the queries failed. diff --git a/framework/resources/Datasources/metadata_caching/refresh_metadata_addremovefiles.ddl b/framework/resources/Datasources/metadata_caching/refresh_metadata_addremovefiles.ddl index 648db333c..c95f19251 100755 --- a/framework/resources/Datasources/metadata_caching/refresh_metadata_addremovefiles.ddl +++ b/framework/resources/Datasources/metadata_caching/refresh_metadata_addremovefiles.ddl @@ -1,22 +1,22 @@ -refresh table metadata dfs.`/drill/testdata/metadata_caching/lineitem_addfiles`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/lineitem_removefiles`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/lineitem_adddir`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/lineitem_removedir`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/lineitem_addautopartitioned_files`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/lineitem_removeautopartitioned_files`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/orders`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/fewtypes`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/fewtypes_intpartition`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/fewtypes_bigintpartition`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/fewtypes_datepartition`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/fewtypes_timepartition`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/fewtypes_timestamppartition`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/fewtypes_intervalpartition`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/fewtypes_varcharpartition`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/fewtypes_floatpartition`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/fewtypes_doublepartition`; 
-refresh table metadata dfs.`/drill/testdata/metadata_caching/fewtypes_boolpartition`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/fewtypes_varcharpartition`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/vc_part`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/lineitem`; -refresh table metadata dfs.`/drill/testdata/metadata_caching/lineitem_hierarchical_intint`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/lineitem_addfiles`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/lineitem_removefiles`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/lineitem_adddir`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/lineitem_removedir`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/lineitem_addautopartitioned_files`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/lineitem_removeautopartitioned_files`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/orders`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/fewtypes`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/fewtypes_intpartition`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/fewtypes_bigintpartition`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/fewtypes_datepartition`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/fewtypes_timepartition`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/fewtypes_timestamppartition`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/fewtypes_intervalpartition`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/fewtypes_varcharpartition`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/fewtypes_floatpartition`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/fewtypes_doublepartition`; +refresh table metadata 
dfs_test.`/drill/testdata/metadata_caching/fewtypes_boolpartition`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/fewtypes_varcharpartition`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/vc_part`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/lineitem`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/lineitem_hierarchical_intint`; diff --git a/framework/resources/Datasources/metadata_caching/refresh_metadata_deletecache.ddl b/framework/resources/Datasources/metadata_caching/refresh_metadata_deletecache.ddl index 7db5efb56..04edb9c9e 100755 --- a/framework/resources/Datasources/metadata_caching/refresh_metadata_deletecache.ddl +++ b/framework/resources/Datasources/metadata_caching/refresh_metadata_deletecache.ddl @@ -1 +1 @@ -refresh table metadata dfs.`/drill/testdata/metadata_caching/lineitem_deletecache`; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/lineitem_deletecache`; diff --git a/framework/resources/Datasources/metadata_caching/refresh_metadata_multilevel.ddl b/framework/resources/Datasources/metadata_caching/refresh_metadata_multilevel.ddl index 28f32babc..567a849ad 100644 --- a/framework/resources/Datasources/metadata_caching/refresh_metadata_multilevel.ddl +++ b/framework/resources/Datasources/metadata_caching/refresh_metadata_multilevel.ddl @@ -1,10 +1,10 @@ -refresh table metadata dfs.metadata_caching_pp.l_3level; -refresh table metadata dfs.metadata_caching_pp.c_1level; -refresh table metadata dfs.metadata_caching_pp.o_2level; -refresh table metadata dfs.metadata_caching_pp.ps_5level; -refresh table metadata dfs.metadata_caching_pp.s_1level; -refresh table metadata dfs.metadata_caching_pp.p_4level; -refresh table metadata dfs.`/drill/testdata/mc/nation`; -refresh table metadata dfs.metadata_caching.empty; -refresh table metadata dfs.`/drill/testdata/metadata_caching/nation`; -refresh table metadata dfs.metadata_caching.empty1; +refresh table 
metadata dfs_test.metadata_caching_pp.l_3level; +refresh table metadata dfs_test.metadata_caching_pp.c_1level; +refresh table metadata dfs_test.metadata_caching_pp.o_2level; +refresh table metadata dfs_test.metadata_caching_pp.ps_5level; +refresh table metadata dfs_test.metadata_caching_pp.s_1level; +refresh table metadata dfs_test.metadata_caching_pp.p_4level; +refresh table metadata dfs_test.`/drill/testdata/mc/nation`; +refresh table metadata dfs_test.metadata_caching.empty; +refresh table metadata dfs_test.`/drill/testdata/metadata_caching/nation`; +refresh table metadata dfs_test.metadata_caching.empty1; diff --git a/framework/resources/Datasources/metadata_caching/refresh_metadata_multilevel.sh b/framework/resources/Datasources/metadata_caching/refresh_metadata_multilevel.sh index 2ea507c1f..b74366a34 100755 --- a/framework/resources/Datasources/metadata_caching/refresh_metadata_multilevel.sh +++ b/framework/resources/Datasources/metadata_caching/refresh_metadata_multilevel.sh @@ -3,9 +3,9 @@ source conf/drillTestConfig.properties if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_multilevel.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_multilevel.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_multilevel.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_multilevel.ddl fi # it seems that sqlline does not exit, if one of the 
queries failed. diff --git a/framework/resources/Datasources/metadata_caching/refresh_metadata_tpch.sh b/framework/resources/Datasources/metadata_caching/refresh_metadata_tpch.sh index 496d1cec9..c6a605b9e 100755 --- a/framework/resources/Datasources/metadata_caching/refresh_metadata_tpch.sh +++ b/framework/resources/Datasources/metadata_caching/refresh_metadata_tpch.sh @@ -5,9 +5,9 @@ ${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/delete_cache.sh "/drill/test if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_tpch.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_tpch.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_tpch.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_tpch.ddl fi # it seems that sqlline does not exit, if one of the queries failed. 
diff --git a/framework/resources/Datasources/parquet_date/gen.sh b/framework/resources/Datasources/parquet_date/gen.sh index 0e6b2000b..68f9fcd22 100755 --- a/framework/resources/Datasources/parquet_date/gen.sh +++ b/framework/resources/Datasources/parquet_date/gen.sh @@ -24,9 +24,9 @@ hadoop fs -copyFromLocal ${DRILL_TEST_DATA_DIR}/Datasources/parquet_date/auto_pa if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.ctas_parquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/parquet_date/metadata_cache.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.ctas_parquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/parquet_date/metadata_cache.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.ctas_parquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/parquet_date/metadata_cache.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.ctas_parquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/parquet_date/metadata_cache.ddl fi # TODO: it seems that sqlline does not exit, if one of the queries failed. 
diff --git a/framework/resources/Datasources/parquet_date/metadata_cache.ddl b/framework/resources/Datasources/parquet_date/metadata_cache.ddl index 0014ed857..bf659572e 100644 --- a/framework/resources/Datasources/parquet_date/metadata_cache.ddl +++ b/framework/resources/Datasources/parquet_date/metadata_cache.ddl @@ -1,43 +1,43 @@ -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2/ctas_t1`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2/ctas_t14`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2/date_nulls`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2/fewtypes_null`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2/fewtypes_null_large`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2/pre1.0lineitem_1.2mc`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2/ctas_t1`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2/ctas_t14`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2/date_nulls`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2/fewtypes_null`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2/fewtypes_null_large`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2/pre1.0lineitem_1.2mc`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6/ctas_t1`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6/ctas_t14`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6/date_nulls`; -refresh table metadata 
dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6/fewtypes_null`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6/fewtypes_null_large`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6/pre1.0lineitem_1.2mc`; -refresh table metadata dfs.`/drill/testdata/parquet_date/auto_partition/lineitem_single`; -refresh table metadata dfs.`/drill/testdata/parquet_date/auto_partition/item_single`; -refresh table metadata dfs.`/drill/testdata/parquet_date/auto_partition/item_multidate`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6/ctas_t1`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6/ctas_t14`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6/date_nulls`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6/fewtypes_null`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6/fewtypes_null_large`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6/pre1.0lineitem_1.2mc`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/auto_partition/lineitem_single`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/auto_partition/item_single`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/auto_partition/item_multidate`; -refresh table metadata dfs.`/drill/testdata/mc_parquet_date/mixed1`; -refresh table metadata dfs.`/drill/testdata/mc_parquet_date/spark_generated/d4`; -refresh table metadata dfs.`/drill/testdata/mc_parquet_date/lineitem_dates`; -refresh table metadata dfs.`/drill/testdata/mc_parquet_date/dates_nodrillversion/drillgen1`; -refresh table metadata dfs.`/drill/testdata/mc_parquet_date/mixed1_partitioned`; -refresh table metadata 
dfs.`/drill/testdata/mc_parquet_date/spark_generated/d1`; -refresh table metadata dfs.`/drill/testdata/mc_parquet_date/spark_generated/d2`; -refresh table metadata dfs.`/drill/testdata/mc_parquet_date/spark_generated/d3`; -refresh table metadata dfs.`/drill/testdata/mc_parquet_date/dates_nodrillversion/drillgen2_lineitem`; -refresh table metadata dfs.`/drill/testdata/mc_parquet_date/fixeddate_lineitem`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/fewtypes_null_large`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/ctas_t1`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/ctas_t14`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/date_nulls`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/fewtypes_null`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/pre1.0lineitem_1.2mc`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/fewtypes_null_large`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/ctas_t1`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/ctas_t14`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/date_nulls`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/fewtypes_null`; -refresh table metadata dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/pre1.0lineitem_1.2mc`; +refresh table metadata dfs_test.`/drill/testdata/mc_parquet_date/mixed1`; +refresh table metadata dfs_test.`/drill/testdata/mc_parquet_date/spark_generated/d4`; +refresh table metadata 
dfs_test.`/drill/testdata/mc_parquet_date/lineitem_dates`; +refresh table metadata dfs_test.`/drill/testdata/mc_parquet_date/dates_nodrillversion/drillgen1`; +refresh table metadata dfs_test.`/drill/testdata/mc_parquet_date/mixed1_partitioned`; +refresh table metadata dfs_test.`/drill/testdata/mc_parquet_date/spark_generated/d1`; +refresh table metadata dfs_test.`/drill/testdata/mc_parquet_date/spark_generated/d2`; +refresh table metadata dfs_test.`/drill/testdata/mc_parquet_date/spark_generated/d3`; +refresh table metadata dfs_test.`/drill/testdata/mc_parquet_date/dates_nodrillversion/drillgen2_lineitem`; +refresh table metadata dfs_test.`/drill/testdata/mc_parquet_date/fixeddate_lineitem`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/fewtypes_null_large`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/ctas_t1`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/ctas_t14`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/date_nulls`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/fewtypes_null`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/pre1.0lineitem_1.2mc`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/fewtypes_null_large`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/ctas_t1`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/ctas_t14`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/date_nulls`; +refresh table metadata 
dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/fewtypes_null`; +refresh table metadata dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/pre1.0lineitem_1.2mc`; diff --git a/framework/resources/Datasources/subqueries/create_tables.sh b/framework/resources/Datasources/subqueries/create_tables.sh index c6efc0a82..341722ae2 100755 --- a/framework/resources/Datasources/subqueries/create_tables.sh +++ b/framework/resources/Datasources/subqueries/create_tables.sh @@ -11,9 +11,9 @@ fi if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/subqueries/create_tables.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/subqueries/create_tables.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/subqueries/create_tables.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/subqueries/create_tables.sql fi if [ "$?" 
-eq 0 ] diff --git a/framework/resources/Datasources/subqueries/create_views.sh b/framework/resources/Datasources/subqueries/create_views.sh index ea569d491..2162d17b8 100755 --- a/framework/resources/Datasources/subqueries/create_views.sh +++ b/framework/resources/Datasources/subqueries/create_views.sh @@ -6,9 +6,9 @@ hadoop fs -rm -r /drill/testdata/subqueries/required_type_v.view.drill if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/subqueries/create_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/subqueries/create_views.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/subqueries/create_views.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.subqueries;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/subqueries/create_views.sql fi if [ "$?" 
-eq 0 ] diff --git a/framework/resources/Datasources/tpcds/createTablesJson.sh b/framework/resources/Datasources/tpcds/createTablesJson.sh index e362f8749..a3af8f8b5 100755 --- a/framework/resources/Datasources/tpcds/createTablesJson.sh +++ b/framework/resources/Datasources/tpcds/createTablesJson.sh @@ -5,8 +5,8 @@ hadoop fs -test -d /drill/testdata/tpcds_sf1/json ;if [ `echo $?` -eq 0 ]; then if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.tpcds_sf1_json;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createTablesJson.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.tpcds_sf1_json;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createTablesJson.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.tpcds_sf1_json;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createTablesJson.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.tpcds_sf1_json;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createTablesJson.sql ${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsJson.sh fi diff --git a/framework/resources/Datasources/tpcds/createTablesJson.sql b/framework/resources/Datasources/tpcds/createTablesJson.sql index e38d683c3..f4dcea8e6 100755 --- a/framework/resources/Datasources/tpcds/createTablesJson.sql +++ b/framework/resources/Datasources/tpcds/createTablesJson.sql @@ -1,6 +1,6 @@ alter session set `store.format` = 'json'; -use dfs.tpcds_sf1_json; +use dfs_test.tpcds_sf1_json; create table customer as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as c_customer_sk, @@ -21,7 +21,7 @@ create table customer as select case when (columns[15]='') then cast(null as varchar(200)) else cast(columns[15] as 
varchar(200)) end as c_login, case when (columns[16]='') then cast(null as varchar(200)) else cast(columns[16] as varchar(200)) end as c_email_address, case when (columns[17]='') then cast(null as varchar(200)) else cast(columns[17] as varchar(200)) end as c_last_review_date -from dfs.`/drill/testdata/tpcds_sf1/text/customer`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/customer`; create table customer_address as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as ca_address_sk, @@ -37,7 +37,7 @@ case when (columns[9]='') then cast(null as varchar(200)) else cast(columns[9] a case when (columns[10]='') then cast(null as varchar(200)) else cast(columns[10] as varchar(200)) end as ca_country, case when (columns[11]='') then cast(null as integer) else cast(columns[11] as integer) end as ca_gmt_offset, case when (columns[12]='') then cast(null as varchar(200)) else cast(columns[12] as varchar(200)) end as ca_location_type -from dfs.`/drill/testdata/tpcds_sf1/text/customer_address`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/customer_address`; create table customer_demographics as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as cd_demo_sk, @@ -49,7 +49,7 @@ case when (columns[5]='') then cast(null as varchar(200)) else cast(columns[5] a case when (columns[6]='') then cast(null as integer) else cast(columns[6] as integer) end as cd_dep_count, case when (columns[7]='') then cast(null as integer) else cast(columns[7] as integer) end as cd_dep_employed_count, case when (columns[8]='') then cast(null as integer) else cast(columns[8] as integer) end as cd_dep_college_count -from dfs.`/drill/testdata/tpcds_sf1/text/customer_demographics`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/customer_demographics`; create table household_demographics as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as hd_demo_sk, @@ -57,7 +57,7 
@@ case when (columns[1]='') then cast(null as integer) else cast(columns[1] as int case when (columns[2]='') then cast(null as varchar(200)) else cast(columns[2] as varchar(200)) end as hd_buy_potential, case when (columns[3]='') then cast(null as integer) else cast(columns[3] as integer) end as hd_dep_count, case when (columns[4]='') then cast(null as integer) else cast(columns[4] as integer) end as hd_vehicle_count -from dfs.`/drill/testdata/tpcds_sf1/text/household_demographics`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/household_demographics`; create table item as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as i_item_sk, @@ -82,7 +82,7 @@ case when (columns[18]='') then cast(null as varchar(200)) else cast(columns[18] case when (columns[19]='') then cast(null as varchar(200)) else cast(columns[19] as varchar(200)) end as i_container, case when (columns[20]='') then cast(null as integer) else cast(columns[20] as integer) end as i_manager_id, case when (columns[21]='') then cast(null as varchar(200)) else cast(columns[21] as varchar(200)) end as i_product_name -from dfs.`/drill/testdata/tpcds_sf1/text/item`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/item`; create table promotion as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as p_promo_sk, @@ -104,7 +104,7 @@ case when (columns[15]='') then cast(null as varchar(200)) else cast(columns[15] case when (columns[16]='') then cast(null as varchar(200)) else cast(columns[16] as varchar(200)) end as p_channel_details, case when (columns[17]='') then cast(null as varchar(200)) else cast(columns[17] as varchar(200)) end as p_purpose, case when (columns[18]='') then cast(null as varchar(200)) else cast(columns[18] as varchar(200)) end as p_discount_active -from dfs.`/drill/testdata/tpcds_sf1/text/promotion`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/promotion`; create table time_dim as select case when 
(columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as t_time_sk, @@ -117,7 +117,7 @@ case when (columns[6]='') then cast(null as varchar(200)) else cast(columns[6] a case when (columns[7]='') then cast(null as varchar(200)) else cast(columns[7] as varchar(200)) end as t_shift, case when (columns[8]='') then cast(null as varchar(200)) else cast(columns[8] as varchar(200)) end as t_sub_shift , case when (columns[9]='') then cast(null as varchar(200)) else cast(columns[9] as varchar(200)) end as t_meal_time -from dfs.`/drill/testdata/tpcds_sf1/text/time_dim`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/time_dim`; create table date_dim as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as d_date_sk, @@ -148,7 +148,7 @@ case when (columns[24]='') then cast(null as varchar(200)) else cast(columns[24] case when (columns[25]='') then cast(null as varchar(200)) else cast(columns[25] as varchar(200)) end as d_current_month, case when (columns[26]='') then cast(null as varchar(200)) else cast(columns[26] as varchar(200)) end as d_current_quarter, case when (columns[27]='') then cast(null as varchar(200)) else cast(columns[27] as varchar(200)) end as d_current_year -from dfs.`/drill/testdata/tpcds_sf1/text/date_dim`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/date_dim`; create table store as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as s_store_sk, @@ -180,7 +180,7 @@ case when (columns[25]='') then cast(null as varchar(200)) else cast(columns[25] case when (columns[26]='') then cast(null as varchar(200)) else cast(columns[26] as varchar(200)) end as s_country, case when (columns[27]='') then cast(null as double) else cast(columns[27] as double) end as s_gmt_offset, case when (columns[28]='') then cast(null as double) else cast(columns[28] as double) end as s_tax_precentage -from dfs.`/drill/testdata/tpcds_sf1/text/store`; +from 
dfs_test.`/drill/testdata/tpcds_sf1/text/store`; create table store_sales as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as ss_sold_date_sk, @@ -206,7 +206,7 @@ case when (columns[19]='') then cast(null as double) else cast(columns[19] as do case when (columns[20]='') then cast(null as double) else cast(columns[20] as double) end as ss_net_paid, case when (columns[21]='') then cast(null as double) else cast(columns[21] as double) end as ss_net_paid_inc_tax, case when (columns[22]='') then cast(null as double) else cast(columns[22] as double) end as ss_net_profit -from dfs.`/drill/testdata/tpcds_sf1/text/store_sales`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/store_sales`; create table warehouse as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as w_warehouse_sk, @@ -223,7 +223,7 @@ create table warehouse as select case when (columns[11]='') then cast(null as varchar(200)) else cast(columns[11] as varchar(200)) end as w_zip, case when (columns[12]='') then cast(null as varchar(200)) else cast(columns[12] as varchar(200)) end as w_country, case when (columns[13]='') then cast(null as double) else cast(columns[13] as double) end as w_gmt_offset -from dfs.`/drill/testdata/tpcds_sf1/text/warehouse`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/warehouse`; create table ship_mode as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as sm_ship_mode_sk, @@ -232,19 +232,19 @@ create table ship_mode as select case when (columns[3]='') then cast(null as varchar(200)) else cast(columns[3] as varchar(200)) end as sm_code, case when (columns[4]='') then cast(null as varchar(200)) else cast(columns[4] as varchar(200)) end as sm_carrier, case when (columns[5]='') then cast(null as varchar(200)) else cast(columns[5] as varchar(200)) end as sm_contract -from dfs.`/drill/testdata/tpcds_sf1/text/ship_mode`; +from 
dfs_test.`/drill/testdata/tpcds_sf1/text/ship_mode`; create table reason as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as r_reason_sk, case when (columns[1]='') then cast(null as varchar(200)) else cast(columns[1] as varchar(200)) end as r_reason_id, case when (columns[2]='') then cast(null as varchar(200)) else cast(columns[2] as varchar(200)) end as r_reason_desc -from dfs.`/drill/testdata/tpcds_sf1/text/reason`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/reason`; create table income_band as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as ib_income_band_sk, case when (columns[1]='') then cast(null as integer) else cast(columns[1] as integer) end as ib_lower_bound, case when (columns[2]='') then cast(null as integer) else cast(columns[2] as integer) end as ib_upper_bound -from dfs.`/drill/testdata/tpcds_sf1/text/income_band`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/income_band`; create table call_center as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as cc_call_center_sk, @@ -278,7 +278,7 @@ create table call_center as select case when (columns[28]='') then cast(null as varchar(200)) else cast(columns[28] as varchar(200)) end as cc_country, case when (columns[29]='') then cast(null as double) else cast(columns[29] as double) end as cc_gmt_offset, case when (columns[30]='') then cast(null as double) else cast(columns[30] as double) end as cc_tax_percentage -from dfs.`/drill/testdata/tpcds_sf1/text/call_center`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/call_center`; create table web_site as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as web_site_sk, @@ -307,7 +307,7 @@ create table web_site as select case when (columns[23]='') then cast(null as varchar(200)) else cast(columns[23] as varchar(200)) end as web_country, case when (columns[24]='') 
then cast(null as double) else cast(columns[24] as double) end as web_gmt_offset, case when (columns[25]='') then cast(null as double) else cast(columns[25] as double) end as web_tax_percentage -from dfs.`/drill/testdata/tpcds_sf1/text/web_site`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/web_site`; create table store_returns as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as sr_returned_date_sk, @@ -330,7 +330,7 @@ create table store_returns as select case when (columns[17]='') then cast(null as double) else cast(columns[17] as double) end as sr_reversed_charge, case when (columns[18]='') then cast(null as double) else cast(columns[18] as double) end as sr_store_credit, case when (columns[19]='') then cast(null as double) else cast(columns[19] as double) end as sr_net_loss -from dfs.`/drill/testdata/tpcds_sf1/text/store_returns`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/store_returns`; create table web_page as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as wp_web_page_sk, @@ -347,7 +347,7 @@ create table web_page as select case when (columns[11]='') then cast(null as integer) else cast(columns[11] as integer) end as wp_link_count, case when (columns[12]='') then cast(null as integer) else cast(columns[12] as integer) end as wp_image_count, case when (columns[13]='') then cast(null as integer) else cast(columns[13] as integer) end as wp_max_ad_count -from dfs.`/drill/testdata/tpcds_sf1/text/web_page`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/web_page`; create table catalog_page as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as cp_catalog_page_sk, @@ -359,14 +359,14 @@ create table catalog_page as select case when (columns[6]='') then cast(null as integer) else cast(columns[6] as integer) end as cp_catalog_page_number, case when (columns[7]='') then cast(null as varchar(200)) else 
cast(columns[7] as varchar(200)) end as cp_description, case when (columns[8]='') then cast(null as varchar(200)) else cast(columns[8] as varchar(200)) end as cp_type -from dfs.`/drill/testdata/tpcds_sf1/text/catalog_page`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/catalog_page`; create table inventory as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as inv_date_sk, case when (columns[1]='') then cast(null as integer) else cast(columns[1] as integer) end as inv_item_sk, case when (columns[2]='') then cast(null as integer) else cast(columns[2] as integer) end as inv_warehouse_sk, case when (columns[3]='') then cast(null as integer) else cast(columns[3] as integer) end as inv_quantity_on_hand -from dfs.`/drill/testdata/tpcds_sf1/text/inventory`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/inventory`; create table catalog_returns as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as cr_returned_date_sk, @@ -396,7 +396,7 @@ create table catalog_returns as select case when (columns[24]='') then cast(null as double) else cast(columns[24] as double) end as cr_reversed_charge, case when (columns[25]='') then cast(null as double) else cast(columns[25] as double) end as cr_store_credit, case when (columns[26]='') then cast(null as double) else cast(columns[26] as double) end as cr_net_loss -from dfs.`/drill/testdata/tpcds_sf1/text/catalog_returns`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/catalog_returns`; create table web_returns as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as wr_returned_date_sk, @@ -423,7 +423,7 @@ create table web_returns as select case when (columns[21]='') then cast(null as double) else cast(columns[21] as double) end as wr_reversed_charge, case when (columns[22]='') then cast(null as double) else cast(columns[22] as double) end as wr_account_credit, case when (columns[23]='') then 
cast(null as double) else cast(columns[23] as double) end as wr_net_loss -from dfs.`/drill/testdata/tpcds_sf1/text/web_returns`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/web_returns`; create table web_sales as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as ws_sold_date_sk, @@ -460,7 +460,7 @@ create table web_sales as select case when (columns[31]='') then cast(null as double) else cast(columns[31] as double) end as ws_net_paid_inc_ship, case when (columns[32]='') then cast(null as double) else cast(columns[32] as double) end as ws_net_paid_inc_ship_tax, case when (columns[33]='') then cast(null as double) else cast(columns[33] as double) end as ws_net_profit -from dfs.`/drill/testdata/tpcds_sf1/text/web_sales`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/web_sales`; create table catalog_sales as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as cs_sold_date_sk, @@ -497,6 +497,6 @@ create table catalog_sales as select case when (columns[31]='') then cast(null as double) else cast(columns[31] as double) end as cs_net_paid_inc_ship, case when (columns[32]='') then cast(null as double) else cast(columns[32] as double) end as cs_net_paid_inc_ship_tax, case when (columns[33]='') then cast(null as double) else cast(columns[33] as double) end as cs_net_profit -from dfs.`/drill/testdata/tpcds_sf1/text/catalog_sales`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/catalog_sales`; alter session set `store.format` = 'parquet'; diff --git a/framework/resources/Datasources/tpcds/createTablesParquet.sh b/framework/resources/Datasources/tpcds/createTablesParquet.sh index c166c5fc0..955d4416c 100755 --- a/framework/resources/Datasources/tpcds/createTablesParquet.sh +++ b/framework/resources/Datasources/tpcds/createTablesParquet.sh @@ -5,7 +5,7 @@ hadoop fs -test -d /drill/testdata/tpcds_sf1/parquet ;if [ `echo $?` -eq 0 ]; th if [ -z "$PASSWORD" ] then - 
${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.tpcds_sf1_parquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createTablesParquet.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.tpcds_sf1_parquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createTablesParquet.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.tpcds_sf1_parquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createTablesParquet.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.tpcds_sf1_parquet;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createTablesParquet.sql fi diff --git a/framework/resources/Datasources/tpcds/createTablesParquet.sql b/framework/resources/Datasources/tpcds/createTablesParquet.sql index bde4b30a3..e588e7173 100755 --- a/framework/resources/Datasources/tpcds/createTablesParquet.sql +++ b/framework/resources/Datasources/tpcds/createTablesParquet.sql @@ -1,4 +1,4 @@ -use dfs.tpcds_sf1_parquet; +use dfs_test.tpcds_sf1_parquet; create table customer as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as c_customer_sk, @@ -19,7 +19,7 @@ create table customer as select case when (columns[15]='') then cast(null as varchar(200)) else cast(columns[15] as varchar(200)) end as c_login, case when (columns[16]='') then cast(null as varchar(200)) else cast(columns[16] as varchar(200)) end as c_email_address, case when (columns[17]='') then cast(null as varchar(200)) else cast(columns[17] as varchar(200)) end as c_last_review_date -from dfs.`/drill/testdata/tpcds_sf1/text/customer`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/customer`; create table customer_address as select case when (columns[0]='') then cast(null as integer) else 
cast(columns[0] as integer) end as ca_address_sk, @@ -35,7 +35,7 @@ case when (columns[9]='') then cast(null as varchar(200)) else cast(columns[9] a case when (columns[10]='') then cast(null as varchar(200)) else cast(columns[10] as varchar(200)) end as ca_country, case when (columns[11]='') then cast(null as integer) else cast(columns[11] as integer) end as ca_gmt_offset, case when (columns[12]='') then cast(null as varchar(200)) else cast(columns[12] as varchar(200)) end as ca_location_type -from dfs.`/drill/testdata/tpcds_sf1/text/customer_address`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/customer_address`; create table customer_demographics as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as cd_demo_sk, @@ -47,7 +47,7 @@ case when (columns[5]='') then cast(null as varchar(200)) else cast(columns[5] a case when (columns[6]='') then cast(null as integer) else cast(columns[6] as integer) end as cd_dep_count, case when (columns[7]='') then cast(null as integer) else cast(columns[7] as integer) end as cd_dep_employed_count, case when (columns[8]='') then cast(null as integer) else cast(columns[8] as integer) end as cd_dep_college_count -from dfs.`/drill/testdata/tpcds_sf1/text/customer_demographics`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/customer_demographics`; create table household_demographics as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as hd_demo_sk, @@ -55,7 +55,7 @@ case when (columns[1]='') then cast(null as integer) else cast(columns[1] as int case when (columns[2]='') then cast(null as varchar(200)) else cast(columns[2] as varchar(200)) end as hd_buy_potential, case when (columns[3]='') then cast(null as integer) else cast(columns[3] as integer) end as hd_dep_count, case when (columns[4]='') then cast(null as integer) else cast(columns[4] as integer) end as hd_vehicle_count -from 
dfs.`/drill/testdata/tpcds_sf1/text/household_demographics`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/household_demographics`; create table item as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as i_item_sk, @@ -80,7 +80,7 @@ case when (columns[18]='') then cast(null as varchar(200)) else cast(columns[18] case when (columns[19]='') then cast(null as varchar(200)) else cast(columns[19] as varchar(200)) end as i_container, case when (columns[20]='') then cast(null as integer) else cast(columns[20] as integer) end as i_manager_id, case when (columns[21]='') then cast(null as varchar(200)) else cast(columns[21] as varchar(200)) end as i_product_name -from dfs.`/drill/testdata/tpcds_sf1/text/item`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/item`; create table promotion as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as p_promo_sk, @@ -102,7 +102,7 @@ case when (columns[15]='') then cast(null as varchar(200)) else cast(columns[15] case when (columns[16]='') then cast(null as varchar(200)) else cast(columns[16] as varchar(200)) end as p_channel_details, case when (columns[17]='') then cast(null as varchar(200)) else cast(columns[17] as varchar(200)) end as p_purpose, case when (columns[18]='') then cast(null as varchar(200)) else cast(columns[18] as varchar(200)) end as p_discount_active -from dfs.`/drill/testdata/tpcds_sf1/text/promotion`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/promotion`; create table time_dim as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as t_time_sk, @@ -115,7 +115,7 @@ case when (columns[6]='') then cast(null as varchar(200)) else cast(columns[6] a case when (columns[7]='') then cast(null as varchar(200)) else cast(columns[7] as varchar(200)) end as t_shift, case when (columns[8]='') then cast(null as varchar(200)) else cast(columns[8] as varchar(200)) end as t_sub_shift , case 
when (columns[9]='') then cast(null as varchar(200)) else cast(columns[9] as varchar(200)) end as t_meal_time -from dfs.`/drill/testdata/tpcds_sf1/text/time_dim`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/time_dim`; create table date_dim as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as d_date_sk, @@ -146,7 +146,7 @@ case when (columns[24]='') then cast(null as varchar(200)) else cast(columns[24] case when (columns[25]='') then cast(null as varchar(200)) else cast(columns[25] as varchar(200)) end as d_current_month, case when (columns[26]='') then cast(null as varchar(200)) else cast(columns[26] as varchar(200)) end as d_current_quarter, case when (columns[27]='') then cast(null as varchar(200)) else cast(columns[27] as varchar(200)) end as d_current_year -from dfs.`/drill/testdata/tpcds_sf1/text/date_dim`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/date_dim`; create table store as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as s_store_sk, @@ -178,7 +178,7 @@ case when (columns[25]='') then cast(null as varchar(200)) else cast(columns[25] case when (columns[26]='') then cast(null as varchar(200)) else cast(columns[26] as varchar(200)) end as s_country, case when (columns[27]='') then cast(null as double) else cast(columns[27] as double) end as s_gmt_offset, case when (columns[28]='') then cast(null as double) else cast(columns[28] as double) end as s_tax_precentage -from dfs.`/drill/testdata/tpcds_sf1/text/store`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/store`; create table store_sales as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as ss_sold_date_sk, @@ -204,7 +204,7 @@ case when (columns[19]='') then cast(null as double) else cast(columns[19] as do case when (columns[20]='') then cast(null as double) else cast(columns[20] as double) end as ss_net_paid, case when (columns[21]='') then 
cast(null as double) else cast(columns[21] as double) end as ss_net_paid_inc_tax, case when (columns[22]='') then cast(null as double) else cast(columns[22] as double) end as ss_net_profit -from dfs.`/drill/testdata/tpcds_sf1/text/store_sales`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/store_sales`; create table warehouse as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as w_warehouse_sk, @@ -221,7 +221,7 @@ create table warehouse as select case when (columns[11]='') then cast(null as varchar(200)) else cast(columns[11] as varchar(200)) end as w_zip, case when (columns[12]='') then cast(null as varchar(200)) else cast(columns[12] as varchar(200)) end as w_country, case when (columns[13]='') then cast(null as double) else cast(columns[13] as double) end as w_gmt_offset -from dfs.`/drill/testdata/tpcds_sf1/text/warehouse`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/warehouse`; create table ship_mode as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as sm_ship_mode_sk, @@ -230,19 +230,19 @@ create table ship_mode as select case when (columns[3]='') then cast(null as varchar(200)) else cast(columns[3] as varchar(200)) end as sm_code, case when (columns[4]='') then cast(null as varchar(200)) else cast(columns[4] as varchar(200)) end as sm_carrier, case when (columns[5]='') then cast(null as varchar(200)) else cast(columns[5] as varchar(200)) end as sm_contract -from dfs.`/drill/testdata/tpcds_sf1/text/ship_mode`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/ship_mode`; create table reason as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as r_reason_sk, case when (columns[1]='') then cast(null as varchar(200)) else cast(columns[1] as varchar(200)) end as r_reason_id, case when (columns[2]='') then cast(null as varchar(200)) else cast(columns[2] as varchar(200)) end as r_reason_desc -from 
dfs.`/drill/testdata/tpcds_sf1/text/reason`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/reason`; create table income_band as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as ib_income_band_sk, case when (columns[1]='') then cast(null as integer) else cast(columns[1] as integer) end as ib_lower_bound, case when (columns[2]='') then cast(null as integer) else cast(columns[2] as integer) end as ib_upper_bound -from dfs.`/drill/testdata/tpcds_sf1/text/income_band`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/income_band`; create table call_center as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as cc_call_center_sk, @@ -276,7 +276,7 @@ create table call_center as select case when (columns[28]='') then cast(null as varchar(200)) else cast(columns[28] as varchar(200)) end as cc_country, case when (columns[29]='') then cast(null as double) else cast(columns[29] as double) end as cc_gmt_offset, case when (columns[30]='') then cast(null as double) else cast(columns[30] as double) end as cc_tax_percentage -from dfs.`/drill/testdata/tpcds_sf1/text/call_center`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/call_center`; create table web_site as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as web_site_sk, @@ -305,7 +305,7 @@ create table web_site as select case when (columns[23]='') then cast(null as varchar(200)) else cast(columns[23] as varchar(200)) end as web_country, case when (columns[24]='') then cast(null as double) else cast(columns[24] as double) end as web_gmt_offset, case when (columns[25]='') then cast(null as double) else cast(columns[25] as double) end as web_tax_percentage -from dfs.`/drill/testdata/tpcds_sf1/text/web_site`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/web_site`; create table store_returns as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) 
end as sr_returned_date_sk, @@ -328,7 +328,7 @@ create table store_returns as select case when (columns[17]='') then cast(null as double) else cast(columns[17] as double) end as sr_reversed_charge, case when (columns[18]='') then cast(null as double) else cast(columns[18] as double) end as sr_store_credit, case when (columns[19]='') then cast(null as double) else cast(columns[19] as double) end as sr_net_loss -from dfs.`/drill/testdata/tpcds_sf1/text/store_returns`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/store_returns`; create table web_page as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as wp_web_page_sk, @@ -345,7 +345,7 @@ create table web_page as select case when (columns[11]='') then cast(null as integer) else cast(columns[11] as integer) end as wp_link_count, case when (columns[12]='') then cast(null as integer) else cast(columns[12] as integer) end as wp_image_count, case when (columns[13]='') then cast(null as integer) else cast(columns[13] as integer) end as wp_max_ad_count -from dfs.`/drill/testdata/tpcds_sf1/text/web_page`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/web_page`; create table catalog_page as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as cp_catalog_page_sk, @@ -357,14 +357,14 @@ create table catalog_page as select case when (columns[6]='') then cast(null as integer) else cast(columns[6] as integer) end as cp_catalog_page_number, case when (columns[7]='') then cast(null as varchar(200)) else cast(columns[7] as varchar(200)) end as cp_description, case when (columns[8]='') then cast(null as varchar(200)) else cast(columns[8] as varchar(200)) end as cp_type -from dfs.`/drill/testdata/tpcds_sf1/text/catalog_page`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/catalog_page`; create table inventory as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as inv_date_sk, case when 
(columns[1]='') then cast(null as integer) else cast(columns[1] as integer) end as inv_item_sk, case when (columns[2]='') then cast(null as integer) else cast(columns[2] as integer) end as inv_warehouse_sk, case when (columns[3]='') then cast(null as integer) else cast(columns[3] as integer) end as inv_quantity_on_hand -from dfs.`/drill/testdata/tpcds_sf1/text/inventory`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/inventory`; create table catalog_returns as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as cr_returned_date_sk, @@ -394,7 +394,7 @@ create table catalog_returns as select case when (columns[24]='') then cast(null as double) else cast(columns[24] as double) end as cr_reversed_charge, case when (columns[25]='') then cast(null as double) else cast(columns[25] as double) end as cr_store_credit, case when (columns[26]='') then cast(null as double) else cast(columns[26] as double) end as cr_net_loss -from dfs.`/drill/testdata/tpcds_sf1/text/catalog_returns`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/catalog_returns`; create table web_returns as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as wr_returned_date_sk, @@ -421,7 +421,7 @@ create table web_returns as select case when (columns[21]='') then cast(null as double) else cast(columns[21] as double) end as wr_reversed_charge, case when (columns[22]='') then cast(null as double) else cast(columns[22] as double) end as wr_account_credit, case when (columns[23]='') then cast(null as double) else cast(columns[23] as double) end as wr_net_loss -from dfs.`/drill/testdata/tpcds_sf1/text/web_returns`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/web_returns`; create table web_sales as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as ws_sold_date_sk, @@ -458,7 +458,7 @@ create table web_sales as select case when (columns[31]='') then cast(null as double) 
else cast(columns[31] as double) end as ws_net_paid_inc_ship, case when (columns[32]='') then cast(null as double) else cast(columns[32] as double) end as ws_net_paid_inc_ship_tax, case when (columns[33]='') then cast(null as double) else cast(columns[33] as double) end as ws_net_profit -from dfs.`/drill/testdata/tpcds_sf1/text/web_sales`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/web_sales`; create table catalog_sales as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as cs_sold_date_sk, @@ -495,4 +495,4 @@ create table catalog_sales as select case when (columns[31]='') then cast(null as double) else cast(columns[31] as double) end as cs_net_paid_inc_ship, case when (columns[32]='') then cast(null as double) else cast(columns[32] as double) end as cs_net_paid_inc_ship_tax, case when (columns[33]='') then cast(null as double) else cast(columns[33] as double) end as cs_net_profit -from dfs.`/drill/testdata/tpcds_sf1/text/catalog_sales`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/catalog_sales`; diff --git a/framework/resources/Datasources/tpcds/createViewsJson.sh b/framework/resources/Datasources/tpcds/createViewsJson.sh index f989b5d86..e80b78fb6 100755 --- a/framework/resources/Datasources/tpcds/createViewsJson.sh +++ b/framework/resources/Datasources/tpcds/createViewsJson.sh @@ -5,7 +5,7 @@ hadoop fs -test -d /drill/testdata/tpcds_sf1/json/views ;if [ `echo $?` -eq 1 ]; if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.tpcds_sf1_json_views;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsJson.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.tpcds_sf1_json_views;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsJson.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u 
"jdbc:drill:schema=dfs.tpcds_sf1_json_views;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsJson.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.tpcds_sf1_json_views;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsJson.sql fi diff --git a/framework/resources/Datasources/tpcds/createViewsJson.sql b/framework/resources/Datasources/tpcds/createViewsJson.sql index ae183f80f..d2df1ce59 100755 --- a/framework/resources/Datasources/tpcds/createViewsJson.sql +++ b/framework/resources/Datasources/tpcds/createViewsJson.sql @@ -1,4 +1,4 @@ -use dfs.tpcds_sf1_json_views; +use dfs_test.tpcds_sf1_json_views; create or replace view customer as select cast(c_customer_sk as integer) as c_customer_sk, @@ -19,7 +19,7 @@ cast(c_birth_country as varchar(200)) as c_birth_country, cast(c_login as varchar(200)) as c_login, cast(c_email_address as varchar(200)) as c_email_address, cast(c_last_review_date as varchar(200)) as c_last_review_date -from dfs.`/drill/testdata/tpcds_sf1/json/customer`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/customer`; create or replace view customer_address as select cast(ca_address_sk as integer) as ca_address_sk, @@ -35,7 +35,7 @@ cast(ca_zip as varchar(200)) as ca_zip, cast(ca_country as varchar(200)) as ca_country, cast(ca_gmt_offset as integer) as ca_gmt_offset, cast(ca_location_type as varchar(200)) as ca_location_type -from dfs.`/drill/testdata/tpcds_sf1/json/customer_address`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/customer_address`; create or replace view customer_demographics as select cast( cd_demo_sk as integer) as cd_demo_sk, @@ -47,7 +47,7 @@ cast( cd_credit_rating as varchar(200)) as cd_credit_rating, cast( cd_dep_count as integer) as cd_dep_count, cast( cd_dep_employed_count as integer) as cd_dep_employed_count, cast( cd_dep_college_count as integer) as cd_dep_college_count -from 
dfs.`/drill/testdata/tpcds_sf1/json/customer_demographics`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/customer_demographics`; create or replace view household_demographics as select cast( hd_demo_sk as integer) as hd_demo_sk, @@ -55,7 +55,7 @@ cast( hd_income_band_sk as integer) as hd_income_band_sk, cast( hd_buy_potential as varchar(200)) as hd_buy_potential, cast( hd_dep_count as integer) as hd_dep_count, cast( hd_vehicle_count as integer) as hd_vehicle_count -from dfs.`/drill/testdata/tpcds_sf1/json/household_demographics`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/household_demographics`; create or replace view item as select cast( i_item_sk as integer) as i_item_sk, @@ -80,7 +80,7 @@ cast( i_units as varchar(200)) as i_units, cast( i_container as varchar(200)) as i_container, cast( i_manager_id as integer) as i_manager_id, cast( i_product_name as varchar(200)) as i_product_name -from dfs.`/drill/testdata/tpcds_sf1/json/item`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/item`; create or replace view promotion as select cast( p_promo_sk as integer) as p_promo_sk, @@ -102,7 +102,7 @@ cast( p_channel_demo as varchar(200)) as p_channel_demo, cast( p_channel_details as varchar(200)) as p_channel_details, cast( p_purpose as varchar(200)) as p_purpose, cast( p_discount_active as varchar(200)) as p_discount_active -from dfs.`/drill/testdata/tpcds_sf1/json/promotion`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/promotion`; create or replace view time_dim as select cast( t_time_sk as integer) as t_time_sk, @@ -115,7 +115,7 @@ cast( t_am_pm as varchar(200)) as t_am_pm, cast( t_shift as varchar(200)) as t_shift, cast( t_sub_shift as varchar(200)) as t_sub_shift , cast( t_meal_time as varchar(200)) as t_meal_time -from dfs.`/drill/testdata/tpcds_sf1/json/time_dim`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/time_dim`; create or replace view date_dim as select cast( d_date_sk as integer) as d_date_sk, @@ -146,7 +146,7 @@ cast( d_current_week as 
varchar(200)) as d_current_week, cast( d_current_month as varchar(200)) as d_current_month, cast( d_current_quarter as varchar(200)) as d_current_quarter, cast( d_current_year as varchar(200)) as d_current_year -from dfs.`/drill/testdata/tpcds_sf1/json/date_dim`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/date_dim`; create or replace view store as select cast( s_store_sk as integer) as s_store_sk, @@ -178,7 +178,7 @@ cast( s_zip as varchar(200)) as s_zip, cast( s_country as varchar(200)) as s_country, cast( s_gmt_offset as double) as s_gmt_offset, cast( s_tax_precentage as double) as s_tax_precentage -from dfs.`/drill/testdata/tpcds_sf1/json/store`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/store`; create or replace view store_sales as select cast( ss_sold_date_sk as integer) as ss_sold_date_sk, @@ -204,7 +204,7 @@ cast( ss_coupon_amt as double) as ss_coupon_amt, cast( ss_net_paid as double) as ss_net_paid, cast( ss_net_paid_inc_tax as double) as ss_net_paid_inc_tax, cast( ss_net_profit as double) as ss_net_profit -from dfs.`/drill/testdata/tpcds_sf1/json/store_sales`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/store_sales`; create or replace view warehouse as select cast( w_warehouse_sk as integer) as w_warehouse_sk, @@ -221,7 +221,7 @@ cast( w_state as varchar(200)) as w_state, cast( w_zip as varchar(200)) as w_zip, cast( w_country as varchar(200)) as w_country, cast( w_gmt_offset as double) as w_gmt_offset -from dfs.`/drill/testdata/tpcds_sf1/json/warehouse`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/warehouse`; create or replace view ship_mode as select cast( sm_ship_mode_sk as integer) as sm_ship_mode_sk, @@ -230,19 +230,19 @@ cast( sm_type as varchar(200)) as sm_type, cast( sm_code as varchar(200)) as sm_code, cast( sm_carrier as varchar(200)) as sm_carrier, cast( sm_contract as varchar(200)) as sm_contract -from dfs.`/drill/testdata/tpcds_sf1/json/ship_mode`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/ship_mode`; create or replace view 
reason as select cast( r_reason_sk as integer) as r_reason_sk, cast( r_reason_id as varchar(200)) as r_reason_id, cast( r_reason_desc as varchar(200)) as r_reason_desc -from dfs.`/drill/testdata/tpcds_sf1/json/reason`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/reason`; create or replace view income_band as select cast( ib_income_band_sk as integer) as ib_income_band_sk, cast( ib_lower_bound as integer) as ib_lower_bound, cast( ib_upper_bound as integer) as ib_upper_bound -from dfs.`/drill/testdata/tpcds_sf1/json/income_band`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/income_band`; create or replace view call_center as select cast( cc_call_center_sk as integer) as cc_call_center_sk, @@ -276,7 +276,7 @@ cast( cc_zip as varchar(200)) as cc_zip, cast( cc_country as varchar(200)) as cc_country, cast( cc_gmt_offset as double) as cc_gmt_offset, cast( cc_tax_percentage as double) as cc_tax_percentage -from dfs.`/drill/testdata/tpcds_sf1/json/call_center`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/call_center`; create or replace view web_site as select cast( web_site_sk as integer) as web_site_sk, @@ -305,7 +305,7 @@ cast( web_zip as varchar(200)) as web_zip, cast( web_country as varchar(200)) as web_country, cast( web_gmt_offset as double) as web_gmt_offset, cast( web_tax_percentage as double) as web_tax_percentage -from dfs.`/drill/testdata/tpcds_sf1/json/web_site`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/web_site`; create or replace view store_returns as select cast( sr_returned_date_sk as integer) as sr_returned_date_sk, @@ -328,7 +328,7 @@ cast( sr_refunded_cash as double) as sr_refunded_cash, cast( sr_reversed_charge as double) as sr_reversed_charge, cast( sr_store_credit as double) as sr_store_credit, cast( sr_net_loss as double) as sr_net_loss -from dfs.`/drill/testdata/tpcds_sf1/json/store_returns`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/store_returns`; create or replace view web_page as select cast( wp_web_page_sk as integer) as 
wp_web_page_sk, @@ -345,7 +345,7 @@ cast( wp_char_count as integer) as wp_char_count, cast( wp_link_count as integer) as wp_link_count, cast( wp_image_count as integer) as wp_image_count, cast( wp_max_ad_count as integer) as wp_max_ad_count -from dfs.`/drill/testdata/tpcds_sf1/json/web_page`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/web_page`; create or replace view catalog_page as select cast( cp_catalog_page_sk as integer) as cp_catalog_page_sk, @@ -357,14 +357,14 @@ cast( cp_catalog_number as integer) as cp_catalog_number, cast( cp_catalog_page_number as integer) as cp_catalog_page_number, cast( cp_description as varchar(200)) as cp_description, cast( cp_type as varchar(200)) as cp_type -from dfs.`/drill/testdata/tpcds_sf1/json/catalog_page`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/catalog_page`; create or replace view inventory as select cast( inv_date_sk as integer) as inv_date_sk, cast( inv_item_sk as integer) as inv_item_sk, cast( inv_warehouse_sk as integer) as inv_warehouse_sk, cast( inv_quantity_on_hand as integer) as inv_quantity_on_hand -from dfs.`/drill/testdata/tpcds_sf1/json/inventory`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/inventory`; create or replace view catalog_returns as select cast( cr_returned_date_sk as integer) as cr_returned_date_sk, @@ -394,7 +394,7 @@ cast( cr_refunded_cash as double) as cr_refunded_cash, cast( cr_reversed_charge as double) as cr_reversed_charge, cast( cr_store_credit as double) as cr_store_credit, cast( cr_net_loss as double) as cr_net_loss -from dfs.`/drill/testdata/tpcds_sf1/json/catalog_returns`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/catalog_returns`; create or replace view web_returns as select cast( wr_returned_date_sk as integer) as wr_returned_date_sk, @@ -421,7 +421,7 @@ cast( wr_refunded_cash as double) as wr_refunded_cash, cast( wr_reversed_charge as double) as wr_reversed_charge, cast( wr_account_credit as double) as wr_account_credit, cast( wr_net_loss as double) as wr_net_loss 
-from dfs.`/drill/testdata/tpcds_sf1/json/web_returns`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/web_returns`; create or replace view web_sales as select cast( ws_sold_date_sk as integer) as ws_sold_date_sk, @@ -458,7 +458,7 @@ cast( ws_net_paid_inc_tax as double) as ws_net_paid_inc_tax, cast( ws_net_paid_inc_ship as double) as ws_net_paid_inc_ship, cast( ws_net_paid_inc_ship_tax as double) as ws_net_paid_inc_ship_tax, cast( ws_net_profit as double) as ws_net_profit -from dfs.`/drill/testdata/tpcds_sf1/json/web_sales`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/web_sales`; create or replace view catalog_sales as select cast( cs_sold_date_sk as integer) as cs_sold_date_sk, @@ -495,5 +495,5 @@ cast( cs_net_paid_inc_tax as double) as cs_net_paid_inc_tax, cast( cs_net_paid_inc_ship as double) as cs_net_paid_inc_ship, cast( cs_net_paid_inc_ship_tax as double) as cs_net_paid_inc_ship_tax, cast( cs_net_profit as double) as cs_net_profit -from dfs.`/drill/testdata/tpcds_sf1/json/catalog_sales`; +from dfs_test.`/drill/testdata/tpcds_sf1/json/catalog_sales`; diff --git a/framework/resources/Datasources/tpcds/createViewsParquet.sh b/framework/resources/Datasources/tpcds/createViewsParquet.sh index 63b49d0aa..7c25dc3b5 100755 --- a/framework/resources/Datasources/tpcds/createViewsParquet.sh +++ b/framework/resources/Datasources/tpcds/createViewsParquet.sh @@ -5,7 +5,7 @@ hadoop fs -test -d /drill/testdata/tpcds_sf1/parquet/views ;if [ `echo $?` -eq 1 if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.tpcds_sf1_parquet_views;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsParquet.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.tpcds_sf1_parquet_views;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsParquet.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u 
"jdbc:drill:schema=dfs.tpcds_sf1_parquet_views;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsParquet.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.tpcds_sf1_parquet_views;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsParquet.sql fi diff --git a/framework/resources/Datasources/tpcds/createViewsParquet.sql b/framework/resources/Datasources/tpcds/createViewsParquet.sql index c1b00471b..fb94b90ef 100755 --- a/framework/resources/Datasources/tpcds/createViewsParquet.sql +++ b/framework/resources/Datasources/tpcds/createViewsParquet.sql @@ -1,4 +1,4 @@ -use dfs.tpcds_sf1_parquet_views; +use dfs_test.tpcds_sf1_parquet_views; create or replace view customer as select cast(c_customer_sk as integer) as c_customer_sk, @@ -19,7 +19,7 @@ cast(c_birth_country as varchar(200)) as c_birth_country, cast(c_login as varchar(200)) as c_login, cast(c_email_address as varchar(200)) as c_email_address, cast(c_last_review_date as varchar(200)) as c_last_review_date -from dfs.`/drill/testdata/tpcds_sf1/parquet/customer`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/customer`; create or replace view customer_address as select cast(ca_address_sk as integer) as ca_address_sk, @@ -35,7 +35,7 @@ cast(ca_zip as varchar(200)) as ca_zip, cast(ca_country as varchar(200)) as ca_country, cast(ca_gmt_offset as integer) as ca_gmt_offset, cast(ca_location_type as varchar(200)) as ca_location_type -from dfs.`/drill/testdata/tpcds_sf1/parquet/customer_address`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/customer_address`; create or replace view customer_demographics as select cast( cd_demo_sk as integer) as cd_demo_sk, @@ -47,7 +47,7 @@ cast( cd_credit_rating as varchar(200)) as cd_credit_rating, cast( cd_dep_count as integer) as cd_dep_count, cast( cd_dep_employed_count as integer) as cd_dep_employed_count, cast( cd_dep_college_count as 
integer) as cd_dep_college_count -from dfs.`/drill/testdata/tpcds_sf1/parquet/customer_demographics`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/customer_demographics`; create or replace view household_demographics as select cast( hd_demo_sk as integer) as hd_demo_sk, @@ -55,7 +55,7 @@ cast( hd_income_band_sk as integer) as hd_income_band_sk, cast( hd_buy_potential as varchar(200)) as hd_buy_potential, cast( hd_dep_count as integer) as hd_dep_count, cast( hd_vehicle_count as integer) as hd_vehicle_count -from dfs.`/drill/testdata/tpcds_sf1/parquet/household_demographics`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/household_demographics`; create or replace view item as select cast( i_item_sk as integer) as i_item_sk, @@ -80,7 +80,7 @@ cast( i_units as varchar(200)) as i_units, cast( i_container as varchar(200)) as i_container, cast( i_manager_id as integer) as i_manager_id, cast( i_product_name as varchar(200)) as i_product_name -from dfs.`/drill/testdata/tpcds_sf1/parquet/item`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/item`; create or replace view promotion as select cast( p_promo_sk as integer) as p_promo_sk, @@ -102,7 +102,7 @@ cast( p_channel_demo as varchar(200)) as p_channel_demo, cast( p_channel_details as varchar(200)) as p_channel_details, cast( p_purpose as varchar(200)) as p_purpose, cast( p_discount_active as varchar(200)) as p_discount_active -from dfs.`/drill/testdata/tpcds_sf1/parquet/promotion`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/promotion`; create or replace view time_dim as select cast( t_time_sk as integer) as t_time_sk, @@ -115,7 +115,7 @@ cast( t_am_pm as varchar(200)) as t_am_pm, cast( t_shift as varchar(200)) as t_shift, cast( t_sub_shift as varchar(200)) as t_sub_shift , cast( t_meal_time as varchar(200)) as t_meal_time -from dfs.`/drill/testdata/tpcds_sf1/parquet/time_dim`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/time_dim`; create or replace view date_dim as select cast( d_date_sk as integer) 
as d_date_sk, @@ -146,7 +146,7 @@ cast( d_current_week as varchar(200)) as d_current_week, cast( d_current_month as varchar(200)) as d_current_month, cast( d_current_quarter as varchar(200)) as d_current_quarter, cast( d_current_year as varchar(200)) as d_current_year -from dfs.`/drill/testdata/tpcds_sf1/parquet/date_dim`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/date_dim`; create or replace view store as select cast( s_store_sk as integer) as s_store_sk, @@ -178,7 +178,7 @@ cast( s_zip as varchar(200)) as s_zip, cast( s_country as varchar(200)) as s_country, cast( s_gmt_offset as double) as s_gmt_offset, cast( s_tax_precentage as double) as s_tax_precentage -from dfs.`/drill/testdata/tpcds_sf1/parquet/store`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/store`; create or replace view store_sales as select cast( ss_sold_date_sk as integer) as ss_sold_date_sk, @@ -204,7 +204,7 @@ cast( ss_coupon_amt as double) as ss_coupon_amt, cast( ss_net_paid as double) as ss_net_paid, cast( ss_net_paid_inc_tax as double) as ss_net_paid_inc_tax, cast( ss_net_profit as double) as ss_net_profit -from dfs.`/drill/testdata/tpcds_sf1/parquet/store_sales`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/store_sales`; create or replace view warehouse as select cast( w_warehouse_sk as integer) as w_warehouse_sk, @@ -221,7 +221,7 @@ cast( w_state as varchar(200)) as w_state, cast( w_zip as varchar(200)) as w_zip, cast( w_country as varchar(200)) as w_country, cast( w_gmt_offset as double) as w_gmt_offset -from dfs.`/drill/testdata/tpcds_sf1/parquet/warehouse`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/warehouse`; create or replace view ship_mode as select cast( sm_ship_mode_sk as integer) as sm_ship_mode_sk, @@ -230,19 +230,19 @@ cast( sm_type as varchar(200)) as sm_type, cast( sm_code as varchar(200)) as sm_code, cast( sm_carrier as varchar(200)) as sm_carrier, cast( sm_contract as varchar(200)) as sm_contract -from 
dfs.`/drill/testdata/tpcds_sf1/parquet/ship_mode`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/ship_mode`; create or replace view reason as select cast( r_reason_sk as integer) as r_reason_sk, cast( r_reason_id as varchar(200)) as r_reason_id, cast( r_reason_desc as varchar(200)) as r_reason_desc -from dfs.`/drill/testdata/tpcds_sf1/parquet/reason`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/reason`; create or replace view income_band as select cast( ib_income_band_sk as integer) as ib_income_band_sk, cast( ib_lower_bound as integer) as ib_lower_bound, cast( ib_upper_bound as integer) as ib_upper_bound -from dfs.`/drill/testdata/tpcds_sf1/parquet/income_band`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/income_band`; create or replace view call_center as select cast( cc_call_center_sk as integer) as cc_call_center_sk, @@ -276,7 +276,7 @@ cast( cc_zip as varchar(200)) as cc_zip, cast( cc_country as varchar(200)) as cc_country, cast( cc_gmt_offset as double) as cc_gmt_offset, cast( cc_tax_percentage as double) as cc_tax_percentage -from dfs.`/drill/testdata/tpcds_sf1/parquet/call_center`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/call_center`; create or replace view web_site as select cast( web_site_sk as integer) as web_site_sk, @@ -305,7 +305,7 @@ cast( web_zip as varchar(200)) as web_zip, cast( web_country as varchar(200)) as web_country, cast( web_gmt_offset as double) as web_gmt_offset, cast( web_tax_percentage as double) as web_tax_percentage -from dfs.`/drill/testdata/tpcds_sf1/parquet/web_site`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/web_site`; create or replace view store_returns as select cast( sr_returned_date_sk as integer) as sr_returned_date_sk, @@ -328,7 +328,7 @@ cast( sr_refunded_cash as double) as sr_refunded_cash, cast( sr_reversed_charge as double) as sr_reversed_charge, cast( sr_store_credit as double) as sr_store_credit, cast( sr_net_loss as double) as sr_net_loss -from 
dfs.`/drill/testdata/tpcds_sf1/parquet/store_returns`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/store_returns`; create or replace view web_page as select cast( wp_web_page_sk as integer) as wp_web_page_sk, @@ -345,7 +345,7 @@ cast( wp_char_count as integer) as wp_char_count, cast( wp_link_count as integer) as wp_link_count, cast( wp_image_count as integer) as wp_image_count, cast( wp_max_ad_count as integer) as wp_max_ad_count -from dfs.`/drill/testdata/tpcds_sf1/parquet/web_page`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/web_page`; create or replace view catalog_page as select cast( cp_catalog_page_sk as integer) as cp_catalog_page_sk, @@ -357,14 +357,14 @@ cast( cp_catalog_number as integer) as cp_catalog_number, cast( cp_catalog_page_number as integer) as cp_catalog_page_number, cast( cp_description as varchar(200)) as cp_description, cast( cp_type as varchar(200)) as cp_type -from dfs.`/drill/testdata/tpcds_sf1/parquet/catalog_page`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/catalog_page`; create or replace view inventory as select cast( inv_date_sk as integer) as inv_date_sk, cast( inv_item_sk as integer) as inv_item_sk, cast( inv_warehouse_sk as integer) as inv_warehouse_sk, cast( inv_quantity_on_hand as integer) as inv_quantity_on_hand -from dfs.`/drill/testdata/tpcds_sf1/parquet/inventory`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/inventory`; create or replace view catalog_returns as select cast( cr_returned_date_sk as integer) as cr_returned_date_sk, @@ -394,7 +394,7 @@ cast( cr_refunded_cash as double) as cr_refunded_cash, cast( cr_reversed_charge as double) as cr_reversed_charge, cast( cr_store_credit as double) as cr_store_credit, cast( cr_net_loss as double) as cr_net_loss -from dfs.`/drill/testdata/tpcds_sf1/parquet/catalog_returns`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/catalog_returns`; create or replace view web_returns as select cast( wr_returned_date_sk as integer) as wr_returned_date_sk, @@ -421,7 
+421,7 @@ cast( wr_refunded_cash as double) as wr_refunded_cash, cast( wr_reversed_charge as double) as wr_reversed_charge, cast( wr_account_credit as double) as wr_account_credit, cast( wr_net_loss as double) as wr_net_loss -from dfs.`/drill/testdata/tpcds_sf1/parquet/web_returns`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/web_returns`; create or replace view web_sales as select cast( ws_sold_date_sk as integer) as ws_sold_date_sk, @@ -458,7 +458,7 @@ cast( ws_net_paid_inc_tax as double) as ws_net_paid_inc_tax, cast( ws_net_paid_inc_ship as double) as ws_net_paid_inc_ship, cast( ws_net_paid_inc_ship_tax as double) as ws_net_paid_inc_ship_tax, cast( ws_net_profit as double) as ws_net_profit -from dfs.`/drill/testdata/tpcds_sf1/parquet/web_sales`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/web_sales`; create or replace view catalog_sales as select cast( cs_sold_date_sk as integer) as cs_sold_date_sk, @@ -495,5 +495,5 @@ cast( cs_net_paid_inc_tax as double) as cs_net_paid_inc_tax, cast( cs_net_paid_inc_ship as double) as cs_net_paid_inc_ship, cast( cs_net_paid_inc_ship_tax as double) as cs_net_paid_inc_ship_tax, cast( cs_net_profit as double) as cs_net_profit -from dfs.`/drill/testdata/tpcds_sf1/parquet/catalog_sales`; +from dfs_test.`/drill/testdata/tpcds_sf1/parquet/catalog_sales`; diff --git a/framework/resources/Datasources/tpcds/createViewsParquet_sf100.sh b/framework/resources/Datasources/tpcds/createViewsParquet_sf100.sh index 724b64b29..532c98d17 100755 --- a/framework/resources/Datasources/tpcds/createViewsParquet_sf100.sh +++ b/framework/resources/Datasources/tpcds/createViewsParquet_sf100.sh @@ -5,7 +5,7 @@ hadoop fs -test -d /drill/testdata/tpcds_sf100/parquet/views ;if [ `echo $?` -eq if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.tpcds_sf100_parquet_views;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsParquet_sf100.sql + 
${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.tpcds_sf100_parquet_views;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsParquet_sf100.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.tpcds_sf100_parquet_views;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsParquet_sf100.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.tpcds_sf100_parquet_views;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsParquet_sf100.sql fi diff --git a/framework/resources/Datasources/tpcds/createViewsParquet_sf100.sql b/framework/resources/Datasources/tpcds/createViewsParquet_sf100.sql index 0182f0e6a..6a4239806 100755 --- a/framework/resources/Datasources/tpcds/createViewsParquet_sf100.sql +++ b/framework/resources/Datasources/tpcds/createViewsParquet_sf100.sql @@ -1,4 +1,4 @@ -use dfs.tpcds_sf100_parquet_views; +use dfs_test.tpcds_sf100_parquet_views; create or replace view customer as select cast(c_customer_sk as integer) as c_customer_sk, @@ -19,7 +19,7 @@ cast(c_birth_country as varchar(200)) as c_birth_country, cast(c_login as varchar(200)) as c_login, cast(c_email_address as varchar(200)) as c_email_address, cast(c_last_review_date as varchar(200)) as c_last_review_date -from dfs.`/drill/testdata/tpcds_sf100/parquet/customer`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/customer`; create or replace view customer_address as select cast(ca_address_sk as integer) as ca_address_sk, @@ -35,7 +35,7 @@ cast(ca_zip as varchar(200)) as ca_zip, cast(ca_country as varchar(200)) as ca_country, cast(ca_gmt_offset as integer) as ca_gmt_offset, cast(ca_location_type as varchar(200)) as ca_location_type -from dfs.`/drill/testdata/tpcds_sf100/parquet/customer_address`; +from 
dfs_test.`/drill/testdata/tpcds_sf100/parquet/customer_address`; create or replace view customer_demographics as select cast( cd_demo_sk as integer) as cd_demo_sk, @@ -47,7 +47,7 @@ cast( cd_credit_rating as varchar(200)) as cd_credit_rating, cast( cd_dep_count as integer) as cd_dep_count, cast( cd_dep_employed_count as integer) as cd_dep_employed_count, cast( cd_dep_college_count as integer) as cd_dep_college_count -from dfs.`/drill/testdata/tpcds_sf100/parquet/customer_demographics`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/customer_demographics`; create or replace view household_demographics as select cast( hd_demo_sk as integer) as hd_demo_sk, @@ -55,7 +55,7 @@ cast( hd_income_band_sk as integer) as hd_income_band_sk, cast( hd_buy_potential as varchar(200)) as hd_buy_potential, cast( hd_dep_count as integer) as hd_dep_count, cast( hd_vehicle_count as integer) as hd_vehicle_count -from dfs.`/drill/testdata/tpcds_sf100/parquet/household_demographics`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/household_demographics`; create or replace view item as select cast( i_item_sk as integer) as i_item_sk, @@ -80,7 +80,7 @@ cast( i_units as varchar(200)) as i_units, cast( i_container as varchar(200)) as i_container, cast( i_manager_id as integer) as i_manager_id, cast( i_product_name as varchar(200)) as i_product_name -from dfs.`/drill/testdata/tpcds_sf100/parquet/item`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/item`; create or replace view promotion as select cast( p_promo_sk as integer) as p_promo_sk, @@ -102,7 +102,7 @@ cast( p_channel_demo as varchar(200)) as p_channel_demo, cast( p_channel_details as varchar(200)) as p_channel_details, cast( p_purpose as varchar(200)) as p_purpose, cast( p_discount_active as varchar(200)) as p_discount_active -from dfs.`/drill/testdata/tpcds_sf100/parquet/promotion`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/promotion`; create or replace view time_dim as select cast( t_time_sk as integer) as 
t_time_sk, @@ -115,7 +115,7 @@ cast( t_am_pm as varchar(200)) as t_am_pm, cast( t_shift as varchar(200)) as t_shift, cast( t_sub_shift as varchar(200)) as t_sub_shift , cast( t_meal_time as varchar(200)) as t_meal_time -from dfs.`/drill/testdata/tpcds_sf100/parquet/time_dim`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/time_dim`; create or replace view date_dim as select cast( d_date_sk as integer) as d_date_sk, @@ -146,7 +146,7 @@ cast( d_current_week as varchar(200)) as d_current_week, cast( d_current_month as varchar(200)) as d_current_month, cast( d_current_quarter as varchar(200)) as d_current_quarter, cast( d_current_year as varchar(200)) as d_current_year -from dfs.`/drill/testdata/tpcds_sf100/parquet/date_dim`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/date_dim`; create or replace view store as select cast( s_store_sk as integer) as s_store_sk, @@ -178,7 +178,7 @@ cast( s_zip as varchar(200)) as s_zip, cast( s_country as varchar(200)) as s_country, cast( s_gmt_offset as double) as s_gmt_offset, cast( s_tax_precentage as double) as s_tax_precentage -from dfs.`/drill/testdata/tpcds_sf100/parquet/store`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/store`; create or replace view store_sales as select cast( ss_sold_date_sk as integer) as ss_sold_date_sk, @@ -204,7 +204,7 @@ cast( ss_coupon_amt as double) as ss_coupon_amt, cast( ss_net_paid as double) as ss_net_paid, cast( ss_net_paid_inc_tax as double) as ss_net_paid_inc_tax, cast( ss_net_profit as double) as ss_net_profit -from dfs.`/drill/testdata/tpcds_sf100/parquet/store_sales`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/store_sales`; create or replace view warehouse as select cast( w_warehouse_sk as integer) as w_warehouse_sk, @@ -221,7 +221,7 @@ cast( w_state as varchar(200)) as w_state, cast( w_zip as varchar(200)) as w_zip, cast( w_country as varchar(200)) as w_country, cast( w_gmt_offset as double) as w_gmt_offset -from 
dfs.`/drill/testdata/tpcds_sf100/parquet/warehouse`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/warehouse`; create or replace view ship_mode as select cast( sm_ship_mode_sk as integer) as sm_ship_mode_sk, @@ -230,19 +230,19 @@ cast( sm_type as varchar(200)) as sm_type, cast( sm_code as varchar(200)) as sm_code, cast( sm_carrier as varchar(200)) as sm_carrier, cast( sm_contract as varchar(200)) as sm_contract -from dfs.`/drill/testdata/tpcds_sf100/parquet/ship_mode`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/ship_mode`; create or replace view reason as select cast( r_reason_sk as integer) as r_reason_sk, cast( r_reason_id as varchar(200)) as r_reason_id, cast( r_reason_desc as varchar(200)) as r_reason_desc -from dfs.`/drill/testdata/tpcds_sf100/parquet/reason`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/reason`; create or replace view income_band as select cast( ib_income_band_sk as integer) as ib_income_band_sk, cast( ib_lower_bound as integer) as ib_lower_bound, cast( ib_upper_bound as integer) as ib_upper_bound -from dfs.`/drill/testdata/tpcds_sf100/parquet/income_band`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/income_band`; create or replace view call_center as select cast( cc_call_center_sk as integer) as cc_call_center_sk, @@ -276,7 +276,7 @@ cast( cc_zip as varchar(200)) as cc_zip, cast( cc_country as varchar(200)) as cc_country, cast( cc_gmt_offset as double) as cc_gmt_offset, cast( cc_tax_percentage as double) as cc_tax_percentage -from dfs.`/drill/testdata/tpcds_sf100/parquet/call_center`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/call_center`; create or replace view web_site as select cast( web_site_sk as integer) as web_site_sk, @@ -305,7 +305,7 @@ cast( web_zip as varchar(200)) as web_zip, cast( web_country as varchar(200)) as web_country, cast( web_gmt_offset as double) as web_gmt_offset, cast( web_tax_percentage as double) as web_tax_percentage -from dfs.`/drill/testdata/tpcds_sf100/parquet/web_site`; 
+from dfs_test.`/drill/testdata/tpcds_sf100/parquet/web_site`; create or replace view store_returns as select cast( sr_returned_date_sk as integer) as sr_returned_date_sk, @@ -328,7 +328,7 @@ cast( sr_refunded_cash as double) as sr_refunded_cash, cast( sr_reversed_charge as double) as sr_reversed_charge, cast( sr_store_credit as double) as sr_store_credit, cast( sr_net_loss as double) as sr_net_loss -from dfs.`/drill/testdata/tpcds_sf100/parquet/store_returns`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/store_returns`; create or replace view web_page as select cast( wp_web_page_sk as integer) as wp_web_page_sk, @@ -345,7 +345,7 @@ cast( wp_char_count as integer) as wp_char_count, cast( wp_link_count as integer) as wp_link_count, cast( wp_image_count as integer) as wp_image_count, cast( wp_max_ad_count as integer) as wp_max_ad_count -from dfs.`/drill/testdata/tpcds_sf100/parquet/web_page`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/web_page`; create or replace view catalog_page as select cast( cp_catalog_page_sk as integer) as cp_catalog_page_sk, @@ -357,14 +357,14 @@ cast( cp_catalog_number as integer) as cp_catalog_number, cast( cp_catalog_page_number as integer) as cp_catalog_page_number, cast( cp_description as varchar(200)) as cp_description, cast( cp_type as varchar(200)) as cp_type -from dfs.`/drill/testdata/tpcds_sf100/parquet/catalog_page`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/catalog_page`; create or replace view inventory as select cast( inv_date_sk as integer) as inv_date_sk, cast( inv_item_sk as integer) as inv_item_sk, cast( inv_warehouse_sk as integer) as inv_warehouse_sk, cast( inv_quantity_on_hand as integer) as inv_quantity_on_hand -from dfs.`/drill/testdata/tpcds_sf100/parquet/inventory`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/inventory`; create or replace view catalog_returns as select cast( cr_returned_date_sk as integer) as cr_returned_date_sk, @@ -394,7 +394,7 @@ cast( cr_refunded_cash as double) 
as cr_refunded_cash, cast( cr_reversed_charge as double) as cr_reversed_charge, cast( cr_store_credit as double) as cr_store_credit, cast( cr_net_loss as double) as cr_net_loss -from dfs.`/drill/testdata/tpcds_sf100/parquet/catalog_returns`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/catalog_returns`; create or replace view web_returns as select cast( wr_returned_date_sk as integer) as wr_returned_date_sk, @@ -421,7 +421,7 @@ cast( wr_refunded_cash as double) as wr_refunded_cash, cast( wr_reversed_charge as double) as wr_reversed_charge, cast( wr_account_credit as double) as wr_account_credit, cast( wr_net_loss as double) as wr_net_loss -from dfs.`/drill/testdata/tpcds_sf100/parquet/web_returns`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/web_returns`; create or replace view web_sales as select cast( ws_sold_date_sk as integer) as ws_sold_date_sk, @@ -458,7 +458,7 @@ cast( ws_net_paid_inc_tax as double) as ws_net_paid_inc_tax, cast( ws_net_paid_inc_ship as double) as ws_net_paid_inc_ship, cast( ws_net_paid_inc_ship_tax as double) as ws_net_paid_inc_ship_tax, cast( ws_net_profit as double) as ws_net_profit -from dfs.`/drill/testdata/tpcds_sf100/parquet/web_sales`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/web_sales`; create or replace view catalog_sales as select cast( cs_sold_date_sk as integer) as cs_sold_date_sk, @@ -495,5 +495,5 @@ cast( cs_net_paid_inc_tax as double) as cs_net_paid_inc_tax, cast( cs_net_paid_inc_ship as double) as cs_net_paid_inc_ship, cast( cs_net_paid_inc_ship_tax as double) as cs_net_paid_inc_ship_tax, cast( cs_net_profit as double) as cs_net_profit -from dfs.`/drill/testdata/tpcds_sf100/parquet/catalog_sales`; +from dfs_test.`/drill/testdata/tpcds_sf100/parquet/catalog_sales`; diff --git a/framework/resources/Datasources/tpcds/createViewsText.sh b/framework/resources/Datasources/tpcds/createViewsText.sh index ffe94508c..f40b45d06 100755 --- a/framework/resources/Datasources/tpcds/createViewsText.sh +++ 
b/framework/resources/Datasources/tpcds/createViewsText.sh @@ -4,7 +4,7 @@ hadoop fs -test -d /drill/testdata/tpcds_sf1/text/views ;if [ `echo $?` -eq 1 ]; if [ -z "$PASSWORD" ] then - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.tpcds_sf1_text_views;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsText.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.tpcds_sf1_text_views;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsText.sql else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.tpcds_sf1_text_views;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsText.sql + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.tpcds_sf1_text_views;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/tpcds/createViewsText.sql fi diff --git a/framework/resources/Datasources/tpcds/createViewsText.sql b/framework/resources/Datasources/tpcds/createViewsText.sql index 65b222694..fdfc04b0a 100755 --- a/framework/resources/Datasources/tpcds/createViewsText.sql +++ b/framework/resources/Datasources/tpcds/createViewsText.sql @@ -1,4 +1,4 @@ -use dfs.tpcds_sf1_text_views; +use dfs_test.tpcds_sf1_text_views; create or replace view customer as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as c_customer_sk, @@ -19,7 +19,7 @@ create or replace view customer as select case when (columns[15]='') then cast(null as varchar(200)) else cast(columns[15] as varchar(200)) end as c_login, case when (columns[16]='') then cast(null as varchar(200)) else cast(columns[16] as varchar(200)) end as c_email_address, case when (columns[17]='') then cast(null as varchar(200)) else cast(columns[17] as varchar(200)) end as c_last_review_date -from 
dfs.`/drill/testdata/tpcds_sf1/text/customer`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/customer`; create or replace view customer_address as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as ca_address_sk, @@ -35,7 +35,7 @@ case when (columns[9]='') then cast(null as varchar(200)) else cast(columns[9] a case when (columns[10]='') then cast(null as varchar(200)) else cast(columns[10] as varchar(200)) end as ca_country, case when (columns[11]='') then cast(null as integer) else cast(columns[11] as integer) end as ca_gmt_offset, case when (columns[12]='') then cast(null as varchar(200)) else cast(columns[12] as varchar(200)) end as ca_location_type -from dfs.`/drill/testdata/tpcds_sf1/text/customer_address`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/customer_address`; create or replace view customer_demographics as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as cd_demo_sk, @@ -47,7 +47,7 @@ case when (columns[5]='') then cast(null as varchar(200)) else cast(columns[5] a case when (columns[6]='') then cast(null as integer) else cast(columns[6] as integer) end as cd_dep_count, case when (columns[7]='') then cast(null as integer) else cast(columns[7] as integer) end as cd_dep_employed_count, case when (columns[8]='') then cast(null as integer) else cast(columns[8] as integer) end as cd_dep_college_count -from dfs.`/drill/testdata/tpcds_sf1/text/customer_demographics`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/customer_demographics`; create or replace view household_demographics as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as hd_demo_sk, @@ -55,7 +55,7 @@ case when (columns[1]='') then cast(null as integer) else cast(columns[1] as int case when (columns[2]='') then cast(null as varchar(200)) else cast(columns[2] as varchar(200)) end as hd_buy_potential, case when (columns[3]='') then cast(null as 
integer) else cast(columns[3] as integer) end as hd_dep_count, case when (columns[4]='') then cast(null as integer) else cast(columns[4] as integer) end as hd_vehicle_count -from dfs.`/drill/testdata/tpcds_sf1/text/household_demographics`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/household_demographics`; create or replace view item as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as i_item_sk, @@ -80,7 +80,7 @@ case when (columns[18]='') then cast(null as varchar(200)) else cast(columns[18] case when (columns[19]='') then cast(null as varchar(200)) else cast(columns[19] as varchar(200)) end as i_container, case when (columns[20]='') then cast(null as integer) else cast(columns[20] as integer) end as i_manager_id, case when (columns[21]='') then cast(null as varchar(200)) else cast(columns[21] as varchar(200)) end as i_product_name -from dfs.`/drill/testdata/tpcds_sf1/text/item`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/item`; create or replace view promotion as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as p_promo_sk, @@ -102,7 +102,7 @@ case when (columns[15]='') then cast(null as varchar(200)) else cast(columns[15] case when (columns[16]='') then cast(null as varchar(200)) else cast(columns[16] as varchar(200)) end as p_channel_details, case when (columns[17]='') then cast(null as varchar(200)) else cast(columns[17] as varchar(200)) end as p_purpose, case when (columns[18]='') then cast(null as varchar(200)) else cast(columns[18] as varchar(200)) end as p_discount_active -from dfs.`/drill/testdata/tpcds_sf1/text/promotion`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/promotion`; create or replace view time_dim as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as t_time_sk, @@ -115,7 +115,7 @@ case when (columns[6]='') then cast(null as varchar(200)) else cast(columns[6] a case when 
(columns[7]='') then cast(null as varchar(200)) else cast(columns[7] as varchar(200)) end as t_shift, case when (columns[8]='') then cast(null as varchar(200)) else cast(columns[8] as varchar(200)) end as t_sub_shift , case when (columns[9]='') then cast(null as varchar(200)) else cast(columns[9] as varchar(200)) end as t_meal_time -from dfs.`/drill/testdata/tpcds_sf1/text/time_dim`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/time_dim`; create or replace view date_dim as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as d_date_sk, @@ -146,7 +146,7 @@ case when (columns[24]='') then cast(null as varchar(200)) else cast(columns[24] case when (columns[25]='') then cast(null as varchar(200)) else cast(columns[25] as varchar(200)) end as d_current_month, case when (columns[26]='') then cast(null as varchar(200)) else cast(columns[26] as varchar(200)) end as d_current_quarter, case when (columns[27]='') then cast(null as varchar(200)) else cast(columns[27] as varchar(200)) end as d_current_year -from dfs.`/drill/testdata/tpcds_sf1/text/date_dim`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/date_dim`; create or replace view store as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as s_store_sk, @@ -178,7 +178,7 @@ case when (columns[25]='') then cast(null as varchar(200)) else cast(columns[25] case when (columns[26]='') then cast(null as varchar(200)) else cast(columns[26] as varchar(200)) end as s_country, case when (columns[27]='') then cast(null as double) else cast(columns[27] as double) end as s_gmt_offset, case when (columns[28]='') then cast(null as double) else cast(columns[28] as double) end as s_tax_precentage -from dfs.`/drill/testdata/tpcds_sf1/text/store`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/store`; create or replace view store_sales as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as 
ss_sold_date_sk, @@ -204,7 +204,7 @@ case when (columns[19]='') then cast(null as double) else cast(columns[19] as do case when (columns[20]='') then cast(null as double) else cast(columns[20] as double) end as ss_net_paid, case when (columns[21]='') then cast(null as double) else cast(columns[21] as double) end as ss_net_paid_inc_tax, case when (columns[22]='') then cast(null as double) else cast(columns[22] as double) end as ss_net_profit -from dfs.`/drill/testdata/tpcds_sf1/text/store_sales`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/store_sales`; create or replace view warehouse as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as w_warehouse_sk, @@ -221,7 +221,7 @@ create or replace view warehouse as select case when (columns[11]='') then cast(null as varchar(200)) else cast(columns[11] as varchar(200)) end as w_zip, case when (columns[12]='') then cast(null as varchar(200)) else cast(columns[12] as varchar(200)) end as w_country, case when (columns[13]='') then cast(null as double) else cast(columns[13] as double) end as w_gmt_offset -from dfs.`/drill/testdata/tpcds_sf1/text/warehouse`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/warehouse`; create or replace view ship_mode as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as sm_ship_mode_sk, @@ -230,19 +230,19 @@ create or replace view ship_mode as select case when (columns[3]='') then cast(null as varchar(200)) else cast(columns[3] as varchar(200)) end as sm_code, case when (columns[4]='') then cast(null as varchar(200)) else cast(columns[4] as varchar(200)) end as sm_carrier, case when (columns[5]='') then cast(null as varchar(200)) else cast(columns[5] as varchar(200)) end as sm_contract -from dfs.`/drill/testdata/tpcds_sf1/text/ship_mode`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/ship_mode`; create or replace view reason as select case when (columns[0]='') then cast(null as integer) else 
cast(columns[0] as integer) end as r_reason_sk, case when (columns[1]='') then cast(null as varchar(200)) else cast(columns[1] as varchar(200)) end as r_reason_id, case when (columns[2]='') then cast(null as varchar(200)) else cast(columns[2] as varchar(200)) end as r_reason_desc -from dfs.`/drill/testdata/tpcds_sf1/text/reason`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/reason`; create or replace view income_band as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as ib_income_band_sk, case when (columns[1]='') then cast(null as integer) else cast(columns[1] as integer) end as ib_lower_bound, case when (columns[2]='') then cast(null as integer) else cast(columns[2] as integer) end as ib_upper_bound -from dfs.`/drill/testdata/tpcds_sf1/text/income_band`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/income_band`; create or replace view call_center as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as cc_call_center_sk, @@ -276,7 +276,7 @@ create or replace view call_center as select case when (columns[28]='') then cast(null as varchar(200)) else cast(columns[28] as varchar(200)) end as cc_country, case when (columns[29]='') then cast(null as double) else cast(columns[29] as double) end as cc_gmt_offset, case when (columns[30]='') then cast(null as double) else cast(columns[30] as double) end as cc_tax_percentage -from dfs.`/drill/testdata/tpcds_sf1/text/call_center`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/call_center`; create or replace view web_site as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as web_site_sk, @@ -305,7 +305,7 @@ create or replace view web_site as select case when (columns[23]='') then cast(null as varchar(200)) else cast(columns[23] as varchar(200)) end as web_country, case when (columns[24]='') then cast(null as double) else cast(columns[24] as double) end as web_gmt_offset, case 
when (columns[25]='') then cast(null as double) else cast(columns[25] as double) end as web_tax_percentage -from dfs.`/drill/testdata/tpcds_sf1/text/web_site`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/web_site`; create or replace view store_returns as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as sr_returned_date_sk, @@ -328,7 +328,7 @@ create or replace view store_returns as select case when (columns[17]='') then cast(null as double) else cast(columns[17] as double) end as sr_reversed_charge, case when (columns[18]='') then cast(null as double) else cast(columns[18] as double) end as sr_store_credit, case when (columns[19]='') then cast(null as double) else cast(columns[19] as double) end as sr_net_loss -from dfs.`/drill/testdata/tpcds_sf1/text/store_returns`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/store_returns`; create or replace view web_page as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as wp_web_page_sk, @@ -345,7 +345,7 @@ create or replace view web_page as select case when (columns[11]='') then cast(null as integer) else cast(columns[11] as integer) end as wp_link_count, case when (columns[12]='') then cast(null as integer) else cast(columns[12] as integer) end as wp_image_count, case when (columns[13]='') then cast(null as integer) else cast(columns[13] as integer) end as wp_max_ad_count -from dfs.`/drill/testdata/tpcds_sf1/text/web_page`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/web_page`; create or replace view catalog_page as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as cp_catalog_page_sk, @@ -357,14 +357,14 @@ create or replace view catalog_page as select case when (columns[6]='') then cast(null as integer) else cast(columns[6] as integer) end as cp_catalog_page_number, case when (columns[7]='') then cast(null as varchar(200)) else cast(columns[7] as varchar(200)) end as 
cp_description, case when (columns[8]='') then cast(null as varchar(200)) else cast(columns[8] as varchar(200)) end as cp_type -from dfs.`/drill/testdata/tpcds_sf1/text/catalog_page`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/catalog_page`; create or replace view inventory as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as inv_date_sk, case when (columns[1]='') then cast(null as integer) else cast(columns[1] as integer) end as inv_item_sk, case when (columns[2]='') then cast(null as integer) else cast(columns[2] as integer) end as inv_warehouse_sk, case when (columns[3]='') then cast(null as integer) else cast(columns[3] as integer) end as inv_quantity_on_hand -from dfs.`/drill/testdata/tpcds_sf1/text/inventory`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/inventory`; create or replace view catalog_returns as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as cr_returned_date_sk, @@ -394,7 +394,7 @@ create or replace view catalog_returns as select case when (columns[24]='') then cast(null as double) else cast(columns[24] as double) end as cr_reversed_charge, case when (columns[25]='') then cast(null as double) else cast(columns[25] as double) end as cr_store_credit, case when (columns[26]='') then cast(null as double) else cast(columns[26] as double) end as cr_net_loss -from dfs.`/drill/testdata/tpcds_sf1/text/catalog_returns`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/catalog_returns`; create or replace view web_returns as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as wr_returned_date_sk, @@ -421,7 +421,7 @@ create or replace view web_returns as select case when (columns[21]='') then cast(null as double) else cast(columns[21] as double) end as wr_reversed_charge, case when (columns[22]='') then cast(null as double) else cast(columns[22] as double) end as wr_account_credit, case when (columns[23]='') 
then cast(null as double) else cast(columns[23] as double) end as wr_net_loss -from dfs.`/drill/testdata/tpcds_sf1/text/web_returns`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/web_returns`; create or replace view web_sales as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as ws_sold_date_sk, @@ -458,7 +458,7 @@ create or replace view web_sales as select case when (columns[31]='') then cast(null as double) else cast(columns[31] as double) end as ws_net_paid_inc_ship, case when (columns[32]='') then cast(null as double) else cast(columns[32] as double) end as ws_net_paid_inc_ship_tax, case when (columns[33]='') then cast(null as double) else cast(columns[33] as double) end as ws_net_profit -from dfs.`/drill/testdata/tpcds_sf1/text/web_sales`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/web_sales`; create or replace view catalog_sales as select case when (columns[0]='') then cast(null as integer) else cast(columns[0] as integer) end as cs_sold_date_sk, @@ -495,5 +495,5 @@ create or replace view catalog_sales as select case when (columns[31]='') then cast(null as double) else cast(columns[31] as double) end as cs_net_paid_inc_ship, case when (columns[32]='') then cast(null as double) else cast(columns[32] as double) end as cs_net_paid_inc_ship_tax, case when (columns[33]='') then cast(null as double) else cast(columns[33] as double) end as cs_net_profit -from dfs.`/drill/testdata/tpcds_sf1/text/catalog_sales`; +from dfs_test.`/drill/testdata/tpcds_sf1/text/catalog_sales`; diff --git a/framework/resources/Datasources/tpcds/refresh_metadata_tpcds.sh b/framework/resources/Datasources/tpcds/refresh_metadata_tpcds.sh index 496d1cec9..c6a605b9e 100755 --- a/framework/resources/Datasources/tpcds/refresh_metadata_tpcds.sh +++ b/framework/resources/Datasources/tpcds/refresh_metadata_tpcds.sh @@ -5,9 +5,9 @@ ${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/delete_cache.sh "/drill/test if [ -z "$PASSWORD" ] then - 
${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_tpch.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_tpch.ddl else - ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_tpch.ddl + ${DRILL_HOME}/bin/sqlline -n ${USERNAME} -p ${PASSWORD} -u "jdbc:drill:schema=dfs_test.$1;drillbit=${DRILL_STORAGE_PLUGIN_SERVER}" --run=${DRILL_TEST_DATA_DIR}/Datasources/metadata_caching/refresh_metadata_tpch.ddl fi # it seems that sqlline does not exit, if one of the queries failed. diff --git a/framework/resources/Functional/aggregates/aggregate/aggregate.json b/framework/resources/Functional/aggregates/aggregate/aggregate.json index 27b63a674..61b10c447 100644 --- a/framework/resources/Functional/aggregates/aggregate/aggregate.json +++ b/framework/resources/Functional/aggregates/aggregate/aggregate.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirNumerical", + "schema": "dfs_test.drillTestDirNumerical", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/aggregates/aggregation/bugs/bugs.json b/framework/resources/Functional/aggregates/aggregation/bugs/bugs.json index d830df0e1..84a639214 100644 --- a/framework/resources/Functional/aggregates/aggregation/bugs/bugs.json +++ b/framework/resources/Functional/aggregates/aggregation/bugs/bugs.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git 
a/framework/resources/Functional/aggregates/aggregation/count_distinct/count_distinct.json b/framework/resources/Functional/aggregates/aggregation/count_distinct/count_distinct.json index 4d26a3a3e..8ebd0975b 100644 --- a/framework/resources/Functional/aggregates/aggregation/count_distinct/count_distinct.json +++ b/framework/resources/Functional/aggregates/aggregation/count_distinct/count_distinct.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/aggregates/aggregation/group_by_case/case.json b/framework/resources/Functional/aggregates/aggregation/group_by_case/case.json index b7c9d7961..3ea319516 100644 --- a/framework/resources/Functional/aggregates/aggregation/group_by_case/case.json +++ b/framework/resources/Functional/aggregates/aggregation/group_by_case/case.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/aggregates/aggregation/group_by_expression/group_by_expression.json b/framework/resources/Functional/aggregates/aggregation/group_by_expression/group_by_expression.json index fbc2d53e4..1423bc7a0 100644 --- a/framework/resources/Functional/aggregates/aggregation/group_by_expression/group_by_expression.json +++ b/framework/resources/Functional/aggregates/aggregation/group_by_expression/group_by_expression.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/aggregates/aggregation/multicolumn/multicolumn.json 
b/framework/resources/Functional/aggregates/aggregation/multicolumn/multicolumn.json index 6243bd5a8..1db6d3ca8 100644 --- a/framework/resources/Functional/aggregates/aggregation/multicolumn/multicolumn.json +++ b/framework/resources/Functional/aggregates/aggregation/multicolumn/multicolumn.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/aggregates/aggregation/sanity/sanity.json b/framework/resources/Functional/aggregates/aggregation/sanity/sanity.json index 5ac633bd6..5fd8e6883 100644 --- a/framework/resources/Functional/aggregates/aggregation/sanity/sanity.json +++ b/framework/resources/Functional/aggregates/aggregation/sanity/sanity.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/aggregates/aggregation/scalar/scalar.json b/framework/resources/Functional/aggregates/aggregation/scalar/scalar.json index ecfcee2b7..694d1ecbb 100644 --- a/framework/resources/Functional/aggregates/aggregation/scalar/scalar.json +++ b/framework/resources/Functional/aggregates/aggregation/scalar/scalar.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/aggregates/tpcds_variants/parquet/aggregate.json b/framework/resources/Functional/aggregates/tpcds_variants/parquet/aggregate.json index 36c645cb3..f64316fcf 100644 --- a/framework/resources/Functional/aggregates/tpcds_variants/parquet/aggregate.json +++ b/framework/resources/Functional/aggregates/tpcds_variants/parquet/aggregate.json @@ -9,7 +9,7 @@ 
"matrices": [ { "query-file": ".*.q", - "schema": "dfs.tpcds_sf1_parquet", + "schema": "dfs_test.tpcds_sf1_parquet", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/aggregates/tpcds_variants/text/aggregate.json b/framework/resources/Functional/aggregates/tpcds_variants/text/aggregate.json index 25334dbb2..c94b2d075 100644 --- a/framework/resources/Functional/aggregates/tpcds_variants/text/aggregate.json +++ b/framework/resources/Functional/aggregates/tpcds_variants/text/aggregate.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.tpcds_sf1_text", + "schema": "dfs_test.tpcds_sf1_text", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/amplab/amplab.json b/framework/resources/Functional/amplab/amplab.json index b4da60936..d7f39e37c 100755 --- a/framework/resources/Functional/amplab/amplab.json +++ b/framework/resources/Functional/amplab/amplab.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirAmplab", + "schema": "dfs_test.drillTestDirAmplab", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/case_expr/casexpr.json b/framework/resources/Functional/case_expr/casexpr.json index b52628e50..19480b5b1 100644 --- a/framework/resources/Functional/case_expr/casexpr.json +++ b/framework/resources/Functional/case_expr/casexpr.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.Join", + "schema": "dfs_test.Join", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/complex/json/complex.json b/framework/resources/Functional/complex/json/complex.json index 116bc914d..85d011422 100644 --- a/framework/resources/Functional/complex/json/complex.json +++ b/framework/resources/Functional/complex/json/complex.json @@ -9,7 +9,7 @@ 
"matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirComplexJson", + "schema": "dfs_test.drillTestDirComplexJson", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/complex/json/drill-2879.json b/framework/resources/Functional/complex/json/drill-2879.json index 553571e11..dcf51eee1 100644 --- a/framework/resources/Functional/complex/json/drill-2879.json +++ b/framework/resources/Functional/complex/json/drill-2879.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": "drill-2879.sql", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": "drill-2879.res", "verification-type": [ diff --git a/framework/resources/Functional/complex/json/drill-3537a.json b/framework/resources/Functional/complex/json/drill-3537a.json index 777b14d9f..7d562f392 100644 --- a/framework/resources/Functional/complex/json/drill-3537a.json +++ b/framework/resources/Functional/complex/json/drill-3537a.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": "drill-3537a.sql", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": "drill-3537a.res", "verification-type": [ diff --git a/framework/resources/Functional/complex/json/drill-3537b.json b/framework/resources/Functional/complex/json/drill-3537b.json index bd921a8cd..83e46fd1f 100644 --- a/framework/resources/Functional/complex/json/drill-3537b.json +++ b/framework/resources/Functional/complex/json/drill-3537b.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": "drill-3537b.sql", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": "drill-3537b.res", "verification-type": [ diff --git a/framework/resources/Functional/complex/json/drill-4180.json b/framework/resources/Functional/complex/json/drill-4180.json index 135fd7afb..d5acef655 100644 --- a/framework/resources/Functional/complex/json/drill-4180.json +++ 
b/framework/resources/Functional/complex/json/drill-4180.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": "drill-4180.sql", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": "drill-4180.res", "verification-type": [ diff --git a/framework/resources/Functional/complex/json/drill-4479a.json b/framework/resources/Functional/complex/json/drill-4479a.json index 0504845ba..5874d668d 100644 --- a/framework/resources/Functional/complex/json/drill-4479a.json +++ b/framework/resources/Functional/complex/json/drill-4479a.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": "drill-4479a.sql", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": "drill-4479a.res", "verification-type": [ diff --git a/framework/resources/Functional/complex/json/drill-4479b.json b/framework/resources/Functional/complex/json/drill-4479b.json index 90f7be59f..de6efe519 100644 --- a/framework/resources/Functional/complex/json/drill-4479b.json +++ b/framework/resources/Functional/complex/json/drill-4479b.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": "drill-4479b.sql", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": "drill-4479b.res", "verification-type": [ diff --git a/framework/resources/Functional/complex/json/drill-4664.json b/framework/resources/Functional/complex/json/drill-4664.json index d58a8917c..6df300ce4 100644 --- a/framework/resources/Functional/complex/json/drill-4664.json +++ b/framework/resources/Functional/complex/json/drill-4664.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": "drill-4664.sql", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": "drill-4664.res", "verification-type": [ diff --git a/framework/resources/Functional/complex/parquet/complex.json b/framework/resources/Functional/complex/parquet/complex.json index 7f55eac4f..7c7780f32 
100644 --- a/framework/resources/Functional/complex/parquet/complex.json +++ b/framework/resources/Functional/complex/parquet/complex.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirComplexParquet", + "schema": "dfs_test.drillTestDirComplexParquet", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/convert/convert.json b/framework/resources/Functional/convert/convert.json index 78f104bb7..06d329a5b 100644 --- a/framework/resources/Functional/convert/convert.json +++ b/framework/resources/Functional/convert/convert.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirConvert", + "schema": "dfs_test.drillTestDirConvert", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/cross-sources/cross-sources.json b/framework/resources/Functional/cross-sources/cross-sources.json index 21a3ec807..259105494 100644 --- a/framework/resources/Functional/cross-sources/cross-sources.json +++ b/framework/resources/Functional/cross-sources/cross-sources.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/cross-sources/merge-join/mj_nullable-json-hive-join.q b/framework/resources/Functional/cross-sources/merge-join/mj_nullable-json-hive-join.q index e5c558355..dcd4a42cc 100644 --- a/framework/resources/Functional/cross-sources/merge-join/mj_nullable-json-hive-join.q +++ b/framework/resources/Functional/cross-sources/merge-join/mj_nullable-json-hive-join.q @@ -2,7 +2,7 @@ alter session set `planner.enable_hashjoin` = false; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, 
p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.json` p +from dfs_test.`cross-sources`.`fewtypes_null.json` p inner join hive.crosssources.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/cross-sources/merge-join/mj_nullable-json-text-join.q b/framework/resources/Functional/cross-sources/merge-join/mj_nullable-json-text-join.q index b098fbc02..a7741843f 100644 --- a/framework/resources/Functional/cross-sources/merge-join/mj_nullable-json-text-join.q +++ b/framework/resources/Functional/cross-sources/merge-join/mj_nullable-json-text-join.q @@ -2,7 +2,7 @@ alter session set `planner.enable_hashjoin` = false; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, cast(o.float_col as float), o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, cast(p.float_col as float), p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.json` p +from dfs_test.`cross-sources`.`fewtypes_null.json` p inner join ( select cast(case when columns[0] = 'null' then NULL else columns[0] end as int) int_col, @@ -15,7 +15,7 @@ select cast(case when columns[7] = 'null' then NULL else columns[7] end as float) float_col, cast(case when columns[8] = 'null' then NULL else columns[8] end as double) double_col, cast(case when columns[9] = 'null' then NULL else columns[9] end as boolean) bool_col -from dfs.`cross-sources`.`fewtypes_null.tbl`) o +from dfs_test.`cross-sources`.`fewtypes_null.tbl`) o on p.int_col=o.int_col and p.bigint_col = o.bigint_col and cast(p.date_col as date) = cast(o.date_col as date) diff --git a/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-hive-join.q b/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-hive-join.q index af0edee97..bfb41696a 
100644 --- a/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-hive-join.q +++ b/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-hive-join.q @@ -2,7 +2,7 @@ alter session set `planner.enable_hashjoin` = false; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p inner join hive.crosssources.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-hive-leftjoin.q b/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-hive-leftjoin.q index 3375634e9..a490ee1d5 100644 --- a/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-hive-leftjoin.q +++ b/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-hive-leftjoin.q @@ -2,7 +2,7 @@ alter session set `planner.enable_hashjoin` = false; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p left join hive.crosssources.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-json-join.q b/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-json-join.q index 10aac7373..50c50fa08 100644 --- 
a/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-json-join.q +++ b/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-json-join.q @@ -2,8 +2,8 @@ alter session set `planner.enable_hashjoin` = false; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p -inner join dfs.`cross-sources`.`fewtypes_null.json` o +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p +inner join dfs_test.`cross-sources`.`fewtypes_null.json` o on p.int_col=o.int_col and p.bigint_col = o.bigint_col and p.date_col = cast(o.date_col as date) diff --git a/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-text-join.q b/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-text-join.q index 3ed8d06b9..5640dd2c9 100644 --- a/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-text-join.q +++ b/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-text-join.q @@ -2,7 +2,7 @@ alter session set `planner.enable_hashjoin` = false; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p inner join ( select cast(case when columns[0] = 'null' then NULL else columns[0] end as int) int_col, @@ -15,7 +15,7 @@ select cast(case when columns[7] = 'null' then NULL else columns[7] end as float) float_col, cast(case when columns[8] = 'null' then NULL else columns[8] end 
as double) double_col, cast(case when columns[9] = 'null' then NULL else columns[9] end as boolean) bool_col -from dfs.`cross-sources`.`fewtypes_null.tbl`) o +from dfs_test.`cross-sources`.`fewtypes_null.tbl`) o on p.int_col=o.int_col and p.bigint_col = o.bigint_col and p.date_col = cast(o.date_col as date) diff --git a/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-text-view-join.q b/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-text-view-join.q index 3f4d8851e..7f17be0d2 100644 --- a/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-text-view-join.q +++ b/framework/resources/Functional/cross-sources/merge-join/mj_nullable-parquet-text-view-join.q @@ -1,5 +1,5 @@ alter session set `planner.enable_hashjoin` = false; -create or replace view dfs.`cross-sources`.fewtypes_null_view1 as +create or replace view dfs_test.`cross-sources`.fewtypes_null_view1 as select cast(case when columns[0] = 'null' then NULL else columns[0] end as int) int_col, cast(case when columns[1] = 'null' then NULL else columns[1] end as bigint) bigint_col, @@ -11,13 +11,13 @@ select cast(case when columns[7] = 'null' then NULL else columns[7] end as float) float_col, cast(case when columns[8] = 'null' then NULL else columns[8] end as double) double_col, cast(case when columns[9] = 'null' then NULL else columns[9] end as boolean) bool_col -from dfs.`cross-sources`.`fewtypes_null.tbl`; +from dfs_test.`cross-sources`.`fewtypes_null.tbl`; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p -inner join dfs.`cross-sources`.fewtypes_null_view1 o +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p +inner join 
dfs_test.`cross-sources`.fewtypes_null_view1 o on p.int_col=o.int_col and p.bigint_col = o.bigint_col and p.date_col = cast(o.date_col as date) @@ -29,5 +29,5 @@ inner join dfs.`cross-sources`.fewtypes_null_view1 o and p.double_col = o.double_col and p.bool_col = o.bool_col; -drop view dfs.`cross-sources`.fewtypes_null_view1; +drop view dfs_test.`cross-sources`.fewtypes_null_view1; alter session set `planner.enable_hashjoin` = true; diff --git a/framework/resources/Functional/cross-sources/merge-join/mj_parquet-hive-join.q b/framework/resources/Functional/cross-sources/merge-join/mj_parquet-hive-join.q index f9b293829..32613ad25 100644 --- a/framework/resources/Functional/cross-sources/merge-join/mj_parquet-hive-join.q +++ b/framework/resources/Functional/cross-sources/merge-join/mj_parquet-hive-join.q @@ -2,7 +2,7 @@ alter session set `planner.enable_hashjoin` = false; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p inner join hive.crosssources.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/cross-sources/merge-join/mj_parquet-json-join.q b/framework/resources/Functional/cross-sources/merge-join/mj_parquet-json-join.q index b6298b986..77972a39d 100644 --- a/framework/resources/Functional/cross-sources/merge-join/mj_parquet-json-join.q +++ b/framework/resources/Functional/cross-sources/merge-join/mj_parquet-json-join.q @@ -2,8 +2,8 @@ alter session set `planner.enable_hashjoin` = false; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, 
p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p -inner join dfs.`cross-sources`.`fewtypes_null.json` o +from dfs_test.`cross-sources`.`fewtypes.parquet` p +inner join dfs_test.`cross-sources`.`fewtypes_null.json` o on p.int_col=o.int_col and p.bigint_col = o.bigint_col and p.date_col = cast(o.date_col as date) diff --git a/framework/resources/Functional/cross-sources/merge-join/mj_parquet-text-join.q b/framework/resources/Functional/cross-sources/merge-join/mj_parquet-text-join.q index 08ac532b6..853019983 100644 --- a/framework/resources/Functional/cross-sources/merge-join/mj_parquet-text-join.q +++ b/framework/resources/Functional/cross-sources/merge-join/mj_parquet-text-join.q @@ -2,7 +2,7 @@ alter session set `planner.enable_hashjoin` = false; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p inner join ( select cast(case when columns[0] = 'null' then NULL else columns[0] end as int) int_col, @@ -15,7 +15,7 @@ select cast(case when columns[7] = 'null' then NULL else columns[7] end as float) float_col, cast(case when columns[8] = 'null' then NULL else columns[8] end as double) double_col, cast(case when columns[9] = 'null' then NULL else columns[9] end as boolean) bool_col -from dfs.`cross-sources`.`fewtypes_null.tbl`) o +from dfs_test.`cross-sources`.`fewtypes_null.tbl`) o on p.int_col=o.int_col and p.bigint_col = o.bigint_col and p.date_col = cast(o.date_col as date) diff --git a/framework/resources/Functional/cross-sources/nullable-json-hive-join.q b/framework/resources/Functional/cross-sources/nullable-json-hive-join.q index 
5c123c448..1dd9879d3 100644 --- a/framework/resources/Functional/cross-sources/nullable-json-hive-join.q +++ b/framework/resources/Functional/cross-sources/nullable-json-hive-join.q @@ -1,7 +1,7 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.json` p +from dfs_test.`cross-sources`.`fewtypes_null.json` p inner join hive.crosssources.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/cross-sources/nullable-json-hive-unionall.q b/framework/resources/Functional/cross-sources/nullable-json-hive-unionall.q index 467cc0c2b..11193b52f 100644 --- a/framework/resources/Functional/cross-sources/nullable-json-hive-unionall.q +++ b/framework/resources/Functional/cross-sources/nullable-json-hive-unionall.q @@ -1,6 +1,6 @@ select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, cast(p.float_col as float), p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.json` p +from dfs_test.`cross-sources`.`fewtypes_null.json` p union all select o.int_col, o.bigint_col, o.date_col, cast(o.time_col as time), o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col from hive.crosssources.fewtypes_null_hive o; diff --git a/framework/resources/Functional/cross-sources/nullable-json-hivehbase-join.q b/framework/resources/Functional/cross-sources/nullable-json-hivehbase-join.q index f08bbdd76..22a3ceab3 100644 --- a/framework/resources/Functional/cross-sources/nullable-json-hivehbase-join.q +++ b/framework/resources/Functional/cross-sources/nullable-json-hivehbase-join.q @@ -1,7 +1,7 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, 
o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.json` p +from dfs_test.`cross-sources`.`fewtypes_null.json` p inner join hive.crosssources.fewtypes_null_hbase o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/cross-sources/nullable-json-text-join.q b/framework/resources/Functional/cross-sources/nullable-json-text-join.q index 99cc4b12a..b40b278a0 100644 --- a/framework/resources/Functional/cross-sources/nullable-json-text-join.q +++ b/framework/resources/Functional/cross-sources/nullable-json-text-join.q @@ -1,7 +1,7 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, cast(o.float_col as float), o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, cast(p.float_col as float), p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.json` p +from dfs_test.`cross-sources`.`fewtypes_null.json` p inner join ( select cast(case when columns[0] = 'null' then NULL else columns[0] end as int) int_col, @@ -14,7 +14,7 @@ select cast(case when columns[7] = 'null' then NULL else columns[7] end as float) float_col, cast(case when columns[8] = 'null' then NULL else columns[8] end as double) double_col, cast(case when columns[9] = 'null' then NULL else columns[9] end as boolean) bool_col -from dfs.`cross-sources`.`fewtypes_null.tbl`) o +from dfs_test.`cross-sources`.`fewtypes_null.tbl`) o on p.int_col=o.int_col and p.bigint_col = o.bigint_col and cast(p.date_col as date) = cast(o.date_col as date) diff --git a/framework/resources/Functional/cross-sources/nullable-json-text-unionall.q b/framework/resources/Functional/cross-sources/nullable-json-text-unionall.q index e347195af..d6d39252d 100644 --- 
a/framework/resources/Functional/cross-sources/nullable-json-text-unionall.q +++ b/framework/resources/Functional/cross-sources/nullable-json-text-unionall.q @@ -1,7 +1,7 @@ select cast(p.int_col as int), cast(p.bigint_col as bigint), cast(p.date_col as date), cast(p.time_col as time), cast(p.timestamp_col as timestamp), p.interval_col, p.varchar_col, cast(p.float_col as float), cast(p.double_col as double), cast(p.bool_col as boolean) -from dfs.`cross-sources`.`fewtypes_null.json` p +from dfs_test.`cross-sources`.`fewtypes_null.json` p union all select cast(o.int_col as int), cast(o.bigint_col as bigint), cast(o.date_col as date), cast(o.time_col as time), cast(o.timestamp_col as timestamp), o.interval_col, o.varchar_col, cast(o.float_col as float), cast(o.double_col as double), cast(o.bool_col as boolean) -from dfs.`cross-sources`.`fewtypes_null.json` o; +from dfs_test.`cross-sources`.`fewtypes_null.json` o; diff --git a/framework/resources/Functional/cross-sources/nullable-parquet-hive-fulljoin.q b/framework/resources/Functional/cross-sources/nullable-parquet-hive-fulljoin.q index 8e7d7fc98..d086b1fa7 100644 --- a/framework/resources/Functional/cross-sources/nullable-parquet-hive-fulljoin.q +++ b/framework/resources/Functional/cross-sources/nullable-parquet-hive-fulljoin.q @@ -1,7 +1,7 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p full outer join hive.crosssources.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/cross-sources/nullable-parquet-hive-join.q b/framework/resources/Functional/cross-sources/nullable-parquet-hive-join.q index 714369250..22fdc77ab 
100644 --- a/framework/resources/Functional/cross-sources/nullable-parquet-hive-join.q +++ b/framework/resources/Functional/cross-sources/nullable-parquet-hive-join.q @@ -1,7 +1,7 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p inner join hive.crosssources.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/cross-sources/nullable-parquet-hive-leftjoin.q b/framework/resources/Functional/cross-sources/nullable-parquet-hive-leftjoin.q index f7b7538b9..037d7422b 100644 --- a/framework/resources/Functional/cross-sources/nullable-parquet-hive-leftjoin.q +++ b/framework/resources/Functional/cross-sources/nullable-parquet-hive-leftjoin.q @@ -1,7 +1,7 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p left join hive.crosssources.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/cross-sources/nullable-parquet-hive-unionall.q b/framework/resources/Functional/cross-sources/nullable-parquet-hive-unionall.q index afba54f1d..149abc189 100644 --- a/framework/resources/Functional/cross-sources/nullable-parquet-hive-unionall.q +++ b/framework/resources/Functional/cross-sources/nullable-parquet-hive-unionall.q @@ -1,6 +1,6 @@ select p.int_col, p.bigint_col, p.date_col, p.time_col, 
p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p union all select o.int_col, o.bigint_col, o.date_col, cast(o.time_col as time), o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col from hive.crosssources.fewtypes_null_hive o; diff --git a/framework/resources/Functional/cross-sources/nullable-parquet-json-join.q b/framework/resources/Functional/cross-sources/nullable-parquet-json-join.q index d9716d46d..cac140eae 100644 --- a/framework/resources/Functional/cross-sources/nullable-parquet-json-join.q +++ b/framework/resources/Functional/cross-sources/nullable-parquet-json-join.q @@ -1,8 +1,8 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p -inner join dfs.`cross-sources`.`fewtypes_null.json` o +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p +inner join dfs_test.`cross-sources`.`fewtypes_null.json` o on p.int_col=o.int_col and p.bigint_col = o.bigint_col and p.date_col = cast(o.date_col as date) diff --git a/framework/resources/Functional/cross-sources/nullable-parquet-json-unionall.q b/framework/resources/Functional/cross-sources/nullable-parquet-json-unionall.q index a74dcc69e..8f494e717 100644 --- a/framework/resources/Functional/cross-sources/nullable-parquet-json-unionall.q +++ b/framework/resources/Functional/cross-sources/nullable-parquet-json-unionall.q @@ -1,7 +1,7 @@ select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from 
dfs_test.`cross-sources`.`fewtypes_null.parquet` p union all select o.int_col, o.bigint_col, cast(o.date_col as date), cast(o.time_col as time), cast(o.timestamp_col as timestamp), o.interval_col, o.varchar_col, cast(o.float_col as float), o.double_col, o.bool_col -from dfs.`cross-sources`.`fewtypes_null.json` o; +from dfs_test.`cross-sources`.`fewtypes_null.json` o; diff --git a/framework/resources/Functional/cross-sources/nullable-parquet-text-join.q b/framework/resources/Functional/cross-sources/nullable-parquet-text-join.q index cf5fc9414..74249a630 100644 --- a/framework/resources/Functional/cross-sources/nullable-parquet-text-join.q +++ b/framework/resources/Functional/cross-sources/nullable-parquet-text-join.q @@ -1,7 +1,7 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p inner join ( select cast(case when columns[0] = 'null' then NULL else columns[0] end as int) int_col, @@ -14,7 +14,7 @@ select cast(case when columns[7] = 'null' then NULL else columns[7] end as float) float_col, cast(case when columns[8] = 'null' then NULL else columns[8] end as double) double_col, cast(case when columns[9] = 'null' then NULL else columns[9] end as boolean) bool_col -from dfs.`cross-sources`.`fewtypes_null.tbl`) o +from dfs_test.`cross-sources`.`fewtypes_null.tbl`) o on p.int_col=o.int_col and p.bigint_col = o.bigint_col and p.date_col = cast(o.date_col as date) diff --git a/framework/resources/Functional/cross-sources/nullable-parquet-text-unionall.q b/framework/resources/Functional/cross-sources/nullable-parquet-text-unionall.q index b1702f16a..13329bf5c 100644 --- 
a/framework/resources/Functional/cross-sources/nullable-parquet-text-unionall.q +++ b/framework/resources/Functional/cross-sources/nullable-parquet-text-unionall.q @@ -1,7 +1,7 @@ select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p union all select cast(o.int_col as int), cast(o.bigint_col as bigint), cast(o.date_col as date), cast(o.time_col as time), cast(o.timestamp_col as timestamp), o.interval_col, o.varchar_col, cast(o.float_col as float), cast(o.double_col as double), cast(o.bool_col as boolean) -from dfs.`cross-sources`.`fewtypes_null.json` o; +from dfs_test.`cross-sources`.`fewtypes_null.json` o; diff --git a/framework/resources/Functional/cross-sources/nullable-parquet-text-view-join.q b/framework/resources/Functional/cross-sources/nullable-parquet-text-view-join.q index 4821616e0..b77f4ea7c 100644 --- a/framework/resources/Functional/cross-sources/nullable-parquet-text-view-join.q +++ b/framework/resources/Functional/cross-sources/nullable-parquet-text-view-join.q @@ -1,4 +1,4 @@ -create or replace view dfs.`cross-sources`.fewtypes_null_view2 as +create or replace view dfs_test.`cross-sources`.fewtypes_null_view2 as select cast(case when columns[0] = 'null' then NULL else columns[0] end as int) int_col, cast(case when columns[1] = 'null' then NULL else columns[1] end as bigint) bigint_col, @@ -10,13 +10,13 @@ select cast(case when columns[7] = 'null' then NULL else columns[7] end as float) float_col, cast(case when columns[8] = 'null' then NULL else columns[8] end as double) double_col, cast(case when columns[9] = 'null' then NULL else columns[9] end as boolean) bool_col -from dfs.`cross-sources`.`fewtypes_null.tbl`; +from dfs_test.`cross-sources`.`fewtypes_null.tbl`; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, 
o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p -inner join dfs.`cross-sources`.fewtypes_null_view2 o +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p +inner join dfs_test.`cross-sources`.fewtypes_null_view2 o on p.int_col=o.int_col and p.bigint_col = o.bigint_col and p.date_col = cast(o.date_col as date) @@ -28,4 +28,4 @@ inner join dfs.`cross-sources`.fewtypes_null_view2 o and p.double_col = o.double_col and p.bool_col = o.bool_col; -drop view dfs.`cross-sources`.fewtypes_null_view2; +drop view dfs_test.`cross-sources`.fewtypes_null_view2; diff --git a/framework/resources/Functional/cross-sources/parquet-hive-fulljoin_DRILL-2707.q b/framework/resources/Functional/cross-sources/parquet-hive-fulljoin_DRILL-2707.q index e3f06a548..56479d40c 100644 --- a/framework/resources/Functional/cross-sources/parquet-hive-fulljoin_DRILL-2707.q +++ b/framework/resources/Functional/cross-sources/parquet-hive-fulljoin_DRILL-2707.q @@ -1,7 +1,7 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p full outer join hive.crosssources.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/cross-sources/parquet-hive-join.q b/framework/resources/Functional/cross-sources/parquet-hive-join.q index 4508d5302..4be7c871e 100644 --- a/framework/resources/Functional/cross-sources/parquet-hive-join.q +++ b/framework/resources/Functional/cross-sources/parquet-hive-join.q @@ -1,7 +1,7 @@ select o.int_col, 
o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p inner join hive.crosssources.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/cross-sources/parquet-hive-unionall.q b/framework/resources/Functional/cross-sources/parquet-hive-unionall.q index fd6257a95..9c40dfbc8 100644 --- a/framework/resources/Functional/cross-sources/parquet-hive-unionall.q +++ b/framework/resources/Functional/cross-sources/parquet-hive-unionall.q @@ -1,6 +1,6 @@ select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p union all select o.int_col, o.bigint_col, o.date_col, cast(o.time_col as time), o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col from hive.crosssources.fewtypes_null_hive o; diff --git a/framework/resources/Functional/cross-sources/parquet-json-fulljoin_DRILL-2707.q b/framework/resources/Functional/cross-sources/parquet-json-fulljoin_DRILL-2707.q index b44f9cf88..b35b6efd7 100644 --- a/framework/resources/Functional/cross-sources/parquet-json-fulljoin_DRILL-2707.q +++ b/framework/resources/Functional/cross-sources/parquet-json-fulljoin_DRILL-2707.q @@ -1,8 +1,8 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p -full 
outer join dfs.`cross-sources`.`fewtypes_null.json` o +from dfs_test.`cross-sources`.`fewtypes.parquet` p +full outer join dfs_test.`cross-sources`.`fewtypes_null.json` o on p.int_col=o.int_col and p.bigint_col = o.bigint_col and p.date_col = cast(o.date_col as date) diff --git a/framework/resources/Functional/cross-sources/parquet-json-join.q b/framework/resources/Functional/cross-sources/parquet-json-join.q index 5468599ee..874de2158 100644 --- a/framework/resources/Functional/cross-sources/parquet-json-join.q +++ b/framework/resources/Functional/cross-sources/parquet-json-join.q @@ -1,8 +1,8 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p -inner join dfs.`cross-sources`.`fewtypes_null.json` o +from dfs_test.`cross-sources`.`fewtypes.parquet` p +inner join dfs_test.`cross-sources`.`fewtypes_null.json` o on p.int_col=o.int_col and p.bigint_col = o.bigint_col and p.date_col = cast(o.date_col as date) diff --git a/framework/resources/Functional/cross-sources/parquet-json-unionall.q b/framework/resources/Functional/cross-sources/parquet-json-unionall.q index 47c555b5e..41591fb32 100644 --- a/framework/resources/Functional/cross-sources/parquet-json-unionall.q +++ b/framework/resources/Functional/cross-sources/parquet-json-unionall.q @@ -1,7 +1,7 @@ select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p union all select o.int_col, o.bigint_col, cast(o.date_col as date), cast(o.time_col as time), cast(o.timestamp_col as timestamp), o.interval_col, o.varchar_col, cast(o.float_col as float), o.double_col, 
o.bool_col -from dfs.`cross-sources`.`fewtypes_null.json` o; +from dfs_test.`cross-sources`.`fewtypes_null.json` o; diff --git a/framework/resources/Functional/cross-sources/parquet-text-join.q b/framework/resources/Functional/cross-sources/parquet-text-join.q index 309d2b1d3..ce73a06c6 100644 --- a/framework/resources/Functional/cross-sources/parquet-text-join.q +++ b/framework/resources/Functional/cross-sources/parquet-text-join.q @@ -1,7 +1,7 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p inner join ( select cast(case when columns[0] = 'null' then NULL else columns[0] end as int) int_col, @@ -14,7 +14,7 @@ select cast(case when columns[7] = 'null' then NULL else columns[7] end as float) float_col, cast(case when columns[8] = 'null' then NULL else columns[8] end as double) double_col, cast(case when columns[9] = 'null' then NULL else columns[9] end as boolean) bool_col -from dfs.`cross-sources`.`fewtypes_null.tbl`) o +from dfs_test.`cross-sources`.`fewtypes_null.tbl`) o on p.int_col=o.int_col and p.bigint_col = o.bigint_col and p.date_col = cast(o.date_col as date) diff --git a/framework/resources/Functional/cross-sources/parquet-text-unionall.q b/framework/resources/Functional/cross-sources/parquet-text-unionall.q index 239f07d8b..570d8e820 100644 --- a/framework/resources/Functional/cross-sources/parquet-text-unionall.q +++ b/framework/resources/Functional/cross-sources/parquet-text-unionall.q @@ -1,7 +1,7 @@ select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from 
dfs_test.`cross-sources`.`fewtypes.parquet` p union all select cast(o.int_col as int), cast(o.bigint_col as bigint), cast(o.date_col as date), cast(o.time_col as time), cast(o.timestamp_col as timestamp), o.interval_col, o.varchar_col, cast(o.float_col as float), cast(o.double_col as double), cast(o.bool_col as boolean) -from dfs.`cross-sources`.`fewtypes_null.json` o; +from dfs_test.`cross-sources`.`fewtypes_null.json` o; diff --git a/framework/resources/Functional/cross-sources/q_DRILL-2606.q b/framework/resources/Functional/cross-sources/q_DRILL-2606.q index b43b56884..fee694e66 100644 --- a/framework/resources/Functional/cross-sources/q_DRILL-2606.q +++ b/framework/resources/Functional/cross-sources/q_DRILL-2606.q @@ -1,3 +1,3 @@ alter session set `planner.slice_target`=1; -select cast(cast(bool_col as varchar(100)) as boolean) from dfs.`/drill/testdata/cross-sources/fewtypes_null.parquet`; +select cast(cast(bool_col as varchar(100)) as boolean) from dfs_test.`/drill/testdata/cross-sources/fewtypes_null.parquet`; alter session set `planner.slice_target`=100000; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/copy.sh b/framework/resources/Functional/ctas/ctas_auto_partition/copy.sh index a18dc029c..0b22d4ec4 100755 --- a/framework/resources/Functional/ctas/ctas_auto_partition/copy.sh +++ b/framework/resources/Functional/ctas/ctas_auto_partition/copy.sh @@ -11,7 +11,7 @@ else filenameWithoutExtension="${filename%.*}" ctas="_100000rows_ctas" - tablename="dfs.ctas_flatten.\`$filenameWithoutExtension$ctas\`" + tablename="dfs_test.ctas_flatten.\`$filenameWithoutExtension$ctas\`" outfile="copied/$filename" echo "alter session set \`drill.exec.storage.file.partition.column.label\` = 'partition_string1';" > $outfile echo "$line" >> $outfile diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/csv/data/ctas_auto_partition.json 
b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/csv/data/ctas_auto_partition.json index 8718d9d37..4d3c8eafb 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/csv/data/ctas_auto_partition.json +++ b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/csv/data/ctas_auto_partition.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.ctasAutoPartition", + "schema": "dfs_test.ctasAutoPartition", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/hierarchical/data/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/hierarchical/data/ctas_auto_partition.json index 63e5532bb..254cbb5c5 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/hierarchical/data/ctas_auto_partition.json +++ b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/hierarchical/data/ctas_auto_partition.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.ctasAutoPartition", + "schema": "dfs_test.ctasAutoPartition", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/hierarchical/plan/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/hierarchical/plan/ctas_auto_partition.json index ca0799003..e8ab567ec 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/hierarchical/plan/ctas_auto_partition.json +++ b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/hierarchical/plan/ctas_auto_partition.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": 
".*.q", - "schema": "dfs.ctasAutoPartition", + "schema": "dfs_test.ctasAutoPartition", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/json/data/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/json/data/ctas_auto_partition.json index 13bc84c82..752d70c1f 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/json/data/ctas_auto_partition.json +++ b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/json/data/ctas_auto_partition.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.ctasAutoPartition", + "schema": "dfs_test.ctasAutoPartition", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/parquet/data/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/parquet/data/ctas_auto_partition.json index d69ba6f66..6f8867120 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/parquet/data/ctas_auto_partition.json +++ b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/parquet/data/ctas_auto_partition.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.ctasAutoPartition", + "schema": "dfs_test.ctasAutoPartition", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/parquet/plan/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/parquet/plan/ctas_auto_partition.json index 4975e4a0e..6ad1bf046 100644 --- 
a/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/parquet/plan/ctas_auto_partition.json +++ b/framework/resources/Functional/ctas/ctas_auto_partition/existing_partition_pruning/parquet/plan/ctas_auto_partition.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.ctasAutoPartition", + "schema": "dfs_test.ctasAutoPartition", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/ctas_auto_partition.json index b0a5d5cff..f0641eea0 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/ctas_auto_partition.json +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/ctas_auto_partition.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.ctasAutoPartition", + "schema": "dfs_test.ctasAutoPartition", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_1.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_1.q index b5c9bf5cc..c45663356 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_1.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_1.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/drill-3947` where dt= '2001-01-06'; +select * from dfs_test.`/drill/testdata/drill-3947` where dt= '2001-01-06'; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_2.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_2.q index acf0fa77f..91755e831 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_2.q +++ 
b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_2.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/drill-3947` where dt= '2001-01-06'; +select count(*) from dfs_test.`/drill/testdata/drill-3947` where dt= '2001-01-06'; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_3.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_3.q index 76c05f7fa..d845c290a 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_3.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_3.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/drill-3947` where dt> '2015-12-25'; +select * from dfs_test.`/drill/testdata/drill-3947` where dt> '2015-12-25'; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_4.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_4.q index e6fbbf61e..964665eff 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_4.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_4.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/drill-3947` where dt> '2015-12-25'; +select count(*) from dfs_test.`/drill/testdata/drill-3947` where dt> '2015-12-25'; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_5.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_5.q index f6106c4d5..f2a4e31dd 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_5.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_5.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/drill-3947` where dt = '2015-12-25' and dt > '2015-12-25'; +select * from dfs_test.`/drill/testdata/drill-3947` where dt = '2015-12-25' and dt > '2015-12-25'; diff --git 
a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_6.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_6.q index fd8256504..111620437 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_6.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_6.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/drill-3947` where dt = '2015-12-25' and dt > '2015-12-25'; +select count(*) from dfs_test.`/drill/testdata/drill-3947` where dt = '2015-12-25' and dt > '2015-12-25'; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_7.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_7.q index 034edbedb..365cedf85 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_7.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_7.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/drill-3947` where dt = '2015-12-25' or dt > '2015-12-25'; +select * from dfs_test.`/drill/testdata/drill-3947` where dt = '2015-12-25' or dt > '2015-12-25'; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_8.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_8.q index 31c0f3b5d..4d017e709 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_8.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_8.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/drill-3947` where dt = '2015-12-25' or int_var=42; +select * from dfs_test.`/drill/testdata/drill-3947` where dt = '2015-12-25' or int_var=42; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_9.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_9.q index 630da52da..26650dcbf 100644 
--- a/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_9.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/data/drill3947_9.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/drill-3947` where dt = '2015-12-25' or int_var=42; +select count(*) from dfs_test.`/drill/testdata/drill-3947` where dt = '2015-12-25' or int_var=42; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/drill3965_1.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/drill3965_1.q index fdb2a1d9f..6212aa659 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/drill3965_1.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/drill3965_1.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/ctas_auto_partition/tpch_single_partition1/nation/0_0_1.parquet` where n_regionkey = 0; +select * from dfs_test.`/drill/testdata/ctas_auto_partition/tpch_single_partition1/nation/0_0_1.parquet` where n_regionkey = 0; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/drill3965_2.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/drill3965_2.q index 24536b090..cb5603e50 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/drill3965_2.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/drill3965_2.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/ctas_auto_partition/tpch_single_partition1/lineitem/0_0_1.parquet` where c1='1992-01-01 05:30:44.8'; +select count(*) from dfs_test.`/drill/testdata/ctas_auto_partition/tpch_single_partition1/lineitem/0_0_1.parquet` where c1='1992-01-01 05:30:44.8'; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/drill3965_3.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/drill3965_3.q index dd582cca8..e3360ba9f 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/drill3965_3.q +++ 
b/framework/resources/Functional/ctas/ctas_auto_partition/general/drill3965_3.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/ctas_auto_partition/tpch_single_partition1/lineitem/0_0_1.parquet` where c1<>'1992-01-01 05:30:44.8'; +select count(*) from dfs_test.`/drill/testdata/ctas_auto_partition/tpch_single_partition1/lineitem/0_0_1.parquet` where c1<>'1992-01-01 05:30:44.8'; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/ctas_auto_partition.json index 57fcdd631..29e3f6a73 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/ctas_auto_partition.json +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/ctas_auto_partition.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.ctasAutoPartition", + "schema": "dfs_test.ctasAutoPartition", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_1.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_1.q index ff22feaa6..a0fa8fa1a 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_1.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_1.q @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/drill-3947` where dt= '2001-01-06'; +explain plan for select * from dfs_test.`/drill/testdata/drill-3947` where dt= '2001-01-06'; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_2.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_2.q index 208cc859c..fbda1ffde 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_2.q +++ 
b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_2.q @@ -1 +1 @@ -explain plan for select count(*) from dfs.`/drill/testdata/drill-3947` where dt= '2001-01-06'; +explain plan for select count(*) from dfs_test.`/drill/testdata/drill-3947` where dt= '2001-01-06'; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_3.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_3.q index 677e4a4e3..590337f41 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_3.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_3.q @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/drill-3947` where dt> '2015-12-25'; +explain plan for select * from dfs_test.`/drill/testdata/drill-3947` where dt> '2015-12-25'; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_4.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_4.q index 15117b19c..eee988f09 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_4.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_4.q @@ -1 +1 @@ -explain plan for select count(*) from dfs.`/drill/testdata/drill-3947` where dt> '2015-12-25'; +explain plan for select count(*) from dfs_test.`/drill/testdata/drill-3947` where dt> '2015-12-25'; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_5.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_5.q index 115547b98..444f9d847 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_5.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_5.q @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/drill-3947` where dt = '2015-12-25' and dt > 
'2015-12-25'; +explain plan for select * from dfs_test.`/drill/testdata/drill-3947` where dt = '2015-12-25' and dt > '2015-12-25'; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_6.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_6.q index 560db59b6..725db1796 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_6.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_6.q @@ -1 +1 @@ -explain plan for select count(*) from dfs.`/drill/testdata/drill-3947` where dt = '2015-12-25' and dt > '2015-12-25'; +explain plan for select count(*) from dfs_test.`/drill/testdata/drill-3947` where dt = '2015-12-25' and dt > '2015-12-25'; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_7.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_7.q index 56a40f883..d3a25f724 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_7.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_7.q @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/drill-3947` where dt = '2015-12-25' or dt > '2015-12-25'; +explain plan for select * from dfs_test.`/drill/testdata/drill-3947` where dt = '2015-12-25' or dt > '2015-12-25'; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_8.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_8.q index d94055e3b..91ebf63de 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_8.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_8.q @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/drill-3947` where dt = '2015-12-25' or int_var=42; +explain plan for select * from dfs_test.`/drill/testdata/drill-3947` where dt 
= '2015-12-25' or int_var=42; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_9.q b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_9.q index 2f01bebb7..0a4bce24a 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_9.q +++ b/framework/resources/Functional/ctas/ctas_auto_partition/general/plan/drill3947_9.q @@ -1 +1 @@ -explain plan for select count(*) from dfs.`/drill/testdata/drill-3947` where dt = '2015-12-25' or int_var=42; +explain plan for select count(*) from dfs_test.`/drill/testdata/drill-3947` where dt = '2015-12-25' or int_var=42; diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_multiple_partitions/data/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_multiple_partitions/data/ctas_auto_partition.json index 2eca6961b..e5ac87931 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_multiple_partitions/data/ctas_auto_partition.json +++ b/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_multiple_partitions/data/ctas_auto_partition.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.ctasAutoPartition", + "schema": "dfs_test.ctasAutoPartition", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_multiple_partitions/plan/ctas_auto_partition.json b/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_multiple_partitions/plan/ctas_auto_partition.json index 2e5d3219d..eb3ba5312 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_multiple_partitions/plan/ctas_auto_partition.json +++ b/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_multiple_partitions/plan/ctas_auto_partition.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": 
"dfs.ctasAutoPartition", + "schema": "dfs_test.ctasAutoPartition", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_single_partition/tpch.json b/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_single_partition/tpch.json index 825f7e3c7..562422b75 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_single_partition/tpch.json +++ b/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_single_partition/tpch.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.tpchSinglePartition", + "schema": "dfs_test.tpchSinglePartition", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_single_partition1/tpch.json b/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_single_partition1/tpch.json index 66f47c050..cc9f1af23 100644 --- a/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_single_partition1/tpch.json +++ b/framework/resources/Functional/ctas/ctas_auto_partition/tpch0.01_single_partition1/tpch.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.tpchSinglePartition1", + "schema": "dfs_test.tpchSinglePartition1", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/ctas_flatten.json b/framework/resources/Functional/ctas/ctas_flatten/100000rows/ctas_flatten.json index 33324ab86..c8b800d82 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/ctas_flatten.json +++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/ctas_flatten.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.flatten_operators_100000rows", + "schema": "dfs_test.flatten_operators_100000rows", "output-format": "tsv", 
"expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter0.q b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter0.q index 1a60ff578..86cd5a136 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter0.q +++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter0.q @@ -1,5 +1,5 @@ -drop table if exists dfs.ctas_flatten.`filter0_100000rows_ctas`; -create table dfs.ctas_flatten.`filter0_100000rows_ctas` as select uid, flatten(events) from `data.json` where uid = 1; +drop table if exists dfs_test.ctas_flatten.`filter0_100000rows_ctas`; +create table dfs_test.ctas_flatten.`filter0_100000rows_ctas` as select uid, flatten(events) from `data.json` where uid = 1; --@test -select * from dfs.ctas_flatten.`filter0_100000rows_ctas`; -drop table dfs.ctas_flatten.`filter0_100000rows_ctas`; +select * from dfs_test.ctas_flatten.`filter0_100000rows_ctas`; +drop table dfs_test.ctas_flatten.`filter0_100000rows_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter13.q b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter13.q index eb2f54f81..f0b573282 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter13.q +++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter13.q @@ -1,5 +1,5 @@ -drop table if exists dfs.ctas_flatten.`filter13_100000rows_ctas`; -create table dfs.ctas_flatten.`filter13_100000rows_ctas` as select s.evnts.evnt_id from (select d.type type, flatten(d.events) evnts from `data.json` d where d.type='web') s where s.evnts.type = 'cmpgn4'; +drop table if exists dfs_test.ctas_flatten.`filter13_100000rows_ctas`; +create table dfs_test.ctas_flatten.`filter13_100000rows_ctas` as select s.evnts.evnt_id from (select d.type type, flatten(d.events) evnts from `data.json` d where d.type='web') s where s.evnts.type = 'cmpgn4'; --@test -select * from 
dfs.ctas_flatten.`filter13_100000rows_ctas`; -drop table dfs.ctas_flatten.`filter13_100000rows_ctas`; +select * from dfs_test.ctas_flatten.`filter13_100000rows_ctas`; +drop table dfs_test.ctas_flatten.`filter13_100000rows_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter14.q b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter14.q index a2b1c99e2..69553fba1 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter14.q +++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter14.q @@ -1,5 +1,5 @@ -drop table if exists dfs.ctas_flatten.`filter14_100000rows_ctas`; -create table dfs.ctas_flatten.`filter14_100000rows_ctas` as select * from (select d.type type, flatten(d.events) evnts from `data.json` d where d.type='web') s where s.evnts.type = 'cmpgn4'; +drop table if exists dfs_test.ctas_flatten.`filter14_100000rows_ctas`; +create table dfs_test.ctas_flatten.`filter14_100000rows_ctas` as select * from (select d.type type, flatten(d.events) evnts from `data.json` d where d.type='web') s where s.evnts.type = 'cmpgn4'; --@test -select * from dfs.ctas_flatten.`filter14_100000rows_ctas`; -drop table dfs.ctas_flatten.`filter14_100000rows_ctas`; +select * from dfs_test.ctas_flatten.`filter14_100000rows_ctas`; +drop table dfs_test.ctas_flatten.`filter14_100000rows_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter15.q b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter15.q index 1750b0070..bd8b34643 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter15.q +++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter15.q @@ -1,5 +1,5 @@ -drop table if exists dfs.ctas_flatten.`filter15_100000rows_ctas`; -create table dfs.ctas_flatten.`filter15_100000rows_ctas` as select s.trans from (select d.max_trans_amount mx, flatten(d.transactions) trans from `data.json` d) s where s.trans.amount = s.mx; +drop 
table if exists dfs_test.ctas_flatten.`filter15_100000rows_ctas`; +create table dfs_test.ctas_flatten.`filter15_100000rows_ctas` as select s.trans from (select d.max_trans_amount mx, flatten(d.transactions) trans from `data.json` d) s where s.trans.amount = s.mx; --@test -select * from dfs.ctas_flatten.`filter15_100000rows_ctas`; -drop table dfs.ctas_flatten.`filter15_100000rows_ctas`; +select * from dfs_test.ctas_flatten.`filter15_100000rows_ctas`; +drop table dfs_test.ctas_flatten.`filter15_100000rows_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter16.q b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter16.q index c69c773b5..b2a2b095b 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter16.q +++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter16.q @@ -1,5 +1,5 @@ -drop table if exists dfs.ctas_flatten.`filter16_100000rows_ctas`; -create table dfs.ctas_flatten.`filter16_100000rows_ctas` as select s.rms.mapid from (select d.type type, flatten(d.map.rm) rms from `data.json` d where d.map.rm[0].mapid='m1') s where s.rms.mapid='m2'; +drop table if exists dfs_test.ctas_flatten.`filter16_100000rows_ctas`; +create table dfs_test.ctas_flatten.`filter16_100000rows_ctas` as select s.rms.mapid from (select d.type type, flatten(d.map.rm) rms from `data.json` d where d.map.rm[0].mapid='m1') s where s.rms.mapid='m2'; --@test -select * from dfs.ctas_flatten.`filter16_100000rows_ctas`; -drop table dfs.ctas_flatten.`filter16_100000rows_ctas`; +select * from dfs_test.ctas_flatten.`filter16_100000rows_ctas`; +drop table dfs_test.ctas_flatten.`filter16_100000rows_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter19.q b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter19.q index db9354795..ccacf9211 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter19.q +++ 
b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter19.q @@ -1,5 +1,5 @@ -drop table if exists dfs.ctas_flatten.`filter19_100000rows_ctas`; -create table dfs.ctas_flatten.`filter19_100000rows_ctas` as select * from (select d.type type, flatten(d.events) evnts, flatten(d.features) feats from `data.json` d) s where s.evnts.type=s.feats.type; +drop table if exists dfs_test.ctas_flatten.`filter19_100000rows_ctas`; +create table dfs_test.ctas_flatten.`filter19_100000rows_ctas` as select * from (select d.type type, flatten(d.events) evnts, flatten(d.features) feats from `data.json` d) s where s.evnts.type=s.feats.type; --@test -select * from dfs.ctas_flatten.`filter19_100000rows_ctas`; -drop table dfs.ctas_flatten.`filter19_100000rows_ctas`; +select * from dfs_test.ctas_flatten.`filter19_100000rows_ctas`; +drop table dfs_test.ctas_flatten.`filter19_100000rows_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter2.q b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter2.q index cd9fc1537..79af6e4f0 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter2.q +++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter2.q @@ -1,5 +1,5 @@ -drop table if exists dfs.ctas_flatten.`filter2_100000rows_ctas`; -create table dfs.ctas_flatten.`filter2_100000rows_ctas` as select uid_str, flatten(events) from `data.json` where uid = 1; +drop table if exists dfs_test.ctas_flatten.`filter2_100000rows_ctas`; +create table dfs_test.ctas_flatten.`filter2_100000rows_ctas` as select uid_str, flatten(events) from `data.json` where uid = 1; --@test -select * from dfs.ctas_flatten.`filter2_100000rows_ctas`; -drop table dfs.ctas_flatten.`filter2_100000rows_ctas`; +select * from dfs_test.ctas_flatten.`filter2_100000rows_ctas`; +drop table dfs_test.ctas_flatten.`filter2_100000rows_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter20.q 
b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter20.q index f202afefe..e00cdf867 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter20.q +++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter20.q @@ -1,5 +1,5 @@ -drop table if exists dfs.ctas_flatten.`filter20_100000rows_ctas`; -create table dfs.ctas_flatten.`filter20_100000rows_ctas` as select * from (select s1.type type, flatten(s1.rms.rptd) rptds from (select d.type type, flatten(d.map.rm) rms from `data.json` d) s1 where s1.rms.mapid='m2') s2 where s2.rptds.c > 0 and s2.type='web'; +drop table if exists dfs_test.ctas_flatten.`filter20_100000rows_ctas`; +create table dfs_test.ctas_flatten.`filter20_100000rows_ctas` as select * from (select s1.type type, flatten(s1.rms.rptd) rptds from (select d.type type, flatten(d.map.rm) rms from `data.json` d) s1 where s1.rms.mapid='m2') s2 where s2.rptds.c > 0 and s2.type='web'; --@test -select * from dfs.ctas_flatten.`filter20_100000rows_ctas`; -drop table dfs.ctas_flatten.`filter20_100000rows_ctas`; +select * from dfs_test.ctas_flatten.`filter20_100000rows_ctas`; +drop table dfs_test.ctas_flatten.`filter20_100000rows_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter21.q b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter21.q index 2ae568cff..80e6acfc7 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter21.q +++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter21.q @@ -1,5 +1,5 @@ -drop table if exists dfs.ctas_flatten.`filter21_100000rows_ctas`; -create table dfs.ctas_flatten.`filter21_100000rows_ctas` as select s.rms.mapid from (select d.type type, flatten(d.map.rm) rms from `data.json` d where d.map.rm[0].mapid='m1') s where s.rms.mapid='m2'; +drop table if exists dfs_test.ctas_flatten.`filter21_100000rows_ctas`; +create table dfs_test.ctas_flatten.`filter21_100000rows_ctas` as select s.rms.mapid from (select 
d.type type, flatten(d.map.rm) rms from `data.json` d where d.map.rm[0].mapid='m1') s where s.rms.mapid='m2'; --@test -select * from dfs.ctas_flatten.`filter21_100000rows_ctas`; -drop table dfs.ctas_flatten.`filter21_100000rows_ctas`; +select * from dfs_test.ctas_flatten.`filter21_100000rows_ctas`; +drop table dfs_test.ctas_flatten.`filter21_100000rows_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter24.q b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter24.q index 9877d8aea..88498b9e1 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter24.q +++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter24.q @@ -1,5 +1,5 @@ -drop table if exists dfs.ctas_flatten.`filter24_100000rows_ctas`; -create table dfs.ctas_flatten.`filter24_100000rows_ctas` as select * from (select d.type type, flatten(d.events) evnts, flatten(d.features) feats from `data.json` d) s where s.evnts.type=s.feats.type; +drop table if exists dfs_test.ctas_flatten.`filter24_100000rows_ctas`; +create table dfs_test.ctas_flatten.`filter24_100000rows_ctas` as select * from (select d.type type, flatten(d.events) evnts, flatten(d.features) feats from `data.json` d) s where s.evnts.type=s.feats.type; --@test -select * from dfs.ctas_flatten.`filter24_100000rows_ctas`; -drop table dfs.ctas_flatten.`filter24_100000rows_ctas`; +select * from dfs_test.ctas_flatten.`filter24_100000rows_ctas`; +drop table dfs_test.ctas_flatten.`filter24_100000rows_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter25.q b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter25.q index 89f3bb9a6..b3cfdbc31 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter25.q +++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter25.q @@ -1,5 +1,5 @@ -drop table if exists dfs.ctas_flatten.`filter25_100000rows_ctas`; -create table 
dfs.ctas_flatten.`filter25_100000rows_ctas` as select * from (select s1.type type, flatten(s1.rms.rptd) rptds from (select d.type type, flatten(d.map.rm) rms from `data.json` d) s1 where s1.rms.mapid='m2') s2 where s2.rptds.c > 0 and s2.type='web'; +drop table if exists dfs_test.ctas_flatten.`filter25_100000rows_ctas`; +create table dfs_test.ctas_flatten.`filter25_100000rows_ctas` as select * from (select s1.type type, flatten(s1.rms.rptd) rptds from (select d.type type, flatten(d.map.rm) rms from `data.json` d) s1 where s1.rms.mapid='m2') s2 where s2.rptds.c > 0 and s2.type='web'; --@test -select * from dfs.ctas_flatten.`filter25_100000rows_ctas`; -drop table dfs.ctas_flatten.`filter25_100000rows_ctas`; +select * from dfs_test.ctas_flatten.`filter25_100000rows_ctas`; +drop table dfs_test.ctas_flatten.`filter25_100000rows_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter3.q b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter3.q index 03a36a07f..35a019742 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter3.q +++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter3.q @@ -1,5 +1,5 @@ -drop table if exists dfs.ctas_flatten.`filter3_100000rows_ctas`; -create table dfs.ctas_flatten.`filter3_100000rows_ctas` as select uid, flatten(events) from `data.json` where uid > 1; +drop table if exists dfs_test.ctas_flatten.`filter3_100000rows_ctas`; +create table dfs_test.ctas_flatten.`filter3_100000rows_ctas` as select uid, flatten(events) from `data.json` where uid > 1; --@test -select * from dfs.ctas_flatten.`filter3_100000rows_ctas`; -drop table dfs.ctas_flatten.`filter3_100000rows_ctas`; +select * from dfs_test.ctas_flatten.`filter3_100000rows_ctas`; +drop table dfs_test.ctas_flatten.`filter3_100000rows_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter4.q b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter4.q index 
2e6d7f09a..401a1c1e8 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter4.q +++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter4.q @@ -1,5 +1,5 @@ -drop table if exists dfs.ctas_flatten.`filter4_100000rows_ctas`; -create table dfs.ctas_flatten.`filter4_100000rows_ctas` as select uid_str, flatten(events) from `data.json` where uid_str = '01'; +drop table if exists dfs_test.ctas_flatten.`filter4_100000rows_ctas`; +create table dfs_test.ctas_flatten.`filter4_100000rows_ctas` as select uid_str, flatten(events) from `data.json` where uid_str = '01'; --@test -select * from dfs.ctas_flatten.`filter4_100000rows_ctas`; -drop table dfs.ctas_flatten.`filter4_100000rows_ctas`; +select * from dfs_test.ctas_flatten.`filter4_100000rows_ctas`; +drop table dfs_test.ctas_flatten.`filter4_100000rows_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter5.q b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter5.q index 77d91a689..5fe74f9b6 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter5.q +++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/filter5.q @@ -1,5 +1,5 @@ -drop table if exists dfs.ctas_flatten.`filter5_100000rows_ctas`; -create table dfs.ctas_flatten.`filter5_100000rows_ctas` as select uid_str, flatten(events) from `data.json` where uid_str = 1; +drop table if exists dfs_test.ctas_flatten.`filter5_100000rows_ctas`; +create table dfs_test.ctas_flatten.`filter5_100000rows_ctas` as select uid_str, flatten(events) from `data.json` where uid_str = 1; --@test -select * from dfs.ctas_flatten.`filter5_100000rows_ctas`; -drop table dfs.ctas_flatten.`filter5_100000rows_ctas`; +select * from dfs_test.ctas_flatten.`filter5_100000rows_ctas`; +drop table dfs_test.ctas_flatten.`filter5_100000rows_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/general0_DRILL-2161.q 
b/framework/resources/Functional/ctas/ctas_flatten/100000rows/general0_DRILL-2161.q index 4877b68b2..368b4cb1a 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/general0_DRILL-2161.q +++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/general0_DRILL-2161.q @@ -1,5 +1,5 @@ -drop table if exists dfs.ctas_flatten.`general0_DRILL-2161_100000rows_ctas`; -create table dfs.ctas_flatten.`general0_DRILL-2161_100000rows_ctas` as select uid, flatten(d.lst_lst) lst from `data.json` d; +drop table if exists dfs_test.ctas_flatten.`general0_DRILL-2161_100000rows_ctas`; +create table dfs_test.ctas_flatten.`general0_DRILL-2161_100000rows_ctas` as select uid, flatten(d.lst_lst) lst from `data.json` d; --@test -select * from dfs.ctas_flatten.`general0_DRILL-2161_100000rows_ctas`; -drop table dfs.ctas_flatten.`general0_DRILL-2161_100000rows_ctas`; +select * from dfs_test.ctas_flatten.`general0_DRILL-2161_100000rows_ctas`; +drop table dfs_test.ctas_flatten.`general0_DRILL-2161_100000rows_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/100000rows/hash-join/join11.q b/framework/resources/Functional/ctas/ctas_flatten/100000rows/hash-join/join11.q index 63c0a37ff..b0ba4af76 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/100000rows/hash-join/join11.q +++ b/framework/resources/Functional/ctas/ctas_flatten/100000rows/hash-join/join11.q @@ -1,7 +1,7 @@ -create table dfs.ctas_flatten.`join11_100000_ctas` as select t2.uid, t2.transaction from (select MAX(t1.transaction.amount) max_trans_amount from +create table dfs_test.ctas_flatten.`join11_100000_ctas` as select t2.uid, t2.transaction from (select MAX(t1.transaction.amount) max_trans_amount from (select uid, flatten(transactions) transaction from `data.json`) t1) t3 inner join (select uid, flatten(transactions) transaction from `data.json`) t2 on t3.max_trans_amount = t2.transaction.amount order by t2.uid limit 1; -select * from dfs.ctas_flatten.`join11_100000_ctas`; 
-drop table dfs.ctas_flatten.`join11_100000_ctas`; +select * from dfs_test.ctas_flatten.`join11_100000_ctas`; +drop table dfs_test.ctas_flatten.`join11_100000_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/alltypes1.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/alltypes1.q index 7a45abd1f..e7e6b5c5d 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/alltypes1.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/alltypes1.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.alltypes1_ctas as select flatten(rep_map) from `alltypes-repeated.parquet`; -select * from dfs.ctas_flatten.alltypes1_ctas; -drop table dfs.ctas_flatten.alltypes1_ctas; +create table dfs_test.ctas_flatten.alltypes1_ctas as select flatten(rep_map) from `alltypes-repeated.parquet`; +select * from dfs_test.ctas_flatten.alltypes1_ctas; +drop table dfs_test.ctas_flatten.alltypes1_ctas; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/alltypes2.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/alltypes2.q index c2d6edd43..cfd0a8cb0 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/alltypes2.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/alltypes2.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.alltypes2_ctas as select flatten(rep_map) from `alltypes-repeated.json`; -select * from dfs.ctas_flatten.alltypes2_ctas; -drop table dfs.ctas_flatten.alltypes2_ctas; +create table dfs_test.ctas_flatten.alltypes2_ctas as select flatten(rep_map) from `alltypes-repeated.json`; +select * from dfs_test.ctas_flatten.alltypes2_ctas; +drop table dfs_test.ctas_flatten.alltypes2_ctas; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/ctas_flatten.json b/framework/resources/Functional/ctas/ctas_flatten/2rows/ctas_flatten.json index e95f6214e..ba4ae54fc 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/ctas_flatten.json +++ 
b/framework/resources/Functional/ctas/ctas_flatten/2rows/ctas_flatten.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.flatten_operators", + "schema": "dfs_test.flatten_operators", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter0.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter0.q index 3c8feeaa1..1603f02b9 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter0.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter0.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.filter0_ctas as select uid, flatten(events) flatten_evnts from `data.json` where uid = 1; -select * from dfs.ctas_flatten.filter0_ctas; -drop table dfs.ctas_flatten.filter0_ctas; +create table dfs_test.ctas_flatten.filter0_ctas as select uid, flatten(events) flatten_evnts from `data.json` where uid = 1; +select * from dfs_test.ctas_flatten.filter0_ctas; +drop table dfs_test.ctas_flatten.filter0_ctas; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter1.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter1.q index 2a3164398..8587acbef 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter1.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter1.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter1_ctas` as select uid, events, flatten(events) from `data.json` where uid = 1; -select * from dfs.ctas_flatten.`filter1_ctas`; -drop table dfs.ctas_flatten.`filter1_ctas`; +create table dfs_test.ctas_flatten.`filter1_ctas` as select uid, events, flatten(events) from `data.json` where uid = 1; +select * from dfs_test.ctas_flatten.`filter1_ctas`; +drop table dfs_test.ctas_flatten.`filter1_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter11.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter11.q index 
b113b7870..35c2908da 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter11.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter11.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter11_ctas` as select d.uid, d.type, flatten(events), flatten(transactions) from `data.json` d where d.type='web'; -select * from dfs.ctas_flatten.`filter11_ctas`; -drop table dfs.ctas_flatten.`filter11_ctas`; +create table dfs_test.ctas_flatten.`filter11_ctas` as select d.uid, d.type, flatten(events), flatten(transactions) from `data.json` d where d.type='web'; +select * from dfs_test.ctas_flatten.`filter11_ctas`; +drop table dfs_test.ctas_flatten.`filter11_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter12.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter12.q index 45755dc0f..89f7d0479 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter12.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter12.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter12_ctas` as select flatten(d.events), flatten(d.map.rm[0].rptd), flatten(d.features) from `data.json` d where features[0].type='Feature' and d.events[0].type='cmpgn1'; -select * from dfs.ctas_flatten.`filter12_ctas`; -drop table dfs.ctas_flatten.`filter12_ctas`; +create table dfs_test.ctas_flatten.`filter12_ctas` as select flatten(d.events), flatten(d.map.rm[0].rptd), flatten(d.features) from `data.json` d where features[0].type='Feature' and d.events[0].type='cmpgn1'; +select * from dfs_test.ctas_flatten.`filter12_ctas`; +drop table dfs_test.ctas_flatten.`filter12_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter13.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter13.q index bb521f616..7d23fc31c 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter13.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter13.q @@ -1,3 +1,3 @@ -create 
table dfs.ctas_flatten.`filter13_ctas` as select s.evnts.evnt_id from (select d.type type, flatten(d.events) evnts from `data.json` d where d.type='web') s where s.evnts.type = 'cmpgn4'; -select * from dfs.ctas_flatten.`filter13_ctas`; -drop table dfs.ctas_flatten.`filter13_ctas`; +create table dfs_test.ctas_flatten.`filter13_ctas` as select s.evnts.evnt_id from (select d.type type, flatten(d.events) evnts from `data.json` d where d.type='web') s where s.evnts.type = 'cmpgn4'; +select * from dfs_test.ctas_flatten.`filter13_ctas`; +drop table dfs_test.ctas_flatten.`filter13_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter14.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter14.q index 4ab62a69b..8ef755be6 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter14.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter14.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter14_ctas` as select * from (select d.type type, flatten(d.events) evnts from `data.json` d where d.type='web') s where s.evnts.type = 'cmpgn4'; -select * from dfs.ctas_flatten.`filter14_ctas`; -drop table dfs.ctas_flatten.`filter14_ctas`; +create table dfs_test.ctas_flatten.`filter14_ctas` as select * from (select d.type type, flatten(d.events) evnts from `data.json` d where d.type='web') s where s.evnts.type = 'cmpgn4'; +select * from dfs_test.ctas_flatten.`filter14_ctas`; +drop table dfs_test.ctas_flatten.`filter14_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter15.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter15.q index 5687cdb94..8729ed4bd 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter15.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter15.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter15_ctas` as select s.trans from (select d.max_trans_amount mx, flatten(d.transactions) trans from `data.json` d) s where 
s.trans.amount = s.mx; -select * from dfs.ctas_flatten.`filter15_ctas`; -drop table dfs.ctas_flatten.`filter15_ctas`; +create table dfs_test.ctas_flatten.`filter15_ctas` as select s.trans from (select d.max_trans_amount mx, flatten(d.transactions) trans from `data.json` d) s where s.trans.amount = s.mx; +select * from dfs_test.ctas_flatten.`filter15_ctas`; +drop table dfs_test.ctas_flatten.`filter15_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter16.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter16.q index 191629c14..d397b3445 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter16.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter16.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter16_ctas` as select s.rms.mapid from (select d.type type, flatten(d.map.rm) rms from `data.json` d where d.map.rm[0].mapid='m1') s where s.rms.mapid='m2'; -select * from dfs.ctas_flatten.`filter16_ctas`; -drop table dfs.ctas_flatten.`filter16_ctas`; +create table dfs_test.ctas_flatten.`filter16_ctas` as select s.rms.mapid from (select d.type type, flatten(d.map.rm) rms from `data.json` d where d.map.rm[0].mapid='m1') s where s.rms.mapid='m2'; +select * from dfs_test.ctas_flatten.`filter16_ctas`; +drop table dfs_test.ctas_flatten.`filter16_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter18.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter18.q index f706f76de..9e9435e7f 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter18.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter18.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter18_ctas` as select s.rptds from (select d.type type, flatten(d.map.rm[1].rptd) rptds, flatten(d.features) feats from `data.json` d) s where s.rptds.c < s.feats.properties.mag and s.type='web'; -select * from dfs.ctas_flatten.`filter18_ctas`; -drop table 
dfs.ctas_flatten.`filter18_ctas`; +create table dfs_test.ctas_flatten.`filter18_ctas` as select s.rptds from (select d.type type, flatten(d.map.rm[1].rptd) rptds, flatten(d.features) feats from `data.json` d) s where s.rptds.c < s.feats.properties.mag and s.type='web'; +select * from dfs_test.ctas_flatten.`filter18_ctas`; +drop table dfs_test.ctas_flatten.`filter18_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter19.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter19.q index 31cae6aa6..044abd84a 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter19.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter19.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter19_ctas` as select * from (select d.type type, flatten(d.events) evnts, flatten(d.features) feats from `data.json` d) s where s.evnts.type=s.feats.type; -select * from dfs.ctas_flatten.`filter19_ctas`; -drop table dfs.ctas_flatten.`filter19_ctas`; +create table dfs_test.ctas_flatten.`filter19_ctas` as select * from (select d.type type, flatten(d.events) evnts, flatten(d.features) feats from `data.json` d) s where s.evnts.type=s.feats.type; +select * from dfs_test.ctas_flatten.`filter19_ctas`; +drop table dfs_test.ctas_flatten.`filter19_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter2.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter2.q index c9eb766a5..13f058b0b 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter2.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter2.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter2_ctas` as select uid_str, flatten(events) from `data.json` where uid = 1; -select * from dfs.ctas_flatten.`filter2_ctas`; -drop table dfs.ctas_flatten.`filter2_ctas`; +create table dfs_test.ctas_flatten.`filter2_ctas` as select uid_str, flatten(events) from `data.json` where uid = 1; +select * from 
dfs_test.ctas_flatten.`filter2_ctas`; +drop table dfs_test.ctas_flatten.`filter2_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter20.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter20.q index 76e536d8a..369797ee7 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter20.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter20.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter20_ctas` as select * from (select s1.type type, flatten(s1.rms.rptd) rptds from (select d.type type, flatten(d.map.rm) rms from `data.json` d) s1 where s1.rms.mapid='m2') s2 where s2.rptds.c > 0 and s2.type='web'; -select * from dfs.ctas_flatten.`filter20_ctas`; -drop table dfs.ctas_flatten.`filter20_ctas`; +create table dfs_test.ctas_flatten.`filter20_ctas` as select * from (select s1.type type, flatten(s1.rms.rptd) rptds from (select d.type type, flatten(d.map.rm) rms from `data.json` d) s1 where s1.rms.mapid='m2') s2 where s2.rptds.c > 0 and s2.type='web'; +select * from dfs_test.ctas_flatten.`filter20_ctas`; +drop table dfs_test.ctas_flatten.`filter20_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter21.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter21.q index 10449f16f..4eae3332a 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter21.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter21.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter21_ctas` as select s.rms.mapid from (select d.type type, flatten(d.map.rm) rms from `data.json` d where d.map.rm[0].mapid='m1') s where s.rms.mapid='m2'; -select * from dfs.ctas_flatten.`filter21_ctas`; -drop table dfs.ctas_flatten.`filter21_ctas`; +create table dfs_test.ctas_flatten.`filter21_ctas` as select s.rms.mapid from (select d.type type, flatten(d.map.rm) rms from `data.json` d where d.map.rm[0].mapid='m1') s where s.rms.mapid='m2'; +select * from 
dfs_test.ctas_flatten.`filter21_ctas`; +drop table dfs_test.ctas_flatten.`filter21_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter23.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter23.q index 3ed95feda..832e36f04 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter23.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter23.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter23_ctas` as select s.rptds from (select d.type type, flatten(d.map.rm[1].rptd) rptds, flatten(d.features) feats from `data.json` d) s where s.rptds.c < s.feats.properties.mag and s.type='web'; -select * from dfs.ctas_flatten.`filter23_ctas`; -drop table dfs.ctas_flatten.`filter23_ctas`; +create table dfs_test.ctas_flatten.`filter23_ctas` as select s.rptds from (select d.type type, flatten(d.map.rm[1].rptd) rptds, flatten(d.features) feats from `data.json` d) s where s.rptds.c < s.feats.properties.mag and s.type='web'; +select * from dfs_test.ctas_flatten.`filter23_ctas`; +drop table dfs_test.ctas_flatten.`filter23_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter24.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter24.q index 1b40a9fbe..e021cb49d 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter24.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter24.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter24_ctas` as select * from (select d.type type, flatten(d.events) evnts, flatten(d.features) feats from `data.json` d) s where s.evnts.type=s.feats.type; -select * from dfs.ctas_flatten.`filter24_ctas`; -drop table dfs.ctas_flatten.`filter24_ctas`; +create table dfs_test.ctas_flatten.`filter24_ctas` as select * from (select d.type type, flatten(d.events) evnts, flatten(d.features) feats from `data.json` d) s where s.evnts.type=s.feats.type; +select * from dfs_test.ctas_flatten.`filter24_ctas`; +drop table 
dfs_test.ctas_flatten.`filter24_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter25.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter25.q index 6507596ef..1c3c9f31e 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter25.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter25.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter25_ctas` as select * from (select s1.type type, flatten(s1.rms.rptd) rptds from (select d.type type, flatten(d.map.rm) rms from `data.json` d) s1 where s1.rms.mapid='m2') s2 where s2.rptds.c > 0 and s2.type='web'; -select * from dfs.ctas_flatten.`filter25_ctas`; -drop table dfs.ctas_flatten.`filter25_ctas`; +create table dfs_test.ctas_flatten.`filter25_ctas` as select * from (select s1.type type, flatten(s1.rms.rptd) rptds from (select d.type type, flatten(d.map.rm) rms from `data.json` d) s1 where s1.rms.mapid='m2') s2 where s2.rptds.c > 0 and s2.type='web'; +select * from dfs_test.ctas_flatten.`filter25_ctas`; +drop table dfs_test.ctas_flatten.`filter25_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter3.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter3.q index b882d3275..df235047d 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter3.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter3.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter3_ctas` as select uid, flatten(events) from `data.json` where uid > 1; -select * from dfs.ctas_flatten.`filter3_ctas`; -drop table dfs.ctas_flatten.`filter3_ctas`; +create table dfs_test.ctas_flatten.`filter3_ctas` as select uid, flatten(events) from `data.json` where uid > 1; +select * from dfs_test.ctas_flatten.`filter3_ctas`; +drop table dfs_test.ctas_flatten.`filter3_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter4.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter4.q index 
4266d2b48..0a934c764 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter4.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter4.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter4_ctas` as select uid_str, flatten(events) from `data.json` where uid_str = '01'; -select * from dfs.ctas_flatten.`filter4_ctas`; -drop table dfs.ctas_flatten.`filter4_ctas`; +create table dfs_test.ctas_flatten.`filter4_ctas` as select uid_str, flatten(events) from `data.json` where uid_str = '01'; +select * from dfs_test.ctas_flatten.`filter4_ctas`; +drop table dfs_test.ctas_flatten.`filter4_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter5.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter5.q index f5103d064..bbfea937b 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter5.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter5.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter5_ctas` as select uid_str, flatten(events) from `data.json` where uid_str = 1; -select * from dfs.ctas_flatten.`filter5_ctas`; -drop table dfs.ctas_flatten.`filter5_ctas`; +create table dfs_test.ctas_flatten.`filter5_ctas` as select uid_str, flatten(events) from `data.json` where uid_str = 1; +select * from dfs_test.ctas_flatten.`filter5_ctas`; +drop table dfs_test.ctas_flatten.`filter5_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter6.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter6.q index 80ce99ea7..b67279328 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter6.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter6.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter6_ctas` as select d.uid, flatten(d.map.rm) from `data.json` d where d.map.rm[0].rptd[0].a = 'foo'; -select * from dfs.ctas_flatten.`filter6_ctas`; -drop table dfs.ctas_flatten.`filter6_ctas`; +create table 
dfs_test.ctas_flatten.`filter6_ctas` as select d.uid, flatten(d.map.rm) from `data.json` d where d.map.rm[0].rptd[0].a = 'foo'; +select * from dfs_test.ctas_flatten.`filter6_ctas`; +drop table dfs_test.ctas_flatten.`filter6_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter7.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter7.q index af90cbf16..9fd58ff18 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter7.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter7.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter7_ctas` as select d.uid, flatten(d.map.rm) from `data.json` d where d.map.rm[0].rptd[0].a = 'foo' or d.sub[1].z2=10; -select * from dfs.ctas_flatten.`filter7_ctas`; -drop table dfs.ctas_flatten.`filter7_ctas`; +create table dfs_test.ctas_flatten.`filter7_ctas` as select d.uid, flatten(d.map.rm) from `data.json` d where d.map.rm[0].rptd[0].a = 'foo' or d.sub[1].z2=10; +select * from dfs_test.ctas_flatten.`filter7_ctas`; +drop table dfs_test.ctas_flatten.`filter7_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter8.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter8.q index c4de3d549..0cf46e40f 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter8.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter8.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter8_ctas` as select d.uid, flatten(d.map.rm), flatten(d.events) from `data.json` d where d.map.rm[0].rptd[0].a = 'foo'; -select * from dfs.ctas_flatten.`filter8_ctas`; -drop table dfs.ctas_flatten.`filter8_ctas`; +create table dfs_test.ctas_flatten.`filter8_ctas` as select d.uid, flatten(d.map.rm), flatten(d.events) from `data.json` d where d.map.rm[0].rptd[0].a = 'foo'; +select * from dfs_test.ctas_flatten.`filter8_ctas`; +drop table dfs_test.ctas_flatten.`filter8_ctas`; diff --git 
a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter9.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter9.q index ad990a51f..8a09bcb0c 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/filter9.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/filter9.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`filter9_ctas` as select d.uid, flatten(d.map.rm), flatten(d.events) from `data.json` d where d.map.rm[0].rptd[0].a = 'foo' and d.events[4].type = 'cmpgn3'; -select * from dfs.ctas_flatten.`filter9_ctas`; -drop table dfs.ctas_flatten.`filter9_ctas`; +create table dfs_test.ctas_flatten.`filter9_ctas` as select d.uid, flatten(d.map.rm), flatten(d.events) from `data.json` d where d.map.rm[0].rptd[0].a = 'foo' and d.events[4].type = 'cmpgn3'; +select * from dfs_test.ctas_flatten.`filter9_ctas`; +drop table dfs_test.ctas_flatten.`filter9_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/functions1.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/functions1.q index f6c1f2b2a..92c1edef9 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/functions1.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/functions1.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`functions1_ctas` as select MAX(sub.val) from (select flatten(d.lst_lst[0]) val from `data1.json` d) sub; -select * from dfs.ctas_flatten.`functions1_ctas`; -drop table dfs.ctas_flatten.`functions1_ctas`; +create table dfs_test.ctas_flatten.`functions1_ctas` as select MAX(sub.val) from (select flatten(d.lst_lst[0]) val from `data1.json` d) sub; +select * from dfs_test.ctas_flatten.`functions1_ctas`; +drop table dfs_test.ctas_flatten.`functions1_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/hj_filter.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/hj_filter.q index ff8b2beff..54d7c15a0 100644 --- 
a/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/hj_filter.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/hj_filter.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`hashfilter25_ctas` as select flatten(events), flatten(events), e1, events from (select flatten(events) e1, events from `data.json`) sub where sub.e1.campaign_id = 'c2'; -select * from dfs.ctas_flatten.`hashfilter25_ctas`; -drop table dfs.ctas_flatten.`hashfilter25_ctas`; +create table dfs_test.ctas_flatten.`hashfilter25_ctas` as select flatten(events), flatten(events), e1, events from (select flatten(events) e1, events from `data.json`) sub where sub.e1.campaign_id = 'c2'; +select * from dfs_test.ctas_flatten.`hashfilter25_ctas`; +drop table dfs_test.ctas_flatten.`hashfilter25_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/join1.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/join1.q index 14556f05f..5c4534111 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/join1.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/join1.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`join1_ctas` as select t1.uid, t1.events from `data.json` t1, `data.json` t2 where t1.uid = t2.uid; -select * from dfs.ctas_flatten.`join1_ctas`; -drop table dfs.ctas_flatten.join1_ctas; +create table dfs_test.ctas_flatten.`join1_ctas` as select t1.uid, t1.events from `data.json` t1, `data.json` t2 where t1.uid = t2.uid; +select * from dfs_test.ctas_flatten.`join1_ctas`; +drop table dfs_test.ctas_flatten.join1_ctas; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/join10.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/join10.q index b1029e8e2..58e1cdb5a 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/join10.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/join10.q @@ -1,7 +1,7 @@ 
-create table dfs.ctas_flatten.`join10_ctas` as select t2.uid, t2.transaction from (select MAX(t1.transaction.amount) max_trans_amount from +create table dfs_test.ctas_flatten.`join10_ctas` as select t2.uid, t2.transaction from (select MAX(t1.transaction.amount) max_trans_amount from (select uid, flatten(transactions) transaction from `data.json`) t1 group by t1.transaction.type) t3 inner join (select uid, flatten(transactions) transaction from `data.json`) t2 on t3.max_trans_amount = t2.transaction.amount; -select * from dfs.ctas_flatten.`join10_ctas`; -drop table dfs.ctas_flatten.`join10_ctas`; +select * from dfs_test.ctas_flatten.`join10_ctas`; +drop table dfs_test.ctas_flatten.`join10_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/join11.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/join11.q index e1496b1e2..32d5a525d 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/join11.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/join11.q @@ -1,7 +1,7 @@ -create table dfs.ctas_flatten.`join11_ctas` as select t2.uid, t2.transaction from (select MAX(t1.transaction.amount) max_trans_amount from +create table dfs_test.ctas_flatten.`join11_ctas` as select t2.uid, t2.transaction from (select MAX(t1.transaction.amount) max_trans_amount from (select uid, flatten(transactions) transaction from `data.json`) t1) t3 inner join (select uid, flatten(transactions) transaction from `data.json`) t2 on t3.max_trans_amount = t2.transaction.amount order by t2.uid limit 1; -select * from dfs.ctas_flatten.`join11_ctas`; -drop table dfs.ctas_flatten.`join11_ctas`; +select * from dfs_test.ctas_flatten.`join11_ctas`; +drop table dfs_test.ctas_flatten.`join11_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/join6.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/join6.q index 097655768..e6370f9da 100644 --- 
a/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/join6.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/hash-join/join6.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`join6_ctas` as select flatten(sub1.lst_lst[0]) from (select t1.lst_lst lst_lst from `data.json` t1 inner join `data.json` t2 on t1.uid=t2.uid) sub1; -select * from dfs.ctas_flatten.`join6_ctas`; -drop table dfs.ctas_flatten.`join6_ctas`; +create table dfs_test.ctas_flatten.`join6_ctas` as select flatten(sub1.lst_lst[0]) from (select t1.lst_lst lst_lst from `data.json` t1 inner join `data.json` t2 on t1.uid=t2.uid) sub1; +select * from dfs_test.ctas_flatten.`join6_ctas`; +drop table dfs_test.ctas_flatten.`join6_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/nested1_DRILL-2254.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/nested1_DRILL-2254.q index 4c401fa88..3390d7b73 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/nested1_DRILL-2254.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/nested1_DRILL-2254.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`nested1_DRILL-2254_ctas` as select uid, flatten(flatten(lst_lst)) from `data1.json`; -select * from dfs.ctas_flatten.`nested1_DRILL-2254_ctas`; -drop table dfs.ctas_flatten.`nested1_DRILL-2254_ctas`; +create table dfs_test.ctas_flatten.`nested1_DRILL-2254_ctas` as select uid, flatten(flatten(lst_lst)) from `data1.json`; +select * from dfs_test.ctas_flatten.`nested1_DRILL-2254_ctas`; +drop table dfs_test.ctas_flatten.`nested1_DRILL-2254_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby0.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby0.q index 60e766a52..84051e3ac 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby0.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby0.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`orderby0_ctas` as select d.uid, 
flatten(d.events) from `data.json` d order by d.uid; -select * from dfs.ctas_flatten.`orderby0_ctas`; -drop table dfs.ctas_flatten.`orderby0_ctas`; +create table dfs_test.ctas_flatten.`orderby0_ctas` as select d.uid, flatten(d.events) from `data.json` d order by d.uid; +select * from dfs_test.ctas_flatten.`orderby0_ctas`; +drop table dfs_test.ctas_flatten.`orderby0_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby1.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby1.q index f161b0304..72729b799 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby1.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby1.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`orderby1_ctas` as select * from (select d.uid uid, flatten(d.events) evnts from `data.json` d order by d.uid) s order by s.evnts.event_time; -select * from dfs.ctas_flatten.`orderby1_ctas`; -drop table dfs.ctas_flatten.`orderby1_ctas`; +create table dfs_test.ctas_flatten.`orderby1_ctas` as select * from (select d.uid uid, flatten(d.events) evnts from `data.json` d order by d.uid) s order by s.evnts.event_time; +select * from dfs_test.ctas_flatten.`orderby1_ctas`; +drop table dfs_test.ctas_flatten.`orderby1_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby15_DRILL-2254.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby15_DRILL-2254.q index 4946e7583..c3bf4656a 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby15_DRILL-2254.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby15_DRILL-2254.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`orderby15_DRILL-2254_ctas` as select s1.* from (select uid, flatten(d.lst_lst[1]) lst1, flatten(d.lst_lst) lst from `data.json` d) s1 order by lst1, s1.lst[4]; -select * from dfs.ctas_flatten.`orderby15_DRILL-2254_ctas`; -drop table dfs.ctas_flatten.`orderby15_DRILL-2254_ctas`; +create table 
dfs_test.ctas_flatten.`orderby15_DRILL-2254_ctas` as select s1.* from (select uid, flatten(d.lst_lst[1]) lst1, flatten(d.lst_lst) lst from `data.json` d) s1 order by lst1, s1.lst[4]; +select * from dfs_test.ctas_flatten.`orderby15_DRILL-2254_ctas`; +drop table dfs_test.ctas_flatten.`orderby15_DRILL-2254_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby16_DRILL-2254.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby16_DRILL-2254.q index 8ebe051d9..8888dd0b9 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby16_DRILL-2254.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby16_DRILL-2254.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`orderby16_DRILL-2254_ctas` as select * from (select uid, flatten(d.lst_lst[1]) lst1, flatten(d.lst_lst) lst from `data.json` d) s1 order by s1.lst[3], s1.lst1; -select * from dfs.ctas_flatten.`orderby16_DRILL-2254_ctas`; -drop table dfs.ctas_flatten.`orderby16_DRILL-2254_ctas`; +create table dfs_test.ctas_flatten.`orderby16_DRILL-2254_ctas` as select * from (select uid, flatten(d.lst_lst[1]) lst1, flatten(d.lst_lst) lst from `data.json` d) s1 order by s1.lst[3], s1.lst1; +select * from dfs_test.ctas_flatten.`orderby16_DRILL-2254_ctas`; +drop table dfs_test.ctas_flatten.`orderby16_DRILL-2254_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby22_DRILL-2254.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby22_DRILL-2254.q index 1529ed7ab..e8f05791b 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby22_DRILL-2254.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby22_DRILL-2254.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`orderby22_DRILL-2254_ctas` as select uid, flatten(d.lst_lst[1]) lst1, flatten(d.lst_lst[0]) lst0, flatten(d.lst_lst) lst from `data.json` d order by d.lst_lst[0][0], d.events[0].event_time; -select * from 
dfs.ctas_flatten.`orderby22_DRILL-2254_ctas`; -drop table dfs.ctas_flatten.`orderby22_DRILL-2254_ctas`; +create table dfs_test.ctas_flatten.`orderby22_DRILL-2254_ctas` as select uid, flatten(d.lst_lst[1]) lst1, flatten(d.lst_lst[0]) lst0, flatten(d.lst_lst) lst from `data.json` d order by d.lst_lst[0][0], d.events[0].event_time; +select * from dfs_test.ctas_flatten.`orderby22_DRILL-2254_ctas`; +drop table dfs_test.ctas_flatten.`orderby22_DRILL-2254_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby4.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby4.q index 4621f6f55..fd8581267 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby4.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby4.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`orderby4_ctas` as select * from (select d.uid uid, flatten(d.events) evnts from `data.json` d order by d.uid) s order by s.evnts.campaign_id, s.evnts.event_time; -select * from dfs.ctas_flatten.`orderby4_ctas`; -drop table dfs.ctas_flatten.`orderby4_ctas`; +create table dfs_test.ctas_flatten.`orderby4_ctas` as select * from (select d.uid uid, flatten(d.events) evnts from `data.json` d order by d.uid) s order by s.evnts.campaign_id, s.evnts.event_time; +select * from dfs_test.ctas_flatten.`orderby4_ctas`; +drop table dfs_test.ctas_flatten.`orderby4_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby5.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby5.q index ebd78799c..ee7b969d9 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby5.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby5.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`orderby5_ctas` as select * from (select d.uid uid, flatten(d.events) evnts from `data.json` d) s order by s.uid, s.evnts.campaign_id, s.evnts.event_time; -select * from dfs.ctas_flatten.`orderby5_ctas`; -drop table 
dfs.ctas_flatten.`orderby5_ctas`; +create table dfs_test.ctas_flatten.`orderby5_ctas` as select * from (select d.uid uid, flatten(d.events) evnts from `data.json` d) s order by s.uid, s.evnts.campaign_id, s.evnts.event_time; +select * from dfs_test.ctas_flatten.`orderby5_ctas`; +drop table dfs_test.ctas_flatten.`orderby5_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby7.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby7.q index 949c3ec92..b45eb12ab 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby7.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/orderby7.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`orderby7_ctas` as select s.uid from (select d.uid, flatten(d.map.rm) rms from `data.json` d) s order by s.rms.rptd[1].d; -select * from dfs.ctas_flatten.`orderby7_ctas`; -drop table dfs.ctas_flatten.`orderby7_ctas`; +create table dfs_test.ctas_flatten.`orderby7_ctas` as select s.uid from (select d.uid, flatten(d.map.rm) rms from `data.json` d) s order by s.rms.rptd[1].d; +select * from dfs_test.ctas_flatten.`orderby7_ctas`; +drop table dfs_test.ctas_flatten.`orderby7_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_flatten/2rows/q_DRILL-2292.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/q_DRILL-2292.q index a7e127e65..c52f1a693 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/q_DRILL-2292.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/q_DRILL-2292.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`DRILL-2292_ctas` as select d.map from `DRILL-2292.json` d; -select * from dfs.ctas_flatten.`DRILL-2292_ctas`; -drop table dfs.ctas_flatten.`DRILL-2292_ctas`; +create table dfs_test.ctas_flatten.`DRILL-2292_ctas` as select d.map from `DRILL-2292.json` d; +select * from dfs_test.ctas_flatten.`DRILL-2292_ctas`; +drop table dfs_test.ctas_flatten.`DRILL-2292_ctas`; diff --git 
a/framework/resources/Functional/ctas/ctas_flatten/2rows/withinGroupBy9.q b/framework/resources/Functional/ctas/ctas_flatten/2rows/withinGroupBy9.q index e8c65565f..fdee4691a 100644 --- a/framework/resources/Functional/ctas/ctas_flatten/2rows/withinGroupBy9.q +++ b/framework/resources/Functional/ctas/ctas_flatten/2rows/withinGroupBy9.q @@ -1,3 +1,3 @@ -create table dfs.ctas_flatten.`withinGroupBy9_ctas` as select uid, MAX(l1+l2) from (select uid,flatten(lst_lst[1]) l1, flatten(lst_lst[0]) l2 from `data.json`) sub group by uid, l1, l2; -select * from dfs.ctas_flatten.`withinGroupBy9_ctas`; -drop table dfs.ctas_flatten.`withinGroupBy9_ctas`; +create table dfs_test.ctas_flatten.`withinGroupBy9_ctas` as select uid, MAX(l1+l2) from (select uid,flatten(lst_lst[1]) l1, flatten(lst_lst[0]) l2 from `data.json`) sub group by uid, l1, l2; +select * from dfs_test.ctas_flatten.`withinGroupBy9_ctas`; +drop table dfs_test.ctas_flatten.`withinGroupBy9_ctas`; diff --git a/framework/resources/Functional/ctas/ctas_joins_aggregates/README b/framework/resources/Functional/ctas/ctas_joins_aggregates/README index a922ac8a1..3b17e9ade 100644 --- a/framework/resources/Functional/ctas/ctas_joins_aggregates/README +++ b/framework/resources/Functional/ctas/ctas_joins_aggregates/README @@ -140,7 +140,7 @@ select sum(c_bigint) , count(c_date) from - dfs.`ctas`.`j4` + dfs_test.`ctas`.`j4` group by c_boolean, c_timestamp diff --git a/framework/resources/Functional/ctas/ctas_joins_aggregates/ctas.json b/framework/resources/Functional/ctas/ctas_joins_aggregates/ctas.json index bd147931e..78db15e63 100644 --- a/framework/resources/Functional/ctas/ctas_joins_aggregates/ctas.json +++ b/framework/resources/Functional/ctas/ctas_joins_aggregates/ctas.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.ctas_parquet", + "schema": "dfs_test.ctas_parquet", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git 
a/framework/resources/Functional/ctas/ctas_joins_aggregates/drill_2293.sql b/framework/resources/Functional/ctas/ctas_joins_aggregates/drill_2293.sql index a8bb0b82f..199f1a727 100644 --- a/framework/resources/Functional/ctas/ctas_joins_aggregates/drill_2293.sql +++ b/framework/resources/Functional/ctas/ctas_joins_aggregates/drill_2293.sql @@ -1,3 +1,3 @@ -CREATE TABLE rep_map AS SELECT d.map FROM dfs.`/drill/testdata/ctas/drill_2293.json` d; -SELECT * FROM dfs.ctas_parquet.`rep_map`; -DROP TABLE dfs.ctas_parquet.`rep_map`; +CREATE TABLE rep_map AS SELECT d.map FROM dfs_test.`/drill/testdata/ctas/drill_2293.json` d; +SELECT * FROM dfs_test.ctas_parquet.`rep_map`; +DROP TABLE dfs_test.ctas_parquet.`rep_map`; diff --git a/framework/resources/Functional/cttas/cttas.json b/framework/resources/Functional/cttas/cttas.json index c8f22008a..9c6149aa9 100644 --- a/framework/resources/Functional/cttas/cttas.json +++ b/framework/resources/Functional/cttas/cttas.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.Join", + "schema": "dfs_test.Join", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/cttas/cttas_23.sql b/framework/resources/Functional/cttas/cttas_23.sql index 2636e1a76..a88ea2576 100644 --- a/framework/resources/Functional/cttas/cttas_23.sql +++ b/framework/resources/Functional/cttas/cttas_23.sql @@ -1,3 +1,3 @@ -CREATE TEMPORARY TABLE temp_tbl_23 PARTITION BY ( c1 ) as SELECT * FROM dfs.`Join`.ltbl; +CREATE TEMPORARY TABLE temp_tbl_23 PARTITION BY ( c1 ) as SELECT * FROM dfs_test.`Join`.ltbl; SELECT * FROM dfs.tmp.temp_tbl_23; DROP TABLE dfs.tmp.temp_tbl_23; diff --git a/framework/resources/Functional/cttas/cttas_24.sql b/framework/resources/Functional/cttas/cttas_24.sql index 16712efdc..8719c6b39 100644 --- a/framework/resources/Functional/cttas/cttas_24.sql +++ b/framework/resources/Functional/cttas/cttas_24.sql @@ -1,4 +1,4 @@ -use dfs.drillTestDir; -CREATE TEMPORARY 
TABLE temp_tbl_24 AS SELECT * FROM dfs.`Join`.ltbl; +use dfs_test.drillTestDir; +CREATE TEMPORARY TABLE temp_tbl_24 AS SELECT * FROM dfs_test.`Join`.ltbl; SELECT count(*) FROM dfs.tmp.temp_tbl_24; DROP TABLE dfs.tmp.temp_tbl_24; diff --git a/framework/resources/Functional/data-shapes/more-columns/morecolumns.json b/framework/resources/Functional/data-shapes/more-columns/morecolumns.json index 525ee3afe..4360e6d5d 100644 --- a/framework/resources/Functional/data-shapes/more-columns/morecolumns.json +++ b/framework/resources/Functional/data-shapes/more-columns/morecolumns.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.moreColumnsText", + "schema": "dfs_test.moreColumnsText", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/data-shapes/wide-columns/5000/1000rows/json/widestrings.json b/framework/resources/Functional/data-shapes/wide-columns/5000/1000rows/json/widestrings.json new file mode 100644 index 000000000..aae29bedb --- /dev/null +++ b/framework/resources/Functional/data-shapes/wide-columns/5000/1000rows/json/widestrings.json @@ -0,0 +1,27 @@ +{ + "testId": "widestrings-parquet-5000width-1000rows-json", + "type": "group", + "description": "Test wide-columns in drill using parquet as the format ", + "submit-type": "jdbc", + "categories": [ + "functional" + ], + "matrices": [ + { + "query-file": ".*.q", + "schema": "dfs_test.wideStringsParquet5000Width1000rows", + "output-format": "tsv", + "expected-file": ".*.e_tsv", + "verification-type": [ + "in-memory" + ] + } + ], + "datasources": [ + { + "mode": "cp", + "src": "Datasources/data-shapes/wide-columns/5000/1000rows", + "dest": "data-shapes/wide-columns/5000/1000rows" + } + ] +} diff --git a/framework/resources/Functional/data-shapes/wide-columns/5000/1000rows/parquet/widestrings.json b/framework/resources/Functional/data-shapes/wide-columns/5000/1000rows/parquet/widestrings.json index f1440f335..41170e31c 100644 
--- a/framework/resources/Functional/data-shapes/wide-columns/5000/1000rows/parquet/widestrings.json +++ b/framework/resources/Functional/data-shapes/wide-columns/5000/1000rows/parquet/widestrings.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.wideStringsParquet5000Width1000rows", + "schema": "dfs_test.wideStringsParquet5000Width1000rows", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/data-shapes/wide-columns/5000/1000rows/text/widestrings.json b/framework/resources/Functional/data-shapes/wide-columns/5000/1000rows/text/widestrings.json new file mode 100644 index 000000000..b0c76f4b9 --- /dev/null +++ b/framework/resources/Functional/data-shapes/wide-columns/5000/1000rows/text/widestrings.json @@ -0,0 +1,27 @@ +{ + "testId": "widestrings-parquet-5000width-1000rows-text", + "type": "group", + "description": "Test wide-columns in drill using parquet as the format ", + "submit-type": "jdbc", + "categories": [ + "functional" + ], + "matrices": [ + { + "query-file": ".*.q", + "schema": "dfs_test.wideStringsParquet5000Width1000rows", + "output-format": "tsv", + "expected-file": ".*.e_tsv", + "verification-type": [ + "in-memory" + ] + } + ], + "datasources": [ + { + "mode": "cp", + "src": "Datasources/data-shapes/wide-columns/5000/1000rows", + "dest": "data-shapes/wide-columns/5000/1000rows" + } + ] +} diff --git a/framework/resources/Functional/data-shapes/wide-columns/general/widestrings.json b/framework/resources/Functional/data-shapes/wide-columns/general/widestrings.json index 73b7b271f..883cc4a34 100644 --- a/framework/resources/Functional/data-shapes/wide-columns/general/widestrings.json +++ b/framework/resources/Functional/data-shapes/wide-columns/general/widestrings.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": ".*.e_tsv", 
"verification-type": [ diff --git a/framework/resources/Functional/date_trunc/dt_trunc.json.failing b/framework/resources/Functional/date_trunc/dt_trunc.json.failing index 3df7cf6e8..252a207ef 100644 --- a/framework/resources/Functional/date_trunc/dt_trunc.json.failing +++ b/framework/resources/Functional/date_trunc/dt_trunc.json.failing @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/datetime/datetime-basic.json b/framework/resources/Functional/datetime/datetime-basic.json index 411d35370..dcd14bd68 100644 --- a/framework/resources/Functional/datetime/datetime-basic.json +++ b/framework/resources/Functional/datetime/datetime-basic.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirDatetime", + "schema": "dfs_test.drillTestDirDatetime", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/decimal/decimal.json b/framework/resources/Functional/decimal/decimal.json index b11a6a9a8..d5dbc5361 100644 --- a/framework/resources/Functional/decimal/decimal.json +++ b/framework/resources/Functional/decimal/decimal.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirNumerical", + "schema": "dfs_test.drillTestDirNumerical", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/droptable/droptable.json b/framework/resources/Functional/droptable/droptable.json index 9e7292f92..017798bbd 100644 --- a/framework/resources/Functional/droptable/droptable.json +++ b/framework/resources/Functional/droptable/droptable.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.droptable", + "schema": "dfs_test.droptable", "output-format": "tsv", "expected-file": ".*.e", 
"verification-type": [ diff --git a/framework/resources/Functional/droptable/droptable11.q b/framework/resources/Functional/droptable/droptable11.q index e11df1aed..bc1347fcd 100644 --- a/framework/resources/Functional/droptable/droptable11.q +++ b/framework/resources/Functional/droptable/droptable11.q @@ -1 +1 @@ -drop table dfs.droptable.droptable11; +drop table dfs_test.droptable.droptable11; diff --git a/framework/resources/Functional/droptable/droptable12.q b/framework/resources/Functional/droptable/droptable12.q index ffe254f67..40d75eb90 100644 --- a/framework/resources/Functional/droptable/droptable12.q +++ b/framework/resources/Functional/droptable/droptable12.q @@ -1 +1 @@ -drop table dfs.droptable.`droptable12/complex.json`; +drop table dfs_test.droptable.`droptable12/complex.json`; diff --git a/framework/resources/Functional/droptable/droptable13.q_negative b/framework/resources/Functional/droptable/droptable13.q_negative index 09bede8a7..a3ac47131 100644 --- a/framework/resources/Functional/droptable/droptable13.q_negative +++ b/framework/resources/Functional/droptable/droptable13.q_negative @@ -1,3 +1,3 @@ use hive; drop table droptable.droptable13_parquet; -use dfs; +use dfs_test; diff --git a/framework/resources/Functional/droptable/droptable14.q b/framework/resources/Functional/droptable/droptable14.q index bcab45425..183df8fe5 100644 --- a/framework/resources/Functional/droptable/droptable14.q +++ b/framework/resources/Functional/droptable/droptable14.q @@ -1 +1 @@ -drop table dfs.droptable.`droptable14/t.dat`; +drop table dfs_test.droptable.`droptable14/t.dat`; diff --git a/framework/resources/Functional/droptable/droptable15.q b/framework/resources/Functional/droptable/droptable15.q index 54eb2817c..0a035ea4b 100644 --- a/framework/resources/Functional/droptable/droptable15.q +++ b/framework/resources/Functional/droptable/droptable15.q @@ -1 +1 @@ -drop table dfs.droptable.droptable15; +drop table dfs_test.droptable.droptable15; diff --git 
a/framework/resources/Functional/droptable/droptable25.q b/framework/resources/Functional/droptable/droptable25.q index 64c2239be..84a6fb3ff 100644 --- a/framework/resources/Functional/droptable/droptable25.q +++ b/framework/resources/Functional/droptable/droptable25.q @@ -1,3 +1,3 @@ -use dfs; +use dfs_test; drop table droptable.droptable25; -use dfs.droptable; +use dfs_test.droptable; diff --git a/framework/resources/Functional/droptable/droptable28.q_negative b/framework/resources/Functional/droptable/droptable28.q_negative index e01e2fe8c..d14dd5079 100644 --- a/framework/resources/Functional/droptable/droptable28.q_negative +++ b/framework/resources/Functional/droptable/droptable28.q_negative @@ -1 +1 @@ -drop table dfs.droptable.notfound; +drop table dfs_test.droptable.notfound; diff --git a/framework/resources/Functional/droptable/droptable29.q b/framework/resources/Functional/droptable/droptable29.q index ad4733785..bf7436286 100644 --- a/framework/resources/Functional/droptable/droptable29.q +++ b/framework/resources/Functional/droptable/droptable29.q @@ -1 +1 @@ -drop table dfs.droptable.droptable29; +drop table dfs_test.droptable.droptable29; diff --git a/framework/resources/Functional/droptable/droptable30.q b/framework/resources/Functional/droptable/droptable30.q index c746bbc18..75e132b32 100644 --- a/framework/resources/Functional/droptable/droptable30.q +++ b/framework/resources/Functional/droptable/droptable30.q @@ -1,3 +1,3 @@ -use dfs; +use dfs_test; drop table droptable.droptable30; -use dfs.droptable; +use dfs_test.droptable; diff --git a/framework/resources/Functional/droptable/droptable31.q b/framework/resources/Functional/droptable/droptable31.q index c064c5a27..f433258af 100644 --- a/framework/resources/Functional/droptable/droptable31.q +++ b/framework/resources/Functional/droptable/droptable31.q @@ -1 +1 @@ -drop table dfs.droptable.`droptable31/t.dat`; +drop table dfs_test.droptable.`droptable31/t.dat`; diff --git 
a/framework/resources/Functional/droptable/droptable32v.e b/framework/resources/Functional/droptable/droptable32v.e index 08959ecaf..b23fb38af 100644 --- a/framework/resources/Functional/droptable/droptable32v.e +++ b/framework/resources/Functional/droptable/droptable32v.e @@ -1 +1 @@ -true View [droptable32v] deleted successfully from schema [dfs.droptable]. +true View [droptable32v] deleted successfully from schema [dfs_test.droptable]. diff --git a/framework/resources/Functional/droptable/droptable4.q b/framework/resources/Functional/droptable/droptable4.q index 230037244..53699ce7f 100644 --- a/framework/resources/Functional/droptable/droptable4.q +++ b/framework/resources/Functional/droptable/droptable4.q @@ -1 +1 @@ -drop table dfs.droptable.droptable4; +drop table dfs_test.droptable.droptable4; diff --git a/framework/resources/Functional/droptable/droptable6.q b/framework/resources/Functional/droptable/droptable6.q index 28da08aa2..7816f08ce 100644 --- a/framework/resources/Functional/droptable/droptable6.q +++ b/framework/resources/Functional/droptable/droptable6.q @@ -1 +1 @@ -drop table dfs.droptable.droptable6; +drop table dfs_test.droptable.droptable6; diff --git a/framework/resources/Functional/droptable/droptable7.q_negative b/framework/resources/Functional/droptable/droptable7.q_negative index 7015ffdbb..96235b0e6 100644 --- a/framework/resources/Functional/droptable/droptable7.q_negative +++ b/framework/resources/Functional/droptable/droptable7.q_negative @@ -1 +1 @@ -drop table dfs.droptable.droptable7; +drop table dfs_test.droptable.droptable7; diff --git a/framework/resources/Functional/droptable/droptable8.q_negative b/framework/resources/Functional/droptable/droptable8.q_negative index c765047d1..ff4565b20 100644 --- a/framework/resources/Functional/droptable/droptable8.q_negative +++ b/framework/resources/Functional/droptable/droptable8.q_negative @@ -1 +1 @@ -drop table dfs.droptable.droptable8; +drop table dfs_test.droptable.droptable8; diff 
--git a/framework/resources/Functional/droptable/droptable_negative.json b/framework/resources/Functional/droptable/droptable_negative.json index 46f513be4..bf5cc019c 100644 --- a/framework/resources/Functional/droptable/droptable_negative.json +++ b/framework/resources/Functional/droptable/droptable_negative.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": ".*.q_negative", - "schema": "dfs.droptable", + "schema": "dfs_test.droptable", "output-format": "tsv", "expected-file": ".*.e_negative", "verification-type": [ diff --git a/framework/resources/Functional/droptable/droptableif11.q b/framework/resources/Functional/droptable/droptableif11.q index fbefd414d..bd1482894 100644 --- a/framework/resources/Functional/droptable/droptableif11.q +++ b/framework/resources/Functional/droptable/droptableif11.q @@ -1 +1 @@ -drop table dfs.droptable.droptableif11; +drop table dfs_test.droptable.droptableif11; diff --git a/framework/resources/Functional/droptable/droptableif12.q b/framework/resources/Functional/droptable/droptableif12.q index d6f79ab56..5f639ce3c 100644 --- a/framework/resources/Functional/droptable/droptableif12.q +++ b/framework/resources/Functional/droptable/droptableif12.q @@ -1 +1 @@ -drop table if exists dfs.droptable.`droptableif12/complex.json`; +drop table if exists dfs_test.droptable.`droptableif12/complex.json`; diff --git a/framework/resources/Functional/droptable/droptableif14.q b/framework/resources/Functional/droptable/droptableif14.q index 8e26de2f4..40a810e26 100644 --- a/framework/resources/Functional/droptable/droptableif14.q +++ b/framework/resources/Functional/droptable/droptableif14.q @@ -1 +1 @@ -drop table dfs.droptable.`droptableif14/t.dat`; +drop table dfs_test.droptable.`droptableif14/t.dat`; diff --git a/framework/resources/Functional/droptable/droptableif15.q b/framework/resources/Functional/droptable/droptableif15.q index 2cf099652..3a74d377b 100644 --- a/framework/resources/Functional/droptable/droptableif15.q +++ 
b/framework/resources/Functional/droptable/droptableif15.q @@ -1 +1 @@ -drop table if exists dfs.droptable.droptableif15; +drop table if exists dfs_test.droptable.droptableif15; diff --git a/framework/resources/Functional/droptable/droptableif25.q b/framework/resources/Functional/droptable/droptableif25.q index 6949f25d3..f4cc9ba1a 100644 --- a/framework/resources/Functional/droptable/droptableif25.q +++ b/framework/resources/Functional/droptable/droptableif25.q @@ -1,3 +1,3 @@ -use dfs; +use dfs_test; drop table if exists droptable.droptableif25; -use dfs.droptable; +use dfs_test.droptable; diff --git a/framework/resources/Functional/droptable/droptableif28.q_negative b/framework/resources/Functional/droptable/droptableif28.q_negative index a4c440eae..ee58cfed4 100644 --- a/framework/resources/Functional/droptable/droptableif28.q_negative +++ b/framework/resources/Functional/droptable/droptableif28.q_negative @@ -1 +1 @@ -drop table if exists dfs.droptable.notfound; +drop table if exists dfs_test.droptable.notfound; diff --git a/framework/resources/Functional/droptable/droptableif29.q b/framework/resources/Functional/droptable/droptableif29.q index 6be813063..f9f460810 100644 --- a/framework/resources/Functional/droptable/droptableif29.q +++ b/framework/resources/Functional/droptable/droptableif29.q @@ -1 +1 @@ -drop table if exists dfs.droptable.droptableif29; +drop table if exists dfs_test.droptable.droptableif29; diff --git a/framework/resources/Functional/droptable/droptableif30.q b/framework/resources/Functional/droptable/droptableif30.q index 2e20bb3c2..81e07fbb2 100644 --- a/framework/resources/Functional/droptable/droptableif30.q +++ b/framework/resources/Functional/droptable/droptableif30.q @@ -1,3 +1,3 @@ -use dfs; +use dfs_test; drop table if exists droptable.droptableif30; -use dfs.droptable; +use dfs_test.droptable; diff --git a/framework/resources/Functional/droptable/droptableif31.q b/framework/resources/Functional/droptable/droptableif31.q index 
4789092c1..172138d89 100644 --- a/framework/resources/Functional/droptable/droptableif31.q +++ b/framework/resources/Functional/droptable/droptableif31.q @@ -1 +1 @@ -drop table if exists dfs.droptable.`droptableif31/t.dat`; +drop table if exists dfs_test.droptable.`droptableif31/t.dat`; diff --git a/framework/resources/Functional/droptable/droptableif32v.e b/framework/resources/Functional/droptable/droptableif32v.e index e96a23fc9..0a0f5d371 100644 --- a/framework/resources/Functional/droptable/droptableif32v.e +++ b/framework/resources/Functional/droptable/droptableif32v.e @@ -1 +1 @@ -true View [droptableif32v] deleted successfully from schema [dfs.droptable]. +true View [droptableif32v] deleted successfully from schema [dfs_test.droptable]. diff --git a/framework/resources/Functional/droptable/droptableif4.q b/framework/resources/Functional/droptable/droptableif4.q index f29fb45c7..30a916929 100644 --- a/framework/resources/Functional/droptable/droptableif4.q +++ b/framework/resources/Functional/droptable/droptableif4.q @@ -1 +1 @@ -drop table if exists dfs.droptable.droptableif4; +drop table if exists dfs_test.droptable.droptableif4; diff --git a/framework/resources/Functional/droptable/droptableif6.q b/framework/resources/Functional/droptable/droptableif6.q index 9a2827c38..3834c90f9 100644 --- a/framework/resources/Functional/droptable/droptableif6.q +++ b/framework/resources/Functional/droptable/droptableif6.q @@ -1 +1 @@ -drop table if exists dfs.droptable.droptableif6; +drop table if exists dfs_test.droptable.droptableif6; diff --git a/framework/resources/Functional/droptable/droptableif7.q_negative b/framework/resources/Functional/droptable/droptableif7.q_negative index 08a4d07f6..2250e0556 100644 --- a/framework/resources/Functional/droptable/droptableif7.q_negative +++ b/framework/resources/Functional/droptable/droptableif7.q_negative @@ -1 +1 @@ -drop table if exists dfs.droptable.droptableif7; +drop table if exists dfs_test.droptable.droptableif7; 
diff --git a/framework/resources/Functional/droptable/droptableif8.q_negative b/framework/resources/Functional/droptable/droptableif8.q_negative index eb7eee0e0..f1d414d5b 100644 --- a/framework/resources/Functional/droptable/droptableif8.q_negative +++ b/framework/resources/Functional/droptable/droptableif8.q_negative @@ -1 +1 @@ -drop table if exists dfs.droptable.droptableif8; +drop table if exists dfs_test.droptable.droptableif8; diff --git a/framework/resources/Functional/explicit_cast/explicit_cast.json b/framework/resources/Functional/explicit_cast/explicit_cast.json index 2f7ed276e..fe31f9093 100644 --- a/framework/resources/Functional/explicit_cast/explicit_cast.json +++ b/framework/resources/Functional/explicit_cast/explicit_cast.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirNumerical", + "schema": "dfs_test.drillTestDirNumerical", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/filter/pushdown/data/data.json b/framework/resources/Functional/filter/pushdown/data/data.json index 526f186cb..8997a4d8a 100644 --- a/framework/resources/Functional/filter/pushdown/data/data.json +++ b/framework/resources/Functional/filter/pushdown/data/data.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/filter/pushdown/plan/plan.json b/framework/resources/Functional/filter/pushdown/plan/plan.json index 55edbf746..9d90aa120 100644 --- a/framework/resources/Functional/filter/pushdown/plan/plan.json +++ b/framework/resources/Functional/filter/pushdown/plan/plan.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git 
a/framework/resources/Functional/filter/selectivity/negative/plan.json b/framework/resources/Functional/filter/selectivity/negative/plan.json index 0b8f722ab..69793dd1c 100644 --- a/framework/resources/Functional/filter/selectivity/negative/plan.json +++ b/framework/resources/Functional/filter/selectivity/negative/plan.json @@ -7,7 +7,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.drillTestDirTpch01Parquet", + "schema": "dfs_test.drillTestDirTpch01Parquet", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/filter/selectivity/plan/plan.json b/framework/resources/Functional/filter/selectivity/plan/plan.json index 19b03eb35..78e5abe70 100644 --- a/framework/resources/Functional/filter/selectivity/plan/plan.json +++ b/framework/resources/Functional/filter/selectivity/plan/plan.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.drillTestDirTpch01Parquet", + "schema": "dfs_test.drillTestDirTpch01Parquet", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/flatten_operators/100000rows/flatten_operators.json b/framework/resources/Functional/flatten_operators/100000rows/flatten_operators.json index 33858fcc2..dd6c3c498 100644 --- a/framework/resources/Functional/flatten_operators/100000rows/flatten_operators.json +++ b/framework/resources/Functional/flatten_operators/100000rows/flatten_operators.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.flatten_operators_100000rows", + "schema": "dfs_test.flatten_operators_100000rows", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/flatten_operators/2rows/flatten_operators.json b/framework/resources/Functional/flatten_operators/2rows/flatten_operators.json index e81dcac6d..5efa62595 100644 --- 
a/framework/resources/Functional/flatten_operators/2rows/flatten_operators.json +++ b/framework/resources/Functional/flatten_operators/2rows/flatten_operators.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.flatten_operators", + "schema": "dfs_test.flatten_operators", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/flatten_operators/general/flatten_operators.json b/framework/resources/Functional/flatten_operators/general/flatten_operators.json index e0c83c962..81efcccec 100644 --- a/framework/resources/Functional/flatten_operators/general/flatten_operators.json +++ b/framework/resources/Functional/flatten_operators/general/flatten_operators.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.flatten_operators", + "schema": "dfs_test.flatten_operators", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/flatten_operators/general/jira1679.q b/framework/resources/Functional/flatten_operators/general/jira1679.q index 57feb7f7e..c1459d7cb 100644 --- a/framework/resources/Functional/flatten_operators/general/jira1679.q +++ b/framework/resources/Functional/flatten_operators/general/jira1679.q @@ -1 +1 @@ -with tmp as (select 1 as fixedcolumn, flatten(columns) as newcol from dfs.`/drill/testdata/flatten_operators/jira1679/b.csv`) select regexp_replace(cast(tmp.newcol as VARCHAR(4)), 'a', 'b') from tmp; +with tmp as (select 1 as fixedcolumn, flatten(columns) as newcol from dfs_test.`/drill/testdata/flatten_operators/jira1679/b.csv`) select regexp_replace(cast(tmp.newcol as VARCHAR(4)), 'a', 'b') from tmp; diff --git a/framework/resources/Functional/flatten_operators/general/q3.q b/framework/resources/Functional/flatten_operators/general/q3.q index b34e5d41e..e1ca67365 100644 --- a/framework/resources/Functional/flatten_operators/general/q3.q +++ 
b/framework/resources/Functional/flatten_operators/general/q3.q @@ -1 +1 @@ -select flatten(columns) from dfs.`/drill/testdata/views/dfs/text/rankings`; +select flatten(columns) from dfs_test.`/drill/testdata/views/dfs/text/rankings`; diff --git a/framework/resources/Functional/flatten_operators/negative/drill-2777.sql b/framework/resources/Functional/flatten_operators/negative/drill-2777.sql index 014f90528..587b2eb85 100644 --- a/framework/resources/Functional/flatten_operators/negative/drill-2777.sql +++ b/framework/resources/Functional/flatten_operators/negative/drill-2777.sql @@ -1 +1 @@ -create table dfs.ctas_flatten.`test` as select uid, flatten(lst_lst) lst, flatten(lst_lst[0]) from `data1.json` order by flatten(lst_lst[0]); +create table dfs_test.ctas_flatten.`test` as select uid, flatten(lst_lst) lst, flatten(lst_lst[0]) from `data1.json` order by flatten(lst_lst[0]); diff --git a/framework/resources/Functional/flatten_operators/negative/negative.json b/framework/resources/Functional/flatten_operators/negative/negative.json index 721275139..c75237a26 100644 --- a/framework/resources/Functional/flatten_operators/negative/negative.json +++ b/framework/resources/Functional/flatten_operators/negative/negative.json @@ -7,7 +7,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.flatten_operators", + "schema": "dfs_test.flatten_operators", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/flatten_operators/schema_changes/flatten_operators.json b/framework/resources/Functional/flatten_operators/schema_changes/flatten_operators.json index e53810694..ed025259b 100644 --- a/framework/resources/Functional/flatten_operators/schema_changes/flatten_operators.json +++ b/framework/resources/Functional/flatten_operators/schema_changes/flatten_operators.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.flatten_operators_schema_changes", + "schema": 
"dfs_test.flatten_operators_schema_changes", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/hive/hive_functions/drill_hive_chain3.q b/framework/resources/Functional/hive/hive_functions/drill_hive_chain3.q index 1b6bda913..8428723a6 100644 --- a/framework/resources/Functional/hive/hive_functions/drill_hive_chain3.q +++ b/framework/resources/Functional/hive/hive_functions/drill_hive_chain3.q @@ -1 +1 @@ -select concat(cast(locate('df', varchar_col, 5) as varchar(2)), 'hello') from dfs.`cross-sources`.`fewtypes_null.parquet`; +select concat(cast(locate('df', varchar_col, 5) as varchar(2)), 'hello') from dfs_test.`cross-sources`.`fewtypes_null.parquet`; diff --git a/framework/resources/Functional/hive/hive_native/hash-join/data/hive_native.json b/framework/resources/Functional/hive/hive_native/hash-join/data/hive_native.json index eb7204ff6..2bc20f9a9 100644 --- a/framework/resources/Functional/hive/hive_native/hash-join/data/hive_native.json +++ b/framework/resources/Functional/hive/hive_native/hash-join/data/hive_native.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/hive/hive_native/hash-join/data/nullable-parquet-hive-fulljoin.q b/framework/resources/Functional/hive/hive_native/hash-join/data/nullable-parquet-hive-fulljoin.q index b080b1b29..2540f6d5f 100644 --- a/framework/resources/Functional/hive/hive_native/hash-join/data/nullable-parquet-hive-fulljoin.q +++ b/framework/resources/Functional/hive/hive_native/hash-join/data/nullable-parquet-hive-fulljoin.q @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, 
p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p full outer join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_native/hash-join/data/nullable-parquet-hive-join.q b/framework/resources/Functional/hive/hive_native/hash-join/data/nullable-parquet-hive-join.q index 51d858ef5..ac278f76e 100644 --- a/framework/resources/Functional/hive/hive_native/hash-join/data/nullable-parquet-hive-join.q +++ b/framework/resources/Functional/hive/hive_native/hash-join/data/nullable-parquet-hive-join.q @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p inner join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_native/hash-join/data/nullable-parquet-hive-leftjoin.q b/framework/resources/Functional/hive/hive_native/hash-join/data/nullable-parquet-hive-leftjoin.q index 4f9301641..baa4a5835 100644 --- a/framework/resources/Functional/hive/hive_native/hash-join/data/nullable-parquet-hive-leftjoin.q +++ b/framework/resources/Functional/hive/hive_native/hash-join/data/nullable-parquet-hive-leftjoin.q @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, 
o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p left join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_native/hash-join/data/nullable-parquet-hive-unionall.q b/framework/resources/Functional/hive/hive_native/hash-join/data/nullable-parquet-hive-unionall.q index 334afc513..29a223af8 100644 --- a/framework/resources/Functional/hive/hive_native/hash-join/data/nullable-parquet-hive-unionall.q +++ b/framework/resources/Functional/hive/hive_native/hash-join/data/nullable-parquet-hive-unionall.q @@ -1,7 +1,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p union all select o.int_col, o.bigint_col, o.date_col, cast(o.time_col as time), o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col from hive.hive_native.fewtypes_null_hive o; diff --git a/framework/resources/Functional/hive/hive_native/hash-join/data/parquet-hive-fulljoin_DRILL-2707.q b/framework/resources/Functional/hive/hive_native/hash-join/data/parquet-hive-fulljoin_DRILL-2707.q index 0e0dea6bb..100231b29 100644 --- a/framework/resources/Functional/hive/hive_native/hash-join/data/parquet-hive-fulljoin_DRILL-2707.q +++ b/framework/resources/Functional/hive/hive_native/hash-join/data/parquet-hive-fulljoin_DRILL-2707.q @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, 
o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p full outer join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_native/hash-join/data/parquet-hive-join.q b/framework/resources/Functional/hive/hive_native/hash-join/data/parquet-hive-join.q index 115e9179c..0719a5281 100644 --- a/framework/resources/Functional/hive/hive_native/hash-join/data/parquet-hive-join.q +++ b/framework/resources/Functional/hive/hive_native/hash-join/data/parquet-hive-join.q @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p inner join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_native/hash-join/data/parquet-hive-unionall.q b/framework/resources/Functional/hive/hive_native/hash-join/data/parquet-hive-unionall.q index 9bd7884e8..6ec137ef6 100644 --- a/framework/resources/Functional/hive/hive_native/hash-join/data/parquet-hive-unionall.q +++ b/framework/resources/Functional/hive/hive_native/hash-join/data/parquet-hive-unionall.q @@ -1,7 +1,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col 
-from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p union all select o.int_col, o.bigint_col, o.date_col, cast(o.time_col as time), o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col from hive.hive_native.fewtypes_null_hive o; diff --git a/framework/resources/Functional/hive/hive_native/hash-join/plan/hive_native.json b/framework/resources/Functional/hive/hive_native/hash-join/plan/hive_native.json index 40e82b146..9b30c0b32 100644 --- a/framework/resources/Functional/hive/hive_native/hash-join/plan/hive_native.json +++ b/framework/resources/Functional/hive/hive_native/hash-join/plan/hive_native.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/hive/hive_native/hash-join/plan/nullable-parquet-hive-fulljoin.q b/framework/resources/Functional/hive/hive_native/hash-join/plan/nullable-parquet-hive-fulljoin.q index 0fc9de9e0..e654be810 100644 --- a/framework/resources/Functional/hive/hive_native/hash-join/plan/nullable-parquet-hive-fulljoin.q +++ b/framework/resources/Functional/hive/hive_native/hash-join/plan/nullable-parquet-hive-fulljoin.q @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p full outer join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git 
a/framework/resources/Functional/hive/hive_native/hash-join/plan/nullable-parquet-hive-join.q b/framework/resources/Functional/hive/hive_native/hash-join/plan/nullable-parquet-hive-join.q index 157905389..34902cc06 100644 --- a/framework/resources/Functional/hive/hive_native/hash-join/plan/nullable-parquet-hive-join.q +++ b/framework/resources/Functional/hive/hive_native/hash-join/plan/nullable-parquet-hive-join.q @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p inner join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_native/hash-join/plan/nullable-parquet-hive-leftjoin.q b/framework/resources/Functional/hive/hive_native/hash-join/plan/nullable-parquet-hive-leftjoin.q index 0956b2f9a..a9ee80f37 100644 --- a/framework/resources/Functional/hive/hive_native/hash-join/plan/nullable-parquet-hive-leftjoin.q +++ b/framework/resources/Functional/hive/hive_native/hash-join/plan/nullable-parquet-hive-leftjoin.q @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p left join hive.hive_native.fewtypes_null_hive 
o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_native/hash-join/plan/nullable-parquet-hive-unionall.q b/framework/resources/Functional/hive/hive_native/hash-join/plan/nullable-parquet-hive-unionall.q index 3efb46e9e..83135a9fb 100644 --- a/framework/resources/Functional/hive/hive_native/hash-join/plan/nullable-parquet-hive-unionall.q +++ b/framework/resources/Functional/hive/hive_native/hash-join/plan/nullable-parquet-hive-unionall.q @@ -1,7 +1,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p union all select o.int_col, o.bigint_col, o.date_col, cast(o.time_col as time), o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col from hive.hive_native.fewtypes_null_hive o; diff --git a/framework/resources/Functional/hive/hive_native/hash-join/plan/parquet-hive-fulljoin_DRILL-2707.q b/framework/resources/Functional/hive/hive_native/hash-join/plan/parquet-hive-fulljoin_DRILL-2707.q index 0a577dbdf..333f5fa22 100644 --- a/framework/resources/Functional/hive/hive_native/hash-join/plan/parquet-hive-fulljoin_DRILL-2707.q +++ b/framework/resources/Functional/hive/hive_native/hash-join/plan/parquet-hive-fulljoin_DRILL-2707.q @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from 
dfs_test.`cross-sources`.`fewtypes.parquet` p full outer join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_native/hash-join/plan/parquet-hive-join.q b/framework/resources/Functional/hive/hive_native/hash-join/plan/parquet-hive-join.q index 75a7cecc7..78e78de45 100644 --- a/framework/resources/Functional/hive/hive_native/hash-join/plan/parquet-hive-join.q +++ b/framework/resources/Functional/hive/hive_native/hash-join/plan/parquet-hive-join.q @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p inner join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_native/hash-join/plan/parquet-hive-unionall.q b/framework/resources/Functional/hive/hive_native/hash-join/plan/parquet-hive-unionall.q index eca20a4a7..5e02e3f8c 100644 --- a/framework/resources/Functional/hive/hive_native/hash-join/plan/parquet-hive-unionall.q +++ b/framework/resources/Functional/hive/hive_native/hash-join/plan/parquet-hive-unionall.q @@ -1,7 +1,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p union all select o.int_col, o.bigint_col, o.date_col, cast(o.time_col as time), o.timestamp_col, 
o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col from hive.hive_native.fewtypes_null_hive o; diff --git a/framework/resources/Functional/hive/hive_native/merge-join/data/hive_native.json b/framework/resources/Functional/hive/hive_native/merge-join/data/hive_native.json index 8732ae9c9..9966177e9 100644 --- a/framework/resources/Functional/hive/hive_native/merge-join/data/hive_native.json +++ b/framework/resources/Functional/hive/hive_native/merge-join/data/hive_native.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/hive/hive_native/merge-join/data/nullable-parquet-hive-join.q b/framework/resources/Functional/hive/hive_native/merge-join/data/nullable-parquet-hive-join.q index bdf20cee9..c1f8b5383 100644 --- a/framework/resources/Functional/hive/hive_native/merge-join/data/nullable-parquet-hive-join.q +++ b/framework/resources/Functional/hive/hive_native/merge-join/data/nullable-parquet-hive-join.q @@ -3,7 +3,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p inner join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_native/merge-join/data/nullable-parquet-hive-leftjoin.q b/framework/resources/Functional/hive/hive_native/merge-join/data/nullable-parquet-hive-leftjoin.q index eff6fc073..2adc291ec 100644 --- 
a/framework/resources/Functional/hive/hive_native/merge-join/data/nullable-parquet-hive-leftjoin.q +++ b/framework/resources/Functional/hive/hive_native/merge-join/data/nullable-parquet-hive-leftjoin.q @@ -3,7 +3,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p left join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_native/merge-join/data/nullable-parquet-hive-unionall.q b/framework/resources/Functional/hive/hive_native/merge-join/data/nullable-parquet-hive-unionall.q index ea6ea3caa..e8e3ca43a 100644 --- a/framework/resources/Functional/hive/hive_native/merge-join/data/nullable-parquet-hive-unionall.q +++ b/framework/resources/Functional/hive/hive_native/merge-join/data/nullable-parquet-hive-unionall.q @@ -2,7 +2,7 @@ alter session set `planner.enable_hashjoin` = false; alter session set `store.hive.optimize_scan_with_native_readers` = true; select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p union all select o.int_col, o.bigint_col, o.date_col, cast(o.time_col as time), o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col from hive.hive_native.fewtypes_null_hive o; diff --git a/framework/resources/Functional/hive/hive_native/merge-join/data/parquet-hive-join.q 
b/framework/resources/Functional/hive/hive_native/merge-join/data/parquet-hive-join.q index 1f117a7a8..bc6a7c4f4 100644 --- a/framework/resources/Functional/hive/hive_native/merge-join/data/parquet-hive-join.q +++ b/framework/resources/Functional/hive/hive_native/merge-join/data/parquet-hive-join.q @@ -3,7 +3,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p inner join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_native/merge-join/data/parquet-hive-unionall.q b/framework/resources/Functional/hive/hive_native/merge-join/data/parquet-hive-unionall.q index 5fcf5b3af..4fd75742e 100644 --- a/framework/resources/Functional/hive/hive_native/merge-join/data/parquet-hive-unionall.q +++ b/framework/resources/Functional/hive/hive_native/merge-join/data/parquet-hive-unionall.q @@ -2,7 +2,7 @@ alter session set `planner.enable_hashjoin` = false; alter session set `store.hive.optimize_scan_with_native_readers` = true; select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p union all select o.int_col, o.bigint_col, o.date_col, cast(o.time_col as time), o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col from hive.hive_native.fewtypes_null_hive o; diff --git a/framework/resources/Functional/hive/hive_native/merge-join/plan/hive_native.json 
b/framework/resources/Functional/hive/hive_native/merge-join/plan/hive_native.json index a0391a44c..ed8b20475 100644 --- a/framework/resources/Functional/hive/hive_native/merge-join/plan/hive_native.json +++ b/framework/resources/Functional/hive/hive_native/merge-join/plan/hive_native.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/hive/hive_native/merge-join/plan/nullable-parquet-hive-join.q b/framework/resources/Functional/hive/hive_native/merge-join/plan/nullable-parquet-hive-join.q index 9236a0cf0..f17163611 100644 --- a/framework/resources/Functional/hive/hive_native/merge-join/plan/nullable-parquet-hive-join.q +++ b/framework/resources/Functional/hive/hive_native/merge-join/plan/nullable-parquet-hive-join.q @@ -3,7 +3,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p inner join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_native/merge-join/plan/nullable-parquet-hive-leftjoin.q b/framework/resources/Functional/hive/hive_native/merge-join/plan/nullable-parquet-hive-leftjoin.q index ad59af41c..3f15fba69 100644 --- a/framework/resources/Functional/hive/hive_native/merge-join/plan/nullable-parquet-hive-leftjoin.q +++ b/framework/resources/Functional/hive/hive_native/merge-join/plan/nullable-parquet-hive-leftjoin.q @@ -3,7 +3,7 @@ alter session set 
`store.hive.optimize_scan_with_native_readers` = true; explain plan for select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p left join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_native/merge-join/plan/parquet-hive-join.q b/framework/resources/Functional/hive/hive_native/merge-join/plan/parquet-hive-join.q index 4fc1a5bc8..c556cbf1d 100644 --- a/framework/resources/Functional/hive/hive_native/merge-join/plan/parquet-hive-join.q +++ b/framework/resources/Functional/hive/hive_native/merge-join/plan/parquet-hive-join.q @@ -3,7 +3,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p inner join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_native/operators/data/hive_native.json b/framework/resources/Functional/hive/hive_native/operators/data/hive_native.json index 5cb0c1fb6..7c6d1bd7f 100644 --- a/framework/resources/Functional/hive/hive_native/operators/data/hive_native.json +++ b/framework/resources/Functional/hive/hive_native/operators/data/hive_native.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": 
"dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/hive/hive_native/operators/plan/hive_native.json b/framework/resources/Functional/hive/hive_native/operators/plan/hive_native.json index 77f1b80a6..ea9f3bf3b 100644 --- a/framework/resources/Functional/hive/hive_native/operators/plan/hive_native.json +++ b/framework/resources/Functional/hive/hive_native/operators/plan/hive_native.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/hive/hive_storage/compressed/rc_snappy_json_join.q.fail b/framework/resources/Functional/hive/hive_storage/compressed/rc_snappy_json_join.q.fail index eadf9f903..968074fb3 100644 --- a/framework/resources/Functional/hive/hive_storage/compressed/rc_snappy_json_join.q.fail +++ b/framework/resources/Functional/hive/hive_storage/compressed/rc_snappy_json_join.q.fail @@ -1,7 +1,7 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.json` p +from dfs_test.`cross-sources`.`fewtypes_null.json` p inner join hive.fewtypes_null_compressed_rc_snappy o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_storage/compressed/seq_snappy_json_join.q b/framework/resources/Functional/hive/hive_storage/compressed/seq_snappy_json_join.q index 81e5ec503..729881c52 100644 --- a/framework/resources/Functional/hive/hive_storage/compressed/seq_snappy_json_join.q +++ b/framework/resources/Functional/hive/hive_storage/compressed/seq_snappy_json_join.q @@ -1,7 +1,7 @@ select 
o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.json` p +from dfs_test.`cross-sources`.`fewtypes_null.json` p inner join hive.fewtypes_null_compressed_seq_snappy o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_storage/compressed/text_bz2_json_join.q b/framework/resources/Functional/hive/hive_storage/compressed/text_bz2_json_join.q index 13cab8207..7958dd0d7 100644 --- a/framework/resources/Functional/hive/hive_storage/compressed/text_bz2_json_join.q +++ b/framework/resources/Functional/hive/hive_storage/compressed/text_bz2_json_join.q @@ -1,7 +1,7 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.json` p +from dfs_test.`cross-sources`.`fewtypes_null.json` p inner join hive.fewtypes_null_compressed_bz2 o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_storage/compressed/text_gz_json_join.q b/framework/resources/Functional/hive/hive_storage/compressed/text_gz_json_join.q index 12b6d75c3..957f88053 100644 --- a/framework/resources/Functional/hive/hive_storage/compressed/text_gz_json_join.q +++ b/framework/resources/Functional/hive/hive_storage/compressed/text_gz_json_join.q @@ -1,7 +1,7 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, 
p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.json` p +from dfs_test.`cross-sources`.`fewtypes_null.json` p inner join hive.fewtypes_null_compressed_gz o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_storage/fileformats/parquet2.q.fail b/framework/resources/Functional/hive/hive_storage/fileformats/parquet2.q.fail index 024e30d47..1f18a1ae1 100644 --- a/framework/resources/Functional/hive/hive_storage/fileformats/parquet2.q.fail +++ b/framework/resources/Functional/hive/hive_storage/fileformats/parquet2.q.fail @@ -2,7 +2,7 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col from hive.hive_storage.fewtypes_parquet p -inner join dfs.`/drill/testdata/hive_storage/fewtypes_null_parquet` o +inner join dfs_test.`/drill/testdata/hive_storage/fewtypes_null_parquet` o on p.int_col=o.int_col and p.bigint_col = o.bigint_col and p.interval_col = o.interval_col diff --git a/framework/resources/Functional/hive/hive_storage/hivegenerated_parquet/dfs_parquet/hivegeneratedParquet.json b/framework/resources/Functional/hive/hive_storage/hivegenerated_parquet/dfs_parquet/hivegeneratedParquet.json index 139517eef..fe4aa4e9e 100644 --- a/framework/resources/Functional/hive/hive_storage/hivegenerated_parquet/dfs_parquet/hivegeneratedParquet.json +++ b/framework/resources/Functional/hive/hive_storage/hivegenerated_parquet/dfs_parquet/hivegeneratedParquet.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.hive_storage", + "schema": "dfs_test.hive_storage", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git 
a/framework/resources/Functional/hive/hive_storage/hivegenerated_parquet/dfs_parquet/q7.sql.drill4337 b/framework/resources/Functional/hive/hive_storage/hivegenerated_parquet/dfs_parquet/q7.sql.drill4337 index 45e911126..646a2b0cc 100644 --- a/framework/resources/Functional/hive/hive_storage/hivegenerated_parquet/dfs_parquet/q7.sql.drill4337 +++ b/framework/resources/Functional/hive/hive_storage/hivegenerated_parquet/dfs_parquet/q7.sql.drill4337 @@ -1,7 +1,7 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.json` p +from dfs_test.`cross-sources`.`fewtypes_null.json` p inner join hive1_fewtypes_null o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive/hive_storage/hivegenerated_parquet/hive_native/q7.sql.drill4337 b/framework/resources/Functional/hive/hive_storage/hivegenerated_parquet/hive_native/q7.sql.drill4337 index 653f27497..4f40fdc8a 100644 --- a/framework/resources/Functional/hive/hive_storage/hivegenerated_parquet/hive_native/q7.sql.drill4337 +++ b/framework/resources/Functional/hive/hive_storage/hivegenerated_parquet/hive_native/q7.sql.drill4337 @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.json` p +from dfs_test.`cross-sources`.`fewtypes_null.json` p inner join hive1_fewtypes_null_parquet o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git 
a/framework/resources/Functional/hive/hive_storage/hivegenerated_parquet/hiveplugin/q7.sql b/framework/resources/Functional/hive/hive_storage/hivegenerated_parquet/hiveplugin/q7.sql index 352b14e90..80c3fe65d 100644 --- a/framework/resources/Functional/hive/hive_storage/hivegenerated_parquet/hiveplugin/q7.sql +++ b/framework/resources/Functional/hive/hive_storage/hivegenerated_parquet/hiveplugin/q7.sql @@ -1,7 +1,7 @@ select o.int_col, o.bigint_col, o.date_col, o.time_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.json` p +from dfs_test.`cross-sources`.`fewtypes_null.json` p inner join hive1_fewtypes_null_parquet o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/hive_functions_nonhivedata/hive_functions.json b/framework/resources/Functional/hive_functions_nonhivedata/hive_functions.json index 286e20033..d5d790ae1 100644 --- a/framework/resources/Functional/hive_functions_nonhivedata/hive_functions.json +++ b/framework/resources/Functional/hive_functions_nonhivedata/hive_functions.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.hive_functions_nonhivedata", + "schema": "dfs_test.hive_functions_nonhivedata", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/identifiers/identifiers.json b/framework/resources/Functional/identifiers/identifiers.json index f9f9d0bb1..99acef86c 100644 --- a/framework/resources/Functional/identifiers/identifiers.json +++ b/framework/resources/Functional/identifiers/identifiers.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.identifiers", + "schema": "dfs_test.identifiers", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git 
a/framework/resources/Functional/impersonation/dfs/groupusershowfiles.q b/framework/resources/Functional/impersonation/dfs/groupusershowfiles.q index 0852b0bdd..4d5d10f4c 100644 --- a/framework/resources/Functional/impersonation/dfs/groupusershowfiles.q +++ b/framework/resources/Functional/impersonation/dfs/groupusershowfiles.q @@ -1,3 +1,3 @@ -use dfs.drillTestDirImpersonationThreeparquettables; +use dfs_test.drillTestDirImpersonationThreeparquettables; show files; -use dfs.drillTestDirImpersonationThreeparquettables; +use dfs_test.drillTestDirImpersonationThreeparquettables; diff --git a/framework/resources/Functional/impersonation/dfs/noaccessnestedviews.q b/framework/resources/Functional/impersonation/dfs/noaccessnestedviews.q index 9dd92ef01..ee7c93c82 100644 --- a/framework/resources/Functional/impersonation/dfs/noaccessnestedviews.q +++ b/framework/resources/Functional/impersonation/dfs/noaccessnestedviews.q @@ -1,3 +1,3 @@ -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; select v.c1, b.c2, b.c3, v.c4 from user1datanoaccessv1 v, user1datanoaccessb1 b where v.c1 = b.c2; select 1 from sys.version; diff --git a/framework/resources/Functional/impersonation/dfs/ownertwotables.q b/framework/resources/Functional/impersonation/dfs/ownertwotables.q index 20c42f6b3..386f9ca40 100644 --- a/framework/resources/Functional/impersonation/dfs/ownertwotables.q +++ b/framework/resources/Functional/impersonation/dfs/ownertwotables.q @@ -1,3 +1,3 @@ -use dfs; +use dfs_test; select t1.c_row, t1.c_groupby, t2.c_int, t2.c_bigint from `/drill/testdata/impersonation/t1.parquet` t1, `/drill/testdata/impersonation/t2.parquet` t2 where t1.c_row = t2.c_int; -use dfs; +use dfs_test; diff --git a/framework/resources/Functional/impersonation/dfs/secondaryuserctas.q b/framework/resources/Functional/impersonation/dfs/secondaryuserctas.q index 6c0e3977b..d88136dc5 100644 --- a/framework/resources/Functional/impersonation/dfs/secondaryuserctas.q +++ 
b/framework/resources/Functional/impersonation/dfs/secondaryuserctas.q @@ -1,8 +1,8 @@ alter session set `store.format` = 'json'; -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; create table user1data(c1, c2, c3, c4) as select c_row, c_int, c_float4, c_date from data; select count(*) from user1data; drop table user1data; -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; alter session set `store.format` = 'parquet'; diff --git a/framework/resources/Functional/impersonation/dfs/secondaryuserctasparquet.q b/framework/resources/Functional/impersonation/dfs/secondaryuserctasparquet.q index 864d6c02e..74a6e9fba 100644 --- a/framework/resources/Functional/impersonation/dfs/secondaryuserctasparquet.q +++ b/framework/resources/Functional/impersonation/dfs/secondaryuserctasparquet.q @@ -1,8 +1,8 @@ alter session set `store.format` = 'parquet'; -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; create table user1dataparquet(c1, c2, c3, c4) as select c_row, c_int, c_float4, c_date from data; select count(*) from user1dataparquet; drop table user1dataparquet; -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; alter session set `store.format` = 'parquet'; diff --git a/framework/resources/Functional/impersonation/dfs/secondaryusernestedviews.q b/framework/resources/Functional/impersonation/dfs/secondaryusernestedviews.q index 5d5a09a66..b65779d7b 100644 --- a/framework/resources/Functional/impersonation/dfs/secondaryusernestedviews.q +++ b/framework/resources/Functional/impersonation/dfs/secondaryusernestedviews.q @@ -1,5 +1,5 @@ alter session set `store.format` = 'json'; -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; create or replace view user1datav4(c1, c2, c3, c4) as select c_row, c_int, c_float4, c_date from data; create or replace view user1datav3(c1, c2, c3) as select c1, c2, c3 from user1datav4; create or replace view user1datav2(c1, 
c2) as select c1, c2 from user1datav3; @@ -9,6 +9,6 @@ select 1 from sys.version; select 1 from sys.version; select 1 from sys.version; select 1 from sys.version; -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; alter session set `store.format` = 'parquet'; diff --git a/framework/resources/Functional/impersonation/dfs/secondaryusernestedviewscsv.q b/framework/resources/Functional/impersonation/dfs/secondaryusernestedviewscsv.q index fd5362301..9fc87c8a6 100644 --- a/framework/resources/Functional/impersonation/dfs/secondaryusernestedviewscsv.q +++ b/framework/resources/Functional/impersonation/dfs/secondaryusernestedviewscsv.q @@ -1,4 +1,4 @@ -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; alter session set `store.format` = 'csv'; create table secondarydata(c1, c2, c3, c4) as select c_row, c_int, c_float4, c_date from data; create or replace view secondaryv4(c1, c2, c3, c4) as select columns[0], columns[1], columns[2], columns[3] from secondarydata; @@ -12,4 +12,4 @@ drop view secondaryv3; drop view secondaryv4; drop table secondarydata; alter session set `store.format` = 'parquet'; -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; diff --git a/framework/resources/Functional/impersonation/dfs/secondaryuserreplaceview.q b/framework/resources/Functional/impersonation/dfs/secondaryuserreplaceview.q index 0cb1eae00..fa1c804f1 100644 --- a/framework/resources/Functional/impersonation/dfs/secondaryuserreplaceview.q +++ b/framework/resources/Functional/impersonation/dfs/secondaryuserreplaceview.q @@ -1,7 +1,7 @@ -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; create or replace view datav(c1, c2, c3) as select c_row, c_int, c_date from data; create or replace view datav(c1, c2, c3, c4) as select c_row, c_int, c_float4, c_date from data; select c1, c2, c3, c4 from datav; select 1 from sys.version; select 1 from sys.version; -use dfs.drillTestDirImpersonation; 
+use dfs_test.drillTestDirImpersonation; diff --git a/framework/resources/Functional/impersonation/dfs/secondaryuserview.q b/framework/resources/Functional/impersonation/dfs/secondaryuserview.q index d11e23076..d4e18b6b9 100644 --- a/framework/resources/Functional/impersonation/dfs/secondaryuserview.q +++ b/framework/resources/Functional/impersonation/dfs/secondaryuserview.q @@ -1,3 +1,3 @@ -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; select * from impersonationdata; -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; diff --git a/framework/resources/Functional/impersonation/dfs/showtabledifferentacl.e b/framework/resources/Functional/impersonation/dfs/showtabledifferentacl.e index 90860962a..858383815 100644 --- a/framework/resources/Functional/impersonation/dfs/showtabledifferentacl.e +++ b/framework/resources/Functional/impersonation/dfs/showtabledifferentacl.e @@ -1,4 +1,4 @@ -dfs.drillTestDirImpersonationThreecsvtables user1datav1 -dfs.drillTestDirImpersonationThreecsvtables user1datav2 -dfs.drillTestDirImpersonationThreecsvtables user1datav3 -dfs.drillTestDirImpersonationThreecsvtables user1datav4 +dfs_test.drillTestDirImpersonationThreecsvtables user1datav1 +dfs_test.drillTestDirImpersonationThreecsvtables user1datav2 +dfs_test.drillTestDirImpersonationThreecsvtables user1datav3 +dfs_test.drillTestDirImpersonationThreecsvtables user1datav4 diff --git a/framework/resources/Functional/impersonation/dfs/showtabledifferentacl.q b/framework/resources/Functional/impersonation/dfs/showtabledifferentacl.q index 0f99d50fd..7a6790c1f 100644 --- a/framework/resources/Functional/impersonation/dfs/showtabledifferentacl.q +++ b/framework/resources/Functional/impersonation/dfs/showtabledifferentacl.q @@ -1,3 +1,3 @@ -use dfs.drillTestDirImpersonationThreecsvtables; +use dfs_test.drillTestDirImpersonationThreecsvtables; show tables; -use dfs.drillTestDirImpersonationThreecsvtables; +use 
dfs_test.drillTestDirImpersonationThreecsvtables; diff --git a/framework/resources/Functional/impersonation/dfs/superusernestedviews.q b/framework/resources/Functional/impersonation/dfs/superusernestedviews.q index daa37c3ff..9754185a8 100644 --- a/framework/resources/Functional/impersonation/dfs/superusernestedviews.q +++ b/framework/resources/Functional/impersonation/dfs/superusernestedviews.q @@ -1,4 +1,4 @@ -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; alter session set `store.format` = 'csv'; create table rootdata(c1, c2, c3, c4) as select c_row, c_int, c_float4, c_date from data; create or replace view v4(c1, c2, c3, c4) as select columns[0], columns[1], columns[2], columns[3] from rootdata; @@ -12,4 +12,4 @@ drop view v3; drop view v4; drop table rootdata; alter session set `store.format` = 'parquet'; -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; diff --git a/framework/resources/Functional/impersonation/dfs/superusershowfiles.q b/framework/resources/Functional/impersonation/dfs/superusershowfiles.q index 7de26d66c..ca7945334 100644 --- a/framework/resources/Functional/impersonation/dfs/superusershowfiles.q +++ b/framework/resources/Functional/impersonation/dfs/superusershowfiles.q @@ -1,4 +1,4 @@ -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; show files; -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; diff --git a/framework/resources/Functional/impersonation/dfs/superusershowtables.q b/framework/resources/Functional/impersonation/dfs/superusershowtables.q index b49eb30f8..73bfe7bd7 100644 --- a/framework/resources/Functional/impersonation/dfs/superusershowtables.q +++ b/framework/resources/Functional/impersonation/dfs/superusershowtables.q @@ -1,4 +1,4 @@ -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; show tables; -use dfs.drillTestDirImpersonation; +use dfs_test.drillTestDirImpersonation; diff --git 
a/framework/resources/Functional/impersonation/dfs/unknownusercreatecsvtable.q b/framework/resources/Functional/impersonation/dfs/unknownusercreatecsvtable.q index f7491d8ad..e2f1501bc 100644 --- a/framework/resources/Functional/impersonation/dfs/unknownusercreatecsvtable.q +++ b/framework/resources/Functional/impersonation/dfs/unknownusercreatecsvtable.q @@ -1 +1 @@ -create table unknowdata(c1, c2, c3, c4) as select c_row, c_int, c_float4, c_date from dfs.`/drill/testdata/impersonation/data`; +create table unknowdata(c1, c2, c3, c4) as select c_row, c_int, c_float4, c_date from dfs_test.`/drill/testdata/impersonation/data`; diff --git a/framework/resources/Functional/impersonation/dfs/unknownuserthreetables.json b/framework/resources/Functional/impersonation/dfs/unknownuserthreetables.json index aa106b07f..f9bf16cb6 100644 --- a/framework/resources/Functional/impersonation/dfs/unknownuserthreetables.json +++ b/framework/resources/Functional/impersonation/dfs/unknownuserthreetables.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": "unknownuserthreetables.q", - "schema": "dfs.drillTestDirImpersonation", + "schema": "dfs_test.drillTestDirImpersonation", "output-format": "tsv", "expected-file": "unknownuserthreetables.e", "username": "unknown", diff --git a/framework/resources/Functional/impersonation/dfs/unknownuserthreetables.q b/framework/resources/Functional/impersonation/dfs/unknownuserthreetables.q index 56cfad929..f479293dd 100644 --- a/framework/resources/Functional/impersonation/dfs/unknownuserthreetables.q +++ b/framework/resources/Functional/impersonation/dfs/unknownuserthreetables.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/impersonation/threetables`; +select count(*) from dfs_test.`/drill/testdata/impersonation/threetables`; diff --git a/framework/resources/Functional/implicit_cast/implicit_cast.json b/framework/resources/Functional/implicit_cast/implicit_cast.json index 64df939e1..30ec2220f 100644 --- 
a/framework/resources/Functional/implicit_cast/implicit_cast.json +++ b/framework/resources/Functional/implicit_cast/implicit_cast.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirNumerical", + "schema": "dfs_test.drillTestDirNumerical", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/int96/int96_data.json b/framework/resources/Functional/int96/int96_data.json index 5bdae7d69..e4229b35e 100644 --- a/framework/resources/Functional/int96/int96_data.json +++ b/framework/resources/Functional/int96/int96_data.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/int96/int96_plan.json b/framework/resources/Functional/int96/int96_plan.json index a1282cd56..fb0bc407f 100644 --- a/framework/resources/Functional/int96/int96_plan.json +++ b/framework/resources/Functional/int96/int96_plan.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/int96/q11.sql_drill5389 b/framework/resources/Functional/int96/q11.sql_drill5389 index e9cd9e7dd..29e92667b 100644 --- a/framework/resources/Functional/int96/q11.sql_drill5389 +++ b/framework/resources/Functional/int96/q11.sql_drill5389 @@ -1 +1 @@ -select convert_from(create_timestamp2, 'TIMESTAMP_IMPALA') - convert_from(create_timestamp1, 'TIMESTAMP_IMPALA') from dfs.`/user/hive/warehouse/hive1_parquet` where voter_id=3; +select convert_from(create_timestamp2, 'TIMESTAMP_IMPALA') - convert_from(create_timestamp1, 'TIMESTAMP_IMPALA') from dfs_test.`/user/hive/warehouse/hive1_parquet` where voter_id=3; diff --git 
a/framework/resources/Functional/int96/q8.sql_drill5389 b/framework/resources/Functional/int96/q8.sql_drill5389 index c795206eb..eab9d7926 100644 --- a/framework/resources/Functional/int96/q8.sql_drill5389 +++ b/framework/resources/Functional/int96/q8.sql_drill5389 @@ -1 +1 @@ -select age(convert_from(create_timestamp2, 'TIMESTAMP_IMPALA'),convert_from(create_timestamp1, 'TIMESTAMP_IMPALA')) from dfs.`/user/hive/warehouse/hive1_parquet` where voter_id=3; +select age(convert_from(create_timestamp2, 'TIMESTAMP_IMPALA'),convert_from(create_timestamp1, 'TIMESTAMP_IMPALA')) from dfs_test.`/user/hive/warehouse/hive1_parquet` where voter_id=3; diff --git a/framework/resources/Functional/interpreted_partition_pruning/ctas_auto_partition/hierarchical/data/ctas_auto_partition.json b/framework/resources/Functional/interpreted_partition_pruning/ctas_auto_partition/hierarchical/data/ctas_auto_partition.json index 1282cd902..739532101 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/ctas_auto_partition/hierarchical/data/ctas_auto_partition.json +++ b/framework/resources/Functional/interpreted_partition_pruning/ctas_auto_partition/hierarchical/data/ctas_auto_partition.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.ctasAutoPartition", + "schema": "dfs_test.ctasAutoPartition", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/interpreted_partition_pruning/ctas_auto_partition/hierarchical/plan/ctas_auto_partition.json b/framework/resources/Functional/interpreted_partition_pruning/ctas_auto_partition/hierarchical/plan/ctas_auto_partition.json index 18ab92fb6..03cad8f7c 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/ctas_auto_partition/hierarchical/plan/ctas_auto_partition.json +++ b/framework/resources/Functional/interpreted_partition_pruning/ctas_auto_partition/hierarchical/plan/ctas_auto_partition.json @@ -8,7 +8,7 @@ "matrices": [ { 
"query-file": ".*.q", - "schema": "dfs.ctasAutoPartition", + "schema": "dfs_test.ctasAutoPartition", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/interpreted_partition_pruning/dfs/csv/data/partitionDirectory.json b/framework/resources/Functional/interpreted_partition_pruning/dfs/csv/data/partitionDirectory.json index b9c1f7c59..d7f4573e8 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/dfs/csv/data/partitionDirectory.json +++ b/framework/resources/Functional/interpreted_partition_pruning/dfs/csv/data/partitionDirectory.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/interpreted_partition_pruning/dfs/csv/plan/partitionDirectory.json b/framework/resources/Functional/interpreted_partition_pruning/dfs/csv/plan/partitionDirectory.json index e5885ec2c..e12315501 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/dfs/csv/plan/partitionDirectory.json +++ b/framework/resources/Functional/interpreted_partition_pruning/dfs/csv/plan/partitionDirectory.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/1.q b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/1.q index d84322326..a32684813 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/1.q +++ b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/1.q @@ -1 +1 @@ -select columns[0], columns[1], columns[4], columns[10], columns[13] from 
dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intint` where dir0>1992 and dir0<1994 and dir1>10; +select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intint` where dir0>1992 and dir0<1994 and dir1>10; diff --git a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/2.q b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/2.q index 977f23d89..285ebf4e4 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/2.q +++ b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/2.q @@ -1 +1 @@ -select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0>1992 and dir0<1994 and dir1 like '%ec'; +select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0>1992 and dir0<1994 and dir1 like '%ec'; diff --git a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/3.q b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/3.q index 11fb4f8b5..6cada43c8 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/3.q +++ b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/3.q @@ -1 +1 @@ -select columns[0], columns[1], columns[4], columns[10],columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intint` where dir0<1983+dir1 and dir0<1994 and dir1+1>10; +select columns[0], columns[1], columns[4], columns[10],columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intint` where dir0<1983+dir1 and dir0<1994 and dir1+1>10; diff 
--git a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/4.q b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/4.q index 11fb4f8b5..6cada43c8 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/4.q +++ b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/4.q @@ -1 +1 @@ -select columns[0], columns[1], columns[4], columns[10],columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intint` where dir0<1983+dir1 and dir0<1994 and dir1+1>10; +select columns[0], columns[1], columns[4], columns[10],columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intint` where dir0<1983+dir1 and dir0<1994 and dir1+1>10; diff --git a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/6.q b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/6.q index 1f80dbe6d..eb2a5f36a 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/6.q +++ b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/6.q @@ -1 +1 @@ -select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intint` where dir0>1992 and dir0<1994 and dir1='dec'; +select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intint` where dir0>1992 and dir0<1994 and dir1='dec'; diff --git a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/partitionDirectory.json b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/partitionDirectory.json index bbb54e74e..8b6f1743c 100644 --- 
a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/partitionDirectory.json +++ b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/data/partitionDirectory.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/1.q b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/1.q index 70312e652..8b1dccf40 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/1.q +++ b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/1.q @@ -1 +1 @@ -explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intint` where dir0>1992 and dir0<1994 and dir1>10; +explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intint` where dir0>1992 and dir0<1994 and dir1>10; diff --git a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/2.q b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/2.q index a53410600..ec088d545 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/2.q +++ b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/2.q @@ -1 +1 @@ -explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0>1992 and dir0<1994 and dir1 like '%ec'; +explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from 
dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0>1992 and dir0<1994 and dir1 like '%ec'; diff --git a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/3.q b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/3.q index cd12c23a6..61b76e132 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/3.q +++ b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/3.q @@ -1 +1 @@ -explain plan for select columns[0], columns[1], columns[4], columns[10],columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intint` where dir0<1983+dir1 and dir0<1994 and dir1+1>10; +explain plan for select columns[0], columns[1], columns[4], columns[10],columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intint` where dir0<1983+dir1 and dir0<1994 and dir1+1>10; diff --git a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/4.q b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/4.q index cd12c23a6..61b76e132 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/4.q +++ b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/4.q @@ -1 +1 @@ -explain plan for select columns[0], columns[1], columns[4], columns[10],columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intint` where dir0<1983+dir1 and dir0<1994 and dir1+1>10; +explain plan for select columns[0], columns[1], columns[4], columns[10],columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intint` where dir0<1983+dir1 and dir0<1994 and dir1+1>10; diff --git a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/partitionDirectory.json 
b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/partitionDirectory.json index fb0bc7e53..33a178563 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/partitionDirectory.json +++ b/framework/resources/Functional/interpreted_partition_pruning/dfs/hierarchical/plan/partitionDirectory.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/interpreted_partition_pruning/hive/text/date_partition/data/partitionDirectory.json b/framework/resources/Functional/interpreted_partition_pruning/hive/text/date_partition/data/partitionDirectory.json index ca62dff62..65f3e4742 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/hive/text/date_partition/data/partitionDirectory.json +++ b/framework/resources/Functional/interpreted_partition_pruning/hive/text/date_partition/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/interpreted_partition_pruning/hive/text/date_partition/plan/partitionDirectory.json b/framework/resources/Functional/interpreted_partition_pruning/hive/text/date_partition/plan/partitionDirectory.json index 0056a72c9..3c12b796e 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/hive/text/date_partition/plan/partitionDirectory.json +++ b/framework/resources/Functional/interpreted_partition_pruning/hive/text/date_partition/plan/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git 
a/framework/resources/Functional/interpreted_partition_pruning/hive/text/hier_intint/data/partitionDirectory.json b/framework/resources/Functional/interpreted_partition_pruning/hive/text/hier_intint/data/partitionDirectory.json index bcc0d020d..19edd44a5 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/hive/text/hier_intint/data/partitionDirectory.json +++ b/framework/resources/Functional/interpreted_partition_pruning/hive/text/hier_intint/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/interpreted_partition_pruning/hive/text/hier_intint/plan/partitionDirectory.json b/framework/resources/Functional/interpreted_partition_pruning/hive/text/hier_intint/plan/partitionDirectory.json index ea2deaef5..ad2bc09ae 100644 --- a/framework/resources/Functional/interpreted_partition_pruning/hive/text/hier_intint/plan/partitionDirectory.json +++ b/framework/resources/Functional/interpreted_partition_pruning/hive/text/hier_intint/plan/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/joins/explicit_cast/explicit_cast.json b/framework/resources/Functional/joins/explicit_cast/explicit_cast.json index 0876e9621..368a2a08b 100644 --- a/framework/resources/Functional/joins/explicit_cast/explicit_cast.json +++ b/framework/resources/Functional/joins/explicit_cast/explicit_cast.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.joins", + "schema": "dfs_test.joins", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/joins/full_outer/full_outer.json 
b/framework/resources/Functional/joins/full_outer/full_outer.json index c85a013ee..9cd469cf2 100644 --- a/framework/resources/Functional/joins/full_outer/full_outer.json +++ b/framework/resources/Functional/joins/full_outer/full_outer.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.joins", + "schema": "dfs_test.joins", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/joins/implicit_cast/implicit_cast.json b/framework/resources/Functional/joins/implicit_cast/implicit_cast.json index 6f0b11413..f907dc1ca 100644 --- a/framework/resources/Functional/joins/implicit_cast/implicit_cast.json +++ b/framework/resources/Functional/joins/implicit_cast/implicit_cast.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.joins", + "schema": "dfs_test.joins", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/joins/implicit_cast_not_supported/implicit_cast.json b/framework/resources/Functional/joins/implicit_cast_not_supported/implicit_cast.json index e1d92f942..3598c2d13 100644 --- a/framework/resources/Functional/joins/implicit_cast_not_supported/implicit_cast.json +++ b/framework/resources/Functional/joins/implicit_cast_not_supported/implicit_cast.json @@ -7,7 +7,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.joins", + "schema": "dfs_test.joins", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/joins/inner/inner.json b/framework/resources/Functional/joins/inner/inner.json index 80a9b7c0e..dafa1d475 100644 --- a/framework/resources/Functional/joins/inner/inner.json +++ b/framework/resources/Functional/joins/inner/inner.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.joins", + "schema": "dfs_test.joins", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff 
--git a/framework/resources/Functional/joins/join/join.json b/framework/resources/Functional/joins/join/join.json index 43747e5cc..e12c82b85 100644 --- a/framework/resources/Functional/joins/join/join.json +++ b/framework/resources/Functional/joins/join/join.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.Join", + "schema": "dfs_test.Join", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/joins/left_outer/left_outer.json b/framework/resources/Functional/joins/left_outer/left_outer.json index 8ad2076f0..7e6b3de75 100644 --- a/framework/resources/Functional/joins/left_outer/left_outer.json +++ b/framework/resources/Functional/joins/left_outer/left_outer.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.joins", + "schema": "dfs_test.joins", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/joins/nulleqjoin/nulleqjoin.json b/framework/resources/Functional/joins/nulleqjoin/nulleqjoin.json index 62aa3c937..b3aca39e5 100644 --- a/framework/resources/Functional/joins/nulleqjoin/nulleqjoin.json +++ b/framework/resources/Functional/joins/nulleqjoin/nulleqjoin.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.Join", + "schema": "dfs_test.Join", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/joins/on_expression/on_expression.json b/framework/resources/Functional/joins/on_expression/on_expression.json index 6c4ef3172..1ac8f9498 100644 --- a/framework/resources/Functional/joins/on_expression/on_expression.json +++ b/framework/resources/Functional/joins/on_expression/on_expression.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.joins", + "schema": "dfs_test.joins", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git 
a/framework/resources/Functional/joins/order_by/queries/order_by_queries.json b/framework/resources/Functional/joins/order_by/queries/order_by_queries.json index 5ae1ffbfd..8dd02881a 100644 --- a/framework/resources/Functional/joins/order_by/queries/order_by_queries.json +++ b/framework/resources/Functional/joins/order_by/queries/order_by_queries.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.joins", + "schema": "dfs_test.joins", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/joins/right_outer/right_outer.json b/framework/resources/Functional/joins/right_outer/right_outer.json index cb8d2de58..ecf13f946 100644 --- a/framework/resources/Functional/joins/right_outer/right_outer.json +++ b/framework/resources/Functional/joins/right_outer/right_outer.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.joins", + "schema": "dfs_test.joins", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/joins/subqueries/subqueries.json b/framework/resources/Functional/joins/subqueries/subqueries.json index 57e3f3246..e21d8a3f9 100644 --- a/framework/resources/Functional/joins/subqueries/subqueries.json +++ b/framework/resources/Functional/joins/subqueries/subqueries.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.joins", + "schema": "dfs_test.joins", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/joins/views/views.json b/framework/resources/Functional/joins/views/views.json index 67214b9f4..11f8af085 100644 --- a/framework/resources/Functional/joins/views/views.json +++ b/framework/resources/Functional/joins/views/views.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.joins", + "schema": "dfs_test.joins", "output-format": "tsv", "expected-file": ".*.res", 
"verification-type": [ diff --git a/framework/resources/Functional/json/extended_json/extended_json.json b/framework/resources/Functional/json/extended_json/extended_json.json index 155947b4f..34c85e461 100644 --- a/framework/resources/Functional/json/extended_json/extended_json.json +++ b/framework/resources/Functional/json/extended_json/extended_json.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.extended_json", + "schema": "dfs_test.extended_json", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/json/json_bracketless/json_storage.json b/framework/resources/Functional/json/json_bracketless/json_storage.json index 660980e29..c282500c8 100644 --- a/framework/resources/Functional/json/json_bracketless/json_storage.json +++ b/framework/resources/Functional/json/json_bracketless/json_storage.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirJson", + "schema": "dfs_test.drillTestDirJson", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/json/json_kvgenflatten/flatten/drill2217_1.q b/framework/resources/Functional/json/json_kvgenflatten/flatten/drill2217_1.q index 10e9a5c81..736f69bfe 100644 --- a/framework/resources/Functional/json/json_kvgenflatten/flatten/drill2217_1.q +++ b/framework/resources/Functional/json/json_kvgenflatten/flatten/drill2217_1.q @@ -1 +1 @@ -select d.id, flatten(d.evnts) from dfs.`/drill/testdata/json_kvgenflatten/empty-array.json` d; +select d.id, flatten(d.evnts) from dfs_test.`/drill/testdata/json_kvgenflatten/empty-array.json` d; diff --git a/framework/resources/Functional/json/json_kvgenflatten/flatten/drill2217_2.q b/framework/resources/Functional/json/json_kvgenflatten/flatten/drill2217_2.q index 7c2cc25a5..85a52c220 100644 --- a/framework/resources/Functional/json/json_kvgenflatten/flatten/drill2217_2.q +++ 
b/framework/resources/Functional/json/json_kvgenflatten/flatten/drill2217_2.q @@ -1 +1 @@ -select d.id, flatten(d.evnts) from dfs.`/drill/testdata/json_kvgenflatten/empty-null-map.json` d; +select d.id, flatten(d.evnts) from dfs_test.`/drill/testdata/json_kvgenflatten/empty-null-map.json` d; diff --git a/framework/resources/Functional/json/json_kvgenflatten/flatten/drill2217_3.q b/framework/resources/Functional/json/json_kvgenflatten/flatten/drill2217_3.q index 940a2ce59..cd7140f11 100644 --- a/framework/resources/Functional/json/json_kvgenflatten/flatten/drill2217_3.q +++ b/framework/resources/Functional/json/json_kvgenflatten/flatten/drill2217_3.q @@ -1 +1 @@ -select d.id, flatten(d.evnts) from dfs.`/drill/testdata/json_kvgenflatten/empty-array1.json` d; +select d.id, flatten(d.evnts) from dfs_test.`/drill/testdata/json_kvgenflatten/empty-array1.json` d; diff --git a/framework/resources/Functional/json/json_kvgenflatten/flatten/flatten_drill3562.q b/framework/resources/Functional/json/json_kvgenflatten/flatten/flatten_drill3562.q index 5dee1fafb..d7acc1a2d 100644 --- a/framework/resources/Functional/json/json_kvgenflatten/flatten/flatten_drill3562.q +++ b/framework/resources/Functional/json/json_kvgenflatten/flatten/flatten_drill3562.q @@ -1 +1 @@ -select FLATTEN(t.a.b.c) AS c from dfs.`/drill/testdata/json_kvgenflatten/drill3562/drill3562.json` t; +select FLATTEN(t.a.b.c) AS c from dfs_test.`/drill/testdata/json_kvgenflatten/drill3562/drill3562.json` t; diff --git a/framework/resources/Functional/json/json_kvgenflatten/flatten/flatten_drill3562_1.q.f b/framework/resources/Functional/json/json_kvgenflatten/flatten/flatten_drill3562_1.q.f index 81ec537df..c7cdb4ed9 100644 --- a/framework/resources/Functional/json/json_kvgenflatten/flatten/flatten_drill3562_1.q.f +++ b/framework/resources/Functional/json/json_kvgenflatten/flatten/flatten_drill3562_1.q.f @@ -1 +1 @@ -select FLATTEN(t.a.b.c) AS c from dfs.`/drill/testdata/json_kvgenflatten/drill3562` t; +select 
FLATTEN(t.a.b.c) AS c from dfs_test.`/drill/testdata/json_kvgenflatten/drill3562` t; diff --git a/framework/resources/Functional/json/json_kvgenflatten/kvgenFlatten.json b/framework/resources/Functional/json/json_kvgenflatten/kvgenFlatten.json index ff72df093..e6b8f504c 100644 --- a/framework/resources/Functional/json/json_kvgenflatten/kvgenFlatten.json +++ b/framework/resources/Functional/json/json_kvgenflatten/kvgenFlatten.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/json/json_storage/jsonGenericGroup.json b/framework/resources/Functional/json/json_storage/jsonGenericGroup.json index 73a45f659..02837fc76 100644 --- a/framework/resources/Functional/json/json_storage/jsonGenericGroup.json +++ b/framework/resources/Functional/json/json_storage/jsonGenericGroup.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/json/kvgenflatten-nulls/kvgenflatten-nulls.json b/framework/resources/Functional/json/kvgenflatten-nulls/kvgenflatten-nulls.json index 2904df375..b54218472 100644 --- a/framework/resources/Functional/json/kvgenflatten-nulls/kvgenflatten-nulls.json +++ b/framework/resources/Functional/json/kvgenflatten-nulls/kvgenflatten-nulls.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.kvgenflatten-nulls", + "schema": "dfs_test.kvgenflatten-nulls", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/aggregate/data/aggregate.json b/framework/resources/Functional/limit0/aggregates/aggregate/data/aggregate.json index 550ee9e7c..ddd7f70d8 100644 --- 
a/framework/resources/Functional/limit0/aggregates/aggregate/data/aggregate.json +++ b/framework/resources/Functional/limit0/aggregates/aggregate/data/aggregate.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirNumerical", + "schema": "dfs_test.drillTestDirNumerical", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/aggregate/plan/aggregate.json b/framework/resources/Functional/limit0/aggregates/aggregate/plan/aggregate.json index f929dbfba..a27bf75ad 100644 --- a/framework/resources/Functional/limit0/aggregates/aggregate/plan/aggregate.json +++ b/framework/resources/Functional/limit0/aggregates/aggregate/plan/aggregate.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirNumerical", + "schema": "dfs_test.drillTestDirNumerical", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/bugs/data/bugs.json b/framework/resources/Functional/limit0/aggregates/aggregation/bugs/data/bugs.json index b3d59fd63..b36f866b0 100644 --- a/framework/resources/Functional/limit0/aggregates/aggregation/bugs/data/bugs.json +++ b/framework/resources/Functional/limit0/aggregates/aggregation/bugs/data/bugs.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/bugs/plan/bugs.json b/framework/resources/Functional/limit0/aggregates/aggregation/bugs/plan/bugs.json index 1493dbf88..b4105a52c 100644 --- a/framework/resources/Functional/limit0/aggregates/aggregation/bugs/plan/bugs.json +++ b/framework/resources/Functional/limit0/aggregates/aggregation/bugs/plan/bugs.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - 
"schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/count_distinct/data/count_distinct.json b/framework/resources/Functional/limit0/aggregates/aggregation/count_distinct/data/count_distinct.json index cdd9486e4..2ade82e67 100644 --- a/framework/resources/Functional/limit0/aggregates/aggregation/count_distinct/data/count_distinct.json +++ b/framework/resources/Functional/limit0/aggregates/aggregation/count_distinct/data/count_distinct.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/count_distinct/plan/count_distinct.json b/framework/resources/Functional/limit0/aggregates/aggregation/count_distinct/plan/count_distinct.json index 67fae1a43..0aea0b3eb 100644 --- a/framework/resources/Functional/limit0/aggregates/aggregation/count_distinct/plan/count_distinct.json +++ b/framework/resources/Functional/limit0/aggregates/aggregation/count_distinct/plan/count_distinct.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/group_by_case/data/case.json b/framework/resources/Functional/limit0/aggregates/aggregation/group_by_case/data/case.json index 4ef74cabe..3f4f5c04e 100644 --- a/framework/resources/Functional/limit0/aggregates/aggregation/group_by_case/data/case.json +++ b/framework/resources/Functional/limit0/aggregates/aggregation/group_by_case/data/case.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": 
"dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/group_by_case/plan/case.json b/framework/resources/Functional/limit0/aggregates/aggregation/group_by_case/plan/case.json index 08ddad7b3..bda12993a 100644 --- a/framework/resources/Functional/limit0/aggregates/aggregation/group_by_case/plan/case.json +++ b/framework/resources/Functional/limit0/aggregates/aggregation/group_by_case/plan/case.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/group_by_expression/data/group_by_expression.json b/framework/resources/Functional/limit0/aggregates/aggregation/group_by_expression/data/group_by_expression.json index 148ad31cc..484ece716 100644 --- a/framework/resources/Functional/limit0/aggregates/aggregation/group_by_expression/data/group_by_expression.json +++ b/framework/resources/Functional/limit0/aggregates/aggregation/group_by_expression/data/group_by_expression.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/group_by_expression/plan/group_by_expression.json b/framework/resources/Functional/limit0/aggregates/aggregation/group_by_expression/plan/group_by_expression.json index 26be65f68..6f0366718 100644 --- a/framework/resources/Functional/limit0/aggregates/aggregation/group_by_expression/plan/group_by_expression.json +++ b/framework/resources/Functional/limit0/aggregates/aggregation/group_by_expression/plan/group_by_expression.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": 
"dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/multicolumn/data/multicolumn.json b/framework/resources/Functional/limit0/aggregates/aggregation/multicolumn/data/multicolumn.json index 871ed7d48..e8e155999 100644 --- a/framework/resources/Functional/limit0/aggregates/aggregation/multicolumn/data/multicolumn.json +++ b/framework/resources/Functional/limit0/aggregates/aggregation/multicolumn/data/multicolumn.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/multicolumn/plan/multicolumn.json b/framework/resources/Functional/limit0/aggregates/aggregation/multicolumn/plan/multicolumn.json index 93295f68b..c2e7e7c47 100644 --- a/framework/resources/Functional/limit0/aggregates/aggregation/multicolumn/plan/multicolumn.json +++ b/framework/resources/Functional/limit0/aggregates/aggregation/multicolumn/plan/multicolumn.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/sanity/data/sanity.json b/framework/resources/Functional/limit0/aggregates/aggregation/sanity/data/sanity.json index 0feecf9b9..6ceb5c64e 100644 --- a/framework/resources/Functional/limit0/aggregates/aggregation/sanity/data/sanity.json +++ b/framework/resources/Functional/limit0/aggregates/aggregation/sanity/data/sanity.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", 
"verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/sanity/plan/sanity.json b/framework/resources/Functional/limit0/aggregates/aggregation/sanity/plan/sanity.json index 522cc3966..632f1a437 100644 --- a/framework/resources/Functional/limit0/aggregates/aggregation/sanity/plan/sanity.json +++ b/framework/resources/Functional/limit0/aggregates/aggregation/sanity/plan/sanity.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/scalar/data/scalar.json b/framework/resources/Functional/limit0/aggregates/aggregation/scalar/data/scalar.json index 884c6fc95..21ba57a58 100644 --- a/framework/resources/Functional/limit0/aggregates/aggregation/scalar/data/scalar.json +++ b/framework/resources/Functional/limit0/aggregates/aggregation/scalar/data/scalar.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/aggregation/scalar/plan/scalar.json b/framework/resources/Functional/limit0/aggregates/aggregation/scalar/plan/scalar.json index 98d1e0981..bc759a5da 100644 --- a/framework/resources/Functional/limit0/aggregates/aggregation/scalar/plan/scalar.json +++ b/framework/resources/Functional/limit0/aggregates/aggregation/scalar/plan/scalar.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.aggregation", + "schema": "dfs_test.aggregation", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/tpcds_variants/parquet/data/aggregate.json 
b/framework/resources/Functional/limit0/aggregates/tpcds_variants/parquet/data/aggregate.json index bc80b3f61..0504b92fe 100644 --- a/framework/resources/Functional/limit0/aggregates/tpcds_variants/parquet/data/aggregate.json +++ b/framework/resources/Functional/limit0/aggregates/tpcds_variants/parquet/data/aggregate.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.tpcds_sf1_parquet", + "schema": "dfs_test.tpcds_sf1_parquet", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/limit0/aggregates/tpcds_variants/parquet/plan/aggregate.json b/framework/resources/Functional/limit0/aggregates/tpcds_variants/parquet/plan/aggregate.json index e0676f7bd..ff864f5f4 100644 --- a/framework/resources/Functional/limit0/aggregates/tpcds_variants/parquet/plan/aggregate.json +++ b/framework/resources/Functional/limit0/aggregates/tpcds_variants/parquet/plan/aggregate.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.tpcds_sf1_parquet", + "schema": "dfs_test.tpcds_sf1_parquet", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/limit0/convert/data/convert.json b/framework/resources/Functional/limit0/convert/data/convert.json index 4c549c1c5..48abd4536 100644 --- a/framework/resources/Functional/limit0/convert/data/convert.json +++ b/framework/resources/Functional/limit0/convert/data/convert.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirConvert", + "schema": "dfs_test.drillTestDirConvert", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/limit0/convert/plan/convert.json b/framework/resources/Functional/limit0/convert/plan/convert.json index 7bd470bd1..0bddcf4f4 100644 --- a/framework/resources/Functional/limit0/convert/plan/convert.json +++ 
b/framework/resources/Functional/limit0/convert/plan/convert.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirConvert", + "schema": "dfs_test.drillTestDirConvert", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/limit0/datetime/data/datetime-basic.json b/framework/resources/Functional/limit0/datetime/data/datetime-basic.json index 2d274b0e2..9756b4717 100644 --- a/framework/resources/Functional/limit0/datetime/data/datetime-basic.json +++ b/framework/resources/Functional/limit0/datetime/data/datetime-basic.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirDatetime", + "schema": "dfs_test.drillTestDirDatetime", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/limit0/datetime/plan/datetime-basic.json b/framework/resources/Functional/limit0/datetime/plan/datetime-basic.json index 5bbc6b24b..a63e1ca57 100644 --- a/framework/resources/Functional/limit0/datetime/plan/datetime-basic.json +++ b/framework/resources/Functional/limit0/datetime/plan/datetime-basic.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirDatetime", + "schema": "dfs_test.drillTestDirDatetime", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/limit0/decimal/data/decimal.json b/framework/resources/Functional/limit0/decimal/data/decimal.json index 280b22aaf..5bf90a532 100644 --- a/framework/resources/Functional/limit0/decimal/data/decimal.json +++ b/framework/resources/Functional/limit0/decimal/data/decimal.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirNumerical", + "schema": "dfs_test.drillTestDirNumerical", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git 
a/framework/resources/Functional/limit0/filters/data/data.json b/framework/resources/Functional/limit0/filters/data/data.json index af3021abc..1d02c014a 100644 --- a/framework/resources/Functional/limit0/filters/data/data.json +++ b/framework/resources/Functional/limit0/filters/data/data.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/filters/plan/data.json b/framework/resources/Functional/limit0/filters/plan/data.json index 183565ad2..4e679c3e2 100644 --- a/framework/resources/Functional/limit0/filters/plan/data.json +++ b/framework/resources/Functional/limit0/filters/plan/data.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/functions/data/limit0_functions.json b/framework/resources/Functional/limit0/functions/data/limit0_functions.json index 71dde643b..d2c606d74 100644 --- a/framework/resources/Functional/limit0/functions/data/limit0_functions.json +++ b/framework/resources/Functional/limit0/functions/data/limit0_functions.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/functions/plan/limit0_functions.json b/framework/resources/Functional/limit0/functions/plan/limit0_functions.json index aa7bb2fa6..e9606b91a 100644 --- a/framework/resources/Functional/limit0/functions/plan/limit0_functions.json +++ b/framework/resources/Functional/limit0/functions/plan/limit0_functions.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + 
"schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/hive/hive_functions/data/drill_hive_chain3.q b/framework/resources/Functional/limit0/hive/hive_functions/data/drill_hive_chain3.q index 1b6bda913..8428723a6 100644 --- a/framework/resources/Functional/limit0/hive/hive_functions/data/drill_hive_chain3.q +++ b/framework/resources/Functional/limit0/hive/hive_functions/data/drill_hive_chain3.q @@ -1 +1 @@ -select concat(cast(locate('df', varchar_col, 5) as varchar(2)), 'hello') from dfs.`cross-sources`.`fewtypes_null.parquet`; +select concat(cast(locate('df', varchar_col, 5) as varchar(2)), 'hello') from dfs_test.`cross-sources`.`fewtypes_null.parquet`; diff --git a/framework/resources/Functional/limit0/hive/hive_functions/plan/drill_hive_chain3.q b/framework/resources/Functional/limit0/hive/hive_functions/plan/drill_hive_chain3.q index 3e847fbfb..300468154 100644 --- a/framework/resources/Functional/limit0/hive/hive_functions/plan/drill_hive_chain3.q +++ b/framework/resources/Functional/limit0/hive/hive_functions/plan/drill_hive_chain3.q @@ -1,2 +1,2 @@ explain plan for select * from ( -select concat(cast(locate('df', varchar_col, 5) as varchar(2)), 'hello') from dfs.`cross-sources`.`fewtypes_null.parquet`) t limit 0; +select concat(cast(locate('df', varchar_col, 5) as varchar(2)), 'hello') from dfs_test.`cross-sources`.`fewtypes_null.parquet`) t limit 0; diff --git a/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/hive_native.json b/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/hive_native.json index 2de22a789..65bf0e1a0 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/hive_native.json +++ b/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/hive_native.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", 
"output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/nullable-parquet-hive-fulljoin.q_MD-766 b/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/nullable-parquet-hive-fulljoin.q_MD-766 index b080b1b29..2540f6d5f 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/nullable-parquet-hive-fulljoin.q_MD-766 +++ b/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/nullable-parquet-hive-fulljoin.q_MD-766 @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p full outer join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/nullable-parquet-hive-join.q_MD-766 b/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/nullable-parquet-hive-join.q_MD-766 index 51d858ef5..ac278f76e 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/nullable-parquet-hive-join.q_MD-766 +++ b/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/nullable-parquet-hive-join.q_MD-766 @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, 
p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p inner join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/nullable-parquet-hive-leftjoin.q_MD-766 b/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/nullable-parquet-hive-leftjoin.q_MD-766 index 4f9301641..baa4a5835 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/nullable-parquet-hive-leftjoin.q_MD-766 +++ b/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/nullable-parquet-hive-leftjoin.q_MD-766 @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p left join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/nullable-parquet-hive-unionall.q b/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/nullable-parquet-hive-unionall.q index 334afc513..29a223af8 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/nullable-parquet-hive-unionall.q +++ b/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/nullable-parquet-hive-unionall.q @@ -1,7 +1,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, 
p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p union all select o.int_col, o.bigint_col, o.date_col, cast(o.time_col as time), o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col from hive.hive_native.fewtypes_null_hive o; diff --git a/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/parquet-hive-fulljoin_DRILL-2707.q_MD-766 b/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/parquet-hive-fulljoin_DRILL-2707.q_MD-766 index 0e0dea6bb..100231b29 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/parquet-hive-fulljoin_DRILL-2707.q_MD-766 +++ b/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/parquet-hive-fulljoin_DRILL-2707.q_MD-766 @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p full outer join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/parquet-hive-join.q_MD-766 b/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/parquet-hive-join.q_MD-766 index 115e9179c..0719a5281 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/parquet-hive-join.q_MD-766 +++ b/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/parquet-hive-join.q_MD-766 @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, 
o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p inner join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/parquet-hive-unionall.q b/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/parquet-hive-unionall.q index 9bd7884e8..6ec137ef6 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/parquet-hive-unionall.q +++ b/framework/resources/Functional/limit0/hive/hive_native/hash-join/data/parquet-hive-unionall.q @@ -1,7 +1,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p union all select o.int_col, o.bigint_col, o.date_col, cast(o.time_col as time), o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col from hive.hive_native.fewtypes_null_hive o; diff --git a/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/hive_native.json b/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/hive_native.json index 2bd35ac94..0583d8b21 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/hive_native.json +++ b/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/hive_native.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", 
"verification-type": [ diff --git a/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/nullable-parquet-hive-fulljoin.q b/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/nullable-parquet-hive-fulljoin.q index 4b1a61b52..bb2f35d3f 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/nullable-parquet-hive-fulljoin.q +++ b/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/nullable-parquet-hive-fulljoin.q @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select * from ( select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p full outer join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/nullable-parquet-hive-join.q b/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/nullable-parquet-hive-join.q index 4ccdd9e7f..6f9b08ec9 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/nullable-parquet-hive-join.q +++ b/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/nullable-parquet-hive-join.q @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select * from ( select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from 
dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p inner join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/nullable-parquet-hive-leftjoin.q b/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/nullable-parquet-hive-leftjoin.q index c2d6f8483..bf3f4251e 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/nullable-parquet-hive-leftjoin.q +++ b/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/nullable-parquet-hive-leftjoin.q @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select * from ( select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p left join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/nullable-parquet-hive-unionall.q.fail b/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/nullable-parquet-hive-unionall.q.fail index 9a3dd422e..64fb2a41a 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/nullable-parquet-hive-unionall.q.fail +++ b/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/nullable-parquet-hive-unionall.q.fail @@ -1,7 +1,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select * from ( select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, 
p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p union all select o.int_col, o.bigint_col, o.date_col, cast(o.time_col as time), o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col from hive.hive_native.fewtypes_null_hive o) t limit 0; diff --git a/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/parquet-hive-fulljoin_DRILL-2707.q b/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/parquet-hive-fulljoin_DRILL-2707.q index 46cebf4ac..017b66520 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/parquet-hive-fulljoin_DRILL-2707.q +++ b/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/parquet-hive-fulljoin_DRILL-2707.q @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select * from ( select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p full outer join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/parquet-hive-join.q b/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/parquet-hive-join.q index d4c8d23f1..c8f337f34 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/parquet-hive-join.q +++ b/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/parquet-hive-join.q @@ -2,7 +2,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for 
select * from ( select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p inner join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/parquet-hive-unionall.q.fail b/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/parquet-hive-unionall.q.fail index bdf86b506..e910e6f4c 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/parquet-hive-unionall.q.fail +++ b/framework/resources/Functional/limit0/hive/hive_native/hash-join/plan/parquet-hive-unionall.q.fail @@ -1,7 +1,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select * from ( select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p union all select o.int_col, o.bigint_col, o.date_col, cast(o.time_col as time), o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col from hive.hive_native.fewtypes_null_hive o) t limit 0; diff --git a/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/hive_native.json b/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/hive_native.json index b269e43fe..12c1912ec 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/hive_native.json +++ b/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/hive_native.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": 
".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/nullable-parquet-hive-join.q_MD-766 b/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/nullable-parquet-hive-join.q_MD-766 index bdf20cee9..c1f8b5383 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/nullable-parquet-hive-join.q_MD-766 +++ b/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/nullable-parquet-hive-join.q_MD-766 @@ -3,7 +3,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p inner join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/nullable-parquet-hive-leftjoin.q_Md-766 b/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/nullable-parquet-hive-leftjoin.q_Md-766 index eff6fc073..2adc291ec 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/nullable-parquet-hive-leftjoin.q_Md-766 +++ b/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/nullable-parquet-hive-leftjoin.q_Md-766 @@ -3,7 +3,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, 
p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p left join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/nullable-parquet-hive-unionall.q b/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/nullable-parquet-hive-unionall.q index ea6ea3caa..e8e3ca43a 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/nullable-parquet-hive-unionall.q +++ b/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/nullable-parquet-hive-unionall.q @@ -2,7 +2,7 @@ alter session set `planner.enable_hashjoin` = false; alter session set `store.hive.optimize_scan_with_native_readers` = true; select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p union all select o.int_col, o.bigint_col, o.date_col, cast(o.time_col as time), o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col from hive.hive_native.fewtypes_null_hive o; diff --git a/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/parquet-hive-join.q_MD-766 b/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/parquet-hive-join.q_MD-766 index 1f117a7a8..bc6a7c4f4 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/parquet-hive-join.q_MD-766 +++ b/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/parquet-hive-join.q_MD-766 @@ -3,7 +3,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, 
o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p inner join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/parquet-hive-unionall.q b/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/parquet-hive-unionall.q index 5fcf5b3af..4fd75742e 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/parquet-hive-unionall.q +++ b/framework/resources/Functional/limit0/hive/hive_native/merge-join/data/parquet-hive-unionall.q @@ -2,7 +2,7 @@ alter session set `planner.enable_hashjoin` = false; alter session set `store.hive.optimize_scan_with_native_readers` = true; select p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p union all select o.int_col, o.bigint_col, o.date_col, cast(o.time_col as time), o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col from hive.hive_native.fewtypes_null_hive o; diff --git a/framework/resources/Functional/limit0/hive/hive_native/merge-join/plan/hive_native.json b/framework/resources/Functional/limit0/hive/hive_native/merge-join/plan/hive_native.json index 141a13b08..0d229a1bb 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/merge-join/plan/hive_native.json +++ b/framework/resources/Functional/limit0/hive/hive_native/merge-join/plan/hive_native.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", 
"verification-type": [ diff --git a/framework/resources/Functional/limit0/hive/hive_native/merge-join/plan/nullable-parquet-hive-join.q b/framework/resources/Functional/limit0/hive/hive_native/merge-join/plan/nullable-parquet-hive-join.q index 9b13cc073..6da9953ad 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/merge-join/plan/nullable-parquet-hive-join.q +++ b/framework/resources/Functional/limit0/hive/hive_native/merge-join/plan/nullable-parquet-hive-join.q @@ -3,7 +3,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select * from ( select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p inner join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/limit0/hive/hive_native/merge-join/plan/nullable-parquet-hive-leftjoin.q b/framework/resources/Functional/limit0/hive/hive_native/merge-join/plan/nullable-parquet-hive-leftjoin.q index 15f0d7a09..356a801fa 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/merge-join/plan/nullable-parquet-hive-leftjoin.q +++ b/framework/resources/Functional/limit0/hive/hive_native/merge-join/plan/nullable-parquet-hive-leftjoin.q @@ -3,7 +3,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select * from ( select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from 
dfs.`cross-sources`.`fewtypes_null.parquet` p +from dfs_test.`cross-sources`.`fewtypes_null.parquet` p left join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/limit0/hive/hive_native/merge-join/plan/parquet-hive-join.q b/framework/resources/Functional/limit0/hive/hive_native/merge-join/plan/parquet-hive-join.q index e19aa790b..c24d75df7 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/merge-join/plan/parquet-hive-join.q +++ b/framework/resources/Functional/limit0/hive/hive_native/merge-join/plan/parquet-hive-join.q @@ -3,7 +3,7 @@ alter session set `store.hive.optimize_scan_with_native_readers` = true; explain plan for select * from ( select o.int_col, o.bigint_col, o.date_col, o.time_col, o.timestamp_col, o.interval_col, o.varchar_col, o.float_col, o.double_col, o.bool_col, p.int_col, p.bigint_col, p.date_col, p.time_col, p.timestamp_col, p.interval_col, p.varchar_col, p.float_col, p.double_col, p.bool_col -from dfs.`cross-sources`.`fewtypes.parquet` p +from dfs_test.`cross-sources`.`fewtypes.parquet` p inner join hive.hive_native.fewtypes_null_hive o on p.int_col=o.int_col and p.bigint_col = o.bigint_col diff --git a/framework/resources/Functional/limit0/hive/hive_native/operators/data/hive_native.json b/framework/resources/Functional/limit0/hive/hive_native/operators/data/hive_native.json index 134ded0f6..dfe2d37b7 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/operators/data/hive_native.json +++ b/framework/resources/Functional/limit0/hive/hive_native/operators/data/hive_native.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/limit0/hive/hive_native/operators/plan/hive_native.json 
b/framework/resources/Functional/limit0/hive/hive_native/operators/plan/hive_native.json index 667de5aab..95c9ba2b1 100644 --- a/framework/resources/Functional/limit0/hive/hive_native/operators/plan/hive_native.json +++ b/framework/resources/Functional/limit0/hive/hive_native/operators/plan/hive_native.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/limit0/implicit_cast_with_views/data/implicit_cast.json b/framework/resources/Functional/limit0/implicit_cast_with_views/data/implicit_cast.json index bc9e8feff..4665b9527 100644 --- a/framework/resources/Functional/limit0/implicit_cast_with_views/data/implicit_cast.json +++ b/framework/resources/Functional/limit0/implicit_cast_with_views/data/implicit_cast.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.joins", + "schema": "dfs_test.joins", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/implicit_cast_with_views/plan/implicit_cast.json b/framework/resources/Functional/limit0/implicit_cast_with_views/plan/implicit_cast.json index 1596be625..f61ac3bc6 100644 --- a/framework/resources/Functional/limit0/implicit_cast_with_views/plan/implicit_cast.json +++ b/framework/resources/Functional/limit0/implicit_cast_with_views/plan/implicit_cast.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.joins", + "schema": "dfs_test.joins", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/p1tests/parquet/data/p1tests.json b/framework/resources/Functional/limit0/p1tests/parquet/data/p1tests.json index d8daebc98..1b9a1523a 100644 --- a/framework/resources/Functional/limit0/p1tests/parquet/data/p1tests.json +++ 
b/framework/resources/Functional/limit0/p1tests/parquet/data/p1tests.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirP1", + "schema": "dfs_test.drillTestDirP1", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/limit0/p1tests/parquet/plan/p1tests.json b/framework/resources/Functional/limit0/p1tests/parquet/plan/p1tests.json index f09c63ffd..c7bd128df 100644 --- a/framework/resources/Functional/limit0/p1tests/parquet/plan/p1tests.json +++ b/framework/resources/Functional/limit0/p1tests/parquet/plan/p1tests.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirP1", + "schema": "dfs_test.drillTestDirP1", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/limit0/p1tests/text/data/p1tests.json b/framework/resources/Functional/limit0/p1tests/text/data/p1tests.json index 98dabbd11..c0d657ab9 100644 --- a/framework/resources/Functional/limit0/p1tests/text/data/p1tests.json +++ b/framework/resources/Functional/limit0/p1tests/text/data/p1tests.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirP1", + "schema": "dfs_test.drillTestDirP1", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/limit0/p1tests/text/plan/p1tests.json b/framework/resources/Functional/limit0/p1tests/text/plan/p1tests.json index 60af15e48..c2a1ab236 100644 --- a/framework/resources/Functional/limit0/p1tests/text/plan/p1tests.json +++ b/framework/resources/Functional/limit0/p1tests/text/plan/p1tests.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirP1", + "schema": "dfs_test.drillTestDirP1", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/limit0/union/data/queries.json 
b/framework/resources/Functional/limit0/union/data/queries.json index 0efb2c966..bafca9d7a 100644 --- a/framework/resources/Functional/limit0/union/data/queries.json +++ b/framework/resources/Functional/limit0/union/data/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.union", + "schema": "dfs_test.union", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/limit0/union/plan/queries.json b/framework/resources/Functional/limit0/union/plan/queries.json index e24c9e9b9..fefdd3e26 100644 --- a/framework/resources/Functional/limit0/union/plan/queries.json +++ b/framework/resources/Functional/limit0/union/plan/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.union", + "schema": "dfs_test.union", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/limit0/union_all/prq_union_all/data/prq_union_all.json b/framework/resources/Functional/limit0/union_all/prq_union_all/data/prq_union_all.json index af88df54f..adbebe052 100644 --- a/framework/resources/Functional/limit0/union_all/prq_union_all/data/prq_union_all.json +++ b/framework/resources/Functional/limit0/union_all/prq_union_all/data/prq_union_all.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.union_all", + "schema": "dfs_test.union_all", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/limit0/union_all/prq_union_all/plan/prq_union_all.json b/framework/resources/Functional/limit0/union_all/prq_union_all/plan/prq_union_all.json index 41eab7259..a5c66b3bc 100644 --- a/framework/resources/Functional/limit0/union_all/prq_union_all/plan/prq_union_all.json +++ b/framework/resources/Functional/limit0/union_all/prq_union_all/plan/prq_union_all.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.union_all", + "schema": 
"dfs_test.union_all", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/limit0/views/data/query35.e_tsv b/framework/resources/Functional/limit0/views/data/query35.e_tsv index 2d84e731f..9a04460e8 100644 --- a/framework/resources/Functional/limit0/views/data/query35.e_tsv +++ b/framework/resources/Functional/limit0/views/data/query35.e_tsv @@ -1 +1 @@ -DRILL dfs.drillTestDirViews v1 +DRILL dfs_test.drillTestDirViews v1 diff --git a/framework/resources/Functional/limit0/views/data/views.json b/framework/resources/Functional/limit0/views/data/views.json index c5d4d2d67..9b46b96a9 100644 --- a/framework/resources/Functional/limit0/views/data/views.json +++ b/framework/resources/Functional/limit0/views/data/views.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirViews", + "schema": "dfs_test.drillTestDirViews", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/limit0/views/plan/views.json b/framework/resources/Functional/limit0/views/plan/views.json index 9bd908a99..6e6116af9 100644 --- a/framework/resources/Functional/limit0/views/plan/views.json +++ b/framework/resources/Functional/limit0/views/plan/views.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirViews", + "schema": "dfs_test.drillTestDirViews", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/limit0/window_functions/aggregates/data/queries.json b/framework/resources/Functional/limit0/window_functions/aggregates/data/queries.json index 2a52227c8..1c88a5816 100644 --- a/framework/resources/Functional/limit0/window_functions/aggregates/data/queries.json +++ b/framework/resources/Functional/limit0/window_functions/aggregates/data/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.window_functions", 
+ "schema": "dfs_test.window_functions", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/limit0/window_functions/aggregates/plan/queries.json b/framework/resources/Functional/limit0/window_functions/aggregates/plan/queries.json index dda9d2441..d659d4ab6 100644 --- a/framework/resources/Functional/limit0/window_functions/aggregates/plan/queries.json +++ b/framework/resources/Functional/limit0/window_functions/aggregates/plan/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.window_functions", + "schema": "dfs_test.window_functions", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/limit0/window_functions/bugs/data/bugs.json b/framework/resources/Functional/limit0/window_functions/bugs/data/bugs.json index 9ac2e5e17..4484752ff 100644 --- a/framework/resources/Functional/limit0/window_functions/bugs/data/bugs.json +++ b/framework/resources/Functional/limit0/window_functions/bugs/data/bugs.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/window_functions/bugs/plan/bugs.json b/framework/resources/Functional/limit0/window_functions/bugs/plan/bugs.json index ade0036e4..6947fd16d 100644 --- a/framework/resources/Functional/limit0/window_functions/bugs/plan/bugs.json +++ b/framework/resources/Functional/limit0/window_functions/bugs/plan/bugs.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/window_functions/empty_over_clause/data/empty_over_clause.json 
b/framework/resources/Functional/limit0/window_functions/empty_over_clause/data/empty_over_clause.json index cd7e737cc..c57c82f74 100644 --- a/framework/resources/Functional/limit0/window_functions/empty_over_clause/data/empty_over_clause.json +++ b/framework/resources/Functional/limit0/window_functions/empty_over_clause/data/empty_over_clause.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/window_functions/empty_over_clause/plan/empty_over_clause.json b/framework/resources/Functional/limit0/window_functions/empty_over_clause/plan/empty_over_clause.json index 59bc50fed..f9bd54105 100644 --- a/framework/resources/Functional/limit0/window_functions/empty_over_clause/plan/empty_over_clause.json +++ b/framework/resources/Functional/limit0/window_functions/empty_over_clause/plan/empty_over_clause.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/window_functions/multiple_partitions/data/multiple_partitions.json b/framework/resources/Functional/limit0/window_functions/multiple_partitions/data/multiple_partitions.json index f92bcff0e..0a7eb3c89 100644 --- a/framework/resources/Functional/limit0/window_functions/multiple_partitions/data/multiple_partitions.json +++ b/framework/resources/Functional/limit0/window_functions/multiple_partitions/data/multiple_partitions.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/window_functions/multiple_partitions/plan/multiple_partitions.json 
b/framework/resources/Functional/limit0/window_functions/multiple_partitions/plan/multiple_partitions.json index 0f98a62c6..77764d50a 100644 --- a/framework/resources/Functional/limit0/window_functions/multiple_partitions/plan/multiple_partitions.json +++ b/framework/resources/Functional/limit0/window_functions/multiple_partitions/plan/multiple_partitions.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/limit0/window_functions/tpcds_variants/data/tpcds_parquet_sf1.json b/framework/resources/Functional/limit0/window_functions/tpcds_variants/data/tpcds_parquet_sf1.json index 06f277f13..ce8026064 100755 --- a/framework/resources/Functional/limit0/window_functions/tpcds_variants/data/tpcds_parquet_sf1.json +++ b/framework/resources/Functional/limit0/window_functions/tpcds_variants/data/tpcds_parquet_sf1.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_sf1_parquet_views", + "schema": "dfs_test.tpcds_sf1_parquet_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/limit0/window_functions/tpcds_variants/plan/tpcds_parquet_sf1.json b/framework/resources/Functional/limit0/window_functions/tpcds_variants/plan/tpcds_parquet_sf1.json index bbb09a80d..592df6eac 100755 --- a/framework/resources/Functional/limit0/window_functions/tpcds_variants/plan/tpcds_parquet_sf1.json +++ b/framework/resources/Functional/limit0/window_functions/tpcds_variants/plan/tpcds_parquet_sf1.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_sf1_parquet_views", + "schema": "dfs_test.tpcds_sf1_parquet_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/metadata_caching/data/bool_partition1.q 
b/framework/resources/Functional/metadata_caching/data/bool_partition1.q index e7341290d..4d48fb752 100644 --- a/framework/resources/Functional/metadata_caching/data/bool_partition1.q +++ b/framework/resources/Functional/metadata_caching/data/bool_partition1.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/metadata_caching/fewtypes_boolpartition` where bool_col = true; +select * from dfs_test.`/drill/testdata/metadata_caching/fewtypes_boolpartition` where bool_col = true; diff --git a/framework/resources/Functional/metadata_caching/data/bool_partition2.q b/framework/resources/Functional/metadata_caching/data/bool_partition2.q index 195ed94ab..e81194f1c 100644 --- a/framework/resources/Functional/metadata_caching/data/bool_partition2.q +++ b/framework/resources/Functional/metadata_caching/data/bool_partition2.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/metadata_caching/fewtypes_boolpartition` where bool_col is null; +select * from dfs_test.`/drill/testdata/metadata_caching/fewtypes_boolpartition` where bool_col is null; diff --git a/framework/resources/Functional/metadata_caching/data/date_partition1.q b/framework/resources/Functional/metadata_caching/data/date_partition1.q index 812ff2d10..c5ba9f388 100644 --- a/framework/resources/Functional/metadata_caching/data/date_partition1.q +++ b/framework/resources/Functional/metadata_caching/data/date_partition1.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/metadata_caching/fewtypes_datepartition` where date_col = '1999-04-08'; +select * from dfs_test.`/drill/testdata/metadata_caching/fewtypes_datepartition` where date_col = '1999-04-08'; diff --git a/framework/resources/Functional/metadata_caching/data/date_partition2.q b/framework/resources/Functional/metadata_caching/data/date_partition2.q index 148ce62f6..3a1b8426a 100644 --- a/framework/resources/Functional/metadata_caching/data/date_partition2.q +++ b/framework/resources/Functional/metadata_caching/data/date_partition2.q @@ -1 +1 @@ -select * from 
dfs.`/drill/testdata/metadata_caching/fewtypes_datepartition` where date_col > '1999-04-08'; +select * from dfs_test.`/drill/testdata/metadata_caching/fewtypes_datepartition` where date_col > '1999-04-08'; diff --git a/framework/resources/Functional/metadata_caching/data/error1.e_tsv b/framework/resources/Functional/metadata_caching/data/error1.e_tsv index 50aff6cfa..6732b75f9 100644 --- a/framework/resources/Functional/metadata_caching/data/error1.e_tsv +++ b/framework/resources/Functional/metadata_caching/data/error1.e_tsv @@ -1 +1 @@ -false Storage plugin or workspace does not exist [dfs.kjsd] +false Storage plugin or workspace does not exist [dfs_test.kjsd] diff --git a/framework/resources/Functional/metadata_caching/data/error1.q b/framework/resources/Functional/metadata_caching/data/error1.q index aeab5180b..298ec47d1 100644 --- a/framework/resources/Functional/metadata_caching/data/error1.q +++ b/framework/resources/Functional/metadata_caching/data/error1.q @@ -1 +1 @@ -refresh table metadata dfs.kjsd.nowayIExist; +refresh table metadata dfs_test.kjsd.nowayIExist; diff --git a/framework/resources/Functional/metadata_caching/data/int_partition1.q b/framework/resources/Functional/metadata_caching/data/int_partition1.q index e4ca68d80..e579bad31 100644 --- a/framework/resources/Functional/metadata_caching/data/int_partition1.q +++ b/framework/resources/Functional/metadata_caching/data/int_partition1.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/metadata_caching/fewtypes_intpartition` where int_col = 21; +select * from dfs_test.`/drill/testdata/metadata_caching/fewtypes_intpartition` where int_col = 21; diff --git a/framework/resources/Functional/metadata_caching/data/mc1_addautopartitioned_files1.q_disabled b/framework/resources/Functional/metadata_caching/data/mc1_addautopartitioned_files1.q_disabled index c5a8a33fb..fc9acd784 100644 --- a/framework/resources/Functional/metadata_caching/data/mc1_addautopartitioned_files1.q_disabled +++ 
b/framework/resources/Functional/metadata_caching/data/mc1_addautopartitioned_files1.q_disabled @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/metadata_caching/lineitem_addautopartitioned_files`; +select count(*) from dfs_test.`/drill/testdata/metadata_caching/lineitem_addautopartitioned_files`; diff --git a/framework/resources/Functional/metadata_caching/data/mc1_addautopartitioned_files2.q_disabled b/framework/resources/Functional/metadata_caching/data/mc1_addautopartitioned_files2.q_disabled index 48af1dcf1..15d63289b 100644 --- a/framework/resources/Functional/metadata_caching/data/mc1_addautopartitioned_files2.q_disabled +++ b/framework/resources/Functional/metadata_caching/data/mc1_addautopartitioned_files2.q_disabled @@ -8,7 +8,7 @@ select avg(l_discount) as avg_disc, count(*) as count_order from - dfs.`/drill/testdata/metadata_caching/lineitem_addautopartitioned_files` + dfs_test.`/drill/testdata/metadata_caching/lineitem_addautopartitioned_files` group by l_returnflag, l_linestatus diff --git a/framework/resources/Functional/metadata_caching/data/mc1_autopartition.q b/framework/resources/Functional/metadata_caching/data/mc1_autopartition.q index bb1071f8c..4ee935438 100644 --- a/framework/resources/Functional/metadata_caching/data/mc1_autopartition.q +++ b/framework/resources/Functional/metadata_caching/data/mc1_autopartition.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/metadata_caching/nation` where n_regionkey=4; +select * from dfs_test.`/drill/testdata/metadata_caching/nation` where n_regionkey=4; diff --git a/framework/resources/Functional/metadata_caching/data/mc1_deletetoplevelcache1.q b/framework/resources/Functional/metadata_caching/data/mc1_deletetoplevelcache1.q index cd8eaa6df..94e4ae529 100644 --- a/framework/resources/Functional/metadata_caching/data/mc1_deletetoplevelcache1.q +++ b/framework/resources/Functional/metadata_caching/data/mc1_deletetoplevelcache1.q @@ -1 +1 @@ -select count(*) from 
dfs.`/drill/testdata/metadata_caching/lineitem_deletecache` where dir0=2006; +select count(*) from dfs_test.`/drill/testdata/metadata_caching/lineitem_deletecache` where dir0=2006; diff --git a/framework/resources/Functional/metadata_caching/data/mc1_deletetoplevelcache2.q b/framework/resources/Functional/metadata_caching/data/mc1_deletetoplevelcache2.q index 544364c8c..fe750d628 100644 --- a/framework/resources/Functional/metadata_caching/data/mc1_deletetoplevelcache2.q +++ b/framework/resources/Functional/metadata_caching/data/mc1_deletetoplevelcache2.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/metadata_caching/lineitem_deletecache/`; +select count(*) from dfs_test.`/drill/testdata/metadata_caching/lineitem_deletecache/`; diff --git a/framework/resources/Functional/metadata_caching/data/mc1_dirfilter1.q b/framework/resources/Functional/metadata_caching/data/mc1_dirfilter1.q index ca3eece4e..2acd3f729 100644 --- a/framework/resources/Functional/metadata_caching/data/mc1_dirfilter1.q +++ b/framework/resources/Functional/metadata_caching/data/mc1_dirfilter1.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/metadata_caching/orders` where dir0=1994; +select count(*) from dfs_test.`/drill/testdata/metadata_caching/orders` where dir0=1994; diff --git a/framework/resources/Functional/metadata_caching/data/mc1_dirfilter2.q b/framework/resources/Functional/metadata_caching/data/mc1_dirfilter2.q index 4464e954a..65d9509da 100644 --- a/framework/resources/Functional/metadata_caching/data/mc1_dirfilter2.q +++ b/framework/resources/Functional/metadata_caching/data/mc1_dirfilter2.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/metadata_caching/orders` where dir0=1994 or dir0=1995; +select count(*) from dfs_test.`/drill/testdata/metadata_caching/orders` where dir0=1994 or dir0=1995; diff --git a/framework/resources/Functional/metadata_caching/data/mc1_dirfilter3.q b/framework/resources/Functional/metadata_caching/data/mc1_dirfilter3.q index 
fb2dc94c0..eac484c97 100644 --- a/framework/resources/Functional/metadata_caching/data/mc1_dirfilter3.q +++ b/framework/resources/Functional/metadata_caching/data/mc1_dirfilter3.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/metadata_caching/orders` where dir0=1994 and dir0=1995; +select count(*) from dfs_test.`/drill/testdata/metadata_caching/orders` where dir0=1994 and dir0=1995; diff --git a/framework/resources/Functional/metadata_caching/data/mc1_dirfilter4.q b/framework/resources/Functional/metadata_caching/data/mc1_dirfilter4.q index 5f3bea51a..588a18075 100644 --- a/framework/resources/Functional/metadata_caching/data/mc1_dirfilter4.q +++ b/framework/resources/Functional/metadata_caching/data/mc1_dirfilter4.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/metadata_caching/orders` where dir0=1994 or dir1='Q3'; +select count(*) from dfs_test.`/drill/testdata/metadata_caching/orders` where dir0=1994 or dir1='Q3'; diff --git a/framework/resources/Functional/metadata_caching/data/mc1_removeautopartitioned_files1.q_disabled b/framework/resources/Functional/metadata_caching/data/mc1_removeautopartitioned_files1.q_disabled index 452567231..1906009dc 100644 --- a/framework/resources/Functional/metadata_caching/data/mc1_removeautopartitioned_files1.q_disabled +++ b/framework/resources/Functional/metadata_caching/data/mc1_removeautopartitioned_files1.q_disabled @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/metadata_caching/lineitem_removeautopartitioned_files`; +select count(*) from dfs_test.`/drill/testdata/metadata_caching/lineitem_removeautopartitioned_files`; diff --git a/framework/resources/Functional/metadata_caching/data/metadata_caching_small.json b/framework/resources/Functional/metadata_caching/data/metadata_caching_small.json index 8cc0cd9ed..df39e7dbd 100644 --- a/framework/resources/Functional/metadata_caching/data/metadata_caching_small.json +++ b/framework/resources/Functional/metadata_caching/data/metadata_caching_small.json 
@@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.metadata_caching", + "schema": "dfs_test.metadata_caching", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/metadata_caching/generated_caches/metadata_caching_small.json b/framework/resources/Functional/metadata_caching/generated_caches/metadata_caching_small.json index 3e1a555c1..d042e825f 100644 --- a/framework/resources/Functional/metadata_caching/generated_caches/metadata_caching_small.json +++ b/framework/resources/Functional/metadata_caching/generated_caches/metadata_caching_small.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.metadata_caching", + "schema": "dfs_test.metadata_caching", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/metadata_caching/partition_pruning/data/metadata_caching_pp.json b/framework/resources/Functional/metadata_caching/partition_pruning/data/metadata_caching_pp.json index ea32505de..e96d1d17f 100644 --- a/framework/resources/Functional/metadata_caching/partition_pruning/data/metadata_caching_pp.json +++ b/framework/resources/Functional/metadata_caching/partition_pruning/data/metadata_caching_pp.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.metadata_caching_pp", + "schema": "dfs_test.metadata_caching_pp", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/metadata_caching/partition_pruning/plan/metadata_caching_pp.json b/framework/resources/Functional/metadata_caching/partition_pruning/plan/metadata_caching_pp.json index 7a4afaf0f..b9c1561e8 100644 --- a/framework/resources/Functional/metadata_caching/partition_pruning/plan/metadata_caching_pp.json +++ b/framework/resources/Functional/metadata_caching/partition_pruning/plan/metadata_caching_pp.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", 
- "schema": "dfs.metadata_caching_pp", + "schema": "dfs_test.metadata_caching_pp", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/metadata_caching/plan/bool_partition1.q.fail b/framework/resources/Functional/metadata_caching/plan/bool_partition1.q.fail index 6cc97d8e0..9b16c1815 100644 --- a/framework/resources/Functional/metadata_caching/plan/bool_partition1.q.fail +++ b/framework/resources/Functional/metadata_caching/plan/bool_partition1.q.fail @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/metadata_caching/fewtypes_boolpartition` where bool_col = true; +explain plan for select * from dfs_test.`/drill/testdata/metadata_caching/fewtypes_boolpartition` where bool_col = true; diff --git a/framework/resources/Functional/metadata_caching/plan/bool_partition2.q.fail b/framework/resources/Functional/metadata_caching/plan/bool_partition2.q.fail index 0baccdeb6..ea73fec8c 100644 --- a/framework/resources/Functional/metadata_caching/plan/bool_partition2.q.fail +++ b/framework/resources/Functional/metadata_caching/plan/bool_partition2.q.fail @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/metadata_caching/fewtypes_boolpartition` where bool_col is null; +explain plan for select * from dfs_test.`/drill/testdata/metadata_caching/fewtypes_boolpartition` where bool_col is null; diff --git a/framework/resources/Functional/metadata_caching/plan/date_partition1.q b/framework/resources/Functional/metadata_caching/plan/date_partition1.q index 117a89931..fcc89f5cd 100644 --- a/framework/resources/Functional/metadata_caching/plan/date_partition1.q +++ b/framework/resources/Functional/metadata_caching/plan/date_partition1.q @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/metadata_caching/fewtypes_datepartition` where date_col = '1999-04-08'; +explain plan for select * from dfs_test.`/drill/testdata/metadata_caching/fewtypes_datepartition` where date_col = 
'1999-04-08'; diff --git a/framework/resources/Functional/metadata_caching/plan/date_partition2.q_nondeterministic b/framework/resources/Functional/metadata_caching/plan/date_partition2.q_nondeterministic index db6d8549f..f0fa3e307 100644 --- a/framework/resources/Functional/metadata_caching/plan/date_partition2.q_nondeterministic +++ b/framework/resources/Functional/metadata_caching/plan/date_partition2.q_nondeterministic @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/metadata_caching/fewtypes_datepartition` where date_col > '1999-04-08'; +explain plan for select * from dfs_test.`/drill/testdata/metadata_caching/fewtypes_datepartition` where date_col > '1999-04-08'; diff --git a/framework/resources/Functional/metadata_caching/plan/drill-3892.q b/framework/resources/Functional/metadata_caching/plan/drill-3892.q index d40a4a2dd..159df38d1 100644 --- a/framework/resources/Functional/metadata_caching/plan/drill-3892.q +++ b/framework/resources/Functional/metadata_caching/plan/drill-3892.q @@ -1 +1 @@ -explain plan for select count(*) from dfs.`/drill/testdata/metadata_caching/lineitem_deletecache` where dir0=2006 group by l_linestatus; +explain plan for select count(*) from dfs_test.`/drill/testdata/metadata_caching/lineitem_deletecache` where dir0=2006 group by l_linestatus; diff --git a/framework/resources/Functional/metadata_caching/plan/int_partition1.q b/framework/resources/Functional/metadata_caching/plan/int_partition1.q index 0e88e048a..3322ff8e1 100644 --- a/framework/resources/Functional/metadata_caching/plan/int_partition1.q +++ b/framework/resources/Functional/metadata_caching/plan/int_partition1.q @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/metadata_caching/fewtypes_intpartition` where int_col = 21; +explain plan for select * from dfs_test.`/drill/testdata/metadata_caching/fewtypes_intpartition` where int_col = 21; diff --git a/framework/resources/Functional/metadata_caching/plan/mc1_autopartition.q 
b/framework/resources/Functional/metadata_caching/plan/mc1_autopartition.q index 987197917..734d8aae6 100644 --- a/framework/resources/Functional/metadata_caching/plan/mc1_autopartition.q +++ b/framework/resources/Functional/metadata_caching/plan/mc1_autopartition.q @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/metadata_caching/nation` where n_regionkey=4; +explain plan for select * from dfs_test.`/drill/testdata/metadata_caching/nation` where n_regionkey=4; diff --git a/framework/resources/Functional/metadata_caching/plan/metadata_caching_small.json b/framework/resources/Functional/metadata_caching/plan/metadata_caching_small.json index 7b3bbb26e..d7d3e37ce 100644 --- a/framework/resources/Functional/metadata_caching/plan/metadata_caching_small.json +++ b/framework/resources/Functional/metadata_caching/plan/metadata_caching_small.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.metadata_caching", + "schema": "dfs_test.metadata_caching", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_1.q b/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_1.q index 8c477bfc1..a26c2fd15 100644 --- a/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_1.q +++ b/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_1.q @@ -1 +1 @@ -select filename from `dfs.drillTestDir`.`min_max_dir/2016/Oct/voter5`; +select filename from `dfs_test.drillTestDir`.`min_max_dir/2016/Oct/voter5`; diff --git a/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_11.q b/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_11.q index 4b5b40fec..c8a719c79 100644 --- a/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_11.q +++ b/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_11.q @@ -1 +1 @@ -select distinct((filepath || '/' || filename)) from `min_max_dir` where 
dir0=MAXDIR('dfs.drillTestDir','min_max_dir'); +select distinct((filepath || '/' || filename)) from `min_max_dir` where dir0=MAXDIR('dfs_test.drillTestDir','min_max_dir'); diff --git a/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_12.q b/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_12.q index 7fb234741..e3267a8ef 100644 --- a/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_12.q +++ b/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_12.q @@ -1 +1 @@ -select substr(filename,1,7), `replace`(suffix, 'p', 'P') from `dfs.drillTestDir`.`min_max_dir/2016/Oct` where length(fqn) > 59; +select substr(filename,1,7), `replace`(suffix, 'p', 'P') from `dfs_test.drillTestDir`.`min_max_dir/2016/Oct` where length(fqn) > 59; diff --git a/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_13.q b/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_13.q index fa2467727..792f41395 100644 --- a/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_13.q +++ b/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_13.q @@ -1 +1 @@ -SELECT filename,suffix,fqn,filepath FROM `min_max_dir` WHERE dir0 = MAXDIR('dfs.drillTestDir','min_max_dir') and dir1 = IMINDIR('dfs.drillTestDir','min_max_dir/2016'); +SELECT filename,suffix,fqn,filepath FROM `min_max_dir` WHERE dir0 = MAXDIR('dfs_test.drillTestDir','min_max_dir') and dir1 = IMINDIR('dfs_test.drillTestDir','min_max_dir/2016'); diff --git a/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_15.q b/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_15.q index b17c17c41..7ec9c4d5d 100644 --- a/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_15.q +++ b/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_15.q @@ -1 +1 @@ -select dir0,dir1,filename,fqn,filepath,suffix,registration from dfs.drillMinMaxDir1.`.` where suffix='' and registration='democrat' and dir0 < 'Nov'; 
+select dir0,dir1,filename,fqn,filepath,suffix,registration from dfs_test.drillMinMaxDir1.`.` where suffix='' and registration='democrat' and dir0 < 'Nov'; diff --git a/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_16.q b/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_16.q index b9d2ade1a..10bd9f6bc 100644 --- a/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_16.q +++ b/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_16.q @@ -1 +1 @@ -select sum(voter_id), avg(age), max(filename) from `dfs.drillTestDir`.`min_max_dir` where suffix <> '' and dir0=2016; +select sum(voter_id), avg(age), max(filename) from `dfs_test.drillTestDir`.`min_max_dir` where suffix <> '' and dir0=2016; diff --git a/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_18.q b/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_18.q index 4d7fad070..e471bf327 100644 --- a/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_18.q +++ b/framework/resources/Functional/min_max_dir/drill-3474/drill-3474_18.q @@ -1 +1 @@ -select count(distinct(suffix)), count(distinct(fqn)), count(distinct(filename)), count(distinct(filepath)) from dfs.drillMinMaxDir.`.`; +select count(distinct(suffix)), count(distinct(fqn)), count(distinct(filename)), count(distinct(filepath)) from dfs_test.drillMinMaxDir.`.`; diff --git a/framework/resources/Functional/min_max_dir/drill-3894_1.q b/framework/resources/Functional/min_max_dir/drill-3894_1.q index 7f87f7fd1..7e997bed9 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_1.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_1.q @@ -1 +1 @@ -SELECT * FROM dfs.drillMinMaxDir.`.` WHERE dir0 = MINDIR('dfs.drillMinMaxDir','.'); +SELECT * FROM dfs_test.drillMinMaxDir.`.` WHERE dir0 = MINDIR('dfs_test.drillMinMaxDir','.'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_10.q 
b/framework/resources/Functional/min_max_dir/drill-3894_10.q index 1f469bc9d..7d86517fd 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_10.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_10.q @@ -1 +1 @@ -SELECT * FROM dfs.drillMinMaxDir.`.` WHERE dir0 = IMAXDIR('dfs.drillMinMaxDir'); +SELECT * FROM dfs_test.drillMinMaxDir.`.` WHERE dir0 = IMAXDIR('dfs_test.drillMinMaxDir'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_11.q b/framework/resources/Functional/min_max_dir/drill-3894_11.q index 33207815d..36b1479fb 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_11.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_11.q @@ -1 +1 @@ -SELECT * FROM dfs.drillMinMaxDir1.`.` WHERE dir0 = MINDIR('dfs.drillMinMaxDir1'); +SELECT * FROM dfs_test.drillMinMaxDir1.`.` WHERE dir0 = MINDIR('dfs_test.drillMinMaxDir1'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_12.q b/framework/resources/Functional/min_max_dir/drill-3894_12.q index e37c5a3c3..f71df4e69 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_12.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_12.q @@ -1 +1 @@ -SELECT * FROM dfs.drillMinMaxDir1.`.` WHERE dir0 = IMINDIR('dfs.drillMinMaxDir1'); +SELECT * FROM dfs_test.drillMinMaxDir1.`.` WHERE dir0 = IMINDIR('dfs_test.drillMinMaxDir1'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_13.q b/framework/resources/Functional/min_max_dir/drill-3894_13.q index 04a308b04..70a875a60 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_13.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_13.q @@ -1 +1 @@ -SELECT * FROM dfs.drillMinMaxDir1.`.` WHERE dir0 = MAXDIR('dfs.drillMinMaxDir1'); +SELECT * FROM dfs_test.drillMinMaxDir1.`.` WHERE dir0 = MAXDIR('dfs_test.drillMinMaxDir1'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_14.q 
b/framework/resources/Functional/min_max_dir/drill-3894_14.q index 7e4ada297..9a8419e03 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_14.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_14.q @@ -1 +1 @@ -SELECT * FROM dfs.drillMinMaxDir1.`.` WHERE dir0 = IMAXDIR('dfs.drillMinMaxDir1'); +SELECT * FROM dfs_test.drillMinMaxDir1.`.` WHERE dir0 = IMAXDIR('dfs_test.drillMinMaxDir1'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_15.q b/framework/resources/Functional/min_max_dir/drill-3894_15.q index a0053d1aa..a04ed016b 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_15.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_15.q @@ -1 +1 @@ -SELECT dir0,dir1,dir2 FROM dfs.drillTestDir.`min_max_dir` WHERE dir0 = MINDIR('dfs.drillTestDir','min_max_dir') order by dir1; +SELECT dir0,dir1,dir2 FROM dfs_test.drillTestDir.`min_max_dir` WHERE dir0 = MINDIR('dfs_test.drillTestDir','min_max_dir') order by dir1; diff --git a/framework/resources/Functional/min_max_dir/drill-3894_16.q b/framework/resources/Functional/min_max_dir/drill-3894_16.q index e43cd038f..5f5ad07a4 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_16.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_16.q @@ -1 +1 @@ -SELECT dir0,dir1,dir2 FROM dfs.drillTestDir.`min_max_dir` WHERE dir0 = MAXDIR('dfs.drillTestDir','min_max_dir'); +SELECT dir0,dir1,dir2 FROM dfs_test.drillTestDir.`min_max_dir` WHERE dir0 = MAXDIR('dfs_test.drillTestDir','min_max_dir'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_17.q b/framework/resources/Functional/min_max_dir/drill-3894_17.q index 3f75468b6..95d4caa6c 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_17.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_17.q @@ -1 +1 @@ -SELECT * FROM `dfs.drillTestDir`.`min_max_dir/2016` WHERE dir0 = MINDIR('dfs.drillTestDir','min_max_dir/2016'); +SELECT * FROM 
`dfs_test.drillTestDir`.`min_max_dir/2016` WHERE dir0 = MINDIR('dfs_test.drillTestDir','min_max_dir/2016'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_18.q b/framework/resources/Functional/min_max_dir/drill-3894_18.q index ef7966df7..2b61fa38e 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_18.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_18.q @@ -1 +1 @@ -SELECT * FROM `dfs.drillTestDir`.`min_max_dir/2016` WHERE dir0 = IMINDIR('dfs.drillTestDir','min_max_dir/2016'); +SELECT * FROM `dfs_test.drillTestDir`.`min_max_dir/2016` WHERE dir0 = IMINDIR('dfs_test.drillTestDir','min_max_dir/2016'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_19.q b/framework/resources/Functional/min_max_dir/drill-3894_19.q index 66e74ddf4..fa8b52b98 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_19.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_19.q @@ -1 +1 @@ -SELECT * FROM `dfs.drillTestDir`.`min_max_dir/2016` WHERE dir0 = MAXDIR('dfs.drillTestDir','min_max_dir/2016'); +SELECT * FROM `dfs_test.drillTestDir`.`min_max_dir/2016` WHERE dir0 = MAXDIR('dfs_test.drillTestDir','min_max_dir/2016'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_2.q b/framework/resources/Functional/min_max_dir/drill-3894_2.q index 4cbc2b4dc..5a9d83035 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_2.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_2.q @@ -1 +1 @@ -SELECT * FROM dfs.drillMinMaxDir.`.` WHERE dir0 = MAXDIR('dfs.drillMinMaxDir','.'); +SELECT * FROM dfs_test.drillMinMaxDir.`.` WHERE dir0 = MAXDIR('dfs_test.drillMinMaxDir','.'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_20.q b/framework/resources/Functional/min_max_dir/drill-3894_20.q index 1d8099f24..0fe4633e2 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_20.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_20.q @@ -1 +1 @@ 
-SELECT * FROM `dfs.drillTestDir`.`min_max_dir/2016` WHERE dir0 = IMAXDIR('dfs.drillTestDir','min_max_dir/2016'); +SELECT * FROM `dfs_test.drillTestDir`.`min_max_dir/2016` WHERE dir0 = IMAXDIR('dfs_test.drillTestDir','min_max_dir/2016'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_21.q b/framework/resources/Functional/min_max_dir/drill-3894_21.q index 4464e3c27..5359e121c 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_21.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_21.q @@ -1 +1 @@ -SELECT dir0, dir1 FROM `dfs.drillTestDir`.`min_max_dir/2016` WHERE dir0 = MINDIR('dfs.drillTestDir','min_max_dir/2016'); +SELECT dir0, dir1 FROM `dfs_test.drillTestDir`.`min_max_dir/2016` WHERE dir0 = MINDIR('dfs_test.drillTestDir','min_max_dir/2016'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_22.q b/framework/resources/Functional/min_max_dir/drill-3894_22.q index 3d79345f7..b5eba946f 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_22.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_22.q @@ -1 +1 @@ -SELECT dir0, dir1 FROM `dfs.drillTestDir`.`min_max_dir/2016` WHERE dir0 = IMINDIR('dfs.drillTestDir','min_max_dir/2016'); +SELECT dir0, dir1 FROM `dfs_test.drillTestDir`.`min_max_dir/2016` WHERE dir0 = IMINDIR('dfs_test.drillTestDir','min_max_dir/2016'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_23.q b/framework/resources/Functional/min_max_dir/drill-3894_23.q index 8a045784e..a828d888a 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_23.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_23.q @@ -1 +1 @@ -SELECT * FROM `min_max_dir` WHERE dir1 = IMINDIR('dfs.drillTestDir','min_max_dir/2016'); +SELECT * FROM `min_max_dir` WHERE dir1 = IMINDIR('dfs_test.drillTestDir','min_max_dir/2016'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_24.q b/framework/resources/Functional/min_max_dir/drill-3894_24.q index 
ce9595b2d..25e558a40 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_24.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_24.q @@ -1 +1 @@ -SELECT dir0,dir1,dir2 FROM `min_max_dir` WHERE dir1 = MAXDIR('dfs.drillTestDir','min_max_dir/2016'); +SELECT dir0,dir1,dir2 FROM `min_max_dir` WHERE dir1 = MAXDIR('dfs_test.drillTestDir','min_max_dir/2016'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_25.q b/framework/resources/Functional/min_max_dir/drill-3894_25.q index 7b86b1779..321c80ec5 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_25.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_25.q @@ -1 +1 @@ -SELECT * FROM min_max_dir WHERE dir1 = IMAXDIR('dfs.drillTestDir','min_max_dir/2016') and voter_id > 250; +SELECT * FROM min_max_dir WHERE dir1 = IMAXDIR('dfs_test.drillTestDir','min_max_dir/2016') and voter_id > 250; diff --git a/framework/resources/Functional/min_max_dir/drill-3894_26.q b/framework/resources/Functional/min_max_dir/drill-3894_26.q index b8cd8ac1c..cbbc75acb 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_26.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_26.q @@ -1 +1 @@ -SELECT voter_id, name, contributions FROM dfs.drillMinMaxDir1.`.` WHERE dir0 = IMINDIR('dfs.drillMinMaxDir1') and name like '%van%'; +SELECT voter_id, name, contributions FROM dfs_test.drillMinMaxDir1.`.` WHERE dir0 = IMINDIR('dfs_test.drillMinMaxDir1') and name like '%van%'; diff --git a/framework/resources/Functional/min_max_dir/drill-3894_27.q b/framework/resources/Functional/min_max_dir/drill-3894_27.q index 90589b918..50b64312f 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_27.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_27.q @@ -1 +1 @@ -SELECT distinct(registration), count(*) as `count` FROM dfs.drillMinMaxDir.`.` WHERE dir0 = MAXDIR('dfs.drillMinMaxDir') group by registration; +SELECT distinct(registration), count(*) as `count` 
FROM dfs_test.drillMinMaxDir.`.` WHERE dir0 = MAXDIR('dfs_test.drillMinMaxDir') group by registration; diff --git a/framework/resources/Functional/min_max_dir/drill-3894_28.q b/framework/resources/Functional/min_max_dir/drill-3894_28.q index 594e0ef34..a9a25de8c 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_28.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_28.q @@ -1 +1 @@ -select dir1,count(*) from `min_max_dir` where dir0=MAXDIR('dfs.drillTestDir','min_max_dir') group by dir1; +select dir1,count(*) from `min_max_dir` where dir0=MAXDIR('dfs_test.drillTestDir','min_max_dir') group by dir1; diff --git a/framework/resources/Functional/min_max_dir/drill-3894_29.q b/framework/resources/Functional/min_max_dir/drill-3894_29.q index 4b86fbfc9..50605fded 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_29.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_29.q @@ -1 +1 @@ -SELECT distinct(dir2), count(*) FROM `min_max_dir` WHERE dir0 = MAXDIR('dfs.drillTestDir','min_max_dir') group by dir2; +SELECT distinct(dir2), count(*) FROM `min_max_dir` WHERE dir0 = MAXDIR('dfs_test.drillTestDir','min_max_dir') group by dir2; diff --git a/framework/resources/Functional/min_max_dir/drill-3894_3.q b/framework/resources/Functional/min_max_dir/drill-3894_3.q index 787106330..a573af616 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_3.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_3.q @@ -1 +1 @@ -SELECT * FROM dfs.drillMinMaxDir1.`.` WHERE dir0 = MINDIR('dfs.drillMinMaxDir1','.'); +SELECT * FROM dfs_test.drillMinMaxDir1.`.` WHERE dir0 = MINDIR('dfs_test.drillMinMaxDir1','.'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_30.q b/framework/resources/Functional/min_max_dir/drill-3894_30.q index cbcabe3ec..21be3c79e 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_30.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_30.q @@ -1 +1 @@ -SELECT * 
FROM `min_max_dir` WHERE dir0 = MAXDIR('dfs.drillTestDir','min_max_dir') and dir1 = IMINDIR('dfs.drillTestDir','min_max_dir/2016'); +SELECT * FROM `min_max_dir` WHERE dir0 = MAXDIR('dfs_test.drillTestDir','min_max_dir') and dir1 = IMINDIR('dfs_test.drillTestDir','min_max_dir/2016'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_31.q b/framework/resources/Functional/min_max_dir/drill-3894_31.q index 213ba474e..a2092737e 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_31.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_31.q @@ -1 +1 @@ -select min(dir1), min(dir2) from `min_max_dir` where dir0=MAXDIR('dfs.drillTestDir','min_max_dir'); +select min(dir1), min(dir2) from `min_max_dir` where dir0=MAXDIR('dfs_test.drillTestDir','min_max_dir'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_32.q b/framework/resources/Functional/min_max_dir/drill-3894_32.q index 26136e83c..ddc5edd7f 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_32.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_32.q @@ -1 +1 @@ -SELECT max(dir1),max(dir2) FROM dfs.drillMinMaxDir.`.` WHERE dir0 = IMAXDIR('dfs.drillMinMaxDir'); +SELECT max(dir1),max(dir2) FROM dfs_test.drillMinMaxDir.`.` WHERE dir0 = IMAXDIR('dfs_test.drillMinMaxDir'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_33.q b/framework/resources/Functional/min_max_dir/drill-3894_33.q index db7e357f3..6ebd1de9a 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_33.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_33.q @@ -1 +1 @@ -select dir0,dir1,dir2 from `min_max_dir` where dir0=MAXDIR('dfs.drillTestDir','min_max_dir') order by dir0,dir1,dir2 limit 1; +select dir0,dir1,dir2 from `min_max_dir` where dir0=MAXDIR('dfs_test.drillTestDir','min_max_dir') order by dir0,dir1,dir2 limit 1; diff --git a/framework/resources/Functional/min_max_dir/drill-3894_34.q 
b/framework/resources/Functional/min_max_dir/drill-3894_34.q index 34c978261..a0331a6a3 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_34.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_34.q @@ -1 +1 @@ -SELECT count(dir0) FROM dfs.drillMinMaxDir.`.` WHERE dir0 = IMAXDIR('dfs.drillMinMaxDir'); +SELECT count(dir0) FROM dfs_test.drillMinMaxDir.`.` WHERE dir0 = IMAXDIR('dfs_test.drillMinMaxDir'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_35.q b/framework/resources/Functional/min_max_dir/drill-3894_35.q index 3416fd032..fe598863c 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_35.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_35.q @@ -1 +1 @@ -SELECT count(dir1) FROM dfs.drillMinMaxDir.`.` WHERE dir0 = MINDIR('dfs.drillMinMaxDir'); +SELECT count(dir1) FROM dfs_test.drillMinMaxDir.`.` WHERE dir0 = MINDIR('dfs_test.drillMinMaxDir'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_36.q b/framework/resources/Functional/min_max_dir/drill-3894_36.q index d2c2f50f1..506657575 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_36.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_36.q @@ -1 +1 @@ -SELECT * FROM dfs.drillMinMaxDir.`.` WHERE dir1 = IMAXDIR('dfs.drillMinMaxDir'); +SELECT * FROM dfs_test.drillMinMaxDir.`.` WHERE dir1 = IMAXDIR('dfs_test.drillMinMaxDir'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_37.q b/framework/resources/Functional/min_max_dir/drill-3894_37.q index f6cb50322..f01d9ecc0 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_37.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_37.q @@ -1 +1 @@ -SELECT * FROM dfs.drillTestDir.`min_max_dir` WHERE dir1 = MAXDIR('dfs.drillTestDir','min_max_dir'); +SELECT * FROM dfs_test.drillTestDir.`min_max_dir` WHERE dir1 = MAXDIR('dfs_test.drillTestDir','min_max_dir'); diff --git 
a/framework/resources/Functional/min_max_dir/drill-3894_4.q b/framework/resources/Functional/min_max_dir/drill-3894_4.q index b37a97405..a28135103 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_4.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_4.q @@ -1 +1 @@ -SELECT * FROM dfs.drillMinMaxDir1.`.` WHERE dir0 = IMINDIR('dfs.drillMinMaxDir1','.'); +SELECT * FROM dfs_test.drillMinMaxDir1.`.` WHERE dir0 = IMINDIR('dfs_test.drillMinMaxDir1','.'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_5.q b/framework/resources/Functional/min_max_dir/drill-3894_5.q index a6b3d5b61..bb953a2a9 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_5.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_5.q @@ -1 +1 @@ -SELECT * FROM dfs.drillMinMaxDir1.`.` WHERE dir0 = MAXDIR('dfs.drillMinMaxDir1','.'); +SELECT * FROM dfs_test.drillMinMaxDir1.`.` WHERE dir0 = MAXDIR('dfs_test.drillMinMaxDir1','.'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_6.q b/framework/resources/Functional/min_max_dir/drill-3894_6.q index c774b075b..8f285de9b 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_6.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_6.q @@ -1 +1 @@ -SELECT * FROM dfs.drillMinMaxDir1.`.` WHERE dir0 = IMAXDIR('dfs.drillMinMaxDir1','.'); +SELECT * FROM dfs_test.drillMinMaxDir1.`.` WHERE dir0 = IMAXDIR('dfs_test.drillMinMaxDir1','.'); diff --git a/framework/resources/Functional/min_max_dir/drill-3894_7.q b/framework/resources/Functional/min_max_dir/drill-3894_7.q index a35de7302..ae469256c 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_7.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_7.q @@ -1 +1 @@ -SELECT * FROM dfs.drillMinMaxDir.`.` WHERE dir0 = MINDIR('dfs.drillMinMaxDir') order by dir1; +SELECT * FROM dfs_test.drillMinMaxDir.`.` WHERE dir0 = MINDIR('dfs_test.drillMinMaxDir') order by dir1; diff --git 
a/framework/resources/Functional/min_max_dir/drill-3894_8.q b/framework/resources/Functional/min_max_dir/drill-3894_8.q index 11d3d92c7..52fe37254 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_8.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_8.q @@ -1 +1 @@ -SELECT * FROM dfs.drillMinMaxDir.`.` WHERE dir0 = IMINDIR('dfs.drillMinMaxDir') order by dir1; +SELECT * FROM dfs_test.drillMinMaxDir.`.` WHERE dir0 = IMINDIR('dfs_test.drillMinMaxDir') order by dir1; diff --git a/framework/resources/Functional/min_max_dir/drill-3894_9.q b/framework/resources/Functional/min_max_dir/drill-3894_9.q index 489c18ebf..4ab34e5f4 100644 --- a/framework/resources/Functional/min_max_dir/drill-3894_9.q +++ b/framework/resources/Functional/min_max_dir/drill-3894_9.q @@ -1 +1 @@ -SELECT * FROM dfs.drillMinMaxDir.`.` WHERE dir0 = MAXDIR('dfs.drillMinMaxDir'); +SELECT * FROM dfs_test.drillMinMaxDir.`.` WHERE dir0 = MAXDIR('dfs_test.drillMinMaxDir'); diff --git a/framework/resources/Functional/min_max_dir/min_max_dir.json b/framework/resources/Functional/min_max_dir/min_max_dir.json index 8c35483a3..f9b902048 100644 --- a/framework/resources/Functional/min_max_dir/min_max_dir.json +++ b/framework/resources/Functional/min_max_dir/min_max_dir.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/misc/misc.json b/framework/resources/Functional/misc/misc.json index 5aebc45be..1b6528dcb 100644 --- a/framework/resources/Functional/misc/misc.json +++ b/framework/resources/Functional/misc/misc.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/morefiles/morefiles.json 
b/framework/resources/Functional/morefiles/morefiles.json index e4d099c9e..1d0d94b15 100644 --- a/framework/resources/Functional/morefiles/morefiles.json +++ b/framework/resources/Functional/morefiles/morefiles.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.morefiles", + "schema": "dfs_test.morefiles", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/orderby/orderby.json b/framework/resources/Functional/orderby/orderby.json index f503f74b4..32849429c 100644 --- a/framework/resources/Functional/orderby/orderby.json +++ b/framework/resources/Functional/orderby/orderby.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirNumerical", + "schema": "dfs_test.drillTestDirNumerical", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/p1tests/p1tests.json b/framework/resources/Functional/p1tests/p1tests.json index 99065f64a..bd5f0c31f 100644 --- a/framework/resources/Functional/p1tests/p1tests.json +++ b/framework/resources/Functional/p1tests/p1tests.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirP1", + "schema": "dfs_test.drillTestDirP1", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/parquet_storage/complex_reader/parquetComplexTest.json b/framework/resources/Functional/parquet_storage/complex_reader/parquetComplexTest.json index e6d1adba4..403280f7b 100644 --- a/framework/resources/Functional/parquet_storage/complex_reader/parquetComplexTest.json +++ b/framework/resources/Functional/parquet_storage/complex_reader/parquetComplexTest.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git 
a/framework/resources/Functional/parquet_storage/negative/negative.json b/framework/resources/Functional/parquet_storage/negative/negative.json index a4067b868..833b8752c 100644 --- a/framework/resources/Functional/parquet_storage/negative/negative.json +++ b/framework/resources/Functional/parquet_storage/negative/negative.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/parquet_storage/parquet_autoPrtn/autoPrtnPrqNstd.json b/framework/resources/Functional/parquet_storage/parquet_autoPrtn/autoPrtnPrqNstd.json index feb3164ab..b7560bcbc 100644 --- a/framework/resources/Functional/parquet_storage/parquet_autoPrtn/autoPrtnPrqNstd.json +++ b/framework/resources/Functional/parquet_storage/parquet_autoPrtn/autoPrtnPrqNstd.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/drill4996.q b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/drill4996.q index b39ef263d..24fd342bd 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/drill4996.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/drill4996.q @@ -1 +1 @@ -select i_rec_start_date, i_size from dfs.`/drill/testdata/parquet_date/auto_partition/item_multipart_autorefresh` group by i_rec_start_date, i_size; +select i_rec_start_date, i_size from dfs_test.`/drill/testdata/parquet_date/auto_partition/item_multipart_autorefresh` group by i_rec_start_date, i_size; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/parquet_date.json 
b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/parquet_date.json index 3f3da0cb3..a4bc15124 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/parquet_date.json +++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/parquet_date.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q1.q b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q1.q index ec633a69f..27da11ace 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q1.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q1.q @@ -1 +1 @@ -select distinct i_size from dfs.`/drill/testdata/parquet_date/auto_partition/item_single_1.2` where i_rec_start_date = date '2000-10-27'; +select distinct i_size from dfs_test.`/drill/testdata/parquet_date/auto_partition/item_single_1.2` where i_rec_start_date = date '2000-10-27'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q2.q b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q2.q index 3a55d7791..1d7532cad 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q2.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q2.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/parquet_date/auto_partition/item_multipart_autorefresh` where i_rec_start_date = '2000-10-27' and i_size='N/A' and i_current_price = 7.11; +select * from dfs_test.`/drill/testdata/parquet_date/auto_partition/item_multipart_autorefresh` where i_rec_start_date = '2000-10-27' and i_size='N/A' and i_current_price = 7.11; diff --git 
a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q3.q b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q3.q index 5c294f0e7..341e9bbd8 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q3.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q3.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/parquet_date/auto_partition/lineitem_single` where dir0='1.2' and l_moddate = date '1996-03-01'; +select count(*) from dfs_test.`/drill/testdata/parquet_date/auto_partition/lineitem_single` where dir0='1.2' and l_moddate = date '1996-03-01'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q4.q b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q4.q index e49981597..bca5f8712 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q4.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q4.q @@ -1 +1 @@ -select l_shipdate from dfs.`/drill/testdata/parquet_date/auto_partition/lineitem_single` where dir0='1.6' and l_moddate = date '1996-03-01'; +select l_shipdate from dfs_test.`/drill/testdata/parquet_date/auto_partition/lineitem_single` where dir0='1.6' and l_moddate = date '1996-03-01'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q5.q b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q5.q index 26ea55539..21d1064f0 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q5.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q5.q @@ -1 +1 @@ -select distinct i_rec_end_date from dfs.`/drill/testdata/parquet_date/auto_partition/item_single` where dir0='1.9' and i_rec_start_date = date '1997-10-27'; +select distinct i_rec_end_date from 
dfs_test.`/drill/testdata/parquet_date/auto_partition/item_single` where dir0='1.9' and i_rec_start_date = date '1997-10-27'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q6.q b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q6.q index 647721423..ca632533e 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q6.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q6.q @@ -1 +1 @@ -select i_item_sk, i_rec_start_date, i_rec_end_date from dfs.`/drill/testdata/parquet_date/auto_partition/item_multidate` where (dir0='1.9' or dir0='1.2') and i_rec_start_date = date '1997-10-27' and i_rec_end_date = date '2000-10-26' order by i_rec_start_date, i_item_sk limit 50; +select i_item_sk, i_rec_start_date, i_rec_end_date from dfs_test.`/drill/testdata/parquet_date/auto_partition/item_multidate` where (dir0='1.9' or dir0='1.2') and i_rec_start_date = date '1997-10-27' and i_rec_end_date = date '2000-10-26' order by i_rec_start_date, i_item_sk limit 50; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q7.q b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q7.q index c013cdc9f..44f487dc8 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q7.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/data/q7.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/parquet_date/auto_partition/item_multidate` where dir0='1.9' and i_rec_start_date = date '1997-10-27' and i_rec_end_date = date '2000-10-26' union all select count(*) from dfs.`/drill/testdata/parquet_date/auto_partition/item_multidate` where dir0='1.2' and i_rec_start_date = date '1999-10-28' and i_rec_end_date = date '2001-10-26'; +select count(*) from dfs_test.`/drill/testdata/parquet_date/auto_partition/item_multidate` where 
dir0='1.9' and i_rec_start_date = date '1997-10-27' and i_rec_end_date = date '2000-10-26' union all select count(*) from dfs_test.`/drill/testdata/parquet_date/auto_partition/item_multidate` where dir0='1.2' and i_rec_start_date = date '1999-10-28' and i_rec_end_date = date '2001-10-26'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/parquet_date.json b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/parquet_date.json index fb809434d..a531edbe5 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/parquet_date.json +++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/parquet_date.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q1.q b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q1.q index 56e174308..19eeab9eb 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q1.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q1.q @@ -1 +1 @@ -explain plan for select distinct i_size from dfs.`/drill/testdata/parquet_date/auto_partition/item_single_1.2` where i_rec_start_date = date '2000-10-27'; +explain plan for select distinct i_size from dfs_test.`/drill/testdata/parquet_date/auto_partition/item_single_1.2` where i_rec_start_date = date '2000-10-27'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q2.q b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q2.q index 36fdb63c3..0b746460d 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q2.q +++ 
b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q2.q @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/parquet_date/auto_partition/item_multipart_autorefresh` where i_rec_start_date = '2000-10-27' and i_size='N/A' and i_current_price = 7.11; +explain plan for select * from dfs_test.`/drill/testdata/parquet_date/auto_partition/item_multipart_autorefresh` where i_rec_start_date = '2000-10-27' and i_size='N/A' and i_current_price = 7.11; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q3.q b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q3.q index 9e3bdcf3c..1db17eaa4 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q3.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q3.q @@ -1 +1 @@ -explain plan for select count(*) from dfs.`/drill/testdata/parquet_date/auto_partition/lineitem_single` where dir0='1.2' and l_moddate = date '1996-03-01'; +explain plan for select count(*) from dfs_test.`/drill/testdata/parquet_date/auto_partition/lineitem_single` where dir0='1.2' and l_moddate = date '1996-03-01'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q4.q b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q4.q index d11c10d42..033161f06 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q4.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q4.q @@ -1 +1 @@ -explain plan for select l_shipdate from dfs.`/drill/testdata/parquet_date/auto_partition/lineitem_single` where dir0='1.6' and l_moddate = date '1996-03-01'; +explain plan for select l_shipdate from dfs_test.`/drill/testdata/parquet_date/auto_partition/lineitem_single` where dir0='1.6' and l_moddate = date '1996-03-01'; diff --git 
a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q5.q.drill4999 b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q5.q.drill4999 index 373c4142b..b4a21ad05 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q5.q.drill4999 +++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q5.q.drill4999 @@ -1 +1 @@ -explain plan for select distinct i_rec_end_date from dfs.`/drill/testdata/parquet_date/auto_partition/item_single` where dir0='1.9' and i_rec_start_date = date '1997-10-27'; +explain plan for select distinct i_rec_end_date from dfs_test.`/drill/testdata/parquet_date/auto_partition/item_single` where dir0='1.9' and i_rec_start_date = date '1997-10-27'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q6.q b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q6.q index 23d964b53..c5adecfdf 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q6.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q6.q @@ -1 +1 @@ -explain plan for select i_item_sk, i_rec_start_date, i_rec_end_date from dfs.`/drill/testdata/parquet_date/auto_partition/item_multidate` where (dir0='1.9' or dir0='1.2') and i_rec_start_date = date '1997-10-27' and i_rec_end_date = date '2000-10-26' order by i_rec_start_date, i_item_sk limit 50; +explain plan for select i_item_sk, i_rec_start_date, i_rec_end_date from dfs_test.`/drill/testdata/parquet_date/auto_partition/item_multidate` where (dir0='1.9' or dir0='1.2') and i_rec_start_date = date '1997-10-27' and i_rec_end_date = date '2000-10-26' order by i_rec_start_date, i_item_sk limit 50; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q7.q.drill4999 
b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q7.q.drill4999 index 4f2da697b..856c419be 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q7.q.drill4999 +++ b/framework/resources/Functional/parquet_storage/parquet_date/auto_partition/plan/q7.q.drill4999 @@ -1 +1 @@ -explain plan for select count(*) from dfs.`/drill/testdata/parquet_date/auto_partition/item_multidate` where dir0='1.9' and i_rec_start_date = date '1997-10-27' and i_rec_end_date = date '2000-10-26' union all select count(*) from dfs.`/drill/testdata/parquet_date/auto_partition/item_multidate` where dir0='1.2' and i_rec_start_date = date '1999-10-28' and i_rec_end_date = date '2001-10-26'; +explain plan for select count(*) from dfs_test.`/drill/testdata/parquet_date/auto_partition/item_multidate` where dir0='1.9' and i_rec_start_date = date '1997-10-27' and i_rec_end_date = date '2000-10-26' union all select count(*) from dfs_test.`/drill/testdata/parquet_date/auto_partition/item_multidate` where dir0='1.2' and i_rec_start_date = date '1999-10-28' and i_rec_end_date = date '2001-10-26'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/drill5004.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/drill5004.q index 49dd94e7d..e155bd03a 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/drill5004.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/drill5004.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/parquet_date/DRILL-5004.parquet`; +select * from dfs_test.`/drill/testdata/parquet_date/DRILL-5004.parquet`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/mixed1.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/mixed1.q index 2a6a43131..8c61ee653 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/mixed1.q +++ 
b/framework/resources/Functional/parquet_storage/parquet_date/generic/mixed1.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/parquet_date/mixed1`; +select * from dfs_test.`/drill/testdata/parquet_date/mixed1`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/parquet_date.json b/framework/resources/Functional/parquet_storage/parquet_date/generic/parquet_date.json index ccfca8ff4..8699d0b72 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/parquet_date.json +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/parquet_date.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/q1.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/q1.q index 89564480c..24334670b 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/q1.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/q1.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/parquet_date/lineitem_dates`; +select * from dfs_test.`/drill/testdata/parquet_date/lineitem_dates`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/q2.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/q2.q index 89994b820..d5bb1f82b 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/q2.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/q2.q @@ -1 +1 @@ -select EXPR$0, l_commitdate+1, date_add(l_receiptdate, 5) from dfs.`/drill/testdata/parquet_date/lineitem_dates` where EXPR$0 > date '1998-11-10'; +select EXPR$0, l_commitdate+1, date_add(l_receiptdate, 5) from dfs_test.`/drill/testdata/parquet_date/lineitem_dates` where EXPR$0 > date '1998-11-10'; diff --git 
a/framework/resources/Functional/parquet_storage/parquet_date/generic/q3.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/q3.q index d479f759e..991a60965 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/q3.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/q3.q @@ -1 +1 @@ -select d.EXPR$0 col1_date, d.l_commitdate, d.l_receiptdate from dfs.`/drill/testdata/parquet_date/lineitem_dates` d inner join cp.`tpch/lineitem.parquet` cpl on d.EXPR$0 = cpl.l_shipdate where d.EXPR$0 > date '1998-11-10'; +select d.EXPR$0 col1_date, d.l_commitdate, d.l_receiptdate from dfs_test.`/drill/testdata/parquet_date/lineitem_dates` d inner join cp.`tpch/lineitem.parquet` cpl on d.EXPR$0 = cpl.l_shipdate where d.EXPR$0 > date '1998-11-10'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/q4.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/q4.q index 47adf41d6..009536a53 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/q4.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/q4.q @@ -1 +1 @@ -select d.EXPR$0 col1_date from dfs.`/drill/testdata/parquet_date/lineitem_dates` d where d.EXPR$0 > date '1998-11-10' union select cpl.l_shipdate from cp.`tpch/lineitem.parquet` cpl where cpl.l_shipdate > date '1998-11-09'; +select d.EXPR$0 col1_date from dfs_test.`/drill/testdata/parquet_date/lineitem_dates` d where d.EXPR$0 > date '1998-11-10' union select cpl.l_shipdate from cp.`tpch/lineitem.parquet` cpl where cpl.l_shipdate > date '1998-11-09'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/q5.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/q5.q index a821033d4..b28e63652 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/q5.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/q5.q @@ -1,2 +1,2 @@ 
-select l_shipdate from dfs.`/drill/testdata/parquet_date/dates_nodrillversion/drillgen2_lineitem` order by l_linenumber limit 100; +select l_shipdate from dfs_test.`/drill/testdata/parquet_date/dates_nodrillversion/drillgen2_lineitem` order by l_linenumber limit 100; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/q6.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/q6.q index 89994b820..d5bb1f82b 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/q6.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/q6.q @@ -1 +1 @@ -select EXPR$0, l_commitdate+1, date_add(l_receiptdate, 5) from dfs.`/drill/testdata/parquet_date/lineitem_dates` where EXPR$0 > date '1998-11-10'; +select EXPR$0, l_commitdate+1, date_add(l_receiptdate, 5) from dfs_test.`/drill/testdata/parquet_date/lineitem_dates` where EXPR$0 > date '1998-11-10'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/q7.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/q7.q index 586980b9f..7072d639f 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/q7.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/q7.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/parquet_date/dates_nodrillversion/drillgen1`; +select * from dfs_test.`/drill/testdata/parquet_date/dates_nodrillversion/drillgen1`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/q8.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/q8.q index 26c306c4b..3a8992a55 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/q8.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/q8.q @@ -1 +1 @@ -select l_extendedprice, l_shipdate, l_commitdate from dfs.`/drill/testdata/parquet_date/fixeddate_lineitem` order by l_extendedprice limit 10; +select 
l_extendedprice, l_shipdate, l_commitdate from dfs_test.`/drill/testdata/parquet_date/fixeddate_lineitem` order by l_extendedprice limit 10; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/spark1.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/spark1.q index 145ebc564..4318235f1 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/spark1.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/spark1.q @@ -1 +1 @@ -select a,b,c from dfs.`/drill/testdata/parquet_date/spark_generated/d1`; +select a,b,c from dfs_test.`/drill/testdata/parquet_date/spark_generated/d1`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/spark2.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/spark2.q index cd5a3b7c2..77604a904 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/spark2.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/spark2.q @@ -1 +1 @@ -select a,b,c from dfs.`/drill/testdata/parquet_date/spark_generated/d2`; +select a,b,c from dfs_test.`/drill/testdata/parquet_date/spark_generated/d2`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/spark3.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/spark3.q index 176b8a36c..5936868ad 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/spark3.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/spark3.q @@ -1 +1 @@ -select a,b,c from dfs.`/drill/testdata/parquet_date/spark_generated/d3`; +select a,b,c from dfs_test.`/drill/testdata/parquet_date/spark_generated/d3`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/spark4.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/spark4.q index c925e6caf..63feeee86 100644 --- 
a/framework/resources/Functional/parquet_storage/parquet_date/generic/spark4.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/spark4.q @@ -1 +1 @@ -select a,b,c from dfs.`/drill/testdata/parquet_date/spark_generated/d4`; +select a,b,c from dfs_test.`/drill/testdata/parquet_date/spark_generated/d4`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/spark5.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/spark5.q index 1f94ffb8b..ea7c4016f 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/spark5.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/spark5.q @@ -1 +1 @@ -select distinct c from dfs.`/drill/testdata/parquet_date/spark_generated/d4`; +select distinct c from dfs_test.`/drill/testdata/parquet_date/spark_generated/d4`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/spark6.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/spark6.q index 4134af0a1..a8908e065 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/spark6.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/spark6.q @@ -1 +1 @@ -select distinct c from dfs.`/drill/testdata/parquet_date/spark_generated/d4` order by c nulls first limit 10; +select distinct c from dfs_test.`/drill/testdata/parquet_date/spark_generated/d4` order by c nulls first limit 10; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/generic/spark7.q b/framework/resources/Functional/parquet_storage/parquet_date/generic/spark7.q index 389efe129..10fcb1355 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/generic/spark7.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/generic/spark7.q @@ -1 +1 @@ -select a,b,c from dfs.`/drill/testdata/parquet_date/spark_generated/d4` where c > date '2016-10-1' and c is not null;; +select a,b,c from 
dfs_test.`/drill/testdata/parquet_date/spark_generated/d4` where c > date '2016-10-1' and c is not null;; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q15.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q15.q index db84e1648..60562e73e 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q15.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q15.q @@ -1 +1 @@ -select x from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/ctas_t14`; +select x from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/ctas_t14`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q16.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q16.q index 2677b1efb..2af70dfa3 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q16.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q16.q @@ -1 +1 @@ -select date_col from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/fewtypes_null` order by date_col; +select date_col from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/fewtypes_null` order by date_col; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q17.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q17.q index d94d722f5..d61669e76 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q17.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q17.q @@ -1 +1 @@ -select min(date_col) from 
dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2/fewtypes_null_large`; +select min(date_col) from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2/fewtypes_null_large`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q18.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q18.q index 2c2198ee5..eccf81247 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q18.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q18.q @@ -1 +1 @@ -select min(date_col) from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/fewtypes_null_large`; +select min(date_col) from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/fewtypes_null_large`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q19.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q19.q index 92b790044..6a108ad70 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q19.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q19.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/fewtypes_null_large` where date_col is null and varchar_col is not null; +select count(*) from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/fewtypes_null_large` where date_col is null and varchar_col is not null; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q20.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q20.q index bfc46f700..5bcd24b18 100644 --- 
a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q20.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q20.q @@ -1,8 +1,8 @@ -select distinct dt from (select date_col dt from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/fewtypes_null_large` where date_col is not null +select distinct dt from (select date_col dt from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/fewtypes_null_large` where date_col is not null union all -select c dt from dfs.`/drill/testdata/parquet_date/spark_generated/d4` where c is not null +select c dt from dfs_test.`/drill/testdata/parquet_date/spark_generated/d4` where c is not null union all -select l_shipdate dt from dfs.`/drill/testdata/parquet_date/dates_nodrillversion/drillgen2_lineitem` +select l_shipdate dt from dfs_test.`/drill/testdata/parquet_date/dates_nodrillversion/drillgen2_lineitem` union all -select l_shipdate dt from dfs.`/drill/testdata/parquet_date/fixeddate_lineitem` +select l_shipdate dt from dfs_test.`/drill/testdata/parquet_date/fixeddate_lineitem` ) data order by dt asc limit 1000; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q21.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q21.q index 07b45bd17..06808e5a9 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q21.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q21.q @@ -1,3 +1,3 @@ -select count(distinct dt) from (select date_col dt from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/fewtypes_null_large` where date_col is not null +select count(distinct dt) from (select date_col dt from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/fewtypes_null_large` where date_col 
is not null union all -select c dt from dfs.`/drill/testdata/parquet_date/spark_generated/d4` where c is not null) data +select c dt from dfs_test.`/drill/testdata/parquet_date/spark_generated/d4` where c is not null) data diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q22.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q22.q index e0da1f0d1..f2b9540e7 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q22.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q22.q @@ -1,8 +1,8 @@ -select distinct dt from (select date_col dt from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/fewtypes_null_large` where date_col is not null +select distinct dt from (select date_col dt from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/fewtypes_null_large` where date_col is not null union all -select c dt from dfs.`/drill/testdata/parquet_date/spark_generated/d4` where c is not null +select c dt from dfs_test.`/drill/testdata/parquet_date/spark_generated/d4` where c is not null union all -select l_shipdate dt from dfs.`/drill/testdata/parquet_date/dates_nodrillversion/drillgen2_lineitem` +select l_shipdate dt from dfs_test.`/drill/testdata/parquet_date/dates_nodrillversion/drillgen2_lineitem` union all -select l_shipdate dt from dfs.`/drill/testdata/parquet_date/fixeddate_lineitem` +select l_shipdate dt from dfs_test.`/drill/testdata/parquet_date/fixeddate_lineitem` ) data order by dt desc limit 1000; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q23.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q23.q index 8b9640465..04e3abfa9 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q23.q +++ 
b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q23.q @@ -1 +1 @@ -select distinct l_shipdate from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/pre1.0lineitem_1.2mc`; +select distinct l_shipdate from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2_autogen/pre1.0lineitem_1.2mc`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q24.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q24.q index 24c0fe95c..a62a15baf 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q24.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.2gen/q24.q @@ -1 +1 @@ -select distinct l_shipdate from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2/pre1.0lineitem_1.2mc`; +select distinct l_shipdate from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.2/pre1.0lineitem_1.2mc`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q15.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q15.q index 68df4649f..78a62dd7e 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q15.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q15.q @@ -1 +1 @@ -select x from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/ctas_t14`; +select x from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/ctas_t14`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q16.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q16.q index 8ae826fae..87788017a 100644 --- 
a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q16.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q16.q @@ -1 +1 @@ -select date_col from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/fewtypes_null` order by date_col; +select date_col from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/fewtypes_null` order by date_col; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q17.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q17.q index 9d4198eff..0bd21921f 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q17.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q17.q @@ -1 +1 @@ -select min(date_col) from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6/fewtypes_null_large`; +select min(date_col) from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6/fewtypes_null_large`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q18.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q18.q index 2866202a5..f9e61d64c 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q18.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q18.q @@ -1 +1 @@ -select min(date_col) from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/fewtypes_null_large`; +select min(date_col) from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/fewtypes_null_large`; diff --git 
a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q19.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q19.q index 08bfb7a2f..872bc3b9e 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q19.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q19.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/fewtypes_null_large` where date_col is null and varchar_col is not null; +select count(*) from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/fewtypes_null_large` where date_col is null and varchar_col is not null; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q20.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q20.q index 70eaf4783..8fe4a1b94 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q20.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q20.q @@ -1,8 +1,8 @@ -select distinct dt from (select date_col dt from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/fewtypes_null_large` where date_col is not null +select distinct dt from (select date_col dt from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/fewtypes_null_large` where date_col is not null union all -select c dt from dfs.`/drill/testdata/parquet_date/spark_generated/d4` where c is not null +select c dt from dfs_test.`/drill/testdata/parquet_date/spark_generated/d4` where c is not null union all -select l_shipdate dt from dfs.`/drill/testdata/parquet_date/dates_nodrillversion/drillgen2_lineitem` +select l_shipdate dt from 
dfs_test.`/drill/testdata/parquet_date/dates_nodrillversion/drillgen2_lineitem` union all -select l_shipdate dt from dfs.`/drill/testdata/parquet_date/fixeddate_lineitem` +select l_shipdate dt from dfs_test.`/drill/testdata/parquet_date/fixeddate_lineitem` ) data order by dt asc limit 1000; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q21.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q21.q index 6accc8f43..27cb36472 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q21.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q21.q @@ -1,3 +1,3 @@ -select count(distinct dt) from (select date_col dt from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/fewtypes_null_large` where date_col is not null +select count(distinct dt) from (select date_col dt from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/fewtypes_null_large` where date_col is not null union all -select c dt from dfs.`/drill/testdata/parquet_date/spark_generated/d4` where c is not null) data +select c dt from dfs_test.`/drill/testdata/parquet_date/spark_generated/d4` where c is not null) data diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q22.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q22.q index 80bfad426..b105a81fa 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q22.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q22.q @@ -1,8 +1,8 @@ -select distinct dt from (select date_col dt from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/fewtypes_null_large` where date_col is not null +select distinct dt from 
(select date_col dt from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/fewtypes_null_large` where date_col is not null union all -select c dt from dfs.`/drill/testdata/parquet_date/spark_generated/d4` where c is not null +select c dt from dfs_test.`/drill/testdata/parquet_date/spark_generated/d4` where c is not null union all -select l_shipdate dt from dfs.`/drill/testdata/parquet_date/dates_nodrillversion/drillgen2_lineitem` +select l_shipdate dt from dfs_test.`/drill/testdata/parquet_date/dates_nodrillversion/drillgen2_lineitem` union all -select l_shipdate dt from dfs.`/drill/testdata/parquet_date/fixeddate_lineitem` +select l_shipdate dt from dfs_test.`/drill/testdata/parquet_date/fixeddate_lineitem` ) data order by dt desc limit 1000; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q23.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q23.q index bf4ccd091..c98cec83c 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q23.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q23.q @@ -1 +1 @@ -select distinct l_shipdate from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/pre1.0lineitem_1.6mc`; +select distinct l_shipdate from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6_autogen/pre1.0lineitem_1.6mc`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q24.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q24.q index 3236d0ce9..197b073df 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q24.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/drill1.6gen/mc_q24.q @@ -1 +1 @@ -select distinct 
l_shipdate from dfs.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6/pre1.0lineitem_1.6mc`; +select distinct l_shipdate from dfs_test.`/drill/testdata/parquet_date/metadata_cache/metadata_cache1.6/pre1.0lineitem_1.6mc`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1.q index 460541856..0045803e8 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/mc_parquet_date/mixed1`; +select * from dfs_test.`/drill/testdata/mc_parquet_date/mixed1`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned1.q.drill5002 b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned1.q.drill5002 index 59db3c24c..5928f68e7 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned1.q.drill5002 +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned1.q.drill5002 @@ -1 +1 @@ -select l_shipdate, `month`(l_shipdate) from dfs.`/drill/testdata/mc_parquet_date/mixed1_partitioned`; +select l_shipdate, `month`(l_shipdate) from dfs_test.`/drill/testdata/mc_parquet_date/mixed1_partitioned`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned2.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned2.q index 08e64b685..f401d831d 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned2.q +++ 
b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned2.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` where date_col = date '1997-03-07'; +select count(*) from dfs_test.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` where date_col = date '1997-03-07'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned3.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned3.q index 44e6a03fc..32e2870f7 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned3.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned3.q @@ -1 +1 @@ -select distinct int_col from dfs.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` where date_col = date '1997-03-07'; +select distinct int_col from dfs_test.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` where date_col = date '1997-03-07'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned4.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned4.q index 22a5f7e9d..de67530b2 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned4.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned4.q @@ -1,5 +1,5 @@ select count(distinct date_col) from ( - select date_col from dfs.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` where dir0=1.2 + select date_col from dfs_test.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` where dir0=1.2 union - select date_col from 
dfs.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` where dir0=1.9 + select date_col from dfs_test.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` where dir0=1.9 ); diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned5.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned5.q index 25fe645d4..1f76146ec 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned5.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned5.q @@ -1 +1 @@ -select a.int_col, b.date_col from dfs.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` a inner join ( select date_col, int_col from dfs.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` where dir0 = '1.2' and date_col > '1996-03-07' ) b on cast(a.date_col as date)= date_add(b.date_col, 5) where a.int_col = 7 and a.dir0='1.9' group by a.int_col, b.date_col; +select a.int_col, b.date_col from dfs_test.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` a inner join ( select date_col, int_col from dfs_test.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` where dir0 = '1.2' and date_col > '1996-03-07' ) b on cast(a.date_col as date)= date_add(b.date_col, 5) where a.int_col = 7 and a.dir0='1.9' group by a.int_col, b.date_col; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned6.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned6.q index b495971c5..d8b9cda9b 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned6.q +++ 
b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned6.q @@ -1 +1 @@ -select l_shipdate from dfs.`/drill/testdata/mc_parquet_date/mixed1_partitioned/1992`; +select l_shipdate from dfs_test.`/drill/testdata/mc_parquet_date/mixed1_partitioned/1992`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned7.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned7.q index 4d523ccf2..9ad345d88 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned7.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned7.q @@ -1 +1 @@ -select l_shipdate from dfs.`/drill/testdata/mc_parquet_date/mixed1_partitioned/1991`; +select l_shipdate from dfs_test.`/drill/testdata/mc_parquet_date/mixed1_partitioned/1991`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned8.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned8.q index 5bc10d277..84ffcc9a1 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned8.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/mixed1_partitioned8.q @@ -1,7 +1,7 @@ select date_col from ( - select date_col from dfs.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` where dir0=1.2 and int_col is null group by date_col + select date_col from dfs_test.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` where dir0=1.2 and int_col is null group by date_col union - select date_col from dfs.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` where dir0=1.9 and big_int_col is null and float_col is null group by date_col + 
select date_col from dfs_test.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` where dir0=1.9 and big_int_col is null and float_col is null group by date_col union - select date_col from dfs.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` where dir0=1.6 and big_int_col is null and float_col is null group by date_col + select date_col from dfs_test.`/drill/testdata/parquet_date/metadata_cache/mixed/fewtypes_null_large` where dir0=1.6 and big_int_col is null and float_col is null group by date_col ); diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q1.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q1.q index adbef7b42..6f6e25422 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q1.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q1.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/mc_parquet_date/lineitem_dates`; +select * from dfs_test.`/drill/testdata/mc_parquet_date/lineitem_dates`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q2.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q2.q index 837898dc0..33bfff7bf 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q2.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q2.q @@ -1 +1 @@ -select EXPR$0, l_commitdate+1, date_add(l_receiptdate, 5) from dfs.`/drill/testdata/mc_parquet_date/lineitem_dates` where EXPR$0 > date '1998-11-10'; +select EXPR$0, l_commitdate+1, date_add(l_receiptdate, 5) from dfs_test.`/drill/testdata/mc_parquet_date/lineitem_dates` where EXPR$0 > date '1998-11-10'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q3.q 
b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q3.q index 0bf18c988..6bd64aaae 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q3.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q3.q @@ -1 +1 @@ -select d.EXPR$0 col1_date, d.l_commitdate, d.l_receiptdate from dfs.`/drill/testdata/mc_parquet_date/lineitem_dates` d inner join cp.`tpch/lineitem.parquet` cpl on d.EXPR$0 = cpl.l_shipdate where d.EXPR$0 > date '1998-11-10'; +select d.EXPR$0 col1_date, d.l_commitdate, d.l_receiptdate from dfs_test.`/drill/testdata/mc_parquet_date/lineitem_dates` d inner join cp.`tpch/lineitem.parquet` cpl on d.EXPR$0 = cpl.l_shipdate where d.EXPR$0 > date '1998-11-10'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q4.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q4.q index 19ed1a31b..761a22df8 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q4.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q4.q @@ -1 +1 @@ -select d.EXPR$0 col1_date from dfs.`/drill/testdata/mc_parquet_date/lineitem_dates` d where d.EXPR$0 > date '1998-11-10' union select cpl.l_shipdate from cp.`tpch/lineitem.parquet` cpl where cpl.l_shipdate > date '1998-11-09'; +select d.EXPR$0 col1_date from dfs_test.`/drill/testdata/mc_parquet_date/lineitem_dates` d where d.EXPR$0 > date '1998-11-10' union select cpl.l_shipdate from cp.`tpch/lineitem.parquet` cpl where cpl.l_shipdate > date '1998-11-09'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q5.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q5.q index 0feabac10..638681ecf 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q5.q 
+++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q5.q @@ -1 +1 @@ -select l_shipdate from dfs.`/drill/testdata/mc_parquet_date/dates_nodrillversion/drillgen2_lineitem` order by l_linenumber limit 100; +select l_shipdate from dfs_test.`/drill/testdata/mc_parquet_date/dates_nodrillversion/drillgen2_lineitem` order by l_linenumber limit 100; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q6.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q6.q index 837898dc0..33bfff7bf 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q6.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q6.q @@ -1 +1 @@ -select EXPR$0, l_commitdate+1, date_add(l_receiptdate, 5) from dfs.`/drill/testdata/mc_parquet_date/lineitem_dates` where EXPR$0 > date '1998-11-10'; +select EXPR$0, l_commitdate+1, date_add(l_receiptdate, 5) from dfs_test.`/drill/testdata/mc_parquet_date/lineitem_dates` where EXPR$0 > date '1998-11-10'; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q7.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q7.q index 23d3537b2..1a967d5e0 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q7.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q7.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/mc_parquet_date/dates_nodrillversion/drillgen1`; +select * from dfs_test.`/drill/testdata/mc_parquet_date/dates_nodrillversion/drillgen1`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q8.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q8.q index 875c5da96..c26fca4d0 100644 --- 
a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q8.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/q8.q @@ -1 +1 @@ -select l_extendedprice, l_shipdate, l_commitdate from dfs.`/drill/testdata/mc_parquet_date/fixeddate_lineitem` order by l_extendedprice limit 10; +select l_extendedprice, l_shipdate, l_commitdate from dfs_test.`/drill/testdata/mc_parquet_date/fixeddate_lineitem` order by l_extendedprice limit 10; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark1.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark1.q index bc9080667..b1cfe2944 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark1.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark1.q @@ -1 +1 @@ -select a,b,c from dfs.`/drill/testdata/mc_parquet_date/spark_generated/d1`; +select a,b,c from dfs_test.`/drill/testdata/mc_parquet_date/spark_generated/d1`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark2.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark2.q index 6ecc82f16..999422fe9 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark2.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark2.q @@ -1 +1 @@ -select a,b,c from dfs.`/drill/testdata/mc_parquet_date/spark_generated/d2`; +select a,b,c from dfs_test.`/drill/testdata/mc_parquet_date/spark_generated/d2`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark3.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark3.q index 1b254a5b7..afb2d0dbf 100644 --- 
a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark3.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark3.q @@ -1 +1 @@ -select a,b,c from dfs.`/drill/testdata/mc_parquet_date/spark_generated/d3`; +select a,b,c from dfs_test.`/drill/testdata/mc_parquet_date/spark_generated/d3`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark4.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark4.q index a62ef4c14..3caac2d79 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark4.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark4.q @@ -1 +1 @@ -select a,b,c from dfs.`/drill/testdata/mc_parquet_date/spark_generated/d4`; +select a,b,c from dfs_test.`/drill/testdata/mc_parquet_date/spark_generated/d4`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark5.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark5.q index c3b2615f7..263676618 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark5.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark5.q @@ -1 +1 @@ -select distinct c from dfs.`/drill/testdata/mc_parquet_date/spark_generated/d4`; +select distinct c from dfs_test.`/drill/testdata/mc_parquet_date/spark_generated/d4`; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark6.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark6.q index 6edbb16ca..df339084e 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark6.q +++ 
b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark6.q @@ -1 +1 @@ -select distinct c from dfs.`/drill/testdata/mc_parquet_date/spark_generated/d4` order by c nulls first limit 10; +select distinct c from dfs_test.`/drill/testdata/mc_parquet_date/spark_generated/d4` order by c nulls first limit 10; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark7.q b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark7.q index e7dc33116..0431c454b 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark7.q +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/generic/spark7.q @@ -1 +1 @@ -select a,b,c from dfs.`/drill/testdata/mc_parquet_date/spark_generated/d4` where c > date '2016-10-1' and c is not null; +select a,b,c from dfs_test.`/drill/testdata/mc_parquet_date/spark_generated/d4` where c > date '2016-10-1' and c is not null; diff --git a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/parquet_date.json b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/parquet_date.json index e98a5ebac..0af2545e6 100644 --- a/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/parquet_date.json +++ b/framework/resources/Functional/parquet_storage/parquet_date/mc_parquet_date/parquet_date.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/parquet_storage/parquet_generic/DRILL-4759.q b/framework/resources/Functional/parquet_storage/parquet_generic/DRILL-4759.q index 6b9a22c09..f041f6b05 100644 --- a/framework/resources/Functional/parquet_storage/parquet_generic/DRILL-4759.q +++ 
b/framework/resources/Functional/parquet_storage/parquet_generic/DRILL-4759.q @@ -1 +1 @@ -select sum(ts), sum(dr), count(ui), sum(up) from dfs.`/drill/testdata/parquet_storage/DRILL-4759.gz.parquet` +select sum(ts), sum(dr), count(ui), sum(up) from dfs_test.`/drill/testdata/parquet_storage/DRILL-4759.gz.parquet` diff --git a/framework/resources/Functional/parquet_storage/parquet_generic/drill4048_1.q b/framework/resources/Functional/parquet_storage/parquet_generic/drill4048_1.q index 2efcdc5d5..0dc6257c9 100644 --- a/framework/resources/Functional/parquet_storage/parquet_generic/drill4048_1.q +++ b/framework/resources/Functional/parquet_storage/parquet_generic/drill4048_1.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/parquet_storage/lineitem_dic_enc.parquet` limit 1; +select * from dfs_test.`/drill/testdata/parquet_storage/lineitem_dic_enc.parquet` limit 1; diff --git a/framework/resources/Functional/parquet_storage/parquet_generic/drill4349.q b/framework/resources/Functional/parquet_storage/parquet_generic/drill4349.q index cf39715ce..1ec1f3a2e 100644 --- a/framework/resources/Functional/parquet_storage/parquet_generic/drill4349.q +++ b/framework/resources/Functional/parquet_storage/parquet_generic/drill4349.q @@ -1 +1 @@ -SELECT * FROM dfs.`/drill/testdata/parquet_storage/DRILL_4349.parquet` WHERE id>=50000 ORDER BY id LIMIT 10; +SELECT * FROM dfs_test.`/drill/testdata/parquet_storage/DRILL_4349.parquet` WHERE id>=50000 ORDER BY id LIMIT 10; diff --git a/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_1.q b/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_1.q index 36c52055c..2fb53ffc0 100644 --- a/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_1.q +++ b/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_1.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/parquet_storage/drill4764/int_8`; +select * from dfs_test.`/drill/testdata/parquet_storage/drill4764/int_8`; 
diff --git a/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_2.q b/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_2.q index 6008111af..e5b314081 100644 --- a/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_2.q +++ b/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_2.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/parquet_storage/drill4764/int_16`; +select * from dfs_test.`/drill/testdata/parquet_storage/drill4764/int_16`; diff --git a/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_3.q b/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_3.q index 4d5a4b6f4..3695e88c4 100644 --- a/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_3.q +++ b/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_3.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/parquet_storage/drill4764/uint_8`; +select * from dfs_test.`/drill/testdata/parquet_storage/drill4764/uint_8`; diff --git a/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_4.q b/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_4.q index b22328cad..5c7e43900 100644 --- a/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_4.q +++ b/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_4.q @@ -1 +1 @@ -select * from dfs.`/drill/testdata/parquet_storage/drill4764/uint_16`; +select * from dfs_test.`/drill/testdata/parquet_storage/drill4764/uint_16`; diff --git a/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_5.q b/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_5.q index 3794686f2..294c5d684 100644 --- a/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_5.q +++ b/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_5.q @@ -1 +1 @@ -select * from 
dfs.`/drill/testdata/parquet_storage/drill4764/uint_32`; +select * from dfs_test.`/drill/testdata/parquet_storage/drill4764/uint_32`; diff --git a/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_6.q b/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_6.q index 264378949..4dab8efea 100644 --- a/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_6.q +++ b/framework/resources/Functional/parquet_storage/parquet_generic/drill4764_6.q @@ -1,5 +1,5 @@ -select index, `value` from dfs.`/drill/testdata/parquet_storage/drill4764/int_8` union -select index, `value` from dfs.`/drill/testdata/parquet_storage/drill4764/int_16` union -select index, `value` from dfs.`/drill/testdata/parquet_storage/drill4764/uint_8` union -select index, `value` from dfs.`/drill/testdata/parquet_storage/drill4764/uint_16` union -select index, `value` from dfs.`/drill/testdata/parquet_storage/drill4764/uint_32`; +select index, `value` from dfs_test.`/drill/testdata/parquet_storage/drill4764/int_8` union +select index, `value` from dfs_test.`/drill/testdata/parquet_storage/drill4764/int_16` union +select index, `value` from dfs_test.`/drill/testdata/parquet_storage/drill4764/uint_8` union +select index, `value` from dfs_test.`/drill/testdata/parquet_storage/drill4764/uint_16` union +select index, `value` from dfs_test.`/drill/testdata/parquet_storage/drill4764/uint_32`; diff --git a/framework/resources/Functional/parquet_storage/parquet_generic/parquetReadGroup.json b/framework/resources/Functional/parquet_storage/parquet_generic/parquetReadGroup.json index 0c7a7a809..9899a7503 100644 --- a/framework/resources/Functional/parquet_storage/parquet_generic/parquetReadGroup.json +++ b/framework/resources/Functional/parquet_storage/parquet_generic/parquetReadGroup.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": 
".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/parquet_storage/parquet_in_nested_dir/parquetInNestedDir.json b/framework/resources/Functional/parquet_storage/parquet_in_nested_dir/parquetInNestedDir.json index 49a282962..364887289 100644 --- a/framework/resources/Functional/parquet_storage/parquet_in_nested_dir/parquetInNestedDir.json +++ b/framework/resources/Functional/parquet_storage/parquet_in_nested_dir/parquetInNestedDir.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/dfs/csv/data/drill4071.q b/framework/resources/Functional/partition_pruning/dfs/csv/data/drill4071.q index aff7b9269..de9cd2d5f 100644 --- a/framework/resources/Functional/partition_pruning/dfs/csv/data/drill4071.q +++ b/framework/resources/Functional/partition_pruning/dfs/csv/data/drill4071.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0='1997' and coalesce(columns[14], 'TRUCK') = 'TRUCK'; +select count(*) from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0='1997' and coalesce(columns[14], 'TRUCK') = 'TRUCK'; diff --git a/framework/resources/Functional/partition_pruning/dfs/csv/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/dfs/csv/data/partitionDirectory.json index 642ad1ec6..3e7921a52 100644 --- a/framework/resources/Functional/partition_pruning/dfs/csv/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/dfs/csv/data/partitionDirectory.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git 
a/framework/resources/Functional/partition_pruning/dfs/csv/plan/drill4071.q b/framework/resources/Functional/partition_pruning/dfs/csv/plan/drill4071.q index ba067f930..c96d27978 100644 --- a/framework/resources/Functional/partition_pruning/dfs/csv/plan/drill4071.q +++ b/framework/resources/Functional/partition_pruning/dfs/csv/plan/drill4071.q @@ -1 +1 @@ -explain plan for select count(*) from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0='1997' and coalesce(l_shipmode, 'TRUCK') = 'TRUCK'; +explain plan for select count(*) from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0='1997' and coalesce(l_shipmode, 'TRUCK') = 'TRUCK'; diff --git a/framework/resources/Functional/partition_pruning/dfs/csv/plan/partitionDirectory.json b/framework/resources/Functional/partition_pruning/dfs/csv/plan/partitionDirectory.json index 5d7014d15..57169b1e1 100644 --- a/framework/resources/Functional/partition_pruning/dfs/csv/plan/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/dfs/csv/plan/partitionDirectory.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4071.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4071.q index 8651e068e..62f205147 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4071.q +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4071.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1997 and coalesce(columns[14], 'TRUCK') = 'TRUCK'; +select count(*) from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where 
dir0=1997 and coalesce(columns[14], 'TRUCK') = 'TRUCK'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_0.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_0.q index 2fecf9e48..541b85b09 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_0.q +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_0.q @@ -1 +1 @@ -select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and dir1='nov' and columns[0] like '2960%'; +select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and dir1='nov' and columns[0] like '2960%'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_1.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_1.q index fb176cee7..4fbb05fc6 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_1.q +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_1.q @@ -1 +1 @@ -select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and cast(columns[0] as int) like 29600 and dir1='nov'; +select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and cast(columns[0] as int) like 29600 and dir1='nov'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_2.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_2.q index cef33eba1..3d8227f07 100644 
--- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_2.q +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_2.q @@ -1 +1 @@ -select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and cast(columns[0] as int) like 29600 and columns[0] like '2960%' and dir1='nov'; +select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and cast(columns[0] as int) like 29600 and columns[0] like '2960%' and dir1='nov'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_3.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_3.q index bf22ba096..fd2a62512 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_3.q +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_3.q @@ -1 +1 @@ -select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and cast(columns[0] as int) like 29600 and columns[0] like '%' and dir1='nov'; +select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and cast(columns[0] as int) like 29600 and columns[0] like '%' and dir1='nov'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_4.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_4.q index f78e42269..e0e837439 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_4.q +++ 
b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_4.q @@ -1 +1 @@ -select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and (case when columns[0] like 29600 then 29600 else 0 end) = 29600 and dir1='nov' and columns[13] like '%ERS%'; +select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and (case when columns[0] like 29600 then 29600 else 0 end) = 29600 and dir1='nov' and columns[13] like '%ERS%'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_5.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_5.q index 59e901234..559fe554e 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_5.q +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_5.q @@ -1 +1 @@ -select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and dir1='nov' and columns[13] like '%ERS%' and abs(columns[0]+1) like '2960%'; +select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and dir1='nov' and columns[13] like '%ERS%' and abs(columns[0]+1) like '2960%'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_6.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_6.q index b1de80949..dffda8d47 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_6.q +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_6.q @@ 
-1 +1 @@ -select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where columns[13] like '%ERS%' and abs(columns[0]+1) like '2960%' and dir0=1993 and dir1='nov'; +select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where columns[13] like '%ERS%' and abs(columns[0]+1) like '2960%' and dir0=1993 and dir1='nov'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_7.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_7.q index 369bebf78..727d71107 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_7.q +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/drill4665_7.q @@ -1 +1 @@ -select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where columns[13] like '%ERS%' and dir0=1993 and dir1='nov' and abs(columns[0]+1) like '2960%'; +select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where columns[13] like '%ERS%' and dir0=1993 and dir1='nov' and abs(columns[0]+1) like '2960%'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/partitionDirectory.json index bcb6ec826..87698170f 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/data/partitionDirectory.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": 
"tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4071.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4071.q index 15498242a..f963f9dd5 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4071.q +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4071.q @@ -1 +1 @@ -explain plan for select count(*) from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1997 and coalesce(columns[14], 'TRUCK') = 'TRUCK'; +explain plan for select count(*) from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1997 and coalesce(columns[14], 'TRUCK') = 'TRUCK'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_0.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_0.q index aec9c8918..12dcddf0f 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_0.q +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_0.q @@ -1 +1 @@ -explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and dir1='nov' and columns[0] like '2960%'; +explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and dir1='nov' and columns[0] like '2960%'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_1.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_1.q index 9749ae1c4..fefb7fa65 100644 --- 
a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_1.q +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_1.q @@ -1 +1 @@ -explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and cast(columns[0] as int) like 29600 and dir1='nov'; +explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and cast(columns[0] as int) like 29600 and dir1='nov'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_2.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_2.q index e697aa674..7cf6a18a6 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_2.q +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_2.q @@ -1 +1 @@ -explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and cast(columns[0] as int) like 29600 and columns[0] like '2960%' and dir1='nov'; +explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and cast(columns[0] as int) like 29600 and columns[0] like '2960%' and dir1='nov'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_3.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_3.q index 59b0a178a..10103677b 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_3.q +++ 
b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_3.q @@ -1 +1 @@ -explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and cast(columns[0] as int) like 29600 and columns[0] like '%' and dir1='nov'; +explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and cast(columns[0] as int) like 29600 and columns[0] like '%' and dir1='nov'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_4.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_4.q index 8ef807c61..1fb7a1b32 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_4.q +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_4.q @@ -1 +1 @@ -explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and (case when columns[0] like 29600 then 29600 else 0 end) = 29600 and dir1='nov' and columns[13] like '%ERS%'; +explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and (case when columns[0] like 29600 then 29600 else 0 end) = 29600 and dir1='nov' and columns[13] like '%ERS%'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_5.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_5.q index 89fd2d14c..e062f9377 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_5.q +++ 
b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_5.q @@ -1 +1 @@ -explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and dir1='nov' and columns[13] like '%ERS%' and abs(columns[0]+1) like '2960%'; +explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where dir0=1993 and dir1='nov' and columns[13] like '%ERS%' and abs(columns[0]+1) like '2960%'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_6.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_6.q index 2c8cfec3a..f55834cdf 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_6.q +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_6.q @@ -1 +1 @@ -explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where columns[13] like '%ERS%' and abs(columns[0]+1) like '2960%' and dir0=1993 and dir1='nov'; +explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where columns[13] like '%ERS%' and abs(columns[0]+1) like '2960%' and dir0=1993 and dir1='nov'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_7.q b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_7.q index 53b0706ba..52fdf277b 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_7.q +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/drill4665_7.q 
@@ -1 +1 @@ -explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where columns[13] like '%ERS%' and dir0=1993 and dir1='nov' and abs(columns[0]+1) like '2960%'; +explain plan for select columns[0], columns[1], columns[4], columns[10], columns[13] from dfs_test.`/drill/testdata/partition_pruning/hive/text/lineitem_hierarchical_intstring` where columns[13] like '%ERS%' and dir0=1993 and dir1='nov' and abs(columns[0]+1) like '2960%'; diff --git a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/partitionDirectory.json b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/partitionDirectory.json index 57411533e..93a1e1f7b 100644 --- a/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/dfs/hierarchical/plan/partitionDirectory.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/dfs/json/data/drill4250_1.q b/framework/resources/Functional/partition_pruning/dfs/json/data/drill4250_1.q index 6cfd6791e..76fb2956a 100644 --- a/framework/resources/Functional/partition_pruning/dfs/json/data/drill4250_1.q +++ b/framework/resources/Functional/partition_pruning/dfs/json/data/drill4250_1.q @@ -1 +1 @@ -select id, dir0 from dfs.`/drill/testdata/partition_pruning/dfs/drill4250_1` where dir0='a'; +select id, dir0 from dfs_test.`/drill/testdata/partition_pruning/dfs/drill4250_1` where dir0='a'; diff --git a/framework/resources/Functional/partition_pruning/dfs/json/data/drill4250_2.q b/framework/resources/Functional/partition_pruning/dfs/json/data/drill4250_2.q index 5ffcaebc7..39b07d16d 100644 --- 
a/framework/resources/Functional/partition_pruning/dfs/json/data/drill4250_2.q +++ b/framework/resources/Functional/partition_pruning/dfs/json/data/drill4250_2.q @@ -1 +1 @@ -select id from dfs.`/drill/testdata/partition_pruning/dfs/drill4250_1` where dir0 is null; +select id from dfs_test.`/drill/testdata/partition_pruning/dfs/drill4250_1` where dir0 is null; diff --git a/framework/resources/Functional/partition_pruning/dfs/json/data/drill4250_3.q b/framework/resources/Functional/partition_pruning/dfs/json/data/drill4250_3.q index 9d5190914..9cd4543b4 100644 --- a/framework/resources/Functional/partition_pruning/dfs/json/data/drill4250_3.q +++ b/framework/resources/Functional/partition_pruning/dfs/json/data/drill4250_3.q @@ -1 +1 @@ -select id, dir0 from dfs.`/drill/testdata/partition_pruning/dfs/drill4250_1` where dir0 is null or dir0 = 'b'; +select id, dir0 from dfs_test.`/drill/testdata/partition_pruning/dfs/drill4250_1` where dir0 is null or dir0 = 'b'; diff --git a/framework/resources/Functional/partition_pruning/dfs/json/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/dfs/json/data/partitionDirectory.json index 986b7a0e9..7545c903c 100644 --- a/framework/resources/Functional/partition_pruning/dfs/json/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/dfs/json/data/partitionDirectory.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/dfs/json/plan/drill4250_1.q b/framework/resources/Functional/partition_pruning/dfs/json/plan/drill4250_1.q index ff10ec707..9fd164e00 100644 --- a/framework/resources/Functional/partition_pruning/dfs/json/plan/drill4250_1.q +++ b/framework/resources/Functional/partition_pruning/dfs/json/plan/drill4250_1.q @@ -1 +1 @@ -explain plan for select * from 
dfs.`/drill/testdata/partition_pruning/dfs/drill4250_1` where dir0='a'; +explain plan for select * from dfs_test.`/drill/testdata/partition_pruning/dfs/drill4250_1` where dir0='a'; diff --git a/framework/resources/Functional/partition_pruning/dfs/json/plan/drill4250_2.q b/framework/resources/Functional/partition_pruning/dfs/json/plan/drill4250_2.q index 93bf0593b..190d19092 100644 --- a/framework/resources/Functional/partition_pruning/dfs/json/plan/drill4250_2.q +++ b/framework/resources/Functional/partition_pruning/dfs/json/plan/drill4250_2.q @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/partition_pruning/dfs/drill4250_1` where dir0 is null; +explain plan for select * from dfs_test.`/drill/testdata/partition_pruning/dfs/drill4250_1` where dir0 is null; diff --git a/framework/resources/Functional/partition_pruning/dfs/json/plan/drill4250_3.q b/framework/resources/Functional/partition_pruning/dfs/json/plan/drill4250_3.q index 3729a61d7..f701bbdde 100644 --- a/framework/resources/Functional/partition_pruning/dfs/json/plan/drill4250_3.q +++ b/framework/resources/Functional/partition_pruning/dfs/json/plan/drill4250_3.q @@ -1 +1 @@ -explain plan for select * from dfs.`/drill/testdata/partition_pruning/dfs/drill4250_1` where dir0 is null or dir0 = 'b'; +explain plan for select * from dfs_test.`/drill/testdata/partition_pruning/dfs/drill4250_1` where dir0 is null or dir0 = 'b'; diff --git a/framework/resources/Functional/partition_pruning/dfs/json/plan/partitionDirectory.json b/framework/resources/Functional/partition_pruning/dfs/json/plan/partitionDirectory.json index 0a85b4029..3b12910fb 100644 --- a/framework/resources/Functional/partition_pruning/dfs/json/plan/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/dfs/json/plan/partitionDirectory.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff 
--git a/framework/resources/Functional/partition_pruning/dfs/mixed/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/dfs/mixed/data/partitionDirectory.json index f516c5b54..1001e45fe 100644 --- a/framework/resources/Functional/partition_pruning/dfs/mixed/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/dfs/mixed/data/partitionDirectory.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/data/count.q b/framework/resources/Functional/partition_pruning/dfs/parquet/data/count.q index 4389a43c1..06c484a28 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/data/count.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/data/count.q @@ -1 +1 @@ -select count(*) from dfs.`/drill/testdata/partition_pruning/dfs/orders` where dir0=1993; +select count(*) from dfs_test.`/drill/testdata/partition_pruning/dfs/orders` where dir0=1993; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_1.q b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_1.q index 9fadc051d..c1ac78d83 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_1.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_1.q @@ -1,4 +1,4 @@ select count (*) from ( - select l_orderkey, dir0 from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' + select l_orderkey, dir0 from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' union all - select l_orderkey, dir0 from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and 
t2.dir1='two' and t2.dir2 = '2015-8-12') data; + select l_orderkey, dir0 from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12') data; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_10.q b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_10.q index ad96a0f05..ff4d80796 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_10.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_10.q @@ -1,5 +1,5 @@ select count (*) from - (select l_orderkey, dir0, dir1, dir2, l_linenumber from dfs.`/drill/testdata/partition_pruning/nested/l_3level` where dir0 = 1 and dir1='one' and dir2 = '2015-7-12') t1 + (select l_orderkey, dir0, dir1, dir2, l_linenumber from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` where dir0 = 1 and dir1='one' and dir2 = '2015-7-12') t1 inner join - (select l_orderkey, dir0, dir1, dir2,l_linenumber from dfs.`/drill/testdata/partition_pruning/nested/l_3level` where dir0 = 1 and dir1 = 'two' and dir2 = '2015-8-12') t2 + (select l_orderkey, dir0, dir1, dir2,l_linenumber from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` where dir0 = 1 and dir1 = 'two' and dir2 = '2015-8-12') t2 on t1.l_orderkey = t2.l_orderkey diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_11.q b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_11.q index f73eaf20a..f3304efd5 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_11.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_11.q @@ -1,7 +1,7 @@ select count (*) from ( - select t1.l_orderkey, t1.l_linenumber from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 + select t1.l_orderkey, t1.l_linenumber from 
dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' and t1.l_orderkey in ( - select t2.l_orderkey from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12' and t2.l_discount = t1.l_discount + select t2.l_orderkey from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12' and t2.l_discount = t1.l_discount ) ) data; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_2.q b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_2.q index 1b5b8f935..62dbb06b3 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_2.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_2.q @@ -1,4 +1,4 @@ select count (*) from ( - select l_orderkey, dir0 from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' + select l_orderkey, dir0 from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' union - select l_orderkey, dir0 from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12') data; + select l_orderkey, dir0 from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12') data; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_3.q b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_3.q index 438442ab3..6629c95d4 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_3.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_3.q @@ -1,6 +1,6 @@ select count 
(*) from ( - select t1.l_orderkey, t1.dir0, t1.l_linenumber, t2.l_linenumber from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 - join dfs.`/drill/testdata/partition_pruning/nested/l_3level` t2 on t1.l_orderkey = t2.l_orderkey + select t1.l_orderkey, t1.dir0, t1.l_linenumber, t2.l_linenumber from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 + join dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t2 on t1.l_orderkey = t2.l_orderkey where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12' and t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' ) data; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_4.q b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_4.q index ccdae0ed3..bc1080108 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_4.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_4.q @@ -1,4 +1,4 @@ select count (*) from ( - select l_orderkey, dir0 from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' + select l_orderkey, dir0 from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' union all - select l_orderkey, dir0 from dfs.`/drill/testdata/partition_pruning/nested/l_3level/1/two/2015-8-12` t2 ) data; + select l_orderkey, dir0 from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level/1/two/2015-8-12` t2 ) data; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_5.q b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_5.q index 07e402dfb..0b0774daa 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_5.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_5.q @@ -1,4 +1,4 @@ select count (*) 
from ( - select l_orderkey, dir0 from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' + select l_orderkey, dir0 from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' union all - select l_orderkey, dir0 from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir1='two' ) data; + select l_orderkey, dir0 from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir1='two' ) data; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_6.q b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_6.q index c9abf11b8..085e5c575 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_6.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_6.q @@ -1,7 +1,7 @@ select count (*) from ( - select t1.l_orderkey from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 + select t1.l_orderkey from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 join - dfs.`/drill/testdata/partition_pruning/nested/l_3level` t2 + dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t2 on t1.l_orderkey = t2.l_orderkey where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' and t2.dir1='one' and t2.dir2 = '2015-7-12') data; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_7.q b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_7.q index 48a2fe668..3fbd38ac4 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_7.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_7.q @@ -1,7 +1,7 @@ select count (*) from ( - select t1.l_orderkey, t1.l_linenumber from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 + select t1.l_orderkey, 
t1.l_linenumber from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' and t1.l_orderkey in ( - select t2.l_orderkey from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12' + select t2.l_orderkey from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12' ) ) data; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_8.q b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_8.q index 1dbec135d..88f7d90da 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_8.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_8.q @@ -1,8 +1,8 @@ -create or replace view dfs.drillTestDirViews.drill4825_v1_plan as - select t1.l_orderkey, t1.l_linenumber from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 +create or replace view dfs_test.drillTestDirViews.drill4825_v1_plan as + select t1.l_orderkey, t1.l_linenumber from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' and t1.l_orderkey in ( - select t2.l_orderkey from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12' + select t2.l_orderkey from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12' ); -select count (*) from dfs.drillTestDirViews.drill4825_v1_plan; -drop view dfs.drillTestDirViews.drill4825_v1_plan; +select count (*) from dfs_test.drillTestDirViews.drill4825_v1_plan; +drop view dfs_test.drillTestDirViews.drill4825_v1_plan; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_9.q 
b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_9.q index 3ceccf57d..50f35d8ec 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_9.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/data/drill4825_9.q @@ -1,7 +1,7 @@ select count (*) from - (select l_orderkey, dir0, dir1, dir2, l_linenumber from dfs.`/drill/testdata/partition_pruning/nested/l_3level`) t1 + (select l_orderkey, dir0, dir1, dir2, l_linenumber from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level`) t1 inner join - (select l_orderkey, dir0, dir1, dir2,l_linenumber from dfs.`/drill/testdata/partition_pruning/nested/l_3level`) t2 + (select l_orderkey, dir0, dir1, dir2,l_linenumber from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level`) t2 on t1.l_orderkey = t2.l_orderkey where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12' and t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12'; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/dfs/parquet/data/partitionDirectory.json index 970c78b49..92a8141da 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/data/partitionDirectory.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_1.q b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_1.q index b44828899..bf685c0fd 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_1.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_1.q @@ -1,4 +1,4 @@ explain plan for select count (*) 
from ( - select l_orderkey, dir0 from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' + select l_orderkey, dir0 from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' union all - select l_orderkey, dir0 from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12') data; + select l_orderkey, dir0 from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12') data; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_10.q b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_10.q index d468f5595..cb0ed4b3a 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_10.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_10.q @@ -1,5 +1,5 @@ explain plan for select count (*) from - (select l_orderkey, dir0, dir1, dir2, l_linenumber from dfs.`/drill/testdata/partition_pruning/nested/l_3level` where dir0 = 1 and dir1='one' and dir2 = '2015-7-12') t1 + (select l_orderkey, dir0, dir1, dir2, l_linenumber from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` where dir0 = 1 and dir1='one' and dir2 = '2015-7-12') t1 inner join - (select l_orderkey, dir0, dir1, dir2,l_linenumber from dfs.`/drill/testdata/partition_pruning/nested/l_3level` where dir0 = 1 and dir1 = 'two' and dir2 = '2015-8-12') t2 + (select l_orderkey, dir0, dir1, dir2,l_linenumber from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` where dir0 = 1 and dir1 = 'two' and dir2 = '2015-8-12') t2 on t1.l_orderkey = t2.l_orderkey diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_2.q 
b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_2.q index 2de795d3a..557575e95 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_2.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_2.q @@ -1,4 +1,4 @@ explain plan for select count (*) from ( - select l_orderkey, dir0 from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' + select l_orderkey, dir0 from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' union - select l_orderkey, dir0 from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12') data; + select l_orderkey, dir0 from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12') data; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_3.q b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_3.q index 2aff77eab..a0fd42370 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_3.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_3.q @@ -1,6 +1,6 @@ explain plan for select count (*) from ( - select t1.l_orderkey, t1.dir0, t1.l_linenumber, t2.l_linenumber from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 - join dfs.`/drill/testdata/partition_pruning/nested/l_3level` t2 on t1.l_orderkey = t2.l_orderkey + select t1.l_orderkey, t1.dir0, t1.l_linenumber, t2.l_linenumber from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 + join dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t2 on t1.l_orderkey = t2.l_orderkey where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12' and t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = 
'2015-7-12' ) data; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_4.q b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_4.q index c9d04e569..4693a9d35 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_4.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_4.q @@ -1,4 +1,4 @@ explain plan for select count (*) from ( - select l_orderkey, dir0 from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' + select l_orderkey, dir0 from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' union all - select l_orderkey, dir0 from dfs.`/drill/testdata/partition_pruning/nested/l_3level/1/two/2015-8-12` t2 ) data; + select l_orderkey, dir0 from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level/1/two/2015-8-12` t2 ) data; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_5.q b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_5.q index e2e1ba831..8b9b3b9e4 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_5.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_5.q @@ -1,4 +1,4 @@ explain plan for select count (*) from ( - select l_orderkey, dir0 from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' + select l_orderkey, dir0 from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' union all - select l_orderkey, dir0 from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir1='two' ) data; + select l_orderkey, dir0 from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir1='two' 
) data; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_6.q b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_6.q index 809502aa1..cc17da1fe 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_6.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_6.q @@ -1,7 +1,7 @@ explain plan for select count (*) from ( - select t1.l_orderkey from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 + select t1.l_orderkey from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 join - dfs.`/drill/testdata/partition_pruning/nested/l_3level` t2 + dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t2 on t1.l_orderkey = t2.l_orderkey where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' and t2.dir1='one' and t2.dir2 = '2015-7-12') data; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_7.q b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_7.q index bc583073d..4394f739b 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_7.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_7.q @@ -1,7 +1,7 @@ explain plan for select count (*) from ( - select t1.l_orderkey, t1.l_linenumber from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 + select t1.l_orderkey, t1.l_linenumber from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' and t1.l_orderkey in ( - select t2.l_orderkey from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12' + select t2.l_orderkey from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12' ) ) data; diff --git 
a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_8.q b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_8.q index 4b1d26334..cac896d0d 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_8.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_8.q @@ -1,8 +1,8 @@ -create or replace view dfs.drillTestDirViews.drill4825_v1 as - select t1.l_orderkey, t1.l_linenumber from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 +create or replace view dfs_test.drillTestDirViews.drill4825_v1 as + select t1.l_orderkey, t1.l_linenumber from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' and t1.l_orderkey in ( - select t2.l_orderkey from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12' + select t2.l_orderkey from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12' ); -explain plan for select count (*) from dfs.drillTestDirViews.drill4825_v1; -drop view dfs.drillTestDirViews.drill4825_v1; +explain plan for select count (*) from dfs_test.drillTestDirViews.drill4825_v1; +drop view dfs_test.drillTestDirViews.drill4825_v1; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_9.q b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_9.q index 44fea031d..fe5ca7634 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_9.q +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4825_9.q @@ -1,7 +1,7 @@ explain plan for select count (*) from - (select l_orderkey, dir0, dir1, dir2, l_linenumber from dfs.`/drill/testdata/partition_pruning/nested/l_3level`) t1 + (select l_orderkey, dir0, dir1, dir2, l_linenumber from 
dfs_test.`/drill/testdata/partition_pruning/nested/l_3level`) t1 inner join - (select l_orderkey, dir0, dir1, dir2,l_linenumber from dfs.`/drill/testdata/partition_pruning/nested/l_3level`) t2 + (select l_orderkey, dir0, dir1, dir2,l_linenumber from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level`) t2 on t1.l_orderkey = t2.l_orderkey where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12' and t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12'; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4860.q.fail b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4860.q.fail index 961e9f0bc..dd92fff70 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4860.q.fail +++ b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/drill4860.q.fail @@ -1,7 +1,7 @@ explain plan for select count (*) from ( - select t1.l_orderkey, t1.l_linenumber from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t1 + select t1.l_orderkey, t1.l_linenumber from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t1 where t1.dir0 = 1 and t1.dir1='one' and t1.dir2 = '2015-7-12' and t1.l_orderkey in ( - select t2.l_orderkey from dfs.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12' and t2.l_discount = t1.l_discount + select t2.l_orderkey from dfs_test.`/drill/testdata/partition_pruning/nested/l_3level` t2 where t2.dir0 = 1 and t2.dir1='two' and t2.dir2 = '2015-8-12' and t2.l_discount = t1.l_discount ) ) data; diff --git a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/partitionDirectory.json b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/partitionDirectory.json index 81fac8c6c..09e976a0c 100644 --- a/framework/resources/Functional/partition_pruning/dfs/parquet/plan/partitionDirectory.json +++ 
b/framework/resources/Functional/partition_pruning/dfs/parquet/plan/partitionDirectory.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/general/data/general.json b/framework/resources/Functional/partition_pruning/hive/general/data/general.json index a2226c431..ee133deb0 100644 --- a/framework/resources/Functional/partition_pruning/hive/general/data/general.json +++ b/framework/resources/Functional/partition_pruning/hive/general/data/general.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/general/plan/general.json b/framework/resources/Functional/partition_pruning/hive/general/plan/general.json index dc80c1a15..8ab13aea5 100644 --- a/framework/resources/Functional/partition_pruning/hive/general/plan/general.json +++ b/framework/resources/Functional/partition_pruning/hive/general/plan/general.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet-native/dynamic_hier_intstring/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/parquet-native/dynamic_hier_intstring/data/partitionDirectory.json index a7d814e35..e5070d972 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet-native/dynamic_hier_intstring/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet-native/dynamic_hier_intstring/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + 
"schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet-native/dynamic_hier_intstring/plan/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/parquet-native/dynamic_hier_intstring/plan/partitionDirectory.json index 0be4602b8..ebd5d2af4 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet-native/dynamic_hier_intstring/plan/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet-native/dynamic_hier_intstring/plan/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet-native/join/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/parquet-native/join/data/partitionDirectory.json index 40152e1c0..d9a4d1a44 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet-native/join/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet-native/join/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet-native/join/plan/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/parquet-native/join/plan/partitionDirectory.json index 7e708fd6f..434f95fcd 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet-native/join/plan/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet-native/join/plan/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - 
"schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet-native/string_partition/data/partitionString.json b/framework/resources/Functional/partition_pruning/hive/parquet-native/string_partition/data/partitionString.json index 0a5c965ba..21f7c047a 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet-native/string_partition/data/partitionString.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet-native/string_partition/data/partitionString.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet/date_partition/data/partitionDate.json b/framework/resources/Functional/partition_pruning/hive/parquet/date_partition/data/partitionDate.json index fa4da3505..717d0063d 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet/date_partition/data/partitionDate.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet/date_partition/data/partitionDate.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_hier_intint/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_hier_intint/data/partitionDirectory.json index 3e662b7a2..b81f174c2 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_hier_intint/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_hier_intint/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - 
"schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_hier_intstring/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_hier_intstring/data/partitionDirectory.json index 19a8f08b1..f2a28d6b2 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_hier_intstring/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_hier_intstring/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_int_partition/data/partitionInt.json b/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_int_partition/data/partitionInt.json index 7422933fe..30d7d462a 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_int_partition/data/partitionInt.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_int_partition/data/partitionInt.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_int_partition/plan/partitionInt.json b/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_int_partition/plan/partitionInt.json index 186f57ec1..232bb804e 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_int_partition/plan/partitionInt.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_int_partition/plan/partitionInt.json @@ -9,7 +9,7 @@ "matrices": [ { 
"query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_string_partition/data/partitionString.json b/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_string_partition/data/partitionString.json index 383c3dc7d..685cf2755 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_string_partition/data/partitionString.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet/dynamic_string_partition/data/partitionString.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet/hier_intint/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/parquet/hier_intint/data/partitionDirectory.json index 5caa729c9..112c717e3 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet/hier_intint/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet/hier_intint/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet/hier_intstring/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/parquet/hier_intstring/data/partitionDirectory.json index 791f96128..6f5c64ab1 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet/hier_intstring/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet/hier_intstring/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { 
"query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet/int_partition/data/partitionInt.json b/framework/resources/Functional/partition_pruning/hive/parquet/int_partition/data/partitionInt.json index c1a1f2281..c783ea60c 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet/int_partition/data/partitionInt.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet/int_partition/data/partitionInt.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet/int_partition/plan/partitionInt.json b/framework/resources/Functional/partition_pruning/hive/parquet/int_partition/plan/partitionInt.json index 90af1dce6..60b545151 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet/int_partition/plan/partitionInt.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet/int_partition/plan/partitionInt.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet/join/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/parquet/join/data/partitionDirectory.json index c903505fb..f47a2f3d4 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet/join/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet/join/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", 
"verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/parquet/string_partition/data/partitionString.json b/framework/resources/Functional/partition_pruning/hive/parquet/string_partition/data/partitionString.json index 3d6d06e39..513e963bd 100644 --- a/framework/resources/Functional/partition_pruning/hive/parquet/string_partition/data/partitionString.json +++ b/framework/resources/Functional/partition_pruning/hive/parquet/string_partition/data/partitionString.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/date_partition/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/date_partition/data/partitionDirectory.json index 19809a65e..cec42a879 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/date_partition/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/date_partition/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/date_partition/plan/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/date_partition/plan/partitionDirectory.json index cec3cdca9..3b3739a36 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/date_partition/plan/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/date_partition/plan/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git 
a/framework/resources/Functional/partition_pruning/hive/text/dynamic_date_partition/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/dynamic_date_partition/data/partitionDirectory.json index 9479063f2..09dec6af2 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/dynamic_date_partition/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/dynamic_date_partition/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/dynamic_date_partition/plan/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/dynamic_date_partition/plan/partitionDirectory.json index 0a2ace42f..83e1585dc 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/dynamic_date_partition/plan/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/dynamic_date_partition/plan/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/dynamic_hier_intint/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/dynamic_hier_intint/data/partitionDirectory.json index 041e9e69a..56ed2e3ec 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/dynamic_hier_intint/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/dynamic_hier_intint/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", 
"verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/dynamic_hier_intstring/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/dynamic_hier_intstring/data/partitionDirectory.json index a7670f264..ca76045a5 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/dynamic_hier_intstring/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/dynamic_hier_intstring/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/dynamic_int_partition/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/dynamic_int_partition/data/partitionDirectory.json index ce4f7f106..be452ba7d 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/dynamic_int_partition/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/dynamic_int_partition/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/dynamic_int_partition/plan/partitionInt.json b/framework/resources/Functional/partition_pruning/hive/text/dynamic_int_partition/plan/partitionInt.json index 970d8c651..ad53a4b95 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/dynamic_int_partition/plan/partitionInt.json +++ b/framework/resources/Functional/partition_pruning/hive/text/dynamic_int_partition/plan/partitionInt.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": 
".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/dynamic_string_partition/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/dynamic_string_partition/data/partitionDirectory.json index 35f85575c..8c13be8ff 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/dynamic_string_partition/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/dynamic_string_partition/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/dynamic_string_partition/plan/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/dynamic_string_partition/plan/partitionDirectory.json index 3dad86205..daa06e990 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/dynamic_string_partition/plan/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/dynamic_string_partition/plan/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/hier_intint/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/hier_intint/data/partitionDirectory.json index 3b21eaf57..21a6643f9 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/hier_intint/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/hier_intint/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", 
"expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/hier_intint/plan/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/hier_intint/plan/partitionDirectory.json index 7d143cbf0..e087edf70 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/hier_intint/plan/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/hier_intint/plan/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/hier_intstring/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/hier_intstring/data/partitionDirectory.json index 7b27b7318..0f07febef 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/hier_intstring/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/hier_intstring/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/hier_intstring/plan/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/hier_intstring/plan/partitionDirectory.json index 11ee07eb9..4d018b95e 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/hier_intstring/plan/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/hier_intstring/plan/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git 
a/framework/resources/Functional/partition_pruning/hive/text/int_partition/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/int_partition/data/partitionDirectory.json index 3a830d95b..6f66f343d 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/int_partition/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/int_partition/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/int_partition/plan/partitionInt.json b/framework/resources/Functional/partition_pruning/hive/text/int_partition/plan/partitionInt.json index 638cd328d..38cf56d56 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/int_partition/plan/partitionInt.json +++ b/framework/resources/Functional/partition_pruning/hive/text/int_partition/plan/partitionInt.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/join/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/join/data/partitionDirectory.json index 233ce14cf..80fefc73e 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/join/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/join/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/join/plan/partitionDirectory.json 
b/framework/resources/Functional/partition_pruning/hive/text/join/plan/partitionDirectory.json index 59723459b..b892fbae2 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/join/plan/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/join/plan/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/string_partition/data/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/string_partition/data/partitionDirectory.json index 48acbdc32..42ddad462 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/string_partition/data/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/string_partition/data/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/partition_pruning/hive/text/string_partition/plan/partitionDirectory.json b/framework/resources/Functional/partition_pruning/hive/text/string_partition/plan/partitionDirectory.json index 550307de7..0bd7f1495 100644 --- a/framework/resources/Functional/partition_pruning/hive/text/string_partition/plan/partitionDirectory.json +++ b/framework/resources/Functional/partition_pruning/hive/text/string_partition/plan/partitionDirectory.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/schema-changes/drill4032_1.q b/framework/resources/Functional/schema-changes/drill4032_1.q index 668f4c9d2..a76d52bcc 100644 --- 
a/framework/resources/Functional/schema-changes/drill4032_1.q +++ b/framework/resources/Functional/schema-changes/drill4032_1.q @@ -1 +1 @@ -select d.col2.col4, d.col2.col3 from dfs.`/drill/testdata/schema-changes/drill4032` d; +select d.col2.col4, d.col2.col3 from dfs_test.`/drill/testdata/schema-changes/drill4032` d; diff --git a/framework/resources/Functional/schema-changes/schem-changes.json b/framework/resources/Functional/schema-changes/schem-changes.json index 42febd0bd..1d97f78a0 100644 --- a/framework/resources/Functional/schema-changes/schem-changes.json +++ b/framework/resources/Functional/schema-changes/schem-changes.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirP1", + "schema": "dfs_test.drillTestDirP1", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/schema_change_empty_batch/json/empty_batch_json.json b/framework/resources/Functional/schema_change_empty_batch/json/empty_batch_json.json index e443a4f28..2eaafbf54 100644 --- a/framework/resources/Functional/schema_change_empty_batch/json/empty_batch_json.json +++ b/framework/resources/Functional/schema_change_empty_batch/json/empty_batch_json.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.drillTestDirSchemaChangeEmptyBatch", + "schema": "dfs_test.drillTestDirSchemaChangeEmptyBatch", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/schema_change_empty_batch/text/dfs/empty_batch_text_dfs.json b/framework/resources/Functional/schema_change_empty_batch/text/dfs/empty_batch_text_dfs.json index 1852c8aff..a180210cb 100644 --- a/framework/resources/Functional/schema_change_empty_batch/text/dfs/empty_batch_text_dfs.json +++ b/framework/resources/Functional/schema_change_empty_batch/text/dfs/empty_batch_text_dfs.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": 
"dfs.drillTestDirSchemaChangeEmptyBatch", + "schema": "dfs_test.drillTestDirSchemaChangeEmptyBatch", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/subqueries/bugs/bugs.json b/framework/resources/Functional/subqueries/bugs/bugs.json index 3c7072944..9cb67f9f5 100644 --- a/framework/resources/Functional/subqueries/bugs/bugs.json +++ b/framework/resources/Functional/subqueries/bugs/bugs.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/subqueries/exists_not_exists/exists_not_exists.json b/framework/resources/Functional/subqueries/exists_not_exists/exists_not_exists.json index b3531c678..220329d22 100644 --- a/framework/resources/Functional/subqueries/exists_not_exists/exists_not_exists.json +++ b/framework/resources/Functional/subqueries/exists_not_exists/exists_not_exists.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/subqueries/negative/not_supported.json b/framework/resources/Functional/subqueries/negative/not_supported.json index c9d82cdb5..a7856dc70 100644 --- a/framework/resources/Functional/subqueries/negative/not_supported.json +++ b/framework/resources/Functional/subqueries/negative/not_supported.json @@ -7,7 +7,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ "regex" ] diff --git a/framework/resources/Functional/subqueries/not_in/not_in.json b/framework/resources/Functional/subqueries/not_in/not_in.json index 6c9d29b2d..e3451b15b 100644 --- 
a/framework/resources/Functional/subqueries/not_in/not_in.json +++ b/framework/resources/Functional/subqueries/not_in/not_in.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/subqueries/scalar_aggregate/scalar_aggregate.json b/framework/resources/Functional/subqueries/scalar_aggregate/scalar_aggregate.json index db2d620dc..20104daad 100644 --- a/framework/resources/Functional/subqueries/scalar_aggregate/scalar_aggregate.json +++ b/framework/resources/Functional/subqueries/scalar_aggregate/scalar_aggregate.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/subqueries/with_clause/with_clause.json b/framework/resources/Functional/subqueries/with_clause/with_clause.json index 3abed3d79..bfedde8a3 100644 --- a/framework/resources/Functional/subqueries/with_clause/with_clause.json +++ b/framework/resources/Functional/subqueries/with_clause/with_clause.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/table_function/positive/drill-3149_8.q b/framework/resources/Functional/table_function/positive/drill-3149_8.q index 07bbb8da9..7a85a002d 100644 --- a/framework/resources/Functional/table_function/positive/drill-3149_8.q +++ b/framework/resources/Functional/table_function/positive/drill-3149_8.q @@ -1 +1 @@ -select max(columns[2]),min(columns[1]),sum(cast(columns[0] as int)) from table(`dfs.drillTestDir`.`table_function/cr_only.txt`(type=>'text',lineDelimiter=>'\r',fieldDelimiter=>'\"')); +select 
max(columns[2]),min(columns[1]),sum(cast(columns[0] as int)) from table(`dfs_test.drillTestDir`.`table_function/cr_only.txt`(type=>'text',lineDelimiter=>'\r',fieldDelimiter=>'\"')); diff --git a/framework/resources/Functional/table_function/positive/table_function.json b/framework/resources/Functional/table_function/positive/table_function.json index 8c323f8fd..73bba3cd1 100644 --- a/framework/resources/Functional/table_function/positive/table_function.json +++ b/framework/resources/Functional/table_function/positive/table_function.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDir", + "schema": "dfs_test.drillTestDir", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/text_storage/negative/textReadGroup.json b/framework/resources/Functional/text_storage/negative/textReadGroup.json index 8b8b065dd..0774e90d5 100644 --- a/framework/resources/Functional/text_storage/negative/textReadGroup.json +++ b/framework/resources/Functional/text_storage/negative/textReadGroup.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/text_storage/testcases/drill1831.q b/framework/resources/Functional/text_storage/testcases/drill1831.q index 1f77c6dd6..72375ba0b 100644 --- a/framework/resources/Functional/text_storage/testcases/drill1831.q +++ b/framework/resources/Functional/text_storage/testcases/drill1831.q @@ -1 +1 @@ -select columns[0] from dfs.`/drill/testdata/text_storage/drill1831.tbl` where columns[0] similar to '[aq]%'; +select columns[0] from dfs_test.`/drill/testdata/text_storage/drill1831.tbl` where columns[0] similar to '[aq]%'; diff --git a/framework/resources/Functional/text_storage/testcases/textReadGroup.json b/framework/resources/Functional/text_storage/testcases/textReadGroup.json index 
9b837fde1..3b53d7a34 100644 --- a/framework/resources/Functional/text_storage/testcases/textReadGroup.json +++ b/framework/resources/Functional/text_storage/testcases/textReadGroup.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs", + "schema": "dfs_test", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpcds/impala/json/tpcds_sf1_json.json b/framework/resources/Functional/tpcds/impala/json/tpcds_sf1_json.json index 80c93a6d7..8e6f9b1ef 100755 --- a/framework/resources/Functional/tpcds/impala/json/tpcds_sf1_json.json +++ b/framework/resources/Functional/tpcds/impala/json/tpcds_sf1_json.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.tpcds_sf1_json_views", + "schema": "dfs_test.tpcds_sf1_json_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpcds/impala/parquet/tpcds_parquet_sf1.json b/framework/resources/Functional/tpcds/impala/parquet/tpcds_parquet_sf1.json index f6cf6cbbb..c849e781d 100755 --- a/framework/resources/Functional/tpcds/impala/parquet/tpcds_parquet_sf1.json +++ b/framework/resources/Functional/tpcds/impala/parquet/tpcds_parquet_sf1.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.tpcds_sf1_parquet_views", + "schema": "dfs_test.tpcds_sf1_parquet_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpcds/impala/text/tpcds_text_sf1.json b/framework/resources/Functional/tpcds/impala/text/tpcds_text_sf1.json index 93153f7ac..6dbc4432a 100755 --- a/framework/resources/Functional/tpcds/impala/text/tpcds_text_sf1.json +++ b/framework/resources/Functional/tpcds/impala/text/tpcds_text_sf1.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.tpcds_sf1_text_views", + "schema": "dfs_test.tpcds_sf1_text_views", "output-format": "tsv", 
"expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpcds/sanity/json/tpcds_sf1_json.json b/framework/resources/Functional/tpcds/sanity/json/tpcds_sf1_json.json index 37167bb05..222fc4761 100755 --- a/framework/resources/Functional/tpcds/sanity/json/tpcds_sf1_json.json +++ b/framework/resources/Functional/tpcds/sanity/json/tpcds_sf1_json.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_sf1_json_views", + "schema": "dfs_test.tpcds_sf1_json_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpcds/sanity/parquet/tpcds_parquet_sf1.json b/framework/resources/Functional/tpcds/sanity/parquet/tpcds_parquet_sf1.json index 80995af13..5f13f6059 100755 --- a/framework/resources/Functional/tpcds/sanity/parquet/tpcds_parquet_sf1.json +++ b/framework/resources/Functional/tpcds/sanity/parquet/tpcds_parquet_sf1.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_sf1_parquet_views", + "schema": "dfs_test.tpcds_sf1_parquet_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpcds/sanity/text/drill_4872.sql b/framework/resources/Functional/tpcds/sanity/text/drill_4872.sql index f91998741..32cc0043b 100644 --- a/framework/resources/Functional/tpcds/sanity/text/drill_4872.sql +++ b/framework/resources/Functional/tpcds/sanity/text/drill_4872.sql @@ -1,3 +1,3 @@ -create table dfs.tmp.t_drill4872 partition by ( x ) as select case when columns[8] = '' then cast(null as varchar(10)) else cast(columns[8] as varchar(10)) end as x FROM dfs.`/drill/testdata/tpcds_sf1/text/store_sales`; -SELECT COUNT(*) FROM dfs.tmp.t_drill4872; -DROP TABLE dfs.tmp.t_drill4872; +create table dfs_test.tmp.t_drill4872 partition by ( x ) as select case when columns[8] = '' then cast(null as varchar(10)) else cast(columns[8] as varchar(10)) end as x FROM 
dfs_test.`/drill/testdata/tpcds_sf1/text/store_sales`; +SELECT COUNT(*) FROM dfs_test.tmp.t_drill4872; +DROP TABLE dfs_test.tmp.t_drill4872; diff --git a/framework/resources/Functional/tpcds/sanity/text/tpcds_text_sf1.json b/framework/resources/Functional/tpcds/sanity/text/tpcds_text_sf1.json index 28a91cfc7..5ddad52de 100755 --- a/framework/resources/Functional/tpcds/sanity/text/tpcds_text_sf1.json +++ b/framework/resources/Functional/tpcds/sanity/text/tpcds_text_sf1.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_sf1_text_views", + "schema": "dfs_test.tpcds_sf1_text_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpcds/variants/json/tpcds_json_sf1.json b/framework/resources/Functional/tpcds/variants/json/tpcds_json_sf1.json index effb7fc37..6fa3c20dc 100755 --- a/framework/resources/Functional/tpcds/variants/json/tpcds_json_sf1.json +++ b/framework/resources/Functional/tpcds/variants/json/tpcds_json_sf1.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_sf1_json_views", + "schema": "dfs_test.tpcds_sf1_json_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpcds/variants/parquet/tpcds_parquet_sf1.json b/framework/resources/Functional/tpcds/variants/parquet/tpcds_parquet_sf1.json index 371ec3abb..67ca54b63 100755 --- a/framework/resources/Functional/tpcds/variants/parquet/tpcds_parquet_sf1.json +++ b/framework/resources/Functional/tpcds/variants/parquet/tpcds_parquet_sf1.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_sf1_parquet_views", + "schema": "dfs_test.tpcds_sf1_parquet_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpcds/variants/text/tpcds_text_sf1.json 
b/framework/resources/Functional/tpcds/variants/text/tpcds_text_sf1.json index b41512a4c..adda2aaa3 100755 --- a/framework/resources/Functional/tpcds/variants/text/tpcds_text_sf1.json +++ b/framework/resources/Functional/tpcds/variants/text/tpcds_text_sf1.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_sf1_text_views", + "schema": "dfs_test.tpcds_sf1_text_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpch/sf0dot01/original/json/tpch.json b/framework/resources/Functional/tpch/sf0dot01/original/json/tpch.json index af98054a7..1a610e700 100644 --- a/framework/resources/Functional/tpch/sf0dot01/original/json/tpch.json +++ b/framework/resources/Functional/tpch/sf0dot01/original/json/tpch.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirTpch01Json", + "schema": "dfs_test.drillTestDirTpch01Json", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpch/sf0dot01/original/noextension/json/tpch.json b/framework/resources/Functional/tpch/sf0dot01/original/noextension/json/tpch.json index a4f090af5..e7d693a7b 100644 --- a/framework/resources/Functional/tpch/sf0dot01/original/noextension/json/tpch.json +++ b/framework/resources/Functional/tpch/sf0dot01/original/noextension/json/tpch.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.NoExtJson", + "schema": "dfs_test.NoExtJson", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquet-root/01.q b/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquet-root/01.q index 108b9f976..72bb82114 100644 --- a/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquet-root/01.q +++ 
b/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquet-root/01.q @@ -11,7 +11,7 @@ select avg(l_discount) as avg_disc, count(*) as count_order from - dfs.`/drill/testdata/no-extension/parquet/lineitem` + dfs_test.`/drill/testdata/no-extension/parquet/lineitem` where l_shipdate <= date '1998-12-01' - interval '120' day (3) group by diff --git a/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquet-root/tpch.json b/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquet-root/tpch.json index 15d40e573..d60725d0e 100644 --- a/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquet-root/tpch.json +++ b/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquet-root/tpch.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.root", + "schema": "dfs_test.root", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquet/01.q b/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquet/01.q index 108b9f976..72bb82114 100644 --- a/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquet/01.q +++ b/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquet/01.q @@ -11,7 +11,7 @@ select avg(l_discount) as avg_disc, count(*) as count_order from - dfs.`/drill/testdata/no-extension/parquet/lineitem` + dfs_test.`/drill/testdata/no-extension/parquet/lineitem` where l_shipdate <= date '1998-12-01' - interval '120' day (3) group by diff --git a/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquet/tpch.json b/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquet/tpch.json index a3e3c2d1f..90afc1880 100644 --- a/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquet/tpch.json +++ b/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquet/tpch.json @@ 
-8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.NoExtParquet", + "schema": "dfs_test.NoExtParquet", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquetNull/tpch.json b/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquetNull/tpch.json index 0ed6aadff..d9b64ea89 100644 --- a/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquetNull/tpch.json +++ b/framework/resources/Functional/tpch/sf0dot01/original/noextension/parquetNull/tpch.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.NoExtParquetNull", + "schema": "dfs_test.NoExtParquetNull", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpch/sf0dot01/original/noextension/text/tpch.json b/framework/resources/Functional/tpch/sf0dot01/original/noextension/text/tpch.json index 3a941a731..4e7022892 100644 --- a/framework/resources/Functional/tpch/sf0dot01/original/noextension/text/tpch.json +++ b/framework/resources/Functional/tpch/sf0dot01/original/noextension/text/tpch.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.NoExtText", + "schema": "dfs_test.NoExtText", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpch/sf0dot01/original/parquet/tpch.json b/framework/resources/Functional/tpch/sf0dot01/original/parquet/tpch.json index 9f431254c..1a1b9bb52 100644 --- a/framework/resources/Functional/tpch/sf0dot01/original/parquet/tpch.json +++ b/framework/resources/Functional/tpch/sf0dot01/original/parquet/tpch.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirTpch01Parquet", + "schema": "dfs_test.drillTestDirTpch01Parquet", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git 
a/framework/resources/Functional/tpch/sf0dot01/original/text/text_decimal/1.json b/framework/resources/Functional/tpch/sf0dot01/original/text/text_decimal/1.json index 5038850b0..c0e089681 100644 --- a/framework/resources/Functional/tpch/sf0dot01/original/text/text_decimal/1.json +++ b/framework/resources/Functional/tpch/sf0dot01/original/text/text_decimal/1.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*01.q", - "schema": "dfs.drillTestDirText", + "schema": "dfs_test.drillTestDirText", "output-format": "tsv", "expected-file": ".*01.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpch/sf0dot01/original/text/text_decimal/tpch.json b/framework/resources/Functional/tpch/sf0dot01/original/text/text_decimal/tpch.json index 460f7c950..30f2b3330 100644 --- a/framework/resources/Functional/tpch/sf0dot01/original/text/text_decimal/tpch.json +++ b/framework/resources/Functional/tpch/sf0dot01/original/text/text_decimal/tpch.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirTpch01Text", + "schema": "dfs_test.drillTestDirTpch01Text", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpch/sf0dot01/original/text/text_double/tpch.json b/framework/resources/Functional/tpch/sf0dot01/original/text/text_double/tpch.json index ce9ee7276..a9b0d9a0c 100644 --- a/framework/resources/Functional/tpch/sf0dot01/original/text/text_double/tpch.json +++ b/framework/resources/Functional/tpch/sf0dot01/original/text/text_double/tpch.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirTpch01Text", + "schema": "dfs_test.drillTestDirTpch01Text", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpch/sf0dot01/original/text/text_noviews/tpch.json b/framework/resources/Functional/tpch/sf0dot01/original/text/text_noviews/tpch.json index cd8e25a85..bbb8eeda4 
100644 --- a/framework/resources/Functional/tpch/sf0dot01/original/text/text_noviews/tpch.json +++ b/framework/resources/Functional/tpch/sf0dot01/original/text/text_noviews/tpch.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirTpch01Text", + "schema": "dfs_test.drillTestDirTpch01Text", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/tpch/sf0dot01/smoke/parquet/tpch-smoke.json b/framework/resources/Functional/tpch/sf0dot01/smoke/parquet/tpch-smoke.json index b8ca61a79..279e47605 100644 --- a/framework/resources/Functional/tpch/sf0dot01/smoke/parquet/tpch-smoke.json +++ b/framework/resources/Functional/tpch/sf0dot01/smoke/parquet/tpch-smoke.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.TpcHMulti", + "schema": "dfs_test.TpcHMulti", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/udfs/udfs.json b/framework/resources/Functional/udfs/udfs.json index 1a52b87e7..44d40ca61 100644 --- a/framework/resources/Functional/udfs/udfs.json +++ b/framework/resources/Functional/udfs/udfs.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirUdfs", + "schema": "dfs_test.drillTestDirUdfs", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/union/queries.json b/framework/resources/Functional/union/queries.json index 336a7e127..0417d6018 100644 --- a/framework/resources/Functional/union/queries.json +++ b/framework/resources/Functional/union/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.union", + "schema": "dfs_test.union", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/union_all/negative/negative.json b/framework/resources/Functional/union_all/negative/negative.json index 
1caf994e6..52e46077f 100644 --- a/framework/resources/Functional/union_all/negative/negative.json +++ b/framework/resources/Functional/union_all/negative/negative.json @@ -7,7 +7,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.union_all", + "schema": "dfs_test.union_all", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/union_all/prq_union_all/prq_union_all.json b/framework/resources/Functional/union_all/prq_union_all/prq_union_all.json index 2edc727e5..6bc518b6f 100644 --- a/framework/resources/Functional/union_all/prq_union_all/prq_union_all.json +++ b/framework/resources/Functional/union_all/prq_union_all/prq_union_all.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.union_all", + "schema": "dfs_test.union_all", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/values/values.json b/framework/resources/Functional/values/values.json index c1f44ae7c..94d9436b3 100644 --- a/framework/resources/Functional/values/values.json +++ b/framework/resources/Functional/values/values.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.union_all", + "schema": "dfs_test.union_all", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/views/drill2461.e_tsv b/framework/resources/Functional/views/drill2461.e_tsv index 3d5208c73..499b617ef 100644 --- a/framework/resources/Functional/views/drill2461.e_tsv +++ b/framework/resources/Functional/views/drill2461.e_tsv @@ -1 +1 @@ -DRILL dfs.drillTestDirViews Drill2461View EXPR$0 1 null YES INTERVAL null null null null null 6 HOUR 2 +DRILL dfs_test.drillTestDirViews Drill2461View EXPR$0 1 null YES INTERVAL null null null null null 6 HOUR 2 diff --git a/framework/resources/Functional/views/query35.e_tsv b/framework/resources/Functional/views/query35.e_tsv index 2d84e731f..9a04460e8 
100644 --- a/framework/resources/Functional/views/query35.e_tsv +++ b/framework/resources/Functional/views/query35.e_tsv @@ -1 +1 @@ -DRILL dfs.drillTestDirViews v1 +DRILL dfs_test.drillTestDirViews v1 diff --git a/framework/resources/Functional/views/views.json b/framework/resources/Functional/views/views.json index ada2a07e6..45fe0f974 100644 --- a/framework/resources/Functional/views/views.json +++ b/framework/resources/Functional/views/views.json @@ -10,7 +10,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.drillTestDirViews", + "schema": "dfs_test.drillTestDirViews", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/aggregates/queries.json b/framework/resources/Functional/window_functions/aggregates/queries.json index 432cca249..1ed1a6ae6 100644 --- a/framework/resources/Functional/window_functions/aggregates/queries.json +++ b/framework/resources/Functional/window_functions/aggregates/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.window_functions", + "schema": "dfs_test.window_functions", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/bugs/bugs.json b/framework/resources/Functional/window_functions/bugs/bugs.json index e6ad00a00..5ca2f5b1c 100644 --- a/framework/resources/Functional/window_functions/bugs/bugs.json +++ b/framework/resources/Functional/window_functions/bugs/bugs.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/empty_over_clause/empty_over_clause.json b/framework/resources/Functional/window_functions/empty_over_clause/empty_over_clause.json index 20e8e2fa0..eb4daa8c3 100644 --- 
a/framework/resources/Functional/window_functions/empty_over_clause/empty_over_clause.json +++ b/framework/resources/Functional/window_functions/empty_over_clause/empty_over_clause.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/first_val/queries.json b/framework/resources/Functional/window_functions/first_val/queries.json index dd2ee1dd8..f0b28ae93 100644 --- a/framework/resources/Functional/window_functions/first_val/queries.json +++ b/framework/resources/Functional/window_functions/first_val/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.window_functions", + "schema": "dfs_test.window_functions", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/frameclause/RBCRACR/queries.json b/framework/resources/Functional/window_functions/frameclause/RBCRACR/queries.json index b5602f461..1059fb62e 100644 --- a/framework/resources/Functional/window_functions/frameclause/RBCRACR/queries.json +++ b/framework/resources/Functional/window_functions/frameclause/RBCRACR/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.window_functions", + "schema": "dfs_test.window_functions", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/frameclause/RBUPACR/queries.json b/framework/resources/Functional/window_functions/frameclause/RBUPACR/queries.json index ff65fb5e6..d466b0877 100644 --- a/framework/resources/Functional/window_functions/frameclause/RBUPACR/queries.json +++ b/framework/resources/Functional/window_functions/frameclause/RBUPACR/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.window_functions", + 
"schema": "dfs_test.window_functions", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/frameclause/RBUPAUF/queries.json b/framework/resources/Functional/window_functions/frameclause/RBUPAUF/queries.json index 44436d82c..54155d2e5 100644 --- a/framework/resources/Functional/window_functions/frameclause/RBUPAUF/queries.json +++ b/framework/resources/Functional/window_functions/frameclause/RBUPAUF/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.window_functions", + "schema": "dfs_test.window_functions", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/frameclause/RBUPAUF/starInSubQry.q.failing b/framework/resources/Functional/window_functions/frameclause/RBUPAUF/starInSubQry.q.failing index 2c65194a7..9d5a75522 100644 --- a/framework/resources/Functional/window_functions/frameclause/RBUPAUF/starInSubQry.q.failing +++ b/framework/resources/Functional/window_functions/frameclause/RBUPAUF/starInSubQry.q.failing @@ -1,3 +1,3 @@ SELECT SUM(c1) OVER w -FROM (select * from dfs.tmp.`t_alltype`) +FROM (select * from dfs_test.tmp.`t_alltype`) WINDOW w AS (PARTITION BY c8 ORDER BY c1 RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING); diff --git a/framework/resources/Functional/window_functions/frameclause/README.txt b/framework/resources/Functional/window_functions/frameclause/README.txt index 306eedcf2..7b3d2edde 100644 --- a/framework/resources/Functional/window_functions/frameclause/README.txt +++ b/framework/resources/Functional/window_functions/frameclause/README.txt @@ -15,7 +15,7 @@ group is a directory and holds related test files. 
CTAS used to create `t_alltype.parquet` file is, -0: jdbc:drill:schema=dfs.tmp> create table t_alltype as select +0: jdbc:drill:schema=dfs_test.tmp> create table t_alltype as select > case when columns[0] = '' then cast(null as integer) else cast(columns[0] as integer) end as c1, > case when columns[1] = '' then cast(null as integer) else cast(columns[1] as integer) end as c2, > case when columns[2] = '' then cast(null as bigint) else cast(columns[2] as bigint) end as c3, @@ -25,7 +25,7 @@ CTAS used to create `t_alltype.parquet` file is, > case when columns[6] = '' then cast(null as date) else cast(columns[6] as date) end as c7, > case when columns[7] = '' then cast(null as boolean) else cast(columns[7] as boolean) end as c8, > case when columns[8] = '' then cast(null as double) else cast(columns[8] as double) end as c9 - > from dfs.tmp.`t_alltype.csv`; + > from dfs_test.tmp.`t_alltype.csv`; +-----------+----------------------------+ | Fragment | Number of records written | +-----------+----------------------------+ diff --git a/framework/resources/Functional/window_functions/frameclause/defaultFrame/queries.json b/framework/resources/Functional/window_functions/frameclause/defaultFrame/queries.json index 0704fab87..9320fc940 100644 --- a/framework/resources/Functional/window_functions/frameclause/defaultFrame/queries.json +++ b/framework/resources/Functional/window_functions/frameclause/defaultFrame/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.window_functions", + "schema": "dfs_test.window_functions", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/frameclause/multipl_wnwds/queries.json b/framework/resources/Functional/window_functions/frameclause/multipl_wnwds/queries.json index 83a7ffeac..6fa65d5b5 100644 --- a/framework/resources/Functional/window_functions/frameclause/multipl_wnwds/queries.json +++ 
b/framework/resources/Functional/window_functions/frameclause/multipl_wnwds/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.window_functions", + "schema": "dfs_test.window_functions", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/frameclause/subQueries/queries.json b/framework/resources/Functional/window_functions/frameclause/subQueries/queries.json index 8b53efe8b..8f9f6ec9b 100644 --- a/framework/resources/Functional/window_functions/frameclause/subQueries/queries.json +++ b/framework/resources/Functional/window_functions/frameclause/subQueries/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.window_functions", + "schema": "dfs_test.window_functions", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/lag_func/queries.json b/framework/resources/Functional/window_functions/lag_func/queries.json index 88698f9c8..79956a38f 100644 --- a/framework/resources/Functional/window_functions/lag_func/queries.json +++ b/framework/resources/Functional/window_functions/lag_func/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.window_functions", + "schema": "dfs_test.window_functions", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/last_val/queries.json b/framework/resources/Functional/window_functions/last_val/queries.json index 73296c3ed..ba6a9ff33 100644 --- a/framework/resources/Functional/window_functions/last_val/queries.json +++ b/framework/resources/Functional/window_functions/last_val/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.window_functions", + "schema": "dfs_test.window_functions", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git 
a/framework/resources/Functional/window_functions/lead_func/queries.json b/framework/resources/Functional/window_functions/lead_func/queries.json index c78e890c4..d96ca4396 100644 --- a/framework/resources/Functional/window_functions/lead_func/queries.json +++ b/framework/resources/Functional/window_functions/lead_func/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.window_functions", + "schema": "dfs_test.window_functions", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/misc/misc.json b/framework/resources/Functional/window_functions/misc/misc.json index d38200199..bf432542e 100644 --- a/framework/resources/Functional/window_functions/misc/misc.json +++ b/framework/resources/Functional/window_functions/misc/misc.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/multiple_partitions/multiple_partitions.json b/framework/resources/Functional/window_functions/multiple_partitions/multiple_partitions.json index 092be3878..681b5c134 100644 --- a/framework/resources/Functional/window_functions/multiple_partitions/multiple_partitions.json +++ b/framework/resources/Functional/window_functions/multiple_partitions/multiple_partitions.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/negative/negative.json b/framework/resources/Functional/window_functions/negative/negative.json index 594a2f020..94a2e75fe 100644 --- a/framework/resources/Functional/window_functions/negative/negative.json +++ 
b/framework/resources/Functional/window_functions/negative/negative.json @@ -7,7 +7,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/nestedAggs/nstdagg.json b/framework/resources/Functional/window_functions/nestedAggs/nstdagg.json index 017f093ff..9de763201 100644 --- a/framework/resources/Functional/window_functions/nestedAggs/nstdagg.json +++ b/framework/resources/Functional/window_functions/nestedAggs/nstdagg.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.window_functions", + "schema": "dfs_test.window_functions", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/ntile_func/queries.json b/framework/resources/Functional/window_functions/ntile_func/queries.json index 0ab4a4767..03873b560 100644 --- a/framework/resources/Functional/window_functions/ntile_func/queries.json +++ b/framework/resources/Functional/window_functions/ntile_func/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.window_functions", + "schema": "dfs_test.window_functions", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/optimization/data/query.json b/framework/resources/Functional/window_functions/optimization/data/query.json index a973a8f06..d19aba736 100644 --- a/framework/resources/Functional/window_functions/optimization/data/query.json +++ b/framework/resources/Functional/window_functions/optimization/data/query.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git 
a/framework/resources/Functional/window_functions/optimization/plan/plan.json b/framework/resources/Functional/window_functions/optimization/plan/plan.json index daf52b1c2..992b444d7 100644 --- a/framework/resources/Functional/window_functions/optimization/plan/plan.json +++ b/framework/resources/Functional/window_functions/optimization/plan/plan.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/order_by/order_by.json b/framework/resources/Functional/window_functions/order_by/order_by.json index 2d8e01237..fc754b782 100644 --- a/framework/resources/Functional/window_functions/order_by/order_by.json +++ b/framework/resources/Functional/window_functions/order_by/order_by.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/partition_by/partition_by.json b/framework/resources/Functional/window_functions/partition_by/partition_by.json index b08217e09..13f6fd90e 100644 --- a/framework/resources/Functional/window_functions/partition_by/partition_by.json +++ b/framework/resources/Functional/window_functions/partition_by/partition_by.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/statistical_aggregates/statistical_aggregates.json b/framework/resources/Functional/window_functions/statistical_aggregates/statistical_aggregates.json index 78677fc77..8de87c3b7 100644 --- 
a/framework/resources/Functional/window_functions/statistical_aggregates/statistical_aggregates.json +++ b/framework/resources/Functional/window_functions/statistical_aggregates/statistical_aggregates.json @@ -6,7 +6,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.subqueries", + "schema": "dfs_test.subqueries", "output-format": "tsv", "expected-file": ".*.res", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/tpcds/query47.sql.fail b/framework/resources/Functional/window_functions/tpcds/query47.sql.fail index 7eef5550a..235d611c6 100755 --- a/framework/resources/Functional/window_functions/tpcds/query47.sql.fail +++ b/framework/resources/Functional/window_functions/tpcds/query47.sql.fail @@ -1,4 +1,4 @@ -use dfs.tpcds_sf1_parquet_views; +use dfs_test.tpcds_sf1_parquet_views; WITH v1 AS (SELECT i_category, @@ -19,10 +19,10 @@ WITH v1 partition BY i_category, i_brand, s_store_name, s_company_name ORDER BY d_year, d_moy) rn - FROM dfs.`/drill/testdata/tpcds_sf1/parquet/views/item.view.drill`, - dfs.`/drill/testdata/tpcds_sf1/parquet/views/store_sales.view.drill`, - dfs.`/drill/testdata/tpcds_sf1/parquet/views/date_dim.view.drill`, - dfs.`/drill/testdata/tpcds_sf1/parquet/views/store.view.drill` + FROM dfs_test.`/drill/testdata/tpcds_sf1/parquet/views/item.view.drill`, + dfs_test.`/drill/testdata/tpcds_sf1/parquet/views/store_sales.view.drill`, + dfs_test.`/drill/testdata/tpcds_sf1/parquet/views/date_dim.view.drill`, + dfs_test.`/drill/testdata/tpcds_sf1/parquet/views/store.view.drill` WHERE ss_item_sk = i_item_sk AND ss_sold_date_sk = d_date_sk AND ss_store_sk = s_store_sk diff --git a/framework/resources/Functional/window_functions/tpcds/tpcds_parquet_sf1.json b/framework/resources/Functional/window_functions/tpcds/tpcds_parquet_sf1.json index fb89cec0d..16471ca68 100755 --- a/framework/resources/Functional/window_functions/tpcds/tpcds_parquet_sf1.json +++ 
b/framework/resources/Functional/window_functions/tpcds/tpcds_parquet_sf1.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_sf1_parquet_views", + "schema": "dfs_test.tpcds_sf1_parquet_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/tpcds_variants/tpcds_parquet_sf1.json b/framework/resources/Functional/window_functions/tpcds_variants/tpcds_parquet_sf1.json index 88575cde8..b9bbb1d86 100755 --- a/framework/resources/Functional/window_functions/tpcds_variants/tpcds_parquet_sf1.json +++ b/framework/resources/Functional/window_functions/tpcds_variants/tpcds_parquet_sf1.json @@ -8,7 +8,7 @@ "matrices": [ { "query-file": ".*.sql", - "schema": "dfs.tpcds_sf1_parquet_views", + "schema": "dfs_test.tpcds_sf1_parquet_views", "output-format": "tsv", "expected-file": ".*.e_tsv", "verification-type": [ diff --git a/framework/resources/Functional/window_functions/views/queries.json b/framework/resources/Functional/window_functions/views/queries.json index 9c7c72367..0bf4a5ab7 100644 --- a/framework/resources/Functional/window_functions/views/queries.json +++ b/framework/resources/Functional/window_functions/views/queries.json @@ -9,7 +9,7 @@ "matrices": [ { "query-file": ".*.q", - "schema": "dfs.window_functions", + "schema": "dfs_test.window_functions", "output-format": "tsv", "expected-file": ".*.e", "verification-type": [