Skip to content

Commit

Permalink
address PR feedback
Browse files Browse the repository at this point in the history
- update sample result in documentation
- remove unused import
- cleaner code in AST builder
- use constant for spark conf

Signed-off-by: Sean Kao <[email protected]>
  • Loading branch information
seankao-az committed Mar 14, 2024
1 parent f323925 commit 6bb0103
Show file tree
Hide file tree
Showing 5 changed files with 25 additions and 24 deletions.
10 changes: 9 additions & 1 deletion docs/index.md
Original file line number Diff line number Diff line change
Expand Up @@ -295,7 +295,15 @@ SHOW FLINT [INDEX|INDEXES] IN catalog[.database]

Example:
```sql
SHOW FLINT INDEXES IN spark_catalog.default
os> SHOW FLINT INDEXES IN spark_catalog.default;
fetched rows / total rows = 3/3
+-------------------------------------------------------------+----------+----------+-----------+-----------------+--------------+------------+
| flint_index_name | kind | database | table | index_name | auto_refresh | status |
|-------------------------------------------------------------+----------+----------+-----------+-----------------+--------------+------------|
| flint_spark_catalog_default_http_count_view | mv | default | NULL | http_count_view | false | active |
| flint_spark_catalog_default_http_logs_skipping_index | skipping | default | http_logs | NULL | true | refreshing |
| flint_spark_catalog_default_http_logs_status_clientip_index | covering | default | http_logs | status_clientip | false | active |
+-------------------------------------------------------------+----------+----------+-----------+-----------------+--------------+------------+
```

#### Create Index Options
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,8 @@
package org.opensearch.flint.core;

import java.util.List;
import java.util.Optional;

import org.opensearch.flint.core.metadata.FlintMetadata;
import org.opensearch.flint.core.metadata.log.FlintMetadataLogEntry;
import org.opensearch.flint.core.metadata.log.OptimisticTransaction;
import org.opensearch.flint.core.storage.FlintReader;
import org.opensearch.flint.core.storage.FlintWriter;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,25 +43,16 @@ trait FlintSparkIndexAstBuilder extends FlintSparkSqlExtensionsVisitor[AnyRef] {
flint
.describeIndexes(indexNamePattern)
.map { index =>
val parts = index match {
case mv: FlintSparkMaterializedView => mv.mvName.split('.')
case covering: FlintSparkCoveringIndex => covering.tableName.split('.')
case skipping: FlintSparkSkippingIndex => skipping.tableName.split('.')
}
val databaseName = parts(1)

val tableName = index match {
// MV doesn't belong to a table
case _: FlintSparkMaterializedView => null
// Table name must be qualified when metadata created
case _ => parts.drop(2).mkString(".")
}
val indexName = index match {
case covering: FlintSparkCoveringIndex => covering.indexName
// MV name must be qualified when metadata created
case _: FlintSparkMaterializedView => parts.drop(2).mkString(".")
// Skipping index doesn't have a user defined name
case _: FlintSparkSkippingIndex => null
val (databaseName, tableName, indexName) = index match {
case skipping: FlintSparkSkippingIndex =>
val parts = skipping.tableName.split('.')
(parts(1), parts.drop(2).mkString("."), null)
case covering: FlintSparkCoveringIndex =>
val parts = covering.tableName.split('.')
(parts(1), parts.drop(2).mkString("."), covering.indexName)
case mv: FlintSparkMaterializedView =>
val parts = mv.mvName.split('.')
(parts(1), null, parts.drop(2).mkString("."))
}

val status = index.latestLogEntry match {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@ import org.opensearch.flint.core.metadata.log.FlintMetadataLogEntry.IndexState.I
import org.opensearch.flint.core.storage.FlintOpenSearchClient._
import org.opensearch.flint.spark.FlintSparkSuite

import org.apache.spark.sql.flint.config.FlintSparkConf.DATA_SOURCE_NAME

/**
* Transaction test base suite that creates the metadata log index which enables transaction
* support in index operation.
Expand All @@ -33,7 +35,7 @@ trait OpenSearchTransactionSuite extends FlintSparkSuite {

override def beforeAll(): Unit = {
super.beforeAll()
spark.conf.set("spark.flint.datasource.name", testDataSourceName)
spark.conf.set(DATA_SOURCE_NAME.key, testDataSourceName)
}

override def beforeEach(): Unit = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@ import org.opensearch.index.seqno.SequenceNumbers.{UNASSIGNED_PRIMARY_TERM, UNAS
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.mockito.MockitoSugar.mock

import org.apache.spark.sql.flint.config.FlintSparkConf.DATA_SOURCE_NAME

class FlintTransactionITSuite extends OpenSearchTransactionSuite with Matchers {

val testFlintIndex = "flint_test_index"
Expand All @@ -29,7 +31,7 @@ class FlintTransactionITSuite extends OpenSearchTransactionSuite with Matchers {

override def beforeAll(): Unit = {
super.beforeAll()
val options = openSearchOptions + ("spark.flint.datasource.name" -> testDataSourceName)
val options = openSearchOptions + (DATA_SOURCE_NAME.key -> testDataSourceName)
flintClient = new FlintOpenSearchClient(new FlintOptions(options.asJava))
}

Expand Down

0 comments on commit 6bb0103

Please sign in to comment.