Skip to content

Commit

Permalink
Update more javadoc
Browse files Browse the repository at this point in the history
Signed-off-by: Chen Dai <[email protected]>
  • Loading branch information
dai-chen committed Sep 20, 2023
1 parent 8fc8ec1 commit 519f1fb
Show file tree
Hide file tree
Showing 5 changed files with 20 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,14 @@ abstract class FlintSparkIndexBuilder(flint: FlintSpark) {
.toMap
}

/**
* Add index options.
*
* @param options
* index options
* @return
* this builder, to allow method chaining
*/
def options(options: FlintSparkIndexOptions): this.type = {
this.indexOptions = options
this
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,9 @@ import org.apache.spark.sql.types.StructType
* @param indexedColumns
* indexed column list
*/
case class FlintSparkSkippingIndex(
class FlintSparkSkippingIndex(
tableName: String,
indexedColumns: Seq[FlintSparkSkippingStrategy],
val indexedColumns: Seq[FlintSparkSkippingStrategy],
override val options: FlintSparkIndexOptions = empty)
extends FlintSparkIndex {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,13 @@

package org.opensearch.flint.spark.sql

import org.antlr.v4.runtime.tree.RuleNode
import org.antlr.v4.runtime.tree.{ParseTree, RuleNode}
import org.opensearch.flint.spark.FlintSpark
import org.opensearch.flint.spark.sql.covering.FlintSparkCoveringIndexAstBuilder
import org.opensearch.flint.spark.sql.skipping.FlintSparkSkippingIndexAstBuilder

import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan

/**
* Flint Spark AST builder that builds Spark command for Flint index statement. This class mix-in
* all other AST builders and provides util methods.
Expand All @@ -20,6 +22,10 @@ class FlintSparkSqlAstBuilder
with FlintSparkCoveringIndexAstBuilder
with SparkSqlAstBuilder {

/**
 * Visit the given parse tree and narrow the visitor result to a Spark logical plan.
 *
 * @param tree
 *   ANTLR parse tree to dispatch on
 * @return
 *   logical plan produced by the matching visitor rule
 */
override def visit(tree: ParseTree): LogicalPlan =
  tree.accept(this).asInstanceOf[LogicalPlan]

/**
 * Combine results while visiting children: the most recent non-null child result
 * wins, otherwise the accumulated value is kept. Option.apply maps a null
 * nextResult to None, so this is equivalent to the explicit null check.
 *
 * @param aggregate
 *   result accumulated so far
 * @param nextResult
 *   result of the child node just visited (may be null)
 * @return
 *   nextResult when non-null, otherwise aggregate
 */
override def aggregateResult(aggregate: AnyRef, nextResult: AnyRef): AnyRef =
  Option(nextResult).getOrElse(aggregate)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ class FlintSparkSqlParser(sparkParser: ParserInterface) extends ParserInterface

override def parsePlan(sqlText: String): LogicalPlan = parse(sqlText) { flintParser =>
try {
flintAstBuilder.visit(flintParser.singleStatement()).asInstanceOf[LogicalPlan]
flintAstBuilder.visit(flintParser.singleStatement())
} catch {
// Fall back to Spark parse plan logic if flint cannot parse
case _: ParseException => sparkParser.parsePlan(sqlText)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@ import org.opensearch.flint.spark.sql.FlintSparkSqlExtensionsParser.{PropertyKey
import org.apache.spark.sql.catalyst.parser.ParserUtils.string

/**
* AST builder that builds for common rule in Spark SQL grammar.
* AST builder for common rules in the Spark SQL grammar. The main logic is adapted, with
* slight modifications, from Spark's AstBuilder code.
*/
trait SparkSqlAstBuilder extends FlintSparkSqlExtensionsVisitor[AnyRef] {

Expand Down

0 comments on commit 519f1fb

Please sign in to comment.