Commit e34bd50
fix test arg in fips
fix java doc
sfc-gh-bli committed Jan 18, 2024
1 parent e8a35b3 commit e34bd50
Showing 13 changed files with 75 additions and 48 deletions.
2 changes: 2 additions & 0 deletions fips-pom.xml
@@ -577,6 +577,7 @@
--add-exports=java.base/sun.nio.ch=ALL-UNNAMED
--add-exports=jdk.unsupported/sun.misc=ALL-UNNAMED
--add-opens=java.base/sun.security.util=ALL-UNNAMED
-DFIPS_TEST=true
</argLine>
</configuration>
</plugin>
@@ -590,6 +591,7 @@
--add-exports=java.base/sun.nio.ch=ALL-UNNAMED
--add-exports=jdk.unsupported/sun.misc=ALL-UNNAMED
--add-opens=java.base/sun.security.util=ALL-UNNAMED
-DFIPS_TEST=true
</argLine>
</configuration>
</plugin>
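
The new -DFIPS_TEST=true entries pass a JVM system property to the forked surefire test JVM in the FIPS build, so test code can tell it is running against the FIPS artifacts. A minimal sketch of reading that flag from a test; the helper object and its name are hypothetical, not taken from this repository:

    // Hypothetical helper; the real test utilities in this repository are not shown here.
    // It reads the flag that the argLine above passes to the forked test JVM.
    object FipsTestFlag {
      val isFipsRun: Boolean =
        java.lang.Boolean.parseBoolean(System.getProperty("FIPS_TEST", "false"))
    }

Tests could then skip or adjust provider-specific assertions whenever isFipsRun is true.
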
7 changes: 3 additions & 4 deletions java_doc.xml
@@ -68,9 +68,7 @@
<version>3.3.1</version>
<configuration>
<additionalOptions>--allow-script-in-comments</additionalOptions>
<bottom>&#169; {currentYear} Snowflake Inc. All Rights Reserved</bottom>
<doctitle>Snowpark Java API Reference ${project.version}</doctitle>
<footer>
<bottom>&#169; {currentYear} Snowflake Inc. All Rights Reserved
<![CDATA[
<!-- Google Analytics Code -->
<script>
@@ -98,7 +96,8 @@
}
</script>
]]>
</footer>
</bottom>
<doctitle>Snowpark Java API Reference ${project.version}</doctitle>
<header>
<![CDATA[
<div style="margin-top: 14px"><strong>
8 changes: 2 additions & 6 deletions src/main/java/com/snowflake/snowpark_java/DataFrame.java
@@ -805,14 +805,10 @@ public Column col(String colName) {
/**
* Returns the current DataFrame aliased as the input alias name.
*
* For example:
* <p>For example:
*
* {{{
* val df2 = df.alias("A")
* df2.select(df2.col("A.num"))
* }}}
* <p>{{{ val df2 = df.alias("A") df2.select(df2.col("A.num")) }}}
*
* @group basic
* @since 1.10.0
* @param alias The alias name of the dataframe
* @return a [[DataFrame]]
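
The reworked javadoc keeps the alias example on a single line so the doclet renders it cleanly. For reference, the documented pattern looks like this in the Scala test suite added by this commit; the table name "my_table" and column "num" are placeholders:

    // Illustrative usage of DataFrame.alias, mirroring the javadoc example.
    import com.snowflake.snowpark.{DataFrame, Session}

    def selectThroughAlias(session: Session): DataFrame = {
      val df = session.table("my_table").alias("A")
      // After aliasing, columns can be addressed through the alias prefix.
      df.select(df.col("A.num"))
    }
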
@@ -61,7 +61,7 @@ public class DataFrameWriter {
/**
* Sets the specified option in the DataFrameWriter.
*
* <h2>Sets the specified option for saving data to a table</h2>
* <p><b>Sets the specified option for saving data to a table</b>
*
* <p>Use this method to configure options:
*
@@ -70,7 +70,7 @@
* and the target table exists.
* </ul>
*
* <h2>Sets the specified option for saving data to a file on a stage</h2>
* <b>Sets the specified option for saving data to a file on a stage</b>
*
* <p>Use this method to configure options:
*
@@ -117,7 +117,7 @@ public DataFrameWriter option(String key, Object value) {
/**
* Sets multiple specified options in the DataFrameWriter.
*
* <h2>Sets the specified option for saving data to a table</h2>
* <p><b>Sets the specified option for saving data to a table</b>
*
* <p>Use this method to configure options:
*
@@ -126,7 +126,7 @@
* and the target table exists.
* </ul>
*
* <h2>Sets the specified option for saving data to a file on a stage</h2>
* <b>Sets the specified option for saving data to a file on a stage</b>
*
* <p>Use this method to configure options:
*
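
As context for the two scenarios this javadoc describes (saving to a table vs. to a file on a stage), here is a short usage sketch in the Scala API. The option keys, values, and table name are placeholders rather than values taken from the collapsed part of the javadoc, and saveAsTable is assumed to be the usual table-save entry point:

    // Placeholders throughout; option(...) is the per-key form documented above,
    // options(...) the bulk form documented below.
    import com.snowflake.snowpark.DataFrame

    def writeWithOptions(df: DataFrame): Unit =
      df.write
        .option("singleOption", "value")
        .options(Map("optionA" -> "value", "optionB" -> 1))
        .saveAsTable("target_table")
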
@@ -253,7 +253,8 @@ private[snowpark] object ErrorMessage {
def DF_MORE_THAN_ONE_TF_IN_SELECT(): SnowparkClientException =
createException("0131")

def DF_ALIAS_DUPLICATES(duplicatedAlias: scala.collection.Set[String]): SnowparkClientException =
def DF_ALIAS_DUPLICATES(
duplicatedAlias: scala.collection.Set[String]): SnowparkClientException =
createException("0132", duplicatedAlias.mkString(", "))

/*
12 changes: 9 additions & 3 deletions src/main/scala/com/snowflake/snowpark/internal/Utils.scala
@@ -1,7 +1,12 @@
package com.snowflake.snowpark.internal

import com.snowflake.snowpark.Column
import com.snowflake.snowpark.internal.analyzer.{Attribute, LogicalPlan, TableFunctionExpression, singleQuote}
import com.snowflake.snowpark.internal.analyzer.{
Attribute,
LogicalPlan,
TableFunctionExpression,
singleQuote
}

import java.io.{File, FileInputStream}
import java.lang.invoke.SerializedLambda
@@ -99,8 +104,9 @@ object Utils extends Logging {
lastInternalLine + "\n" + stackTrace.take(stackDepth).mkString("\n")
}

def addToDataframeAliasMap(result: Map[String, Seq[Attribute]], child: LogicalPlan)
: Map[String, Seq[Attribute]] = {
def addToDataframeAliasMap(
result: Map[String, Seq[Attribute]],
child: LogicalPlan): Map[String, Seq[Attribute]] = {
if (child != null) {
val map = child.dfAliasMap
val duplicatedAlias = result.keySet.intersect(map.keySet)
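
addToDataframeAliasMap merges a child plan's dfAliasMap into the running map and detects aliases that occur more than once, presumably reporting them via the DF_ALIAS_DUPLICATES error ("0132") added in this commit. A simplified, self-contained sketch of that merge step, with a generic type standing in for Attribute and a plain exception standing in for SnowparkClientException:

    // Simplified sketch; the real method operates on LogicalPlan/Attribute and the
    // duplicate case is handled by the library's own error factory.
    def mergeAliasMaps[A](
        result: Map[String, Seq[A]],
        childMap: Map[String, Seq[A]]): Map[String, Seq[A]] = {
      val duplicated = result.keySet.intersect(childMap.keySet)
      if (duplicated.nonEmpty) {
        throw new IllegalArgumentException(
          s"Duplicated dataframe alias: ${duplicated.mkString(", ")}")
      }
      result ++ childMap
    }
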
@@ -391,7 +391,8 @@ private[snowpark] case class UnresolvedAttribute(override val name: String)
}

private[snowpark] case class UnresolvedDFAliasAttribute(override val name: String)
extends Expression with NamedExpression {
extends Expression
with NamedExpression {
override def sql: String = ""

override def children: Seq[Expression] = Seq.empty
@@ -5,16 +5,19 @@ import com.snowflake.snowpark.internal.ErrorMessage
import scala.collection.mutable.{Map => MMap}

private[snowpark] object ExpressionAnalyzer {
def apply(aliasMap: Map[ExprId, String],
dfAliasMap: Map[String, Seq[Attribute]]): ExpressionAnalyzer =
def apply(
aliasMap: Map[ExprId, String],
dfAliasMap: Map[String, Seq[Attribute]]): ExpressionAnalyzer =
new ExpressionAnalyzer(aliasMap, dfAliasMap)

def apply(): ExpressionAnalyzer =
new ExpressionAnalyzer(Map.empty, Map.empty)

// create new analyzer by combining two alias maps
def apply(map1: Map[ExprId, String], map2: Map[ExprId, String],
dfAliasMap: Map[String, Seq[Attribute]]): ExpressionAnalyzer = {
def apply(
map1: Map[ExprId, String],
map2: Map[ExprId, String],
dfAliasMap: Map[String, Seq[Attribute]]): ExpressionAnalyzer = {
val common = map1.keySet & map2.keySet
val result = (map1 ++ map2).filter {
// remove common column, let (df1.join(df2))
@@ -24,16 +27,18 @@ private[snowpark] object ExpressionAnalyzer {
new ExpressionAnalyzer(result, dfAliasMap)
}

def apply(maps: Seq[Map[ExprId, String]],
dfAliasMap: Map[String, Seq[Attribute]]): ExpressionAnalyzer = {
def apply(
maps: Seq[Map[ExprId, String]],
dfAliasMap: Map[String, Seq[Attribute]]): ExpressionAnalyzer = {
maps.foldLeft(ExpressionAnalyzer()) {
case (expAnalyzer, map) => ExpressionAnalyzer(expAnalyzer.getAliasMap, map, dfAliasMap)
}
}
}

private[snowpark] class ExpressionAnalyzer(aliasMap: Map[ExprId, String],
dfAliasMap: Map[String, Seq[Attribute]]) {
private[snowpark] class ExpressionAnalyzer(
aliasMap: Map[ExprId, String],
dfAliasMap: Map[String, Seq[Attribute]]) {
private val generatedAliasMap: MMap[ExprId, String] = MMap.empty

def analyze(ex: Expression): Expression = ex match {
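
The three-argument apply above combines the alias maps of two child plans before building an analyzer: following the in-code comment, entries whose ExprId appears in both maps are removed so that a column made ambiguous by a join is not silently resolved. A standalone sketch of just that filtering step, with generic key/value types in place of ExprId and String:

    // Sketch of the combine step: keep only entries whose key occurs in exactly one map.
    def combineAliasMaps[K, V](map1: Map[K, V], map2: Map[K, V]): Map[K, V] = {
      val common = map1.keySet & map2.keySet
      (map1 ++ map2).filter { case (key, _) => !common.contains(key) }
    }
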
@@ -13,7 +13,6 @@ private[snowpark] trait MultiChildrenNode extends LogicalPlan {

protected def updateChildren(newChildren: Seq[LogicalPlan]): MultiChildrenNode


override lazy val dfAliasMap: Map[String, Seq[Attribute]] =
children.foldLeft(Map.empty[String, Seq[Attribute]]) {
case (map, child) => Utils.addToDataframeAliasMap(map, child)
@@ -195,8 +195,7 @@ private[snowpark] case class Sort(order: Seq[SortOrder], child: LogicalPlan) ext
Sort(order, _)
}

private[snowpark] case class DataframeAlias(alias: String, child: LogicalPlan)
extends UnaryNode {
private[snowpark] case class DataframeAlias(alias: String, child: LogicalPlan) extends UnaryNode {

override lazy val dfAliasMap: Map[String, Seq[Attribute]] =
Utils.addToDataframeAliasMap(Map(alias -> child.getSnowflakePlan.get.output), child)
@@ -15,7 +15,6 @@ private[snowpark] abstract class BinaryNode extends LogicalPlan {
lazy override protected val analyzer: ExpressionAnalyzer =
ExpressionAnalyzer(left.aliasMap, right.aliasMap, dfAliasMap)


override lazy val dfAliasMap: Map[String, Seq[Attribute]] =
Utils.addToDataframeAliasMap(Utils.addToDataframeAliasMap(Map.empty, left), right)

@@ -76,7 +76,7 @@ private[snowpark] case class Alias(child: Expression, name: String, isInternal:
}

private[snowpark] case class DfAlias(child: Expression, name: String)
extends UnaryExpression
extends UnaryExpression
with NamedExpression {
override def sqlOperator: String = ""
override def operatorFirst: Boolean = false
@@ -54,15 +54,27 @@ class DataFrameAliasSuite extends TestData with BeforeAndAfterEach with EagerSes
runQuery(s"insert into $tableName2 values(1, 7),(2, 8),(3, 9)", session)
val df1 = session.table(tableName1).alias("A")
val df2 = session.table(tableName2).alias("B")
checkAnswer(df1.join(df2, $"id1" === $"id2")
.select(df1.col("A.num1")), Seq(Row(4), Row(5), Row(6)))
checkAnswer(df1.join(df2, $"id1" === $"id2")
.select(df2.col("B.num2")), Seq(Row(7), Row(8), Row(9)))
checkAnswer(
df1
.join(df2, $"id1" === $"id2")
.select(df1.col("A.num1")),
Seq(Row(4), Row(5), Row(6)))
checkAnswer(
df1
.join(df2, $"id1" === $"id2")
.select(df2.col("B.num2")),
Seq(Row(7), Row(8), Row(9)))

checkAnswer(df1.join(df2, $"id1" === $"id2")
.select($"A.num1"), Seq(Row(4), Row(5), Row(6)))
checkAnswer(df1.join(df2, $"id1" === $"id2")
.select($"B.num2"), Seq(Row(7), Row(8), Row(9)))
checkAnswer(
df1
.join(df2, $"id1" === $"id2")
.select($"A.num1"),
Seq(Row(4), Row(5), Row(6)))
checkAnswer(
df1
.join(df2, $"id1" === $"id2")
.select($"B.num2"),
Seq(Row(7), Row(8), Row(9)))
}

test("Test for alias with join with column renaming") {
@@ -72,16 +84,23 @@ class DataFrameAliasSuite extends TestData with BeforeAndAfterEach with EagerSes
runQuery(s"insert into $tableName2 values(1, 7),(2, 8),(3, 9)", session)
val df1 = session.table(tableName1).alias("A")
val df2 = session.table(tableName2).alias("B")
checkAnswer(df1.join(df2, df1.col("id") === df2.col("id"))
.select(df1.col("A.num")), Seq(Row(4), Row(5), Row(6)))
checkAnswer(df1.join(df2, df1.col("id") === df2.col("id"))
.select(df2.col("B.num")), Seq(Row(7), Row(8), Row(9)))
checkAnswer(
df1
.join(df2, df1.col("id") === df2.col("id"))
.select(df1.col("A.num")),
Seq(Row(4), Row(5), Row(6)))
checkAnswer(
df1
.join(df2, df1.col("id") === df2.col("id"))
.select(df2.col("B.num")),
Seq(Row(7), Row(8), Row(9)))

// The following use case is out of the scope of supporting alias
// We still follow the old ambiguity resolving policy and require DF to be used
assertThrows[SnowparkClientException](
df1.join(df2, df1.col("id") === df2.col("id"))
.select($"A.num"))
df1
.join(df2, df1.col("id") === df2.col("id"))
.select($"A.num"))
}

test("Test for alias conflict") {
@@ -90,7 +109,8 @@ class DataFrameAliasSuite extends TestData with BeforeAndAfterEach with EagerSes
val df1 = session.table(tableName1).alias("A")
val df2 = session.table(tableName2).alias("A")
assertThrows[SnowparkClientException](
df1.join(df2, df1.col("id") === df2.col("id"))
.select(df1.col("A.num")))
df1
.join(df2, df1.col("id") === df2.col("id"))
.select(df1.col("A.num")))
}
}
