Skip to content

Commit

Permalink
Merge pull request #90 from badrinathpatchikolla/spark-3.2
Browse files Browse the repository at this point in the history
Added Cache Argument Method
  • Loading branch information
mantovani authored Sep 27, 2023
2 parents f97e0cd + 27f8d97 commit e485547
Show file tree
Hide file tree
Showing 4 changed files with 38 additions and 9 deletions.
6 changes: 6 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -353,6 +353,12 @@ Cache/Uncache both DataFrame or Table
cache(true)
```

Cache a DataFrame with an explicit storage level

```scala
cache(true, storageLevel = Some(MEMORY_AND_DISK))
```

#### Coalesce

Decrease the number of partitions in the RDD to numPartitions. Useful for running operations more efficiently after filtering down a large dataset.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import com.github.music.of.the.ainur.almaren.Tree
import com.github.music.of.the.ainur.almaren.builder.Core
import com.github.music.of.the.ainur.almaren.state.core._
import org.apache.spark.sql.Column
import org.apache.spark.storage.StorageLevel

private[almaren] trait Main extends Core {
def sql(sql: String): Option[Tree] =
Expand All @@ -12,8 +13,8 @@ private[almaren] trait Main extends Core {
/** Appends an `Alias` node to the pipeline.
  * NOTE(review): presumably registers the current result under `alias` so later
  * steps can refer to it by name — confirm against the Alias implementation.
  */
def alias(alias:String): Option[Tree] =
Alias(alias)

/** Appends a `Cache` node to the pipeline.
  *
  * @param opType       true to cache, false to uncache
  * @param tableName    when set, the named table is (un)cached instead of the current DataFrame
  * @param storageLevel optional Spark storage level applied when caching the DataFrame;
  *                     when None, Spark's default persistence level is used
  */
def cache(opType: Boolean = true, tableName: Option[String] = None, storageLevel: Option[StorageLevel] = None): Option[Tree] =
  Cache(opType, tableName = tableName, storageLevel = storageLevel)

/** Appends a `Coalesce` node that decreases the number of partitions to `size`.
  * Useful for running operations more efficiently after filtering down a large dataset.
  */
def coalesce(size:Int): Option[Tree] =
Coalesce(size)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package com.github.music.of.the.ainur.almaren.state.core
import com.github.music.of.the.ainur.almaren.State
import com.github.music.of.the.ainur.almaren.util.Constants
import org.apache.spark.sql.{Column, DataFrame}
import org.apache.spark.storage.StorageLevel

private[almaren] abstract class Main extends State {
override def executor(df: DataFrame): DataFrame = core(df)
Expand Down Expand Up @@ -81,22 +82,30 @@ case class Alias(alias:String) extends Main {
}
}

case class Cache(opType:Boolean = true,tableName:Option[String] = None) extends Main {
case class Cache(opType: Boolean = true, tableName: Option[String] = None, storageLevel: Option[StorageLevel] = None) extends Main {
// Framework entry point for this state: delegates to cache(df).
override def core(df: DataFrame): DataFrame = cache(df)

/** Applies the cache/uncache operation and returns the DataFrame unchanged
  * so the pipeline can continue.
  */
def cache(df: DataFrame): DataFrame = {
  logger.info(s"opType:{$opType}, tableName:{$tableName}, StorageType:{$storageLevel}")
  // A named table takes precedence; otherwise (un)persist the DataFrame itself.
  tableName match {
    case Some(t) => cacheTable(df, t)
    case None => cacheDf(df, storageLevel)
  }
  df
}
/** Persists the DataFrame when `opType` is true (honouring an explicit
  * storage level when one is supplied), or unpersists it when false.
  */
private def cacheDf(df: DataFrame, storageLevel: Option[StorageLevel]): Unit =
  if (opType)
    // Fall back to Spark's default persistence level when no level was given.
    storageLevel.fold(df.persist())(lvl => df.persist(lvl))
  else
    df.unpersist()
private def cacheTable(df:DataFrame,tableName: String): Unit =

private def cacheTable(df: DataFrame, tableName: String): Unit =
opType match {
case true => df.sqlContext.cacheTable(tableName)
case false => df.sqlContext.uncacheTable(tableName)
Expand Down
13 changes: 13 additions & 0 deletions src/test/scala/com/github/music/of/the/ainur/almaren/Test.scala
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import org.apache.spark.sql.{AnalysisException, Column, DataFrame, SaveMode}
import org.scalatest._
import org.scalatest.funsuite.AnyFunSuite
import org.apache.spark.sql.avro._
import org.apache.spark.storage.StorageLevel._

import java.io.File
import scala.collection.immutable._
Expand Down Expand Up @@ -383,6 +384,18 @@ class Test extends AnyFunSuite with BeforeAndAfter {
assert(bool_cache)
}

// Caching with an explicit MEMORY_ONLY level must mark the DataFrame's
// storage level as memory-backed.
val testCacheDfStorage: DataFrame = almaren.builder.sourceSql("select * from cache_test").cache(opType = true, storageLevel = Some(MEMORY_ONLY)).batch
val bool_cache_storage = testCacheDfStorage.storageLevel.useMemory
test("Testing Cache Memory Storage") {
  assert(bool_cache_storage)
}

// Caching with an explicit DISK_ONLY level must mark the DataFrame's
// storage level as disk-backed.
val testCacheDfDiskStorage: DataFrame = almaren.builder.sourceSql("select * from cache_test").cache(opType = true, storageLevel = Some(DISK_ONLY)).batch
val bool_cache_disk_storage = testCacheDfDiskStorage.storageLevel.useDisk
test("Testing Cache Disk Storage") {
  assert(bool_cache_disk_storage)
}

val testUnCacheDf = almaren.builder.sourceSql("select * from cache_test").cache(false).batch
val bool_uncache = testUnCacheDf.storageLevel.useMemory
test("Testing Uncache") {
Expand Down

0 comments on commit e485547

Please sign in to comment.