# Initial tests (#1)

# Workflow derived from https://github.com/r-lib/actions/tree/master/examples
# Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help
#
# Runs the sparklyr test suite against each Spark version in the matrix:
# installs R + package dependencies, Python (for the PySpark virtualenv),
# a cached Spark distribution and Scala compiler, then runs devtools::test().
on:
  push:
    branches: [main]
  pull_request:
    branches: [main]

name: Spark-Connect

jobs:
  Spark-Tests:
    runs-on: ubuntu-latest
    name: ${{ matrix.config.name }}
    strategy:
      fail-fast: false
      matrix:
        config:
          # NOTE: `hadoop` was referenced below (env, cache path, cache key)
          # but never defined here, leaving those expansions empty. '3' matches
          # the Hadoop build shipped with Spark 3.4.x and the existing "-3"
          # suffix already present in the cache key.
          - {spark: '3.4.1', hadoop: '3', name: 'Spark 3.4'}
    env:
      GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
      R_KEEP_PKG_SOURCE: yes
      SPARK_VERSION: ${{ matrix.config.spark }}
      HADOOP_VERSION: ${{ matrix.config.hadoop }}
    steps:
      - uses: actions/checkout@v3

      - uses: r-lib/actions/setup-r@v2
        with:
          r-version: 'release'
          use-public-rspm: true

      # Test-only packages needed by the suite, on top of DESCRIPTION deps.
      - uses: r-lib/actions/setup-r-dependencies@v2
        with:
          extra-packages: |
            any::devtools
            any::qs
            any::e1071
            any::mlbench
            any::glmnet
            any::sparklyr.nested
            any::janeaustenr
            any::nycflights13
            any::reshape2
            any::survival

      - name: Set up Python 3.10
        uses: actions/setup-python@v4
        with:
          # Quoted: an unquoted 3.10 would be parsed as the float 3.1.
          python-version: '3.10'

      # actions/cache@v2 was deprecated and disabled by GitHub; v4 takes the
      # same path/key inputs.
      - name: Cache Spark
        id: cache-spark
        uses: actions/cache@v4
        with:
          path: /home/runner/spark/spark-${{ matrix.config.spark }}-bin-hadoop${{ matrix.config.hadoop }}
          key: sparklyr-spark-${{ matrix.config.spark }}-bin-hadoop${{ matrix.config.hadoop }}-3

      - name: Install Spark (via sparklyr)
        if: steps.cache-spark.outputs.cache-hit != 'true'
        run: |
          sparklyr::spark_install(version = Sys.getenv("SPARK_VERSION"))
        shell: Rscript {0}

      - name: Cache Scala
        id: cache-scala
        uses: actions/cache@v4
        with:
          path: /home/runner/scala/
          key: scala-2

      - name: Install Scala (via sparklyr)
        if: steps.cache-scala.outputs.cache-hit != 'true'
        run: |
          sparklyr::download_scalac()
        shell: Rscript {0}

      # Diagnostic steps: surface the session, env vars, and installed
      # packages in the log to ease debugging of build failures.
      - name: R Session Info
        run: sessionInfo()
        shell: Rscript {0}

      - name: R Environment Variables
        run: Sys.getenv()
        shell: Rscript {0}

      - name: R Installed Packages
        run: |
          m_pkgs <- installed.packages()
          t_pkgs <- as.data.frame(m_pkgs, row.names = FALSE)
          print(t_pkgs[, c("Package", "Version")])
        shell: Rscript {0}

      # install_pyspark() is a helper from this package (loaded via load_all).
      - name: Virtual Environment
        run: |
          devtools::load_all()
          install_pyspark(Sys.getenv("SPARK_VERSION"))
        shell: Rscript {0}

      - name: R Tests
        run: |
          devtools::load_all()
          library(sparklyr)
          Sys.getenv("JAVA_HOME")
          devtools::test()
        shell: Rscript {0}