[SPARK-50283][INFRA] Add a separate docker file for linter
### What changes were proposed in this pull request?
Add a separate Dockerfile for the linter.

### Why are the changes needed?
1. To centralize the installation of the linters and their dependencies.
2. To spin the linter environment off from the single all-in-one CI Docker image (a local build sketch follows below).
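For local verification, the new image can be built straight from the repository root; the `local` tag below is illustrative, not part of this PR:

```bash
# Build the standalone linter image from the new Dockerfile (tag is hypothetical)
docker build -t apache-spark-ci-image-lint:local dev/spark-test-image/lint/
```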

### Does this PR introduce _any_ user-facing change?
No, this is infra-only.

### How was this patch tested?
CI

### Was this patch authored or co-authored using generative AI tooling?
no

Closes #48826 from zhengruifeng/infra_separate_docker_lint.

Authored-by: Ruifeng Zheng <[email protected]>
Signed-off-by: Hyukjin Kwon <[email protected]>
zhengruifeng authored and HyukjinKwon committed Nov 13, 2024
1 parent 158aeb0 commit e29db6e
Showing 5 changed files with 138 additions and 15 deletions.
39 changes: 26 additions & 13 deletions .github/workflows/build_and_test.yml
@@ -60,6 +60,8 @@ jobs:
image_url: ${{ steps.infra-image-outputs.outputs.image_url }}
image_docs_url: ${{ steps.infra-image-docs-outputs.outputs.image_docs_url }}
image_docs_url_link: ${{ steps.infra-image-link.outputs.image_docs_url_link }}
image_lint_url: ${{ steps.infra-image-lint-outputs.outputs.image_lint_url }}
image_lint_url_link: ${{ steps.infra-image-link.outputs.image_lint_url_link }}
steps:
- name: Checkout Spark repository
uses: actions/checkout@v4
@@ -144,15 +146,25 @@ jobs:
IMG_NAME="apache-spark-ci-image-docs:${{ inputs.branch }}-${{ github.run_id }}"
IMG_URL="ghcr.io/$REPO_OWNER/$IMG_NAME"
echo "image_docs_url=$IMG_URL" >> $GITHUB_OUTPUT
- name: Generate infra image URL (Linter)
id: infra-image-lint-outputs
run: |
# Convert to lowercase to meet Docker repo name requirement
REPO_OWNER=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')
IMG_NAME="apache-spark-ci-image-lint:${{ inputs.branch }}-${{ github.run_id }}"
IMG_URL="ghcr.io/$REPO_OWNER/$IMG_NAME"
echo "image_lint_url=$IMG_URL" >> $GITHUB_OUTPUT
- name: Link the docker images
id: infra-image-link
run: |
# Set the image URL for job "docs"
# Should delete the link and directly use image_docs_url after SPARK 3.x EOL
if [[ "${{ inputs.branch }}" == 'branch-3.5' ]]; then
echo "image_docs_url_link=${{ steps.infra-image-outputs.outputs.image_url }}" >> $GITHUB_OUTPUT
echo "image_lint_url_link=${{ steps.infra-image-outputs.outputs.image_url }}" >> $GITHUB_OUTPUT
else
echo "image_docs_url_link=${{ steps.infra-image-docs-outputs.outputs.image_docs_url }}" >> $GITHUB_OUTPUT
echo "image_lint_url_link=${{ steps.infra-image-lint-outputs.outputs.image_lint_url }}" >> $GITHUB_OUTPUT
fi
# Build: build Spark and run the tests for specified modules.
@@ -382,6 +394,17 @@ jobs:
${{ needs.precondition.outputs.image_docs_url }}
# Use the infra image cache to speed up
cache-from: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-docs-cache:${{ inputs.branch }}
- name: Build and push (Linter)
if: hashFiles('dev/spark-test-image/lint/Dockerfile') != ''
id: docker_build_lint
uses: docker/build-push-action@v6
with:
context: ./dev/spark-test-image/lint/
push: true
tags: |
${{ needs.precondition.outputs.image_lint_url }}
# Use the infra image cache to speed up
cache-from: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-lint-cache:${{ inputs.branch }}


pyspark:
@@ -667,7 +690,7 @@ jobs:
PYSPARK_PYTHON: python3.9
GITHUB_PREV_SHA: ${{ github.event.before }}
container:
image: ${{ needs.precondition.outputs.image_url }}
image: ${{ needs.precondition.outputs.image_lint_url_link }}
steps:
- name: Checkout Spark repository
uses: actions/checkout@v4
@@ -741,18 +764,8 @@ jobs:
# Should delete this section after SPARK 3.5 EOL.
python3.9 -m pip install 'flake8==3.9.0' pydata_sphinx_theme 'mypy==0.982' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' numpydoc 'jinja2<3.0.0' 'black==22.6.0'
python3.9 -m pip install 'pandas-stubs==1.2.0.53' ipython 'grpcio==1.56.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0'
- name: Install Python dependencies for python linter and documentation generation
if: inputs.branch != 'branch-3.5'
run: |
# Should unpin 'sphinxcontrib-*' after upgrading sphinx>5
# See 'ipython_genutils' in SPARK-38517
# See 'docutils<0.18.0' in SPARK-39421
python3.9 -m pip install 'sphinx==4.5.0' mkdocs 'pydata_sphinx_theme>=0.13' sphinx-copybutton nbsphinx numpydoc jinja2 markupsafe 'pyzmq<24.0.0' \
ipython ipython_genutils sphinx_plotly_directive numpy pyarrow pandas 'plotly>=4.8' 'docutils<0.18.0' \
'flake8==3.9.0' 'mypy==1.8.0' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' 'black==23.9.1' \
'pandas-stubs==1.2.0.53' 'grpcio==1.67.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
'sphinxcontrib-applehelp==1.0.4' 'sphinxcontrib-devhelp==1.0.2' 'sphinxcontrib-htmlhelp==2.0.1' 'sphinxcontrib-qthelp==1.0.3' 'sphinxcontrib-serializinghtml==1.1.5'
python3.9 -m pip list
- name: List Python packages
run: python3.9 -m pip list
- name: Python linter
run: PYTHON_EXECUTABLE=python3.9 ./dev/lint-python
# Should delete this section after SPARK 3.5 EOL.
14 changes: 14 additions & 0 deletions .github/workflows/build_infra_images_cache.yml
@@ -28,6 +28,7 @@ on:
paths:
- 'dev/infra/Dockerfile'
- 'dev/spark-test-image/docs/Dockerfile'
- 'dev/spark-test-image/lint/Dockerfile'
- '.github/workflows/build_infra_images_cache.yml'
# Create infra image when cutting down branches/tags
create:
@@ -74,3 +75,16 @@ jobs:
- name: Image digest (Documentation)
if: hashFiles('dev/spark-test-image/docs/Dockerfile') != ''
run: echo ${{ steps.docker_build_docs.outputs.digest }}
- name: Build and push (Linter)
if: hashFiles('dev/spark-test-image/lint/Dockerfile') != ''
id: docker_build_lint
uses: docker/build-push-action@v6
with:
context: ./dev/spark-test-image/lint/
push: true
tags: ghcr.io/apache/spark/apache-spark-github-action-image-lint-cache:${{ github.ref_name }}-static
cache-from: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-lint-cache:${{ github.ref_name }}
cache-to: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-lint-cache:${{ github.ref_name }},mode=max
- name: Image digest (Linter)
if: hashFiles('dev/spark-test-image/lint/Dockerfile') != ''
run: echo ${{ steps.docker_build_lint.outputs.digest }}
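For reference, the `cache-from`/`cache-to` settings above correspond roughly to the manual Buildx invocation sketched below; the `master` ref is illustrative (the workflow substitutes `${{ github.ref_name }}`), and `mode=max` exports all intermediate layers to the registry cache rather than only the final ones:

```bash
# Rough local equivalent of the build-push-action step above (refs are illustrative)
docker buildx build \
  --push \
  --tag ghcr.io/apache/spark/apache-spark-github-action-image-lint-cache:master-static \
  --cache-from type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-lint-cache:master \
  --cache-to type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-lint-cache:master,mode=max \
  dev/spark-test-image/lint/
```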
96 changes: 96 additions & 0 deletions dev/spark-test-image/lint/Dockerfile
@@ -0,0 +1,96 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Image for building and testing Spark branches. Based on Ubuntu 22.04.
# See also in https://hub.docker.com/_/ubuntu
FROM ubuntu:jammy-20240911.1
LABEL org.opencontainers.image.authors="Apache Spark project <[email protected]>"
LABEL org.opencontainers.image.licenses="Apache-2.0"
LABEL org.opencontainers.image.ref.name="Apache Spark Infra Image for Linter"
# Overwrite this label to avoid exposing the underlying Ubuntu OS version label
LABEL org.opencontainers.image.version=""

ENV FULL_REFRESH_DATE 20241112

ENV DEBIAN_FRONTEND noninteractive
ENV DEBCONF_NONINTERACTIVE_SEEN true

RUN apt-get update && apt-get install -y \
build-essential \
ca-certificates \
curl \
gfortran \
git \
gnupg \
libcurl4-openssl-dev \
libfontconfig1-dev \
libfreetype6-dev \
libfribidi-dev \
libgit2-dev \
libharfbuzz-dev \
libjpeg-dev \
libpng-dev \
libssl-dev \
libtiff5-dev \
libxml2-dev \
nodejs \
npm \
pkg-config \
qpdf \
r-base \
software-properties-common \
wget \
zlib1g-dev \
&& rm -rf /var/lib/apt/lists/*

RUN Rscript -e "install.packages(c('devtools', 'knitr', 'markdown', 'rmarkdown', 'testthat'), repos='https://cloud.r-project.org/')" \
&& Rscript -e "devtools::install_version('pkgdown', version='2.0.1', repos='https://cloud.r-project.org')" \
&& Rscript -e "devtools::install_version('preferably', version='0.4', repos='https://cloud.r-project.org')" \
&& Rscript -e "devtools::install_version('lintr', version='2.0.1', repos='https://cloud.r-project.org')" \

# See more in SPARK-39735
ENV R_LIBS_SITE "/usr/local/lib/R/site-library:${R_LIBS_SITE}:/usr/lib/R/library"

# Install Python 3.9
RUN add-apt-repository ppa:deadsnakes/ppa
RUN apt-get update && apt-get install -y python3.9 python3.9-distutils \
&& rm -rf /var/lib/apt/lists/*
RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.9

RUN python3.9 -m pip install \
'black==23.9.1' \
'flake8==3.9.0' \
'googleapis-common-protos-stubs==2.2.0' \
'grpc-stubs==1.24.11' \
'grpcio-status==1.67.0' \
'grpcio==1.67.0' \
'ipython' \
'ipython_genutils' \
'jinja2' \
'matplotlib' \
'mypy==1.8.0' \
'numpy==2.0.2' \
'numpydoc' \
'pandas' \
'pandas-stubs==1.2.0.53' \
'plotly>=4.8' \
'pyarrow>=18.0.0' \
'pytest-mypy-plugins==1.9.3' \
'pytest==7.1.3' \
&& python3.9 -m pip install torch torchvision --index-url https://download.pytorch.org/whl/cpu \
&& python3.9 -m pip install torcheval \
&& python3.9 -m pip cache purge
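Assuming an image built with the example tag from the PR description above, the linters can be exercised locally much like the CI `Python linter` step; the mount path and tag are assumptions, not part of this commit:

```bash
# Run the PySpark linter inside the new image (tag and mount path are assumed)
docker run --rm -v "$PWD":/spark -w /spark apache-spark-ci-image-lint:local \
  bash -c 'PYTHON_EXECUTABLE=python3.9 ./dev/lint-python'
```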
2 changes: 1 addition & 1 deletion python/pyspark/sql/connect/expressions.py
Expand Up @@ -490,7 +490,7 @@ def __repr__(self) -> str:
# is slightly different:
# java.time.Duration only applies HOURS, MINUTES, SECONDS units,
# while Pandas applies all supported units.
return pd.Timedelta(delta).isoformat() # type: ignore[attr-defined]
return pd.Timedelta(delta).isoformat()
return f"{self._value}"


2 changes: 1 addition & 1 deletion python/pyspark/sql/pandas/conversion.py
Expand Up @@ -520,7 +520,7 @@ def convert_timestamp(value: Any) -> Any:
else:
return (
pd.Timestamp(value)
.tz_localize(timezone, ambiguous=False) # type: ignore
.tz_localize(timezone, ambiguous=False)
.tz_convert(_get_local_timezone())
.tz_localize(None)
.to_pydatetime()
