main.yaml
name: Pre-commit checks

on:
  pull_request:
    branches:
      - master
  push:
    branches:
      - master
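# The jobs below have no `needs` dependencies, so they run in parallel on
# separate ubuntu-latest runners and report failures independently.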
jobs:
  build-api-docs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout source code
        uses: actions/checkout@v2
        with:
          fetch-depth: "0"
      - name: The API should not change once published
        run: |
          if ! git diff --quiet origin/master -- pkg/apis/sparkoperator.k8s.io/v1beta1; then
            echo "sparkoperator.k8s.io/v1beta1 api has changed"
            false
          fi
          if ! git diff --quiet origin/master -- pkg/apis/sparkoperator.k8s.io/v1beta2; then
            echo "sparkoperator.k8s.io/v1beta2 api has changed"
            false
          fi
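      # The two diffs above rely on `fetch-depth: "0"` so that origin/master is
      # available locally; any change under the published v1beta1 or v1beta2 API
      # packages fails the job, keeping those APIs frozen.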
      - name: The API documentation hasn't changed
        run: |
          make build-api-docs
          if ! git diff --quiet -- docs/api-docs.md; then
            echo "Need to re-run 'make build-api-docs' and commit the changes"
            git diff -- docs/api-docs.md;
            false
          fi
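      # `make build-api-docs` is assumed to regenerate docs/api-docs.md from the
      # API type definitions; a non-empty diff means the committed docs are
      # stale, so the step prints the drift and fails.
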
  build-sparkctl:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout source code
        uses: actions/checkout@v2
        with:
          fetch-depth: "0"
      - name: Set up Go
        uses: actions/setup-go@v3
        with:
          go-version-file: "go.mod"
      - name: build sparkctl
        run: |
          make all
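      # `make all` is assumed to build both the operator binary and the sparkctl
      # CLI via the repository Makefile; this job only checks that they compile.
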
  build-spark-operator:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout source code
        uses: actions/checkout@v2
        with:
          fetch-depth: "0"
      - name: Set up Go
        uses: actions/setup-go@v3
        with:
          go-version-file: "go.mod"
      - name: Run gofmt check
        run: make fmt-check
      - name: Run static analysis
        run: make static-analysis
      - name: Run unit tests
        run: make unit-test
      - name: Build Spark-Operator Docker Image
        run: |
          docker build -t gcr.io/spark-operator/spark-operator:latest .
      - name: Check changes in resources used in docker file
        run: |
          DOCKERFILE_RESOURCES=$(cat Dockerfile | grep -P -o "COPY [a-zA-Z0-9].*? " | cut -c6-)
          for resource in $DOCKERFILE_RESOURCES; do
            # If the resource is different
            if ! git diff --quiet origin/master -- $resource; then
              ## And the appVersion hasn't been updated
              if ! git diff origin/master -- charts/spark-operator-chart/Chart.yaml | grep +appVersion; then
                echo "resource used in gcr.io/spark-operator/spark-operator has changed in $resource, need to update the appVersion in charts/spark-operator-chart/Chart.yaml"
                git diff origin/master -- $resource;
                echo "failing the build... " && false
              fi
            fi
          done
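      # The grep above extracts the first (source) argument of each COPY line in
      # the Dockerfile. If any of those paths differs from origin/master while
      # the appVersion line in charts/spark-operator-chart/Chart.yaml is
      # unchanged, the build fails, so image-content changes must always be
      # accompanied by a chart appVersion bump.
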
  build-helm-chart:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout source code
        uses: actions/checkout@v2
        with:
          fetch-depth: "0"
      - name: Install Helm
        uses: azure/setup-helm@v1
        with:
          version: v3.7.1
      - name: Produce the helm documentation
        run: |
          make helm-docs
          if ! git diff --quiet -- charts/spark-operator-chart/README.md; then
            echo "Need to re-run 'make helm-docs' and commit the changes"
            false
          fi
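      # `make helm-docs` is expected to regenerate the chart README from the
      # chart metadata and values; the job fails if the committed
      # charts/spark-operator-chart/README.md is out of date.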
      - name: Set up chart-testing
        uses: helm/chart-testing-action@v2.2.1
      - name: Print chart-testing version information
        run: ct version
      - name: Run chart-testing (lint)
        run: ct lint
      - name: Run chart-testing (list-changed)
        id: list-changed
        run: |
          changed=$(ct list-changed)
          if [[ -n "$changed" ]]; then
            echo "::set-output name=changed::true"
          fi
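      # `ct list-changed` prints the charts that differ from the target branch.
      # Note that `::set-output` is deprecated in favor of writing to
      # $GITHUB_OUTPUT, and the `changed` output does not appear to be consumed
      # by any later step in this workflow.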
      - name: Detect CRDs drift between chart and manifest
        run: make detect-crds-drift
      - name: setup minikube
        uses: manusa/actions-setup-minikube@v2.4.3
        with:
          minikube version: "v1.24.0"
          kubernetes version: "v1.20.8"
          start args: --memory 6g --cpus=2 --addons ingress
          github token: ${{ inputs.github-token }}
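      # Note: the `inputs` context is only populated for workflow_call and
      # workflow_dispatch triggers, so `inputs.github-token` likely resolves to
      # an empty string on the push/pull_request events used here.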
      - name: Run chart-testing (install)
        run: |
          docker build -t ghcr.io/googlecloudplatform/spark-operator:local .
          minikube image load ghcr.io/googlecloudplatform/spark-operator:local
          ct install

  integration-test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout source code
        uses: actions/checkout@v2
        with:
          fetch-depth: "0"
      - name: Set up Go
        uses: actions/setup-go@v3
        with:
          go-version-file: "go.mod"
      - name: setup minikube
        uses: manusa/actions-setup-minikube@v2.4.3
        with:
          minikube version: "v1.24.0"
          kubernetes version: "v1.20.8"
          start args: --memory 6g --cpus=2 --addons ingress
          github token: ${{ inputs.github-token }}
      - name: Build local spark-operator docker image for minikube testing
        run: |
          docker build -t gcr.io/spark-operator/spark-operator:local .
          minikube image load gcr.io/spark-operator/spark-operator:local
      # The integration tests are currently broken; see https://github.com/GoogleCloudPlatform/spark-on-k8s-operator/issues/1416
      # - name: Run chart-testing (integration test)
      #   run: make integration-test
      - name: Setup tmate session
        if: failure()
        uses: mxschmitt/action-tmate@v3
        timeout-minutes: 15
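      # action-tmate opens an SSH-accessible debug session on the runner when an
      # earlier step fails; timeout-minutes caps the session at 15 minutes so a
      # failed run cannot hang indefinitely waiting for someone to attach.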