-
Notifications
You must be signed in to change notification settings - Fork 7
212 lines (178 loc) · 7.16 KB
/
actions_s3_update_gallery.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
name: Actions - Update Actions to S3 Gallery

on:
  # Enables manual triggering via GitHub Actions
  workflow_dispatch:
  # Run automatically whenever an action package changes on main.
  push:
    branches:
      - main
    paths:
      - 'actions/**'

jobs:
build_packages:
runs-on: ubuntu-latest
permissions:
id-token: write # required by AWS aws-actions/configure-aws-credentials
contents: read
env:
RCC_VERSION: v18.1.1
outputs:
has_packages: ${{ steps.check_artifacts.outputs.has_packages }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Check if manifest.json exists
run: |
if curl --head --silent --fail https://cdn.sema4.ai/gallery/actions/manifest.json; then
echo "manifest.json exists. Proceeding with the workflow."
else
echo "manifest.json does not exist. Stopping the workflow."
exit 1
fi
- name: Build updated packages
run: |
cd bin
curl -L -o rcc "https://cdn.sema4.ai/rcc/releases/${{ env.RCC_VERSION }}/linux64/rcc"
chmod +x rcc
./rcc run -r publisher/robot.yaml -t "Build updated packages"
- name: Save Gallery artifact
uses: actions/upload-artifact@v4
with:
name: gallery-artifact
path: bin/publisher/gallery
- name: Check artifacts
id: check_artifacts
run: |
if find bin/publisher/gallery -mindepth 1 -maxdepth 1 | read; then
echo "has_packages=true" >> $GITHUB_OUTPUT
fi
build_environments:
needs: build_packages
if: ${{ needs.build_packages.outputs.has_packages == 'true' }}
runs-on: ${{ matrix.os }}
env:
RCC_VERSION: v18.1.1
strategy:
fail-fast: false
matrix:
name:
- ubuntu
- windows
- macos
include:
- name: ubuntu
os: ubuntu-latest
rcc_folder: linux64
- name: windows
os: windows-latest
rcc_folder: windows64
- name: macos
os: macos-latest
rcc_folder: macos64
outputs:
has_environments: ${{ steps.check_artifacts.outputs.has_environments }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Download Gallery artifact
uses: actions/download-artifact@v4
with:
name: gallery-artifact
path: bin/publisher/gallery
- name: Build Environments [Linux and Mac]
if: ${{ matrix.os == 'ubuntu-latest' || matrix.os == 'macos-latest' }}
run: |
ls -lh bin/publisher/gallery
cd bin
curl -L -o rcc "https://cdn.sema4.ai/rcc/releases/${{ env.RCC_VERSION }}/${{ matrix.rcc_folder }}/rcc"
chmod +x rcc
# Please note that the script will only build environments for packages already existing in the gallery folder.
./rcc run -r publisher/robot.yaml -t "Build environments"
- name: Build Environments [Windows]
if: ${{ matrix.os == 'windows-latest' }}
run: |
Set-Location -Path bin
Invoke-WebRequest -Uri "https://cdn.sema4.ai/rcc/releases/${{ env.RCC_VERSION }}/${{ matrix.rcc_folder }}/rcc.exe" -OutFile rcc.exe
icacls .\rcc.exe /grant Everyone:F
# Please note that the script will only build environments for packages already existing in the gallery folder.
Invoke-Expression -Command ".\rcc.exe run -r publisher/robot.yaml -t `"Build environments`""
- name: Save environments artifact
uses: actions/upload-artifact@v4
with:
name: environments-artifact-${{ matrix.os }}
path: bin/publisher/environments
- name: Check artifacts
if: ${{ matrix.os == 'ubuntu-latest' }}
id: check_artifacts
run: |
# Note that it's enough to only do it once (on single OS), running it on all 3 would result
# in redundant operations.
if find bin/publisher/environments -mindepth 1 -maxdepth 1 | read; then
echo "has_environments=true" >> $GITHUB_OUTPUT
fi
publish:
runs-on: ubuntu-latest
needs: [build_packages, build_environments]
if: ${{ needs.build_packages.outputs.has_packages == 'true' || needs.build_environments.outputs.has_environments }}
permissions:
id-token: write # required by AWS aws-actions/configure-aws-credentials
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Download Gallery artifact
uses: actions/download-artifact@v4
with:
name: gallery-artifact
path: bin/publisher/gallery
- name: Download Environments artifact
uses: actions/download-artifact@v4
with:
pattern: environments-artifact-*
path: bin/publisher/environments
merge-multiple: true
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-region: eu-west-1
role-to-assume: arn:aws:iam::710450854638:role/github-action-gallery
- name: Publish environments
if: ${{ needs.build_environments.outputs.has_environments == 'true' }}
run: |
S3_BASE_URL="s3://downloads.robocorp.com/holotree/sema4ai"
cd bin/publisher/environments
ls -la
# Copy the environments
# cache-control max-age=31536000, because these should be immutable
for file in *; do
aws s3 cp "$file" "$S3_BASE_URL/$file" --cache-control max-age=31536000
returnCode=$?
if [ $returnCode -ne 0 ]; then
echo "Uploading of '$file' environments failed, exiting..."
exit 1
fi
done
- name: Publish packages
if: ${{ needs.build_packages.outputs.has_packages == 'true' }}
run: |
echo "Updating Gallery S3"
S3_BASE_URL="s3://downloads.robocorp.com/gallery/actions"
cd bin/publisher/gallery
ls -la
# Copy the Action Package subdirectories. Note that since this is an update workflow,
# subdirectories will only contain packages which version's was actually bumped.
# cache-control max-age=31536000, because these should be immutable.
for dir in */; do
aws s3 cp "$dir" "$S3_BASE_URL/$dir" --recursive --cache-control max-age=31536000
returnCode=$?
# If the upload of any package fails, we want to break the workflow, as to not update the manifest,
# which should be the only source of truth.
# Even if some packages will be uploaded before other one fails, manifest won't include them,
# and therefore they won't be available in clients to download, and will be eligible to re-upload
# on the next Gallery update run.
if [ $returnCode -ne 0 ]; then
echo "Upload of '$dir' package failed, exiting..."
exit 1
fi
done
# Copy the updated manifest
aws s3 cp manifest.json $S3_BASE_URL/manifest.json --cache-control max-age=120 --content-type "text/plain"