-
Notifications
You must be signed in to change notification settings - Fork 17
211 lines (172 loc) · 7 KB
/
ci-s3.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
# Builds the geth binary for four platforms and publishes it to S3:
#   - linux-amd64 additionally feeds an internal channel (s3://$S3_BUCKET/geth/)
#     plus a manifest and a human-readable version index.
#   - every platform publishes a public tar.gz (s3://$S3_BUCKET/geth-public/).
# A final job prunes old binaries beyond the retention counts below.
name: Build and Upload geth Binary to s3

on:
  push:
    branches:
      - main
    paths-ignore:
      - '.github/**'
  pull_request:
    branches:
      - main
    paths-ignore:
      - '.github/**'
  workflow_dispatch:

permissions:
  id-token: write        # required for AWS OIDC role assumption
  contents: write
  pull-requests: write   # required by the add-pr-comment step
  actions: write

env:
  NUM_INTERNAL_BINARIES_TO_KEEP: 50
  NUM_PUBLIC_BINARIES_TO_KEEP: 400
  S3_BUCKET: story-geth-binaries

jobs:
  # Add timestamp
  Timestamp:
    uses: storyprotocol/gha-workflows/.github/workflows/reusable-timestamp.yml@main

  # Build and upload the geth binary
  build_and_push:
    needs: Timestamp
    runs-on: ubuntu-latest
    strategy:
      matrix:
        platform: [linux-amd64, linux-arm64, darwin-amd64, darwin-arm64]
    steps:
      - name: Checkout code
        # NOTE(review): the original ref was mangled by scraping ("[email protected]");
        # restored to the current major — confirm against repo history.
        uses: actions/checkout@v4

      - name: Set up Go
        uses: actions/setup-go@v5
        with:
          go-version: '1.22.0'

      - name: Configure AWS credentials
        # NOTE(review): v1 of this action is deprecated; consider upgrading to v4.
        uses: aws-actions/configure-aws-credentials@v1
        with:
          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_SERVICE_STAGING }}:role/iac-max-role
          aws-region: us-west-1
          role-session-name: github-actions

      - name: Extract the version
        # Parses VersionMajor/Minor/Patch/Meta out of params/version.go and
        # exports VERSION (e.g. "1.2.3" or "1.2.3-beta") to later steps.
        run: |
          PARAMS_FILE="./params/version.go"
          VERSION_MAJOR=$(awk -F= '/VersionMajor/ {gsub(/[^0-9]/, "", $2); printf "%s", $2}' $PARAMS_FILE)
          VERSION_MINOR=$(awk -F= '/VersionMinor/ {gsub(/[^0-9]/, "", $2); printf "%s", $2}' $PARAMS_FILE)
          VERSION_PATCH=$(awk -F= '/VersionPatch/ {gsub(/[^0-9]/, "", $2); printf "%s", $2}' $PARAMS_FILE)
          VERSION_META=$(awk -F\" '/VersionMeta/ {print $2; exit}' $PARAMS_FILE)

          # Construct the full version string; the "-meta" suffix is only
          # appended for non-stable builds.
          VERSION="$VERSION_MAJOR.$VERSION_MINOR.$VERSION_PATCH"
          if [ "$VERSION_META" != "stable" ]; then
            VERSION+="-${VERSION_META}"
          fi

          echo "Version extracted: $VERSION"
          echo "VERSION=$VERSION" >> $GITHUB_ENV
          echo "VERSION_META=$VERSION_META" >> $GITHUB_ENV

      - name: Build the geth binary
        run: |
          # matrix.platform is "<GOOS>-<GOARCH>", e.g. "linux-amd64".
          IFS="-" read -r GOOS GOARCH <<< "${{ matrix.platform }}"

          output_name=./build/bin/geth
          if [ "$GOOS" = "windows" ]; then
            output_name+='.exe'
          fi

          echo "Building for $GOOS/$GOARCH..."
          env GOOS=$GOOS GOARCH=$GOARCH go build -o $output_name ./cmd/geth
          # NOTE: the default Actions shell runs with -e, so these explicit
          # checks are belt-and-braces rather than the primary failure path.
          if [ $? -ne 0 ]; then
            echo "Build failed!"
            exit 1
          fi

          if [ ! -f "$output_name" ]; then
            echo "Geth binary not found!"
            exit 1
          fi

          # Apply chmod only for non-windows builds
          if [ "$GOOS" != "windows" ]; then
            chmod +x "$output_name"
          fi

      - name: Upload the geth binary to S3
        run: |
          export TZ=America/Los_Angeles
          IFS="-" read -r GOOS GOARCH <<< "${{ matrix.platform }}"

          TIMESTAMP=$(date +%Y%m%d%H%M%S)
          echo "Timestamp: $TIMESTAMP"
          HUMAN_READABLE_VERSION=$(date)
          COMMIT_HASH=$(git rev-parse --short HEAD)
          FOLDER_NAME="geth-${{ matrix.platform }}-${VERSION}-${COMMIT_HASH}"
          ARCHIVE_NAME="${FOLDER_NAME}.tar.gz"
          PUBLIC_DOWNLOAD_URL="https://$S3_BUCKET.s3.us-west-1.amazonaws.com/geth-public/$ARCHIVE_NAME"

          binary_name=./build/bin/geth
          if [ "$GOOS" = "windows" ]; then
            binary_name+='.exe'
          fi

          # For linux amd64 upload the binary for internal testing
          if [ "${{ matrix.platform }}" = "linux-amd64" ]; then
            echo "Uploading binary for internal use..."
            aws s3 cp $binary_name s3://$S3_BUCKET/geth/geth-$TIMESTAMP --quiet

            # Update manifest file for linux-amd64 builds only
            aws s3 cp s3://$S3_BUCKET/geth/manifest.txt manifest.txt --quiet || touch manifest.txt
            echo "$TIMESTAMP" >> manifest.txt
            aws s3 cp manifest.txt s3://$S3_BUCKET/geth/manifest.txt --quiet

            # Update version file (header is created on first run).
            aws s3 cp s3://$S3_BUCKET/geth-public/version.txt version.txt --quiet || printf "File Name\t\t\tVersion\t\t\t\tCommit Hash\t\tTimestamp\n" > version.txt
            if [ "${VERSION_META}" != "stable" ]; then
              printf "$VERSION-$COMMIT_HASH\t\t$VERSION\t\t\t$COMMIT_HASH\t\t\t$TIMESTAMP\n" >> version.txt
            else
              printf "$VERSION-$COMMIT_HASH\t\t\t$VERSION\t\t\t\t$COMMIT_HASH\t\t\t$TIMESTAMP\n" >> version.txt
            fi
            aws s3 cp version.txt s3://$S3_BUCKET/geth-public/version.txt --quiet
          fi

          mkdir $FOLDER_NAME
          mv $binary_name $FOLDER_NAME/

          echo "Archiving the geth binary..."
          tar -czvf $ARCHIVE_NAME $FOLDER_NAME
          if [ $? -ne 0 ]; then
            echo "Failed to create the archive: $ARCHIVE_NAME"
            exit 1
          fi

          echo "Uploading $ARCHIVE_NAME to S3..."
          aws s3 cp $ARCHIVE_NAME s3://$S3_BUCKET/geth-public/$ARCHIVE_NAME --quiet
          if [ $? -ne 0 ]; then
            echo "Failed to upload $ARCHIVE_NAME to S3!"
            exit 1
          fi

          # Expose values for the PR-comment step below.
          echo "COMMIT_HASH=$COMMIT_HASH" >> $GITHUB_ENV
          echo "PUBLIC_DOWNLOAD_URL=$PUBLIC_DOWNLOAD_URL" >> $GITHUB_ENV

      - name: Add binary version back to PR
        if: matrix.platform == 'linux-amd64'
        uses: mshick/add-pr-comment@v2
        with:
          message: |
            ### Binary uploaded successfully 🎉
            📦 **Version Name:** ${{ env.VERSION }}-${{ env.COMMIT_HASH }}
            📦 **Download Source:** [AWS S3](${{ env.PUBLIC_DOWNLOAD_URL }})

  cleanup:
    runs-on: ubuntu-latest
    needs: build_and_push
    steps:
      - name: Configure AWS credentials
        # NOTE(review): v1 of this action is deprecated; consider upgrading to v4.
        uses: aws-actions/configure-aws-credentials@v1
        with:
          # NOTE(review): was a hardcoded account ID (478656756051); switched to
          # the same secret the build job uses — confirm both refer to the same
          # account before merging.
          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_SERVICE_STAGING }}:role/iac-max-role
          aws-region: us-west-1
          role-session-name: github-actions

      - name: Cleanup old binaries
        # Deletes everything under a prefix except the newest $KEEP objects,
        # ordered by LastModified. NOTE: list-objects-v2 returns at most 1000
        # keys per call; retention counts here are safely below that.
        run: |
          cleanup_s3() {
            PREFIX=$1
            KEEP=$2
            echo "Cleaning up in bucket $S3_BUCKET with prefix: $PREFIX, keeping latest $KEEP binaries"
            aws s3api list-objects-v2 --bucket $S3_BUCKET --prefix $PREFIX --query "sort_by(Contents,&LastModified)[*].Key" > all_binaries.json

            # Extract the list of keys, remove the latest $KEEP binaries.
            # "(. // [])" guards against an empty prefix, where --query emits
            # "null" and a bare slice/iterate would make jq error out.
            BINARIES_TO_DELETE=$(jq -r "(. // [])[0:-${KEEP}][]" all_binaries.json)

            if [ -n "$BINARIES_TO_DELETE" ]; then
              # Delete old binaries
              for key in $BINARIES_TO_DELETE; do
                aws s3 rm s3://$S3_BUCKET/$key --quiet
              done
              echo "Deleted old binaries: $BINARIES_TO_DELETE"
            else
              echo "No old binaries to delete."
            fi
          }

          # Cleanup internal geth binaries
          cleanup_s3 "geth/" "${NUM_INTERNAL_BINARIES_TO_KEEP}"

          # Cleanup public geth binaries
          cleanup_s3 "geth-public/" "${NUM_PUBLIC_BINARIES_TO_KEEP}"