# This workflow runs in response to a commit made by AWS Proton.
# It only works when exactly one resource is modified as part of the commit.
name: 'proton-run'
on:
pull_request:
types:
- opened
- reopened
paths:
- '**/.proton/deployment-metadata.json'
push:
branches:
- main
paths:
- '**/.proton/deployment-metadata.json'
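# For illustration only (hypothetical directory name): a Proton-generated commit that updates
#   my-env/.proton/deployment-metadata.json
# matches the path filter above and triggers this workflow.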
jobs:
get-deployment-data:
name: Get Deployment Data
runs-on: ubuntu-latest
outputs:
role_arn: ${{ steps.get-data.outputs.role_arn }}
environment: ${{ steps.get-data.outputs.environment }}
resource_arn: ${{ steps.get-data.outputs.resource_arn }}
working_directory: ${{ steps.get-data.outputs.working_directory }}
deployment_id: ${{ steps.get-data.outputs.deployment_id }}
target_region: ${{ steps.get-data.outputs.target_region }}
proton_region: ${{ steps.get-data.outputs.proton_region }}
state_bucket: ${{ steps.get-data.outputs.state_bucket }}
permissions:
id-token: write
contents: read
continue-on-error: true
steps:
# Checkout the repository to the GitHub Actions runner
- name: Checkout
uses: actions/checkout@v2
- name: Get changed files
id: files
uses: jitterbit/get-changed-files@v1
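      # Assumption made by the next step: steps.files.outputs.all is a space-delimited list of
      # changed paths, e.g. "my-env/.proton/deployment-metadata.json README.md", which is iterated
      # over with shell word splitting (paths containing spaces are not supported).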
- name: Find modified resource
id: find-modified
run: |
found=false
for changed_file in ${{ steps.files.outputs.all }}; do
if [[ "$changed_file" == *".proton/deployment-metadata.json" ]]; then
echo "found file"
if [[ "$found" == true ]]; then
echo "More than one resource found to have a new deployment, I'm not sure which one to update, exiting."
exit 1
fi
echo "setting found to true"
found=true
echo "setting outputs"
echo "::set-output name=deployment-metadata-path::$changed_file"
fi
done
if [[ "$found" == false ]]; then
echo "No change made to deployment-metadata.json, exiting"
exit 1
fi
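      # Illustrative sketch (not the full schema) of the fields the next step reads from the
      # .proton/deployment-metadata.json written by AWS Proton; the real file may contain more:
      #   {
      #     "deploymentId": "1f2e3d4c-...",
      #     "resourceMetadata": {
      #       "arn": "arn:aws:proton:us-east-1:123456789012:environment/my-env",
      #       "environmentArn": "arn:aws:proton:us-east-1:123456789012:environment/my-env"
      #     }
      #   }
      # For a service instance, resourceMetadata.arn is the service-instance ARN and environmentArn
      # points at the environment the instance is deployed into.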
- name: Get data
id: get-data
run: |
modified_resource_arn=$(jq -r '.resourceMetadata.arn' ${{ steps.find-modified.outputs.deployment-metadata-path }})
echo "::set-output name=resource_arn::$modified_resource_arn"
IFS=':'
read -a split_arn <<< "$modified_resource_arn"
proton_region=${split_arn[3]}
echo "::set-output name=proton_region::$proton_region"
deployment_id=$(jq -r '.deploymentId' ${{ steps.find-modified.outputs.deployment-metadata-path }})
echo "::set-output name=deployment_id::$deployment_id"
if [[ "$modified_resource_arn" == *":environment/"* ]]; then
environment_name=${modified_resource_arn##*/}
working_directory="$environment_name/"
elif [[ "$modified_resource_arn" == *"/service-instance/"* ]]; then
environment_arn=$(jq -r '.resourceMetadata.environmentArn' ${{ steps.find-modified.outputs.deployment-metadata-path }})
environment_name=${environment_arn##*/}
resource_portion=${modified_resource_arn##*:}
IFS='/'
read -a split_resources <<< "$resource_portion"
service_name=${split_resources[1]}
instance_name=${split_resources[3]}
working_directory=$environment_name/$service_name-$instance_name/
elif [[ "$modified_resource_arn" == *"/pipeline"* ]]; then
environment_name="pipeline"
resource_portion=${modified_resource_arn##*:}
IFS='/'
read -a split_resources <<< "$resource_portion"
service_name=${split_resources[1]}
working_directory=pipeline/$service_name
fi
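          # Illustrative mapping (hypothetical ARN fragments) produced by the branches above:
          #   ...:environment/my-env                            -> working_directory=my-env/
          #   ...:service/my-svc/service-instance/my-instance   -> working_directory=<instance's environment>/my-svc-my-instance/
          #   ...:service/my-svc/pipeline/<pipeline-id>         -> working_directory=pipeline/my-svc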
if [[ $(jq -r --arg env $environment_name 'has($env)' env_config.json) = "false" ]]; then
echo "Missing $env from env_config.json, existing"
exit 1
fi
echo "::set-output name=working_directory::$working_directory"
echo "::set-output name=environment::$environment_name"
role_arn=$(jq -r --arg env $environment_name '.[$env]["role"]' env_config.json)
echo "::set-output name=role_arn::$role_arn"
target_region=$(jq -r --arg env $environment_name '.[$env]["region"]' env_config.json)
echo "::set-output name=target_region::$target_region"
state_bucket=$(jq -r --arg env $environment_name '.[$env]["state_bucket"]' env_config.json)
echo "::set-output name=state_bucket::$state_bucket"
terraform:
name: 'Terraform'
needs: get-deployment-data
runs-on: ubuntu-latest
environment: ${{ needs.get-deployment-data.outputs.environment }}
outputs:
tf_outputs: ${{ steps.tf_get_outputs.outputs.tf_outputs }}
permissions:
id-token: write
contents: read
defaults:
run:
working-directory: ${{ needs.get-deployment-data.outputs.working_directory }}
        shell: bash # Use the Bash shell regardless of whether the GitHub Actions runner is ubuntu-latest, macos-latest, or windows-latest
if: needs.get-deployment-data.result == 'success'
continue-on-error: true
steps:
# Checkout the repository to the GitHub Actions runner
- name: Checkout
uses: actions/checkout@v2
- name: Configure AWS Credentials
id: assume_role
uses: aws-actions/configure-aws-credentials@v1
with:
aws-region: ${{ needs.get-deployment-data.outputs.target_region }}
role-to-assume: ${{ needs.get-deployment-data.outputs.role_arn }}
role-session-name: TF-Github-Actions
# Install the latest version of Terraform CLI and configure the Terraform CLI configuration file with a Terraform Cloud user API token
- name: Setup Terraform
id: tf_setup
uses: hashicorp/setup-terraform@v1
with:
terraform_version: 1.0.7
terraform_wrapper: false
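      # terraform_wrapper is disabled so that later `terraform output -json` calls return raw JSON on
      # stdout; with the wrapper enabled, the command substitution in the "Terraform Get Outputs" step
      # would likely pick up extra wrapper output and break the jq parsing.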
# Initialize a new or existing Terraform working directory by creating initial files, loading any remote state, downloading modules, etc.
- name: Terraform Init
id: tf_init
run: terraform init -backend-config="bucket=${{ needs.get-deployment-data.outputs.state_bucket }}" -backend-config="key=${{ needs.get-deployment-data.outputs.working_directory }}terraform.tfstate" -backend-config="region=${{ needs.get-deployment-data.outputs.target_region }}"
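      # For example (hypothetical values): with working_directory "my-env/" and state bucket
      # "my-terraform-state-bucket", the state ends up at s3://my-terraform-state-bucket/my-env/terraform.tfstate.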
# Checks that all Terraform configuration files adhere to a canonical format
- name: Terraform Format
id: tf_fmt
run: terraform fmt -diff -check
# Generates an execution plan for Terraform
- name: Terraform Plan
id: tf_plan
run: terraform plan -var="aws_region=${{ needs.get-deployment-data.outputs.target_region }}"
# On push to main, build or change infrastructure according to Terraform configuration files
      # Note: It is recommended to set up a required status check for this workflow in your repository so that a failed plan blocks the merge. See the documentation on required status checks for more information: https://help.github.com/en/github/administering-a-repository/types-of-required-status-checks
- name: Terraform Apply
id: tf_apply
if: github.ref == 'refs/heads/main' && github.event_name == 'push'
run: terraform apply -auto-approve -var="aws_region=${{ needs.get-deployment-data.outputs.target_region }}"
- name: Terraform Get Outputs
id: tf_get_outputs
if: github.ref == 'refs/heads/main' && github.event_name == 'push'
run: |
# Get outputs as json
outputs_json=$(terraform output -json)
          # The --outputs parameter expects a list of key=keyName,valueString=value key=key2Name,valueString=value2 etc...
          # So here we convert the output JSON into a shell array
          # NOTE: This will probably not play nicely with complex output objects (non-primitives)
          formatted_outputs=( $(echo $outputs_json | jq -r "to_entries|map(\"key=\(.key),valueString=\(.value.value|tostring)\")|.[]") )
          echo "::set-output name=tf_outputs::${formatted_outputs[*]}"
notify-proton:
name: 'Notify Proton'
needs:
- get-deployment-data
- terraform
runs-on: ubuntu-latest
environment: ${{ needs.get-deployment-data.outputs.environment }}
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
permissions:
id-token: write
contents: read
steps:
- name: Configure AWS Credentials
id: assume_role
uses: aws-actions/configure-aws-credentials@v1
with:
aws-region: ${{ needs.get-deployment-data.outputs.proton_region }}
role-to-assume: ${{ needs.get-deployment-data.outputs.role_arn }}
role-session-name: TF-Github-Actions-Notify-Proton
# This is a temporary measure until this feature exits Public Preview
- name: Install Proton Model
if: github.ref == 'refs/heads/main' && github.event_name == 'push'
run: |
aws s3 cp s3://aws-proton-preview-public-files/model/proton-2020-07-20.normal.json .
- name: Notify Proton Success
id: notify_success
if: needs.terraform.result == 'success'
run: |
# Notify proton
aws proton notify-resource-deployment-status-change --region ${{ needs.get-deployment-data.outputs.proton_region }} --resource-arn ${{ needs.get-deployment-data.outputs.resource_arn }} --status SUCCEEDED --deployment-id ${{ needs.get-deployment-data.outputs.deployment_id }} --outputs ${{ needs.terraform.outputs.tf_outputs }}
echo "Notify success!"
- name: Notify Proton Failure
if: needs.terraform.result == 'failure' || needs.terraform.result == 'cancelled'
run: |
aws proton notify-resource-deployment-status-change --region ${{ needs.get-deployment-data.outputs.proton_region }} --resource-arn ${{ needs.get-deployment-data.outputs.resource_arn }} --status FAILED --deployment-id ${{ needs.get-deployment-data.outputs.deployment_id }}
echo "Notify failure!"