-
Notifications
You must be signed in to change notification settings - Fork 19
143 lines (117 loc) · 3.98 KB
/
ci-cd-dev.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
---
# CI/CD pipeline for the dev Airflow (Cloud Composer) environment.
# Flow: lint (pre-commit) -> tests -> deploy DAGs to the dev bucket;
# on merge, open a promotion PR from master into the release branch.
name: CI-CD-DEV

on:
  pull_request:
    types:
      - opened
      - reopened
      - synchronize
      - closed
    branches:
      - master

jobs:
  pre-commit:
    runs-on: ubuntu-latest
    # Lint runs only while the PR is still open; merged/closed PRs skip it
    # (and therefore skip `tests` and `deploy-to-dev`, which chain off it).
    if: >-
      github.event.pull_request.merged == false &&
      github.event.pull_request.state == 'open'
    steps:
      - uses: actions/checkout@v3
      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          # Quoted so YAML does not type it as a float.
          python-version: "3.8"
      # Collects the list of files changed in this PR (space-separated).
      - id: file_changes
        uses: trilom/[email protected]
        with:
          output: " "
      - uses: pre-commit/[email protected]
        # FIX: `extra_args` is an input of pre-commit/action and must be
        # passed via `with:`, not `env:` — under `env:` it was silently
        # ignored, so pre-commit did not restrict itself to changed files.
        with:
          extra_args: --color=always --files ${{ steps.file_changes.outputs.files}}

  tests:
    runs-on: ubuntu-latest
    needs: [pre-commit]
    steps:
      - uses: actions/checkout@v3
      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.8"
      - name: Install dependencies
        # Strips the Composer GCS mount prefix from DAG paths so the
        # variables resolve on the CI runner's local filesystem.
        run: |
          cat airflow_variables_dev.json | sed -e s/\\/home\\/airflow\\/gcs\\/dags\\/// > airflow_variables_ci.json
          python -m pip install --upgrade pip
          pip install -r requirements-ci.txt
      - name: Init Airflow SQLite database
        run: airflow db init
      - name: Import Airflow variables
        run: airflow variables import airflow_variables_ci.json
      - name: Authenticate to test-hubble GCP
        uses: google-github-actions/auth@v1
        with:
          credentials_json: "${{ secrets.CREDS_TEST_HUBBLE }}"
      - id: "get-credentials"
        uses: "google-github-actions/get-gke-credentials@v2"
        with:
          cluster_name: "us-central1-test-hubble-2-5f1f2dbf-gke"
          location: "us-central1"
      - name: Pytest
        run: pytest dags/

  deploy-to-dev:
    runs-on: ubuntu-latest
    needs: [tests]
    # deploy to dev occurs every time
    # someone submits a pr targeting `master`
    # from a branch at `stellar/stellar-etl-airflow` repo
    if: github.repository == 'stellar/stellar-etl-airflow'
    # known caveats:
    # if there's more than 1 person working
    # in the same file this won't behave nicely
    steps:
      - uses: actions/checkout@v3
      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.8"
      - name: Install dependencies
        run: |
          pip install --upgrade pip
          pip install google-cloud-storage==2.1.0
      - name: Authenticate to test-hubble GCP
        uses: google-github-actions/auth@v1
        with:
          credentials_json: "${{ secrets.CREDS_TEST_HUBBLE }}"
      - name: Upload files to dev GCS bucket
        run: python dags/stellar_etl_airflow/add_files_to_composer.py --bucket $BUCKET
        env:
          GOOGLE_CLOUD_PROJECT: test-hubble-319619
          BUCKET: us-central1-test-hubble-2-5f1f2dbf-bucket
      - name: Update Airflow variables
        uses: actions-hub/gcloud@master
        env:
          PROJECT_ID: test-hubble-319619
          APPLICATION_CREDENTIALS: "${{ secrets.CREDS_TEST_HUBBLE }}"
          COMPOSER_ENVIRONMENT: test-hubble-2
          LOCATION: us-central1
        with:
          args: >
            components install kubectl && gcloud composer environments run
            $COMPOSER_ENVIRONMENT --location $LOCATION variables import
            -- gcsfuse/actual_mount_path/variables.json

  promote-to-prod:
    runs-on: ubuntu-latest
    # deploy only occurs when pr is merged
    if: github.event.pull_request.merged == true
    permissions:
      pull-requests: write
    steps:
      - uses: actions/checkout@v3
      - name: Create pull request
        run: >
          gh pr create
          --base release
          --head master
          --title "[PRODUCTION] Update production Airflow environment"
          --body "This PR was auto-generated by GitHub Actions.
          After merged and closed, this PR will trigger an action that updates DAGs, libs and schemas files from prod Airflow."
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}