gcs_cache_path := --gcs-cache-path=gs://zenodo-cache.catalyst.coop
covargs := --append
pytest_args := --durations 20 ${gcs_cache_path} --cov-fail-under=0
etl_fast_yml := src/pudl/package_data/settings/etl_fast.yml
etl_full_yml := src/pudl/package_data/settings/etl_full.yml
# We use mamba locally, but micromamba in CI, so choose the right binary:
ifdef GITHUB_ACTIONS
mamba := micromamba
else
mamba := mamba
endif
# Tell make to look in the environments and output directory for targets and sources.
VPATH = environments:${PUDL_OUTPUT}
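
# With VPATH set, make also searches these directories when resolving targets
# and prerequisites given as bare filenames, so e.g. the conda-lock.yml target
# below is matched against environments/conda-lock.yml.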
########################################################################################
# Targets for starting up interactive web-interfaces
# Note that these commands do not return until you quit out of the server with ^C
########################################################################################
.PHONY: dagster
dagster:
	dagster-webserver

.PHONY: jlab
jlab:
	jupyter lab --no-browser
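
# For example, "make dagster" serves the Dagster UI (on http://localhost:3000
# by default) and "make jlab" prints a tokenized URL for the JupyterLab
# session; both run until interrupted with ^C.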
########################################################################################
# Conda lockfile generation and environment creation
########################################################################################
# Remove pre-existing conda lockfile and rendered environment files
.PHONY: conda-clean
conda-clean:
	rm -f environments/conda-*lock.yml
# Regenerate the conda lockfile and render platform specific conda environments.
conda-lock.yml: pyproject.toml
	${mamba} run --name base ${mamba} install --quiet --yes "conda-lock>=2.5.7" prettier
	${mamba} run --name base conda-lock \
		--${mamba} \
		--file=pyproject.toml \
		--lockfile=environments/conda-lock.yml
	cd environments && ${mamba} run --name base conda-lock render \
		--kind env \
		--dev-dependencies \
		conda-lock.yml
	${mamba} run --name base prettier --write environments/*.yml
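
# A typical invocation after changing dependencies in pyproject.toml would be
# "make conda-clean conda-lock.yml" to rebuild the lockfile and the rendered
# per-platform environment files from scratch.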
# Create the pudl-dev conda environment based on the universal lockfile
.PHONY: pudl-dev
pudl-dev:
	${mamba} run --name base ${mamba} install --quiet --yes "conda-lock>=2.5.7"
# Only attempt to remove the pudl-dev environment if it already exists.
	if ${mamba} env list | grep -q pudl-dev; then \
		${mamba} env remove --quiet --yes --name pudl-dev; \
	fi
	${mamba} run --name base conda-lock install \
		--name pudl-dev \
		--${mamba} \
		--dev environments/conda-lock.yml
	echo "To activate the fresh environment run: mamba activate pudl-dev"
.PHONY: install-pudl
install-pudl: pudl-dev
	${mamba} run --name pudl-dev pip install --no-cache-dir --no-deps --editable .
	echo "To activate the fresh environment run: mamba activate pudl-dev"
########################################################################################
# Build documentation for local use or testing
########################################################################################
.PHONY: docs-clean
docs-clean:
	rm -rf docs/_build
	rm -rf docs/autoapi
	rm -f docs/data_dictionaries/pudl_db.rst
	rm -f docs/data_dictionaries/codes_and_labels.rst
	rm -rf docs/data_dictionaries/code_csvs
	rm -f docs/data_sources/eia*.rst
	rm -f docs/data_sources/epacems*.rst
	rm -f docs/data_sources/ferc*.rst
	rm -f docs/data_sources/gridpathratoolkit*.rst
	rm -f docs/data_sources/phmsagas*.rst
# Note that there's some PUDL code which only gets run when we generate the docs, so
# we want to generate coverage from the docs build.
.PHONY: docs-build
docs-build: docs-clean
	doc8 docs/ README.rst
	coverage run ${covargs} -- ${CONDA_PREFIX}/bin/sphinx-build --jobs auto -v -W -b html docs docs/_build/html
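
# After "make docs-build" succeeds, the rendered HTML can be previewed by
# opening docs/_build/html/index.html in a browser.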
########################################################################################
# Running the Full ETL
# NOTE: these commands will clobber your existing databases, and may take an hour or
# more to run.
########################################################################################
# Extract all FERC DBF and XBRL data to SQLite.
.PHONY: ferc
ferc:
	rm -f ${PUDL_OUTPUT}/ferc*.sqlite
	rm -f ${PUDL_OUTPUT}/ferc*_xbrl_datapackage.json
	rm -f ${PUDL_OUTPUT}/ferc*_xbrl_taxonomy_metadata.json
	coverage run ${covargs} -- src/pudl/ferc_to_sqlite/cli.py ${gcs_cache_path} ${etl_full_yml}
# Remove the existing PUDL DB if it exists.
# Create a new empty DB using alembic.
# Run the full PUDL ETL.
.PHONY: pudl
pudl:
	rm -f ${PUDL_OUTPUT}/pudl.sqlite
	alembic upgrade head
	coverage run ${covargs} -- src/pudl/etl/cli.py ${gcs_cache_path} ${etl_full_yml}
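
# The PUDL ETL reads the FERC SQLite DBs produced above, so a full rebuild is
# typically run in dependency order: "make ferc pudl".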
########################################################################################
# Targets that are coordinated by pytest -- mostly they're actual tests.
########################################################################################
.PHONY: pytest-unit
pytest-unit:
	pytest ${pytest_args} --doctest-modules src/pudl test/unit
.PHONY: pytest-integration
pytest-integration:
	pytest ${pytest_args} --etl-settings ${etl_fast_yml} test/integration
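
# The variables defined at the top of this file can be overridden per
# invocation, since command-line assignments take precedence in make. For
# example, to stop the unit tests at the first failure you might run:
#   make pytest-unit pytest_args="--no-cov -x"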
.PHONY: coverage-erase
coverage-erase:
	coverage erase
.PHONY: pytest-ci
pytest-ci: pytest-unit pytest-integration
.PHONY: pytest-coverage
pytest-coverage: coverage-erase docs-build pytest-ci
	coverage report
.PHONY: pytest-integration-full
pytest-integration-full:
	pytest ${pytest_args} -n 4 --no-cov --live-dbs --etl-settings ${etl_full_yml} test/integration
.PHONY: pytest-validate
pytest-validate:
	pudl_check_fks
	pytest ${pytest_args} -n 4 --no-cov --live-dbs test/validate
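
# pudl_check_fks is a console script installed with PUDL that verifies foreign
# key relationships in the live PUDL DB before the slower data validations run.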
# Run the full ETL, generating new FERC & PUDL SQLite DBs and EPA CEMS Parquet files.
# Then run the full integration tests and data validations on all years of data.
# NOTE: This will clobber your existing databases and takes hours to run!!!
.PHONY: nuke
nuke: coverage-erase docs-build pytest-unit ferc pudl
	pudl_check_fks
	pytest ${pytest_args} -n 4 --live-dbs --etl-settings ${etl_full_yml} test/integration
	pytest ${pytest_args} -n 4 --live-dbs test/validate
	coverage report
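
# Because a nuke run takes hours, one way to run it unattended is to background
# the whole job and capture a log, e.g.:
#   nohup make nuke > nuke.log 2>&1 &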
# Check that designated Jupyter notebooks can be run against the current DB.
.PHONY: pytest-jupyter
pytest-jupyter:
	pytest --live-dbs test/integration/jupyter_notebooks_test.py
# Compare actual and expected numbers of rows in many tables. This will run any
# test whose name contains "minmax_rows", so it's important to follow that
# naming convention.
.PHONY: pytest-minmax-rows
pytest-minmax-rows:
	pytest -n 8 --no-cov --live-dbs test/validate -k minmax_rows
# Build the FERC 1 and PUDL DBs, ignoring foreign key constraints.
# Identify any plant or utility IDs in the DBs that haven't yet been mapped.
# NOTE: This probably needs to be turned into a script of some kind, not a test;
# in particular, it should build these DBs without checking FK constraints, in a
# location where they aren't going to clobber existing user DBs.
.PHONY: unmapped-ids
unmapped-ids:
	pytest \
		--save-unmapped-ids \
		--ignore-foreign-key-constraints \
		--etl-settings ${etl_full_yml} \
		test/integration/glue_test.py
########################################################################################
# Continuous Integration Tests
########################################################################################
.PHONY: pre-commit
pre-commit:
	pre-commit run --all-files
# This target will run all the tests that typically take place in our
# continuous integration tests on GitHub (with the exception of building our
# Docker container).
.PHONY: ci
ci: pre-commit pytest-coverage