From ce56db0ca36b0ea1b935182affb3a752b1f4a8b4 Mon Sep 17 00:00:00 2001 From: Dazhong Xia Date: Wed, 6 Dec 2023 11:08:47 -0500 Subject: [PATCH] Reorganize contributing docs + add process description. (#3044) * Reorganize contributing docs + add process description. * Update conda-lock.yml and rendered conda environment files. * Update for PR comments --------- Co-authored-by: jdangerx --- .github/ISSUE_TEMPLATE/new_dataset.md | 30 ++++ .github/pull_request_template.md | 52 ++----- .github/workflows/build-deploy-pudl.yml | 1 + CONTRIBUTING.rst | 100 ++++++++++++ docs/CONTRIBUTING.rst | 146 +++++++----------- environments/conda-linux-64.lock.yml | 8 +- environments/conda-lock.yml | 112 +++++++------- environments/conda-osx-64.lock.yml | 8 +- environments/conda-osx-arm64.lock.yml | 8 +- .../resources/ferc1_eia_record_linkage.py | 4 +- 10 files changed, 272 insertions(+), 197 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/new_dataset.md create mode 100644 CONTRIBUTING.rst diff --git a/.github/ISSUE_TEMPLATE/new_dataset.md b/.github/ISSUE_TEMPLATE/new_dataset.md new file mode 100644 index 0000000000..f66566062c --- /dev/null +++ b/.github/ISSUE_TEMPLATE/new_dataset.md @@ -0,0 +1,30 @@ +--- +name: New dataset +about: Provide information about a new dataset you'd like to see in PUDL +title: '' +labels: new-data +assignees: '' +--- + +### Overview + +What is this dataset? + +Why do you want it in PUDL? + +Is it already partially in PUDL, or do we need to start from scratch? + +### Logistics + +Is this dataset publically available? + +Where can one download the actual data? + +How often does this dataset get updated? + +What licensing restrictions apply? + +### What do you know about it so far? + +What have you done with this dataset so far? Have you run into any problems with +it yet? diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 325f1bcb8a..24e2d667aa 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,49 +1,25 @@ +# Overview -# PR Overview +Closes #XXXX. - +# Testing -# PR Checklist +How did you make sure this worked? How can a reviewer verify this? -- [ ] Merge the most recent version of the branch you are merging into (probably `dev`). -- [ ] All CI checks are passing. [Run tests locally to debug failures](https://catalystcoop-pudl.readthedocs.io/en/latest/dev/testing.html#running-tests-with-tox) -- [ ] Make sure you've included good docstrings. +```[tasklist] +# To-do list +- [ ] Make sure full ETL runs & `make pytest-integration-full` passes locally - [ ] For major data coverage & analysis changes, [run data validation tests](https://catalystcoop-pudl.readthedocs.io/en/latest/dev/testing.html#data-validation) -- [ ] Include unit tests for new functions and classes. -- [ ] Defensive data quality/sanity checks in analyses & data processing functions. -- [ ] Update the [release notes](https://catalystcoop-pudl.readthedocs.io/en/latest/release_notes.html) and reference reference the PR and related issues. -- [ ] Do your own explanatory review of the PR to help the reviewer understand what's going on and identify issues preemptively. +- [ ] If updating analyses or data processing functions: make sure to update or write data validation tests +- [ ] Update the [release notes](../docs/release_notes.rst): reference the PR and related issues. 
+- [ ] Review the PR yourself and call out any questions or issues you have +``` diff --git a/.github/workflows/build-deploy-pudl.yml b/.github/workflows/build-deploy-pudl.yml index 9419258d18..eec4318ecf 100644 --- a/.github/workflows/build-deploy-pudl.yml +++ b/.github/workflows/build-deploy-pudl.yml @@ -139,4 +139,5 @@ jobs: channel-id: "C03FHB9N0PQ" slack-message: "build-deploy-pudl status: ${{ job.status }}\n${{ env.COMMIT_TIME}}-${{ env.SHORT_SHA }}-${{ env.COMMIT_BRANCH }}" env: + channel-id: "C03FHB9N0PQ" SLACK_BOT_TOKEN: ${{ secrets.PUDL_DEPLOY_SLACK_TOKEN }} diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst new file mode 100644 index 0000000000..fc5e40359a --- /dev/null +++ b/CONTRIBUTING.rst @@ -0,0 +1,100 @@ +------------------------- +Contributing Code to PUDL +------------------------- + +Welcome! We're so glad you're interested in contributing to PUDL! We would love +some help making PUDL data as complete as possible. + +.. _after-intro: + +.. IMPORTANT:: Already have a dataset in mind? + + If you **need data that's not in PUDL**, `open an issue + `__ + to tell us more about it! + + If you've **already written some code to wrangle a dataset**, find us at + `office hours `__ and we + can talk through next steps. + + +Your first contribution +----------------------- + +**Setup** + +You'll need to fork this repository and get the +`dev environment set up `__. + +**Pick an issue** + +* Look for issues with the `good first issue + `__ + tag in our `Community Kanban Board + `__. These + are issues that don't require a ton of PUDL-specific context, and are + relatively tightly scoped. + +* Comment on the issue and tag ``@catalyst-cooperative/com-dev`` (our Community + Development Team) to let us know you're working on it. Feel free to ask any + questions you might have! + +* Once you have an idea of how you want to tackle this issue, write out your + plan so we can guide you around obstacles in your way! Post a comment outlining: + * what steps have you broken this down into? + * what is the output of each step? + * how will one know that each step is working? + +* Once you've talked through your plan with someone from Catalyst, go forth and + develop! + +**Work on it!** + +* Make a branch on your fork and open a draft pull request (PR) early so we can + discuss concrete code! **Set the base branch to ``dev`` unless there's a good + reason otherwise.** Please don't wait until it's all polished up - it's much + easier for us to help you when we can see the code evolve over time. + +* Please make sure to write tests and documentation for your code - if you run + into trouble with writing tests, let us know in the comments and we can help! + We automatically run the test suite for all PRs, but some of those will have + to be manually approved by Catalyst members for safety reasons. + +* **Try to keep your changes relatively small:** stuff happens, and one's + bandwidth for volunteer work can fluctuate frequently. If you make a bunch of + small changes, it's much easier to pause on a project without losing a ton of + context. We try to keep PRs to **less than 500 lines of code.** + +**Get it merged in!** + +* Turn the draft PR into a normal PR and tag ``@catalyst-cooperative/com-dev`` + in a comment. We'll try to get back to you within a few days - the + smaller/simpler the PR, the faster we'll be able to get back to you. + +* The reviewer will leave comments - if they request changes, address their + concerns and re-request review. 
+ +* There will probably be some back-and-forth until your PR is approved - this + is normal and a sign of good communication on your part! Don't be shy about + asking us for updates and re-requesting review! + +* Don't accidentally "start a review" when responding to comments! If this does + happen, don't forget to submit the review you've started so the other PR + participants can see your comments (they are invisible to others if marked + "Pending"). + +Next contributions +------------------ + +Hooray! You made your first contribution! To find another issue to tackle, check +out the `Community Kanban board +`__ where +we've picked out some issues that are + +* useful to work on + +* unlikely to become super time-sensitive + +* have some context, success criteria, and next steps information. + +Pick one of these and follow the contribution flow above! diff --git a/docs/CONTRIBUTING.rst b/docs/CONTRIBUTING.rst index 9e5bffc4c3..0581d1cbe2 100644 --- a/docs/CONTRIBUTING.rst +++ b/docs/CONTRIBUTING.rst @@ -2,111 +2,79 @@ Contributing to PUDL =============================================================================== + Welcome! We're excited that you're interested in contributing to the Public Utility -Data Liberation effort! The work is currently being coordinated by the members of the -`Catalyst Cooperative `__. PUDL is meant to serve a wide -variety of public interests including academic research, climate advocacy, data -journalism, and public policy making. This open source project has been supported by -a combination of volunteer contributions, grant funding from the `Alfred P. Sloan -Foundation `__, and reinvestment of net income from the -cooperative's client projects. +Data Liberation effort! + +We need lots of help with :ref:`user-feedback`, we welcome :ref:`code-contribs`, and +it would be great to :ref:`connect-orgs` that we can work with. + +Finally, `financial donations +`__ +are welcome too! + +--------------- +Code of Conduct +--------------- Please make sure you review our :doc:`code of conduct `, which is based on the `Contributor Covenant `__. We want to make the PUDL project welcoming to contributors with different levels of experience and diverse personal backgrounds. -------------------------------------------------------------------------------- -How to Get Involved -------------------------------------------------------------------------------- +.. _user-feedback: + +------------- +User feedback +------------- -We welcome just about any kind of contribution to the project. Alone, we'll never be -able to understand every use case or integrate all the available data. The project -will serve the community better if other folks get involved. +PUDL's goal is to help people use data to make change in the US energy landscape. +As such, it's critical that we understand our users' needs! `GitHub Discussions +`__ is our main forum +for all this. Since it's publicly readable, any conversation here can +potentially benefit other users too! -There are lots of ways to contribute -- it's not all about code! +We'd love it if you could: -* If you need help, someone else might need it too - ask for help in `Github - Discussions +* Tell us what problems you're running into, in the `Help Me! `__ - and maybe the ensuing discussion will be useful to other people too! -* `Suggest new data and features `__ that would be useful. 
+ discussion board +* Tell us about what data you're looking for by opening an `issue + `__ +* Tell us what you're trying to do with PUDL data in `this thread + `__ * `File bug reports `__ on Github. -* Help expand and improve the documentation, or create new - `example notebooks `__ -* Help us create more and better software :doc:`test cases `. -* Give us feedback on overall usability using `GitHub Discussions +* Tell us what you'd like to see in PUDL in the `Ideas `__ - -- what's confusing? -* Tell us a story about how you're using of the data. -* Point us at interesting publications related to open energy data, open source energy - system modeling, how energy policy can be affected by better data, or open source - tools we should check out. -* Cite PUDL using - `DOIs from Zenodo `__ - if you use the software or data in your own published work. + discussion board + +.. _code-contribs: + +-------------------- +Code contributions +-------------------- + +.. include:: ../CONTRIBUTING.rst + :start-after: after-intro: + +.. _connect-orgs: + +----------------------------------- +Connect us with other organizations +----------------------------------- + +For PUDL to make a bigger impact, we need to find more people who need the data. +Here's how you can help: + +* Cite PUDL using `DOIs from Zenodo + `__ if you use the + software or data in your own published work. * Point us toward appropriate grant funding opportunities and meetings where we might present our work. +* Point us at interesting publications related to open energy data, open source + energy system modeling, how energy policy can be affected by better data, or + open source tools we should check out. * Share your Jupyter notebooks and other analyses that use PUDL. * `Hire Catalyst `__ to do analysis for your organization using the PUDL data -- contract work helps us self-fund ongoing open source development. -* Contribute code via - `pull requests `__. - See the :doc:`developer setup ` for more details. -* And of course... we also appreciate - `financial contributions `__. - -.. seealso:: - - * :doc:`dev/dev_setup` for instructions on how to set up the PUDL - development environment. - -------------------------------------------------------------------------------- -Find us on GitHub -------------------------------------------------------------------------------- -Github is the primary platform we use to manage the project, integrate -contributions, write and publish documentation, answer user questions, automate -testing & deployment, etc. -`Signing up for a GitHub account `__ -(even if you don't intend to write code) will allow you to participate in -online discussions and track projects that you're interested in. - -Asking (and answering) questions is a valuable contribution! As noted in `How to -support open-source software and stay sane -`__, it's much more efficient to -ask and answer questions in a public forum because then other users and contributors -who are having the same problem can find answers without having to re-ask the same -question. The forum we're using is our `Github discussions -`__. - -Even if you feel like you have a basic question, we want you to feel -comfortable asking for help in public -- we (Catalyst) only recently came to -this data work from being activists and policy wonks -- so it's easy for us to -remember when it all seemed frustrating and alien! Sometimes it still does. We -want people to use the software and data to do good things in the world. We -want you to be able to access it. 
Using a public forum also enables the -community of users to help each other! - -Don't hesitate to post a discussion with a `feature request -`__, -a pointer to energy data that needs liberating, or a reference to documentation -that's out of date, unclear, or missing. Understanding how people are using the -software, and how they would *like* to be using the software, is very valuable and -will help us make it more useful and usable. - -------------------------------------------------------------------------------- -Our design process -------------------------------------------------------------------------------- - -We do our technical design out in the open, so that community members can weigh -in. Here's the process we usually follow: - -1. Someone has a problem they'd like to solve. They post in the `Ideas - `__ - forum with their problem and some context. - -2. Discussion ensues. - -3. When the open questions are answered, we create an issue from the discussion, - which holds the conclusions of the discussion. diff --git a/environments/conda-linux-64.lock.yml b/environments/conda-linux-64.lock.yml index 6024660317..62d6796b73 100644 --- a/environments/conda-linux-64.lock.yml +++ b/environments/conda-linux-64.lock.yml @@ -178,9 +178,9 @@ dependencies: - fontconfig=2.14.2=h14ed4e7_0 - freexl=2.0.0=h743c826_0 - frozenlist=1.4.0=py311h459d7ec_1 - - fsspec=2023.12.0=pyhca7485f_0 + - fsspec=2023.12.1=pyhca7485f_0 - gdk-pixbuf=2.42.10=h829c605_4 - - google-cloud-sdk=455.0.0=py311h38be061_0 + - google-cloud-sdk=456.0.0=py311h38be061_0 - greenlet=3.0.1=py311hb755f60_0 - gts=0.7.6=h977cf35_4 - hpack=4.0.0=pyh9f0ad1d_0 @@ -490,7 +490,7 @@ dependencies: - typer=0.9.0=pyhd8ed1ab_0 - uvicorn-standard=0.24.0.post1=h38be061_0 - aws-sdk-cpp=1.11.182=h8beafcf_7 - - boto3=1.33.6=pyhd8ed1ab_0 + - boto3=1.33.7=pyhd8ed1ab_0 - cachecontrol-with-filecache=0.13.1=pyhd8ed1ab_0 - dagster=1.5.10=pyhd8ed1ab_0 - datasette=0.64.4=pyhd8ed1ab_1 @@ -547,7 +547,7 @@ dependencies: - libarrow-dataset=14.0.1=h59595ed_3_cpu - libarrow-flight-sql=14.0.1=h61ff412_3_cpu - nbconvert-pandoc=7.12.0=pyhd8ed1ab_0 - - gcsfs=2023.12.0=pyhd8ed1ab_0 + - gcsfs=2023.12.1=pyhd8ed1ab_0 - jupyter-lsp=2.2.1=pyhd8ed1ab_0 - jupyter-resource-usage=1.0.1=pyhd8ed1ab_0 - jupyterlab_server=2.25.2=pyhd8ed1ab_0 diff --git a/environments/conda-lock.yml b/environments/conda-lock.yml index 6124d63cd0..11cfca6914 100644 --- a/environments/conda-lock.yml +++ b/environments/conda-lock.yml @@ -1914,48 +1914,48 @@ package: category: main optional: false - name: boto3 - version: 1.33.6 + version: 1.33.7 manager: conda platform: linux-64 dependencies: - botocore: ">=1.33.6,<1.34.0" + botocore: ">=1.33.7,<1.34.0" jmespath: ">=0.7.1,<2.0.0" python: ">=3.7" s3transfer: ">=0.8.2,<0.9.0" - url: https://conda.anaconda.org/conda-forge/noarch/boto3-1.33.6-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/boto3-1.33.7-pyhd8ed1ab_0.conda hash: - md5: fff8f43d8786f4e2a0ab4ed431f8c511 - sha256: 7fad398c6730cb751de3495b8204a7cd133aeecdd684273bc3359f31e1c01eca + md5: be6f34d34e000afe25b585cde53f5c3a + sha256: cd9e195dd981604033c64b9068e486feb71b087aaadb61047a14a704d03946a9 category: main optional: false - name: boto3 - version: 1.33.6 + version: 1.33.7 manager: conda platform: osx-64 dependencies: python: ">=3.7" jmespath: ">=0.7.1,<2.0.0" s3transfer: ">=0.8.2,<0.9.0" - botocore: ">=1.33.6,<1.34.0" - url: https://conda.anaconda.org/conda-forge/noarch/boto3-1.33.6-pyhd8ed1ab_0.conda + botocore: ">=1.33.7,<1.34.0" + url: 
https://conda.anaconda.org/conda-forge/noarch/boto3-1.33.7-pyhd8ed1ab_0.conda hash: - md5: fff8f43d8786f4e2a0ab4ed431f8c511 - sha256: 7fad398c6730cb751de3495b8204a7cd133aeecdd684273bc3359f31e1c01eca + md5: be6f34d34e000afe25b585cde53f5c3a + sha256: cd9e195dd981604033c64b9068e486feb71b087aaadb61047a14a704d03946a9 category: main optional: false - name: boto3 - version: 1.33.6 + version: 1.33.7 manager: conda platform: osx-arm64 dependencies: python: ">=3.7" jmespath: ">=0.7.1,<2.0.0" s3transfer: ">=0.8.2,<0.9.0" - botocore: ">=1.33.6,<1.34.0" - url: https://conda.anaconda.org/conda-forge/noarch/boto3-1.33.6-pyhd8ed1ab_0.conda + botocore: ">=1.33.7,<1.34.0" + url: https://conda.anaconda.org/conda-forge/noarch/boto3-1.33.7-pyhd8ed1ab_0.conda hash: - md5: fff8f43d8786f4e2a0ab4ed431f8c511 - sha256: 7fad398c6730cb751de3495b8204a7cd133aeecdd684273bc3359f31e1c01eca + md5: be6f34d34e000afe25b585cde53f5c3a + sha256: cd9e195dd981604033c64b9068e486feb71b087aaadb61047a14a704d03946a9 category: main optional: false - name: botocore @@ -5738,39 +5738,39 @@ package: category: main optional: false - name: fsspec - version: 2023.12.0 + version: 2023.12.1 manager: conda platform: linux-64 dependencies: python: ">=3.8" - url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.0-pyhca7485f_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.1-pyhca7485f_0.conda hash: - md5: 036539452871d3b0906ff194ad808c9b - sha256: 72c84d372aa5d60eb31c53c108bacefb0c6fb854047441b543738e144f1fae65 + md5: b38946846cdf39f9bce93f75f571d913 + sha256: 929e63a5916a8ebc50199d5404fdcedf75261580d8e229d9a1def57a05ef39eb category: main optional: false - name: fsspec - version: 2023.12.0 + version: 2023.12.1 manager: conda platform: osx-64 dependencies: python: ">=3.8" - url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.0-pyhca7485f_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.1-pyhca7485f_0.conda hash: - md5: 036539452871d3b0906ff194ad808c9b - sha256: 72c84d372aa5d60eb31c53c108bacefb0c6fb854047441b543738e144f1fae65 + md5: b38946846cdf39f9bce93f75f571d913 + sha256: 929e63a5916a8ebc50199d5404fdcedf75261580d8e229d9a1def57a05ef39eb category: main optional: false - name: fsspec - version: 2023.12.0 + version: 2023.12.1 manager: conda platform: osx-arm64 dependencies: python: ">=3.8" - url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.0-pyhca7485f_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.1-pyhca7485f_0.conda hash: - md5: 036539452871d3b0906ff194ad808c9b - sha256: 72c84d372aa5d60eb31c53c108bacefb0c6fb854047441b543738e144f1fae65 + md5: b38946846cdf39f9bce93f75f571d913 + sha256: 929e63a5916a8ebc50199d5404fdcedf75261580d8e229d9a1def57a05ef39eb category: main optional: false - name: furo @@ -5822,26 +5822,26 @@ package: category: main optional: false - name: gcsfs - version: 2023.12.0 + version: 2023.12.1 manager: conda platform: linux-64 dependencies: aiohttp: "" decorator: ">4.1.2" - fsspec: 2023.12.0 + fsspec: 2023.12.1 google-auth: ">=1.2" google-auth-oauthlib: "" google-cloud-storage: ">1.40" python: ">=3.7" requests: "" - url: https://conda.anaconda.org/conda-forge/noarch/gcsfs-2023.12.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/gcsfs-2023.12.1-pyhd8ed1ab_0.conda hash: - md5: ad178e852250983982f3e2247635029d - sha256: 39dfbcb02360069206835f395cc0f169ead32005ff0c941a73526cdcfc18cd4b + md5: 2f48942fbcd4c2ef56af0fbf3cc56fb0 + sha256: 
b5e02b08137b4e90d97f4f2d5ff3e06d7076cdba6873061ca2b346576923b093 category: main optional: false - name: gcsfs - version: 2023.12.0 + version: 2023.12.1 manager: conda platform: osx-64 dependencies: @@ -5852,15 +5852,15 @@ package: google-auth: ">=1.2" decorator: ">4.1.2" google-cloud-storage: ">1.40" - fsspec: 2023.12.0 - url: https://conda.anaconda.org/conda-forge/noarch/gcsfs-2023.12.0-pyhd8ed1ab_0.conda + fsspec: 2023.12.1 + url: https://conda.anaconda.org/conda-forge/noarch/gcsfs-2023.12.1-pyhd8ed1ab_0.conda hash: - md5: ad178e852250983982f3e2247635029d - sha256: 39dfbcb02360069206835f395cc0f169ead32005ff0c941a73526cdcfc18cd4b + md5: 2f48942fbcd4c2ef56af0fbf3cc56fb0 + sha256: b5e02b08137b4e90d97f4f2d5ff3e06d7076cdba6873061ca2b346576923b093 category: main optional: false - name: gcsfs - version: 2023.12.0 + version: 2023.12.1 manager: conda platform: osx-arm64 dependencies: @@ -5871,11 +5871,11 @@ package: google-auth: ">=1.2" decorator: ">4.1.2" google-cloud-storage: ">1.40" - fsspec: 2023.12.0 - url: https://conda.anaconda.org/conda-forge/noarch/gcsfs-2023.12.0-pyhd8ed1ab_0.conda + fsspec: 2023.12.1 + url: https://conda.anaconda.org/conda-forge/noarch/gcsfs-2023.12.1-pyhd8ed1ab_0.conda hash: - md5: ad178e852250983982f3e2247635029d - sha256: 39dfbcb02360069206835f395cc0f169ead32005ff0c941a73526cdcfc18cd4b + md5: 2f48942fbcd4c2ef56af0fbf3cc56fb0 + sha256: b5e02b08137b4e90d97f4f2d5ff3e06d7076cdba6873061ca2b346576923b093 category: main optional: false - name: gdal @@ -6623,42 +6623,42 @@ package: category: main optional: false - name: google-cloud-sdk - version: 455.0.0 + version: 456.0.0 manager: conda platform: linux-64 dependencies: python: ">=3.11,<3.12.0a0" python_abi: 3.11.* - url: https://conda.anaconda.org/conda-forge/linux-64/google-cloud-sdk-455.0.0-py311h38be061_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/google-cloud-sdk-456.0.0-py311h38be061_0.conda hash: - md5: 1505ce6a9284c331e05de23b56d023ff - sha256: 17c439e5b01341a4aae55a2f1841878244d25b365cef52b39fb9bfd3e30c8315 + md5: 09eb149adf3420350bd241bda3d5fafe + sha256: f789db836de3cc6557c9250f5819ada0aaafa2f559c09b8fadd5bda703a2acdf category: main optional: false - name: google-cloud-sdk - version: 455.0.0 + version: 456.0.0 manager: conda platform: osx-64 dependencies: python: ">=3.11,<3.12.0a0" python_abi: 3.11.* - url: https://conda.anaconda.org/conda-forge/osx-64/google-cloud-sdk-455.0.0-py311h6eed73b_0.conda + url: https://conda.anaconda.org/conda-forge/osx-64/google-cloud-sdk-456.0.0-py311h6eed73b_0.conda hash: - md5: 0a635aa75ccc84e4dd16e06b559d3d49 - sha256: 8e133ed925ed75409a354b564ff2ebc2ebb3ebdd659f2d190b4c198b164c6f8e + md5: 287686faa4f60a48dae2884822a232ef + sha256: 9ce703e2c3b9df7f01dee5e21c3b40da1088114670ced4347af9ee368f304d4b category: main optional: false - name: google-cloud-sdk - version: 455.0.0 + version: 456.0.0 manager: conda platform: osx-arm64 dependencies: python: ">=3.11,<3.12.0a0" python_abi: 3.11.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/google-cloud-sdk-455.0.0-py311h267d04e_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/google-cloud-sdk-456.0.0-py311h267d04e_0.conda hash: - md5: 2f60b4b18d39e85bdf3557f19bd407be - sha256: 9511a6c98a01a1e5013c73d8e7cb0d1200643e9d531cbc49ebebfb5cd9e71f27 + md5: 321f57034a4eb43fbe44c09d5a270eeb + sha256: 89e0793bafe2d57cf3c656bc22853f7a40eb05b03a34d6c308dcd5ff572ace04 category: main optional: false - name: google-cloud-storage @@ -15566,8 +15566,8 @@ package: dependencies: numpy: "" pandas: "" - typing_extensions: 
"" packaging: "" + typing_extensions: "" pydantic: "" wrapt: "" multimethod: "" @@ -15587,8 +15587,8 @@ package: dependencies: numpy: "" pandas: "" - typing_extensions: "" packaging: "" + typing_extensions: "" pydantic: "" wrapt: "" multimethod: "" diff --git a/environments/conda-osx-64.lock.yml b/environments/conda-osx-64.lock.yml index 40a519d401..9af811d772 100644 --- a/environments/conda-osx-64.lock.yml +++ b/environments/conda-osx-64.lock.yml @@ -167,8 +167,8 @@ dependencies: - executing=2.0.1=pyhd8ed1ab_0 - filelock=3.13.1=pyhd8ed1ab_0 - frozenlist=1.4.0=py311h2725bcf_1 - - fsspec=2023.12.0=pyhca7485f_0 - - google-cloud-sdk=455.0.0=py311h6eed73b_0 + - fsspec=2023.12.1=pyhca7485f_0 + - google-cloud-sdk=456.0.0=py311h6eed73b_0 - greenlet=3.0.1=py311hd39e593_0 - hpack=4.0.0=pyh9f0ad1d_0 - httptools=0.6.1=py311he705e18_0 @@ -471,7 +471,7 @@ dependencies: - typeguard=4.1.5=pyhd8ed1ab_1 - typer=0.9.0=pyhd8ed1ab_0 - uvicorn-standard=0.24.0.post1=h6eed73b_0 - - boto3=1.33.6=pyhd8ed1ab_0 + - boto3=1.33.7=pyhd8ed1ab_0 - cachecontrol-with-filecache=0.13.1=pyhd8ed1ab_0 - dagster=1.5.10=pyhd8ed1ab_0 - datasette=0.64.4=pyhd8ed1ab_1 @@ -526,7 +526,7 @@ dependencies: - jupyter_server=2.11.2=pyhd8ed1ab_0 - libarrow-substrait=14.0.1=h2cc6c1c_3_cpu - nbconvert-pandoc=7.12.0=pyhd8ed1ab_0 - - gcsfs=2023.12.0=pyhd8ed1ab_0 + - gcsfs=2023.12.1=pyhd8ed1ab_0 - jupyter-lsp=2.2.1=pyhd8ed1ab_0 - jupyter-resource-usage=1.0.1=pyhd8ed1ab_0 - jupyterlab_server=2.25.2=pyhd8ed1ab_0 diff --git a/environments/conda-osx-arm64.lock.yml b/environments/conda-osx-arm64.lock.yml index 57b4d6b79e..dc2f270a4e 100644 --- a/environments/conda-osx-arm64.lock.yml +++ b/environments/conda-osx-arm64.lock.yml @@ -167,8 +167,8 @@ dependencies: - executing=2.0.1=pyhd8ed1ab_0 - filelock=3.13.1=pyhd8ed1ab_0 - frozenlist=1.4.0=py311heffc1b2_1 - - fsspec=2023.12.0=pyhca7485f_0 - - google-cloud-sdk=455.0.0=py311h267d04e_0 + - fsspec=2023.12.1=pyhca7485f_0 + - google-cloud-sdk=456.0.0=py311h267d04e_0 - greenlet=3.0.1=py311hbaf5611_0 - hpack=4.0.0=pyh9f0ad1d_0 - httptools=0.6.1=py311h05b510d_0 @@ -471,7 +471,7 @@ dependencies: - typeguard=4.1.5=pyhd8ed1ab_1 - typer=0.9.0=pyhd8ed1ab_0 - uvicorn-standard=0.24.0.post1=ha1ab1f8_0 - - boto3=1.33.6=pyhd8ed1ab_0 + - boto3=1.33.7=pyhd8ed1ab_0 - cachecontrol-with-filecache=0.13.1=pyhd8ed1ab_0 - dagster=1.5.10=pyhd8ed1ab_0 - datasette=0.64.4=pyhd8ed1ab_1 @@ -526,7 +526,7 @@ dependencies: - jupyter_server=2.11.2=pyhd8ed1ab_0 - libarrow-substrait=14.0.1=h594d712_3_cpu - nbconvert-pandoc=7.12.0=pyhd8ed1ab_0 - - gcsfs=2023.12.0=pyhd8ed1ab_0 + - gcsfs=2023.12.1=pyhd8ed1ab_0 - jupyter-lsp=2.2.1=pyhd8ed1ab_0 - jupyter-resource-usage=1.0.1=pyhd8ed1ab_0 - jupyterlab_server=2.25.2=pyhd8ed1ab_0 diff --git a/src/pudl/metadata/resources/ferc1_eia_record_linkage.py b/src/pudl/metadata/resources/ferc1_eia_record_linkage.py index e1a5f89032..c60ecedf3f 100644 --- a/src/pudl/metadata/resources/ferc1_eia_record_linkage.py +++ b/src/pudl/metadata/resources/ferc1_eia_record_linkage.py @@ -23,8 +23,8 @@ Because generators are often owned by multiple utilities, another dimension of this plant part table involves generating two records for each owner: one for the portion of the plant part they own and one for the plant part as a whole. The -portion records are labeled in the "ownership_record_type" column as "owned" -and the total records are labeled as "total". +portion records are labeled in the ``ownership_record_type`` column as ``owned`` +and the total records are labeled as ``total``. 
This table includes A LOT of duplicative information about EIA plants. It is primarily meant for use as an input into the record linkage between FERC1 plants and EIA.""",
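The ``owned``/``total`` convention described in the docstring above is easiest to see with a toy example. The sketch below is illustrative only and does not reflect the table's real schema: apart from ``ownership_record_type``, the column names (``plant_part_id``, ``owner_utility_id_eia``, ``capacity_mw``) are hypothetical placeholders.

```python
# Illustrative sketch only: every column name here except
# ownership_record_type is a hypothetical placeholder, not the real schema.
import pandas as pd

plant_parts = pd.DataFrame(
    {
        "plant_part_id": ["plant_1_gen_a"] * 4,        # one plant part...
        "owner_utility_id_eia": [111, 111, 222, 222],  # ...with two owners
        "ownership_record_type": ["owned", "total", "owned", "total"],
        "capacity_mw": [25.0, 100.0, 75.0, 100.0],
    }
)

# The "owned" slices of a plant part sum back to the whole plant part.
owned = plant_parts[plant_parts["ownership_record_type"] == "owned"]
assert owned["capacity_mw"].sum() == 100.0

# Each owner also gets a "total" record covering the whole plant part, so
# summing without filtering on ownership_record_type double counts capacity.
totals = plant_parts[plant_parts["ownership_record_type"] == "total"]
assert totals["capacity_mw"].sum() == 200.0
```

Filtering to a single ``ownership_record_type`` before aggregating is what avoids this double counting when the table is used for anything beyond the FERC1–EIA record linkage itself.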