From a885286e9cb60c676e21829cec17f2474ae4a4c3 Mon Sep 17 00:00:00 2001
From: "Edgar R. M"
Date: Thu, 4 May 2023 12:18:50 -0600
Subject: [PATCH] ci: Use pre-commit.ci (#199)

* ci: Use pre-commit.ci

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Fix files

* Drop black, isort and mypy workflow steps

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
 .github/workflows/test_tap.yml    |  7 -------
 .pre-commit-config.yaml           | 25 ++++++++++++++-----------
 tap_github/authenticator.py       |  6 ++----
 tap_github/client.py              |  6 +++---
 tap_github/repository_streams.py  |  6 ++----
 tap_github/user_streams.py        |  7 +++----
 tap_github/utils/filter_stdout.py |  2 +-
 7 files changed, 25 insertions(+), 34 deletions(-)

diff --git a/.github/workflows/test_tap.yml b/.github/workflows/test_tap.yml
index 74ec9858..5125a505 100644
--- a/.github/workflows/test_tap.yml
+++ b/.github/workflows/test_tap.yml
@@ -55,13 +55,6 @@ jobs:
         run: |
           poetry install
         if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
-      - name: Check formatting with black and isort
-        run: |
-          poetry run black --check .
-          poetry run isort --check .
-      - name: Check typing annotations with mypy
-        run: |
-          poetry run mypy . --ignore-missing-imports
       - name: Test with pytest
         id: test_pytest
         continue-on-error: true
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index fcf66fbb..befc9749 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,29 +1,37 @@
 ci:
-  autofix_prs: false
+  autofix_prs: true
+  autoupdate_schedule: weekly
+  autoupdate_commit_msg: 'chore: pre-commit autoupdate'
 
 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.1.0
+  rev: v4.4.0
   hooks:
   - id: check-json
+    exclude: "\\.vscode/.*.json"
   - id: check-toml
   - id: check-yaml
   - id: end-of-file-fixer
   - id: trailing-whitespace
 
 - repo: https://github.com/asottile/pyupgrade
-  rev: v2.31.0
+  rev: v3.3.2
   hooks:
   - id: pyupgrade
-    args: [--py36-plus]
+    args: [--py37-plus]
 
 - repo: https://github.com/psf/black
-  rev: 22.1.0
+  rev: 23.3.0
   hooks:
   - id: black
 
+- repo: https://github.com/pycqa/isort
+  rev: 5.12.0
+  hooks:
+  - id: isort
+
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v0.931
+  rev: v1.2.0
   hooks:
   - id: mypy
     pass_filenames: true
     additional_dependencies:
     - types-requests
     - types-simplejson
     - types-python-dateutil
-
-- repo: https://github.com/pycqa/isort
-  rev: 5.10.1
-  hooks:
-  - id: isort
diff --git a/tap_github/authenticator.py b/tap_github/authenticator.py
index 65c06373..7c9528d6 100644
--- a/tap_github/authenticator.py
+++ b/tap_github/authenticator.py
@@ -144,10 +144,8 @@ def prepare_tokens(self) -> Dict[str, TokenRateLimit]:
 
         if not (github_private_key):
             self.logger.warning(
-                (
-                    "GITHUB_APP_PRIVATE_KEY could not be parsed. The expected format is "
-                    '":app_id:;;-----BEGIN RSA PRIVATE KEY-----\n_YOUR_P_KEY_\n-----END RSA PRIVATE KEY-----"'
-                )
+                "GITHUB_APP_PRIVATE_KEY could not be parsed. The expected format is "
+                '":app_id:;;-----BEGIN RSA PRIVATE KEY-----\n_YOUR_P_KEY_\n-----END RSA PRIVATE KEY-----"'
             )
         else:
diff --git a/tap_github/client.py b/tap_github/client.py
index b29b19f9..a2f97622 100644
--- a/tap_github/client.py
+++ b/tap_github/client.py
@@ -1,7 +1,7 @@
 """REST client handling, including GitHubStream base class."""
 
 import collections
-import email
+import email.utils
 import inspect
 import random
 import re
@@ -346,7 +346,7 @@ def get_next_page_token(
         has_next_page_indices: List[int] = []
 
         # Iterate over all the items and filter items with hasNextPage = True.
-        for (key, value) in next_page_results.items():
+        for key, value in next_page_results.items():
             # Check if key is even then add pair to new dictionary
             if any(value):
                 pagination_index = int(str(key).split("_")[1])
@@ -362,7 +362,7 @@ def get_next_page_token(
         # We leverage previous_token to remember the pagination cursors
         # for indices below max_pagination_index.
         next_page_cursors: Dict[str, str] = dict()
-        for (key, value) in (previous_token or {}).items():
+        for key, value in (previous_token or {}).items():
             # Only keep pagination info for indices below max_pagination_index.
             pagination_index = int(str(key).split("_")[1])
             if pagination_index < max_pagination_index:
diff --git a/tap_github/repository_streams.py b/tap_github/repository_streams.py
index 7f8a2810..3f3bdfae 100644
--- a/tap_github/repository_streams.py
+++ b/tap_github/repository_streams.py
@@ -125,10 +125,8 @@ def validate_response(self, response: requests.Response) -> None:
                 # not exist, log some details, and move on to the next one
                 repo_full_name = "/".join(repo_list[int(item[4:])])
                 self.logger.info(
-                    (
-                        f"Repository not found: {repo_full_name} \t"
-                        "Removing it from list"
-                    )
+                    f"Repository not found: {repo_full_name} \t"
+                    "Removing it from list"
                 )
                 continue
             repos_with_ids.append(
diff --git a/tap_github/user_streams.py b/tap_github/user_streams.py
index e8eae385..486c8e6c 100644
--- a/tap_github/user_streams.py
+++ b/tap_github/user_streams.py
@@ -48,6 +48,7 @@ def get_user_ids(self, user_list: List[str]) -> List[Dict[str, str]]:
         It also removes non-existant repos and corrects casing to ensure
         data is correct downstream.
         """
+
         # use a temp handmade stream to reuse all the graphql setup of the tap
         class TempStream(GitHubGraphqlStream):
             name = "tempStream"
@@ -97,10 +98,8 @@ def query(self) -> str:
                 # not exist, log some details, and move on to the next one
                 invalid_username = user_list[int(item[4:])]
                 self.logger.info(
-                    (
-                        f"Username not found: {invalid_username} \t"
-                        "Removing it from list"
-                    )
+                    f"Username not found: {invalid_username} \t"
+                    "Removing it from list"
                 )
                 continue
             # the databaseId (in graphql language) is not available on
diff --git a/tap_github/utils/filter_stdout.py b/tap_github/utils/filter_stdout.py
index ea775fed..bfddbe9b 100644
--- a/tap_github/utils/filter_stdout.py
+++ b/tap_github/utils/filter_stdout.py
@@ -5,7 +5,7 @@
 from typing import Pattern, TextIO, Union
 
 
-class FilterStdOutput(object):
+class FilterStdOutput:
     """Filter out stdout/sterr given a regex pattern."""
 
     def __init__(self, stream: TextIO, re_pattern: Union[str, Pattern]):
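
Note on the client.py hunk above: it replaces "import email" with "import email.utils". Importing a package in Python does not necessarily bind its submodules as attributes, so attribute access like email.utils.<something> may only work by accident (because some other module imported the submodule first) and is flagged by mypy. The explicit submodule import removes that dependency. The snippet below is a minimal, hypothetical sketch of that behaviour; the helper name and the RFC 2822 date-header use case are illustrative assumptions, not code from this repository.

    # Hypothetical sketch: why the explicit submodule import matters.
    # "import email" alone does not guarantee that email.utils is bound;
    # "import email.utils" makes the dependency explicit and satisfies mypy.
    import email.utils
    from datetime import datetime, timezone

    def seconds_until(http_date: str) -> float:
        """Return seconds from now until an RFC 2822 date header value."""
        target = email.utils.parsedate_to_datetime(http_date)
        if target.tzinfo is None:
            # Treat naive datetimes as UTC so the subtraction below is valid.
            target = target.replace(tzinfo=timezone.utc)
        return (target - datetime.now(timezone.utc)).total_seconds()

    # Example: a GMT-formatted timestamp for "now" is roughly 0 seconds away.
    print(seconds_until(email.utils.formatdate(usegmt=True)))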