From 848aaa637248e4af8503f93a63a6a80e3679730e Mon Sep 17 00:00:00 2001
From: "geo-ghci-int[bot]" <146321879+geo-ghci-int[bot]@users.noreply.github.com>
Date: Fri, 15 Nov 2024 12:27:24 +0000
Subject: [PATCH] Apply pre-commit fix

From the artifact of the previous workflow run
---
 github_app_geo_project/module/__init__.py     |  3 +--
 .../delete_old_workflow_runs/configuration.py |  4 +--
 github_app_geo_project/module/utils.py        |  6 ++---
 .../module/versions/configuration.py          |  4 +--
 .../scripts/process_queue.py                  | 26 +++++++++----------
 github_app_geo_project/templates/__init__.py  |  4 +--
 tests/test_template.py                        |  4 +--
 7 files changed, 22 insertions(+), 29 deletions(-)

diff --git a/github_app_geo_project/module/__init__.py b/github_app_geo_project/module/__init__.py
index 612b187f19..47f6e0ecf4 100644
--- a/github_app_geo_project/module/__init__.py
+++ b/github_app_geo_project/module/__init__.py
@@ -4,11 +4,10 @@
 import logging
 from abc import abstractmethod
 from types import GenericAlias
-from typing import Any, Generic, Literal, NamedTuple, TypedDict, TypeVar
+from typing import Any, Generic, Literal, NamedTuple, NotRequired, TypedDict, TypeVar
 
 from pydantic import BaseModel, ValidationError
 from sqlalchemy.orm import Session
-from typing_extensions import NotRequired
 
 from github_app_geo_project import configuration
 
diff --git a/github_app_geo_project/module/delete_old_workflow_runs/configuration.py b/github_app_geo_project/module/delete_old_workflow_runs/configuration.py
index 39262243f7..89b41b7b4d 100644
--- a/github_app_geo_project/module/delete_old_workflow_runs/configuration.py
+++ b/github_app_geo_project/module/delete_old_workflow_runs/configuration.py
@@ -1,8 +1,6 @@
 """Automatically generated file from a JSON schema."""
 
-from typing import TypedDict
-
-from typing_extensions import Required
+from typing import Required, TypedDict
 
 
 class DeleteOldWorkflowRunsConfiguration(TypedDict, total=False):
diff --git a/github_app_geo_project/module/utils.py b/github_app_geo_project/module/utils.py
index 0afff10364..afa75c7511 100644
--- a/github_app_geo_project/module/utils.py
+++ b/github_app_geo_project/module/utils.py
@@ -551,7 +551,7 @@ async def run_timeout(
         message.title = f"Find {command[0]}"
         _LOGGER.debug(message)
         return None, False, message
-    except asyncio.TimeoutError as exception:
+    except TimeoutError as exception:
        if async_proc:
            async_proc.kill()
        message = AnsiProcessMessage(
@@ -635,9 +635,7 @@ def create_pull_request(
            pull_request.head.ref,
        )
        # Create an issue it the pull request is open for 5 days
-        if pull_request.created_at < datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(
-            days=5
-        ):
+        if pull_request.created_at < datetime.datetime.now(tz=datetime.UTC) - datetime.timedelta(days=5):
            _LOGGER.warning("Pull request #%s is open for 5 days", pull_request.number)
            title = f"Pull request {message} is open for 5 days"
            body = f"See: #{pull_request.number}"
diff --git a/github_app_geo_project/module/versions/configuration.py b/github_app_geo_project/module/versions/configuration.py
index 293f59b631..bdec75461b 100644
--- a/github_app_geo_project/module/versions/configuration.py
+++ b/github_app_geo_project/module/versions/configuration.py
@@ -1,8 +1,6 @@
 """Automatically generated file from a JSON schema."""
 
-from typing import Any, TypedDict
-
-from typing_extensions import Required
+from typing import Any, Required, TypedDict
 
 VERSION_MAPPING_DEFAULT: dict[str, Any] = {}
 """ Default value of the field path 'Versions configuration version-mapping' """
diff --git a/github_app_geo_project/scripts/process_queue.py b/github_app_geo_project/scripts/process_queue.py
index 5708d0b169..272834349f 100644
--- a/github_app_geo_project/scripts/process_queue.py
+++ b/github_app_geo_project/scripts/process_queue.py
@@ -251,7 +251,7 @@ async def _process_job(
            )
            raise
        job.status = models.JobStatus.DONE if result is None or result.success else models.JobStatus.ERROR
-        job.finished_at = datetime.datetime.now(tz=datetime.timezone.utc)
+        job.finished_at = datetime.datetime.now(tz=datetime.UTC)
        job.log = "\n".join([handler.format(msg) for msg in handler.results])
 
        if result is not None and result.transversal_status is not None:
@@ -302,7 +302,7 @@ async def _process_job(
            new_issue_data = result.dashboard if result is not None else None
    except github.GithubException as exception:
        job.status = models.JobStatus.ERROR
-        job.finished_at = datetime.datetime.now(tz=datetime.timezone.utc)
+        job.finished_at = datetime.datetime.now(tz=datetime.UTC)
        root_logger.addHandler(handler)
        try:
            _LOGGER.exception(
@@ -350,7 +350,7 @@ async def _process_job(
        raise
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired) as proc_error:
        job.status = models.JobStatus.ERROR
-        job.finished_at = datetime.datetime.now(tz=datetime.timezone.utc)
+        job.finished_at = datetime.datetime.now(tz=datetime.UTC)
        message = module_utils.ansi_proc_message(proc_error)
        message.title = f"Error process job '{job.id}' on module: {job.module}"
        root_logger.addHandler(handler)
@@ -384,7 +384,7 @@ async def _process_job(
        raise
    except Exception as exception:
        job.status = models.JobStatus.ERROR
-        job.finished_at = datetime.datetime.now(tz=datetime.timezone.utc)
+        job.finished_at = datetime.datetime.now(tz=datetime.UTC)
        root_logger.addHandler(handler)
        try:
            _LOGGER.exception("Failed to process job id: %s on module: %s", job.id, job.module)
@@ -633,7 +633,7 @@ async def _process_one_job(
            .where(
                models.Queue.status == models.JobStatus.PENDING,
                models.Queue.created_at
-                < datetime.datetime.now(tz=datetime.timezone.utc)
+                < datetime.datetime.now(tz=datetime.UTC)
                - datetime.timedelta(seconds=int(os.environ.get("GHCI_JOB_TIMEOUT_ERROR", 86400))),
            )
            .values(status=models.JobStatus.ERROR)
@@ -644,7 +644,7 @@
            .where(
                models.Queue.status == models.JobStatus.PENDING,
                models.Queue.started_at
-                < datetime.datetime.now(tz=datetime.timezone.utc)
+                < datetime.datetime.now(tz=datetime.UTC)
                - datetime.timedelta(seconds=int(os.environ.get("GHCI_JOB_TIMEOUT", 3600)) + 60),
            )
            .values(status=models.JobStatus.NEW)
@@ -677,14 +677,14 @@ async def _process_one_job(
        if make_pending:
            _LOGGER.info("Make job ID %s pending", job.id)
            job.status = models.JobStatus.PENDING
-            job.started_at = datetime.datetime.now(tz=datetime.timezone.utc)
+            job.started_at = datetime.datetime.now(tz=datetime.UTC)
            session.commit()
            _LOGGER.debug("Process one job (max priority: %i): Make pending", max_priority)
            return False
        try:
            job.status = models.JobStatus.PENDING
-            job.started_at = datetime.datetime.now(tz=datetime.timezone.utc)
+            job.started_at = datetime.datetime.now(tz=datetime.UTC)
            session.commit()
            _NB_JOBS.labels(models.JobStatus.PENDING.name).set(
                session.query(models.Queue).filter(models.Queue.status == models.JobStatus.PENDING).count()
            )
@@ -695,7 +695,7 @@
            if job.event_data.get("type") == "event":
                _process_event(config, job.event_data, session)
                job.status = models.JobStatus.DONE
-                job.finished_at = datetime.datetime.now(tz=datetime.timezone.utc)
+                job.finished_at = datetime.datetime.now(tz=datetime.UTC)
            elif job.event_name == "dashboard":
                success = _validate_job(config, job.application, job.event_data)
                if success:
@@ -711,11 +711,11 @@
                    job.status = models.JobStatus.DONE
                else:
                    job.status = models.JobStatus.ERROR
-                    job.finished_at = datetime.datetime.now(tz=datetime.timezone.utc)
+                    job.finished_at = datetime.datetime.now(tz=datetime.UTC)
            else:
                _LOGGER.error("Unknown event name: %s", job.event_name)
                job.status = models.JobStatus.ERROR
-                job.finished_at = datetime.datetime.now(tz=datetime.timezone.utc)
+                job.finished_at = datetime.datetime.now(tz=datetime.UTC)
                success = False
        else:
            success = _validate_job(config, job.application, job.event_data)
@@ -736,7 +736,7 @@ async def _process_one_job(
        if job.status == models.JobStatus.PENDING:
            _LOGGER.error("Job %s finished with pending status", job.id)
            job.status = models.JobStatus.ERROR
-            job.finished_at = datetime.datetime.now(tz=datetime.timezone.utc)
+            job.finished_at = datetime.datetime.now(tz=datetime.UTC)
        session.commit()
 
        _RUNNING_JOBS.pop(job.id)
@@ -776,7 +776,7 @@ async def __call__(self, *args: Any, **kwds: Any) -> Any:
                )
                if self.end_when_empty and empty:
                    return
-            except asyncio.TimeoutError:
+            except TimeoutError:
                _LOGGER.exception("Timeout")
            except Exception:  # pylint: disable=broad-exception-caught
                _LOGGER.exception("Failed to process job")
diff --git a/github_app_geo_project/templates/__init__.py b/github_app_geo_project/templates/__init__.py
index 48fe2c9ff8..da9da96e12 100644
--- a/github_app_geo_project/templates/__init__.py
+++ b/github_app_geo_project/templates/__init__.py
@@ -1,7 +1,7 @@
 """The mako templates to render the pages."""
 
 import logging
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta, timezone
 
 import html_sanitizer
 import markdown as markdown_lib  # mypy: ignore[import-untyped]
@@ -56,7 +56,7 @@ def pprint_short_date(date_in: str | datetime) -> str:
 
    date = datetime.fromisoformat(date_in) if isinstance(date_in, str) else date_in
 
-    delta = datetime.now(timezone.utc) - date
+    delta = datetime.now(UTC) - date
    if delta.total_seconds() < 1:
        short_date = "now"
    elif delta.total_seconds() < 60:
diff --git a/tests/test_template.py b/tests/test_template.py
index 7edeb0b9fe..c0a53421b5 100644
--- a/tests/test_template.py
+++ b/tests/test_template.py
@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
 
 from github_app_geo_project.templates import markdown, pprint_date, pprint_duration, sanitizer
 
@@ -34,7 +34,7 @@ def test_pprint_date() -> None:
    assert pprint_date(date_str) == expected_output
 
    # Test case when date is now
-    now = datetime.now(timezone.utc)
+    now = datetime.now(UTC)
    date_str = now.isoformat()
    expected_output = 'now'.format(now.strftime("%Y-%m-%d %H:%M:%S"))
    assert pprint_date(date_str) == expected_output
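
Note (not part of the patch): the substitutions above all rely on Python 3.11+ standard-library aliases. The sketch below is illustrative only; the class name _Example is hypothetical and does not appear in the repository.

    # Illustrative only; assumes CPython 3.11 or newer.
    import asyncio
    import datetime
    from typing import NotRequired, Required, TypedDict  # in the stdlib since 3.11


    class _Example(TypedDict, total=False):
        name: Required[str]        # must be present even though total=False
        comment: NotRequired[str]  # may be omitted

    # datetime.UTC (added in 3.11) is an alias of datetime.timezone.utc.
    assert datetime.UTC is datetime.timezone.utc

    # asyncio.TimeoutError is the built-in TimeoutError since 3.11,
    # so `except TimeoutError` also catches asyncio timeouts.
    assert asyncio.TimeoutError is TimeoutError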