diff --git a/github_app_geo_project/configuration.py b/github_app_geo_project/configuration.py
index 7432110d86b..709bbb06c34 100644
--- a/github_app_geo_project/configuration.py
+++ b/github_app_geo_project/configuration.py
@@ -101,13 +101,21 @@ def get_github_application(config: dict[str, Any], application_name: str) -> Git
 def get_github_project(
-    config: dict[str, Any], application: GithubApplication | str, owner: str, repository: str
+    config: dict[str, Any],
+    application: GithubApplication | str,
+    owner: str,
+    repository: str,
 ) -> GithubProject:
     """Get the Github Application by name."""
     objects = get_github_application(config, application) if isinstance(application, str) else application
     token = objects.integration.get_access_token(objects.integration.get_installation(owner, repository).id)
-    _LOGGER.debug("Generate token for %s/%s that expire at: %s", owner, repository, token.expires_at)
+    _LOGGER.debug(
+        "Generate token for %s/%s that expire at: %s",
+        owner,
+        repository,
+        token.expires_at,
+    )
     github_application = github.Github(login_or_token=token.token)
     repo = github_application.get_repo(f"{owner}/{repository}")
@@ -138,7 +146,8 @@ def get_configuration(
     return jsonmerge.merge(  # type: ignore[no-any-return]
         APPLICATION_CONFIGURATION.get("profiles", {}).get(
-            project_custom_configuration.get("profile", APPLICATION_CONFIGURATION.get("default-profile")), {}
+            project_custom_configuration.get("profile", APPLICATION_CONFIGURATION.get("default-profile")),
+            {},
         ),
         project_custom_configuration,
     )
diff --git a/github_app_geo_project/models.py b/github_app_geo_project/models.py
index 6615773d4dd..1fe88c4f339 100644
--- a/github_app_geo_project/models.py
+++ b/github_app_geo_project/models.py
@@ -47,7 +47,10 @@ class Queue(Base):
         index=True,
     )
     created_at: Mapped[datetime] = mapped_column(
-        DateTime(timezone=True), nullable=False, server_default=sqlalchemy.sql.functions.now(), index=True
+        DateTime(timezone=True),
+        nullable=False,
+        server_default=sqlalchemy.sql.functions.now(),
+        index=True,
     )
     started_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=True)
     finished_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=True)
@@ -99,10 +102,15 @@ class Output(Base):
     id: Mapped[int] = mapped_column(Integer, primary_key=True, nullable=False, autoincrement=True)
     created_at: Mapped[datetime] = mapped_column(
-        DateTime(timezone=True), nullable=False, server_default=sqlalchemy.sql.functions.now(), index=True
+        DateTime(timezone=True),
+        nullable=False,
+        server_default=sqlalchemy.sql.functions.now(),
+        index=True,
     )
     status: Mapped[OutputStatus] = mapped_column(
-        Enum(OutputStatus, create_type=False, native_enum=False), nullable=False, index=True
+        Enum(OutputStatus, create_type=False, native_enum=False),
+        nullable=False,
+        index=True,
     )
     owner: Mapped[str] = mapped_column(Unicode, nullable=False)
     repository: Mapped[str] = mapped_column(Unicode, nullable=False, index=True)
diff --git a/github_app_geo_project/module/__init__.py b/github_app_geo_project/module/__init__.py
index 47f6e0ecf4e..9f97228f83a 100644
--- a/github_app_geo_project/module/__init__.py
+++ b/github_app_geo_project/module/__init__.py
@@ -410,7 +410,10 @@ def transversal_status_from_json(self, data: dict[str, Any] | None) -> _TRANSVER
             try:
                 return generic_element(**data)  # type: ignore[no-any-return]
             except ValidationError:
-                _LOGGER.error("Invalid transversal status, try with empty transversal status: %s", data)
+                _LOGGER.error(
+                    "Invalid transversal status, try with empty transversal status: %s",
+                    data,
+                )
transversal status: %s", + data, + ) return generic_element() # type: ignore[no-any-return] return data # type: ignore[return-value] diff --git a/github_app_geo_project/module/audit/__init__.py b/github_app_geo_project/module/audit/__init__.py index 36e76863dc7..9732495c84d 100644 --- a/github_app_geo_project/module/audit/__init__.py +++ b/github_app_geo_project/module/audit/__init__.py @@ -90,7 +90,8 @@ def _process_error( output_url = urllib.parse.urljoin(context.service_url, f"output/{output_id}") issue_check.set_title( - key, f"{key}: {message} ([Error]({output_url}))" if message else f"{key} ([Error]({output_url}))" + key, + (f"{key}: {message} ([Error]({output_url}))" if message else f"{key} ([Error]({output_url}))"), ) elif message: issue_check.set_title(key, f"{key}: {message}") @@ -115,10 +116,20 @@ def _process_outdated( except github.GithubException as exception: if exception.status == 404: _LOGGER.debug("No SECURITY.md file in the repository") - _process_error(context, _OUTDATED, issue_check, message="No SECURITY.md file in the repository") + _process_error( + context, + _OUTDATED, + issue_check, + message="No SECURITY.md file in the repository", + ) else: _LOGGER.exception("Error while getting SECURITY.md") - _process_error(context, _OUTDATED, issue_check, message="Error while getting SECURITY.md") + _process_error( + context, + _OUTDATED, + issue_check, + message="Error while getting SECURITY.md", + ) raise @@ -203,7 +214,8 @@ async def _process_snyk_dpkg( ".github/dpkg-versions.yaml" ): await audit_utils.dpkg( - context.module_config.get("dpkg", {}), local_config.get("dpkg", {}) + context.module_config.get("dpkg", {}), + local_config.get("dpkg", {}), ) body_md += "\n" if body_md else "" @@ -215,7 +227,10 @@ async def _process_snyk_dpkg( ) if diff_proc.returncode != 0: proc = subprocess.run( # nosec # pylint: disable=subprocess-run-check - ["git", "diff"], timeout=30, capture_output=True, encoding="utf-8" + ["git", "diff"], + timeout=30, + capture_output=True, + encoding="utf-8", ) message = module_utils.ansi_proc_message(proc) message.title = "Changes to be committed" @@ -245,7 +260,10 @@ async def _process_snyk_dpkg( _LOGGER.error("Error while create commit or pull request") else: if pull_request is not None: - issue_check.set_title(key, f"{key} ([Pull request]({pull_request.html_url}))") + issue_check.set_title( + key, + f"{key} ([Pull request]({pull_request.html_url}))", + ) short_message.append(f"[Pull request]({pull_request.html_url})") else: @@ -337,7 +355,9 @@ def get_actions(self, context: module.GetActionContext) -> list[module.Action[_E if "SECURITY.md" in context.event_data.get("push", {}).get("files", []): return [ module.Action( - priority=module.PRIORITY_CRON, data=_EventData(type="outdated"), title="outdated" + priority=module.PRIORITY_CRON, + data=_EventData(type="outdated"), + title="outdated", ) ] results: list[module.Action[_EventData]] = [] @@ -355,7 +375,9 @@ def get_actions(self, context: module.GetActionContext) -> list[module.Action[_E if not old_check.is_checked("outdated") and new_check.is_checked("outdated"): results.append( module.Action( - priority=module.PRIORITY_STANDARD, data=_EventData(type="outdated"), title="outdated" + priority=module.PRIORITY_STANDARD, + data=_EventData(type="outdated"), + title="outdated", ) ) if not old_check.is_checked("snyk") and new_check.is_checked("snyk"): @@ -366,7 +388,9 @@ def get_actions(self, context: module.GetActionContext) -> list[module.Action[_E if context.event_data.get("type") == "event" and 
context.event_data.get("name") == "daily": results.append( module.Action( - priority=module.PRIORITY_CRON, data=_EventData(type="outdated"), title="outdated" + priority=module.PRIORITY_CRON, + data=_EventData(type="outdated"), + title="outdated", ) ) snyk = True @@ -382,7 +406,8 @@ def get_actions(self, context: module.GetActionContext) -> list[module.Action[_E return results async def process( - self, context: module.ProcessContext[configuration.AuditConfiguration, _EventData, _TransversalStatus] + self, + context: module.ProcessContext[configuration.AuditConfiguration, _EventData, _TransversalStatus], ) -> module.ProcessOutput[_EventData, _TransversalStatus]: """ Process the action. diff --git a/github_app_geo_project/module/audit/utils.py b/github_app_geo_project/module/audit/utils.py index 161fa1658ca..9c2d232cada 100644 --- a/github_app_geo_project/module/audit/utils.py +++ b/github_app_geo_project/module/audit/utils.py @@ -128,7 +128,6 @@ async def _select_java_version( local_config: configuration.SnykConfiguration, env: dict[str, str], ) -> None: - if not os.path.exists("gradlew"): return @@ -211,7 +210,10 @@ async def _install_pipenv_dependencies( env: dict[str, str], ) -> None: proc = subprocess.run( # nosec # pylint: disable=subprocess-run-check - ["git", "ls-files", "Pipfile", "*/Pipfile"], capture_output=True, encoding="utf-8", timeout=30 + ["git", "ls-files", "Pipfile", "*/Pipfile"], + capture_output=True, + encoding="utf-8", + timeout=30, ) if proc.returncode != 0: message = module_utils.ansi_proc_message(proc) @@ -271,7 +273,10 @@ async def _install_poetry_dependencies( [ "poetry", "install", - *local_config.get("poetry-install-arguments", config.get("poetry-install-arguments", [])), + *local_config.get( + "poetry-install-arguments", + config.get("poetry-install-arguments", []), + ), ], env, int(os.environ.get("GHCI_PYTHON_INSTALL_TIMEOUT", "600")), @@ -296,7 +301,8 @@ async def _snyk_monitor( "monitor", f"--target-reference={branch}", *local_config.get( - "monitor-arguments", config.get("monitor-arguments", configuration.SNYK_MONITOR_ARGUMENTS_DEFAULT) + "monitor-arguments", + config.get("monitor-arguments", configuration.SNYK_MONITOR_ARGUMENTS_DEFAULT), ), ] local_monitor_config = local_config.get("monitor", {}) @@ -345,7 +351,8 @@ async def _snyk_test( "snyk", "test", *local_config.get( - "test-arguments", config.get("test-arguments", configuration.SNYK_TEST_ARGUMENTS_DEFAULT) + "test-arguments", + config.get("test-arguments", configuration.SNYK_TEST_ARGUMENTS_DEFAULT), ), ] await module_utils.run_timeout( @@ -362,7 +369,8 @@ async def _snyk_test( "test", "--json", *local_config.get( - "test-arguments", config.get("test-arguments", configuration.SNYK_TEST_ARGUMENTS_DEFAULT) + "test-arguments", + config.get("test-arguments", configuration.SNYK_TEST_ARGUMENTS_DEFAULT), ), ] test_json_str, _, message = await module_utils.run_timeout( @@ -382,7 +390,9 @@ async def _snyk_test( _LOGGER.debug(message) else: _LOGGER.error( - "Snyk test JSON returned nothing on project %s branch %s", module_utils.get_cwd(), branch + "Snyk test JSON returned nothing on project %s branch %s", + module_utils.get_cwd(), + branch, ) test_json = json.loads(test_json_str) if test_json_str else [] @@ -525,7 +535,8 @@ async def _snyk_fix( "snyk", "fix", *local_config.get( - "fix-arguments", config.get("fix-arguments", configuration.SNYK_FIX_ARGUMENTS_DEFAULT) + "fix-arguments", + config.get("fix-arguments", configuration.SNYK_FIX_ARGUMENTS_DEFAULT), ), ] fix_message, snyk_fix_success, message = await 
@@ -544,7 +555,12 @@ await module_utils.run_timeout(
             command,
             env_debug,
-            int(os.environ.get("GHCI_SNYK_FIX_TIMEOUT", os.environ.get("GHCI_SNYK_TIMEOUT", "300"))),
+            int(
+                os.environ.get(
+                    "GHCI_SNYK_FIX_TIMEOUT",
+                    os.environ.get("GHCI_SNYK_TIMEOUT", "300"),
+                )
+            ),
             "Snyk fix (debug)",
             "Error while fixing the project (debug)",
             "Timeout while fixing the project (debug)",
@@ -633,7 +649,9 @@ def outdated_versions(
 def _get_sources(
-    dist: str, config: configuration.DpkgConfiguration, local_config: configuration.DpkgConfiguration
+    dist: str,
+    config: configuration.DpkgConfiguration,
+    local_config: configuration.DpkgConfiguration,
 ) -> apt_repo.APTSources:
     """
     Get the sources for the distribution.
@@ -676,7 +694,9 @@ def _get_sources(
 async def _get_packages_version(
-    package: str, config: configuration.DpkgConfiguration, local_config: configuration.DpkgConfiguration
+    package: str,
+    config: configuration.DpkgConfiguration,
+    local_config: configuration.DpkgConfiguration,
 ) -> str | None:
     """Get the version of the package."""
     global _GENERATION_TIME  # pylint: disable=global-statement
@@ -695,7 +715,8 @@ async def _get_packages_version(
 async def dpkg(
-    config: configuration.DpkgConfiguration, local_config: configuration.DpkgConfiguration
+    config: configuration.DpkgConfiguration,
+    local_config: configuration.DpkgConfiguration,
 ) -> None:
     """Update the version of packages in the file .github/dpkg-versions.yaml or ci/dpkg-versions.yaml."""
     if not os.path.exists("ci/dpkg-versions.yaml") and not os.path.exists(".github/dpkg-versions.yaml"):
diff --git a/github_app_geo_project/module/delete_old_workflow_runs/__init__.py b/github_app_geo_project/module/delete_old_workflow_runs/__init__.py
index 572d4909640..3daa6b4fea0 100644
--- a/github_app_geo_project/module/delete_old_workflow_runs/__init__.py
+++ b/github_app_geo_project/module/delete_old_workflow_runs/__init__.py
@@ -57,7 +57,9 @@ def get_github_application_permissions(self) -> module.GitHubApplicationPermissi
     async def process(
         self,
         context: module.ProcessContext[
-            configuration.DeleteOldWorkflowRunsConfiguration, dict[str, Any], dict[str, Any]
+            configuration.DeleteOldWorkflowRunsConfiguration,
+            dict[str, Any],
+            dict[str, Any],
         ],
     ) -> module.ProcessOutput[dict[str, Any], dict[str, Any]]:
         """
diff --git a/github_app_geo_project/module/pull_request/checks.py b/github_app_geo_project/module/pull_request/checks.py
index a71f9754287..2db90931d8f 100644
--- a/github_app_geo_project/module/pull_request/checks.py
+++ b/github_app_geo_project/module/pull_request/checks.py
@@ -27,7 +27,9 @@ def _get_code_spell_command(
     context: module.ProcessContext[
-        checks_configuration.PullRequestChecksConfiguration, dict[str, Any], dict[str, Any]
+        checks_configuration.PullRequestChecksConfiguration,
+        dict[str, Any],
+        dict[str, Any],
     ],
     ignore_file: NamedTemporaryFileStr,
 ) -> list[str]:
@@ -45,7 +47,8 @@ def _get_code_spell_command(
     ):
         try:
             content = context.github_project.repo.get_contents(
-                spell_ignore_file, ref=context.event_data.get("pull_request", {}).get("head", {}).get("sha")
+                spell_ignore_file,
+                ref=context.event_data.get("pull_request", {}).get("head", {}).get("sha"),
             )
             if isinstance(content, github.ContentFile.ContentFile):
                 ignore_file.write(content.decoded_content.decode("utf-8"))
@@ -91,7 +94,8 @@ def _commits_messages(
         message_lines = commit.commit.message.split("\n")
         head = message_lines[0]
         if commit_message_config.get(
-            "check-fixup", checks_configuration.PULL_REQUEST_CHECKS_COMMITS_MESSAGES_FIXUP_DEFAULT
+            "check-fixup",
+            checks_configuration.PULL_REQUEST_CHECKS_COMMITS_MESSAGES_FIXUP_DEFAULT,
         ) and head.startswith("fixup! "):
             _LOGGER.warning("Fixup message not allowed")
             messages.append(f":x: Fixup message not allowed in commit {commit.sha}")
@@ -99,7 +103,8 @@
         else:
             messages.append(f":heavy_check_mark: The commit {commit.sha} is not a fixup commit")
         if commit_message_config.get(
-            "check-squash", checks_configuration.PULL_REQUEST_CHECKS_COMMITS_MESSAGES_SQUASH_DEFAULT
+            "check-squash",
+            checks_configuration.PULL_REQUEST_CHECKS_COMMITS_MESSAGES_SQUASH_DEFAULT,
         ) and head.startswith("squash! "):
             _LOGGER.warning("Squash message not allowed")
             messages.append(f":x: Squash message not allowed in commit {commit.sha}")
@@ -180,7 +185,8 @@ def _commits_spell(
     for commit in commits:
         with tempfile.NamedTemporaryFile("w+t", encoding="utf-8", suffix=".yaml") as temp_file:
             if config.get(
-                "only-head", checks_configuration.PULL_REQUEST_CHECKS_COMMITS_MESSAGES_ONLY_HEAD_DEFAULT
+                "only-head",
+                checks_configuration.PULL_REQUEST_CHECKS_COMMITS_MESSAGES_ONLY_HEAD_DEFAULT,
             ):
                 head = commit.commit.message.split("\n")[0]
                 temp_file.write(head)
@@ -243,7 +249,10 @@ def _pull_request_spell(
     else:
         messages.append(
             ":heavy_check_mark: Pull request title is correct"
-            if config.get("only_head", checks_configuration.PULL_REQUEST_CHECKS_ONLY_HEAD_DEFAULT)
+            if config.get(
+                "only_head",
+                checks_configuration.PULL_REQUEST_CHECKS_ONLY_HEAD_DEFAULT,
+            )
             else ":heavy_check_mark: Pull request title and body are correct"
         )
@@ -253,7 +262,11 @@ def _pull_request_spell(
 class Checks(
-    module.Module[checks_configuration.PullRequestChecksConfiguration, dict[str, Any], dict[str, Any]]
+    module.Module[
+        checks_configuration.PullRequestChecksConfiguration,
+        dict[str, Any],
+        dict[str, Any],
+    ]
 ):
     """Module to check the pull request message and commits."""
@@ -281,7 +294,8 @@ def get_github_application_permissions(self) -> module.GitHubApplicationPermissi
     def get_json_schema(self) -> dict[str, Any]:
         """Get the JSON schema for the configuration."""
         with open(
-            os.path.join(os.path.dirname(__file__), "checks-schema.json"), encoding="utf-8"
+            os.path.join(os.path.dirname(__file__), "checks-schema.json"),
+            encoding="utf-8",
         ) as schema_file:
             schema = json.loads(schema_file.read())
             for key in ("$schema", "$id"):
@@ -309,7 +323,9 @@ def get_actions(self, context: module.GetActionContext) -> list[module.Action[di
     async def process(
         self,
         context: module.ProcessContext[
-            checks_configuration.PullRequestChecksConfiguration, dict[str, Any], dict[str, Any]
+            checks_configuration.PullRequestChecksConfiguration,
+            dict[str, Any],
+            dict[str, Any],
         ],
     ) -> module.ProcessOutput[dict[str, Any], dict[str, Any]]:
         """Process the module."""
diff --git a/github_app_geo_project/module/pull_request/checks_configuration.py b/github_app_geo_project/module/pull_request/checks_configuration.py
index 257a0c9cf65..5d7ce55cd79 100644
--- a/github_app_geo_project/module/pull_request/checks_configuration.py
+++ b/github_app_geo_project/module/pull_request/checks_configuration.py
@@ -4,15 +4,29 @@
 from typing import TypedDict, Union
-CODESPELL_ARGUMENTS_DEFAULT = ["--quiet-level=2", "--check-filenames", "--ignore-words-list=ro"]
+CODESPELL_ARGUMENTS_DEFAULT = [
+    "--quiet-level=2",
+    "--check-filenames",
+    "--ignore-words-list=ro",
+]
 """ Default value of the field path 'Codespell arguments' """
-CODESPELL_DICTIONARIES_DEFAULT = ["clear", "rare", "informal", "code", "names", "en-GB_to_en-US"]
"code", "names", "en-GB_to_en-US"] +CODESPELL_DICTIONARIES_DEFAULT = [ + "clear", + "rare", + "informal", + "code", + "names", + "en-GB_to_en-US", +] """ Default value of the field path 'Codespell internal-dictionaries' """ -CODESPELL_IGNORE_REGULAR_EXPRESSION_DEFAULT = ["(.*/)?poetry\\.lock", "(.*/)?package-lock\\.json"] +CODESPELL_IGNORE_REGULAR_EXPRESSION_DEFAULT = [ + "(.*/)?poetry\\.lock", + "(.*/)?package-lock\\.json", +] """ Default value of the field path 'Codespell ignore-re' """ diff --git a/github_app_geo_project/module/pull_request/links.py b/github_app_geo_project/module/pull_request/links.py index c2c14dae06c..aef52175cbd 100644 --- a/github_app_geo_project/module/pull_request/links.py +++ b/github_app_geo_project/module/pull_request/links.py @@ -13,7 +13,8 @@ def _add_issue_link( - config: links_configuration.PullRequestAddLinksConfiguration, pull_request: github.PullRequest.PullRequest + config: links_configuration.PullRequestAddLinksConfiguration, + pull_request: github.PullRequest.PullRequest, ) -> str: """Add a comment with the link to Jira if needed.""" body = pull_request.body or "" @@ -68,13 +69,17 @@ def _add_issue_link( return "Nothing to add." pull_request.edit( - body=(pull_request.body + "\n".join(result)) if pull_request.body is not None else "\n".join(result) + body=((pull_request.body + "\n".join(result)) if pull_request.body is not None else "\n".join(result)) ) return "Pull request descriptions updated." class Links( - module.Module[links_configuration.PullRequestAddLinksConfiguration, dict[str, Any], dict[str, Any]] + module.Module[ + links_configuration.PullRequestAddLinksConfiguration, + dict[str, Any], + dict[str, Any], + ] ): """Module to add some links to the pull request message and commits.""" @@ -102,7 +107,8 @@ def get_github_application_permissions(self) -> module.GitHubApplicationPermissi def get_json_schema(self) -> dict[str, Any]: """Get the JSON schema for the configuration.""" with open( - os.path.join(os.path.dirname(__file__), "links-schema.json"), encoding="utf-8" + os.path.join(os.path.dirname(__file__), "links-schema.json"), + encoding="utf-8", ) as schema_file: schema = json.loads(schema_file.read()) for key in ("$schema", "$id"): @@ -129,7 +135,9 @@ def get_actions(self, context: module.GetActionContext) -> list[module.Action[di async def process( self, context: module.ProcessContext[ - links_configuration.PullRequestAddLinksConfiguration, dict[str, Any], dict[str, Any] + links_configuration.PullRequestAddLinksConfiguration, + dict[str, Any], + dict[str, Any], ], ) -> module.ProcessOutput[dict[str, Any], dict[str, Any]]: """Process the module.""" diff --git a/github_app_geo_project/module/standard/auto.py b/github_app_geo_project/module/standard/auto.py index c6de3564ce2..95a44d30d52 100644 --- a/github_app_geo_project/module/standard/auto.py +++ b/github_app_geo_project/module/standard/auto.py @@ -95,7 +95,8 @@ async def process( def get_json_schema(self) -> dict[str, Any]: """Get the JSON schema of the module configuration.""" with open( - os.path.join(os.path.dirname(__file__), "auto-schema.json"), encoding="utf-8" + os.path.join(os.path.dirname(__file__), "auto-schema.json"), + encoding="utf-8", ) as schema_file: return json.loads(schema_file.read()).get("definitions", {}).get("auto") # type: ignore[no-any-return] diff --git a/github_app_geo_project/module/standard/changelog.py b/github_app_geo_project/module/standard/changelog.py index 78e6ff41465..5c3f1a72622 100644 --- a/github_app_geo_project/module/standard/changelog.py +++ 
@@ -452,7 +452,12 @@ def generate_changelog(
         if section_config["name"] not in sections:
             continue
         if section_config.get("closed", False):
-            result += ["", "", f"## {section_config['title']}", ""]
", "", f"## {section_config['title']}", ""] + result += [ + "
", + "", + f"## {section_config['title']}", + "", + ] else: result.append(f"## {section_config['title']}") result.append("") @@ -508,7 +513,8 @@ def get_actions(self, context: module.GetActionContext) -> list[module.Action[di if event_data.get("ref_type") == "tag": return [ module.Action( - priority=module.PRIORITY_STATUS, data={"type": "tag", "version": event_data["ref"]} + priority=module.PRIORITY_STATUS, + data={"type": "tag", "version": event_data["ref"]}, ) ] if ( @@ -647,7 +653,10 @@ async def process( tag_str, tag_name=tag_str, message=generate_changelog( - context.github_project.github, context.module_config, repository, tag_str + context.github_project.github, + context.module_config, + repository, + tag_str, ), ) return module.ProcessOutput() @@ -656,7 +665,8 @@ def get_json_schema(self) -> dict[str, Any]: """Get the JSON schema of the module configuration.""" # Get changelog-schema.json related to this file with open( - os.path.join(os.path.dirname(__file__), "changelog-schema.json"), encoding="utf-8" + os.path.join(os.path.dirname(__file__), "changelog-schema.json"), + encoding="utf-8", ) as schema_file: return json.loads(schema_file.read()).get("properties", {}).get("changelog") # type: ignore[no-any-return] diff --git a/github_app_geo_project/module/standard/patch.py b/github_app_geo_project/module/standard/patch.py index 2aec544b217..879b7b0dede 100644 --- a/github_app_geo_project/module/standard/patch.py +++ b/github_app_geo_project/module/standard/patch.py @@ -61,7 +61,8 @@ def get_actions(self, context: module.GetActionContext) -> list[module.Action[di return [] async def process( - self, context: module.ProcessContext[dict[str, Any], dict[str, Any], dict[str, Any]] + self, + context: module.ProcessContext[dict[str, Any], dict[str, Any], dict[str, Any]], ) -> module.ProcessOutput[dict[str, Any], dict[str, Any]]: """ Process the action. @@ -174,7 +175,12 @@ async def process( should_push = True if should_push: proc = subprocess.run( # nosec # pylint: disable=subprocess-run-check - ["git", "push", "origin", f"HEAD:{workflow_run.head_branch}"], + [ + "git", + "push", + "origin", + f"HEAD:{workflow_run.head_branch}", + ], capture_output=True, encoding="utf-8", timeout=60, diff --git a/github_app_geo_project/module/tests/__init__.py b/github_app_geo_project/module/tests/__init__.py index 2ec0deb1dc2..40c97220a1c 100644 --- a/github_app_geo_project/module/tests/__init__.py +++ b/github_app_geo_project/module/tests/__init__.py @@ -54,7 +54,8 @@ def get_actions(self, context: module.GetActionContext) -> list[module.Action[_E ] async def process( - self, context: module.ProcessContext[_ConfigType, _EventData, _TransversalDashboardData] + self, + context: module.ProcessContext[_ConfigType, _EventData, _TransversalDashboardData], ) -> module.ProcessOutput[_EventData, _TransversalDashboardData]: """ Process the action. 
@@ -85,7 +86,9 @@ async def process(
         if type_ == "success":
             result["output-multi-line-id"] = module_utils.add_output(
-                context, "Test", ["Test 1", {"title": "Test 2", "children": ["Test 3", "Test 4"]}]
+                context,
+                "Test",
+                ["Test 1", {"title": "Test 2", "children": ["Test 3", "Test 4"]}],
             )
             result["output-error-id"] = module_utils.add_output(
                 context, "Test", ["Test error"], status=models.OutputStatus.ERROR
@@ -114,7 +117,10 @@ async def process(
             _LOGGER.info(message)
         if type_ == "log-json":
-            _LOGGER.info("JSON output:\n%s", utils.format_json({"test1": "value", "test2": "value"}))
+            _LOGGER.info(
+                "JSON output:\n%s",
+                utils.format_json({"test1": "value", "test2": "value"}),
+            )
         with open("/tmp/test-result.yaml", "w", encoding="utf-8") as file:
             yaml.dump(result, file)
diff --git a/github_app_geo_project/module/utils.py b/github_app_geo_project/module/utils.py
index a5a62291de3..e8962a191f5 100644
--- a/github_app_geo_project/module/utils.py
+++ b/github_app_geo_project/module/utils.py
@@ -33,7 +33,7 @@ def add_output(
         title=title,
         status=status,
         owner=context.github_project.owner if context.github_project else "camptocamp",
-        repository=context.github_project.repository if context.github_project else "test",
+        repository=(context.github_project.repository if context.github_project else "test"),
         access_type=access_type,
         data=data,
     )
@@ -256,7 +256,13 @@ def to_html(self, style: str = "h3") -> str:
     sanitizer = html_sanitizer.Sanitizer(
         {
-            "tags": {*html_sanitizer.sanitizer.DEFAULT_SETTINGS["tags"], "span", "div", "pre", "code"},
+            "tags": {
+                *html_sanitizer.sanitizer.DEFAULT_SETTINGS["tags"],
+                "span",
+                "div",
+                "pre",
+                "code",
+            },
             "attributes": {
                 "a": (
                     "href",
@@ -366,7 +372,12 @@ class AnsiProcessMessage(AnsiMessage):
     """Represent a message from a subprocess."""
     def __init__(
-        self, args: list[str], returncode: int | None, stdout: str, stderr: str, error: str | None = None
+        self,
+        args: list[str],
+        returncode: int | None,
+        stdout: str,
+        stderr: str,
+        error: str | None = None,
     ) -> None:
         """Initialize the process message."""
         self.args: list[str] = []
@@ -435,7 +446,7 @@ def to_markdown(self, summary: bool = False) -> str:
     @staticmethod
     def from_process(
-        proc: subprocess.CompletedProcess[str] | subprocess.CalledProcessError | subprocess.TimeoutExpired,
+        proc: (subprocess.CompletedProcess[str] | subprocess.CalledProcessError | subprocess.TimeoutExpired),
     ) -> "AnsiProcessMessage":
         """Create a process message from a subprocess."""
         if isinstance(proc, subprocess.TimeoutExpired):
@@ -444,7 +455,7 @@ def from_process(
 def ansi_proc_message(
-    proc: subprocess.CompletedProcess[str] | subprocess.CalledProcessError | subprocess.TimeoutExpired,
+    proc: (subprocess.CompletedProcess[str] | subprocess.CalledProcessError | subprocess.TimeoutExpired),
 ) -> Message:
     """
     Process the output of a subprocess for the dashboard (markdown)/HTML.
@@ -557,8 +568,8 @@ async def run_timeout(
         message = AnsiProcessMessage(
             command,
             None,
-            "" if async_proc.stdout is None else (await async_proc.stdout.read()).decode(),
-            "" if async_proc.stderr is None else (await async_proc.stderr.read()).decode(),
+            ("" if async_proc.stdout is None else (await async_proc.stdout.read()).decode()),
+            ("" if async_proc.stderr is None else (await async_proc.stderr.read()).decode()),
             error=str(exception),
         )
         message.title = timeout_message
@@ -569,18 +580,28 @@ async def run_timeout(
             _LOGGER.exception("TimeoutError for %s: %s", command[0], exception)
         else:
             _LOGGER.warning("TimeoutError for %s", command[0])
-        return None, False, AnsiProcessMessage(command, None, "", "", str(exception))
+        return (
+            None,
+            False,
+            AnsiProcessMessage(command, None, "", "", str(exception)),
+        )
 def has_changes(include_un_followed: bool = False) -> bool:
     """Check if there are changes."""
     if include_un_followed:
         proc = subprocess.run(  # nosec # pylint: disable=subprocess-run-check
-            ["git", "status", "--porcelain"], capture_output=True, encoding="utf-8", timeout=30
+            ["git", "status", "--porcelain"],
+            capture_output=True,
+            encoding="utf-8",
+            timeout=30,
         )
         return bool(proc.stdout)
     proc = subprocess.run(  # nosec # pylint: disable=subprocess-run-check
-        ["git", "diff", "--exit-code"], capture_output=True, encoding="utf-8", timeout=30
+        ["git", "diff", "--exit-code"],
+        capture_output=True,
+        encoding="utf-8",
+        timeout=30,
     )
     return proc.returncode != 0
@@ -596,7 +617,12 @@ async def create_commit(message: str, pre_commit_check: bool = True) -> bool:
         _LOGGER.warning(proc_message)
         return False
     _, success, _ = await run_timeout(
-        ["git", "commit", f"--message={message}", *([] if pre_commit_check else ["--no-verify"])],
+        [
+            "git",
+            "commit",
+            f"--message={message}",
+            *([] if pre_commit_check else ["--no-verify"]),
+        ],
         None,
         600,
         "Commit",
@@ -610,7 +636,11 @@ async def create_commit(message: str, pre_commit_check: bool = True) -> bool:
 def create_pull_request(
-    branch: str, new_branch: str, message: str, body: str, project: configuration.GithubProject
+    branch: str,
+    new_branch: str,
+    message: str,
+    body: str,
+    project: configuration.GithubProject,
 ) -> tuple[bool, github.PullRequest.PullRequest | None]:
     """Create a pull request."""
     proc = subprocess.run(  # nosec # pylint: disable=subprocess-run-check
@@ -643,7 +673,8 @@ def create_pull_request(
         body = f"See: #{pull_request.number}"
     found = False
     issues = project.repo.get_issues(
-        state="open", creator=project.application.integration.get_app().slug + "[bot]"  # type: ignore[arg-type]
+        state="open",
+        creator=project.application.integration.get_app().slug + "[bot]",  # type: ignore[arg-type]
    )
    if issues.totalCount > 0:
        for candidate in issues:
@@ -671,7 +702,11 @@ async def create_commit_pull_request(
-    branch: str, new_branch: str, message: str, body: str, project: configuration.GithubProject
+    branch: str,
+    new_branch: str,
+    message: str,
+    body: str,
+    project: configuration.GithubProject,
 ) -> tuple[bool, github.PullRequest.PullRequest | None]:
     """Do a commit, then create a pull request."""
     if os.path.exists(".pre-commit-config.yaml"):
@@ -707,7 +742,8 @@ def close_pull_request_issues(new_branch: str, message: str, project: configurat
     title = f"Pull request {message} is open for 5 days"
     issues = project.repo.get_issues(
-        state="open", creator=project.application.integration.get_app().slug + "[bot]"  # type: ignore[arg-type]
+        state="open",
+        creator=project.application.integration.get_app().slug + "[bot]",  # type: ignore[arg-type]
     )
     for issue in issues:
         if title == issue.title:
@@ -872,7 +908,10 @@ def manage_updated(status: dict[str, Any], key: str, days_old: int = 2) -> None:
 def manage_updated_separated(
-    updated: dict[str, datetime.datetime], data: dict[str, Any], key: str, days_old: int = 2
+    updated: dict[str, datetime.datetime],
+    data: dict[str, Any],
+    key: str,
+    days_old: int = 2,
 ) -> None:
     """
     Manage the updated status.
diff --git a/github_app_geo_project/module/versions/__init__.py b/github_app_geo_project/module/versions/__init__.py
index 4c50ca05701..b42ca3d9adb 100644
--- a/github_app_geo_project/module/versions/__init__.py
+++ b/github_app_geo_project/module/versions/__init__.py
@@ -163,7 +163,9 @@ async def process(
             f"https://github.com/{context.github_project.owner}/{context.github_project.repository}"
         )
         module_utils.manage_updated_separated(
-            context.transversal_status.updated, context.transversal_status.repositories, key
+            context.transversal_status.updated,
+            context.transversal_status.repositories,
+            key,
         )
         _apply_additional_packages(context)
@@ -307,7 +309,10 @@ def get_transversal_dashboard(
         names = _Names()
         for repo, repo_data in transversal_status.repositories.items():
             for branch, branch_data in repo_data.versions.items():
-                for datasource, datasource_data in branch_data.names_by_datasource.items():
+                for (
+                    datasource,
+                    datasource_data,
+                ) in branch_data.names_by_datasource.items():
                     for name in datasource_data.names:
                         current_status = names.by_datasources.setdefault(
                             datasource, _NamesByDataSources()
@@ -713,8 +718,14 @@ def _build_internal_dependencies(
         dependencies_branch = dependencies_branches.by_branch.setdefault(
             version, _Dependencies(support=version_data.support)
         )
-        for datasource_name, dependencies_data in version_data.dependencies_by_datasource.items():
-            for dependency_name, dependency_versions in dependencies_data.versions_by_names.items():
+        for (
+            datasource_name,
+            dependencies_data,
+        ) in version_data.dependencies_by_datasource.items():
+            for (
+                dependency_name,
+                dependency_versions,
+            ) in dependencies_data.versions_by_names.items():
                 if datasource_name not in names.by_datasources:
                     continue
                 for dependency_version in dependency_versions.versions:
@@ -762,7 +773,10 @@ def _build_reverse_dependency(
 ) -> None:
     all_datasource_names: dict[str, dict[str, str]] = {}
     for branch, version_name_data in repo_data.versions.items():
-        for datasource_name, datasource_name_data in version_name_data.names_by_datasource.items():
+        for (
+            datasource_name,
+            datasource_name_data,
+        ) in version_name_data.names_by_datasource.items():
            for package_name in datasource_name_data.names:
                all_datasource_names.setdefault(datasource_name, {})[package_name] = branch
    for other_repo, other_repo_data in transversal_status.repositories.items():
@@ -772,10 +786,16 @@ def _build_reverse_dependency(
            other_version,
            other_version_data,
        ) in other_repo_data.versions.items():
-            for datasource_name, datasource_data in other_version_data.dependencies_by_datasource.items():
+            for (
+                datasource_name,
+                datasource_data,
+            ) in other_version_data.dependencies_by_datasource.items():
                if datasource_name not in all_datasource_names:
                    continue
-                for package_name, package_data in datasource_data.versions_by_names.items():
+                for (
+                    package_name,
+                    package_data,
+                ) in datasource_data.versions_by_names.items():
                    for version in package_data.versions:
                        if datasource_name == "docker":
                            package_name = f"{package_name}:{version}"
@@ -801,7 +821,10 @@ def _build_reverse_dependency(
                                support=other_version_data.support,
                                color=(
                                    "--bs-body-bg"
-                                    if _is_supported(other_version_data.support, version_data.support)
+                                    if _is_supported(
+                                        other_version_data.support,
+                                        version_data.support,
+                                    )
                                    else "--bs-danger"
                                ),
                                repo=other_repo,
@@ -827,7 +850,9 @@ def _apply_additional_packages(
 ) -> None:
     for repo, data in context.module_config.get("additional-packages", {}).items():
         module_utils.manage_updated_separated(
-            context.transversal_status.updated, context.transversal_status.repositories, repo
+            context.transversal_status.updated,
+            context.transversal_status.repositories,
+            repo,
         )
         pydentic_data = _TransversalStatusRepo(**data)
         context.transversal_status.repositories[repo] = pydentic_data
diff --git a/github_app_geo_project/module/versions/configuration.py b/github_app_geo_project/module/versions/configuration.py
index 7010494552c..c514a9d437a 100644
--- a/github_app_geo_project/module/versions/configuration.py
+++ b/github_app_geo_project/module/versions/configuration.py
@@ -69,7 +69,8 @@ class _VersionsConfigurationExternalPackagesItem(TypedDict, total=False):
 _VersionsConfigurationPackageExtractorAdditionalproperties = dict[
-    str, list["_VersionsConfigurationPackageExtractorAdditionalpropertiesAdditionalpropertiesItem"]
+    str,
+    list["_VersionsConfigurationPackageExtractorAdditionalpropertiesAdditionalpropertiesItem"],
 ]
 """ The package extractor by package name """
diff --git a/github_app_geo_project/scripts/process_queue.py b/github_app_geo_project/scripts/process_queue.py
index aec3a2a7c1d..1233fa521c4 100644
--- a/github_app_geo_project/scripts/process_queue.py
+++ b/github_app_geo_project/scripts/process_queue.py
@@ -25,7 +25,13 @@
 import sqlalchemy.orm
 from prometheus_client import Gauge, Info
-from github_app_geo_project import configuration, models, module, project_configuration, utils
+from github_app_geo_project import (
+    configuration,
+    models,
+    module,
+    project_configuration,
+    utils,
+)
 from github_app_geo_project.module import modules
 from github_app_geo_project.module import utils as module_utils
 from github_app_geo_project.views import webhook
@@ -235,7 +241,7 @@ async def _process_job(
         try:
             check_run.edit(
                 status="completed",
-                conclusion="success" if result is None or result.success else "failure",
+                conclusion=("success" if result is None or result.success else "failure"),
                 output=check_output,
             )
         except github.GithubException as exception:
@@ -471,7 +477,11 @@ def _process_event(
     config: dict[str, str], event_data: dict[str, str], session: sqlalchemy.orm.Session
 ) -> None:
     for application in config["applications"].split():
-        _LOGGER.info("Process the event: %s, application: %s", event_data.get("name"), application)
+        _LOGGER.info(
+            "Process the event: %s, application: %s",
+            event_data.get("name"),
+            application,
+        )
         if "TEST_APPLICATION" in os.environ:
             webhook.process_event(
@@ -507,10 +517,12 @@ def _process_event(
 def _get_dashboard_issue(
-    github_application: configuration.GithubApplication, repo: github.Repository.Repository
+    github_application: configuration.GithubApplication,
+    repo: github.Repository.Repository,
 ) -> github.Issue.Issue | None:
     open_issues = repo.get_issues(
-        state="open", creator=github_application.integration.get_app().slug + "[bot]"  # type: ignore[arg-type]
+        state="open",
+        creator=github_application.integration.get_app().slug + "[bot]",  # type: ignore[arg-type]
     )
     if open_issues.totalCount > 0:
         for candidate in open_issues:
@@ -548,7 +560,11 @@ def _process_dashboard_issue(
         module_old = utils.get_dashboard_issue_module(old_data, name)
         module_new = utils.get_dashboard_issue_module(new_data, name)
         if module_old != module_new:
-            _LOGGER.debug("Dashboard issue edited for module %s: %s", name, current_module.title())
+            _LOGGER.debug(
+                "Dashboard issue edited for module %s: %s",
+                name,
+                current_module.title(),
+            )
             if current_module.required_issue_dashboard():
                 for action in current_module.get_actions(
                     module.GetActionContext(
@@ -626,7 +642,10 @@ async def _process_one_job(
         )
         if job is None:
             if no_steal_long_pending:
-                _LOGGER.debug("Process one job (max priority: %i): No job to process", max_priority)
+                _LOGGER.debug(
+                    "Process one job (max priority: %i): No job to process",
+                    max_priority,
+                )
                 return True
             # Very long pending job => error
             session.execute(
@@ -652,7 +671,10 @@ async def _process_one_job(
             )
             session.commit()
-            _LOGGER.debug("Process one job (max priority: %i): Steal long pending job", max_priority)
+            _LOGGER.debug(
+                "Process one job (max priority: %i): Steal long pending job",
+                max_priority,
+            )
             return True
     sentry_sdk.set_context("job", {"id": job.id, "event": job.event_name, "module": job.module or "-"})
@@ -671,7 +693,11 @@ async def _process_one_job(
         root_logger.addHandler(handler)
         _LOGGER.info(message)
         _RUNNING_JOBS[job.id] = _JobInfo(
-            job.module or "-", job.event_name, job.repository, job.priority, max_priority
+            job.module or "-",
+            job.event_name,
+            job.repository,
+            job.priority,
+            max_priority,
         )
         root_logger.removeHandler(handler)
@@ -871,11 +897,19 @@ async def _async_main() -> None:
         models.Base.metadata.create_all(engine)
     if args.only_one:
         await _process_one_job(
-            config, Session, no_steal_long_pending=args.exit_when_empty, make_pending=args.make_pending
+            config,
+            Session,
+            no_steal_long_pending=args.exit_when_empty,
+            make_pending=args.make_pending,
         )
         sys.exit(0)
     if args.make_pending:
-        await _process_one_job(config, Session, no_steal_long_pending=args.exit_when_empty, make_pending=True)
+        await _process_one_job(
+            config,
+            Session,
+            no_steal_long_pending=args.exit_when_empty,
+            make_pending=True,
+        )
         sys.exit(0)
     if not args.exit_when_empty and "C2C_PROMETHEUS_PORT" in os.environ:
diff --git a/github_app_geo_project/security.py b/github_app_geo_project/security.py
index 1c81e2fbcb2..739e1f33f5f 100644
--- a/github_app_geo_project/security.py
+++ b/github_app_geo_project/security.py
@@ -74,7 +74,8 @@ def identity(self, request: pyramid.request.Request) -> User:
             digestmod=hashlib.sha256,
         ).hexdigest()
         if hmac.compare_digest(
-            our_signature, request.headers["X-Hub-Signature-256"].split("=", 1)[1]
+            our_signature,
+            request.headers["X-Hub-Signature-256"].split("=", 1)[1],
         ):
             user = User("github_webhook", None, None, None, True, None, request)
         else:
@@ -122,7 +123,10 @@ def authenticated_userid(self, request: pyramid.request.Request) -> str | None:
         return identity.login
     def permits(
-        self, request: pyramid.request.Request, auth_config: c2cwsgiutils.auth.AuthConfig, permission: str
+        self,
+        request: pyramid.request.Request,
+        auth_config: c2cwsgiutils.auth.AuthConfig,
+        permission: str,
     ) -> Allowed | Denied:
         """Allow access to everything if signed in."""
         identity = self.identity(request)
diff --git a/github_app_geo_project/templates/__init__.py b/github_app_geo_project/templates/__init__.py
index 036fb889d16..4a0c08b9ff9 100644
--- a/github_app_geo_project/templates/__init__.py
+++ b/github_app_geo_project/templates/__init__.py
@@ -123,7 +123,11 @@ def pprint_duration(duration_in: str | timedelta) -> str:
         day = 0
         date = datetime.strptime(duration_in, "%H:%M:%S.%f" if "." in duration_in else "%H:%M:%S")
         duration = timedelta(
-            days=day, hours=date.hour, minutes=date.minute, seconds=date.second, microseconds=date.microsecond
+            days=day,
+            hours=date.hour,
+            minutes=date.minute,
+            seconds=date.second,
+            microseconds=date.microsecond,
         )
     else:
         duration = duration_in
diff --git a/github_app_geo_project/views/dashboard.py b/github_app_geo_project/views/dashboard.py
index c9fd3a7f5c1..d16a1bcf09f 100644
--- a/github_app_geo_project/views/dashboard.py
+++ b/github_app_geo_project/views/dashboard.py
@@ -47,7 +47,8 @@ def dashboard(request: pyramid.request.Request) -> dict[str, Any]:
             module_status = {}
         output = module_instance.get_transversal_dashboard(
             module.TransversalDashboardContext(
-                module_instance.transversal_status_from_json(module_status or {}), dict(request.params)
+                module_instance.transversal_status_from_json(module_status or {}),
+                dict(request.params),
             )
         )
         data = output.data
diff --git a/github_app_geo_project/views/home.py b/github_app_geo_project/views/home.py
index 6ee0e996bd4..810716e0ff3 100644
--- a/github_app_geo_project/views/home.py
+++ b/github_app_geo_project/views/home.py
@@ -17,7 +17,8 @@ def _gt_access(
-    access_1: Literal["read", "write", "admin"], access_2: Literal["read", "write", "admin"]
+    access_1: Literal["read", "write", "admin"],
+    access_2: Literal["read", "write", "admin"],
 ) -> bool:
     access_number = {"read": 1, "write": 2, "admin": 3}
     return access_number[access_1] > access_number[access_2]
@@ -119,7 +120,8 @@ def output(request: pyramid.request.Request) -> dict[str, Any]:
             # Test that all permissions are in github_permissions
             for permission, access in permissions.items():
                 if permission not in github_permissions or _gt_access(
-                    access, github_permissions[permission]  # type: ignore[arg-type,literal-required]
+                    access,
+                    github_permissions[permission],  # type: ignore[arg-type,literal-required]
                 ):
                     application["errors"].append(
                         f"Missing permission ({permission}={access}) in the GitHub application, "
@@ -137,7 +139,8 @@ def output(request: pyramid.request.Request) -> dict[str, Any]:
                         "\n".join([f"{k}={v}" for k, v in github_permissions.items()]),
                     )
                 elif _gt_access(
-                    github_permissions[permission], access  # type: ignore[arg-type,literal-required]
+                    github_permissions[permission],
+                    access,  # type: ignore[arg-type,literal-required]
                 ):
                     _LOGGER.error(
                         "The GitHub application '%s' has more permission (%s=%s) than required, "
diff --git a/github_app_geo_project/views/output.py b/github_app_geo_project/views/output.py
index ea9f82dd49f..4f194702543 100644
--- a/github_app_geo_project/views/output.py
+++ b/github_app_geo_project/views/output.py
@@ -31,7 +31,10 @@ def output(request: pyramid.request.Request) -> dict[str, Any]:
         full_repository = f"{out.owner}/{out.repository}"
         permission = request.has_permission(
             full_repository,
-            {"github_repository": full_repository, "github_access_type": out.access_type},
+            {
+                "github_repository": full_repository,
+                "github_access_type": out.access_type,
+            },
         )
         has_access = isinstance(permission, pyramid.security.Allowed)
         if has_access:
diff --git a/github_app_geo_project/views/project.py b/github_app_geo_project/views/project.py
index b2e9644d2ca..ede9e69b3ba 100644
--- a/github_app_geo_project/views/project.py
+++ b/github_app_geo_project/views/project.py
@@ -15,7 +15,11 @@
 from github_app_geo_project import configuration, models, project_configuration, utils
 from github_app_geo_project.module import modules
-from github_app_geo_project.templates import pprint_duration, pprint_full_date, pprint_short_date
+from github_app_geo_project.templates import (
+    pprint_duration,
+    pprint_full_date,
+    pprint_short_date,
+)
 _LOGGER = logging.getLogger(__name__)
@@ -99,7 +103,10 @@ def project(request: pyramid.request.Request) -> dict[str, Any]:
     except:  # nosec, pylint: disable=bare-except
         _LOGGER.debug(
-            "The repository %s/%s is not installed in the application %s", owner, repository, app
+            "The repository %s/%s is not installed in the application %s",
+            owner,
+            repository,
+            app,
         )
         module_config = []
     for module_name in module_names:
diff --git a/github_app_geo_project/views/schema.py b/github_app_geo_project/views/schema.py
index 71e13fe5b2b..c34e05e46d5 100644
--- a/github_app_geo_project/views/schema.py
+++ b/github_app_geo_project/views/schema.py
@@ -25,7 +25,8 @@ def schema_view(request: pyramid.request.Request) -> dict[str, Any]:
     # get project-schema-content
     with open(
-        os.path.join(os.path.dirname(os.path.dirname(__file__)), "project-schema.json"), encoding="utf-8"
+        os.path.join(os.path.dirname(os.path.dirname(__file__)), "project-schema.json"),
+        encoding="utf-8",
     ) as schema_file:
         schema: dict[str, Any] = json.loads(schema_file.read())
diff --git a/github_app_geo_project/views/webhook.py b/github_app_geo_project/views/webhook.py
index 30f2001ff2d..fdd503e7cc3 100644
--- a/github_app_geo_project/views/webhook.py
+++ b/github_app_geo_project/views/webhook.py
@@ -31,7 +31,12 @@ def webhook(request: pyramid.request.Request) -> dict[str, None]:
     github_secret = request.registry.settings.get(f"application.{application}.github_app_webhook_secret")
     if github_secret:
-        dry_run = os.environ.get("GHCI_WEBHOOK_SECRET_DRY_RUN", "false").lower() in ("true", "1", "yes", "on")
+        dry_run = os.environ.get("GHCI_WEBHOOK_SECRET_DRY_RUN", "false").lower() in (
+            "true",
+            "1",
+            "yes",
+            "on",
+        )
         if "X-Hub-Signature-256" not in request.headers:
             _LOGGER.error("No signature in the request")
             if not dry_run:
@@ -51,7 +56,9 @@ def webhook(request: pyramid.request.Request) -> dict[str, None]:
                raise pyramid.httpexceptions.HTTPBadRequest("Invalid signature in the request")
     _LOGGER.debug(
-        "Webhook received for %s on %s", request.headers.get("X-GitHub-Event", "undefined"), application
+        "Webhook received for %s on %s",
+        request.headers.get("X-GitHub-Event", "undefined"),
+        application,
     )
     application_object = None
@@ -101,7 +108,9 @@ def webhook(request: pyramid.request.Request) -> dict[str, None]:
                check_suite = project_github.repo.get_check_suite(data["check_suite"]["id"])
                for check_run in check_suite.get_check_runs():
                    _LOGGER.info(
-                        "Rerequest the check run %s from check suite %s", check_run.id, check_suite.id
+                        "Rerequest the check run %s from check suite %s",
+                        check_run.id,
+                        check_suite.id,
                    )
                    session.execute(
                        sqlalchemy.update(models.Queue)
@@ -236,7 +245,6 @@ def process_event(context: ProcessContext) -> None:
         jobs_unique_on = current_module.jobs_unique_on()
         if jobs_unique_on:
-
             update = (
                 sqlalchemy.update(models.Queue)
                 .where(models.Queue.status == models.JobStatus.NEW)
@@ -288,14 +296,23 @@ def process_event(context: ProcessContext) -> None:
         repo = None
         if "TEST_APPLICATION" not in os.environ:
             github_project = configuration.get_github_project(
-                context.config, context.application, context.owner, context.repository
+                context.config,
+                context.application,
+                context.owner,
+                context.repository,
             )
             repo = github_project.repo
         should_create_checks = action.checks
         if should_create_checks is None:
             # Auto (major of event that comes from GitHub)
-            for event_name in ["pull_request", "pusher", "check_run", "check_suite", "workflow_run"]:
+            for event_name in [
+                "pull_request",
+                "pusher",
+                "check_run",
+                "check_suite",
+                "workflow_run",
+            ]:
                 if event_name in context.event_data:
                     should_create_checks = True
                     break
diff --git a/gunicorn.conf.py b/gunicorn.conf.py
index d4bdfe0a9f6..2e3a3c2b7c4 100644
--- a/gunicorn.conf.py
+++ b/gunicorn.conf.py
@@ -8,7 +8,12 @@
 import gunicorn.arbiter
 import gunicorn.workers.base
-from c2cwsgiutils import get_config_defaults, get_logconfig_dict, get_paste_config, prometheus
+from c2cwsgiutils import (
+    get_config_defaults,
+    get_logconfig_dict,
+    get_paste_config,
+    prometheus,
+)
 from prometheus_client import multiprocess
 bind = ":8080"  # pylint: disable=invalid-name
diff --git a/tests/test_module_versions.py b/tests/test_module_versions.py
index 353d0dfc9ec..2e5652ab85d 100644
--- a/tests/test_module_versions.py
+++ b/tests/test_module_versions.py
@@ -56,9 +56,7 @@ async def test_process_step_2() -> None:
     context.github_project.repository = "test"
     context.module_config = {}
     os.environ["TEST"] = "TRUE"
-    os.environ[
-        "RENOVATE_GRAPH"
-    ] = """WARN: GitHub token is required for some dependencies
+    os.environ["RENOVATE_GRAPH"] = """WARN: GitHub token is required for some dependencies
 "githubDeps": [
 "camptocamp/backport-action",
 "actions/checkout",
@@ -181,7 +179,8 @@ def test_get_transversal_dashboard() -> None:
 @pytest.mark.parametrize(
-    "other_support, expected_color", [("01/01/2044", "--bs-danger"), ("01/01/2046", "--bs-body-bg")]
+    "other_support, expected_color",
+    [("01/01/2044", "--bs-danger"), ("01/01/2046", "--bs-body-bg")],
 )
 def test_get_transversal_dashboard_repo_forward(other_support: str, expected_color: str) -> None:
     versions = Versions()
@@ -470,7 +469,8 @@ def test_get_transversal_dashboard_repo_forward_inexisting() -> None:
 @pytest.mark.parametrize(
-    "other_support, expected_color", [("01/01/2044", "--bs-body-bg"), ("01/01/2046", "--bs-danger")]
+    "other_support, expected_color",
+    [("01/01/2044", "--bs-body-bg"), ("01/01/2046", "--bs-danger")],
 )
 def test_get_transversal_dashboard_repo_reverse(other_support: str, expected_color: str) -> None:
     versions = Versions()
@@ -787,7 +787,10 @@ def test_update_upstream_versions() -> None:
     )
     responses.get(
         "https://endoflife.date/api/package2.json",
-        json=[{"eol": "2038-12-31", "cycle": "v1.0"}, {"eol": "2039-12-31", "cycle": "v2.0"}],
+        json=[
+            {"eol": "2038-12-31", "cycle": "v1.0"},
+            {"eol": "2039-12-31", "cycle": "v2.0"},
+        ],
         status=200,
     )
@@ -936,7 +939,13 @@ def test_read_dependency() -> None:
            ],
            "nvm": [
                {
-                    "deps": [{"depName": "node", "currentValue": "20", "datasource": "node-version"}],
+                    "deps": [
+                        {
+                            "depName": "node",
+                            "currentValue": "20",
+                            "datasource": "node-version",
+                        }
+                    ],
                    "packageFile": ".nvmrc",
                }
            ],
@@ -976,9 +985,21 @@ def test_read_dependency() -> None:
                        "datasource": "pypi",
                        "currentVersion": "1.8.2",
                    },
-                    {"depName": "certifi", "currentValue": ">=2023.7.22", "datasource": "pypi"},
-                    {"depName": "setuptools", "currentValue": ">=65.5.1", "datasource": "pypi"},
-                    {"depName": "jinja2", "currentValue": ">=3.1.3", "datasource": "pypi"},
+                    {
+                        "depName": "certifi",
+                        "currentValue": ">=2023.7.22",
+                        "datasource": "pypi",
+                    },
+                    {
+                        "depName": "setuptools",
+                        "currentValue": ">=65.5.1",
+                        "datasource": "pypi",
+                    },
+                    {
+                        "depName": "jinja2",
+                        "currentValue": ">=3.1.3",
+                        "datasource": "pypi",
+                    },
                ],
                "packageFile": "ci/requirements.txt",
            },
@@ -1117,7 +1138,8 @@ def test_read_dependency() -> None:
 def test_transversal_status_to_json():
     status = _TransversalStatus(
-        updated={}, repositories={"package1": _TransversalStatusRepo(url="url1", versions={})}
+        updated={},
+        repositories={"package1": _TransversalStatusRepo(url="url1", versions={})},
     )
     module = Versions()
     assert module.transversal_status_to_json(status) == {
diff --git a/tests/test_template.py b/tests/test_template.py
index 3240626d003..461f30c0f33 100644
--- a/tests/test_template.py
+++ b/tests/test_template.py
@@ -1,6 +1,12 @@
 from datetime import datetime, timedelta, timezone
-from github_app_geo_project.templates import markdown, markdown_lib, pprint_date, pprint_duration, sanitizer
+from github_app_geo_project.templates import (
+    markdown,
+    markdown_lib,
+    pprint_date,
+    pprint_duration,
+    sanitizer,
+)
 def test_sanitizer() -> None: