diff --git a/github_app_geo_project/module/audit/__init__.py b/github_app_geo_project/module/audit/__init__.py
index 2f364436606..37a41bca8e9 100644
--- a/github_app_geo_project/module/audit/__init__.py
+++ b/github_app_geo_project/module/audit/__init__.py
@@ -174,97 +174,109 @@ async def _process_snyk_dpkg(
break
_LOGGER.debug("Branch name: %s", branch)
- # Checkout the right branch on a temporary directory
- with tempfile.TemporaryDirectory() as tmpdirname:
- os.chdir(tmpdirname)
- _LOGGER.debug("Clone the repository in the temporary directory: %s", tmpdirname)
- success &= module_utils.git_clone(context.github_project, branch)
- if not success:
- return ["Fail to clone the repository"], success
-
- local_config: configuration.AuditConfiguration = {}
- if context.module_event_data.type in ("snyk", "dpkg"):
- if os.path.exists(".github/ghci.yaml"):
- with open(".github/ghci.yaml", encoding="utf-8") as file:
- local_config = yaml.load(file, Loader=yaml.SafeLoader).get("audit", {})
-
- if context.module_event_data.type == "snyk":
- python_version = ""
- if os.path.exists(".tool-versions"):
- with open(".tool-versions", encoding="utf-8") as file:
- for line in file:
- if line.startswith("python "):
- python_version = ".".join(line.split(" ")[1].split(".")[0:2]).strip()
- break
-
- if python_version:
- env = _use_python_version(python_version)
- else:
- env = os.environ.copy()
-
- logs_url = urllib.parse.urljoin(context.service_url, f"logs/{context.job_id}")
- result, body, short_message, new_success = await audit_utils.snyk(
- branch, context.module_config.get("snyk", {}), local_config.get("snyk", {}), logs_url, env
- )
- success &= new_success
- output_url = _process_error(
- context,
- key,
- issue_check,
- [{"title": m.title, "children": [m.to_html("no-title")]} for m in result],
- ", ".join(short_message),
- )
- message: module_utils.Message = module_utils.HtmlMessage(
-                    "<a href='%s'>Output</a>" % output_url
- )
- message.title = "Output URL"
- _LOGGER.debug(message)
- if output_url is not None:
- short_message.append(f"[See also]({output_url})")
- if body is not None:
- body.html += f"\n\n[See output]({output_url})"
- body.html += f"\n\n[See logs]({logs_url})"
-
- if context.module_event_data.type == "dpkg":
- body = module_utils.HtmlMessage("Update dpkg packages")
-
- if os.path.exists("ci/dpkg-versions.yaml"):
- await audit_utils.dpkg(
- context.module_config.get("dpkg", {}), local_config.get("dpkg", {})
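+        # os.chdir is process-wide, so serialize working-directory changes across concurrent jobs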
+ async with module_utils.WORKING_DIRECTORY_LOCK:
+ # Checkout the right branch on a temporary directory
+ with tempfile.TemporaryDirectory() as tmpdirname:
+ os.chdir(tmpdirname)
+ _LOGGER.debug("Clone the repository in the temporary directory: %s", tmpdirname)
+ success &= module_utils.git_clone(context.github_project, branch)
+ if not success:
+                    return ["Failed to clone the repository"], success
+
+ local_config: configuration.AuditConfiguration = {}
+ if context.module_event_data.type in ("snyk", "dpkg"):
+ if os.path.exists(".github/ghci.yaml"):
+ with open(".github/ghci.yaml", encoding="utf-8") as file:
+ local_config = yaml.load(file, Loader=yaml.SafeLoader).get("audit", {})
+
+ if context.module_event_data.type == "snyk":
+ python_version = ""
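+                    # Pick up the Python major.minor version from .tool-versions, if present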
+ if os.path.exists(".tool-versions"):
+ with open(".tool-versions", encoding="utf-8") as file:
+ for line in file:
+ if line.startswith("python "):
+ python_version = ".".join(line.split(" ")[1].split(".")[0:2]).strip()
+ break
+
+ if python_version:
+ env = _use_python_version(python_version)
+ else:
+ env = os.environ.copy()
+
+ logs_url = urllib.parse.urljoin(context.service_url, f"logs/{context.job_id}")
+ result, body, short_message, new_success = await audit_utils.snyk(
+ branch,
+ context.module_config.get("snyk", {}),
+ local_config.get("snyk", {}),
+ logs_url,
+ env,
)
+ success &= new_success
+ output_url = _process_error(
+ context,
+ key,
+ issue_check,
+ [{"title": m.title, "children": [m.to_html("no-title")]} for m in result],
+ ", ".join(short_message),
+ )
+ message: module_utils.Message = module_utils.HtmlMessage(
+                        "<a href='%s'>Output</a>" % output_url
+ )
+ message.title = "Output URL"
+ _LOGGER.debug(message)
+ if output_url is not None:
+ short_message.append(f"[See also]({output_url})")
+ if body is not None:
+ body.html += f"\n\n[See output]({output_url})"
+ body.html += f"\n\n[See logs]({logs_url})"
+
+ if context.module_event_data.type == "dpkg":
+ body = module_utils.HtmlMessage("Update dpkg packages")
+
+ if os.path.exists("ci/dpkg-versions.yaml"):
+ await audit_utils.dpkg(
+ context.module_config.get("dpkg", {}), local_config.get("dpkg", {})
+ )
- diff_proc = subprocess.run( # nosec # pylint: disable=subprocess-run-check
- ["git", "diff", "--quiet"], timeout=30
- )
- if diff_proc.returncode != 0:
- proc = subprocess.run( # nosec # pylint: disable=subprocess-run-check
- ["git", "diff"], timeout=30, capture_output=True, encoding="utf-8"
- )
- message = module_utils.ansi_proc_message(proc)
- message.title = "Changes to be committed"
- _LOGGER.debug(message)
-
- proc = subprocess.run( # nosec # pylint: disable=subprocess-run-check
- ["git", "checkout", "-b", new_branch], capture_output=True, encoding="utf-8", timeout=30
+ diff_proc = subprocess.run( # nosec # pylint: disable=subprocess-run-check
+ ["git", "diff", "--quiet"], timeout=30
)
- if proc.returncode != 0:
+ if diff_proc.returncode != 0:
+ proc = subprocess.run( # nosec # pylint: disable=subprocess-run-check
+ ["git", "diff"], timeout=30, capture_output=True, encoding="utf-8"
+ )
message = module_utils.ansi_proc_message(proc)
- message.title = "Error while creating the new branch"
- _LOGGER.error(message)
-
- else:
- repo = context.github_project.repo
- new_success, pull_request = await module_utils.create_commit_pull_request(
- branch, new_branch, f"Audit {key}", "" if body is None else body.to_markdown(), repo
+ message.title = "Changes to be committed"
+ _LOGGER.debug(message)
+
+ proc = subprocess.run( # nosec # pylint: disable=subprocess-run-check
+ ["git", "checkout", "-b", new_branch],
+ capture_output=True,
+ encoding="utf-8",
+ timeout=30,
)
- success &= new_success
- if not new_success:
- _LOGGER.error("Error while create commit or pull request")
+ if proc.returncode != 0:
+ message = module_utils.ansi_proc_message(proc)
+ message.title = "Error while creating the new branch"
+ _LOGGER.error(message)
+
else:
- if pull_request is not None:
- issue_check.set_title(key, f"{key} ([Pull request]({pull_request.html_url}))")
- else:
- _LOGGER.debug("No changes to commit")
+ repo = context.github_project.repo
+ new_success, pull_request = await module_utils.create_commit_pull_request(
+ branch,
+ new_branch,
+ f"Audit {key}",
+ "" if body is None else body.to_markdown(),
+ repo,
+ )
+ success &= new_success
+ if not new_success:
+                        _LOGGER.error("Error while creating commit or pull request")
+ else:
+ if pull_request is not None:
+ issue_check.set_title(key, f"{key} ([Pull request]({pull_request.html_url}))")
+ else:
+ _LOGGER.debug("No changes to commit")
except (subprocess.CalledProcessError, subprocess.TimeoutExpired) as proc_error:
message = module_utils.ansi_proc_message(proc_error)
_LOGGER.exception("Audit %s process error", key)
@@ -401,7 +413,7 @@ async def process(
f"{context.github_project.owner}/{context.github_project.repository}",
)
- # If no SECURITY.md apply on main branch
+    # If there is no SECURITY.md, apply on the default branch
key_starts = []
security_file = None
try:
@@ -448,11 +460,9 @@ async def process(
versions = []
if security_file is not None:
assert isinstance(security_file, github.ContentFile.ContentFile)
- security_file = c2cciutils.security.Security(
- security_file.decoded_content.decode("utf-8")
- )
+ security = c2cciutils.security.Security(security_file.decoded_content.decode("utf-8"))
- versions = module_utils.get_stabilization_versions(security_file)
+ versions = module_utils.get_stabilization_versions(security)
else:
_LOGGER.debug("No SECURITY.md file in the repository, apply on default branch")
versions = [repo.default_branch]
diff --git a/github_app_geo_project/module/standard/patch.py b/github_app_geo_project/module/standard/patch.py
index aa2116fe2ff..b408d219016 100644
--- a/github_app_geo_project/module/standard/patch.py
+++ b/github_app_geo_project/module/standard/patch.py
@@ -11,7 +11,7 @@
import requests
from github_app_geo_project import module
-from github_app_geo_project.module import utils
+from github_app_geo_project.module import utils as module_utils
_LOGGER = logging.getLogger(__name__)
@@ -55,7 +55,7 @@ def get_actions(self, context: module.GetActionContext) -> list[module.Action[di
context.event_data.get("action") == "completed"
and context.event_data.get("workflow_run", {}).get("conclusion") == "failure"
):
- return [module.Action(priority=module.PRIORITY_STATUS, data={})]
+ return [module.Action(priority=module.PRIORITY_STANDARD, data={})]
return []
async def process(
@@ -80,97 +80,103 @@ async def process(
should_push = False
result_message = []
- with tempfile.TemporaryDirectory() as tmpdirname:
- os.chdir(tmpdirname)
- if not is_clone:
- success = utils.git_clone(context.github_project, workflow_run.head_branch)
- if not success:
- return module.ProcessOutput(
- success=False,
- output={
- "summary": "Failed to clone the repository, see details on the application for details (link below)"
- },
- )
+ async with module_utils.WORKING_DIRECTORY_LOCK:
+ with tempfile.TemporaryDirectory() as tmpdirname:
+ os.chdir(tmpdirname)
+ if not is_clone:
+ success = module_utils.git_clone(context.github_project, workflow_run.head_branch)
+ if not success:
+ return module.ProcessOutput(
+ success=False,
+ output={
+                                "summary": "Failed to clone the repository, see the application for details (link below)"
+ },
+ )
+
+ for artifact in workflow_run.get_artifacts():
+ if not artifact.name.endswith(".patch"):
+ continue
- for artifact in workflow_run.get_artifacts():
- if not artifact.name.endswith(".patch"):
- continue
-
- if artifact.expired:
- _LOGGER.info("Artifact %s is expired", artifact.name)
- continue
-
- (
- status,
- headers,
- response_redirect,
- ) = workflow_run._requester.requestJson( # pylint: disable=protected-access
- "GET", artifact.archive_download_url
- )
- if status != 302:
- _LOGGER.error(
- "Failed to download artifact %s, status: %s, data:\n%s",
- artifact.name,
+ if artifact.expired:
+ _LOGGER.info("Artifact %s is expired", artifact.name)
+ continue
+
+ (
status,
+ headers,
response_redirect,
+ ) = workflow_run._requester.requestJson( # pylint: disable=protected-access
+ "GET", artifact.archive_download_url
)
- continue
-
- # Follow redirect.
- response = requests.get(headers["location"], timeout=120)
- if not response.ok:
- _LOGGER.error("Failed to download artifact %s", artifact.name)
- continue
-
- # unzip
- with zipfile.ZipFile(io.BytesIO(response.content)) as diff:
- if len(diff.namelist()) != 1:
- _LOGGER.info("Invalid artifact %s", artifact.name)
+ if status != 302:
+ _LOGGER.error(
+ "Failed to download artifact %s, status: %s, data:\n%s",
+ artifact.name,
+ status,
+ response_redirect,
+ )
continue
- with diff.open(diff.namelist()[0]) as file:
- patch_input = file.read().decode("utf-8")
- message: utils.Message = utils.HtmlMessage(patch_input, "Applied the patch input")
- _LOGGER.debug(message)
- if is_clone:
- result_message.extend(["```diff", patch_input, "```"])
- else:
- proc = subprocess.run( # nosec # pylint: disable=subprocess-run-check
- ["patch", "--strip=1"],
- input=patch_input,
- encoding="utf-8",
- capture_output=True,
- timeout=30,
- )
- message = utils.ansi_proc_message(proc)
- if proc.returncode != 0:
- message.title = f"Failed to apply the diff {artifact.name}"
- _LOGGER.warning(message)
- return module.ProcessOutput(
- success=False,
- output={
- "summary": "Failed to apply the diff, you should probably rebase your branch"
- },
- )
- message.title = f"Applied the diff {artifact.name}"
- _LOGGER.info(message)
+ # Follow redirect.
+ response = requests.get(headers["location"], timeout=120)
+ if not response.ok:
+ _LOGGER.error("Failed to download artifact %s", artifact.name)
+ continue
+
+ # unzip
+ with zipfile.ZipFile(io.BytesIO(response.content)) as diff:
+ if len(diff.namelist()) != 1:
+ _LOGGER.info("Invalid artifact %s", artifact.name)
+ continue
- if utils.has_changes(include_un_followed=True):
- success = await utils.create_commit(
- f"{artifact.name[:-6]}\n\nFrom the artifact of the previous workflow run"
+ with diff.open(diff.namelist()[0]) as file:
+ patch_input = file.read().decode("utf-8")
+ message: module_utils.Message = module_utils.HtmlMessage(
+ patch_input, "Applied the patch input"
+ )
+ _LOGGER.debug(message)
+ if is_clone:
+ result_message.extend(["```diff", patch_input, "```"])
+ else:
+ proc = subprocess.run( # nosec # pylint: disable=subprocess-run-check
+ ["patch", "--strip=1"],
+ input=patch_input,
+ encoding="utf-8",
+ capture_output=True,
+ timeout=30,
)
- if not success:
- raise PatchException("Failed to commit the changes, see logs for details")
- should_push = True
- if should_push:
- proc = subprocess.run( # nosec # pylint: disable=subprocess-run-check
- ["git", "push", "origin", f"HEAD:{workflow_run.head_branch}"],
- capture_output=True,
- encoding="utf-8",
- timeout=60,
- )
- if proc.returncode != 0:
- raise PatchException(f"Failed to push the changes{format_process_output(proc)}")
+ message = module_utils.ansi_proc_message(proc)
+ if proc.returncode != 0:
+ message.title = f"Failed to apply the diff {artifact.name}"
+ _LOGGER.warning(message)
+ return module.ProcessOutput(
+ success=False,
+ output={
+ "summary": "Failed to apply the diff, you should probably rebase your branch"
+ },
+ )
+ message.title = f"Applied the diff {artifact.name}"
+ _LOGGER.info(message)
+
+ if module_utils.has_changes(include_un_followed=True):
+ success = await module_utils.create_commit(
+ f"{artifact.name[:-6]}\n\nFrom the artifact of the previous workflow run"
+ )
+ if not success:
+ raise PatchException(
+ "Failed to commit the changes, see logs for details"
+ )
+ should_push = True
+ if should_push:
+ proc = subprocess.run( # nosec # pylint: disable=subprocess-run-check
+ ["git", "push", "origin", f"HEAD:{workflow_run.head_branch}"],
+ capture_output=True,
+ encoding="utf-8",
+ timeout=60,
+ )
+ if proc.returncode != 0:
+ raise PatchException(f"Failed to push the changes{format_process_output(proc)}")
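+            # The temporary directory is removed on context exit; move the cwd out of it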
+ os.chdir("/")
if is_clone and result_message:
return module.ProcessOutput(
success=False,
diff --git a/github_app_geo_project/module/utils.py b/github_app_geo_project/module/utils.py
index 59ef50a53ac..c56fa0c5478 100644
--- a/github_app_geo_project/module/utils.py
+++ b/github_app_geo_project/module/utils.py
@@ -18,6 +18,7 @@
from github_app_geo_project import configuration, models, module
_LOGGER = logging.getLogger(__name__)
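+# Serialize code that changes the process working directory (os.chdir)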
+WORKING_DIRECTORY_LOCK = asyncio.Lock()
def add_output(