From 77e92fe539bf5d2612d47362445f2ca72ac57550 Mon Sep 17 00:00:00 2001
From: Mohammad Twin
Date: Fri, 8 Mar 2024 18:16:12 +0400
Subject: [PATCH 01/13] feat: get all the linked issue of the a PR with the HTML

---
 dags/github/github_api_helpers/__init__.py      |  1 +
 dags/github/github_api_helpers/issues.py        | 16 ++++++++++
 .../github_api_helpers/pull_requests.py         | 31 ++++++++++++++++++-
 requirements.txt                                |  1 +
 4 files changed, 48 insertions(+), 1 deletion(-)

diff --git a/dags/github/github_api_helpers/__init__.py b/dags/github/github_api_helpers/__init__.py
index f709f6e5..a9105240 100644
--- a/dags/github/github_api_helpers/__init__.py
+++ b/dags/github/github_api_helpers/__init__.py
@@ -13,5 +13,6 @@
     get_all_reactions_of_review_comment,
     get_all_review_comments_of_pull_request,
     get_all_reviews_of_pull_request,
+    extract_linked_issues_from_pr
 )
 from .repos import get_all_org_repos, get_all_repo_contributors
diff --git a/dags/github/github_api_helpers/issues.py b/dags/github/github_api_helpers/issues.py
index 44d6f4c6..9325b84e 100644
--- a/dags/github/github_api_helpers/issues.py
+++ b/dags/github/github_api_helpers/issues.py
@@ -3,6 +3,22 @@
 from .smart_proxy import get


+def fetch_issue(owner: str, repo: str, issue_number: int):
+    """
+    Fetches a specific issue from a GitHub repository.
+
+    :param owner: The owner of the repository.
+    :param repo: The name of the repository.
+    :param issue_number: The number of the issue.
+    :return: The issue data.
+    """
+    endpoint = f"https://api.github.com/repos/{owner}/{repo}/issues/{issue_number}"
+    response = get(endpoint)
+    response_data = response.json()
+
+    logging.info(f"Fetched issue {issue_number} for {owner}/{repo}. Issue: {response_data}")
+    return response_data
+
 # Issues
 def fetch_issues(owner: str, repo: str, page: int, per_page: int = 100):
     """
     Fetches the issues for a specific repo page by page.
diff --git a/dags/github/github_api_helpers/pull_requests.py b/dags/github/github_api_helpers/pull_requests.py
index 8a86a4fb..5d9c0488 100644
--- a/dags/github/github_api_helpers/pull_requests.py
+++ b/dags/github/github_api_helpers/pull_requests.py
@@ -1,7 +1,8 @@
 import logging

+from .issues import fetch_issue
 from .smart_proxy import get
-
+from bs4 import BeautifulSoup

 def fetch_pull_requests(owner: str, repo: str, page: int, per_page: int = 100):
     """
@@ -57,6 +58,34 @@ def get_all_pull_requests(owner: str, repo: str):
         )
     return all_pull_requests

+def extract_issue_info_from_url(url):
+    splitted_url = url.split('/')
+    owner = splitted_url[-4]
+    repo = splitted_url[-3]
+    issue_number = splitted_url[-1]
+
+    return {
+        'owner': owner,
+        'repo': repo,
+        'issue_number': issue_number
+    }
+
+def extract_linked_issues_from_pr(owner: str, repo: str, pull_number: int):
+    html_pr_url = f"https://github.com/{owner}/{repo}/pull/{pull_number}"
+    response = get(html_pr_url)
+    linked_issue = []
+
+    soup = BeautifulSoup(response.text, 'html.parser')
+    html_linked_issues = soup.find_all('span', class_='Truncate truncate-with-responsive-width my-1', attrs={"data-view-component": "true"})
+
+    for html_linked_issue in html_linked_issues:
+        issue_url = html_linked_issue.find('a').get('href')
+        issue_data = extract_issue_info_from_url(issue_url)
+        issue_info = fetch_issue(owner, repo, issue_data['issue_number'])
+
+        linked_issue.append(issue_info)
+
+    return linked_issue

 def fetch_pull_requests_commits(
     owner: str, repo: str, pull_number: int, page: int, per_page: int = 100
diff --git a/requirements.txt b/requirements.txt
index f5b8d815..3ce159ad 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -19,3 +19,4 @@ python-dotenv>=1.0.0, <2.0.0
 urlextract==1.8.0
 tc-hivemind-backend==1.1.3
 traceloop-sdk==0.9.4
+beautifulsoup4==4.12.3

From 0c309f65aaecc310cc678932ee2be5f99fe69cf8 Mon Sep 17 00:00:00 2001
From: Mohammad Twin
Date: Fri, 8 Mar 2024 18:25:02 +0400
Subject: [PATCH 02/13] feat: save linked_issue of the PR in neo4j

---
 dags/github/neo4j_storage/neo4j_enums.py   |  1 +
 dags/github/neo4j_storage/pull_requests.py | 13 +++++++++++++
 2 files changed, 14 insertions(+)

diff --git a/dags/github/neo4j_storage/neo4j_enums.py b/dags/github/neo4j_storage/neo4j_enums.py
index 1091bc88..2a0108ec 100644
--- a/dags/github/neo4j_storage/neo4j_enums.py
+++ b/dags/github/neo4j_storage/neo4j_enums.py
@@ -27,3 +27,4 @@ class Relationship(Enum):
     COMMITTED = "COMMITTED"
     IS_ON = "IS_ON"
     CHANGED = "CHANGED"
+    LINKED = "LINKED"
diff --git a/dags/github/neo4j_storage/pull_requests.py b/dags/github/neo4j_storage/pull_requests.py
index 86492b4a..8cc0d05b 100644
--- a/dags/github/neo4j_storage/pull_requests.py
+++ b/dags/github/neo4j_storage/pull_requests.py
@@ -12,6 +12,7 @@ def save_pull_request_to_neo4j(pr: dict, repository_id: str):
     assignees = pr.pop("assignees", None)
     requested_reviewers = pr.pop("requested_reviewers", None)
     labels = pr.pop("labels", None)
+    linked_issues = pr.pop("linked_issues", None)
     cleaned_pr = remove_nested_collections(pr)

     if assignee:
@@ -56,6 +57,16 @@ def save_pull_request_to_neo4j(pr: dict, repository_id: str):
         SET haslb.latestSavedAt = datetime()
     """

+    linked_issues_query = f"""
+        WITH pr
+        UNWIND $linked_issues as linked_issue
+        MERGE (i:{Node.Issue.value} {{id: linked_issue.id}})
+        SET i += linked_issue, i.latestSavedAt = datetime()
+        WITH pr, i
+        MERGE (pr)-[islinked:{Relationship.LINKED.value}]->(i)
+        SET islinked.latestSavedAt = datetime()
+    """
+
     with driver.session() as session:
         session.execute_write(
             lambda tx: tx.run(
@@ -74,6 +85,7 @@ def save_pull_request_to_neo4j(pr: dict, repository_id: str):
                 { assignees_query }
                 { requested_reviewers_query }
                 { labels_query }
+                { linked_issues_query }

             """,
                 pr=cleaned_pr,
@@ -83,6 +95,7 @@ def save_pull_request_to_neo4j(pr: dict, repository_id: str):
                 assignees=assignees,
                 labels=labels,
                 requested_reviewers=requested_reviewers,
+                linked_issues=linked_issues
             )
         )
     driver.close()

From 12df1cfc20becc8ea87f036c36ed69636949fd5e Mon Sep 17 00:00:00 2001
From: Mohammad Twin
Date: Fri, 8 Mar 2024 18:41:12 +0400
Subject: [PATCH 03/13] fix: as pr can connect to other issue

---
 dags/github/github_api_helpers/pull_requests.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dags/github/github_api_helpers/pull_requests.py b/dags/github/github_api_helpers/pull_requests.py
index 5d9c0488..3b988a13 100644
--- a/dags/github/github_api_helpers/pull_requests.py
+++ b/dags/github/github_api_helpers/pull_requests.py
@@ -81,7 +81,7 @@ def extract_linked_issues_from_pr(owner: str, repo: str, pull_number: int):
     for html_linked_issue in html_linked_issues:
         issue_url = html_linked_issue.find('a').get('href')
         issue_data = extract_issue_info_from_url(issue_url)
-        issue_info = fetch_issue(owner, repo, issue_data['issue_number'])
+        issue_info = fetch_issue(issue_data['owner'], issue_data['repo'], issue_data['issue_number'])

         linked_issue.append(issue_info)

From 0025cc0ff4418ff35a58204e74e06ec9c9c25bc4 Mon Sep 17 00:00:00 2001
From: Mohammad Twin
Date: Mon, 11 Mar 2024 10:41:24 +0400
Subject: [PATCH 04/13] fix: updated based on linter issue

---
 dags/github/github_api_helpers/__init__.py      | 4 ++--
 dags/github/github_api_helpers/pull_requests.py | 4 +++-
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/dags/github/github_api_helpers/__init__.py b/dags/github/github_api_helpers/__init__.py
index a9105240..096d75b0 100644
--- a/dags/github/github_api_helpers/__init__.py
+++ b/dags/github/github_api_helpers/__init__.py
@@ -5,6 +5,7 @@
 from .labels import get_all_repo_labels
 from .orgs import fetch_org_details, get_all_org_members
 from .pull_requests import (
+    extract_linked_issues_from_pr,
     get_all_comments_of_pull_request,
     get_all_commits_of_pull_request,
     get_all_pull_request_files,
@@ -12,7 +13,6 @@
     get_all_reactions_of_comment,
     get_all_reactions_of_review_comment,
     get_all_review_comments_of_pull_request,
-    get_all_reviews_of_pull_request,
-    extract_linked_issues_from_pr
+    get_all_reviews_of_pull_request
 )
 from .repos import get_all_org_repos, get_all_repo_contributors
diff --git a/dags/github/github_api_helpers/pull_requests.py b/dags/github/github_api_helpers/pull_requests.py
index 3b988a13..9bd56b1d 100644
--- a/dags/github/github_api_helpers/pull_requests.py
+++ b/dags/github/github_api_helpers/pull_requests.py
@@ -1,8 +1,10 @@
 import logging

+from bs4 import BeautifulSoup
+
 from .issues import fetch_issue
 from .smart_proxy import get
-from bs4 import BeautifulSoup

+
 def fetch_pull_requests(owner: str, repo: str, page: int, per_page: int = 100):
     """

From 47f8b16aae65ca5968fd1c48dbcd34fab50e3b0f Mon Sep 17 00:00:00 2001
From: Mohammad Twin
Date: Mon, 11 Mar 2024 13:15:59 +0400
Subject: [PATCH 05/13] fix

---
 dags/github/github_api_helpers/issues.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/dags/github/github_api_helpers/issues.py b/dags/github/github_api_helpers/issues.py
index 9325b84e..50ad7e94 100644
--- a/dags/github/github_api_helpers/issues.py
+++ b/dags/github/github_api_helpers/issues.py
@@ -20,6 +20,7 @@ def fetch_issue(owner: str, repo: str, issue_number: int):
     return response_data

 # Issues
+
 def fetch_issues(owner: str, repo: str, page: int, per_page: int = 100):
     """
     Fetches the issues for a specific repo page by page.

From 93fa335937664fe77b34143976c10952ab139151 Mon Sep 17 00:00:00 2001
From: Mohammad Twin
Date: Mon, 11 Mar 2024 13:32:02 +0400
Subject: [PATCH 06/13] fix: fix lint issue

---
 dags/github/github_api_helpers/__init__.py      | 2 +-
 dags/github/github_api_helpers/issues.py        | 2 +-
 dags/github/github_api_helpers/pull_requests.py | 9 +++++++--
 3 files changed, 9 insertions(+), 4 deletions(-)

diff --git a/dags/github/github_api_helpers/__init__.py b/dags/github/github_api_helpers/__init__.py
index 096d75b0..52f9326d 100644
--- a/dags/github/github_api_helpers/__init__.py
+++ b/dags/github/github_api_helpers/__init__.py
@@ -13,6 +13,6 @@
     get_all_reactions_of_comment,
     get_all_reactions_of_review_comment,
     get_all_review_comments_of_pull_request,
-    get_all_reviews_of_pull_request
+    get_all_reviews_of_pull_request,
 )
 from .repos import get_all_org_repos, get_all_repo_contributors
diff --git a/dags/github/github_api_helpers/issues.py b/dags/github/github_api_helpers/issues.py
index 50ad7e94..373f23bd 100644
--- a/dags/github/github_api_helpers/issues.py
+++ b/dags/github/github_api_helpers/issues.py
@@ -19,8 +19,8 @@ def fetch_issue(owner: str, repo: str, issue_number: int):
     logging.info(f"Fetched issue {issue_number} for {owner}/{repo}. Issue: {response_data}")
     return response_data

-# Issues

+# Issues
 def fetch_issues(owner: str, repo: str, page: int, per_page: int = 100):
     """
     Fetches the issues for a specific repo page by page.
diff --git a/dags/github/github_api_helpers/pull_requests.py b/dags/github/github_api_helpers/pull_requests.py
index 9bd56b1d..eb8803b8 100644
--- a/dags/github/github_api_helpers/pull_requests.py
+++ b/dags/github/github_api_helpers/pull_requests.py
@@ -60,6 +60,7 @@ def get_all_pull_requests(owner: str, repo: str):
         )
     return all_pull_requests

+
 def extract_issue_info_from_url(url):
     splitted_url = url.split('/')
     owner = splitted_url[-4]
@@ -72,14 +73,17 @@ def extract_issue_info_from_url(url):
         'issue_number': issue_number
     }

+
 def extract_linked_issues_from_pr(owner: str, repo: str, pull_number: int):
     html_pr_url = f"https://github.com/{owner}/{repo}/pull/{pull_number}"
     response = get(html_pr_url)
     linked_issue = []

     soup = BeautifulSoup(response.text, 'html.parser')
-    html_linked_issues = soup.find_all('span', class_='Truncate truncate-with-responsive-width my-1', attrs={"data-view-component": "true"})
-
+    html_linked_issues = soup.find_all('span',
+        class_='Truncate truncate-with-responsive-width my-1',
+        attrs={"data-view-component": "true"}
+    )
     for html_linked_issue in html_linked_issues:
         issue_url = html_linked_issue.find('a').get('href')
         issue_data = extract_issue_info_from_url(issue_url)
         linked_issue.append(issue_info)
+
     return linked_issue

From 70f866a68182997f6513aa728d73924887429a6c Mon Sep 17 00:00:00 2001
From: Mohammad Twin
Date: Mon, 11 Mar 2024 14:15:21 +0400
Subject: [PATCH 07/13] fix: fix lint issue

---
 dags/github/github_api_helpers/issues.py   |  4 +++-
 .../github_api_helpers/pull_requests.py    | 24 +++++++++----------
 dags/github/neo4j_storage/pull_requests.py |  2 +-
 3 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/dags/github/github_api_helpers/issues.py b/dags/github/github_api_helpers/issues.py
index 373f23bd..3ae41746 100644
--- a/dags/github/github_api_helpers/issues.py
+++ b/dags/github/github_api_helpers/issues.py
@@ -16,7 +16,9 @@ def fetch_issue(owner: str, repo: str, issue_number: int):
     response = get(endpoint)
     response_data = response.json()

-    logging.info(f"Fetched issue {issue_number} for {owner}/{repo}. Issue: {response_data}")
+    logging.info(
+        f"Fetched issue {issue_number} for {owner}/{repo}. Issue: {response_data}"
+    )
     return response_data
diff --git a/dags/github/github_api_helpers/pull_requests.py b/dags/github/github_api_helpers/pull_requests.py
index eb8803b8..dd750c84 100644
--- a/dags/github/github_api_helpers/pull_requests.py
+++ b/dags/github/github_api_helpers/pull_requests.py
@@ -62,16 +62,12 @@ def get_all_pull_requests(owner: str, repo: str):


 def extract_issue_info_from_url(url):
-    splitted_url = url.split('/')
+    splitted_url = url.split("/")
     owner = splitted_url[-4]
     repo = splitted_url[-3]
     issue_number = splitted_url[-1]

-    return {
-        'owner': owner,
-        'repo': repo,
-        'issue_number': issue_number
-    }
+    return {"owner": owner, "repo": repo, "issue_number": issue_number}


 def extract_linked_issues_from_pr(owner: str, repo: str, pull_number: int):
@@ -79,19 +75,21 @@ def extract_linked_issues_from_pr(owner: str, repo: str, pull_number: int):
     response = get(html_pr_url)
     linked_issue = []

-    soup = BeautifulSoup(response.text, 'html.parser')
-    html_linked_issues = soup.find_all('span',
-        class_='Truncate truncate-with-responsive-width my-1',
-        attrs={"data-view-component": "true"}
-    )
+    soup = BeautifulSoup(response.text, "html.parser")
+    html_linked_issues = soup.find_all(
+        "span",
+        class_="Truncate truncate-with-responsive-width my-1",
+        attrs={"data-view-component": "true"},
+    )
     for html_linked_issue in html_linked_issues:
         issue_url = html_linked_issue.find('a').get('href')
         issue_data = extract_issue_info_from_url(issue_url)
-        issue_info = fetch_issue(issue_data['owner'], issue_data['repo'], issue_data['issue_number'])
+        issue_info = fetch_issue(
+            issue_data['owner'], issue_data['repo'], issue_data['issue_number']
+        )

         linked_issue.append(issue_info)
-
     return linked_issue
diff --git a/dags/github/neo4j_storage/pull_requests.py b/dags/github/neo4j_storage/pull_requests.py
index 8cc0d05b..588ee259 100644
--- a/dags/github/neo4j_storage/pull_requests.py
+++ b/dags/github/neo4j_storage/pull_requests.py
@@ -95,7 +95,7 @@ def save_pull_request_to_neo4j(pr: dict, repository_id: str):
                 assignees=assignees,
                 labels=labels,
                 requested_reviewers=requested_reviewers,
-                linked_issues=linked_issues
+                linked_issues=linked_issues,
             )
         )
     driver.close()

From 15fb7295adae905a929fc0f24810e4fbd9335501 Mon Sep 17 00:00:00 2001
From: Mohammad Twin
Date: Mon, 11 Mar 2024 14:41:21 +0400
Subject: [PATCH 08/13] fix: fix lint issue

---
 dags/github/github_api_helpers/pull_requests.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/dags/github/github_api_helpers/pull_requests.py b/dags/github/github_api_helpers/pull_requests.py
index dd750c84..aaf41654 100644
--- a/dags/github/github_api_helpers/pull_requests.py
+++ b/dags/github/github_api_helpers/pull_requests.py
@@ -92,6 +92,7 @@ def extract_linked_issues_from_pr(owner: str, repo: str, pull_number: int):
         linked_issue.append(issue_info)
     return linked_issue

+
 def fetch_pull_requests_commits(
     owner: str, repo: str, pull_number: int, page: int, per_page: int = 100
 ):

From 9f56633ba09af6752de29651b8d8e42104b9542b Mon Sep 17 00:00:00 2001
From: Mohammad Twin
Date: Mon, 11 Mar 2024 14:43:40 +0400
Subject: [PATCH 09/13] fix: lint issue fixed

---
 dags/github/github_api_helpers/pull_requests.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/dags/github/github_api_helpers/pull_requests.py b/dags/github/github_api_helpers/pull_requests.py
index aaf41654..3463f81e 100644
--- a/dags/github/github_api_helpers/pull_requests.py
+++ b/dags/github/github_api_helpers/pull_requests.py
@@ -82,10 +82,10 @@ def extract_linked_issues_from_pr(owner: str, repo: str, pull_number: int):
         attrs={"data-view-component": "true"},
     )
     for html_linked_issue in html_linked_issues:
-        issue_url = html_linked_issue.find('a').get('href')
+        issue_url = html_linked_issue.find("a").get("href")
         issue_data = extract_issue_info_from_url(issue_url)
         issue_info = fetch_issue(
-            issue_data['owner'], issue_data['repo'], issue_data['issue_number']
+            issue_data["owner"], issue_data["repo"], issue_data["issue_number"]
         )

         linked_issue.append(issue_info)

From bc122dadc9d79b0545b76dd791e7cd2d96c7491b Mon Sep 17 00:00:00 2001
From: Mohammad Twin
Date: Mon, 11 Mar 2024 19:12:57 +0400
Subject: [PATCH 10/13] feat: extract pull request is part of the main flow

---
 dags/github.py | 21 ++++++++++++++++++++-
 1 file changed, 20 insertions(+), 1 deletion(-)

diff --git a/dags/github.py b/dags/github.py
index 1e949097..72520c59 100644
--- a/dags/github.py
+++ b/dags/github.py
@@ -37,6 +37,7 @@
     get_all_repo_labels,
     get_all_repo_review_comments,
     get_all_reviews_of_pull_request,
+    extract_linked_issues_from_pr
 )
 from github.neo4j_storage import (
     get_orgs_profile_from_neo4j,
@@ -181,6 +182,23 @@ def extract_pull_requests(data):
         new_data = {"prs": prs, **data}
         return new_data

+    @task
+    def extract_pull_request_linked_issues(data):
+        logging.info(f"All data from last stage: {data}")
+        repo = data["repo"]
+        prs = data["prs"]
+        owner = repo["owner"]["login"]
+        repo_name = repo["name"]
+
+        new_prs = []
+        for pr in prs:
+            pr_number = pr["number"]
+            linked_issues = extract_linked_issues_from_pr(owner=owner, repo=repo_name, pull_number=pr_number)
+            new_prs.append({ **pr, "linked_issues": linked_issues })
+
+        new_data = { **data, "prs": new_prs }
+        return new_data
+
     @task
     def transform_pull_requests(data):
         logging.info(f"All data from last stage: {data}")
@@ -516,7 +534,8 @@ def load_commits_files_changes(data):
     load_label = load_labels.expand(data=transform_label)

     prs = extract_pull_requests.expand(data=repos)
-    transform_prs = transform_pull_requests.expand(data=prs)
+    prs_linked_issues = extract_pull_request_linked_issues.expand(data=prs)
+    transform_prs = transform_pull_requests.expand(data=prs_linked_issues)
     load_prs = load_pull_requests.expand(data=transform_prs)
     load_contributors >> load_prs
     load_label >> load_prs

From 51d9fb8425e3251a04c3eccd417e9ae11c88993e Mon Sep 17 00:00:00 2001
From: Mohammad Twin
Date: Mon, 11 Mar 2024 19:22:41 +0400
Subject: [PATCH 11/13] feat: reorder the task

---
 dags/github.py | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/dags/github.py b/dags/github.py
index 72520c59..126b39d3 100644
--- a/dags/github.py
+++ b/dags/github.py
@@ -533,12 +533,19 @@ def load_commits_files_changes(data):
     transform_label = transform_labels.expand(data=labels)
     load_label = load_labels.expand(data=transform_label)

+    issues = extract_issues.expand(data=repos)
+    transform_issue = transform_issues.expand(data=issues)
+    load_issue = load_issues.expand(data=transform_issue)
+    load_contributors >> load_issue
+    load_label >> load_issue
+
     prs = extract_pull_requests.expand(data=repos)
     prs_linked_issues = extract_pull_request_linked_issues.expand(data=prs)
     transform_prs = transform_pull_requests.expand(data=prs_linked_issues)
     load_prs = load_pull_requests.expand(data=transform_prs)
     load_contributors >> load_prs
     load_label >> load_prs
+    load_issue >> load_prs

     pr_files_changes = extract_pull_request_files_changes.expand(data=prs)
     transform_pr_files_changes = transform_pull_request_files_changes.expand(
@@ -548,12 +555,6 @@ def load_commits_files_changes(data):
         data=transform_pr_files_changes
     )

-    issues = extract_issues.expand(data=repos)
-    transform_issue = transform_issues.expand(data=issues)
-    load_issue = load_issues.expand(data=transform_issue)
-    load_contributors >> load_issue
-    load_label >> load_issue
-
     pr_reviews = extract_pr_review.expand(data=prs)
     transform_pr_review = transform_pr_review.expand(data=pr_reviews)
     load_pr_review = load_pr_review.expand(data=transform_pr_review)

From c599bf9937fdc00962f11aad72adef1cc00a2235 Mon Sep 17 00:00:00 2001
From: Mohammad Twin
Date: Wed, 13 Mar 2024 16:57:22 +0400
Subject: [PATCH 12/13] fix: lint issue fixed

---
 dags/github.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/dags/github.py b/dags/github.py
index 126b39d3..31cceb8f 100644
--- a/dags/github.py
+++ b/dags/github.py
@@ -24,6 +24,7 @@
 from airflow import DAG
 from airflow.decorators import task
 from github.github_api_helpers import (
+    extract_linked_issues_from_pr,
     fetch_commit_files,
     fetch_org_details,
     get_all_commits,
@@ -37,7 +38,6 @@
     get_all_repo_labels,
     get_all_repo_review_comments,
     get_all_reviews_of_pull_request,
-    extract_linked_issues_from_pr
 )
 from github.neo4j_storage import (
     get_orgs_profile_from_neo4j,
@@ -194,9 +194,9 @@ def extract_pull_request_linked_issues(data):
         for pr in prs:
             pr_number = pr["number"]
             linked_issues = extract_linked_issues_from_pr(owner=owner, repo=repo_name, pull_number=pr_number)
-            new_prs.append({ **pr, "linked_issues": linked_issues })
+            new_prs.append({**pr, "linked_issues": linked_issues})

-        new_data = { **data, "prs": new_prs }
+        new_data = {**data, "prs": new_prs}
         return new_data

From 4b8080e8bef3bc09b5ed1d7a90c45cc47eb035db Mon Sep 17 00:00:00 2001
From: Mohammad Twin
Date: Wed, 13 Mar 2024 17:23:50 +0400
Subject: [PATCH 13/13] fix: lint issue fixed

---
 dags/github.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/dags/github.py b/dags/github.py
index 31cceb8f..2d1bca37 100644
--- a/dags/github.py
+++ b/dags/github.py
@@ -193,7 +193,9 @@ def extract_pull_request_linked_issues(data):
         new_prs = []
         for pr in prs:
             pr_number = pr["number"]
-            linked_issues = extract_linked_issues_from_pr(owner=owner, repo=repo_name, pull_number=pr_number)
+            linked_issues = extract_linked_issues_from_pr(
+                owner=owner, repo=repo_name, pull_number=pr_number
+            )
             new_prs.append({**pr, "linked_issues": linked_issues})

         new_data = {**data, "prs": new_prs}
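
Taken together, the helpers in this series work around the fact that the GitHub REST API does not expose a pull request's "linked issues" directly: the DAG downloads the PR's HTML page, picks the linked-issue anchors out of the sidebar markup, and then resolves each referenced issue through the issues API. The sketch below condenses that flow into one standalone function as an illustration only; it is not code from the patches, it assumes the requests library in place of the repo's smart_proxy.get helper, and, like the patched code, it relies on GitHub's current CSS class names, which can change without notice.

# Illustrative sketch, not part of the patch series.
# Assumes `requests` in place of the DAG's smart_proxy.get helper.
import requests
from bs4 import BeautifulSoup


def linked_issues_of_pr(owner: str, repo: str, pull_number: int) -> list[dict]:
    """Scrape a PR's HTML page and fetch every issue linked to it via the API."""
    html = requests.get(f"https://github.com/{owner}/{repo}/pull/{pull_number}").text
    soup = BeautifulSoup(html, "html.parser")

    # Same selector the patches use for the "Linked issues" sidebar entries.
    spans = soup.find_all(
        "span",
        class_="Truncate truncate-with-responsive-width my-1",
        attrs={"data-view-component": "true"},
    )

    issues = []
    for span in spans:
        href = span.find("a").get("href")  # e.g. "/some-owner/some-repo/issues/42"
        parts = href.split("/")
        issue_owner, issue_repo, issue_number = parts[-4], parts[-3], parts[-1]

        # Fetch through the REST API; using the owner/repo parsed from the link
        # keeps cross-repository references working (the fix in PATCH 03).
        api_url = f"https://api.github.com/repos/{issue_owner}/{issue_repo}/issues/{issue_number}"
        issues.append(requests.get(api_url).json())

    return issues

In the DAG the same result is attached to each PR dict under the "linked_issues" key and handed to save_pull_request_to_neo4j, whose Cypher (PATCH 02) MERGEs an Issue node per entry plus a LINKED relationship from the pull request to it.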