Commit d500b29

Merge branch 'contrib/SEKOIA-IO_Add/SekoiaXDR' into Add/SekoiaXDR

TOUFIKIzakarya authored Jul 10, 2024
2 parents 2333e91 + e795a04 commit d500b29
Showing 947 changed files with 17,648 additions and 6,679 deletions.
2 changes: 1 addition & 1 deletion .github/content_roles.json
@@ -1,6 +1,6 @@
{
"CONTRIBUTION_REVIEWERS": [
"MosheEichler",
"barryyosi-panw",
"samuelFain",
"israelpoli"
],
119 changes: 92 additions & 27 deletions .github/github_workflow_scripts/handle_external_pr.py
@@ -1,6 +1,5 @@
#!/usr/bin/env python3
import json
import os
from pathlib import Path
import sys
import urllib3
@@ -9,7 +8,7 @@
from git import Repo
from github.PullRequest import PullRequest
from github.Repository import Repository
from demisto_sdk.commands.common.tools import get_pack_metadata
from demisto_sdk.commands.common.tools import get_pack_metadata, get_yaml
from demisto_sdk.commands.content_graph.objects.base_content import BaseContent
from demisto_sdk.commands.content_graph.objects.integration import Integration
from demisto_sdk.commands.common.content_constant_paths import CONTENT_PATH
@@ -21,8 +20,9 @@
timestamped_print,
Checkout,
get_content_reviewers,
get_support_level,
get_content_roles,
get_support_level
get_metadata
)
from demisto_sdk.commands.common.tools import get_pack_name
from urllib3.exceptions import InsecureRequestWarning
@@ -167,7 +167,8 @@ def packs_to_check_in_pr(file_paths: list[str]) -> set:
return pack_dirs_to_check


def get_packs_support_level_label(file_paths: list[str], external_pr_branch: str, repo_name: str = 'content') -> str:
def get_packs_support_level_label(file_paths: list[str], external_pr_branch: str, remote_fork_owner: str,
repo_name: str = 'content') -> str:
"""
Get the contributions' support level label.
@@ -183,6 +184,7 @@ def get_packs_support_level_label(file_paths: list[str], external_pr_branch: str
Args:
file_paths(str): file paths
external_pr_branch (str): the branch of the external PR.
remote_fork_owner: the remote fork owner
repo_name(str): the name of the forked repo (without the owner)
Returns:
@@ -198,12 +200,11 @@ def get_packs_support_level_label(file_paths: list[str], external_pr_branch: str
f'to retrieve support level of {pack_dirs_to_check_support_levels_labels}'
)
try:
fork_owner = os.getenv('GITHUB_ACTOR')
with Checkout(
repo=Repo(Path().cwd(), search_parent_directories=True),
branch_to_checkout=external_pr_branch,
# in marketplace contributions the name of the owner should be xsoar-contrib
fork_owner=fork_owner if fork_owner != 'xsoar-bot' else 'xsoar-contrib',
fork_owner=remote_fork_owner if remote_fork_owner != 'xsoar-bot' else 'xsoar-contrib',
repo_name=repo_name
):
packs_support_levels = get_support_level(pack_dirs_to_check_support_levels_labels)
@@ -253,35 +254,96 @@ def is_requires_security_reviewer(pr_files: list[str]) -> bool:
return False


def is_tim_content(pr_files: list[str]) -> bool:
def check_if_item_is_tim(content_object: BaseContent | None) -> bool:
"""
This is where the actual search for feed: True or the relevant tags or categories is performed,
according to the logic in is_tim_reviewer_needed
Checks whether a given object (graph object) is a feed or related to TIM
Arguments:
- pr_files: List[str] The list of files changed in the Pull Request.
- `content_object`: ``BaseContent``: Content object taken from the graph
Returns: True or False, whether a TIM reviewer is needed
Returns: `bool` whether the content object is a feed or has the relevant tags/categories
"""
integrations_checked = []
for file in pr_files:
if 'CONTRIBUTORS.json' in file:
continue
integration = BaseContent.from_path(CONTENT_PATH / file)
if not isinstance(integration, Integration) or integration.path in integrations_checked:
continue
integrations_checked.append(integration.path)
if integration.is_feed:
return True
pack = integration.in_pack
if isinstance(content_object, Integration) and content_object.is_feed:
return True
try:
pack = content_object.in_pack # type: ignore
tags = pack.tags
categories = pack.categories
if TIM_TAGS in tags or TIM_CATEGORIES in categories:
return True
    except Exception as er:
        print(f"The pack is not TIM: {er}")
    return False


def check_files_of_pr_manually(pr_files: list[str]) -> bool:
"""
    If the checkout of the branch has failed, this function will go over the files and check whether the contribution
    needs to be reviewed by a TIM owner.
    Arguments:
    - `pr_files`: ``List[str]``: The list of files changed in the Pull Request. Will be used to determine
    whether a TIM reviewer is required for the review.
    Returns: `bool` whether a TIM reviewer should be assigned
"""
pack_dirs_to_check = packs_to_check_in_pr(pr_files)
pack_metadata_list = get_metadata(pack_dirs_to_check)
for file in pr_files:
if "yml" in file and "Integrations" in file:
content_yml = get_yaml(file_path=file)
            is_feed = content_yml.get("script", {}).get("feed", False)
print(f'Is it a feed: {is_feed}')
if is_feed:
return True
for pack_metadata in pack_metadata_list:
print(f'the metadata is: {pack_metadata}')
        tags = pack_metadata.get("tags", [])
        categories = pack_metadata.get("categories", [])
        if TIM_TAGS in tags or TIM_CATEGORIES in categories:
return True
return False


def is_tim_reviewer_needed(pr_files: list[str], support_label: str) -> bool:
def is_tim_content(pr_files: list[str], external_pr_branch: str, remote_fork_owner: str, repo_name: str) -> bool:
"""
    Checks whether the changed content is TIM-related, even when the pack is new and not yet part of master.
    The remote branch is checked out first and then inspected for the relevant data.
    Arguments:
    - `pr_files`: ``List[str]``: The list of files changed in the Pull Request. Will be used to determine
    whether a TIM reviewer is required for the review.
    - `external_pr_branch` (str): name of the branch to check out
    - `remote_fork_owner` (str): name of the remote owner for the checkout
    - `repo_name` (str): name of the repository
    Returns: `bool` whether a TIM reviewer should be assigned
"""
try:
with Checkout(
repo=Repo(Path().cwd(), search_parent_directories=True),
branch_to_checkout=external_pr_branch,
# in marketplace contributions the name of the owner should be xsoar-contrib
fork_owner=remote_fork_owner if remote_fork_owner != 'xsoar-bot' else 'xsoar-contrib',
repo_name=repo_name
):
for file in pr_files:
if 'CONTRIBUTORS.json' in file or 'Author_image' in file or 'README.md' in file or ".pack-ignore" in file:
continue
content_object = BaseContent.from_path(CONTENT_PATH / file)
is_tim_needed = check_if_item_is_tim(content_object)
if is_tim_needed:
return True
except Exception as er:
print(f"couldn't checkout branch to get metadata, error is {er}")
# if the checkout didn't work for any reason, will try to go over files manually
return check_files_of_pr_manually(pr_files)
return False


def is_tim_reviewer_needed(pr_files: list[str], support_label: str, external_pr_branch: str,
remote_fork_owner: str, repo_name: str) -> bool:
"""
    Checks whether the PR needs to be reviewed by a TIM reviewer.
    It checks the yml file of the integration - if it has feed: True
@@ -291,11 +353,14 @@ def is_tim_reviewer_needed(pr_files: list[str], support_label: str) -> bool:
Arguments:
    - pr_files: The list of files changed in the Pull Request
    - support_label: the support label of the PR - the highest one.
    - `external_pr_branch` (str): name of the external branch to check out
    - `remote_fork_owner` (str): name of the remote owner for the checkout
    - `repo_name` (str): name of the external repository
    Returns: True or False, whether a TIM reviewer is needed
"""
if support_label in (XSOAR_SUPPORT_LEVEL_LABEL, PARTNER_SUPPORT_LEVEL_LABEL):
return is_tim_content(pr_files)
return is_tim_content(pr_files, external_pr_branch, remote_fork_owner, repo_name)
return False


@@ -427,9 +492,9 @@ def main():

pr_files = [file.filename for file in pr.get_files()]
print(f'{pr_files=} for {pr_number=}')

remote_fork_owner = pr.head.repo.full_name.split('/')[0]
labels_to_add = [CONTRIBUTION_LABEL, EXTERNAL_LABEL]
if support_label := get_packs_support_level_label(pr_files, pr.head.ref, repo_name):
if support_label := get_packs_support_level_label(pr_files, pr.head.ref, remote_fork_owner, repo_name):
labels_to_add.append(support_label)

# Add the initial labels to PR:
@@ -489,7 +554,7 @@ def main():
pr.add_to_labels(SECURITY_LABEL)

# adding TIM reviewer
if is_tim_reviewer_needed(pr_files, support_label):
if is_tim_reviewer_needed(pr_files, support_label, pr.head.ref, remote_fork_owner, repo_name):
reviewers.append(tim_reviewer)
pr.add_to_labels(TIM_LABEL)

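For orientation, a minimal sketch of how the reworked routing fits together, mirroring main() above. This is illustrative only, not part of the commit: the file list, branch, and owner values are made up, and the functions are assumed to be in scope from handle_external_pr.py.

# Hypothetical stand-ins for what main() derives from the PR object.
pr_files = ['Packs/ExamplePack/Integrations/ExampleFeed/ExampleFeed.yml']  # assumed
external_pr_branch = 'contrib-example-branch'  # assumed: pr.head.ref
remote_fork_owner = 'example-user'  # assumed: pr.head.repo.full_name.split('/')[0]
repo_name = 'content'

# The support label is resolved first and gates the TIM check below.
if support_label := get_packs_support_level_label(
    pr_files, external_pr_branch, remote_fork_owner, repo_name
):
    # Only xsoar/partner supported content is routed to a TIM reviewer; the
    # checkout arguments let is_tim_content() inspect the contributor's branch.
    if is_tim_reviewer_needed(pr_files, support_label, external_pr_branch,
                              remote_fork_owner, repo_name):
        print('assigning the TIM reviewer and adding the TIM label')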
50 changes: 37 additions & 13 deletions .github/github_workflow_scripts/utils.py
@@ -124,19 +124,21 @@ def __init__(self, repo: git.Repo, branch_to_checkout: str, fork_owner: str | No
self.repo.create_remote(name=forked_remote_name, url=url)
print(f'Successfully created remote {forked_remote_name} for repo {url}') # noqa: T201
except Exception as error:
print(f'could not create remote from {url}, {error=}') # noqa: T201
# handle the case where the name of the forked repo is not content
if github_event_path := os.getenv("GITHUB_EVENT_PATH"):
try:
payload = json.loads(github_event_path)
except ValueError:
print('failed to load GITHUB_EVENT_PATH') # noqa: T201
raise ValueError(f'cannot checkout to the forked branch {branch_to_checkout} of the owner {fork_owner}')
# forked repo name includes fork_owner + repo name, for example foo/content.
forked_repo_name = payload.get("pull_request", {}).get("head", {}).get("repo", {}).get("full_name")
self.repo.create_remote(name=forked_remote_name, url=f"https://github.com/{forked_repo_name}")
else:
raise
if f'{forked_remote_name} already exists' not in str(error):
print(f'could not create remote from {url}, {error=}') # noqa: T201
# handle the case where the name of the forked repo is not content
if github_event_path := os.getenv("GITHUB_EVENT_PATH"):
try:
                    with open(github_event_path) as event_file:
                        payload = json.load(event_file)
except ValueError:
print('failed to load GITHUB_EVENT_PATH') # noqa: T201
raise ValueError(f'cannot checkout to the forked branch {branch_to_checkout} of the '
f'owner {fork_owner}')
# forked repo name includes fork_owner + repo name, for example foo/content.
forked_repo_name = payload.get("pull_request", {}).get("head", {}).get("repo", {}).get("full_name")
self.repo.create_remote(name=forked_remote_name, url=f"https://github.com/{forked_repo_name}")
else:
raise

forked_remote = self.repo.remote(forked_remote_name)
forked_remote.fetch(branch_to_checkout)
@@ -327,3 +329,25 @@ def get_repo_path(path: str = ".") -> Path:
except (git.exc.InvalidGitRepositoryError, ValueError):
print("Unable to get repo root path. Terminating...")
sys.exit(1)


def get_metadata(pack_dirs: set[str]) -> list[dict]:
"""
Get the pack metadata.
Args:
pack_dirs (set): paths to the packs that were changed
    Return:
        - a list of pack metadata dictionaries
"""
pack_metadata_list = []

for pack_dir in pack_dirs:
if pack_metadata := get_pack_metadata(pack_dir):
print(f"pack metadata was retrieved for pack {pack_dir}") # noqa: T201
pack_metadata_list.append(pack_metadata)
else:
            print(f'Could not find pack metadata for pack {pack_dir}')  # noqa: T201

return pack_metadata_list
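
A short usage sketch for the new helper (illustrative only; the pack directories are hypothetical and get_metadata is assumed to be imported from this utils module):

pack_dirs = {'Packs/ExamplePack', 'Packs/AnotherPack'}  # hypothetical
for pack_metadata in get_metadata(pack_dirs):
    # The tags and categories returned here drive the manual TIM fallback
    # check in handle_external_pr.py when the branch checkout fails.
    print(pack_metadata.get('name'), pack_metadata.get('tags', []), pack_metadata.get('categories', []))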
38 changes: 38 additions & 0 deletions .github/workflows/protect-files.yml
@@ -0,0 +1,38 @@
name: Protect Infrastructure Files

on:
pull_request:
types:
- opened
- synchronize

jobs:
protect-infra-files:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 100 # to speed up. changed-files will fetch more if necessary

- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.x"

- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@v44 # disable-secrets-detection
with:
files: |
.gitlab/ci/.gitlab-ci.yml
- name: Prevent changing protected files
if: steps.changed-files.outputs.any_changed == 'true'
env:
FILES: ${{ steps.changed-files.outputs.all_changed_files }}
run: |
for file in ${FILES}; do
echo "::error file=$file,line=1,endLine=1,title=Protected file modified::This file should not be changed in master.%0AIf you are sure it's necessary, ask for a force merge and explain the rationale."
done
exit 1
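
The failing step relies on GitHub's ::error workflow command: a specially formatted stdout line that the runner turns into an inline annotation on the PR (%0A is the escaped newline inside the message). A rough Python equivalent of the shell loop, for illustration only, with the changed-file list hard-coded where the workflow receives it from tj-actions/changed-files:

import sys

protected_files_changed = ['.gitlab/ci/.gitlab-ci.yml']  # hypothetical input

for file in protected_files_changed:
    # The runner parses '::error file=...::message' lines into PR annotations.
    print(f'::error file={file},line=1,endLine=1,title=Protected file modified::'
          'This file should not be changed in master.')
if protected_files_changed:
    sys.exit(1)  # fail the job so the required check goes red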
@@ -1,4 +1,4 @@
name: Protect Infrastructure Directories
name: Protect Infrastructure Directories (Contributions)

on:
pull_request:
@@ -7,7 +7,7 @@ on:
- synchronize

jobs:
check_changes:
protect-non-packs:
runs-on: ubuntu-latest
if: 'startsWith(github.head_ref, ''contrib'') || (github.event.pull_request.head.repo.fork == true && contains(github.event.pull_request.base.ref, ''contrib''))'
steps:
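The job-level if: expression above restricts this check to contribution flows; restated as Python pseudologic for clarity (illustrative only, not part of the commit):

def should_run(head_ref: str, base_ref: str, head_repo_is_fork: bool) -> bool:
    # Mirrors: startsWith(github.head_ref, 'contrib')
    #          || (head repo is a fork && contains(base.ref, 'contrib'))
    return head_ref.startswith('contrib') or (head_repo_is_fork and 'contrib' in base_ref)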
4 changes: 2 additions & 2 deletions .gitignore
@@ -21,8 +21,8 @@ CommonServerUserPython.py
CommonServerPowerShell.ps1
demistomock.py
demistomock.ps1
!Tests/demistomock/demistomock.py
!Tests/demistomock/demistomock.ps1
Tests/demistomock/demistomock.py
Tests/demistomock/demistomock.ps1
Tests/filter_file.txt
Tests/filter_file_old.txt
Tests/id_set.json
2 changes: 1 addition & 1 deletion Packs/AWS-ACM/.pack-ignore
@@ -1,3 +1,3 @@
[file:AWS-ACM.yml]
ignore=BA108,BA109,IN124,BA124
ignore=BA108,BA109,BA124

2 changes: 1 addition & 1 deletion Packs/AWS-ACM/Integrations/AWS-ACM/AWS-ACM.yml
@@ -469,7 +469,7 @@ script:
description: The certificate chain that contains the root certificate issued by the certificate authority (CA).
type: string
description: Retrieves a certificate specified by an ARN and its certificate chain. The chain is an ordered list of certificates that contains the end entity certificate, intermediate certificates of subordinate CAs, and the root certificate in that order. The certificate and certificate chain are base64 encoded. If you want to decode the certificate to see the individual fields, you can use OpenSSL.
dockerimage: demisto/boto3py3:1.0.0.87582
dockerimage: demisto/boto3py3:1.0.0.100496
subtype: python3
tests:
- ACM-Test
6 changes: 6 additions & 0 deletions Packs/AWS-ACM/ReleaseNotes/1_1_36.md
@@ -0,0 +1,6 @@

#### Integrations

##### AWS - ACM

- Updated the Docker image to: *demisto/boto3py3:1.0.0.100496*.
2 changes: 1 addition & 1 deletion Packs/AWS-ACM/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "AWS - ACM",
"description": "Amazon Web Services Certificate Manager Service (acm)",
"support": "xsoar",
"currentVersion": "1.1.35",
"currentVersion": "1.1.36",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",