This repository has been archived by the owner on Feb 15, 2024. It is now read-only.

Merge pull request #292 from RedHatProductSecurity/refactor-some-options
Pin osidb-bindings==3.6.0 and change the default search to fall back to --search-latest
JimFuller-RedHat authored Feb 2, 2024
2 parents ab8bfb2 + e94ae03 commit 809ff83
Showing 8 changed files with 85 additions and 119 deletions.
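The behavioural change in griffon/services/core_queries.py is that the --search-provides query now runs by default, and --search-latest (the root-component search) only runs as a fallback when the provides search returns no children. Below is a minimal, self-contained sketch of that control flow; the helper functions are hypothetical stand-ins, not the real Corgi session calls used in the diff.

```python
# Minimal sketch of the fallback behaviour this commit introduces.
# The helpers below are hypothetical stand-ins; the real logic lives in
# griffon/services/core_queries.py and talks to the Component Registry.
from typing import List


def query_latest_provides(component_name: str) -> List[str]:
    """Stand-in for the default 'latest provides' search."""
    return []  # pretend no provided children were found


def query_latest_roots(component_name: str) -> List[str]:
    """Stand-in for the '--search-latest' root-component search."""
    return [f"{component_name} (latest root)"]


def search(component_name: str, search_provides: bool = True,
           search_latest: bool = False) -> List[str]:
    results: List[str] = []
    if search_provides:
        results.extend(query_latest_provides(component_name))
        if not results:
            # No children found: fall back to the root-component search,
            # mirroring `self.search_latest = True` in the diff below.
            search_latest = True
    if search_latest:
        results.extend(query_latest_roots(component_name))
    return results


if __name__ == "__main__":
    print(search("curl"))  # falls back and prints the latest-root result
```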
5 changes: 4 additions & 1 deletion CHANGELOG.md
@@ -6,11 +6,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## Unreleased

### Changed
* if the default --search-provides finds no children then fall back to --search-latest
* pin osidb-bindings==3.6.0

## [0.5.4] - 2024-01-23
### Fixed
* fixed searches with middleware CLI enabled (GRIF-221)


## [0.5.3] - 2024-01-22
### Fixed
* fixed regex escaping logic for -r and -s
145 changes: 58 additions & 87 deletions griffon/services/core_queries.py
@@ -338,115 +338,113 @@ def execute(self, status=None) -> List[Dict[str, Any]]:
"limit": 50,
"include_fields": "purl,type,name,related_url,namespace,software_build,nvr,release,version,arch,product_streams.product_versions,product_streams.name,product_streams.ofuri,product_streams.active,product_streams.exclude_components,product_streams.relations", # noqa
}
if not (self.include_inactive_product_streams):
params["active_streams"] = "True"
if self.exclude_unreleased:
params["released_components"] = "True"
if not (self.include_container_roots):
params["type"] = "RPM"
params["arch"] = "src"

component_name = self.component_name
if not self.strict_name_search and not self.regex_name_search:
component_name = re.escape(component_name)

if self.search_latest:
search_latest_params = copy.deepcopy(params)
if self.search_provides:
search_provides_params = copy.deepcopy(params)
if not (self.strict_name_search):
search_latest_params["re_name"] = component_name
search_provides_params["re_provides_name"] = component_name
else:
search_latest_params["name"] = component_name
search_provides_params["provides_name"] = component_name
if self.ns:
search_latest_params["namespace"] = self.ns
if not (self.include_inactive_product_streams):
search_latest_params["active_streams"] = "True"
if self.exclude_unreleased:
search_latest_params["released_components"] = "True"
if not self.include_container_roots:
search_latest_params["type"] = "RPM"
search_latest_params["root_components"] = "True"
search_latest_params["latest_components_by_streams"] = "True"
status.update("searching latest root component(s).")
latest_components_cnt = self.corgi_session.components.count(**search_latest_params)
status.update(f"found {latest_components_cnt} latest component(s).")
search_provides_params["namespace"] = self.ns
search_provides_params["latest_components_by_streams"] = "True"
status.update("searching latest provided child component(s).")
latest_components_cnt = self.corgi_session.components.count(**search_provides_params)
status.update(f"found {latest_components_cnt} latest provides component(s).")
latest_components = self.corgi_session.components.retrieve_list_iterator_async(
**search_latest_params, max_results=10000
**search_provides_params, max_results=10000
)

status.update(
f"found {latest_components_cnt} latest provides child component(s)- retrieving children, sources & upstreams." # noqa
)
status.update(f"found {latest_components_cnt} latest root component(s).") # noqa
with multiprocessing.Pool() as pool:
for processed_component in pool.map(
partial(process_component, self.corgi_session, search_latest_params),
partial(process_component, self.corgi_session, search_provides_params),
latest_components,
):
results.append(processed_component)
# if we have found no children then search_latest for roots
if not (results):
self.search_latest = True

if not self.no_community:
status.update("searching latest community root component(s).")
status.update("searching latest community provided child component(s).")
community_component_cnt = self.community_session.components.count(
**search_latest_params
**search_provides_params
)
status.update(
f"found {community_component_cnt} latest community root component(s)." # noqa
f"found {community_component_cnt} latest community provided child component(s)." # noqa
)
latest_community_components = (
self.community_session.components.retrieve_list_iterator_async(
**search_latest_params, max_results=10000
**search_provides_params, max_results=10000
)
)
status.update(
f"found {community_component_cnt} latest community root component(s)- retrieving children, sources & upstreams." # noqa
f"found {community_component_cnt} latest community provided child component(s)- retrieving children, sources & upstreams." # noqa
)
with multiprocessing.Pool() as pool:
for processed_component in pool.map(
partial(process_component, self.community_session, search_latest_params),
partial(process_component, self.community_session, search_provides_params),
latest_community_components,
):
results.append(processed_component)

if self.search_provides:
search_provides_params = copy.deepcopy(params)
if self.search_latest:
search_latest_params = copy.deepcopy(params)
if not (self.strict_name_search):
search_provides_params["re_provides_name"] = component_name
search_latest_params["re_name"] = component_name
else:
search_provides_params["provides_name"] = component_name
search_latest_params["name"] = component_name
if self.ns:
search_provides_params["namespace"] = self.ns
if not (self.include_inactive_product_streams):
search_provides_params["active_streams"] = "True"
if self.exclude_unreleased:
search_provides_params["released_components"] = "True"
if not self.include_container_roots:
search_provides_params["type"] = "RPM"
search_provides_params["latest_components_by_streams"] = "True"
status.update("searching latest provided child component(s).")
latest_components_cnt = self.corgi_session.components.count(**search_provides_params)
status.update(f"found {latest_components_cnt} latest provides component(s).")
search_latest_params["namespace"] = self.ns
search_latest_params["root_components"] = "True"
search_latest_params["latest_components_by_streams"] = "True"
status.update("searching latest root component(s).")
latest_components_cnt = self.corgi_session.components.count(**search_latest_params)
status.update(f"found {latest_components_cnt} latest component(s).")
latest_components = self.corgi_session.components.retrieve_list_iterator_async(
**search_provides_params, max_results=10000
)

status.update(
f"found {latest_components_cnt} latest provides child component(s)- retrieving children, sources & upstreams." # noqa
**search_latest_params, max_results=10000
)
status.update(f"found {latest_components_cnt} latest root component(s).") # noqa
with multiprocessing.Pool() as pool:
for processed_component in pool.map(
partial(process_component, self.corgi_session, search_provides_params),
partial(process_component, self.corgi_session, search_latest_params),
latest_components,
):
results.append(processed_component)

if not self.no_community:
status.update("searching latest community provided child component(s).")
status.update("searching latest community root component(s).")
community_component_cnt = self.community_session.components.count(
**search_provides_params
**search_latest_params
)
status.update(
f"found {community_component_cnt} latest community provided child component(s)." # noqa
f"found {community_component_cnt} latest community root component(s)." # noqa
)
latest_community_components = (
self.community_session.components.retrieve_list_iterator_async(
**search_provides_params, max_results=10000
**search_latest_params, max_results=10000
)
)
status.update(
f"found {community_component_cnt} latest community provided child component(s)- retrieving children, sources & upstreams." # noqa
f"found {community_component_cnt} latest community root component(s)- retrieving children, sources & upstreams." # noqa
)
with multiprocessing.Pool() as pool:
for processed_component in pool.map(
partial(process_component, self.community_session, search_provides_params),
partial(process_component, self.community_session, search_latest_params),
latest_community_components,
):
results.append(processed_component)
@@ -460,12 +458,6 @@ def execute(self, status=None) -> List[Dict[str, Any]]:
search_upstreams_params["upstreams_name"] = component_name
if self.ns:
search_upstreams_params["namespace"] = self.ns
if not (self.include_inactive_product_streams):
search_upstreams_params["active_streams"] = "True"
if self.exclude_unreleased:
search_upstreams_params["released_components"] = "True"
if not self.include_container_roots:
search_upstreams_params["type"] = "RPM"
search_upstreams_params["latest_components_by_streams"] = "True"
status.update("searching latest upstreams child component(s).")
latest_components_cnt = self.corgi_session.components.count(**search_upstreams_params)
@@ -513,13 +505,6 @@ def execute(self, status=None) -> List[Dict[str, Any]]:
search_related_url_params["namespace"] = self.ns
if self.component_type:
search_related_url_params["type"] = self.component_type
if not (self.include_inactive_product_streams):
search_related_url_params["active_streams"] = "True"
if self.exclude_unreleased:
search_related_url_params["released_components"] = "True"
if not self.include_container_roots:
search_related_url_params["type"] = "RPM"
search_related_url_params["released_components"] = "True"
related_url_components_cnt = self.corgi_session.components.count(
**search_related_url_params,
)
@@ -555,12 +540,6 @@ def execute(self, status=None) -> List[Dict[str, Any]]:
search_all_params["type"] = self.component_type
if self.ns:
search_all_params["namespace"] = self.ns
if not (self.include_inactive_product_streams):
search_all_params["active_streams"] = "True"
if self.exclude_unreleased:
search_all_params["released_components"] = "True"
if not self.include_container_roots:
search_all_params["type"] = "RPM"
all_components_cnt = self.corgi_session.components.count(**search_all_params)
status.update(f"found {all_components_cnt} all component(s).")
# TODO: remove max_results
@@ -569,6 +548,8 @@ def execute(self, status=None) -> List[Dict[str, Any]]:
)
status.update(f"found {all_components_cnt} all component(s).")
for c in all_components:
c.upstreams = []
c.sources = []
results.append(c)

if not self.no_community:
@@ -585,6 +566,8 @@ def execute(self, status=None) -> List[Dict[str, Any]]:
)
)
for c in all_community_components:
c.upstreams = []
c.sources = []
results.append(c)

if self.search_all_roots:
@@ -596,18 +579,14 @@ def execute(self, status=None) -> List[Dict[str, Any]]:
search_all_roots_params["name"] = component_name
if self.ns:
search_all_roots_params["namespace"] = self.ns
if not (self.include_inactive_product_streams):
search_all_roots_params["active_streams"] = "True"
if self.exclude_unreleased:
search_all_roots_params["released_components"] = "True"
if not self.include_container_roots:
search_all_roots_params["type"] = "RPM"
all_src_components_cnt = self.corgi_session.components.count(**search_all_roots_params)
status.update(f"found {all_src_components_cnt} all root component(s).")
all_src_components = self.corgi_session.components.retrieve_list_iterator_async(
**search_all_roots_params, max_results=10000
)
for c in all_src_components:
c.upstreams = []
c.sources = []
results.append(c)
if not self.no_community:
all_src_community_components_cnt = self.community_session.components.count(
@@ -622,6 +601,8 @@ def execute(self, status=None) -> List[Dict[str, Any]]:
f"found {all_src_community_components_cnt} community all root component(s)." # noqa
)
for c in all_src_community_components:
c.upstreams = []
c.sources = []
results.append(c)

if self.search_all_upstreams:
@@ -633,12 +614,6 @@ def execute(self, status=None) -> List[Dict[str, Any]]:
search_all_upstreams_params["name"] = component_name
if self.component_type:
search_all_upstreams_params["type"] = self.component_type
if not (self.include_inactive_product_streams):
search_all_upstreams_params["active_streams"] = "True"
if self.exclude_unreleased:
search_all_upstreams_params["released_components"] = "True"
if not self.include_container_roots:
search_all_upstreams_params["type"] = "RPM"
upstream_components_cnt = self.corgi_session.components.count(
**search_all_upstreams_params
)
@@ -726,10 +701,6 @@ def execute(self, status=None) -> List[Dict[str, Any]]:
search_community_params["name"] = component_name
if self.ns:
search_community_params["namespace"] = self.ns
if not (self.include_inactive_product_streams):
search_community_params["active_streams"] = "True"
if self.exclude_unreleased:
search_community_params["released_components"] = "True"
all_community_components_cnt = self.community_session.components.count(
**search_community_params
)
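For context on the retrieval pattern used throughout the block above: a params dict carries the name, stream, release and type filters, the session returns an iterator of matching components, and per-component enrichment (retrieving children, sources and upstreams) is fanned out over a process pool via functools.partial. A rough, self-contained sketch of that shape, with placeholders instead of the real component-registry-bindings session:

```python
# Rough sketch of the fan-out pattern used in core_queries.py:
# functools.partial binds the session and query params, and
# multiprocessing.Pool.map runs the worker over each found component.
# The session string, params and worker below are placeholders, not the
# real component-registry-bindings API.
import multiprocessing
from functools import partial
from typing import Any, Dict, List


def process_component(session: str, params: Dict[str, Any], component: str) -> Dict[str, Any]:
    """Placeholder worker: enrich a single component with its children/sources."""
    return {"session": session, "name": component, "filters": params}


def main() -> None:
    # Filters set once on the shared params dict (mirrors the diff above).
    params: Dict[str, Any] = {"active_streams": "True", "type": "RPM"}
    components: List[str] = ["curl", "openssl", "zlib"]  # stand-in for the retrieve iterator

    results: List[Dict[str, Any]] = []
    with multiprocessing.Pool() as pool:
        for processed in pool.map(partial(process_component, "corgi", params), components):
            results.append(processed)
    print(results)


if __name__ == "__main__":
    main()
```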
2 changes: 1 addition & 1 deletion requirements/base.in
@@ -1,4 +1,4 @@
osidb-bindings==3.5.1
osidb-bindings==3.6.0
component-registry-bindings==1.4.0
click
click-completion
8 changes: 4 additions & 4 deletions requirements/base.txt
@@ -1,5 +1,5 @@
#
# This file is autogenerated by pip-compile with Python 3.9
# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
# pip-compile --allow-unsafe --generate-hashes requirements/base.in
@@ -458,9 +458,9 @@ multidict==6.0.4 \
# via
# aiohttp
# yarl
osidb-bindings==3.5.1 \
--hash=sha256:70ac8e7e714611a126f0b73e48db93ff7769725d653fdcca47badcd9c1a4964c \
--hash=sha256:a27b9211ad2bb9102a41fec6db375f51e480dbb42c8516f00e1be04ec135cb61
osidb-bindings==3.6.0 \
--hash=sha256:0be7fc0dc0a49a4fe77a997e85cfe1133d38e7cc6d51cf4eb076a418d818391e \
--hash=sha256:448ca7f065289b5b4ecab54884f9c0fe8bc9d50840d84cf12ffe0c71dc3f411b
# via -r requirements/base.in
packageurl-python==0.10.4 \
--hash=sha256:5c91334f942cd55d45eb0c67dd339a535ef90e25f05b9ec016ad188ed0ef9048 \
15 changes: 4 additions & 11 deletions requirements/dev.txt
@@ -1,5 +1,5 @@
#
# This file is autogenerated by pip-compile with Python 3.9
# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
# pip-compile --allow-unsafe --generate-hashes requirements/dev.in
@@ -432,7 +432,6 @@ exceptiongroup==1.1.0 \
# via
# -r requirements/lint.txt
# -r requirements/test.txt
# pytest
executing==1.2.0 \
--hash=sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc \
--hash=sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107
@@ -869,9 +868,9 @@ mypy-extensions==1.0.0 \
# via
# -r requirements/test.txt
# mypy
osidb-bindings==3.5.1 \
--hash=sha256:70ac8e7e714611a126f0b73e48db93ff7769725d653fdcca47badcd9c1a4964c \
--hash=sha256:a27b9211ad2bb9102a41fec6db375f51e480dbb42c8516f00e1be04ec135cb61
osidb-bindings==3.6.0 \
--hash=sha256:0be7fc0dc0a49a4fe77a997e85cfe1133d38e7cc6d51cf4eb076a418d818391e \
--hash=sha256:448ca7f065289b5b4ecab54884f9c0fe8bc9d50840d84cf12ffe0c71dc3f411b
# via
# -r requirements/base.txt
# -r requirements/test.txt
@@ -1104,12 +1103,6 @@ tomli==2.0.1 \
# via
# -r requirements/lint.txt
# -r requirements/test.txt
# build
# coverage
# ipdb
# mypy
# pyproject-hooks
# pytest
traitlets==5.9.0 \
--hash=sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8 \
--hash=sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9
2 changes: 1 addition & 1 deletion requirements/lint.txt
@@ -1,5 +1,5 @@
#
# This file is autogenerated by pip-compile with Python 3.9
# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
# pip-compile --allow-unsafe --generate-hashes requirements/lint.in
