chore(deps): update pre-commit hook jsh9/pydoclint to v0.5.10 #2061

Merged (4 commits) on Dec 11, 2024
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -1,7 +1,7 @@
 ---
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.7.4
+    rev: v0.8.2
     hooks:
       - id: ruff
         args:
@@ -48,7 +48,7 @@ repos:
         require_serial: true # avoid possible race conditions

   - repo: https://github.com/jsh9/pydoclint # Run after 'custom-checks' as these may auto-fix
-    rev: 0.5.9
+    rev: 0.5.10
     hooks:
       - id: pydoclint
         require_serial: true # Spammy in run-all scenarios (more than fast enough already)
2 changes: 1 addition & 1 deletion cognite/client/__init__.py
@@ -6,7 +6,7 @@
 from cognite.client.config import ClientConfig, global_config
 from cognite.client.data_classes import data_modeling

-__all__ = ["ClientConfig", "CogniteClient", "__version__", "global_config", "data_modeling"]
+__all__ = ["ClientConfig", "CogniteClient", "__version__", "data_modeling", "global_config"]

 if _RUNNING_IN_BROWSER:
     from cognite.client.utils._pyodide_helpers import patch_sdk_for_pyodide
2 changes: 1 addition & 1 deletion cognite/client/_api/datapoint_tasks.py
@@ -604,7 +604,7 @@ def _is_task_done(self, n: int) -> bool:
         return (
             not self.next_cursor
             or n < self.max_query_limit
-            or not self.uses_cursor and self.next_start == self.end
+            or (not self.uses_cursor and self.next_start == self.end)
         )  # fmt: skip


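This hunk and several later ones only add parentheses around chained and/or conditions. Python binds 'and' tighter than 'or', so the new explicit grouping matches what the old code already did; a minimal standalone sketch (not SDK code):

# Illustration only: 'a or b and c' already parses as 'a or (b and c)',
# so the added parentheses are purely cosmetic.
a, b, c = True, False, False
assert (a or b and c) == (a or (b and c))   # same grouping, same result
assert (a or b and c) != ((a or b) and c)   # the other grouping would behave differently here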
29 changes: 11 additions & 18 deletions cognite/client/_api/datapoints.py
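The first hunk below reformats a validation of 'chunk_size_datapoints': a chunk size is accepted when it is an integer multiple of 100k, or when it evenly divides the rounded-up request limit. A hedged, standalone illustration of that rule (assuming the raw datapoint limit is 100_000, as the in-code comments suggest; not SDK code):

import math

DPS_LIMIT_RAW = 100_000  # assumed value of self._DPS_LIMIT_RAW

def chunk_size_is_valid(chunk_size: int) -> bool:
    # Mirror of the check in the hunk: round the chunk size up to the nearest
    # multiple of 100k, then require it to equal that limit or divide it evenly.
    request_limit = DPS_LIMIT_RAW * math.ceil(chunk_size / DPS_LIMIT_RAW)
    return chunk_size == request_limit or request_limit % chunk_size == 0

assert chunk_size_is_valid(25_000)       # 100_000 % 25_000 == 0
assert chunk_size_is_valid(300_000)      # integer multiple of 100k
assert not chunk_size_is_valid(70_000)   # 100_000 % 70_000 == 30_000, rejected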
@@ -623,10 +623,8 @@ def __call__(
         # get 10k/100k datapoints per request. Thus, we round up the given chunk size to the nearest integer multiple of 100k,
         # then subdivide and yield client-side (we use the raw limit also when dealing with aggregates):
         request_limit = self._DPS_LIMIT_RAW * math.ceil(chunk_size_datapoints / self._DPS_LIMIT_RAW)
-        if (
-            not is_finite(chunk_size_datapoints)
-            or chunk_size_datapoints != request_limit
-            and request_limit % chunk_size_datapoints
+        if not is_finite(chunk_size_datapoints) or (
+            chunk_size_datapoints != request_limit and request_limit % chunk_size_datapoints
         ):
             raise ValueError(
                 "The 'chunk_size_datapoints' must be a positive integer that evenly divides 100k OR an integer multiple of 100k "
@@ -1938,11 +1936,11 @@ def _extract_raw_data_from_datapoints_array(self, dps: DatapointsArray) -> list[

         if dps.null_timestamps:
             # 'Missing' and NaN can not be differentiated when we read from numpy arrays:
-            values = [None if ts in dps.null_timestamps else dp for ts, dp in zip(timestamps, values)]
+            values = [None if ts in dps.null_timestamps else dp for ts, dp in zip(timestamps, values)]  # type: ignore [arg-type]

         if dps.status_code is None:
-            return list(map(_InsertDatapoint, timestamps, values))
-        return list(map(_InsertDatapoint, timestamps, values, dps.status_code.tolist()))
+            return list(map(_InsertDatapoint, timestamps, values))  # type: ignore [arg-type]
+        return list(map(_InsertDatapoint, timestamps, values, dps.status_code.tolist()))  # type: ignore [arg-type]


class RetrieveLatestDpsFetcher:
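The added '# type: ignore [arg-type]' comments only quiet the type checker; the underlying reason for the None bookkeeping is that a numeric numpy array cannot hold None, so a missing value and a genuine NaN look identical once stored. A small illustration (assumes standard numpy behaviour, not SDK code):

import numpy as np

# A separate set of null timestamps (like dps.null_timestamps above) is what lets
# the caller restore None for missing values when converting back to Python objects.
timestamps = np.array([10, 20, 30])
values = np.array([1.0, np.nan, np.nan])  # index 1: missing value, index 2: a real NaN datapoint
null_timestamps = {20}                    # bookkeeping kept outside the array
restored = [None if ts in null_timestamps else v for ts, v in zip(timestamps.tolist(), values.tolist())]
assert restored[0] == 1.0 and restored[1] is None and np.isnan(restored[2])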
@@ -2056,23 +2054,18 @@ def _prepare_requests(
                 dct["targetUnitSystem"] = i_target_unit_system

             # Careful logic: "Not given" vs "given" vs "default" with "truthy/falsy":
-            if (
-                self.settings_include_status.get(idx) is True
-                or self.settings_include_status.get(idx) is None
-                and self.default_include_status is True
+            if self.settings_include_status.get(idx) is True or (
+                self.settings_include_status.get(idx) is None and self.default_include_status is True
             ):
                 dct["includeStatus"] = True

-            if (
-                self.settings_ignore_bad_datapoints.get(idx) is False
-                or self.settings_ignore_bad_datapoints.get(idx) is None
-                and self.default_ignore_bad_datapoints is False
+            if self.settings_ignore_bad_datapoints.get(idx) is False or (
+                self.settings_ignore_bad_datapoints.get(idx) is None and self.default_ignore_bad_datapoints is False
             ):
                 dct["ignoreBadDataPoints"] = False

-            if (
-                self.settings_treat_uncertain_as_bad.get(idx) is False
-                or self.settings_treat_uncertain_as_bad.get(idx) is None
+            if self.settings_treat_uncertain_as_bad.get(idx) is False or (
+                self.settings_treat_uncertain_as_bad.get(idx) is None
                 and self.default_treat_uncertain_as_bad is False
             ):
                 dct["treatUncertainAsBad"] = False
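The three conditions reparenthesized above all follow the same tri-state pattern: an explicit per-item setting wins, an unset (None) per-item setting falls back to the default, and plain truthiness could not distinguish 'explicitly False' from 'not given'. A minimal sketch of that pattern with a hypothetical helper (not SDK code):

from __future__ import annotations

def include_status(per_item: bool | None, default: bool) -> bool:
    # Mirrors the 'includeStatus' logic above: only an explicit True, or an unset
    # value combined with a True default, turns the flag on.
    return per_item is True or (per_item is None and default is True)

assert include_status(True, default=False)       # explicit True wins
assert not include_status(False, default=True)   # explicit False beats a True default
assert include_status(None, default=True)        # not given, so fall back to the default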
6 changes: 3 additions & 3 deletions cognite/client/_api/transformations/__init__.py
@@ -27,11 +27,11 @@
 from cognite.client.config import ClientConfig

 __all__ = [
+    "TransformationJobsAPI",
+    "TransformationNotificationsAPI",
+    "TransformationSchedulesAPI",
     "TransformationSchemaAPI",
     "TransformationsAPI",
-    "TransformationSchedulesAPI",
-    "TransformationNotificationsAPI",
-    "TransformationJobsAPI",
 ]


6 changes: 2 additions & 4 deletions cognite/client/_api/workflows.py
@@ -775,10 +775,8 @@ def get_single(wf_xid: WorkflowVersionId, ignore_missing: bool = ignore_unknown_
         if any(wf_id.version is None for wf_id in given_wf_ids):
             raise ValueError("Version must be specified for all workflow version IDs.")

-        is_single = (
-            isinstance(workflow_external_id, WorkflowVersionId)
-            or isinstance(workflow_external_id, tuple)
-            and len(given_wf_ids) == 1
+        is_single = isinstance(workflow_external_id, WorkflowVersionId) or (
+            isinstance(workflow_external_id, tuple) and len(given_wf_ids) == 1
         )
         if is_single:
             return get_single(given_wf_ids[0], ignore_missing=True)
2 changes: 1 addition & 1 deletion cognite/client/_api_client.py
@@ -287,7 +287,7 @@ def _is_retryable(self, method: str, path: str) -> bool:
         if method not in valid_methods:
             raise ValueError(f"Method {method} is not valid. Must be one of {valid_methods}")

-        return method in ["GET", "PUT", "PATCH"] or method == "POST" and self._url_is_retryable(path)
+        return method in ["GET", "PUT", "PATCH"] or (method == "POST" and self._url_is_retryable(path))

     @classmethod
     @functools.lru_cache(64)
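The last hunk is the same parenthesization pattern applied to the retry policy: GET, PUT and PATCH are always treated as retryable, while POST is retryable only when the URL passes the SDK's own check. A hedged sketch of that decision, with a plain boolean standing in for _url_is_retryable (not the real implementation):

def is_retryable(method: str, url_is_retryable: bool) -> bool:
    # 'url_is_retryable' is a placeholder for the SDK's allow-list lookup.
    return method in ["GET", "PUT", "PATCH"] or (method == "POST" and url_is_retryable)

assert is_retryable("GET", url_is_retryable=False)
assert is_retryable("POST", url_is_retryable=True)
assert not is_retryable("POST", url_is_retryable=False)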