[DOGE-15] Update Sequence Issue #2057

Merged · 11 commits · Dec 12, 2024
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -17,6 +17,10 @@ Changes are grouped as follows
- `Fixed` for any bug fixes.
- `Security` in case of vulnerabilities.

## [7.70.3] - 2024-12-04
### Fixed
- Upserting a Sequence with columns no longer silently skips the columns, but instead updates them as intended.
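  For context, a minimal usage sketch of the behaviour this entry describes (illustrative only; it assumes an already configured client, and the external IDs are invented):

  from cognite.client import CogniteClient
  from cognite.client.data_classes.sequences import SequenceColumnWrite, SequenceWrite

  client = CogniteClient()
  seq = SequenceWrite(
      external_id="my_sequence",
      columns=[
          SequenceColumnWrite(external_id="pressure", value_type="Double", name="Pressure"),
          SequenceColumnWrite(external_id="temperature", value_type="Double", name="Temperature"),
      ],
  )
  # Before 7.70.3, the columns above were silently skipped when the sequence
  # already existed; as of this release they are updated as intended.
  client.sequences.upsert(seq, mode="patch")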

## [7.70.2] - 2024-12-04
### Fixed
- Retrieving `ExtractionPipeline` either with `client.extraction_pipelines.retrieve` or
5 changes: 3 additions & 2 deletions cognite/client/_api/annotations.py
@@ -1,13 +1,13 @@
from __future__ import annotations

from collections.abc import Sequence
from collections.abc import Hashable, Sequence
from copy import deepcopy
from typing import TYPE_CHECKING, Any, Literal, cast, overload

from cognite.client._api_client import APIClient
from cognite.client._constants import DEFAULT_LIMIT_READ
from cognite.client.data_classes import Annotation, AnnotationFilter, AnnotationList, AnnotationUpdate
from cognite.client.data_classes._base import CogniteResource, PropertySpec
from cognite.client.data_classes._base import CogniteResource, PropertySpec, T_WritableCogniteResource
from cognite.client.data_classes.annotations import AnnotationCore, AnnotationReverseLookupFilter, AnnotationWrite
from cognite.client.data_classes.contextualization import ResourceReference, ResourceReferenceList
from cognite.client.utils._auxiliary import is_unlimited, split_into_chunks
@@ -102,6 +102,7 @@ def _convert_resource_to_patch_object(
resource: CogniteResource,
update_attributes: list[PropertySpec],
mode: Literal["replace_ignore_null", "patch", "replace"] = "replace_ignore_null",
cdf_item_by_id: dict[Hashable, T_WritableCogniteResource] | None = None,
) -> dict[str, dict[str, dict]]:
if not isinstance(resource, Annotation):
return APIClient._convert_resource_to_patch_object(resource, update_attributes)
5 changes: 3 additions & 2 deletions cognite/client/_api/hosted_extractors/sources.py
@@ -1,11 +1,11 @@
from __future__ import annotations

from collections.abc import Iterator, Sequence
from collections.abc import Hashable, Iterator, Sequence
from typing import TYPE_CHECKING, Any, Literal, overload

from cognite.client._api_client import APIClient
from cognite.client._constants import DEFAULT_LIMIT_READ
from cognite.client.data_classes._base import CogniteResource, PropertySpec
from cognite.client.data_classes._base import CogniteResource, PropertySpec, T_WritableCogniteResource
from cognite.client.data_classes.hosted_extractors.sources import Source, SourceList, SourceUpdate, SourceWrite
from cognite.client.utils._experimental import FeaturePreviewWarning
from cognite.client.utils._identifier import IdentifierSequence
@@ -237,6 +237,7 @@ def _convert_resource_to_patch_object(
resource: CogniteResource,
update_attributes: list[PropertySpec],
mode: Literal["replace_ignore_null", "patch", "replace"] = "replace_ignore_null",
cdf_item_by_id: dict[Hashable, T_WritableCogniteResource] | None = None,
) -> dict[str, dict[str, dict]]:
output = super()._convert_resource_to_patch_object(resource, update_attributes, mode)
if hasattr(resource, "_type"):
61 changes: 61 additions & 0 deletions cognite/client/_api/sequences.py
@@ -1,5 +1,6 @@
from __future__ import annotations

import collections
import math
import typing
import warnings
@@ -17,9 +18,11 @@
SequenceUpdate,
filters,
)
from cognite.client.data_classes._base import CogniteResource, PropertySpec
from cognite.client.data_classes.aggregations import AggregationFilter, CountAggregate, UniqueResultList
from cognite.client.data_classes.filters import _BASIC_FILTERS, Filter, _validate_filter
from cognite.client.data_classes.sequences import (
SequenceColumnUpdate,
SequenceCore,
SequenceProperty,
SequenceSort,
@@ -692,15 +695,73 @@ def upsert(
>>> new_sequence = Sequence(external_id="new_sequence", description="New sequence")
>>> res = client.sequences.upsert([existing_sequence, new_sequence], mode="replace")
"""

if isinstance(item, SequenceWrite):
if item.external_id is None:
raise ValueError("External ID must be set when upserting a SequenceWrite object.")
cdf_item = self.retrieve(external_id=item.external_id)
if cdf_item and cdf_item.external_id:
cdf_item_by_id: dict[str, Sequence] = {cdf_item.external_id: cdf_item}
elif isinstance(item, collections.abc.Sequence):
external_ids = [i.external_id for i in item if isinstance(i, SequenceWrite)]
if None in external_ids:
raise ValueError("External ID must be set when upserting a SequenceWrite object.")
cdf_items = self.retrieve_multiple(external_ids=typing.cast(list[str], external_ids))
cdf_item_by_id = {cdf_item.external_id: cdf_item for cdf_item in cdf_items if cdf_item.external_id}
else:
cdf_item_by_id = {}
return self._upsert_multiple(
item,
list_cls=SequenceList,
resource_cls=Sequence,
update_cls=SequenceUpdate,
input_resource_cls=Sequence,
mode=mode,
# String is not recognized as hashable by mypy
cdf_item_by_id=cdf_item_by_id, # type: ignore[arg-type]
Comment on lines +724 to +725

Contributor: Why not just use dict[str, T_WritableCogniteResource]?

Contributor Author: Not all identifiers are strings. Workflow and data modeling are two examples that use frozen dataclasses.

Contributor: Oh right, makes sense. Then you must use Mapping/MutableMapping, which is covariant (dict is invariant).
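
To make the variance point above concrete, a minimal standalone sketch (the Animal/Dog classes and the two functions are invented for the example; they are not SDK code):

from collections.abc import Mapping


class Animal: ...
class Dog(Animal): ...


def takes_dict(d: dict[str, Animal]) -> None: ...
def takes_mapping(m: Mapping[str, Animal]) -> None: ...


dogs: dict[str, Dog] = {"rex": Dog()}
takes_dict(dogs)     # mypy error: dict is invariant in its value type
takes_mapping(dogs)  # accepted: Mapping is covariant in its value type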

)

@classmethod
def _convert_resource_to_patch_object(
cls,
resource: CogniteResource,
update_attributes: list[PropertySpec],
mode: Literal["replace_ignore_null", "patch", "replace"] = "replace_ignore_null",
cdf_item_by_id: dict[str, Sequence] | None = None, # type: ignore[override]
) -> dict[str, dict[str, dict]]:
update_obj = super()._convert_resource_to_patch_object(resource, update_attributes, mode)
if not isinstance(resource, SequenceWrite):
return update_obj
# Lookup columns to check what to add, remove and modify
cdf_item: Sequence | None = None
if cdf_item_by_id and resource.external_id:
cdf_item = cdf_item_by_id.get(resource.external_id)
update_obj["update"]["columns"] = {}
if cdf_item is None:
update_obj["update"]["columns"]["add"] = [column.dump() for column in resource.columns]
else:
cdf_columns_by_external_id = {
column.external_id: column for column in cdf_item.columns or [] if column.external_id
}
columns_by_external_id = {column.external_id: column for column in resource.columns if column.external_id}
if to_remove := (set(cdf_columns_by_external_id.keys()) - set(columns_by_external_id.keys())):
update_obj["update"]["columns"]["remove"] = list(to_remove)
if to_add := (set(columns_by_external_id.keys()) - set(cdf_columns_by_external_id.keys())):
update_obj["update"]["columns"]["add"] = [columns_by_external_id[ext_id].dump() for ext_id in to_add]
if to_modify := (set(columns_by_external_id.keys()) & set(cdf_columns_by_external_id.keys())):
Contributor: This doesn't seem to distinguish based on mode ("replace_ignore_null", "patch", "replace")?

Contributor Author: You are right, good catch. Since columns are not nullable, I think "replace" and "replace_ignore_null" should have the same behavior, while "patch" should not do the remove.
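
To illustrate that distinction, a self-contained sketch of how the column diff could be gated on mode; the _column_patch helper and its argument shapes are hypothetical and not part of this diff:

from typing import Any, Literal


def _column_patch(
    existing: dict[str, dict[str, Any]],  # columns currently in CDF, keyed by external_id
    incoming: dict[str, dict[str, Any]],  # columns on the upserted SequenceWrite, keyed by external_id
    mode: Literal["replace_ignore_null", "patch", "replace"],
) -> dict[str, Any]:
    patch: dict[str, Any] = {}
    if to_add := set(incoming) - set(existing):
        patch["add"] = [incoming[ext_id] for ext_id in to_add]
    if mode in ("replace", "replace_ignore_null"):
        # Replacement semantics: columns missing from the payload are removed.
        if to_remove := set(existing) - set(incoming):
            patch["remove"] = list(to_remove)
    # mode == "patch": columns missing from the payload are left untouched.
    return patch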

modify_list: list[dict[str, dict[str, Any]]] = []
for col_ext_id in to_modify:
col_write_obj = columns_by_external_id[col_ext_id]
column_update = super()._convert_resource_to_patch_object(
col_write_obj,
SequenceColumnUpdate._get_update_properties(col_write_obj),
mode,
)
modify_list.append(column_update)
update_obj["update"]["columns"]["modify"] = modify_list

return update_obj

def search(
self,
name: str | None = None,
16 changes: 14 additions & 2 deletions cognite/client/_api_client.py
@@ -7,7 +7,7 @@
import re
import warnings
from collections import UserList
from collections.abc import Iterator, MutableMapping, Sequence
from collections.abc import Hashable, Iterator, MutableMapping, Sequence
from typing import (
TYPE_CHECKING,
Any,
@@ -1004,6 +1004,7 @@ def _update_multiple(
headers: dict[str, Any] | None = None,
mode: Literal["replace_ignore_null", "patch", "replace"] = "replace_ignore_null",
api_subversion: str | None = None,
cdf_item_by_id: dict[Hashable, T_WritableCogniteResource] | None = None,
) -> T_CogniteResource: ...

@overload
@@ -1018,6 +1019,7 @@ def _update_multiple(
headers: dict[str, Any] | None = None,
mode: Literal["replace_ignore_null", "patch", "replace"] = "replace_ignore_null",
api_subversion: str | None = None,
cdf_item_by_id: dict[Hashable, T_WritableCogniteResource] | None = None,
) -> T_CogniteResourceList: ...

def _update_multiple(
@@ -1031,6 +1033,7 @@
headers: dict[str, Any] | None = None,
mode: Literal["replace_ignore_null", "patch", "replace"] = "replace_ignore_null",
api_subversion: str | None = None,
cdf_item_by_id: dict[Hashable, T_WritableCogniteResource] | None = None,
) -> T_CogniteResourceList | T_CogniteResource:
resource_path = resource_path or self._RESOURCE_PATH
patch_objects = []
@@ -1047,6 +1050,7 @@
item,
update_cls._get_update_properties(item),
mode,
cdf_item_by_id,
)
)
elif isinstance(item, CogniteUpdate):
@@ -1085,14 +1089,21 @@ def _upsert_multiple(
mode: Literal["patch", "replace"],
input_resource_cls: type[CogniteResource] | None = None,
api_subversion: str | None = None,
cdf_item_by_id: dict[Hashable, T_WritableCogniteResource] | None = None,
) -> T_WritableCogniteResource | T_CogniteResourceList:
if mode not in ["patch", "replace"]:
raise ValueError(f"mode must be either 'patch' or 'replace', got {mode!r}")
is_single = isinstance(items, WriteableCogniteResource)
items = cast(Sequence[T_WritableCogniteResource], [items] if is_single else items)
try:
result = self._update_multiple(
items, list_cls, resource_cls, update_cls, mode=mode, api_subversion=api_subversion
items,
list_cls,
resource_cls,
update_cls,
mode=mode,
api_subversion=api_subversion,
cdf_item_by_id=cdf_item_by_id,
)
except CogniteNotFoundError as not_found_error:
items_by_external_id = {item.external_id: item for item in items if item.external_id is not None} # type: ignore [attr-defined]
@@ -1232,6 +1243,7 @@ def _convert_resource_to_patch_object(
resource: CogniteResource,
update_attributes: list[PropertySpec],
mode: Literal["replace_ignore_null", "patch", "replace"] = "replace_ignore_null",
cdf_item_by_id: dict[Hashable, T_WritableCogniteResource] | None = None,
) -> dict[str, dict[str, dict]]:
dumped = resource.dump(camel_case=True)

2 changes: 1 addition & 1 deletion cognite/client/_version.py
@@ -1,5 +1,5 @@
from __future__ import annotations

__version__ = "7.70.2"
__version__ = "7.70.3"

__api_subversion__ = "20230101"
2 changes: 1 addition & 1 deletion cognite/client/data_classes/sequences.py
@@ -589,7 +589,7 @@ def _get_update_properties(cls, item: CogniteResource | None = None) -> list[Pro
# Sequences do not support setting metadata to an empty array.
PropertySpec("metadata", is_object=True, is_nullable=False),
PropertySpec("data_set_id"),
# PropertySpec("columns", is_list=True),
# Columns are handled separately in the .upsert() method
]


2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,7 +1,7 @@
[tool.poetry]
name = "cognite-sdk"

version = "7.70.2"
version = "7.70.3"

description = "Cognite Python SDK"
readme = "README.md"
62 changes: 61 additions & 1 deletion tests/tests_integration/test_api/test_sequences.py
@@ -14,7 +14,12 @@
SequenceUpdate,
filters,
)
from cognite.client.data_classes.sequences import SequenceProperty, SortableSequenceProperty
from cognite.client.data_classes.sequences import (
SequenceColumnWrite,
SequenceProperty,
SequenceWrite,
SortableSequenceProperty,
)
from cognite.client.exceptions import CogniteNotFoundError
from cognite.client.utils._text import random_string
from tests.utils import set_request_limit
@@ -329,3 +334,58 @@ def test_aggregate_unique_metadata_keys(self, cognite_client: CogniteClient, seq
assert {tuple(item.value["property"]) for item in result} >= {
("metadata", key.casefold()) for a in sequence_list for key in a.metadata or []
}

def test_upsert_sequence(self, cognite_client: CogniteClient) -> None:
original_sequence = SequenceWrite(
external_id=f"upsert_sequence_{random_string(5)}",
columns=[
SequenceColumnWrite(
description="KW Description", name="KW Name", value_type="Double", external_id="kw_seq_01"
),
],
description="Description of the Test Sequence",
name="Test Sequence Name",
)
upsert = SequenceWrite(
external_id=original_sequence.external_id,
columns=[
SequenceColumnWrite(
description="KW Description",
name="KW Name",
# UPPER to match what the API returns
value_type="DOUBLE",
external_id="kw_seq_01",
metadata={},
),
SequenceColumnWrite(
description="PW Description",
name="PW Name",
value_type="DOUBLE",
external_id="pw_seq_01",
metadata={},
),
SequenceColumnWrite(
description="LW Description",
name="LW Name",
value_type="DOUBLE",
external_id="lw_seq_01",
metadata={},
),
],
description="Description of the Test Sequence",
name="Test Sequence Name",
)

created: Sequence | None = None
try:
created = cognite_client.sequences.create(original_sequence)

upserted = cognite_client.sequences.upsert(upsert)

retrieved = cognite_client.sequences.retrieve(external_id=upserted.external_id)

assert retrieved is not None
assert retrieved.as_write().columns.dump() == upsert.columns.dump()
finally:
if created:
cognite_client.sequences.delete(external_id=created.external_id, ignore_unknown_ids=True)