[DOGE-15] Update Sequence Issue (#2057)
doctrino authored Dec 12, 2024
1 parent 29408d2 commit caeee0d
Showing 9 changed files with 207 additions and 13 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -17,6 +17,10 @@ Changes are grouped as follows
- `Fixed` for any bug fixes.
- `Security` in case of vulnerabilities.

## [7.70.5] - 2024-12-12
### Fixed
- Upserting a Sequence with columns no longer silently skips the columns, but instead updates them as intended.

## [7.70.4] - 2024-12-11
### Fixed
- Added missing `diagramParsingACl` to capabilities.
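
The `[7.70.5]` entry above refers to `SequencesAPI.upsert`. A minimal usage sketch of the fixed behavior, assuming a configured `CogniteClient` and a pre-existing sequence with the hypothetical external ID "my_sequence":

from cognite.client import CogniteClient
from cognite.client.data_classes import SequenceWrite, SequenceColumnWrite

client = CogniteClient()  # assumes credentials are configured in the environment

# The request below has a different column set than the stored sequence.
# Before this fix the columns were silently ignored on upsert; now they are
# added or modified, and with mode="replace" stored columns missing from the
# request are removed.
updated = SequenceWrite(
    external_id="my_sequence",  # hypothetical external ID of an existing sequence
    description="Updated description",
    columns=[
        SequenceColumnWrite(external_id="col1", value_type="String"),
        SequenceColumnWrite(external_id="col2", value_type="Double"),
    ],
)
res = client.sequences.upsert(updated, mode="replace")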
5 changes: 3 additions & 2 deletions cognite/client/_api/annotations.py
@@ -1,13 +1,13 @@
from __future__ import annotations

from collections.abc import Sequence
from collections.abc import Hashable, Sequence
from copy import deepcopy
from typing import TYPE_CHECKING, Any, Literal, cast, overload

from cognite.client._api_client import APIClient
from cognite.client._constants import DEFAULT_LIMIT_READ
from cognite.client.data_classes import Annotation, AnnotationFilter, AnnotationList, AnnotationUpdate
from cognite.client.data_classes._base import CogniteResource, PropertySpec
from cognite.client.data_classes._base import CogniteResource, PropertySpec, T_WritableCogniteResource
from cognite.client.data_classes.annotations import AnnotationCore, AnnotationReverseLookupFilter, AnnotationWrite
from cognite.client.data_classes.contextualization import ResourceReference, ResourceReferenceList
from cognite.client.utils._auxiliary import is_unlimited, split_into_chunks
@@ -102,6 +102,7 @@ def _convert_resource_to_patch_object(
resource: CogniteResource,
update_attributes: list[PropertySpec],
mode: Literal["replace_ignore_null", "patch", "replace"] = "replace_ignore_null",
cdf_item_by_id: dict[Hashable, T_WritableCogniteResource] | None = None,
) -> dict[str, dict[str, dict]]:
if not isinstance(resource, Annotation):
return APIClient._convert_resource_to_patch_object(resource, update_attributes)
5 changes: 3 additions & 2 deletions cognite/client/_api/hosted_extractors/sources.py
@@ -1,11 +1,11 @@
from __future__ import annotations

from collections.abc import Iterator, Sequence
from collections.abc import Hashable, Iterator, Sequence
from typing import TYPE_CHECKING, Any, Literal, overload

from cognite.client._api_client import APIClient
from cognite.client._constants import DEFAULT_LIMIT_READ
from cognite.client.data_classes._base import CogniteResource, PropertySpec
from cognite.client.data_classes._base import CogniteResource, PropertySpec, T_WritableCogniteResource
from cognite.client.data_classes.hosted_extractors.sources import Source, SourceList, SourceUpdate, SourceWrite
from cognite.client.utils._experimental import FeaturePreviewWarning
from cognite.client.utils._identifier import IdentifierSequence
@@ -237,6 +237,7 @@ def _convert_resource_to_patch_object(
resource: CogniteResource,
update_attributes: list[PropertySpec],
mode: Literal["replace_ignore_null", "patch", "replace"] = "replace_ignore_null",
cdf_item_by_id: dict[Hashable, T_WritableCogniteResource] | None = None,
) -> dict[str, dict[str, dict]]:
output = super()._convert_resource_to_patch_object(resource, update_attributes, mode)
if hasattr(resource, "_type"):
72 changes: 70 additions & 2 deletions cognite/client/_api/sequences.py
@@ -1,5 +1,6 @@
from __future__ import annotations

import collections
import math
import typing
import warnings
@@ -17,9 +18,11 @@
SequenceUpdate,
filters,
)
from cognite.client.data_classes._base import CogniteResource, PropertySpec
from cognite.client.data_classes.aggregations import AggregationFilter, CountAggregate, UniqueResultList
from cognite.client.data_classes.filters import _BASIC_FILTERS, Filter, _validate_filter
from cognite.client.data_classes.sequences import (
SequenceColumnUpdate,
SequenceCore,
SequenceProperty,
SequenceSort,
@@ -685,22 +688,87 @@ def upsert(
Upsert for sequences:
>>> from cognite.client import CogniteClient
>>> from cognite.client.data_classes import Sequence
>>> from cognite.client.data_classes import SequenceWrite, SequenceColumnWrite
>>> client = CogniteClient()
>>> existing_sequence = client.sequences.retrieve(id=1)
>>> existing_sequence.description = "New description"
>>> new_sequence = Sequence(external_id="new_sequence", description="New sequence")
>>> new_sequence = SequenceWrite(
... external_id="new_sequence",
... description="New sequence",
... columns=[SequenceColumnWrite(external_id="col1", value_type="String")]
... )
>>> res = client.sequences.upsert([existing_sequence, new_sequence], mode="replace")
"""

if isinstance(item, SequenceWrite):
if item.external_id is None:
raise ValueError("External ID must be set when upserting a SequenceWrite object.")
cdf_item = self.retrieve(external_id=item.external_id)
if cdf_item and cdf_item.external_id:
cdf_item_by_id: dict[str, Sequence] = {cdf_item.external_id: cdf_item}
elif isinstance(item, collections.abc.Sequence):
external_ids = [i.external_id for i in item if isinstance(i, SequenceWrite)]
if None in external_ids:
raise ValueError("External ID must be set when upserting a SequenceWrite object.")
cdf_items = self.retrieve_multiple(external_ids=typing.cast(list[str], external_ids))
cdf_item_by_id = cdf_items._external_id_to_item
else:
cdf_item_by_id = {}
return self._upsert_multiple(
item,
list_cls=SequenceList,
resource_cls=Sequence,
update_cls=SequenceUpdate,
input_resource_cls=Sequence,
mode=mode,
# String is not recognized as hashable by mypy
cdf_item_by_id=cdf_item_by_id, # type: ignore[arg-type]
)

@classmethod
def _convert_resource_to_patch_object(
cls,
resource: CogniteResource,
update_attributes: list[PropertySpec],
mode: Literal["replace_ignore_null", "patch", "replace"] = "replace_ignore_null",
cdf_item_by_id: dict[str, Sequence] | None = None, # type: ignore[override]
) -> dict[str, dict[str, dict]]:
update_obj = super()._convert_resource_to_patch_object(resource, update_attributes, mode)
if not isinstance(resource, SequenceWrite):
return update_obj
# Lookup columns to check what to add, remove and modify
cdf_item: Sequence | None = None
if cdf_item_by_id and resource.external_id:
cdf_item = cdf_item_by_id.get(resource.external_id)
update_obj["update"]["columns"] = {}
if cdf_item is None:
update_obj["update"]["columns"]["add"] = [column.dump() for column in resource.columns]
else:
cdf_columns_by_external_id = {
column.external_id: column for column in cdf_item.columns or [] if column.external_id
}
columns_by_external_id = {column.external_id: column for column in resource.columns if column.external_id}
if mode != "patch" and (
to_remove := (set(cdf_columns_by_external_id.keys()) - set(columns_by_external_id.keys()))
):
# Replace or replace_ignore_null, remove all columns that are not in the new columns
update_obj["update"]["columns"]["remove"] = list(to_remove)
if to_add := (set(columns_by_external_id.keys()) - set(cdf_columns_by_external_id.keys())):
update_obj["update"]["columns"]["add"] = [columns_by_external_id[ext_id].dump() for ext_id in to_add]
if to_modify := (set(columns_by_external_id.keys()) & set(cdf_columns_by_external_id.keys())):
modify_list: list[dict[str, dict[str, Any]]] = []
for col_ext_id in to_modify:
col_write_obj = columns_by_external_id[col_ext_id]
column_update = super()._convert_resource_to_patch_object(
col_write_obj,
SequenceColumnUpdate._get_update_properties(col_write_obj),
mode,
)
modify_list.append(column_update)
update_obj["update"]["columns"]["modify"] = modify_list

return update_obj

def search(
self,
name: str | None = None,
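At the heart of the new `_convert_resource_to_patch_object` above is a set-based diff between the columns already stored in CDF and the columns on the incoming `SequenceWrite`: columns missing from the request are removed (unless mode="patch"), unknown columns are added, and columns present on both sides are modified. A standalone sketch of that logic, using plain dicts instead of the SDK classes and a simplified per-column patch format (the real method delegates to `SequenceColumnUpdate` for that part):

from typing import Any


def diff_columns(
    cdf_columns: list[dict[str, Any]],  # columns as currently stored in CDF
    new_columns: list[dict[str, Any]],  # columns on the incoming SequenceWrite
    mode: str = "replace",  # "replace" also removes columns absent from the request
) -> dict[str, Any]:
    cdf_by_id = {c["externalId"]: c for c in cdf_columns}
    new_by_id = {c["externalId"]: c for c in new_columns}
    patch: dict[str, Any] = {}
    if mode != "patch" and (to_remove := set(cdf_by_id) - set(new_by_id)):
        patch["remove"] = sorted(to_remove)
    if to_add := set(new_by_id) - set(cdf_by_id):
        patch["add"] = [new_by_id[xid] for xid in sorted(to_add)]
    if to_modify := set(new_by_id) & set(cdf_by_id):
        patch["modify"] = [
            {
                "externalId": xid,
                "update": {k: {"set": v} for k, v in new_by_id[xid].items() if k != "externalId"},
            }
            for xid in sorted(to_modify)
        ]
    return patch


# Column "a" is removed, "c" is added, and "b" is modified.
print(
    diff_columns(
        [{"externalId": "a", "valueType": "STRING"}, {"externalId": "b", "valueType": "DOUBLE"}],
        [
            {"externalId": "b", "valueType": "DOUBLE", "description": "updated"},
            {"externalId": "c", "valueType": "STRING"},
        ],
    )
)
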
16 changes: 14 additions & 2 deletions cognite/client/_api_client.py
@@ -7,7 +7,7 @@
import re
import warnings
from collections import UserList
from collections.abc import Iterator, MutableMapping, Sequence
from collections.abc import Hashable, Iterator, MutableMapping, Sequence
from typing import (
TYPE_CHECKING,
Any,
@@ -1002,6 +1002,7 @@ def _update_multiple(
headers: dict[str, Any] | None = None,
mode: Literal["replace_ignore_null", "patch", "replace"] = "replace_ignore_null",
api_subversion: str | None = None,
cdf_item_by_id: dict[Hashable, T_WritableCogniteResource] | None = None,
) -> T_CogniteResource: ...

@overload
@@ -1016,6 +1017,7 @@ def _update_multiple(
headers: dict[str, Any] | None = None,
mode: Literal["replace_ignore_null", "patch", "replace"] = "replace_ignore_null",
api_subversion: str | None = None,
cdf_item_by_id: dict[Hashable, T_WritableCogniteResource] | None = None,
) -> T_CogniteResourceList: ...

def _update_multiple(
@@ -1029,6 +1031,7 @@ def _update_multiple(
headers: dict[str, Any] | None = None,
mode: Literal["replace_ignore_null", "patch", "replace"] = "replace_ignore_null",
api_subversion: str | None = None,
cdf_item_by_id: dict[Hashable, T_WritableCogniteResource] | None = None,
) -> T_CogniteResourceList | T_CogniteResource:
resource_path = resource_path or self._RESOURCE_PATH
patch_objects = []
@@ -1045,6 +1048,7 @@
item,
update_cls._get_update_properties(item),
mode,
cdf_item_by_id,
)
)
elif isinstance(item, CogniteUpdate):
@@ -1083,14 +1087,21 @@ def _upsert_multiple(
mode: Literal["patch", "replace"],
input_resource_cls: type[CogniteResource] | None = None,
api_subversion: str | None = None,
cdf_item_by_id: dict[Hashable, T_WritableCogniteResource] | None = None,
) -> T_WritableCogniteResource | T_CogniteResourceList:
if mode not in ["patch", "replace"]:
raise ValueError(f"mode must be either 'patch' or 'replace', got {mode!r}")
is_single = isinstance(items, WriteableCogniteResource)
items = cast(Sequence[T_WritableCogniteResource], [items] if is_single else items)
try:
result = self._update_multiple(
items, list_cls, resource_cls, update_cls, mode=mode, api_subversion=api_subversion
items,
list_cls,
resource_cls,
update_cls,
mode=mode,
api_subversion=api_subversion,
cdf_item_by_id=cdf_item_by_id,
)
except CogniteNotFoundError as not_found_error:
items_by_external_id = {item.external_id: item for item in items if item.external_id is not None} # type: ignore [attr-defined]
@@ -1230,6 +1241,7 @@ def _convert_resource_to_patch_object(
resource: CogniteResource,
update_attributes: list[PropertySpec],
mode: Literal["replace_ignore_null", "patch", "replace"] = "replace_ignore_null",
cdf_item_by_id: dict[Hashable, T_WritableCogniteResource] | None = None,
) -> dict[str, dict[str, dict]]:
dumped = resource.dump(camel_case=True)

2 changes: 1 addition & 1 deletion cognite/client/_version.py
@@ -1,5 +1,5 @@
from __future__ import annotations

__version__ = "7.70.4"
__version__ = "7.70.5"

__api_subversion__ = "20230101"
2 changes: 1 addition & 1 deletion cognite/client/data_classes/sequences.py
@@ -589,7 +589,7 @@ def _get_update_properties(cls, item: CogniteResource | None = None) -> list[Pro
# Sequences do not support setting metadata to an empty array.
PropertySpec("metadata", is_object=True, is_nullable=False),
PropertySpec("data_set_id"),
# PropertySpec("columns", is_list=True),
# Columns are handled separately in the .upsert() method
]


2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,7 +1,7 @@
[tool.poetry]
name = "cognite-sdk"

version = "7.70.4"
version = "7.70.5"

description = "Cognite Python SDK"
readme = "README.md"
112 changes: 110 additions & 2 deletions tests/tests_integration/test_api/test_sequences.py
@@ -1,4 +1,4 @@
from unittest import mock
from unittest import TestCase, mock

import pytest

@@ -14,7 +14,13 @@
SequenceUpdate,
filters,
)
from cognite.client.data_classes.sequences import SequenceProperty, SortableSequenceProperty
from cognite.client.data_classes.sequences import (
SequenceColumnWrite,
SequenceColumnWriteList,
SequenceProperty,
SequenceWrite,
SortableSequenceProperty,
)
from cognite.client.exceptions import CogniteNotFoundError
from cognite.client.utils._text import random_string
from tests.utils import set_request_limit
@@ -329,3 +335,105 @@ def test_aggregate_unique_metadata_keys(self, cognite_client: CogniteClient, seq
assert {tuple(item.value["property"]) for item in result} >= {
("metadata", key.casefold()) for a in sequence_list for key in a.metadata or []
}

def test_upsert_sequence_replace(self, cognite_client: CogniteClient) -> None:
original_sequence = SequenceWrite(
external_id=f"upsert_sequence_{random_string(5)}",
columns=[
SequenceColumnWrite(
description="KW Description", name="KW Name", value_type="Double", external_id="kw_seq_01"
),
],
description="Description of the Test Sequence",
name="Test Sequence Name",
)
upsert = SequenceWrite(
external_id=original_sequence.external_id,
columns=[
SequenceColumnWrite(
description="KW Description",
name="KW Name",
# UPPER to match what the API returns
value_type="DOUBLE",
external_id="kw_seq_01",
metadata={},
),
SequenceColumnWrite(
description="PW Description",
name="PW Name",
value_type="DOUBLE",
external_id="pw_seq_01",
metadata={},
),
SequenceColumnWrite(
description="LW Description",
name="LW Name",
value_type="DOUBLE",
external_id="lw_seq_01",
metadata={},
),
],
description="Description of the Test Sequence",
name="Test Sequence Name",
)

created: Sequence | None = None
try:
created = cognite_client.sequences.create(original_sequence)

upserted = cognite_client.sequences.upsert(upsert, mode="replace")

retrieved = cognite_client.sequences.retrieve(external_id=upserted.external_id)

assert retrieved is not None
TestCase().assertCountEqual(retrieved.as_write().columns.dump(), upsert.columns.dump())
finally:
if created:
cognite_client.sequences.delete(external_id=created.external_id, ignore_unknown_ids=True)

def test_upsert_sequence_patch(self, cognite_client: CogniteClient) -> None:
original_sequence = SequenceWrite(
external_id=f"upsert_sequence_{random_string(5)}",
columns=[
SequenceColumnWrite(
description="KW Description",
name="KW Name",
value_type="DOUBLE",
external_id="kw_seq_01",
metadata={},
),
],
description="Description of the Test Sequence",
name="Test Sequence Name",
)
upsert = SequenceWrite(
external_id=original_sequence.external_id,
columns=[
SequenceColumnWrite(
description="PW Description",
name="PW Name",
value_type="DOUBLE",
external_id="pw_seq_01",
metadata={},
),
],
description="Description of the Test Sequence",
name="Test Sequence Name",
)

created: Sequence | None = None
try:
created = cognite_client.sequences.create(original_sequence)

upserted = cognite_client.sequences.upsert(upsert, mode="patch")

retrieved = cognite_client.sequences.retrieve(external_id=upserted.external_id)

assert retrieved is not None
TestCase().assertCountEqual(
retrieved.as_write().columns.dump(),
SequenceColumnWriteList([original_sequence.columns[0], upsert.columns[0]]).dump(),
)
finally:
if created:
cognite_client.sequences.delete(external_id=created.external_id, ignore_unknown_ids=True)
