Skip to content

Commit

Permalink
Drop support for protobuf v3 (#1688)
Browse files Browse the repository at this point in the history
  • Loading branch information
haakonvt authored Mar 26, 2024
1 parent d6f4cdf commit 136b382
Show file tree
Hide file tree
Showing 14 changed files with 153 additions and 1,071 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ repos:
- --select=E,W,F,I,T,RUF,TID,UP
- --fixable=E,W,F,I,T,RUF,TID,UP
- --target-version=py38
- --exclude=cognite/client/_proto,cognite/client/_proto_legacy
- --exclude=cognite/client/_proto
- id: ruff-format
args:
- --line-length=120
Expand Down
14 changes: 2 additions & 12 deletions cognite/client/_api/datapoint_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,10 +34,11 @@
from google.protobuf.message import Message
from typing_extensions import NotRequired, TypeAlias

from cognite.client._proto.data_point_list_response_pb2 import DataPointListItem
from cognite.client._proto.data_points_pb2 import AggregateDatapoint, NumericDatapoint, StringDatapoint
from cognite.client.data_classes.datapoints import NUMPY_IS_AVAILABLE, Aggregate, Datapoints, DatapointsArray
from cognite.client.utils._auxiliary import is_unlimited
from cognite.client.utils._identifier import Identifier
from cognite.client.utils._importing import import_legacy_protobuf
from cognite.client.utils._text import convert_all_keys_to_snake_case, to_camel_case, to_snake_case
from cognite.client.utils._time import (
align_start_and_end_for_granularity,
Expand All @@ -48,17 +49,6 @@
)
from cognite.client.utils.useful_types import SequenceNotStr

if not import_legacy_protobuf():
from cognite.client._proto.data_point_list_response_pb2 import DataPointListItem
from cognite.client._proto.data_points_pb2 import AggregateDatapoint, NumericDatapoint, StringDatapoint
else:
from cognite.client._proto_legacy.data_point_list_response_pb2 import DataPointListItem # type: ignore [assignment]
from cognite.client._proto_legacy.data_points_pb2 import ( # type: ignore [assignment]
AggregateDatapoint,
NumericDatapoint,
StringDatapoint,
)

if NUMPY_IS_AVAILABLE:
import numpy as np

Expand Down
30 changes: 2 additions & 28 deletions cognite/client/_api/datapoints.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,10 @@
from __future__ import annotations

import contextlib
import functools
import heapq
import itertools
import math
import time
import warnings
from abc import ABC, abstractmethod
from collections.abc import Mapping
from datetime import datetime
Expand Down Expand Up @@ -39,6 +37,7 @@
)
from cognite.client._api.synthetic_time_series import SyntheticDatapointsAPI
from cognite.client._api_client import APIClient
from cognite.client._proto.data_point_list_response_pb2 import DataPointListItem, DataPointListResponse
from cognite.client.data_classes.datapoints import (
Aggregate,
Datapoints,
Expand All @@ -57,7 +56,7 @@
)
from cognite.client.utils._concurrency import ConcurrencySettings, execute_tasks
from cognite.client.utils._identifier import Identifier, IdentifierSequence, IdentifierSequenceCore
from cognite.client.utils._importing import import_as_completed, import_legacy_protobuf, local_import
from cognite.client.utils._importing import import_as_completed, local_import
from cognite.client.utils._time import (
align_large_granularity,
pandas_date_range_tz,
Expand All @@ -69,14 +68,6 @@
from cognite.client.utils._validation import assert_type, validate_user_input_dict_with_identifier
from cognite.client.utils.useful_types import SequenceNotStr

if not import_legacy_protobuf():
from cognite.client._proto.data_point_list_response_pb2 import DataPointListItem, DataPointListResponse
else:
from cognite.client._proto_legacy.data_point_list_response_pb2 import ( # type: ignore [assignment]
DataPointListItem,
DataPointListResponse,
)

if TYPE_CHECKING:
from concurrent.futures import Future, ThreadPoolExecutor

Expand Down Expand Up @@ -140,23 +131,6 @@ def __init__(
self.api_subversion = api_subversion
self.n_queries = len(all_queries)

# Fetching datapoints relies on protobuf, which, depending on OS and major version used
# might be running in pure python or compiled C code. We issue a warning if we can determine
# that the user is running in pure python mode (quite a bit slower...)
with contextlib.suppress(ImportError):
from google.protobuf.descriptor import _USE_C_DESCRIPTORS

if _USE_C_DESCRIPTORS is False:
warnings.warn(
"Your installation of 'protobuf' is missing compiled C binaries, and will run in pure-python mode, "
"which causes datapoints fetching to be ~5x slower. To verify, set the environment variable "
"`PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=cpp` before running (this will cause the code to fail). "
"The easiest fix is probably to pin your 'protobuf' dependency to major version 4 (or higher), "
"see: https://developers.google.com/protocol-buffers/docs/news/2022-05-06#python-updates",
UserWarning,
stacklevel=3,
)

def fetch_all_datapoints(self) -> DatapointsList:
pool = ConcurrencySettings.get_executor(max_workers=self.max_workers)
return DatapointsList(
Expand Down
1 change: 1 addition & 0 deletions cognite/client/_proto/data_point_list_response_pb2.py

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

118 changes: 37 additions & 81 deletions cognite/client/_proto/data_point_list_response_pb2.pyi
Original file line number Diff line number Diff line change
@@ -1,81 +1,37 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import data_points_pb2
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys

if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions

DESCRIPTOR: google.protobuf.descriptor.FileDescriptor

@typing_extensions.final
class DataPointListItem(google.protobuf.message.Message):
    """Generated stub: one time series entry in a datapoints list response.

    Exactly one of numericDatapoints / stringDatapoints / aggregateDatapoints
    is populated — they form the "datapointType" oneof queried via the
    HasField/WhichOneof overloads below.
    """
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    # Proto field numbers, as generated by mypy-protobuf.
    ID_FIELD_NUMBER: builtins.int
    EXTERNALID_FIELD_NUMBER: builtins.int
    ISSTRING_FIELD_NUMBER: builtins.int
    ISSTEP_FIELD_NUMBER: builtins.int
    UNIT_FIELD_NUMBER: builtins.int
    NEXTCURSOR_FIELD_NUMBER: builtins.int
    UNITEXTERNALID_FIELD_NUMBER: builtins.int
    NUMERICDATAPOINTS_FIELD_NUMBER: builtins.int
    STRINGDATAPOINTS_FIELD_NUMBER: builtins.int
    AGGREGATEDATAPOINTS_FIELD_NUMBER: builtins.int
    # Scalar metadata fields; camelCase mirrors the proto/wire field names.
    id: builtins.int
    externalId: builtins.str
    isString: builtins.bool
    isStep: builtins.bool
    unit: builtins.str
    nextCursor: builtins.str
    unitExternalId: builtins.str
    @property
    def numericDatapoints(self) -> data_points_pb2.NumericDatapoints: ...
    @property
    def stringDatapoints(self) -> data_points_pb2.StringDatapoints: ...
    @property
    def aggregateDatapoints(self) -> data_points_pb2.AggregateDatapoints: ...
    def __init__(
        self,
        *,
        id: builtins.int = ...,
        externalId: builtins.str = ...,
        isString: builtins.bool = ...,
        isStep: builtins.bool = ...,
        unit: builtins.str = ...,
        nextCursor: builtins.str = ...,
        unitExternalId: builtins.str = ...,
        numericDatapoints: data_points_pb2.NumericDatapoints | None = ...,
        stringDatapoints: data_points_pb2.StringDatapoints | None = ...,
        aggregateDatapoints: data_points_pb2.AggregateDatapoints | None = ...,
    ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["aggregateDatapoints", b"aggregateDatapoints", "datapointType", b"datapointType", "numericDatapoints", b"numericDatapoints", "stringDatapoints", b"stringDatapoints"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["aggregateDatapoints", b"aggregateDatapoints", "datapointType", b"datapointType", "externalId", b"externalId", "id", b"id", "isStep", b"isStep", "isString", b"isString", "nextCursor", b"nextCursor", "numericDatapoints", b"numericDatapoints", "stringDatapoints", b"stringDatapoints", "unit", b"unit", "unitExternalId", b"unitExternalId"]) -> None: ...
    def WhichOneof(self, oneof_group: typing_extensions.Literal["datapointType", b"datapointType"]) -> typing_extensions.Literal["numericDatapoints", "stringDatapoints", "aggregateDatapoints"] | None: ...

# Module-level alias emitted by the generator for cross-module references.
global___DataPointListItem = DataPointListItem

@typing_extensions.final
class DataPointListResponse(google.protobuf.message.Message):
    """Generated stub: top-level response wrapping repeated DataPointListItem."""
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    ITEMS_FIELD_NUMBER: builtins.int
    @property
    def items(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DataPointListItem]: ...
    def __init__(
        self,
        *,
        items: collections.abc.Iterable[global___DataPointListItem] | None = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["items", b"items"]) -> None: ...

# Module-level alias emitted by the generator for cross-module references.
global___DataPointListResponse = DataPointListResponse
import data_points_pb2 as _data_points_pb2
from google.protobuf.internal import containers as _containers
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union

DESCRIPTOR: _descriptor.FileDescriptor

class DataPointListItem(_message.Message):
    """Generated stub (protoc v4+ style): one time series entry in a datapoints list response."""
    __slots__ = ("id", "externalId", "isString", "isStep", "unit", "nextCursor", "unitExternalId", "numericDatapoints", "stringDatapoints", "aggregateDatapoints")
    # Proto field numbers.
    ID_FIELD_NUMBER: _ClassVar[int]
    EXTERNALID_FIELD_NUMBER: _ClassVar[int]
    ISSTRING_FIELD_NUMBER: _ClassVar[int]
    ISSTEP_FIELD_NUMBER: _ClassVar[int]
    UNIT_FIELD_NUMBER: _ClassVar[int]
    NEXTCURSOR_FIELD_NUMBER: _ClassVar[int]
    UNITEXTERNALID_FIELD_NUMBER: _ClassVar[int]
    NUMERICDATAPOINTS_FIELD_NUMBER: _ClassVar[int]
    STRINGDATAPOINTS_FIELD_NUMBER: _ClassVar[int]
    AGGREGATEDATAPOINTS_FIELD_NUMBER: _ClassVar[int]
    # Field annotations; camelCase mirrors the proto/wire field names.
    id: int
    externalId: str
    isString: bool
    isStep: bool
    unit: str
    nextCursor: str
    unitExternalId: str
    numericDatapoints: _data_points_pb2.NumericDatapoints
    stringDatapoints: _data_points_pb2.StringDatapoints
    aggregateDatapoints: _data_points_pb2.AggregateDatapoints
    def __init__(self, id: _Optional[int] = ..., externalId: _Optional[str] = ..., isString: bool = ..., isStep: bool = ..., unit: _Optional[str] = ..., nextCursor: _Optional[str] = ..., unitExternalId: _Optional[str] = ..., numericDatapoints: _Optional[_Union[_data_points_pb2.NumericDatapoints, _Mapping]] = ..., stringDatapoints: _Optional[_Union[_data_points_pb2.StringDatapoints, _Mapping]] = ..., aggregateDatapoints: _Optional[_Union[_data_points_pb2.AggregateDatapoints, _Mapping]] = ...) -> None: ...

class DataPointListResponse(_message.Message):
    """Generated stub (protoc v4+ style): top-level response wrapping repeated DataPointListItem."""
    __slots__ = ("items",)
    ITEMS_FIELD_NUMBER: _ClassVar[int]
    items: _containers.RepeatedCompositeFieldContainer[DataPointListItem]
    def __init__(self, items: _Optional[_Iterable[_Union[DataPointListItem, _Mapping]]] = ...) -> None: ...
29 changes: 16 additions & 13 deletions cognite/client/_proto/data_points_pb2.py

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading

0 comments on commit 136b382

Please sign in to comment.