Bump pydantic support to v2 #1645

Merged · 10 commits · Apr 30, 2024
2 changes: 1 addition & 1 deletion .github/workflows/test-check.yaml
@@ -97,6 +97,6 @@ jobs:
- name: "Clean sparsezoo directory"
run: rm -r sparsezoo/
- name: ⚙️ Install dependencies
run: pip install .[dev,haystack]
run: pip install .[dev]
- name: Run integrations tests
run: make test_integrations
1 change: 0 additions & 1 deletion MANIFEST.in
@@ -1,6 +1,5 @@
 include LICENSE
 include utils/artifacts.py
-include src/deepsparse/transformers/haystack/haystack_reqs.txt
 recursive-include src/deepsparse/avx2 *
 recursive-include src/deepsparse/avx512 *
 recursive-include src/deepsparse/neon *
6 changes: 2 additions & 4 deletions examples/vit_pose/schemas.py
@@ -15,7 +15,7 @@
 from typing import List

 import numpy
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict

 from deepsparse.pipelines.computer_vision import ComputerVisionSchema

@@ -32,6 +32,4 @@ class VitPoseInput(ComputerVisionSchema):

 class VitPoseOutput(BaseModel):
     out: List[numpy.ndarray]
-
-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(arbitrary_types_allowed=True)
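The pattern above recurs throughout this PR: pydantic v2 replaces the nested `class Config` with a `model_config = ConfigDict(...)` attribute. A minimal runnable sketch of the migrated pattern, using a hypothetical `Demo` model rather than the PR's own schema:

from typing import List

import numpy
from pydantic import BaseModel, ConfigDict


class Demo(BaseModel):
    # numpy.ndarray is not a type pydantic validates natively, so
    # validation of arbitrary types must be opted into explicitly
    model_config = ConfigDict(arbitrary_types_allowed=True)

    out: List[numpy.ndarray]


print(Demo(out=[numpy.zeros(3)]).out[0].shape)  # (3,)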
307 changes: 0 additions & 307 deletions integrations/haystack/README.md

This file was deleted.

111 changes: 0 additions & 111 deletions integrations/haystack/tests/test_smoke.py

This file was deleted.

14 changes: 14 additions & 0 deletions integrations/test_placeholder.py
@@ -0,0 +1,14 @@
+
+def test_placeholder():
+    """
+    Needed to make the test suite run and not throw
+    an error about no tests being found when
+    `make test_integrations` is used.
+
+    The error would look like this:
+    make: *** [Makefile:61: test_integrations] Error 5
+
+    More information can be found here:
+    https://github.com/pytest-dev/pytest/issues/2393
+    """
+    pass
23 changes: 2 additions & 21 deletions setup.py
@@ -77,17 +77,10 @@
 ]


-def _parse_requirements_file(file_path):
-    with open(file_path, "r") as requirements_file:
-        lines = requirements_file.read().splitlines()
-
-    return [line for line in lines if len(line) > 0 and line[0] != "#"]
-
-
 _deps = [
     "numpy>=1.16.3",
     "onnx>=1.5.0,<1.15.0",
-    "pydantic>=1.8.2,<2.0.0",
+    "pydantic>=2.0.0,<2.8.0",
     "requests>=2.0.0",
     "tqdm>=4.0.0",
     "protobuf>=3.12.2",
@@ -122,7 +115,7 @@ def _parse_requirements_file(file_path):
 ]
 _server_deps = [
     "uvicorn>=0.15.0",
-    "fastapi>=0.70.0,<0.87.0",
+    "fastapi>=0.100.0,<0.111",
     "requests>=2.26.0",
     "python-multipart>=0.0.5",
     "prometheus-client>=0.14.1",
@@ -153,17 +146,6 @@ def _parse_requirements_file(file_path):
 ]
 _sentence_transformers_integration_deps = ["optimum-deepsparse"] + _torch_deps

-# haystack dependencies are installed from a requirements file to avoid
-# conflicting versions with NM's deepsparse/transformers
-_haystack_requirements_file_path = os.path.join(
-    os.path.dirname(os.path.realpath(__file__)),
-    "src",
-    "deepsparse",
-    "transformers",
-    "haystack",
-    "haystack_reqs.txt",
-)
-_haystack_integration_deps = _parse_requirements_file(_haystack_requirements_file_path)
 _clip_deps = [
     "open_clip_torch==2.20.0",
     "transformers<4.40",
@@ -270,7 +252,6 @@ def _setup_extras() -> Dict:
"image_classification": _computer_vision_deps,
"yolo": _computer_vision_deps,
"yolov5": _computer_vision_deps,
"haystack": _haystack_integration_deps,
"openpifpaf": _openpifpaf_integration_deps,
"yolov8": _yolov8_integration_deps,
"transformers": _transformers_integration_deps,
10 changes: 3 additions & 7 deletions src/deepsparse/benchmark/data_creation.py
@@ -17,7 +17,7 @@
 import random
 import string
 from os import path
-from typing import Dict, List, Tuple
+from typing import Dict, List, Tuple, get_args

 import numpy

@@ -58,15 +58,11 @@ def get_input_schema_type(pipeline: Pipeline) -> str:
     if SchemaType.TEXT_SEQ in input_schema_requirements:
         if input_schema_fields.get(SchemaType.TEXT_SEQ).alias == SchemaType.TEXT_PROMPT:
             return SchemaType.TEXT_PROMPT
-        sequence_types = [
-            f.outer_type_ for f in input_schema_fields[SchemaType.TEXT_SEQ].sub_fields
-        ]
+        sequence_types = get_args(input_schema_fields[SchemaType.TEXT_SEQ].annotation)
         if List[str] in sequence_types:
             return SchemaType.TEXT_SEQ
     elif SchemaType.TEXT_INPUT in input_schema_requirements:
-        sequence_types = [
-            f.outer_type_ for f in input_schema_fields[SchemaType.TEXT_INPUT].sub_fields
-        ]
+        sequence_types = get_args(input_schema_fields[SchemaType.TEXT_INPUT].annotation)
         if List[str] in sequence_types:
             return SchemaType.TEXT_INPUT
     elif SchemaType.QUESTION in input_schema_requirements:
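Context for the change above: pydantic v2 removed `ModelField.sub_fields` and `outer_type_`; a field's type metadata now lives on `FieldInfo.annotation` (reachable through `model_fields`), and the members of a `Union` annotation are recovered with `typing.get_args`. A hedged sketch with a hypothetical schema, not the repo's actual input schema:

from typing import List, Union, get_args

from pydantic import BaseModel


class ExampleSchema(BaseModel):
    sequences: Union[str, List[str]]


# v2: model_fields maps field names to FieldInfo objects
field_info = ExampleSchema.model_fields["sequences"]
sequence_types = get_args(field_info.annotation)  # (str, List[str])
print(List[str] in sequence_types)  # True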
4 changes: 2 additions & 2 deletions src/deepsparse/clip/decoder_pipeline.py
@@ -30,10 +30,10 @@ class CLIPDecoderInput(BaseModel):
"""

text_embeddings: Any = Field(
description="np.array of text emebddings from the " "text branch"
None, description="np.array of text emebddings from the " "text branch"
)
image_embeddings: Any = Field(
description="np.array of image embeddings from the " "visual branch"
None, description="np.array of image embeddings from the " "visual branch"
)


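The added `None` first arguments reflect a v2 behavior change: `Optional[...]` and `Any` fields no longer receive an implicit `None` default, so without an explicit default they become required. A minimal sketch under that assumption, with a hypothetical model:

from typing import Any, Optional

from pydantic import BaseModel, Field


class Embeddings(BaseModel):
    # in v2 these would be required fields without the explicit None default
    text: Any = Field(None, description="optional text embeddings")
    image: Optional[str] = Field(None, description="optional image reference")


print(Embeddings())  # text=None image=None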
17 changes: 9 additions & 8 deletions src/deepsparse/evaluation/results.py
@@ -36,15 +36,15 @@ class Metric(BaseModel):


 class Dataset(BaseModel):
-    type: Optional[str] = Field(description="Type of dataset")
+    type: Optional[str] = Field(None, description="Type of dataset")
     name: str = Field(description="Name of the dataset")
-    config: Any = Field(description="Configuration for the dataset")
-    split: Optional[str] = Field(description="Split of the dataset")
+    config: Any = Field(None, description="Configuration for the dataset")
+    split: Optional[str] = Field(None, description="Split of the dataset")


 class EvalSample(BaseModel):
-    input: Any = Field(description="Sample input to the model")
-    output: Any = Field(description="Sample output from the model")
+    input: Any = Field(None, description="Sample input to the model")
+    output: Any = Field(None, description="Sample output from the model")


 class Evaluation(BaseModel):
@@ -55,7 +55,7 @@ class Evaluation(BaseModel):
     dataset: Dataset = Field(description="Dataset that the evaluation was performed on")
     metrics: List[Metric] = Field(description="List of metrics for the evaluation")
     samples: Optional[List[EvalSample]] = Field(
-        description="List of samples for the evaluation"
+        None, description="List of samples for the evaluation"
     )


@@ -64,8 +64,9 @@ class Result(BaseModel):
description="Evaluation result represented in the unified, structured format"
)
raw: Any = Field(
None,
description="Evaluation result represented in the raw format "
"(characteristic for the specific evaluation integration)"
"(characteristic for the specific evaluation integration)",
)


@@ -97,7 +98,7 @@ def _save_to_json(result: Result, save_path: str):


 def _save_to_yaml(result: Result, save_path: str):
-    _save(yaml.dump(result.dict()), save_path, expected_ext=".yaml")
+    _save(yaml.dump(result.model_dump()), save_path, expected_ext=".yaml")


 def _save(data: str, save_path: str, expected_ext: str):
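`.dict()` was renamed to `.model_dump()` in pydantic v2 (and `.json()` to `.model_dump_json()`). A small sketch of both entry points on a stand-in model; the real `Metric` fields are not shown in this diff, so the ones below are assumed for illustration:

from pydantic import BaseModel


class Metric(BaseModel):
    name: str
    value: float


m = Metric(name="accuracy", value=0.91)
print(m.model_dump())       # {'name': 'accuracy', 'value': 0.91}
print(m.model_dump_json())  # {"name":"accuracy","value":0.91}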
5 changes: 3 additions & 2 deletions src/deepsparse/legacy/loggers/config.py
@@ -14,7 +14,7 @@

 from typing import Any, Dict, List, Optional

-from pydantic import BaseModel, Field, validator
+from pydantic import BaseModel, Field, field_validator


"""
@@ -57,7 +57,8 @@ class MetricFunctionConfig(BaseModel):
"the subset of loggers (specified here by a list of their names).",
)

@validator("frequency")
@field_validator("frequency")
@classmethod
def non_zero_frequency(cls, frequency: int) -> int:
if frequency <= 0:
raise ValueError(
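`@validator` became `@field_validator` in pydantic v2, which also expects the explicit `@classmethod` stacking shown above. A runnable sketch of the migrated validator, trimmed to the one validated field:

from pydantic import BaseModel, field_validator


class MetricFunctionConfig(BaseModel):
    frequency: int = 1

    @field_validator("frequency")
    @classmethod
    def non_zero_frequency(cls, frequency: int) -> int:
        if frequency <= 0:
            raise ValueError(f"frequency must be positive, got {frequency}")
        return frequency


print(MetricFunctionConfig(frequency=2).frequency)  # 2
# MetricFunctionConfig(frequency=0) would raise a ValidationError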
5 changes: 4 additions & 1 deletion src/deepsparse/legacy/tasks.py
@@ -206,7 +206,10 @@ def check_register_task(
     elif cls.is_haystack(task):
         # trigger haystack pipeline as well as transformers pipelines to
         # register with Pipeline.register
-        import deepsparse.transformers.haystack  # noqa: F401
+        raise DeprecationWarning(
+            "Haystack support with deepsparse has been deprecated, "
+            "kindly use deepsparse-nightly==1.8.20240404 or older"
+        )

     elif cls.is_embedding_extraction(task):
         # trigger embedding_extraction pipelines to register with
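Note that the new branch raises `DeprecationWarning` as an exception, which halts task registration outright, rather than emitting it through `warnings.warn`, which would only report and continue. A sketch of the difference, assuming no behavior beyond what the diff shows:

import warnings

# exception form (what the branch above does): execution stops at the raise
try:
    raise DeprecationWarning("Haystack support with deepsparse has been deprecated")
except DeprecationWarning as err:
    print(f"caught: {err}")

# warning form: emits the warning but keeps executing
warnings.warn("haystack task is deprecated", DeprecationWarning)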
5 changes: 2 additions & 3 deletions src/deepsparse/loggers/config.py
@@ -15,12 +15,11 @@
 from typing import Dict, List, Optional

 import yaml
-from pydantic import BaseModel, Extra, Field, validator
+from pydantic import BaseModel, ConfigDict, Field, validator


 class LoggerConfig(BaseModel):
-    class Config:
-        extra = Extra.allow
+    model_config = ConfigDict(extra="allow")

     name: str = Field(
         default="PythonLogger",
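`Extra.allow` maps to `ConfigDict(extra="allow")` in v2; unknown keys are retained on the model (exposed via `model_extra`) instead of raising a validation error. A sketch trimmed to one declared field:

from pydantic import BaseModel, ConfigDict, Field


class LoggerConfig(BaseModel):
    model_config = ConfigDict(extra="allow")

    name: str = Field(default="PythonLogger")


cfg = LoggerConfig(name="PythonLogger", handler={"stream": "stdout"})
print(cfg.model_extra)  # {'handler': {'stream': 'stdout'}}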
2 changes: 1 addition & 1 deletion src/deepsparse/loggers/logger_manager.py
@@ -65,7 +65,7 @@ class LoggerManager(AsyncExecutor, LoggerFactory):
"""

def __init__(self, config: str = ""):
self.config = LoggingConfig.from_config(config).dict()
self.config = LoggingConfig.from_config(config).model_dump()
super().__init__(config=self.config)

def log(
2 changes: 1 addition & 1 deletion src/deepsparse/loggers/root_logger.py
@@ -38,7 +38,7 @@ class RootLogger(FrequencyFilter):
     its own FrequencyFilter

     :param config: config with respect to
-        the log_type (LoggerConfig().dict().get(log_type))
+        the log_type (LoggerConfig().model_dump().get(log_type))
     :param leaf_logger: leaf logger singleton shared among other RootLogger

     """
9 changes: 3 additions & 6 deletions src/deepsparse/open_pif_paf/schemas.py
@@ -16,7 +16,7 @@

 import numpy
 from PIL import Image
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field

 from deepsparse.pipelines.computer_vision import ComputerVisionSchema

@@ -76,8 +76,7 @@ def from_files(
         input_schema = cls(*args, images=files_numpy, **kwargs)
         return input_schema

-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(arbitrary_types_allowed=True)


 class OpenPifPafOutput(BaseModel):
@@ -105,6 +104,4 @@ class OpenPifPafOutput(BaseModel):
"For every prediction, it is a list of tuples of body "
"part indices. "
)

class Config:
arbitrary_types_allowed = True
model_config = ConfigDict(arbitrary_types_allowed=True)