Skip to content
This repository was archived by its owner on Oct 14, 2024, and is now read-only.

Commit

Permalink
Merge pull request #335 from microsoft/feature/dictionary
Browse files Browse the repository at this point in the history
feature/dictionary
  • Loading branch information
baywet authored Jul 26, 2024
2 parents 023c556 + 742032a commit 9f91b3b
Show file tree
Hide file tree
Showing 6 changed files with 72 additions and 5 deletions.
7 changes: 7 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,13 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [1.3.0] - 2024-07-26

### Added
- Support `dict[str, Any]` and `list[dict[str, Any]]` when writing additional data.

### Changed

## [1.2.0] - 2024-04-09

### Added
Expand Down
2 changes: 1 addition & 1 deletion kiota_serialization_json/_version.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
VERSION: str = '1.2.0'
VERSION: str = '1.3.0'
43 changes: 42 additions & 1 deletion kiota_serialization_json/json_serialization_writer.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,7 +203,7 @@ def write_collection_of_primitive_values(
if isinstance(values, list):
result = []
for val in values:
temp_writer = self._create_new_writer()
temp_writer: JsonSerializationWriter = self._create_new_writer()
temp_writer.write_any_value(None, val)
result.append(temp_writer.value)

Expand Down Expand Up @@ -253,6 +253,28 @@ def write_collection_of_enum_values(
else:
self.value = result

def __write_collection_of_dict_values(
    self, key: Optional[str], values: Optional[List[Dict[str, Any]]]
) -> None:
    """Serializes a list of plain dictionaries, attaching the result under ``key``
    when one is provided, or as the writer's root value otherwise.
    Args:
        key (Optional[str]): The key to be used for the written value. May be null.
        values (Optional[List[Dict[str, Any]]]): The collection of dictionary values
        to be written. Anything that is not a list is silently ignored, matching
        the behavior of the other collection writers in this class.
    """
    if not isinstance(values, list):
        return
    serialized = []
    for entry in values:
        # Each dictionary is rendered by a throwaway nested writer so the
        # intermediate JSON structure lands in its `.value` slot.
        nested_writer: JsonSerializationWriter = self._create_new_writer()
        nested_writer.__write_dict_value(None, entry)
        serialized.append(nested_writer.value)
    if key:
        self.writer[key] = serialized
    else:
        self.value = serialized

def write_bytes_value(self, key: Optional[str], value: bytes) -> None:
"""Writes the specified byte array as a base64 string to the stream with an optional
given key.
Expand Down Expand Up @@ -320,6 +342,21 @@ def write_null_value(self, key: Optional[str]) -> None:
else:
self.value = "null"

def __write_dict_value(self, key: Optional[str], value: Dict[str, Any]) -> None:
    """Serializes a plain dictionary, attaching the result under ``key`` when one
    is provided, or as the writer's root value otherwise.
    Args:
        key (Optional[str]): The key to be used for the written value. May be null.
        value (Dict[str, Any]): The dictionary value to be written. Anything that
        is not a dict is silently ignored, matching the sibling write methods.
    """
    if not isinstance(value, dict):
        return
    # Build the nested object in a fresh writer so each entry goes through the
    # generic write_any_value dispatch, then graft its backing map onto ours.
    nested_writer: JsonSerializationWriter = self._create_new_writer()
    for entry_key, entry_value in value.items():
        nested_writer.write_any_value(entry_key, entry_value)
    if key:
        self.writer[key] = nested_writer.writer
    else:
        self.value = nested_writer.writer

def write_additional_data_value(self, value: Dict[str, Any]) -> None:
"""Writes the specified additional data to the stream.
Args:
Expand Down Expand Up @@ -440,11 +477,15 @@ def write_any_value(self, key: Optional[str], value: Any) -> Any:
self.write_collection_of_enum_values(key, value)
elif all((type(x) in PRIMITIVE_TYPES) for x in value):
self.write_collection_of_primitive_values(key, value)
elif all(isinstance(x, dict) for x in value):
self.__write_collection_of_dict_values(key, value)
else:
raise TypeError(
f"Encountered an unknown collection type during serialization \
{value_type} with key {key}"
)
elif isinstance(value, dict):
self.__write_dict_value(key, value)
elif hasattr(value, '__dict__'):
self.write_non_parsable_object_value(key, value)
else:
Expand Down
7 changes: 6 additions & 1 deletion tests/unit/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,12 @@ def user1_json():
'"updated_at": "2022-01-27T12:59:45.596117+00:00", "is_active": true}, '\
'"approvers": [{"id": "8f841f30-e6e3-439a-a812-ebd369559c36", '\
'"updated_at": "2022-01-27T12:59:45.596117+00:00", "is_active": true}, '\
'{"display_name": "John Doe", "age": 32}]}}'
'{"display_name": "John Doe", "age": 32}], '\
'"data": {'\
'"groups": ['\
'{"friends": [{"display_name": "John Doe", "age": 32}]}'\
']'\
'}}}'\


@pytest.fixture
Expand Down
8 changes: 8 additions & 0 deletions tests/unit/test_json_parse_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,14 @@ def test_get_object_value(user1_json):
"age": 32
}
]
assert result.additional_data["additional_data"]["data"] == {
"groups": [{
"friends": [{
"display_name": "John Doe",
"age": 32
}]
}]
}


def test_get_collection_of_object_values(users_json):
Expand Down
10 changes: 8 additions & 2 deletions tests/unit/test_json_serialization_writer.py
Original file line number Diff line number Diff line change
Expand Up @@ -254,7 +254,12 @@ def test_write_additional_data_value(user_1, user_2):
"businessPhones": ["+1 205 555 0108"],
"manager": user_1,
"approvers": [user_1, user_2],
"created_at": date(2022, 1, 27)
"created_at": date(2022, 1, 27),
"data": {
"groups": [{
"friends": [user_2]
}]
}
}
)
content = json_serialization_writer.get_serialized_content()
Expand All @@ -266,4 +271,5 @@ def test_write_additional_data_value(user_1, user_2):
'"updated_at": "2022-01-27T12:59:45.596117+00:00", "is_active": true}, '\
'"approvers": [{"id": "8f841f30-e6e3-439a-a812-ebd369559c36", '\
'"updated_at": "2022-01-27T12:59:45.596117+00:00", "is_active": true}, '\
'{"display_name": "John Doe", "age": 32}], "created_at": "2022-01-27"}'
'{"display_name": "John Doe", "age": 32}], "created_at": "2022-01-27", '\
'"data": {"groups": [{"friends": [{"display_name": "John Doe", "age": 32}]}]}}'

0 comments on commit 9f91b3b

Please sign in to comment.