forked from nadineloepfe/hedera_sdk_python
Commit f658537 (1 parent: d2f2ce3). Showing 5 changed files with 197 additions and 0 deletions.
New file: hedera_sdk_python/tokens/token_type.py

@@ -0,0 +1,85 @@
from hedera_sdk_python.hapi.services import basic_types_pb2


class TokenType:
    """
    Represents the type of a Hedera token.

    This class mirrors the enum defined in Hedera's protobufs:
        0: FUNGIBLE_COMMON
        1: NON_FUNGIBLE_UNIQUE
    """
    FUNGIBLE_COMMON = 0
    NON_FUNGIBLE_UNIQUE = 1

    def __init__(self, token_type=FUNGIBLE_COMMON):
        """
        Initializes a TokenType instance with either
        TokenType.FUNGIBLE_COMMON or TokenType.NON_FUNGIBLE_UNIQUE.
        """
        if token_type not in (self.FUNGIBLE_COMMON, self.NON_FUNGIBLE_UNIQUE):
            raise ValueError(f"Invalid TokenType value: {token_type}")
        self._token_type = token_type

    @classmethod
    def from_proto(cls, token_type_proto):
        """
        Creates a TokenType instance from the integer value in the protobuf enum.

        Args:
            token_type_proto (int): The integer value corresponding to the TokenType.

        Returns:
            TokenType: A TokenType instance matching the protobuf value.
        """
        if token_type_proto == basic_types_pb2.TokenType.FUNGIBLE_COMMON:
            return cls(cls.FUNGIBLE_COMMON)
        elif token_type_proto == basic_types_pb2.TokenType.NON_FUNGIBLE_UNIQUE:
            return cls(cls.NON_FUNGIBLE_UNIQUE)
        else:
            raise ValueError(f"Unknown TokenType proto value: {token_type_proto}")

    def to_proto(self):
        """
        Converts this TokenType instance into its protobuf enum integer.

        Returns:
            int: The integer value corresponding to the Hedera protobuf enum.
        """
        if self._token_type == self.FUNGIBLE_COMMON:
            return basic_types_pb2.TokenType.FUNGIBLE_COMMON
        else:
            return basic_types_pb2.TokenType.NON_FUNGIBLE_UNIQUE

    @classmethod
    def from_string(cls, token_type_str):
        """
        Parses a string like "FUNGIBLE_COMMON" or "NON_FUNGIBLE_UNIQUE"
        and returns a TokenType instance.

        Args:
            token_type_str (str): A string describing the token type.

        Returns:
            TokenType: A TokenType instance.

        Raises:
            ValueError: If the provided string doesn't match known token types.
        """
        token_type_str = token_type_str.strip().upper()
        if token_type_str == "FUNGIBLE_COMMON":
            return cls(cls.FUNGIBLE_COMMON)
        elif token_type_str == "NON_FUNGIBLE_UNIQUE":
            return cls(cls.NON_FUNGIBLE_UNIQUE)
        else:
            raise ValueError(f"Invalid token type string: {token_type_str}")

    def __str__(self):
        """
        Returns a human-readable string for this TokenType, for example
        'FUNGIBLE_COMMON' or 'NON_FUNGIBLE_UNIQUE'.
        """
        if self._token_type == self.FUNGIBLE_COMMON:
            return "FUNGIBLE_COMMON"
        else:
            return "NON_FUNGIBLE_UNIQUE"
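For orientation, a short usage sketch of the new class follows. It is not part of the commit; the import path is taken from the test file below, and the printed enum value follows the 0/1 mapping documented in the class docstring.

# Usage sketch (import path assumed from the tests below).
from hedera_sdk_python.tokens.token_type import TokenType

nft = TokenType.from_string("non_fungible_unique")   # parsing is case-insensitive
print(nft)               # NON_FUNGIBLE_UNIQUE
print(nft.to_proto())    # 1, the protobuf enum value for NON_FUNGIBLE_UNIQUE

# Round-trip through the protobuf enum value.
assert str(TokenType.from_proto(nft.to_proto())) == "NON_FUNGIBLE_UNIQUE"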
New test file for TokenType

@@ -0,0 +1,103 @@
import pytest

from hedera_sdk_python.hapi.services import basic_types_pb2
from hedera_sdk_python.tokens.token_type import TokenType


def test_default_init():
    """
    Test that the default is a TokenType with FUNGIBLE_COMMON.
    """
    token_type = TokenType()
    assert token_type._token_type == TokenType.FUNGIBLE_COMMON


def test_init_fungible():
    """
    Test initializing a TokenType as FUNGIBLE_COMMON.
    """
    token_type = TokenType(TokenType.FUNGIBLE_COMMON)
    assert token_type._token_type == TokenType.FUNGIBLE_COMMON


def test_init_non_fungible():
    """
    Test initializing a TokenType as NON_FUNGIBLE_UNIQUE.
    """
    token_type = TokenType(TokenType.NON_FUNGIBLE_UNIQUE)
    assert token_type._token_type == TokenType.NON_FUNGIBLE_UNIQUE


def test_init_invalid():
    """
    Test that initializing a TokenType with an invalid value raises a ValueError.
    """
    with pytest.raises(ValueError, match="Invalid TokenType value"):
        TokenType(2)  # Not 0 or 1


def test_from_proto_fungible():
    """
    Test creating a TokenType instance from the protobuf enum (FUNGIBLE_COMMON).
    """
    token_type = TokenType.from_proto(basic_types_pb2.TokenType.FUNGIBLE_COMMON)
    assert token_type._token_type == TokenType.FUNGIBLE_COMMON


def test_from_proto_non_fungible():
    """
    Test creating a TokenType instance from the protobuf enum (NON_FUNGIBLE_UNIQUE).
    """
    token_type = TokenType.from_proto(basic_types_pb2.TokenType.NON_FUNGIBLE_UNIQUE)
    assert token_type._token_type == TokenType.NON_FUNGIBLE_UNIQUE


def test_from_proto_invalid():
    """
    Test that from_proto with an invalid protobuf value raises a ValueError.
    """
    with pytest.raises(ValueError, match="Unknown TokenType proto value"):
        TokenType.from_proto(2)


def test_to_proto_fungible():
    """
    Test converting a TokenType(FUNGIBLE_COMMON) to its protobuf enum integer.
    """
    token_type = TokenType(TokenType.FUNGIBLE_COMMON)
    proto_value = token_type.to_proto()
    assert proto_value == basic_types_pb2.TokenType.FUNGIBLE_COMMON


def test_to_proto_non_fungible():
    """
    Test converting a TokenType(NON_FUNGIBLE_UNIQUE) to its protobuf enum integer.
    """
    token_type = TokenType(TokenType.NON_FUNGIBLE_UNIQUE)
    proto_value = token_type.to_proto()
    assert proto_value == basic_types_pb2.TokenType.NON_FUNGIBLE_UNIQUE


def test_from_string_fungible():
    """
    Test parsing the string 'FUNGIBLE_COMMON' into a TokenType.
    """
    token_type = TokenType.from_string("FUNGIBLE_COMMON")
    assert token_type._token_type == TokenType.FUNGIBLE_COMMON


def test_from_string_non_fungible():
    """
    Test parsing the string 'NON_FUNGIBLE_UNIQUE' into a TokenType.
    """
    token_type = TokenType.from_string("NON_FUNGIBLE_UNIQUE")
    assert token_type._token_type == TokenType.NON_FUNGIBLE_UNIQUE


def test_from_string_invalid():
    """
    Test that from_string with an unknown string raises a ValueError.
    """
    with pytest.raises(ValueError, match="Invalid token type string"):
        TokenType.from_string("unknown_type")


def test_str_fungible():
    """
    Test the string representation of a TokenType(FUNGIBLE_COMMON).
    """
    token_type = TokenType(TokenType.FUNGIBLE_COMMON)
    assert str(token_type) == "FUNGIBLE_COMMON"


def test_str_non_fungible():
    """
    Test the string representation of a TokenType(NON_FUNGIBLE_UNIQUE).
    """
    token_type = TokenType(TokenType.NON_FUNGIBLE_UNIQUE)
    assert str(token_type) == "NON_FUNGIBLE_UNIQUE"
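Since the file imports pytest, the suite is intended to run under pytest once it is saved into the project's test directory (the exact path is not shown in this diff), for example:

pytest -v    # from the repository root; picks up these TokenType tests along with the rest of the suite

Note that the assertions reach into the private _token_type attribute, which is where the class stores its enum value.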