From 2ea37949bfdec94d1305f4b952ecee5e587818f2 Mon Sep 17 00:00:00 2001
From: KG
Date: Tue, 4 Jun 2024 01:15:14 -0500
Subject: [PATCH] Improve `TokenizedEntry` formatters

---
 tivars/types/tokenized.py | 36 +++++++++++++++++++++---------------
 1 file changed, 21 insertions(+), 15 deletions(-)

diff --git a/tivars/types/tokenized.py b/tivars/types/tokenized.py
index ee193c3..bae71f1 100644
--- a/tivars/types/tokenized.py
+++ b/tivars/types/tokenized.py
@@ -47,15 +47,18 @@
     def __format__(self, format_spec: str) -> str:
         spec, lang = format_spec.split(".")
 
-        match spec:
-            case "":
-                return self.decode(self.data, lang=lang)
+        try:
+            match spec:
+                case "":
+                    return self.decode(self.data, lang=lang)
 
-            case "t":
-                return self.decode(self.data, lang=lang, mode="accessible")
+                case "t":
+                    return self.decode(self.data, lang=lang, mode="accessible")
 
-            case _:
-                return super().__format__(format_spec)
+        except KeyError:
+            pass
+
+        return super().__format__(format_spec)
 
     @staticmethod
     def decode(data: bytes, *, lang: str = "en", mode: str = "display") -> str | bytes:
@@ -364,18 +367,21 @@ class TIAsmProgram(TIProgram):
     is_tokenized = False
 
     def __format__(self, format_spec: str) -> str:
-        try:
-            match [*format_spec]:
-                case sep, *width if width:
-                    return self.data.hex(sep, int(''.join(width)))
+        if match := re.fullmatch(r"(?P<sep>\D)?(?P<width>\d+)?x", format_spec):
+            match match["sep"], match["width"]:
+                case None, None:
+                    return self.data.hex()
 
-                case sep, *_:
+                case sep, None:
                     return self.data.hex(sep)
 
-                case _:
-                    return self.data.hex()
+                case None, width:
+                    return self.data.hex(" ", int(width))
+
+                case sep, width:
+                    return self.data.hex(sep, int(width))
 
-        except TypeError:
+        else:
             return super().__format__(format_spec)
 
     def get_min_os(self, data: bytes = None) -> OsVersion: