From d3c73f541a8ae4d50f3189e2036a33817b02ee3e Mon Sep 17 00:00:00 2001
From: Moe Jangda
Date: Wed, 6 Nov 2024 16:45:28 -0600
Subject: [PATCH] feat: port over initial python schema extraction PoC (#3332)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

# Summary

This PR ports the Python schema extraction PoC into this repo.

> [!WARNING]
> Still mostly unusable, but this lays most of the foundation.

# Changes

- refactored the `verb` decorator. Switched to a pattern that doesn't require dangling, dynamically attached mutable properties on functions; instead, each decorated verb function is encapsulated in a class that captures the type information needed by the runtime gRPC server and by schema extraction. A sketch of the new pattern is shown below.
- set up an initial stab at a directory structure. The aim is one package per schema feature, where each package contains:
  - an encapsulating class
  - a decorator
  - schema extraction
- minor refactor of `VerbExtractor` to use `isinstance(func, Verb)` instead of `getattr(func, "_is_ftl_verb", False)`.
- set up a runnable `schema_extractor` (note: this will change to a standalone executable in the next PR; it lives in a `cli` dir temporarily).
- added a TODO in the Python language plugin's `Build` method.

Props to @worstell for doing the actual schema extraction work! Will co-author the merge commit.
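The core of the refactor is a `Verb` wrapper class plus a thin decorator. Condensed from `python-runtime/ftl/src/ftl/verb/model.py` and `decorator.py` in this PR, the pattern looks roughly like this (a sketch, not the full implementation):

```python
import functools
import inspect
from typing import Any, Callable, Optional, Type, TypeVar, Union, get_type_hints

F = TypeVar("F", bound=Callable[..., Any])


class Verb:
    """Encapsulates a decorated verb function plus the type information
    needed by the runtime gRPC server and by schema extraction."""

    def __init__(self, func: F, *, export: bool = False) -> None:
        self.func = func
        self.export = export
        self._type_hints = get_type_hints(func)
        self._first_param = next(iter(inspect.signature(func).parameters))

    def get_input_type(self) -> Type:
        # Type of the first (request) parameter.
        return self._type_hints[self._first_param]

    def get_output_type(self) -> Type:
        # Declared return (response) type.
        return self._type_hints["return"]

    def __call__(self, *args, **kwargs):
        return self.func(*args, **kwargs)


def verb(
    func: Optional[F] = None, *, export: bool = False
) -> Union[F, Callable[[F], F]]:
    # No more dangling `fn._is_ftl_verb = True` marker: the wrapper instance
    # itself is the marker, so extractors can check `isinstance(func, Verb)`.
    def actual_decorator(fn: F) -> F:
        return functools.update_wrapper(Verb(fn, export=export), fn)

    return actual_decorator(func) if func is not None else actual_decorator
```

Because the decorated object is now a `Verb` instance, both `VerbExtractor` and the runtime can detect verbs with a plain `isinstance` check and read the request/response types straight off the wrapper.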
# Usage

> [!NOTE]
> From the `python-runtime/ftl` dir

```
❯ uv run python -m ftl.cli.schema_extractor $(pwd)/../../examples/python/echo
Error importing module decorator: attempted relative import with no known parent package
/Users/moe/code/tbd/ose/ftl/python-runtime/ftl/src/ftl/extract/context.py:76: UserWarning: google.protobuf.service module is deprecated. RPC implementations should provide code generator plugins which generate code specific to the RPC implementation. service.py will be removed in Jan 2025
  spec.loader.exec_module(module)
Extracted Decl:
pos {
  filename: "/path/to/ftl/python-runtime/ftl/../../examples/python/echo/echo.py"
  line: 17
}
name: "echo"
request {
  ref {
    name: "EchoRequest"
    module: "echo"
  }
}
response {
  ref {
    name: "EchoResponse"
    module: "echo"
  }
}

Extracted Decl:
pos {
  filename: "/path/to/ftl/python-runtime/ftl/../../examples/python/echo/echo.py"
  line: 7
}
name: "EchoRequest"
fields {
  name: "name"
  type {
    string {
    }
  }
}

Extracted Decl:
pos {
  filename: "/path/to/ftl/python-runtime/ftl/../../examples/python/echo/echo.py"
  line: 12
}
name: "EchoResponse"
fields {
  name: "message"
  type {
    string {
    }
  }
}
```

---------

Co-authored-by: github-actions[bot]
Co-authored-by: Alec Thomas
Co-authored-by: worstell
---
 .github/workflows/workflow-roadmap.yml        |   1 +
 backend/protos/__init__.py                    |   0
 examples/python/echo/uv.lock                  |  20 ++++
 python-runtime/compile/build.go               |   3 +
 python-runtime/ftl/pyproject.toml             |  14 +++
 python-runtime/ftl/src/ftl/__init__.py        |   2 +-
 python-runtime/ftl/src/ftl/cli/__init__.py    |   0
 .../ftl/src/ftl/cli/schema_extractor.py       | 105 ++++++++++++++++++
 .../ftl/src/ftl/decorators/__init__.py        |   3 -
 python-runtime/ftl/src/ftl/decorators/verb.py |  30 -----
 .../ftl/src/ftl/extract/__init__.py           |  22 ++++
 python-runtime/ftl/src/ftl/extract/common.py  |  99 +++++++++++++++++
 python-runtime/ftl/src/ftl/extract/context.py |  94 ++++++++++++++++
 .../ftl/src/ftl/extract/transitive.py         |  88 +++++++++++++++
 python-runtime/ftl/src/ftl/verb/__init__.py   |   5 +
 python-runtime/ftl/src/ftl/verb/decorator.py  |  17 +++
 python-runtime/ftl/src/ftl/verb/extractor.py  |  45 ++++++++
 python-runtime/ftl/src/ftl/verb/model.py      |  31 ++++++
 18 files changed, 545 insertions(+), 34 deletions(-)
 create mode 100644 backend/protos/__init__.py
 create mode 100644 python-runtime/ftl/src/ftl/cli/__init__.py
 create mode 100644 python-runtime/ftl/src/ftl/cli/schema_extractor.py
 delete mode 100644 python-runtime/ftl/src/ftl/decorators/__init__.py
 delete mode 100644 python-runtime/ftl/src/ftl/decorators/verb.py
 create mode 100644 python-runtime/ftl/src/ftl/extract/__init__.py
 create mode 100644 python-runtime/ftl/src/ftl/extract/common.py
 create mode 100644 python-runtime/ftl/src/ftl/extract/context.py
 create mode 100644 python-runtime/ftl/src/ftl/extract/transitive.py
 create mode 100644 python-runtime/ftl/src/ftl/verb/__init__.py
 create mode 100644 python-runtime/ftl/src/ftl/verb/decorator.py
 create mode 100644 python-runtime/ftl/src/ftl/verb/extractor.py
 create mode 100644 python-runtime/ftl/src/ftl/verb/model.py

diff --git a/.github/workflows/workflow-roadmap.yml b/.github/workflows/workflow-roadmap.yml
index b80dd15f54..264c28da98 100644
--- a/.github/workflows/workflow-roadmap.yml
+++ b/.github/workflows/workflow-roadmap.yml
@@ -59,6 +59,7 @@ jobs:
             ["jvm"]=2439
             ["security"]=2438
             ["dx"]=2436
+            ["python"]=3339
           )
           issue_number=${label_to_issue_map["${{ matrix.label }}"]}
diff --git a/backend/protos/__init__.py b/backend/protos/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/examples/python/echo/uv.lock b/examples/python/echo/uv.lock
index f79a2c623a..89b80da344 100644
--- a/examples/python/echo/uv.lock
+++ b/examples/python/echo/uv.lock
@@ -16,3 +16,23 @@ requires-dist = [{ name = "ftl", directory = "../../../python-runtime/ftl" }]
 name = "ftl"
 version = "0.1.0"
 source = { directory = "../../../python-runtime/ftl" }
+dependencies = [
+    { name = "protobuf" },
+]
+
+[package.metadata]
+requires-dist = [{ name = "protobuf", specifier = ">=5.28.3" }]
+
+[[package]]
+name = "protobuf" +version = "5.28.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/74/6e/e69eb906fddcb38f8530a12f4b410699972ab7ced4e21524ece9d546ac27/protobuf-5.28.3.tar.gz", hash = "sha256:64badbc49180a5e401f373f9ce7ab1d18b63f7dd4a9cdc43c92b9f0b481cef7b", size = 422479 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/c5/05163fad52d7c43e124a545f1372d18266db36036377ad29de4271134a6a/protobuf-5.28.3-cp310-abi3-win32.whl", hash = "sha256:0c4eec6f987338617072592b97943fdbe30d019c56126493111cf24344c1cc24", size = 419624 }, + { url = "https://files.pythonhosted.org/packages/9c/4c/4563ebe001ff30dca9d7ed12e471fa098d9759712980cde1fd03a3a44fb7/protobuf-5.28.3-cp310-abi3-win_amd64.whl", hash = "sha256:91fba8f445723fcf400fdbe9ca796b19d3b1242cd873907979b9ed71e4afe868", size = 431464 }, + { url = "https://files.pythonhosted.org/packages/1c/f2/baf397f3dd1d3e4af7e3f5a0382b868d25ac068eefe1ebde05132333436c/protobuf-5.28.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a3f6857551e53ce35e60b403b8a27b0295f7d6eb63d10484f12bc6879c715687", size = 414743 }, + { url = "https://files.pythonhosted.org/packages/85/50/cd61a358ba1601f40e7d38bcfba22e053f40ef2c50d55b55926aecc8fec7/protobuf-5.28.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:3fa2de6b8b29d12c61911505d893afe7320ce7ccba4df913e2971461fa36d584", size = 316511 }, + { url = "https://files.pythonhosted.org/packages/5d/ae/3257b09328c0b4e59535e497b0c7537d4954038bdd53a2f0d2f49d15a7c4/protobuf-5.28.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:712319fbdddb46f21abb66cd33cb9e491a5763b2febd8f228251add221981135", size = 316624 }, + { url = "https://files.pythonhosted.org/packages/ad/c3/2377c159e28ea89a91cf1ca223f827ae8deccb2c9c401e5ca233cd73002f/protobuf-5.28.3-py3-none-any.whl", hash = "sha256:cee1757663fa32a1ee673434fcf3bf24dd54763c79690201208bafec62f19eed", size = 169511 }, +] diff --git a/python-runtime/compile/build.go b/python-runtime/compile/build.go index a6c65c23be..4b9b73ad5c 100644 --- a/python-runtime/compile/build.go +++ b/python-runtime/compile/build.go @@ -40,6 +40,9 @@ func Build(ctx context.Context, projectRootDir, stubsRoot string, config modulec buildDir := buildDir(config.Dir) + // TODO: call the python schema extractor. grab the output of le script. unmarshal into schema proto. unmarshal that into go type. 
return + // same with build errors + if err := internal.ScaffoldZip(buildTemplateFiles(), buildDir, mctx, scaffolder.Functions(scaffoldFuncs)); err != nil { return moduleSch, nil, fmt.Errorf("failed to scaffold build template: %w", err) } diff --git a/python-runtime/ftl/pyproject.toml b/python-runtime/ftl/pyproject.toml index a32edf62c0..378918a148 100644 --- a/python-runtime/ftl/pyproject.toml +++ b/python-runtime/ftl/pyproject.toml @@ -14,3 +14,17 @@ dependencies = [ [build-system] requires = ["hatchling"] build-backend = "hatchling.build" + + +[tool.ruff] +select = [ + "ANN001", # missing type annotation for function argument + "ANN002", # missing type annotation for *args + "ANN003", # missing type annotation for **kwargs + "ANN102", # missing type annotation for cls in classmethod + "ANN201", # missing return type annotation for public function + "ANN202", # missing return type annotation for private function + "ANN204", # missing return type annotation for special method + "ANN205", # missing return type annotation for staticmethod + "ANN206", # missing return type annotation for classmethod +] diff --git a/python-runtime/ftl/src/ftl/__init__.py b/python-runtime/ftl/src/ftl/__init__.py index 93ca480e42..beb896202a 100644 --- a/python-runtime/ftl/src/ftl/__init__.py +++ b/python-runtime/ftl/src/ftl/__init__.py @@ -1,3 +1,3 @@ -from .decorators import verb +from .verb import verb __all__ = ["verb"] diff --git a/python-runtime/ftl/src/ftl/cli/__init__.py b/python-runtime/ftl/src/ftl/cli/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python-runtime/ftl/src/ftl/cli/schema_extractor.py b/python-runtime/ftl/src/ftl/cli/schema_extractor.py new file mode 100644 index 0000000000..4d61fe2dcb --- /dev/null +++ b/python-runtime/ftl/src/ftl/cli/schema_extractor.py @@ -0,0 +1,105 @@ +import argparse +import ast +import concurrent.futures +import os +import sys +from contextlib import contextmanager + +from ftl.extract import ( + GlobalExtractionContext, + TransitiveExtractor, +) +from ftl.verb import ( + VerbExtractor, +) + +# analyzers is now a list of lists, where each sublist contains analyzers that can run in parallel +analyzers = [ + [VerbExtractor], + [TransitiveExtractor], +] + + +@contextmanager +def set_analysis_mode(path): + original_sys_path = sys.path.copy() + sys.path.append(path) + try: + yield + finally: + sys.path = original_sys_path + + +def analyze_directory(module_dir): + """Analyze all Python files in the given module_dir in parallel.""" + global_ctx = GlobalExtractionContext() + + file_paths = [] + for dirpath, _, filenames in os.walk(module_dir): + for filename in filenames: + if filename.endswith(".py"): + file_paths.append(os.path.join(dirpath, filename)) + + for analyzer_batch in analyzers: + with concurrent.futures.ProcessPoolExecutor() as executor: + future_to_file = { + executor.submit( + analyze_file, global_ctx, file_path, analyzer_batch + ): file_path + for file_path in file_paths + } + + for future in concurrent.futures.as_completed(future_to_file): + file_path = future_to_file[future] + try: + future.result() # raise any exception that occurred in the worker process + except Exception as exc: + print(f"failed to extract schema from {file_path}: {exc};") + # else: + # print(f"File {file_path} analyzed successfully.") + + for ref_key, decl in global_ctx.deserialize().items(): + print(f"Extracted Decl:\n{decl}") + + +def analyze_file(global_ctx: GlobalExtractionContext, file_path, analyzer_batch): + """Analyze a single Python file using 
multiple analyzers in parallel.""" + module_name = os.path.splitext(os.path.basename(file_path))[0] + file_ast = ast.parse(open(file_path).read()) + local_ctx = global_ctx.init_local_context() + + with concurrent.futures.ThreadPoolExecutor() as executor: + futures = [ + executor.submit( + run_analyzer, + analyzer_class, + local_ctx, + module_name, + file_path, + file_ast, + ) + for analyzer_class in analyzer_batch + ] + + for future in concurrent.futures.as_completed(futures): + try: + future.result() + except Exception as exc: + print(f"Analyzer generated an exception: {exc} in {file_path}") + + +def run_analyzer(analyzer_class, context, module_name, file_path, file_ast): + analyzer = analyzer_class(context, module_name, file_path) + analyzer.visit(file_ast) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument( + "module_dir", type=str, help="The Python module directory to analyze." + ) + args = parser.parse_args() + + dir = args.module_dir + with set_analysis_mode(dir): + analyze_directory(dir) diff --git a/python-runtime/ftl/src/ftl/decorators/__init__.py b/python-runtime/ftl/src/ftl/decorators/__init__.py deleted file mode 100644 index beb896202a..0000000000 --- a/python-runtime/ftl/src/ftl/decorators/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .verb import verb - -__all__ = ["verb"] diff --git a/python-runtime/ftl/src/ftl/decorators/verb.py b/python-runtime/ftl/src/ftl/decorators/verb.py deleted file mode 100644 index 079ef19b38..0000000000 --- a/python-runtime/ftl/src/ftl/decorators/verb.py +++ /dev/null @@ -1,30 +0,0 @@ -import functools -from typing import Any, Callable, Optional, TypeVar, Union - -F = TypeVar("F", bound=Callable[..., Any]) - - -def verb(func: Optional[F] = None) -> Union[F, Callable[[F], F]]: - func._is_ftl_verb = True - - def actual_decorator(fn: F) -> F: - # type_hints = get_type_hints(fn) - # sig = inspect.signature(fn) - # first_param = next(iter(sig.parameters)) - - # self._verb_registry[fn.__name__] = { - # "func": fn, - # "input_type": type_hints[first_param], - # "output_type": type_hints["return"], - # } - - @functools.wraps(fn) - def wrapper(*args, **kwargs): - return fn(*args, **kwargs) - - return wrapper - - if func is not None: - return actual_decorator(func) - - return actual_decorator diff --git a/python-runtime/ftl/src/ftl/extract/__init__.py b/python-runtime/ftl/src/ftl/extract/__init__.py new file mode 100644 index 0000000000..3c9282b199 --- /dev/null +++ b/python-runtime/ftl/src/ftl/extract/__init__.py @@ -0,0 +1,22 @@ +from .common import ( + extract_basic_type, + extract_class_type, + extract_function_type, + extract_map, + extract_slice, + extract_type, +) +from .context import GlobalExtractionContext, LocalExtractionContext +from .transitive import TransitiveExtractor + +__all__ = [ + "extract_type", + "extract_slice", + "extract_map", + "extract_basic_type", + "extract_class_type", + "extract_function_type", + "LocalExtractionContext", + "GlobalExtractionContext", + "TransitiveExtractor", +] diff --git a/python-runtime/ftl/src/ftl/extract/common.py b/python-runtime/ftl/src/ftl/extract/common.py new file mode 100644 index 0000000000..3f5d7b0463 --- /dev/null +++ b/python-runtime/ftl/src/ftl/extract/common.py @@ -0,0 +1,99 @@ +from typing import Any, Dict, List, Optional, Type + +from ftl.protos.xyz.block.ftl.v1.schema import schema_pb2 as schemapb + +from .context import LocalExtractionContext + + +def extract_type( + local_ctx: LocalExtractionContext, type_hint: Type[Any] +) -> 
Optional[schemapb.Type]: + """Extracts type information from Python type hints and maps it to schema types.""" + if isinstance(type_hint, list): + return extract_slice(local_ctx, type_hint) + + elif isinstance(type_hint, dict): + return extract_map(local_ctx, type_hint) + + elif type_hint is Any: + return schemapb.Type(any=schemapb.Any()) + + elif isinstance(type_hint, type): + if ( + type_hint is str + or type_hint is int + or type_hint is bool + or type_hint is float + ): + return extract_basic_type(type_hint) + + if hasattr(type_hint, "__bases__"): + return extract_class_type(local_ctx, type_hint) + + if callable(type_hint): + return extract_function_type(local_ctx, type_hint) + + # Handle parametric types (e.g., List[int], Dict[str, int]) - Optional, uncomment if needed + # elif hasattr(type_hint, "__origin__"): + # return extract_parametric_type(local_ctx, type_hint) + + # TODO: raise exception for unsupported types + return None + + +def extract_slice( + local_ctx: LocalExtractionContext, type_hint: List[Any] +) -> Optional[schemapb.Type]: + if isinstance(type_hint, list) and type_hint: + element_type = extract_type(local_ctx, type_hint[0]) # Assuming non-empty list + if element_type: + return schemapb.Type(array=schemapb.Array(element=element_type)) + return None + + +def extract_map( + local_ctx: LocalExtractionContext, type_hint: Dict[Any, Any] +) -> Optional[schemapb.Type]: + if isinstance(type_hint, dict): + key_type = extract_type(local_ctx, list(type_hint.keys())[0]) + value_type = extract_type(local_ctx, list(type_hint.values())[0]) + if key_type and value_type: + return schemapb.Type(map=schemapb.Map(key=key_type, value=value_type)) + return None + + +def extract_basic_type(type_hint: Type[Any]) -> Optional[schemapb.Type]: + type_map = { + str: schemapb.Type(string=schemapb.String()), + int: schemapb.Type(int=schemapb.Int()), + bool: schemapb.Type(bool=schemapb.Bool()), + float: schemapb.Type(float=schemapb.Float()), + } + return type_map.get(type_hint, None) + + +# Uncomment and implement parametric types if needed +# def extract_parametric_type(local_ctx: LocalExtractionContext, type_hint: Type[Any]) -> Optional[schemapb.Type]: +# if hasattr(type_hint, "__args__"): +# base_type = extract_type(local_ctx, type_hint.__origin__) +# param_types = [extract_type(local_ctx, arg) for arg in type_hint.__args__] +# if isinstance(base_type, schemapb.Ref): +# base_type.type_parameters.extend(param_types) +# return base_type +# return None + + +def extract_class_type( + local_ctx: LocalExtractionContext, type_hint: Type[Any] +) -> Optional[schemapb.Type]: + ref = schemapb.Ref(name=type_hint.__name__, module=type_hint.__module__) + local_ctx.add_needs_extraction(ref) + return schemapb.Type(ref=ref) + + +def extract_function_type( + local_ctx: LocalExtractionContext, type_hint: Type[Any] +) -> Optional[schemapb.Type]: + ref = schemapb.Ref(name=type_hint.__name__, module=type_hint.__module__) + local_ctx.add_needs_extraction(ref) + return schemapb.Type(ref=ref) diff --git a/python-runtime/ftl/src/ftl/extract/context.py b/python-runtime/ftl/src/ftl/extract/context.py new file mode 100644 index 0000000000..0be1d62efb --- /dev/null +++ b/python-runtime/ftl/src/ftl/extract/context.py @@ -0,0 +1,94 @@ +import importlib.util +import multiprocessing +import threading + +from ftl.protos.xyz.block.ftl.v1.schema import schema_pb2 as schemapb + + +class LocalExtractionContext: + """Local context for a single Python file.""" + + def __init__(self, needs_extraction, verbs, data): + self.verbs = verbs + 
self.data = data + self.needs_extraction = needs_extraction + self.module_cache = {} + self.cache_lock = threading.Lock() + + def add_verb(self, module_name, verb): + """Add a verb to the shared verbs map.""" + ref_key = RefKey(module=module_name, name=verb.name) + self.verbs[ref_key] = verb.SerializeToString() + + def add_data(self, module_name, data): + """Add a verb to the shared verbs map.""" + ref_key = RefKey(module=module_name, name=data.name) + self.data[ref_key] = data.SerializeToString() + + def add_needs_extraction(self, ref: schemapb.Ref): + ref_key = RefKey(module=ref.module, name=ref.name) + # Only add the key if it doesn't exist in the dictionary, not if it's False + if ref_key not in self.needs_extraction: + self.needs_extraction[ref_key] = True + + def remove_needs_extraction(self, module, name): + ref_key = RefKey(module=module, name=name) + self.needs_extraction[ref_key] = False + + def must_extract(self, module, name): + ref_key = RefKey(module=module, name=name) + return ref_key in self.needs_extraction + + def load_python_module(self, module_name, file_path): + """Load a Python module dynamically and cache it locally.""" + with self.cache_lock: + if file_path in self.module_cache: + return self.module_cache[file_path] + + spec = importlib.util.spec_from_file_location(module_name, file_path) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + self.module_cache[file_path] = module + return module + + +class GlobalExtractionContext: + """Global context across multiple files in a package.""" + + def __init__(self): + manager = multiprocessing.Manager() + self.needs_extraction = manager.dict() + self.verbs = manager.dict() + self.data = manager.dict() + + def deserialize(self): + deserialized_decls = {} + for ref_key, serialized_decl in self.verbs.items(): + decl = schemapb.Verb() + decl.ParseFromString(serialized_decl) + deserialized_decls[ref_key] = decl + for ref_key, serialized_decl in self.data.items(): + decl = schemapb.Data() + decl.ParseFromString(serialized_decl) + deserialized_decls[ref_key] = decl + return deserialized_decls + + def init_local_context(self) -> LocalExtractionContext: + return LocalExtractionContext(self.needs_extraction, self.verbs, self.data) + + +class RefKey: + def __init__(self, module, name): + self.module = module + self.name = name + + def __eq__(self, other): + if isinstance(other, RefKey): + return self.module == other.module and self.name == other.name + return False + + def __hash__(self): + return hash((self.module, self.name)) + + def __repr__(self): + return f"RefKey(module={self.module}, name={self.name})" diff --git a/python-runtime/ftl/src/ftl/extract/transitive.py b/python-runtime/ftl/src/ftl/extract/transitive.py new file mode 100644 index 0000000000..cd17ee84da --- /dev/null +++ b/python-runtime/ftl/src/ftl/extract/transitive.py @@ -0,0 +1,88 @@ +import ast +from typing import Any, Optional, Type + +from ftl.protos.xyz.block.ftl.v1.schema import schema_pb2 as schemapb + +from .common import extract_type +from .context import LocalExtractionContext + + +class TransitiveExtractor(ast.NodeVisitor): + def __init__(self, context: LocalExtractionContext, module_name, file_path): + self.context = context + self.module_name = module_name + self.file_path = file_path + + def load_function(self, func_name): + try: + module = self.context.load_python_module(self.module_name, self.file_path) + func = getattr(module, func_name, None) + if func is None: + print(f"Function {func_name} not found in 
{self.module_name}") + return None + return func + except ImportError as e: + print(f"Error importing module {self.module_name}: {e}") + return None + + @staticmethod + def convert_ast_annotation_to_type_hint( + annotation_node: ast.AST, + ) -> Optional[Type[Any]]: + """Converts an AST annotation node to a Python type hint.""" + if isinstance(annotation_node, ast.Name): + # Handle built-in types like int, str, etc. + type_name = annotation_node.id + try: + return eval(type_name) # Convert to actual type like 'int', 'str', etc. + except NameError: + return None + # Handle other cases like ast.Subscript, etc. (extend this for complex types) + return None + + def visit_ClassDef(self, node): + if self.context.must_extract(self.module_name, node.name): + lineno = node.lineno + col_offset = node.col_offset + export = False + for decorator in node.decorator_list: + if isinstance(decorator, ast.Name) and decorator.id == "export": + export = True + + # Extract fields and their types + fields = [] + for class_node in node.body: + if isinstance( + class_node, ast.AnnAssign + ): # Annotated assignment (field) + field_name = ( + class_node.target.id + if isinstance(class_node.target, ast.Name) + else None + ) + if field_name and class_node.annotation: + type_hint = self.convert_ast_annotation_to_type_hint( + class_node.annotation + ) + if type_hint: + field_type = extract_type(self.context, type_hint) + if field_type: + field_schema = schemapb.Field( + name=field_name, type=field_type + ) + fields.append(field_schema) + # TODO: else: + # surface error; require type hint for everything + + data = schemapb.Data( + pos=schemapb.Position( + filename=self.file_path, line=lineno, column=col_offset + ), + name=node.name, + fields=fields, + export=export, + ) + + # Add to context or perform further processing + self.context.add_data(self.module_name, data) + self.context.remove_needs_extraction(self.module_name, data.name) diff --git a/python-runtime/ftl/src/ftl/verb/__init__.py b/python-runtime/ftl/src/ftl/verb/__init__.py new file mode 100644 index 0000000000..403dec5838 --- /dev/null +++ b/python-runtime/ftl/src/ftl/verb/__init__.py @@ -0,0 +1,5 @@ +from .decorator import verb +from .extractor import VerbExtractor +from .model import Verb + +__all__ = ["verb", "Verb", "VerbExtractor"] diff --git a/python-runtime/ftl/src/ftl/verb/decorator.py b/python-runtime/ftl/src/ftl/verb/decorator.py new file mode 100644 index 0000000000..0245f7c2d7 --- /dev/null +++ b/python-runtime/ftl/src/ftl/verb/decorator.py @@ -0,0 +1,17 @@ +import functools +from typing import Any, Callable, Optional, TypeVar, Union + +from .model import Verb + +F = TypeVar("F", bound=Callable[..., Any]) + + +def verb( + func: Optional[F] = None, *, export: bool = False +) -> Union[F, Callable[[F], F]]: + def actual_decorator(fn: F) -> F: + return functools.update_wrapper(Verb(fn, export=export), fn) + + if func is not None: + return actual_decorator(func) + return actual_decorator diff --git a/python-runtime/ftl/src/ftl/verb/extractor.py b/python-runtime/ftl/src/ftl/verb/extractor.py new file mode 100644 index 0000000000..157f775ef5 --- /dev/null +++ b/python-runtime/ftl/src/ftl/verb/extractor.py @@ -0,0 +1,45 @@ +import ast +from typing import Optional + +from ftl.extract import LocalExtractionContext, extract_type +from ftl.protos.xyz.block.ftl.v1.schema import schema_pb2 as schemapb + +from .model import Verb + + +class VerbExtractor(ast.NodeVisitor): + def __init__( + self, context: LocalExtractionContext, module_name: str, file_path: str + 
): + self.context = context + self.module_name = module_name + self.file_path = file_path + + def load_function(self, func_name: str) -> Optional[Verb]: + """Load a function from the module and return it if it exists.""" + try: + module = self.context.load_python_module(self.module_name, self.file_path) + return getattr(module, func_name, None) + except ImportError as e: + print(f"Error importing module {self.module_name}: {e}") + return None + + def visit_FunctionDef(self, node: ast.FunctionDef) -> None: + """Visit a function definition and extract schema if it's a verb.""" + func = self.load_function(node.name) + if func is None or not isinstance(func, Verb): + return + + try: + verb = schemapb.Verb( + pos=schemapb.Position( + filename=self.file_path, line=node.lineno, column=node.col_offset + ), + name=node.name, + request=extract_type(self.context, func.get_input_type()), + response=extract_type(self.context, func.get_output_type()), + export=func.export, + ) + self.context.add_verb(self.module_name, verb) + except Exception as e: + print(f"Error extracting Verb: {e}") diff --git a/python-runtime/ftl/src/ftl/verb/model.py b/python-runtime/ftl/src/ftl/verb/model.py new file mode 100644 index 0000000000..40f17bf2bf --- /dev/null +++ b/python-runtime/ftl/src/ftl/verb/model.py @@ -0,0 +1,31 @@ +import functools +import inspect +from typing import Any, Callable, Type, TypeVar, get_type_hints + +F = TypeVar("F", bound=Callable[..., Any]) + + +class Verb: + def __init__(self, func: F, *, export: bool = False) -> None: + self.func = func + self.export = export + + self._type_hints = get_type_hints(func) + self._signature = inspect.signature(func) + self._first_param = next(iter(self._signature.parameters)) + + def get_input_type(self) -> Type: + """Get the input type (first parameter type) of the verb.""" + return self._type_hints[self._first_param] + + def get_output_type(self) -> Type: + """Get the output type (return type) of the verb.""" + return self._type_hints["return"] + + def __call__(self, *args, **kwargs): + return self.func(*args, **kwargs) + + def __get__(self, obj, objtype=None): + if obj is None: + return self + return functools.partial(self.__call__, obj)
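For reference, the `examples/python/echo/echo.py` module that the Usage section extracts from presumably looks something like the sketch below. This is illustrative only: the decl and field names come from the extracted output above, but the module body itself is not part of this patch.

```python
from dataclasses import dataclass

from ftl import verb


@dataclass
class EchoRequest:
    name: str


@dataclass
class EchoResponse:
    message: str


@verb
def echo(req: EchoRequest) -> EchoResponse:
    # VerbExtractor turns this into a Verb decl (request/response refs), and
    # TransitiveExtractor emits Data decls for EchoRequest and EchoResponse.
    return EchoResponse(message=f"Hello, {req.name}!")
```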