Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
43 commits
Select commit Hold shift + click to select a range
a3cf34a
Staging branch for LLM module
eb8680 Oct 9, 2025
b57a18d
Move LLM interface code from `robotl` (#358)
jfeser Oct 9, 2025
8170a64
Implement basic tool calling (#366)
jfeser Oct 10, 2025
bcbf7bb
Merge branch 'master' into staging-llm
eb8680 Oct 10, 2025
66a5eb4
Merge branch 'master' into staging-llm
jfeser Oct 20, 2025
ab9e2fe
enable strict mode for tool calling (#375)
jfeser Oct 20, 2025
661cab8
add structured generation and remove unused `decode` operation (#376)
jfeser Oct 20, 2025
02c4378
implemented support for class methods in `Template.define` (#377)
kiranandcode Oct 24, 2025
d9d1782
Revert "implemented support for class methods in `Template.define` (#…
kiranandcode Oct 24, 2025
1053fdd
Add support for methods in `Template.define` (#377) (#378)
kiranandcode Oct 26, 2025
54efb77
Adding a lower-level event and a logger example (#382)
datvo06 Oct 28, 2025
657924e
Add support for tools returning images (#385)
kiranandcode Oct 29, 2025
68af295
Implement Caching Handler for LLM (#392)
datvo06 Nov 12, 2025
b9207b4
implement first to k-ahead sampler (#412)
kiranandcode Nov 24, 2025
41b52b4
Add inheritable class for stateful templates (#416)
jfeser Nov 26, 2025
248ff6e
Support multiple providers (via `litellm`) (#418)
kiranandcode Dec 1, 2025
e4c0d99
store source of generated functions in `__src__` attribute (#403)
kiranandcode Dec 2, 2025
5cb8e89
Adds type-based encoding and support for legacy APIs (#411)
kiranandcode Dec 2, 2025
1f50599
Add LLM Integration tests to the workflows. (#420)
kiranandcode Dec 3, 2025
8118a8f
Merge master into llm-staging (#423)
jfeser Dec 4, 2025
62e45a4
Fix `staging-llm` diff against `master` (#426)
eb8680 Dec 5, 2025
1c37637
Implement a RetryHandler for LLM module (#428)
datvo06 Dec 9, 2025
bb5bded
Merge `master` into `staging-llm` again (#443)
eb8680 Dec 12, 2025
44d7d12
Implements a unified `encode`ing/`decode`ing pipeline for `llm` (#442)
kiranandcode Dec 15, 2025
931d507
Initial version of Lexical Context Collection - Collecting Tools and …
datvo06 Dec 15, 2025
8530fd0
Update `staging-llm` from `master` (#457)
eb8680 Dec 22, 2025
bae8d02
Convert `Template` into an operation (#424)
jfeser Dec 29, 2025
3311d1b
Fail when encoding terms or operations (#474)
jfeser Dec 29, 2025
23f95ef
Implemented record and replay fixtures for LLM calls (#467)
kiranandcode Dec 31, 2025
2094f22
Remove program synthesis code (#475)
jfeser Dec 31, 2025
05b28ef
Disables direct recursion on templates by default (#466)
kiranandcode Dec 31, 2025
d91d4c9
drop k-ahead sampler (#479)
jfeser Dec 31, 2025
e3e8c7e
Document `Template` and `Tool` (#478)
jfeser Jan 1, 2026
7ffe7f8
Encodable Type
datvo06 Jan 5, 2026
ef03c72
Merge
datvo06 Feb 26, 2026
9a1a370
Clean up
datvo06 Feb 26, 2026
cedf128
More cleanup
datvo06 Feb 26, 2026
c754b3e
Update mypy type check to support type check for type
datvo06 Feb 26, 2026
fe7e41c
Lint
datvo06 Feb 26, 2026
c940b7f
Fix tests
datvo06 Feb 26, 2026
db93895
Lint
datvo06 Feb 26, 2026
ec17e45
Minor fix for python 3.13
datvo06 Feb 26, 2026
fc36f33
Merge branch 'master' into encodable_type
datvo06 Mar 4, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
127 changes: 127 additions & 0 deletions effectful/handlers/llm/encoding.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
import ast
import base64
import collections
import functools
import inspect
import io
import sys
import textwrap
import types
import typing
Expand Down Expand Up @@ -630,6 +632,123 @@ def deserialize(self, serialized_value: str) -> SynthesizedFunction:
return SynthesizedFunction.model_validate_json(serialized_value)


class SynthesizedType(pydantic.BaseModel):
    """Structured output for type/class synthesis.

    Pydantic model representing synthesized class code with type name and
    module code.  NOTE(review): the field descriptions below are presumably
    surfaced to the LLM through the generated JSON schema, so they double as
    prompt text — keep them precise; confirm against the structured-output
    call site.
    """

    # Name of the class that the generated module must define.
    type_name: str = pydantic.Field(
        ...,
        description="The name of the class that satisfies the specification",
    )
    # Full, self-contained Python source of the module defining that class.
    module_code: str = pydantic.Field(
        ...,
        description="Complete Python module code with the class definition (no imports needed)",
    )


@dataclass
class TypeEncodable(Encodable[type, SynthesizedType]):
    """Encode/decode Python classes via :class:`SynthesizedType` payloads.

    ``encode`` captures a class's source code; ``decode`` executes
    LLM-synthesized module code inside a freshly created real module and
    returns the class it defines.  Decoded classes are installed under
    unique names in ``sys.modules`` so they behave like ordinarily imported
    classes (e.g. for ``inspect.getsource``).
    """

    base: type[type]  # the registered base kind (``type`` itself)
    enc: type[SynthesizedType]  # structured-output model for the wire format
    ctx: Mapping[str, Any]  # extra globals made visible to synthesized code

    # Monotonically increasing id so every decode gets a unique module name.
    _decode_counter: typing.ClassVar[int] = 0

    def encode(self, value: type) -> SynthesizedType:
        """Encode *value* by capturing its source code.

        Falls back to an empty class stub when the source cannot be
        retrieved (e.g. builtins or classes defined interactively).
        """
        type_name = value.__name__
        try:
            source = inspect.getsource(value)
        except (OSError, TypeError):
            source = f"class {type_name}: pass # Source unavailable"

        return SynthesizedType(
            type_name=type_name, module_code=textwrap.dedent(source).strip()
        )

    def decode(self, encoded_value: SynthesizedType) -> type:
        """Decode a SynthesizedType to a type.

        Executes the module code and returns the named class.

        Raises:
            ValueError: on a syntax error in the generated code, when the
                named class is missing after execution, or when the bound
                name is not a class.
        """
        type_name = encoded_value.type_name
        module_code = textwrap.dedent(encoded_value.module_code).strip() + "\n"

        TypeEncodable._decode_counter += 1
        module_name = (
            f"_llm_effectful_synthesized_types.{type_name}"
            f".{TypeEncodable._decode_counter}"
        )
        filename = f"<synthesized_type:{module_name}>"

        # Create a real module and put it to sys.modules
        mod = types.ModuleType(module_name)
        mod.__file__ = filename
        sys.modules[module_name] = mod

        # globals = module.__dict__ + context
        g = mod.__dict__
        g.update({"collections": collections})
        if self.ctx:
            g.update(self.ctx)
        g.update({"__name__": module_name, "__file__": filename})
        g.setdefault("__package__", module_name.rpartition(".")[0])

        try:
            try:
                # Parse via evaluation effect (also registers source in linecache)
                tree = evaluation.parse(module_code, filename)

                # Type-check the synthesized module
                evaluation.type_check(tree, self.ctx, None, type)

                # Compile and execute via evaluation effects
                code_obj = evaluation.compile(tree, filename)
                evaluation.exec(code_obj, g)
            except SyntaxError as exc:
                raise ValueError(f"Syntax error in generated code: {exc}") from exc

            if type_name not in g:
                raise ValueError(
                    f"Type '{type_name}' not found after execution. "
                    f"Available names: {[k for k in g.keys() if not k.startswith('_')]}"
                )

            synthesized_type = g[type_name]

            if not isinstance(synthesized_type, type):
                raise ValueError(
                    f"'{type_name}' is not a type, got {type(synthesized_type).__name__}"
                )
        except BaseException:
            # Fix: a failed decode previously left the half-initialized module
            # registered in sys.modules forever; drop it on every failure path
            # so repeated failed decodes cannot leak modules.
            sys.modules.pop(module_name, None)
            raise

        # Attach source code and module name
        synthesized_type.__source__ = module_code  # type: ignore[attr-defined]
        synthesized_type.__synthesized__ = encoded_value  # type: ignore[attr-defined]
        synthesized_type.__module__ = module_name

        # Set __firstlineno__ for Python 3.13+ (inspect.getsource requires it).
        # Must be set AFTER __module__ since __module__ assignment can clear it.
        firstlineno = next(
            (
                n.lineno
                for n in ast.walk(ast.parse(module_code))
                if isinstance(n, ast.ClassDef) and n.name == type_name
            ),
            1,
        )
        synthesized_type.__firstlineno__ = firstlineno  # type: ignore[attr-defined]

        return synthesized_type

    def serialize(
        self, encoded_value: SynthesizedType
    ) -> Sequence[OpenAIMessageContentListBlock]:
        """Serialize the encoded value as a single JSON text block."""
        return [{"type": "text", "text": encoded_value.model_dump_json()}]

    def deserialize(self, serialized_value: str) -> SynthesizedType:
        """Parse a JSON string back into a SynthesizedType."""
        return SynthesizedType.model_validate_json(serialized_value)


def _param_model(sig: inspect.Signature) -> type[pydantic.BaseModel]:
return pydantic.create_model(
"Params",
Expand Down Expand Up @@ -1048,6 +1167,14 @@ def _encodable_callable(
return CallableEncodable(ty, typed_enc, ctx, expected_params, expected_return)


@Encodable.define.register(type)
def _encodable_type(
    ty: type, ctx: Mapping[str, Any] | None
) -> Encodable[type, SynthesizedType]:
    """Build the :class:`TypeEncodable` used for values of kind ``type``.

    A missing (or empty) context is normalized to an empty mapping.
    """
    return TypeEncodable(ty, SynthesizedType, ctx or {})


@Encodable.define.register(Tool)
def _encodable_tool[**P, T](
ty: type[Tool[P, T]], ctx: Mapping[str, Any] | None
Expand Down
48 changes: 27 additions & 21 deletions effectful/handlers/llm/evaluation.py
Original file line number Diff line number Diff line change
Expand Up @@ -564,12 +564,11 @@ def mypy_type_check(
if not module.body:
raise TypeError("mypy_type_check: module.body is empty")
last = module.body[-1]
if not isinstance(last, ast.FunctionDef):
if not isinstance(last, ast.FunctionDef | ast.ClassDef):
raise TypeError(
f"mypy_type_check: last statement must be a function definition, "
f"mypy_type_check: last statement must be a function or class definition, "
f"got {type(last).__name__}"
)
func_name = last.name

imports = collect_imports(ctx)
# Ensure annotations in the postlude can be resolved (e.g. collections.abc.Callable, typing)
Expand Down Expand Up @@ -614,33 +613,40 @@ def mypy_type_check(
stub_module_body = ast.Module(body=module_body, type_ignores=[])
_RenameTransformer(rename_map).visit(stub_module_body)
module_body = stub_module_body.body
tc_func_name = rename_map.get(func_name, func_name)
else:
module_body = list(module.body)
tc_func_name = func_name

param_types = expected_params
expected_callable_type: type = typing.cast(
type,
collections.abc.Callable[param_types, expected_return]
if expected_params is not None
else collections.abc.Callable[..., expected_return],
)

expected_callable_ast = type_to_ast(expected_callable_type)
postlude = ast.AnnAssign(
target=ast.Name(id="_synthesized_check", ctx=ast.Store()),
annotation=expected_callable_ast,
value=ast.Name(id=tc_func_name, ctx=ast.Load()),
simple=1,
)
postlude: list[ast.stmt] = []
if isinstance(last, ast.FunctionDef):
func_name = last.name
tc_func_name = (
rename_map.get(func_name, func_name) if colliding_names else func_name
)
param_types = expected_params
expected_callable_type: type = typing.cast(
type,
collections.abc.Callable[param_types, expected_return]
if expected_params is not None
else collections.abc.Callable[..., expected_return],
)
expected_callable_ast = type_to_ast(expected_callable_type)
postlude = [
ast.AnnAssign(
target=ast.Name(id="_synthesized_check", ctx=ast.Store()),
annotation=expected_callable_ast,
value=ast.Name(id=tc_func_name, ctx=ast.Load()),
simple=1,
)
]
# For ClassDef: no postlude needed, mypy checks the class body directly.

full_body = (
baseline_imports
+ list(imports)
+ list(stubs)
+ list(variables)
+ module_body
+ [postlude]
+ postlude
)
stub_module = ast.Module(body=full_body, type_ignores=[])
source = ast.unparse(ast.fix_missing_locations(stub_module))
Expand Down
Loading
Loading