diff --git a/AGENTS.mk b/AGENTS.mk index 944cb70..58dca55 100644 --- a/AGENTS.mk +++ b/AGENTS.mk @@ -6,67 +6,38 @@ MAKEFILE_PATH := $(abspath $(lastword $(MAKEFILE_LIST))) MAKEFILE_DIR := $(realpath $(dir $(MAKEFILE_PATH))) -EXTRACT_INFO_CMAKE := ${MAKEFILE_DIR}/extract_info.cmake -IDEAS_MAKEFILE := $(MAKEFILE_DIR)/IDEAS.mk - -ANTHROPIC_AUTH_TOKEN ?= $(OPENROUTER_API_KEY) -ANTHROPIC_BASE_URL ?= https://openrouter.ai/api -ANTHROPIC_API_KEY ?= "" AGENT_PROVIDER ?= openrouter AGENT_MODEL ?= anthropic/claude-sonnet-4.6 -BASE_URL ?= "https://openrouter.ai/api/v1" -TRANSLATION_DIR ?= translation.$(shell git --git-dir=${MAKEFILE_DIR}/.git rev-parse HEAD) - -export EXTRACT_INFO_CMAKE - -TARGETS ?= $(shell [ -d build-ninja ] && find build-ninja -maxdepth 1 -type f -executable -exec basename {} \; | cut -d. -f1 | sed -e "s/^lib//gi") -ifeq (${TARGETS},) -ifeq ($(filter cmake clean,$(MAKECMDGOALS)),) -$(error No TARGETS found! You need to run cmake!) -endif -endif +AGENT_BASE_URL ?= "https://openrouter.ai/api/v1" # Docker configuration -DOCKER_DIR := ${MAKEFILE_DIR}/docker +DOCKER_HOSTDIR := ${MAKEFILE_DIR}/docker DOCKER_WORKDIR := /home/user/IDEAS -# Relative path to the current working directory -DOCKER_REL_CWD := $(patsubst $(MAKEFILE_DIR)/%,%,$(CURDIR)) DOCKER_RUN ?= docker run --rm \ --init \ -it \ -v $(MAKEFILE_DIR):$(DOCKER_WORKDIR) \ - -v $(DOCKER_DIR)/.venv:$(DOCKER_WORKDIR)/.venv \ + -w $(DOCKER_WORKDIR)/$(patsubst $(MAKEFILE_DIR)/%,%,$(CURDIR)) \ -e OPENROUTER_API_KEY \ -e TRANSLATION_DIR \ -e AGENT_PROVIDER \ -e AGENT_MODEL \ - -e BASE_URL \ + -e AGENT_BASE_URL \ -e RUSTFLAGS \ - -e VERBOSE \ ideas-$(shell id -u) ifdef DOCKER_RUN - # Touch directory for correct permissions when mounted - VENV_SETUP = mkdir -p $(DOCKER_DIR)/.venv - # Run inside Docker container with exit-on-error - RUN_CMD = $(DOCKER_RUN) /bin/sh -c 'set -e; cd $(DOCKER_WORKDIR)/$(DOCKER_REL_CWD); $(1)' + RUN_PREFIX = \ + mkdir -p $(DOCKER_HOSTDIR)/.venv && \ + $(DOCKER_RUN) \ + /bin/sh -c 'set -e; 
+ RUN_SUFFIX = ' else - VENV_SETUP = @true - RUN_CMD = $(1) + RUN_PREFIX = + RUN_SUFFIX = endif -# cmake -.PHONY: cmake -cmake: build-ninja/cmake.log - -build-ninja/cmake.log: test_case/CMakeLists.txt ${EXTRACT_INFO_CMAKE} - uv run python -m ideas.cmake source_dir=test_case build_dir=build-ninja - @touch $@ - -build-ninja/CMakeCache.txt: build-ninja/cmake.log -build-ninja/compile_commands.json: build-ninja/cmake.log -build-ninja/build.log: build-ninja/cmake.log # test generation from project .PHONY: testgen @@ -74,8 +45,7 @@ testgen: test_crate/tests/test_assert.rs ; .PRECIOUS: test_crate/tests/test_assert.rs test_crate/tests/test_assert.rs: - $(VENV_SETUP) - $(call RUN_CMD,\ + $(RUN_PREFIX) \ uv run python -m ideas.agents.testgen model=$(if $(AGENT_PROVIDER),${AGENT_PROVIDER}/,)${AGENT_MODEL} \ c_code=test_case \ project_name=$(notdir $(CURDIR)) \ @@ -83,36 +53,33 @@ test_crate/tests/test_assert.rs: test_crate_out=test_crate \ hydra.output_subdir=.testgen \ hydra.job.name=testgen \ - hydra.run.dir=test_vectors; \ - ) + hydra.run.dir=test_vectors \ + $(RUN_SUFFIX) + # Agent is not guaranteed to write file + [ -f test_crate/tests/test_assert.rs ] || { echo "ERROR: Agent failed to generate test_crate/tests/test_assert.rs"; exit 1; } # library targets: generate tests from the consolidated lib.c .PRECIOUS: test_crates/%/tests/test_assert.rs test_crates/%/tests/test_assert.rs: ${TRANSLATION_DIR}/%/src/lib.c | build-ninja/lib%.so.type - # Copy lib.c into test_targets//src/ so - # build.rs can use ../../test_targets//src/lib.c + # Copy lib.c into test_crates//src/ so + # build.rs can use ../../test_crates//src/lib.c # both in Docker /tmp and on disk - mkdir -p test_targets/$*/src - cp ${TRANSLATION_DIR}/$*/src/lib.c test_targets/$*/src/lib.c - $(VENV_SETUP) - $(call RUN_CMD,\ + mkdir -p test_crates/$*/src + cp ${TRANSLATION_DIR}/$*/src/lib.c test_crates/$*/src/lib.c + $(RUN_PREFIX) \ uv run python -m ideas.agents.testgen model=$(if 
$(AGENT_PROVIDER),${AGENT_PROVIDER}/,)${AGENT_MODEL} \ - c_code=test_targets/$*/src/lib.c \ + c_code=test_crates/$*/src/lib.c \ project_name=$* \ test_vectors_out=test_vectors/$*/agent \ test_crate_out=test_crates/$* \ hydra.output_subdir=.testgen \ hydra.job.name=testgen \ - hydra.run.dir=test_vectors/$*; \ - ) + hydra.run.dir=test_vectors/$* \ + $(RUN_SUFFIX) + # Agent is not guaranteed to write file + [ -f test_crates/$*/tests/test_assert.rs ] || { echo "ERROR: Agent failed to generate test_crates/$*/tests/test_assert.rs"; exit 1; } # executable targets: do nothing test_crates/%/tests/test_assert.rs: ${TRANSLATION_DIR}/%/src/main.c | build-ninja/%.type - mkdir -p test_crates/$*/tests - touch test_crates/$*/tests/test_assert.rs - -# fallback -test_crates/%/tests/test_assert.rs: - mkdir -p test_crates/$*/tests - touch test_crates/$*/tests/test_assert.rs + $(error Agent cannot generate tests for binary targets yet!) diff --git a/IDEAS.mk b/IDEAS.mk index 18caaeb..84d6a8e 100644 --- a/IDEAS.mk +++ b/IDEAS.mk @@ -55,8 +55,10 @@ build-ninja/build.log: build-ninja/cmake.log # init .PHONY: init init: $(patsubst %,${TRANSLATION_DIR}/%/init,${TARGETS}) ; -${TRANSLATION_DIR}/%/init: ${TRANSLATION_DIR}/%/src/lib.c | build-ninja/lib%.so.type ; -${TRANSLATION_DIR}/%/init: ${TRANSLATION_DIR}/%/src/main.c | build-ninja/%.type ; +${TRANSLATION_DIR}/%/init: ${TRANSLATION_DIR}/%/src/lib.c | build-ninja/lib%.so.type + touch ${TRANSLATION_DIR}/$*/src/lib.c +${TRANSLATION_DIR}/%/init: ${TRANSLATION_DIR}/%/src/main.c | build-ninja/%.type + touch ${TRANSLATION_DIR}/$*/src/main.c # initialize workspace .PRECIOUS: ${TRANSLATION_DIR}/Cargo.toml @@ -103,7 +105,7 @@ ${TRANSLATION_DIR}/%/translate: ${TRANSLATION_DIR}/%/src/lib.rs | build-ninja/li ${TRANSLATION_DIR}/%/translate: ${TRANSLATION_DIR}/%/src/main.rs | build-ninja/%.type ; .PRECIOUS: ${TRANSLATION_DIR}/%/src/lib.rs -${TRANSLATION_DIR}/%/src/lib.rs: ${TRANSLATION_DIR}/%/src/lib.c ${TRANSLATION_DIR}/%/tests/test_cases.rs | 
${TRANSLATION_DIR}/%/Cargo.toml +${TRANSLATION_DIR}/%/src/lib.rs: ${TRANSLATION_DIR}/%/src/lib.c | ${TRANSLATION_DIR}/%/Cargo.toml ${TRANSLATION_DIR}/%/tests/test_assert.rs -uv run python -m ideas.translate model.name=${PROVIDER}/${MODEL} \ filename=${TRANSLATION_DIR}/$*/src/lib.c \ cargo_toml=${TRANSLATION_DIR}/$*/Cargo.toml \ @@ -210,35 +212,24 @@ test_vectors/%/%.json: $(error $@ not found) -# testgen -.PHONY: testgen_argless -testgen_argless: $(patsubst %,test_vectors/%/testgen_argless,${TARGETS}) -test_vectors/%/testgen_argless: | build-ninja/lib%.so.type ; -test_vectors/%/testgen_argless: test_vectors/%/argless.json | build-ninja/%.type ; - -.PRECIOUS: test_vectors/%/argless.json -test_vectors/%/argless.json: | build-ninja/%.type - -uv run python -m ideas.testgen artifact=build-ninja/$* \ - test_vector=$@ \ - hydra.output_subdir=.testgen \ - hydra.job.name=testgen \ - hydra.run.dir=test_vectors/$* - # testgen for each C target .PHONY: testgen_target -testgen_target: $(patsubst %,${TRANSLATION_DIR}/%/tests/test_assert.rs,${TARGETS}) ; +testgen_target: $(patsubst %,test_crates/%/tests/test_assert.rs,${TARGETS}) ; -${TRANSLATION_DIR}/%/tests/test_assert.rs: test_crates/%/tests/test_assert.rs build-ninja/lib%.so.type - mkdir -p $(dir $@) - cp test_crates/$*/tests/test_assert.rs $@ +.PRECIOUS: test_crates/%/tests/test_assert.rs +test_crates/%/tests/test_assert.rs: | ${TRANSLATION_DIR}/%/src/lib.c build-ninja/lib%.so.type + -@$(MAKE) -j1 -f $(AGENTS_MAKEFILE) test_crates/$*/tests/test_assert.rs .PRECIOUS: test_crates/%/tests/test_assert.rs -test_crates/%/tests/test_assert.rs: +test_crates/%/tests/test_assert.rs: | ${TRANSLATION_DIR}/%/src/main.c build-ninja/%.type -@$(MAKE) -j1 -f $(AGENTS_MAKEFILE) test_crates/$*/tests/test_assert.rs -${TRANSLATION_DIR}/%/tests/test_assert.rs: build-ninja/%.type +${TRANSLATION_DIR}/%/tests/test_assert.rs: test_crates/%/tests/test_assert.rs | build-ninja/lib%.so.type mkdir -p $(dir $@) - touch $@ + cp 
test_crates/$*/tests/test_assert.rs $@ + +${TRANSLATION_DIR}/%/tests/test_assert.rs: | build-ninja/%.type + $(error Agent cannot generate tests for binary targets yet!) # clean diff --git a/Makefile b/Makefile index cd2d9c1..57fad59 100644 --- a/Makefile +++ b/Makefile @@ -99,6 +99,12 @@ kill:## Kill all vLLM servers .PHONY: FORCE FORCE: +.PHONY: examples +examples:## Print out examples +examples: $(addsuffix /print,${EXAMPLES}) ; +examples/%/print: FORCE + @if [ -d "$(@D)" ]; then echo "$(@D)"; fi + .PHONY: examples/init examples/init:## Initialize all examples examples/init: $(addsuffix /init,${EXAMPLES}) ; @@ -122,15 +128,6 @@ examples/%/cmake: FORCE -@$(MAKE) -j1 -f $(IDEAS_MAKEFILE) -C $(@D) cmake -.PHONE: examples/testgen_argless -examples/testgen_argless:## Generate argless tests for executable targets in all C examples -examples/testgen_argless: $(addsuffix /testgen_argless,${EXAMPLES}) -examples/%/testgen_argless:## Generate argless tests for executable targets in a specific C example -examples/%/testgen_argless: FORCE - -@$(MAKE) -j1 -f $(IDEAS_MAKEFILE) -C $(@D) cmake - -@$(MAKE) -j1 -f $(IDEAS_MAKEFILE) -C $(@D) testgen_argless - - .PHONY: examples/testgen_agent examples/testgen_agent:## Generate test vectors for all C examples with an agent examples/testgen_agent: $(addsuffix /testgen_agent,${EXAMPLES}) @@ -145,22 +142,9 @@ examples/testgen_agent_target: $(addsuffix /testgen_agent_target,${EXAMPLES}) examples/%/testgen_agent_target:## Generate test vectors for all targets in a specific C example with an agent examples/%/testgen_agent_target: FORCE -@$(MAKE) -j1 -f $(IDEAS_MAKEFILE) -C $(@D) cmake - -@$(MAKE) -j1 -f $(IDEAS_MAKEFILE) -C $(@D) init -@$(MAKE) -j1 -f $(IDEAS_MAKEFILE) -C $(@D) testgen_target -.PHONY: examples/testgen_and_translate -examples/testgen_and_translate:## Generate tests and translate all examples -examples/testgen_and_translate: $(addsuffix /testgen_and_translate,${EXAMPLES}) -examples/%/testgen_and_translate:## Generate tests 
and translate specific example -examples/%/testgen_and_translate: FORCE - -@$(MAKE) -j1 -f $(IDEAS_MAKEFILE) -C $(@D) cmake - -@$(MAKE) -j1 -f $(IDEAS_MAKEFILE) -C $(@D) testgen_argless - -@$(MAKE) -j1 -f $(IDEAS_MAKEFILE) -C $(@D) init - -@$(MAKE) -j1 -f $(IDEAS_MAKEFILE) -C $(@D) testgen_target - -@$(MAKE) -j1 -f $(IDEAS_MAKEFILE) -C $(@D) translate - - .PHONY: examples/translate examples/translate:## Translate all examples examples/translate: $(addsuffix /translate,${EXAMPLES}) diff --git a/src/ideas/agents/testgen.py b/src/ideas/agents/testgen.py index bce81b2..0afd39e 100644 --- a/src/ideas/agents/testgen.py +++ b/src/ideas/agents/testgen.py @@ -125,8 +125,9 @@ class TestgenInstructions: Write a `{rs_crate_path}/build.rs` that: 1. Uses `cc::Build::new()` with `.compiler("clang")` to compile **all** C source files discovered in Step 1. 2. Adds the correct include directories so the C headers are found. - 3. Links any extra system libraries the C project requires (e.g. `println!("cargo::rustc-link-lib=m");`). - 4. Passes `-w` (suppress warnings) and `-std=c99`. + 3. Uses `.warnings(false)` to suppress warnings. + 4. Uses `.std("c99")` to specify the C standard. + 5. Links any extra system libraries the C project requires (e.g. `println!("cargo::rustc-link-lib=m");`). """ ) @@ -351,7 +352,7 @@ class TestgenInstructions: JSON file. - For floating-point fields use an epsilon comparison: ```rust - assert!((actual - expected).abs() < 1e-4, + assert!((actual - expected).abs() / expected.abs() < 1e-3, "field ``: expected {{expected}}, got {{actual}}"); ``` - For integer / bool fields use `assert_eq!`. 
@@ -442,9 +443,7 @@ def _main(cfg: TestgenConfig) -> None: logger_trajectory.addHandler(fh) # Simultaneous print and log to file printer = LoggingConsolePrinter(logger=logger_trajectory) - - name = "C library test vector generator" - agent = RelentlessAgent(name=name) + agent = RelentlessAgent(name="C library test vector generator") project_name = cfg.project_name work_dir = Path(tempfile.mkdtemp()) / project_name diff --git a/src/ideas/ast.py b/src/ideas/ast.py index 5efa4f4..dc7ed20 100644 --- a/src/ideas/ast.py +++ b/src/ideas/ast.py @@ -287,30 +287,39 @@ def clang_rename_( def clang_make_global_(path: Path, spelling: str): tu = create_translation_unit(path) - cursor = _find_cursor(tu, spelling) - if cursor.kind not in DEFINITION_START_TOKEN: - raise ValueError(f"Unhandled cursor kind {cursor.kind}!") - - tokens = list(_get_tokens(cursor)) - assert len(tokens) > 0 - + tu_path = Path(tu.spelling).resolve() edits: dict[tuple[int, int], bytes] = {} - for i, token in enumerate(tokens): - # Remove storage specifiers from declaration while preserving offsets - if token.kind == TokenKind.KEYWORD and token.spelling in ("static", "inline"): - assert i + 1 < len(tokens), "storage specifier should always come before name" - start_offset = token.extent.start.offset - # Use start of next token as end offset to remove any whitespace - end_offset = tokens[i + 1].extent.start.offset - edits[(start_offset, end_offset)] = b"" - - # Don't change anything after definition start - elif ( - token.kind == TokenKind.PUNCTUATION - and token.spelling == DEFINITION_START_TOKEN[cursor.kind] + for cursor in _find_cursors(tu, spelling): + # We don't handle cursors not in the provided translation unit or anything without a definition + if ( + cursor.location.file is None + or Path(cursor.location.file.name).resolve() != tu_path + or Path(cursor.extent.start.file.name).resolve() != tu_path + or Path(cursor.extent.end.file.name).resolve() != tu_path ): - break + raise 
NotImplementedError(f"Found `{spelling}` cursor `{cursor}` not in {tu_path}!") + if cursor.kind not in DEFINITION_START_TOKEN: + raise ValueError(f"Unhandled cursor kind {cursor.kind}!") + + tokens = list(_get_tokens(cursor)) + assert len(tokens) > 0 + + for i, token in enumerate(tokens): + # Remove storage specifiers from declaration while preserving offsets + if token.kind == TokenKind.KEYWORD and token.spelling in ("static", "inline"): + assert i + 1 < len(tokens), "storage specifier should always come before name" + start_offset = token.extent.start.offset + # Use start of next token as end offset to remove any whitespace + end_offset = tokens[i + 1].extent.start.offset + edits[(start_offset, end_offset)] = b"" + + # Don't change anything after definition start + elif ( + token.kind == TokenKind.PUNCTUATION + and token.spelling == DEFINITION_START_TOKEN[cursor.kind] + ): + break if edits: _apply_edits(path, edits) @@ -318,52 +327,61 @@ def clang_make_extern_(path: Path, spelling: str): tu = create_translation_unit(path) - cursor = _find_cursor(tu, spelling) - # Determine punctuation token to find based on cursor kind (function or variable) - if cursor.kind not in DEFINITION_START_TOKEN: - raise ValueError(f"Unhandled cursor kind {cursor.kind}!") - - tokens = list(_get_tokens(cursor)) - assert len(tokens) > 0 - + tu_path = Path(tu.spelling).resolve() edits: dict[tuple[int, int], bytes] = {} - is_extern = False - definition_start_token_idx = None - - for i, token in enumerate(tokens): - # Remove storage specifiers from declaration while preserving offsets - if token.kind == TokenKind.KEYWORD and token.spelling in ("static", "inline"): - assert i + 1 < len(tokens), "storage specifier should always come before name" - start_offset = token.extent.start.offset - # Use start of next token as end offset to remove any whitespace - end_offset = tokens[i + 1].extent.start.offset - edits[(start_offset, end_offset)] = b"" - 
- # Check if extern keyword already present - elif token.kind == TokenKind.KEYWORD and token.spelling == "extern": - is_extern = True - - # Record the first definition-opening token. - elif ( - definition_start_token_idx is None - and token.kind == TokenKind.PUNCTUATION - and token.spelling == DEFINITION_START_TOKEN[cursor.kind] + + for cursor in _find_cursors(tu, spelling): + # We don't handle cursors not in the provided translation unit or anything without a definition + if ( + cursor.location.file is None + or Path(cursor.location.file.name).resolve() != tu_path + or Path(cursor.extent.start.file.name).resolve() != tu_path + or Path(cursor.extent.end.file.name).resolve() != tu_path ): - definition_start_token_idx = i - break - - # Replace definition portion with ';' - if definition_start_token_idx is not None: - assert definition_start_token_idx > 0 - # Use end of prior token as end offset to remove any whitespace - start_pos = tokens[definition_start_token_idx - 1].extent.end.offset - end_pos = cursor.extent.end.offset - edits[(start_pos, end_pos)] = b";" - - # Add 'extern ' prefix if not already present - if not is_extern: - extern_insert_pos = cursor.extent.start.offset - edits[(extern_insert_pos, extern_insert_pos)] = b"extern " + raise NotImplementedError(f"Found `{spelling}` cursor `{cursor}` not in {tu_path}!") + if cursor.kind not in DEFINITION_START_TOKEN: + raise ValueError(f"Unhandled cursor kind {cursor.kind}!") + + tokens = list(_get_tokens(cursor)) + assert len(tokens) > 0 + + is_extern = False + definition_start_token_idx = None + + for i, token in enumerate(tokens): + # Remove storage specifiers from declaration while preserving offsets + if token.kind == TokenKind.KEYWORD and token.spelling in ("static", "inline"): + assert i + 1 < len(tokens), "storage specifier should always come before name" + start_offset = token.extent.start.offset + # Use start of next token as end offset to remove any whitespace + end_offset = tokens[i + 
1].extent.start.offset + edits[(start_offset, end_offset)] = b"" + + # Check if extern keyword already present + elif token.kind == TokenKind.KEYWORD and token.spelling == "extern": + is_extern = True + + # Record the first definition-opening token. + elif ( + definition_start_token_idx is None + and token.kind == TokenKind.PUNCTUATION + and token.spelling == DEFINITION_START_TOKEN[cursor.kind] + ): + definition_start_token_idx = i + break + + # Replace definition portion with ';' + if definition_start_token_idx is not None: + assert definition_start_token_idx > 0 + # Use end of prior token as end offset to remove any whitespace + start_pos = tokens[definition_start_token_idx - 1].extent.end.offset + end_pos = cursor.extent.end.offset + edits[(start_pos, end_pos)] = b";" + + # Add 'extern ' prefix if not already present + if not is_extern: + extern_insert_pos = cursor.extent.start.offset + edits[(extern_insert_pos, extern_insert_pos)] = b"extern " if edits: _apply_edits(path, edits) @@ -391,9 +409,8 @@ def _get_tokens(cursor: Cursor): yield from tu.get_tokens(extent=extent) -def _find_cursor(tu: TranslationUnit, spelling: str) -> Cursor: - definition: Cursor | None = None - declaration: Cursor | None = None +def _find_cursors(tu: TranslationUnit, spelling: str) -> list[Cursor]: + candidates: list[Cursor] = [] assert tu.cursor is not None for cursor in tu.cursor.walk_preorder(): @@ -401,16 +418,38 @@ def _find_cursor(tu: TranslationUnit, spelling: str) -> Cursor: continue if cursor.spelling != spelling: continue - if cursor.is_definition(): - definition = cursor - break - if declaration is None: - declaration = cursor - - target = definition or declaration - if target is None: + if cursor.semantic_parent is None: + continue + if cursor.semantic_parent.kind != CursorKind.TRANSLATION_UNIT: + continue + if cursor.location.is_in_system_header: + continue + candidates.append(cursor) + + if len(candidates) == 0: raise ValueError(f"Unable to find function or variable with 
spelling `{spelling}`") - return target + + definitions = [cursor for cursor in candidates if cursor.is_definition()] + definition_usrs = {cursor.get_usr() for cursor in definitions if cursor.get_usr()} + if len(definition_usrs) > 1: + raise ValueError( + f"Ambiguous symbol `{spelling}` with multiple definitions: {sorted(definition_usrs)}" + ) + + if len(definition_usrs) == 1: + target_usr = next(iter(definition_usrs)) + else: + declaration_usrs = {cursor.get_usr() for cursor in candidates if cursor.get_usr()} + if len(declaration_usrs) > 1: + raise ValueError( + f"Ambiguous symbol `{spelling}` with multiple declarations: {sorted(declaration_usrs)}" + ) + target_usr = next(iter(declaration_usrs)) if len(declaration_usrs) == 1 else "" + + if not target_usr: + return candidates + + return [cursor for cursor in candidates if cursor.get_usr() == target_usr] def _apply_edits(path: Path, edits: dict[tuple[int, int], bytes]): diff --git a/src/ideas/test_symbol.py b/src/ideas/test_symbol.py index 6019e02..dced680 100644 --- a/src/ideas/test_symbol.py +++ b/src/ideas/test_symbol.py @@ -12,7 +12,7 @@ import dspy from .tools import Crate, run_subprocess -from .ast import Symbol, clang_make_global_, clang_make_extern_ +from .ast import Symbol, clang_make_extern_ logger = logging.getLogger("ideas.test_symbol") @@ -25,21 +25,25 @@ def __init__(self, crate: Crate, symbols: list[Symbol]): # Write a build script to compile C code as a static library and link to it self.write_build_script_() - # Rewrite C code to make each function global. Then generate a Rust binding for it - # to force the Rust linker to include that C function in the Rust artifact. + # Generate a Rust binding for any global function since we need to force the Rust + # linker to include that C function in the Rust artifact. # FIXME: If we ever test variables we should generate bindings for those here too! 
+ binding_path = self.crate.rust_src_path.parent / "binding.rs" + binding_path.write_text("") self.main_function = "" for symbol in symbols: - if not (symbol.is_function and symbol.is_definition): + if not (symbol.is_function and symbol.is_definition and symbol.is_global): continue if self.crate.is_bin and symbol.spelling == "main": # main requires special handling because we must bind to it as _main and # statically create a Rust main that calls it self.main_function = self.write_main_binding() else: - clang_make_global_(self.crate.c_src_path, symbol.spelling) self.write_symbol_binding_(symbol.spelling) - self.crate.vcs.add(self.crate.c_src_path) + + # These files are modified by test + orig_binding_src = binding_path.read_bytes() + orig_rust_src = self.crate.rust_src_path.read_bytes() # Check whether all of the changes compile and commit them passes, output = self.test() @@ -48,6 +52,10 @@ def __init__(self, crate: Crate, symbols: list[Symbol]): msg = f"Failed to prepare `{self.crate.root_package['name']}` for symbol testing!" 
self.crate.vcs.commit(msg) + # Restore originals + binding_path.write_bytes(orig_binding_src) + self.crate.rust_src_path.write_bytes(orig_rust_src) + # Error loudly if changes don't build if not passes: msg += output @@ -121,65 +129,50 @@ def write_main_binding(self) -> str: ) def test(self) -> tuple[bool, str]: - orig_rust_src = self.crate.rust_src_path.read_text() - rust_src = orig_rust_src + rust_src = self.crate.rust_src_path.read_text() # Remove forbid unsafe from Rust source - rust_src = re.sub(re.escape("#![forbid(unsafe_code)]"), "", rust_src) + rust_src = rust_src.replace("#![forbid(unsafe_code)]", "") # Replace Rust Mutex with C ABI-compatible Mutex in Rust source RUST_MUTEX = "use std::sync::{Mutex, MutexGuard};" C_ABI_MUTEX = "mod sync;\nuse crate::sync::{Mutex, MutexGuard};" - rust_src = re.sub( - f"^{re.escape(RUST_MUTEX)}$", C_ABI_MUTEX, rust_src, flags=re.MULTILINE - ) + rust_src = rust_src.replace(RUST_MUTEX, C_ABI_MUTEX) # Reference wrapper module in Rust source WRAPPER_MOD = "pub mod wrapper;" - wrapper_src_path = self.crate.rust_src_path.parent / "wrapper.rs" - wrapper_src_path.touch() - self.crate.vcs.add(wrapper_src_path) - if not re.search(f"^{re.escape(WRAPPER_MOD)}$", rust_src, flags=re.MULTILINE): + if WRAPPER_MOD not in rust_src: rust_src += WRAPPER_MOD + "\n" + wrapper_path = self.crate.rust_src_path.parent / "wrapper.rs" + wrapper_path.touch() # Reference binding module in Rust source BINDING_MOD = "pub mod binding;" - binding_src_path = self.crate.rust_src_path.parent / "binding.rs" - binding_src_path.touch() - orig_binding_src = binding_src_path.read_text() - binding_src = orig_binding_src - if not re.search(f"^{re.escape(BINDING_MOD)}$", rust_src, flags=re.MULTILINE): + if BINDING_MOD not in rust_src: rust_src += BINDING_MOD + "\n" - - binding_src_path.write_text(binding_src) - self.crate.vcs.add(binding_src_path) + binding_path = self.crate.rust_src_path.parent / "binding.rs" + binding_path.touch() 
self.crate.rust_src_path.write_text(rust_src) - self.crate.vcs.add(self.crate.rust_src_path) - # Try building the crate, which should always works, before testing and detect - # if we need to insert a main + # Try building the crate to detect if we need to insert a main builds, feedback = self.crate.cargo_build(allow_unsafe=True, fix_E0601=False) if "error[E0601]" in feedback and self.main_function: - binding_src += "pub mod main;\n" - binding_src_path.write_text(binding_src) - self.crate.vcs.add(binding_src_path) + with binding_path.open("a+") as f: + f.write("pub mod main;\n") + with self.crate.rust_src_path.open("a+") as f: + f.write(self.main_function) + + self.crate.vcs.add(wrapper_path, binding_path, self.crate.rust_src_path) - rust_src += self.main_function - self.crate.rust_src_path.write_text(rust_src) - self.crate.vcs.add(self.crate.rust_src_path) + # Make sure the crate builds before testing builds, feedback = self.crate.cargo_build(allow_unsafe=True, fix_E0601=False) if not builds: raise RuntimeError(f"Crate does not build!\n{feedback}") passes, output, error, _ = self.crate.cargo_test() - - # Restore originals - binding_src_path.write_text(orig_binding_src) - self.crate.rust_src_path.write_text(orig_rust_src) - return passes, output + error - def forward(self, symbol: Symbol) -> bool: + def forward(self, symbol: Symbol) -> dspy.Prediction: logger.info(f"Testing symbol `{symbol.name}` ....") # Overwrite C symbol to reference extern symbol that we will link to the Rust symbol. 
@@ -188,16 +181,30 @@ def forward(self, symbol: Symbol) -> bool: clang_make_extern_(self.crate.c_src_path, symbol.spelling) self.crate.vcs.add(self.crate.c_src_path) + # These files are modified by test + binding_path = self.crate.rust_src_path.parent / "binding.rs" + orig_binding_src = binding_path.read_bytes() + orig_rust_src = self.crate.rust_src_path.read_bytes() + # Run cargo test - passes, output = self.test() + passes, feedback = self.test() msg = f"Tested symbol `{symbol.name}`" if not passes: + feedback = "Running `cargo test` fails!\n" + feedback msg = f"Failed to test symbol `{symbol.name}`" logger.error(msg) - msg += f"\n\n{output}" + msg += f"\n\n{feedback}" self.crate.vcs.commit(msg) - return passes + # Restore originals + binding_path.write_bytes(orig_binding_src) + self.crate.rust_src_path.write_bytes(orig_rust_src) + + pred = dspy.Prediction(success=passes) + if not passes: + # FIXME: Use test feedback? + pred.feedback = "Carefully compare the Rust translation in `prior_translation` with the C `snippet` and find where any mis-translations happen. Then use this knowledge to generate a correct Rust `translation` of the C `snippet`. You should treat the C `snippet` as correct, so if the C `snippet` has a bug, you should replicate that bug in the Rust `translation` too." 
+ return pred def get_linked_binding(function_name: str, c_src_path: Path, *bindgen_args: str) -> str: diff --git a/src/ideas/testgen.py b/src/ideas/testgen.py deleted file mode 100644 index e40acf8..0000000 --- a/src/ideas/testgen.py +++ /dev/null @@ -1,73 +0,0 @@ -# -# Copyright (C) 2026 Intel Corporation -# -# SPDX-License-Identifier: Apache-2.0 -# - -import sys -import json -import logging -from pathlib import Path -from dataclasses import dataclass - -import hydra -from omegaconf import MISSING -from hydra.core.config_store import ConfigStore -from hydra.core.hydra_config import HydraConfig - -from ideas.tools import run_subprocess - - -logger = logging.getLogger("ideas.testgen") - - -@dataclass -class TestgenConfig: - artifact: Path = MISSING - test_vector: Path = MISSING - - -cs = ConfigStore.instance() -cs.store(name="testgen", node=TestgenConfig) - - -def _main(cfg: TestgenConfig) -> None: - output_dir = Path(HydraConfig.get().runtime.output_dir) - logger.info(f"Saving results to {output_dir}") - - # Run the artifact and collect outputs - success, output, error, returncode = run_subprocess([str(cfg.artifact)]) - - # Stop the app on timeout - if returncode == "timeout": - raise RuntimeError( - f"Artifact {cfg.artifact} timed out! This may be due to indefinite waiting for `stdin`!" - ) - - if not success: - logger.warning( - f"Artifact {cfg.artifact} failed execution with return code {returncode}! The test vector will expect an error." 
- ) - - # Write the .json test_vector - test_vector = { - "stdout": {"pattern": f"{output}"}, - "stderr": {"pattern": f"{error}"}, - "rc": returncode, - } - cfg.test_vector.parent.mkdir(parents=True, exist_ok=True) - with open(cfg.test_vector, "w") as f: - json.dump(test_vector, f, indent=2) - - -@hydra.main(version_base=None, config_name="testgen") -def main(cfg: TestgenConfig) -> None: - try: - _main(cfg) - except Exception as e: - logger.exception(e) - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/src/ideas/tools.py b/src/ideas/tools.py index 5370483..d3546e4 100644 --- a/src/ideas/tools.py +++ b/src/ideas/tools.py @@ -509,3 +509,12 @@ def run_and_check_tests( for test_case in test_cases: success += 1 if run_and_check_test(executable, test_case, timeout=timeout) else 0 return success + + +def _in_env(var_name: str, default: bool = True) -> bool: + value = os.getenv(var_name, str(default)) + return value.strip().lower() in {"1", "true", "yes", "on"} + + +HYBRID_BUILD = _in_env("HYBRID_BUILD", default=True) +STATIC_TRANSLATIONS = HYBRID_BUILD or _in_env("STATIC_TRANSLATIONS", default=True) diff --git a/src/ideas/translate.py b/src/ideas/translate.py index 4e15681..ce40c0b 100644 --- a/src/ideas/translate.py +++ b/src/ideas/translate.py @@ -19,7 +19,7 @@ from ideas import SnippetTranslator, RecurrentTranslator, WrapperGenerator, SymbolTester from ideas import create_translation_unit, extract_info_c from ideas.init.consolidate import get_symbols_and_dependencies -from .tools import Crate +from .tools import Crate, HYBRID_BUILD logger = logging.getLogger("ideas.translate") @@ -50,12 +50,24 @@ def _main(cfg: TranslateConfig) -> None: logger.info(f"Saving results to {output_dir}") crate = Crate(cargo_toml=cfg.cargo_toml.resolve(), vcs=cfg.vcs) # type: ignore[reportArgumentType] + # Save C source since it will be modified by the agent + orig_c_src = crate.c_src_path.read_bytes() + + # Make sure Rust source is in known state (i.e., empty) + 
crate.rust_src_path.write_text("") + # Get global symbol table tu = create_translation_unit(cfg.filename) asts = [extract_info_c(tu)] symbols, dependencies = get_symbols_and_dependencies( asts, source_priority=[], external_symbol_names=["c:@F@main"] if crate.is_bin else None ) + global_functions = [ + s for s in symbols.values() if s.is_global and (s.is_function and s.is_definition) + ] + if not global_functions: + logger.info("No global functions to translate!") + return # Create translation agent model.configure(cfg.model, cfg.generate) @@ -67,32 +79,32 @@ def _main(cfg: TranslateConfig) -> None: symbol_wrapper = WrapperGenerator( crate, cfg.wrapper_max_iters, readonly_cache=cfg.readonly_cache ) - symbol_tester = SymbolTester(crate, list(symbols.values())) + symbol_tester = None + if HYBRID_BUILD: + symbol_tester = SymbolTester(crate, symbols=global_functions) agent = RecurrentTranslator( crate, snippet_translator, symbol_wrapper, symbol_tester, cfg.max_iters ) - # Run translation agent + # Run translation agent and write it to disk pred = agent(symbols, dependencies) - translation: str = pred.translation - translated: bool = pred.success + crate.rust_src_path.write_text(pred.translation) + if pred.success: + # FIXME: Only keep wrappers for symbols we need to export - # FIXME: Only keep wrappers for symbols we need to export + msg = f"Translated `{crate.root_package['name']}` to Rust!" + logger.info(msg) + else: + # Restore original C code so next agent can use it + crate.c_src_path.write_bytes(orig_c_src) - # Write translation to disk - crate.rust_src_path.write_text(translation) - crate.vcs.add(crate.rust_src_path) + msg = f"Failed to translate `{crate.root_package['name']}` to Rust!" + logger.error(msg) # Commit translation - crate.vcs.add(crate.c_src_path) if (output_subdir := HydraConfig.get().output_subdir) is not None: crate.vcs.add(output_dir / output_subdir) - msg = f"Translated `{crate.root_package['name']}` to Rust!" 
- if not translated: - msg = f"Failed to translate `{crate.root_package['name']}` to Rust!" - logger.error(msg) - else: - logger.info(msg) + crate.vcs.add(crate.rust_src_path, crate.c_src_path) crate.vcs.commit(msg) diff --git a/src/ideas/translate_recurrent.py b/src/ideas/translate_recurrent.py index 07b3185..9537285 100644 --- a/src/ideas/translate_recurrent.py +++ b/src/ideas/translate_recurrent.py @@ -13,7 +13,7 @@ import networkx as nx from .ast import Symbol -from .tools import Crate +from .tools import Crate, STATIC_TRANSLATIONS logger = logging.getLogger("ideas.translate_recurrent") @@ -90,7 +90,7 @@ def forward( # Use static translation for any symbol that a variable depends on static_translation = "" - if any( + if STATIC_TRANSLATIONS and any( nx.has_path(G, group_with_variable, symbol_names) for group_with_variable in symbol_names_with_variable ): @@ -137,6 +137,11 @@ def translate_with_retries( prior_translation, feedback = "", "" pred = dspy.Prediction() for i in range(max(self.max_iters, 1)): + # Save these in case translation fails + orig_c_src = self.crate.c_src_path.read_bytes() + orig_rust_src = self.crate.rust_src_path.read_bytes() + orig_wrappers_src = self._snapshot_wrappers() + # Attempt translation and exit early on success pred = self.translate( reference_code, @@ -149,6 +154,11 @@ def translate_with_retries( if pred.success: break + # Restore to original state since translation failed + self.crate.c_src_path.write_bytes(orig_c_src) + self.crate.rust_src_path.write_bytes(orig_rust_src) + self._restore_wrappers(orig_wrappers_src) + # On failure log a diff against prior translation name = " ".join([f"`{s.name}`" for s in symbols]) msg = f"Failed to translate symbol(s) {name} ({i + 1}/{self.max_iters})!" 
@@ -169,44 +179,29 @@ def translate_with_retries( # Create feedback for next iteration prior_translation = pred.translation.code - feedback = "Carefully compare the Rust translation in `prior_translation` with the C `snippet` and find where any mis-translations happen. Then use this knowledge to generate a correct Rust `translation` of the C `snippet`. You should treat the C `snippet` as correct, so if the C `snippet` has a bug, you should replicate that bug in the Rust `translation` too." + feedback = pred.feedback return pred - def _snapshot_wrapper_files(self) -> tuple[dict[Path, str], set[Path]]: - wrapper_paths: dict[Path, str] = {} - wrapper_dir = self.crate.rust_src_path.parent / "wrapper" - wrapper_mod = self.crate.rust_src_path.parent / "wrapper.rs" - - if wrapper_mod.exists(): - wrapper_paths[wrapper_mod] = wrapper_mod.read_text() - - existing_wrapper_files: set[Path] = set() - if wrapper_dir.exists(): - existing_wrapper_files = set( - path for path in wrapper_dir.rglob("*.rs") if path.is_file() - ) - for path in existing_wrapper_files: - wrapper_paths[path] = path.read_text() + def _snapshot_wrappers(self) -> dict[Path, bytes]: + wrappers: dict[Path, bytes] = {} - return wrapper_paths, existing_wrapper_files + path = self.crate.rust_src_path.parent / "wrapper.rs" + if path.exists() and path.is_file(): + wrappers[path] = path.read_bytes() - def _restore_wrapper_files( - self, original_wrapper_src: dict[Path, str], existing_wrapper_files: set[Path] - ) -> None: wrapper_dir = self.crate.rust_src_path.parent / "wrapper" - wrapper_mod = self.crate.rust_src_path.parent / "wrapper.rs" - - if wrapper_mod.exists() and wrapper_mod not in original_wrapper_src: - wrapper_mod.unlink() - if wrapper_dir.exists(): - for path in (path for path in wrapper_dir.rglob("*.rs") if path.is_file()): - if path not in existing_wrapper_files: - path.unlink() + for path in wrapper_dir.glob("*.rs"): + if path.exists() and path.is_file(): + wrappers[path] = path.read_bytes() - for 
path, src in original_wrapper_src.items(): + return wrappers + + def _restore_wrappers(self, wrappers: dict[Path, bytes]) -> None: + for path, src in wrappers.items(): path.parent.mkdir(parents=True, exist_ok=True) - path.write_text(src) + path.write_bytes(src) + self.crate.vcs.add(path) def translate( self, @@ -217,9 +212,6 @@ def translate( feedback: str = "", translation: str = "", ) -> dspy.Prediction: - orig_rust_src = self.crate.rust_src_path.read_text() - original_wrapper_src, existing_wrapper_files = self._snapshot_wrapper_files() - # Translate symbols and save it if successful pred = self.translate_symbol( name=" ".join(symbol.name for symbol in symbols), @@ -245,6 +237,9 @@ def translate( # We can only hybrid build-test functions and variables if not (symbol.is_function and symbol.is_definition) and not symbol.is_variable: continue + # If we can't test symbols, then only wrap globals + if self.test_symbol is None and not symbol.is_global: + continue # Wrap function or annotate variable wrapper = self.wrap_symbol(symbol, reference_code, unsafe_translation) @@ -257,21 +252,21 @@ def translate( # If wrapping failed exit early if not wrapper.success: pred.success = False + pred.feedback = wrapper.feedback break # Try testing symbol and exit early if it fails - if self.test_symbol and not self.test_symbol(symbol): + if not self.test_symbol: + continue + test = self.test_symbol(symbol) + if not test.success: pred.success = False + pred.feedback = test.feedback break + # Cache successful translation and wrappers if pred.success: - # Write successful translation and wrappers to cache self.translate_symbol.write_cache(pred) for wrapper in wrappers: self.wrap_symbol.write_cache(wrapper) - else: - # Restore Rust source and wrapper files to original state since translation failed - self.crate.rust_src_path.write_text(orig_rust_src) - self._restore_wrapper_files(original_wrapper_src, existing_wrapper_files) - return pred diff --git a/src/ideas/translate_snippet.py 
b/src/ideas/translate_snippet.py index b29f013..142cead 100644 --- a/src/ideas/translate_snippet.py +++ b/src/ideas/translate_snippet.py @@ -83,7 +83,7 @@ def forward( or _read_cache(self.cache, name, snippet) or _read_cache(self.readonly_cache, name, snippet) ) - orig_rust_src = self.crate.rust_src_path.read_text() + orig_rust_src = self.crate.rust_src_path.read_bytes() pred = dspy.Prediction() builds = False dspy_exception = None @@ -149,7 +149,7 @@ def forward( msg += f"\n\n# Reasoning\n{pred.reasoning}" if "reasoning" in pred else "" msg += f"\n\n# Feedback\n{feedback}" if feedback else "" self.crate.vcs.commit(msg) - self.crate.rust_src_path.write_text(orig_rust_src) + self.crate.rust_src_path.write_bytes(orig_rust_src) # All iterations failed because of DSPy exceptions if dspy_exception: raise dspy_exception diff --git a/src/ideas/wrapper.py b/src/ideas/wrapper.py index 3c38124..ce403a3 100644 --- a/src/ideas/wrapper.py +++ b/src/ideas/wrapper.py @@ -24,7 +24,7 @@ from ideas.adapters import Code from ideas.init.consolidate import get_symbols_and_dependencies from ideas.ast_rust import get_nodes, get_root, validate_changes -from ideas.ast import Symbol +from ideas.ast import Symbol, clang_make_global_ logger = logging.getLogger("ideas.wrapper") CodeRust = Code["rust"] @@ -73,22 +73,29 @@ class Signature(dspy.Signature): wrapper: CodeRust = dspy.OutputField() -def generate_unimplemented_wrapper(crate: Crate, symbol_name: str) -> str: - # unsafe extern "C" { - # pub fn helloworld() -> ::std::os::raw::c_int; - # } - ok, bindgen_wrapper, error, _ = run_subprocess( - [ - "bindgen", - "--disable-header-comment", - "--no-doc-comments", - "--no-layout-tests", - "--sort-semantically", - str(crate.c_src_path), - "--allowlist-function", - symbol_name, - ] - ) +def generate_unimplemented_wrapper(path: Path, symbol_name: str) -> str: + orig_src = path.read_bytes() + try: + # Make sure symbol is global, this is why we save original bytes + clang_make_global_(path, 
symbol_name) + + # unsafe extern "C" { + # pub fn helloworld() -> ::std::os::raw::c_int; + # } + ok, bindgen_wrapper, error, _ = run_subprocess( + [ + "bindgen", + "--disable-header-comment", + "--no-doc-comments", + "--no-layout-tests", + "--sort-semantically", + str(path), + "--allowlist-function", + symbol_name, + ] + ) + finally: + path.write_bytes(orig_src) if not ok: raise ValueError( f"Bindgen failed to generate wrapper for `{symbol_name}`!\nError:\n{error}" @@ -142,9 +149,9 @@ def __init__( sync_path.write_text((Path(__file__).parent / "sync.rs").read_text()) self.crate.vcs.add(sync_path) - # Make sure wrapper module exists + # Make sure wrapper module is in known state (i.e., empty) self.wrapper_path = crate.rust_src_path.parent / "wrapper.rs" - self.wrapper_path.touch() + self.wrapper_path.write_text("") def forward(self, symbol: Symbol, reference_code: str, translation: str) -> dspy.Prediction: if symbol.is_function and symbol.is_definition: @@ -152,22 +159,25 @@ def forward(self, symbol: Symbol, reference_code: str, translation: str) -> dspy elif symbol.is_variable: return self.annotate_variable(symbol, reference_code, translation) else: - logger.info(f"Skipping wrap of symbol `{symbol.name}`") - return dspy.Prediction() + raise NotImplementedError def annotate_variable( self, symbol: Symbol, reference_code: str, translation: str ) -> dspy.Prediction: logger.info(f"Adding export_name attribute to variable `{symbol.name}` ...") - orig_rust_src = self.crate.rust_src_path.read_text() - assert translation in orig_rust_src, "translation must be on disk!" 
- rust_src = orig_rust_src + rust_src = self.crate.rust_src_path.read_text() + if translation not in rust_src: + raise RuntimeError("Translation must be on disk!") # Add export_name attribute to symbol translation new_translation = export_first_unannotated_variable(translation, symbol.spelling) if new_translation is None: logger.error(f"Failed to add export_name attribute to variable `{symbol.name}`") - return dspy.Prediction(success=False, translation=translation) + return dspy.Prediction( + success=False, + translation=translation, + feedback=f"Could not find a Rust variable named `{symbol.name}` in the translation!", + ) # Update Rust source with export_name attribute rust_src = rust_src.replace(translation, new_translation) @@ -200,18 +210,16 @@ def wrap_function( logger.info(f"Generating wrapper for function `{symbol.name}` ...") - # Write blank wrapper and ensure only that blank wrapper is referenced since we're going to build + # Use bindgen to generate unimplemented wrapper and write to disk to make sure we can actually build + unimplemented_wrapper = generate_unimplemented_wrapper( + self.crate.c_src_path, symbol.spelling + ) symbol_wrapper_path = self.wrapper_path.parent / "wrapper" / f"{symbol.spelling}.rs" symbol_wrapper_path.parent.mkdir(exist_ok=True, parents=True) - symbol_wrapper_path.write_text("") - - # Try building the crate with an empty wrapper and if it fails then just return the unimplemented wrapper - max_iters = max(1, self.max_iters) if self._build(symbol.spelling) == (True, "") else 0 - - # Use bindgen to generate unimplemented wrapper and write to disk. Note the unimplemented - # wrapper contains unsafe code! 
- unimplemented_wrapper = generate_unimplemented_wrapper(self.crate, symbol.spelling) symbol_wrapper_path.write_text(unimplemented_wrapper) + success, build_feedback = self._build(symbol.spelling) + if not success: + raise RuntimeError(f"The crate does not build!\n\n{build_feedback}") # Prefer supplied wrapper, crate cache, then read-only cache. wrapper = ( @@ -236,7 +244,7 @@ def wrap_function( dspy_exception = None scope_feedback: OrderedDict[str, str] = OrderedDict() pred = dspy.Prediction() - for i in range(max_iters): + for i in range(max(self.max_iters, 1)): # Use the wrapper from the prior iteration as feedback for the next iteration if i > 0: prior_wrapper = wrapper @@ -283,32 +291,27 @@ def wrap_function( success = success and not build_feedback and not scope_feedback if success: + # Reference successful symbol wrapper in wrapper module + with self.wrapper_path.open("a") as f: + f.write(f"pub mod {symbol.spelling};\n") + self.crate.vcs.add(self.wrapper_path) + + # Log and commit success msg = f"Wrapped function `{symbol.name}`" logger.info(msg) if "reasoning" in pred: msg += f"\n\n# Reasoning\n{pred.reasoning}" + self.crate.vcs.commit(msg) break - msg = f"Failed to wrap function `{symbol.name}` ({i + 1}/{max_iters})" + # Log and commit failure + msg = f"Failed to wrap function `{symbol.name}` ({i + 1}/{self.max_iters})" logger.error(msg) msg += f"\n\n# Reasoning\n{pred.reasoning}" if "reasoning" in pred else "" msg += f"\n\n# Build feedback\n{build_feedback}" msg += f"\n\n# Scope Feedback\n{scope_feedback}" self.crate.vcs.commit(msg) - # Reference symbol wrapper in wrapper module - with self.wrapper_path.open("a") as f: - f.write(f"pub mod {symbol.spelling};\n") - self.crate.vcs.add(self.wrapper_path) - - # Write unimplemented wrapper to disk if generation failed - if not success: - symbol_wrapper_path.write_text(unimplemented_wrapper) - self.crate.vcs.add(symbol_wrapper_path) - msg = f"Wrote unimplemented wrapper for `{symbol.name}`" - logger.warning(msg) 
- self.crate.vcs.commit(msg) - # All iterations failed because of DSPy exceptions if dspy_exception: raise dspy_exception @@ -321,11 +324,14 @@ def wrap_function( pred.prior_wrapper = prior_wrapper pred.build_feedback = build_feedback pred.scope_feedback = "\n\n".join(scope_feedback.values()) + if not success: + # Feedback for translator + pred.feedback = "It was difficult to generate a C-compatible FFI wrapper for the translation. Regenerate the translation with clear, explicit, wrapper-friendly Rust function boundaries and straightforward ownership, while keeping the translation fully memory-safe and free of unsafe constructs." return pred def _build(self, symbol_spelling: str) -> tuple[bool, str]: - orig_rust_src = self.crate.rust_src_path.read_text() - orig_wrapper_src = self.wrapper_path.read_text() + orig_rust_src = self.crate.rust_src_path.read_bytes() + orig_wrapper_src = self.wrapper_path.read_bytes() # Reference wrapper module in Rust source with self.crate.rust_src_path.open("a") as f: @@ -341,8 +347,8 @@ def _build(self, symbol_spelling: str) -> tuple[bool, str]: success, feedback = self.crate.cargo_build(allow_unsafe=True) # Restore original source - self.crate.rust_src_path.write_text(orig_rust_src) - self.wrapper_path.write_text(orig_wrapper_src) + self.crate.rust_src_path.write_bytes(orig_rust_src) + self.wrapper_path.write_bytes(orig_wrapper_src) return success, feedback diff --git a/test/test_clang.py b/test/test_clang.py index 456649f..4baec4a 100644 --- a/test/test_clang.py +++ b/test/test_clang.py @@ -668,3 +668,46 @@ def test_enum_in_struct(): """ ).strip() ) + + +def test_clang_make_extern_multiple_declarations(tmp_path): + c_path = tmp_path / "input.c" + c_path.write_text( + dedent( + """ + static int f(int x); + int f(int x); + static int f(int x) { + return x + 1; + } + """ + ) + ) + + ast.clang_make_extern_(c_path, "f") + transformed = c_path.read_text() + + assert transformed.count("extern int f(int x);") == 3 + assert "static int f" not 
in transformed + assert "{" not in transformed + + +def test_clang_make_global_multiple_declarations(tmp_path): + c_path = tmp_path / "input.c" + c_path.write_text( + dedent( + """ + static int v; + extern int v; + static int v = 42; + """ + ) + ) + + ast.clang_make_global_(c_path, "v") + transformed = c_path.read_text() + + assert "static int v" not in transformed + assert "int v = 42;" in transformed + assert "extern int v;" in transformed + assert transformed.count("int v;") == 2 diff --git a/test/test_tools.py b/test/test_tools.py index ecb8737..22f6195 100644 --- a/test/test_tools.py +++ b/test/test_tools.py @@ -81,7 +81,8 @@ def test_compile_rust(rust_files: tuple[str, str], tmpdir: Path): def test_clippy(clippy_files: str): # All clippy calls should trigger all_out = tools.run_clippy(clippy_files) - successes, outputs = zip(*all_out) + successes, stdouts, stderrs, _ = zip(*all_out) + outputs = [o + e for o, e in zip(stdouts, stderrs)] assert not any(successes) assert not any(map(lambda out: out == "", outputs)) @@ -103,7 +104,7 @@ def test_structured(rust_files: tuple[str, str], clippy_files: str, tmpdir: Path # JSON dict construction should succeed all_out = tools.run_clippy(clippy_files, structured_output=True) - _, structured_outputs = zip(*all_out) + _, _, structured_outputs, _ = zip(*all_out) structured_outputs = list(structured_outputs) with does_not_raise(): @@ -112,8 +113,8 @@ def test_structured(rust_files: tuple[str, str], clippy_files: str, tmpdir: Path # Messages rendered from the JSON dict should be identical to the original render all_out = tools.run_clippy(clippy_files) - _, outputs = zip(*all_out) - rendered_og_all = list(outputs) + _, stdouts, stderrs, _ = zip(*all_out) + rendered_og_all = [o + e for o, e in zip(stdouts, stderrs)] rendered_reconstructed_all = tools.structured_to_rendered(as_json_all) assert rendered_reconstructed_all == "".join(rendered_og_all) diff --git a/uv.lock b/uv.lock index ffc804f..9c1c059 100644 --- a/uv.lock +++ 
b/uv.lock @@ -290,41 +290,41 @@ wheels = [ [[package]] name = "cryptography" -version = "46.0.6" +version = "46.0.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a4/ba/04b1bd4218cbc58dc90ce967106d51582371b898690f3ae0402876cc4f34/cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759", size = 750542, upload-time = "2026-03-25T23:34:53.396Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/47/23/9285e15e3bc57325b0a72e592921983a701efc1ee8f91c06c5f0235d86d9/cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8", size = 7176401, upload-time = "2026-03-25T23:33:22.096Z" }, - { url = "https://files.pythonhosted.org/packages/60/f8/e61f8f13950ab6195b31913b42d39f0f9afc7d93f76710f299b5ec286ae6/cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30", size = 4275275, upload-time = "2026-03-25T23:33:23.844Z" }, - { url = "https://files.pythonhosted.org/packages/19/69/732a736d12c2631e140be2348b4ad3d226302df63ef64d30dfdb8db7ad1c/cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a", size = 4425320, upload-time = "2026-03-25T23:33:25.703Z" }, - { url = "https://files.pythonhosted.org/packages/d4/12/123be7292674abf76b21ac1fc0e1af50661f0e5b8f0ec8285faac18eb99e/cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175", size = 4278082, upload-time = "2026-03-25T23:33:27.423Z" }, - { url = 
"https://files.pythonhosted.org/packages/5b/ba/d5e27f8d68c24951b0a484924a84c7cdaed7502bac9f18601cd357f8b1d2/cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463", size = 4926514, upload-time = "2026-03-25T23:33:29.206Z" }, - { url = "https://files.pythonhosted.org/packages/34/71/1ea5a7352ae516d5512d17babe7e1b87d9db5150b21f794b1377eac1edc0/cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97", size = 4457766, upload-time = "2026-03-25T23:33:30.834Z" }, - { url = "https://files.pythonhosted.org/packages/01/59/562be1e653accee4fdad92c7a2e88fced26b3fdfce144047519bbebc299e/cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c", size = 3986535, upload-time = "2026-03-25T23:33:33.02Z" }, - { url = "https://files.pythonhosted.org/packages/d6/8b/b1ebfeb788bf4624d36e45ed2662b8bd43a05ff62157093c1539c1288a18/cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507", size = 4277618, upload-time = "2026-03-25T23:33:34.567Z" }, - { url = "https://files.pythonhosted.org/packages/dd/52/a005f8eabdb28df57c20f84c44d397a755782d6ff6d455f05baa2785bd91/cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19", size = 4890802, upload-time = "2026-03-25T23:33:37.034Z" }, - { url = "https://files.pythonhosted.org/packages/ec/4d/8e7d7245c79c617d08724e2efa397737715ca0ec830ecb3c91e547302555/cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738", size = 4457425, upload-time = "2026-03-25T23:33:38.904Z" }, - { url = 
"https://files.pythonhosted.org/packages/1d/5c/f6c3596a1430cec6f949085f0e1a970638d76f81c3ea56d93d564d04c340/cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c", size = 4405530, upload-time = "2026-03-25T23:33:40.842Z" }, - { url = "https://files.pythonhosted.org/packages/7e/c9/9f9cea13ee2dbde070424e0c4f621c091a91ffcc504ffea5e74f0e1daeff/cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f", size = 4667896, upload-time = "2026-03-25T23:33:42.781Z" }, - { url = "https://files.pythonhosted.org/packages/ad/b5/1895bc0821226f129bc74d00eccfc6a5969e2028f8617c09790bf89c185e/cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2", size = 3026348, upload-time = "2026-03-25T23:33:45.021Z" }, - { url = "https://files.pythonhosted.org/packages/c3/f8/c9bcbf0d3e6ad288b9d9aa0b1dee04b063d19e8c4f871855a03ab3a297ab/cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124", size = 3483896, upload-time = "2026-03-25T23:33:46.649Z" }, - { url = "https://files.pythonhosted.org/packages/c4/cc/f330e982852403da79008552de9906804568ae9230da8432f7496ce02b71/cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a", size = 7162776, upload-time = "2026-03-25T23:34:13.308Z" }, - { url = "https://files.pythonhosted.org/packages/49/b3/dc27efd8dcc4bff583b3f01d4a3943cd8b5821777a58b3a6a5f054d61b79/cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8", size = 4270529, upload-time = "2026-03-25T23:34:15.019Z" }, - { url = 
"https://files.pythonhosted.org/packages/e6/05/e8d0e6eb4f0d83365b3cb0e00eb3c484f7348db0266652ccd84632a3d58d/cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77", size = 4414827, upload-time = "2026-03-25T23:34:16.604Z" }, - { url = "https://files.pythonhosted.org/packages/2f/97/daba0f5d2dc6d855e2dcb70733c812558a7977a55dd4a6722756628c44d1/cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290", size = 4271265, upload-time = "2026-03-25T23:34:18.586Z" }, - { url = "https://files.pythonhosted.org/packages/89/06/fe1fce39a37ac452e58d04b43b0855261dac320a2ebf8f5260dd55b201a9/cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410", size = 4916800, upload-time = "2026-03-25T23:34:20.561Z" }, - { url = "https://files.pythonhosted.org/packages/ff/8a/b14f3101fe9c3592603339eb5d94046c3ce5f7fc76d6512a2d40efd9724e/cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d", size = 4448771, upload-time = "2026-03-25T23:34:22.406Z" }, - { url = "https://files.pythonhosted.org/packages/01/b3/0796998056a66d1973fd52ee89dc1bb3b6581960a91ad4ac705f182d398f/cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70", size = 3978333, upload-time = "2026-03-25T23:34:24.281Z" }, - { url = "https://files.pythonhosted.org/packages/c5/3d/db200af5a4ffd08918cd55c08399dc6c9c50b0bc72c00a3246e099d3a849/cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d", size = 4271069, upload-time = "2026-03-25T23:34:25.895Z" }, - { url = 
"https://files.pythonhosted.org/packages/d7/18/61acfd5b414309d74ee838be321c636fe71815436f53c9f0334bf19064fa/cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa", size = 4878358, upload-time = "2026-03-25T23:34:27.67Z" }, - { url = "https://files.pythonhosted.org/packages/8b/65/5bf43286d566f8171917cae23ac6add941654ccf085d739195a4eacf1674/cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58", size = 4448061, upload-time = "2026-03-25T23:34:29.375Z" }, - { url = "https://files.pythonhosted.org/packages/e0/25/7e49c0fa7205cf3597e525d156a6bce5b5c9de1fd7e8cb01120e459f205a/cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb", size = 4399103, upload-time = "2026-03-25T23:34:32.036Z" }, - { url = "https://files.pythonhosted.org/packages/44/46/466269e833f1c4718d6cd496ffe20c56c9c8d013486ff66b4f69c302a68d/cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72", size = 4659255, upload-time = "2026-03-25T23:34:33.679Z" }, - { url = "https://files.pythonhosted.org/packages/0a/09/ddc5f630cc32287d2c953fc5d32705e63ec73e37308e5120955316f53827/cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c", size = 3010660, upload-time = "2026-03-25T23:34:35.418Z" }, - { url = "https://files.pythonhosted.org/packages/1b/82/ca4893968aeb2709aacfb57a30dec6fa2ab25b10fa9f064b8882ce33f599/cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f", size = 3471160, upload-time = "2026-03-25T23:34:37.191Z" }, +sdist = { url = 
"https://files.pythonhosted.org/packages/47/93/ac8f3d5ff04d54bc814e961a43ae5b0b146154c89c61b47bb07557679b18/cryptography-46.0.7.tar.gz", hash = "sha256:e4cfd68c5f3e0bfdad0d38e023239b96a2fe84146481852dffbcca442c245aa5", size = 750652, upload-time = "2026-04-08T01:57:54.692Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/5d/4a8f770695d73be252331e60e526291e3df0c9b27556a90a6b47bccca4c2/cryptography-46.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:ea42cbe97209df307fdc3b155f1b6fa2577c0defa8f1f7d3be7d31d189108ad4", size = 7179869, upload-time = "2026-04-08T01:56:17.157Z" }, + { url = "https://files.pythonhosted.org/packages/5f/45/6d80dc379b0bbc1f9d1e429f42e4cb9e1d319c7a8201beffd967c516ea01/cryptography-46.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b36a4695e29fe69215d75960b22577197aca3f7a25b9cf9d165dcfe9d80bc325", size = 4275492, upload-time = "2026-04-08T01:56:19.36Z" }, + { url = "https://files.pythonhosted.org/packages/4a/9a/1765afe9f572e239c3469f2cb429f3ba7b31878c893b246b4b2994ffe2fe/cryptography-46.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ad9ef796328c5e3c4ceed237a183f5d41d21150f972455a9d926593a1dcb308", size = 4426670, upload-time = "2026-04-08T01:56:21.415Z" }, + { url = "https://files.pythonhosted.org/packages/8f/3e/af9246aaf23cd4ee060699adab1e47ced3f5f7e7a8ffdd339f817b446462/cryptography-46.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:73510b83623e080a2c35c62c15298096e2a5dc8d51c3b4e1740211839d0dea77", size = 4280275, upload-time = "2026-04-08T01:56:23.539Z" }, + { url = "https://files.pythonhosted.org/packages/0f/54/6bbbfc5efe86f9d71041827b793c24811a017c6ac0fd12883e4caa86b8ed/cryptography-46.0.7-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cbd5fb06b62bd0721e1170273d3f4d5a277044c47ca27ee257025146c34cbdd1", size = 4928402, upload-time = "2026-04-08T01:56:25.624Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/cf/054b9d8220f81509939599c8bdbc0c408dbd2bdd41688616a20731371fe0/cryptography-46.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:420b1e4109cc95f0e5700eed79908cef9268265c773d3a66f7af1eef53d409ef", size = 4459985, upload-time = "2026-04-08T01:56:27.309Z" }, + { url = "https://files.pythonhosted.org/packages/f9/46/4e4e9c6040fb01c7467d47217d2f882daddeb8828f7df800cb806d8a2288/cryptography-46.0.7-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:24402210aa54baae71d99441d15bb5a1919c195398a87b563df84468160a65de", size = 3990652, upload-time = "2026-04-08T01:56:29.095Z" }, + { url = "https://files.pythonhosted.org/packages/36/5f/313586c3be5a2fbe87e4c9a254207b860155a8e1f3cca99f9910008e7d08/cryptography-46.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8a469028a86f12eb7d2fe97162d0634026d92a21f3ae0ac87ed1c4a447886c83", size = 4279805, upload-time = "2026-04-08T01:56:30.928Z" }, + { url = "https://files.pythonhosted.org/packages/69/33/60dfc4595f334a2082749673386a4d05e4f0cf4df8248e63b2c3437585f2/cryptography-46.0.7-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9694078c5d44c157ef3162e3bf3946510b857df5a3955458381d1c7cfc143ddb", size = 4892883, upload-time = "2026-04-08T01:56:32.614Z" }, + { url = "https://files.pythonhosted.org/packages/c7/0b/333ddab4270c4f5b972f980adef4faa66951a4aaf646ca067af597f15563/cryptography-46.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:42a1e5f98abb6391717978baf9f90dc28a743b7d9be7f0751a6f56a75d14065b", size = 4459756, upload-time = "2026-04-08T01:56:34.306Z" }, + { url = "https://files.pythonhosted.org/packages/d2/14/633913398b43b75f1234834170947957c6b623d1701ffc7a9600da907e89/cryptography-46.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91bbcb08347344f810cbe49065914fe048949648f6bd5c2519f34619142bbe85", size = 4410244, upload-time = "2026-04-08T01:56:35.977Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/f2/19ceb3b3dc14009373432af0c13f46aa08e3ce334ec6eff13492e1812ccd/cryptography-46.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5d1c02a14ceb9148cc7816249f64f623fbfee39e8c03b3650d842ad3f34d637e", size = 4674868, upload-time = "2026-04-08T01:56:38.034Z" }, + { url = "https://files.pythonhosted.org/packages/1a/bb/a5c213c19ee94b15dfccc48f363738633a493812687f5567addbcbba9f6f/cryptography-46.0.7-cp311-abi3-win32.whl", hash = "sha256:d23c8ca48e44ee015cd0a54aeccdf9f09004eba9fc96f38c911011d9ff1bd457", size = 3026504, upload-time = "2026-04-08T01:56:39.666Z" }, + { url = "https://files.pythonhosted.org/packages/2b/02/7788f9fefa1d060ca68717c3901ae7fffa21ee087a90b7f23c7a603c32ae/cryptography-46.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:397655da831414d165029da9bc483bed2fe0e75dde6a1523ec2fe63f3c46046b", size = 3488363, upload-time = "2026-04-08T01:56:41.893Z" }, + { url = "https://files.pythonhosted.org/packages/a7/7f/cd42fc3614386bc0c12f0cb3c4ae1fc2bbca5c9662dfed031514911d513d/cryptography-46.0.7-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:462ad5cb1c148a22b2e3bcc5ad52504dff325d17daf5df8d88c17dda1f75f2a4", size = 7165618, upload-time = "2026-04-08T01:57:10.645Z" }, + { url = "https://files.pythonhosted.org/packages/a5/d0/36a49f0262d2319139d2829f773f1b97ef8aef7f97e6e5bd21455e5a8fb5/cryptography-46.0.7-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:84d4cced91f0f159a7ddacad249cc077e63195c36aac40b4150e7a57e84fffe7", size = 4270628, upload-time = "2026-04-08T01:57:12.885Z" }, + { url = "https://files.pythonhosted.org/packages/8a/6c/1a42450f464dda6ffbe578a911f773e54dd48c10f9895a23a7e88b3e7db5/cryptography-46.0.7-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:128c5edfe5e5938b86b03941e94fac9ee793a94452ad1365c9fc3f4f62216832", size = 4415405, upload-time = "2026-04-08T01:57:14.923Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/92/4ed714dbe93a066dc1f4b4581a464d2d7dbec9046f7c8b7016f5286329e2/cryptography-46.0.7-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5e51be372b26ef4ba3de3c167cd3d1022934bc838ae9eaad7e644986d2a3d163", size = 4272715, upload-time = "2026-04-08T01:57:16.638Z" }, + { url = "https://files.pythonhosted.org/packages/b7/e6/a26b84096eddd51494bba19111f8fffe976f6a09f132706f8f1bf03f51f7/cryptography-46.0.7-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cdf1a610ef82abb396451862739e3fc93b071c844399e15b90726ef7470eeaf2", size = 4918400, upload-time = "2026-04-08T01:57:19.021Z" }, + { url = "https://files.pythonhosted.org/packages/c7/08/ffd537b605568a148543ac3c2b239708ae0bd635064bab41359252ef88ed/cryptography-46.0.7-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1d25aee46d0c6f1a501adcddb2d2fee4b979381346a78558ed13e50aa8a59067", size = 4450634, upload-time = "2026-04-08T01:57:21.185Z" }, + { url = "https://files.pythonhosted.org/packages/16/01/0cd51dd86ab5b9befe0d031e276510491976c3a80e9f6e31810cce46c4ad/cryptography-46.0.7-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:cdfbe22376065ffcf8be74dc9a909f032df19bc58a699456a21712d6e5eabfd0", size = 3985233, upload-time = "2026-04-08T01:57:22.862Z" }, + { url = "https://files.pythonhosted.org/packages/92/49/819d6ed3a7d9349c2939f81b500a738cb733ab62fbecdbc1e38e83d45e12/cryptography-46.0.7-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:abad9dac36cbf55de6eb49badd4016806b3165d396f64925bf2999bcb67837ba", size = 4271955, upload-time = "2026-04-08T01:57:24.814Z" }, + { url = "https://files.pythonhosted.org/packages/80/07/ad9b3c56ebb95ed2473d46df0847357e01583f4c52a85754d1a55e29e4d0/cryptography-46.0.7-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:935ce7e3cfdb53e3536119a542b839bb94ec1ad081013e9ab9b7cfd478b05006", size = 4879888, upload-time = "2026-04-08T01:57:26.88Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/c7/201d3d58f30c4c2bdbe9b03844c291feb77c20511cc3586daf7edc12a47b/cryptography-46.0.7-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:35719dc79d4730d30f1c2b6474bd6acda36ae2dfae1e3c16f2051f215df33ce0", size = 4449961, upload-time = "2026-04-08T01:57:29.068Z" }, + { url = "https://files.pythonhosted.org/packages/a5/ef/649750cbf96f3033c3c976e112265c33906f8e462291a33d77f90356548c/cryptography-46.0.7-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:7bbc6ccf49d05ac8f7d7b5e2e2c33830d4fe2061def88210a126d130d7f71a85", size = 4401696, upload-time = "2026-04-08T01:57:31.029Z" }, + { url = "https://files.pythonhosted.org/packages/41/52/a8908dcb1a389a459a29008c29966c1d552588d4ae6d43f3a1a4512e0ebe/cryptography-46.0.7-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a1529d614f44b863a7b480c6d000fe93b59acee9c82ffa027cfadc77521a9f5e", size = 4664256, upload-time = "2026-04-08T01:57:33.144Z" }, + { url = "https://files.pythonhosted.org/packages/4b/fa/f0ab06238e899cc3fb332623f337a7364f36f4bb3f2534c2bb95a35b132c/cryptography-46.0.7-cp38-abi3-win32.whl", hash = "sha256:f247c8c1a1fb45e12586afbb436ef21ff1e80670b2861a90353d9b025583d246", size = 3013001, upload-time = "2026-04-08T01:57:34.933Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f1/00ce3bde3ca542d1acd8f8cfa38e446840945aa6363f9b74746394b14127/cryptography-46.0.7-cp38-abi3-win_amd64.whl", hash = "sha256:506c4ff91eff4f82bdac7633318a526b1d1309fc07ca76a3ad182cb5b686d6d3", size = 3472985, upload-time = "2026-04-08T01:57:36.714Z" }, ] [[package]] @@ -565,23 +565,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7e/71/ba21c3fb8c5dce83b8c01f458a42e99ffdb1963aeec08fff5a18588d8fd7/greenlet-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:9ee1942ea19550094033c35d25d20726e4f1c40d59545815e1128ac58d416d38", size = 301833, upload-time = "2025-12-04T14:32:23.929Z" }, ] -[[package]] -name = "grpcio" -version = "1.67.1" -source = { registry = "https://pypi.org/simple" } 
-sdist = { url = "https://files.pythonhosted.org/packages/20/53/d9282a66a5db45981499190b77790570617a604a38f3d103d0400974aeb5/grpcio-1.67.1.tar.gz", hash = "sha256:3dc2ed4cabea4dc14d5e708c2b426205956077cc5de419b4d4079315017e9732", size = 12580022, upload-time = "2024-10-29T06:30:07.787Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/12/d2/2f032b7a153c7723ea3dea08bffa4bcaca9e0e5bdf643ce565b76da87461/grpcio-1.67.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:aa0162e56fd10a5547fac8774c4899fc3e18c1aa4a4759d0ce2cd00d3696ea6b", size = 5091487, upload-time = "2024-10-29T06:24:57.416Z" }, - { url = "https://files.pythonhosted.org/packages/d0/ae/ea2ff6bd2475a082eb97db1104a903cf5fc57c88c87c10b3c3f41a184fc0/grpcio-1.67.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:beee96c8c0b1a75d556fe57b92b58b4347c77a65781ee2ac749d550f2a365dc1", size = 10943530, upload-time = "2024-10-29T06:25:01.062Z" }, - { url = "https://files.pythonhosted.org/packages/07/62/646be83d1a78edf8d69b56647327c9afc223e3140a744c59b25fbb279c3b/grpcio-1.67.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:a93deda571a1bf94ec1f6fcda2872dad3ae538700d94dc283c672a3b508ba3af", size = 5589079, upload-time = "2024-10-29T06:25:04.254Z" }, - { url = "https://files.pythonhosted.org/packages/d0/25/71513d0a1b2072ce80d7f5909a93596b7ed10348b2ea4fdcbad23f6017bf/grpcio-1.67.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6f255980afef598a9e64a24efce87b625e3e3c80a45162d111a461a9f92955", size = 6213542, upload-time = "2024-10-29T06:25:06.824Z" }, - { url = "https://files.pythonhosted.org/packages/76/9a/d21236297111052dcb5dc85cd77dc7bf25ba67a0f55ae028b2af19a704bc/grpcio-1.67.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e838cad2176ebd5d4a8bb03955138d6589ce9e2ce5d51c3ada34396dbd2dba8", size = 5850211, upload-time = "2024-10-29T06:25:10.149Z" }, - { url = 
"https://files.pythonhosted.org/packages/2d/fe/70b1da9037f5055be14f359026c238821b9bcf6ca38a8d760f59a589aacd/grpcio-1.67.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a6703916c43b1d468d0756c8077b12017a9fcb6a1ef13faf49e67d20d7ebda62", size = 6572129, upload-time = "2024-10-29T06:25:12.853Z" }, - { url = "https://files.pythonhosted.org/packages/74/0d/7df509a2cd2a54814598caf2fb759f3e0b93764431ff410f2175a6efb9e4/grpcio-1.67.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:917e8d8994eed1d86b907ba2a61b9f0aef27a2155bca6cbb322430fc7135b7bb", size = 6149819, upload-time = "2024-10-29T06:25:15.803Z" }, - { url = "https://files.pythonhosted.org/packages/0a/08/bc3b0155600898fd10f16b79054e1cca6cb644fa3c250c0fe59385df5e6f/grpcio-1.67.1-cp313-cp313-win32.whl", hash = "sha256:e279330bef1744040db8fc432becc8a727b84f456ab62b744d3fdb83f327e121", size = 3596561, upload-time = "2024-10-29T06:25:19.348Z" }, - { url = "https://files.pythonhosted.org/packages/5a/96/44759eca966720d0f3e1b105c43f8ad4590c97bf8eb3cd489656e9590baa/grpcio-1.67.1-cp313-cp313-win_amd64.whl", hash = "sha256:fa0c739ad8b1996bd24823950e3cb5152ae91fca1c09cc791190bf1627ffefba", size = 4346042, upload-time = "2024-10-29T06:25:21.939Z" }, -] - [[package]] name = "h11" version = "0.16.0" @@ -868,13 +851,12 @@ wheels = [ [[package]] name = "litellm" -version = "1.80.11" +version = "1.83.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, { name = "click" }, { name = "fastuuid" }, - { name = "grpcio" }, { name = "httpx" }, { name = "importlib-metadata" }, { name = "jinja2" }, @@ -885,21 +867,21 @@ dependencies = [ { name = "tiktoken" }, { name = "tokenizers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/55/47/be6cd7b356418ca8bef3b843507940ce77b76ef2dfe515f2b4ba9b461ff0/litellm-1.80.11.tar.gz", hash = "sha256:c9fc63e7acb6360363238fe291bcff1488c59ff66020416d8376c0ee56414a19", size = 13189510, upload-time = "2025-12-22T12:47:29.181Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/22/92/6ce9737554994ca8e536e5f4f6a87cc7c4774b656c9eb9add071caf7d54b/litellm-1.83.0.tar.gz", hash = "sha256:860bebc76c4bb27b4cf90b4a77acd66dba25aced37e3db98750de8a1766bfb7a", size = 17333062, upload-time = "2026-03-31T05:08:25.331Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/97/0b/9e637344f24f3fe0e8039cd2337389fe05e0d31f518bc3e0a5cdbe45784a/litellm-1.80.11-py3-none-any.whl", hash = "sha256:406283d66ead77dc7ff0e0b2559c80e9e497d8e7c2257efb1cb9210a20d09d54", size = 11456346, upload-time = "2025-12-22T12:47:26.469Z" }, + { url = "https://files.pythonhosted.org/packages/19/2c/a670cc050fcd6f45c6199eb99e259c73aea92edba8d5c2fc1b3686d36217/litellm-1.83.0-py3-none-any.whl", hash = "sha256:88c536d339248f3987571493015784671ba3f193a328e1ea6780dbebaa2094a8", size = 15610306, upload-time = "2026-03-31T05:08:21.987Z" }, ] [[package]] name = "mako" -version = "1.3.10" +version = "1.3.11" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } +sdist = { url = "https://files.pythonhosted.org/packages/59/8a/805404d0c0b9f3d7a326475ca008db57aea9c5c9f2e1e39ed0faa335571c/mako-1.3.11.tar.gz", hash = "sha256:071eb4ab4c5010443152255d77db7faa6ce5916f35226eb02dc34479b6858069", size = 399811, upload-time = "2026-04-14T20:19:51.493Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/a5/19d7aaa7e433713ffe881df33705925a196afb9532efc8475d26593921a6/mako-1.3.11-py3-none-any.whl", hash = "sha256:e372c6e333cf004aa736a15f425087ec977e1fcbd2966aae7f17c8dc1da27a77", size = 78503, upload-time = "2026-04-14T20:19:53.233Z" }, ] [[package]]