From 7371d6635cd65207accefaac85d07af730ec4d16 Mon Sep 17 00:00:00 2001
From: Gahow Wang
Date: Thu, 9 Apr 2026 14:49:40 +0800
Subject: [PATCH] Force codex stream to use chat completions

When a codex endpoint is configured with stream: true, force the
resolved wire API to "chat.completions", overriding any wire_api
inherited from ~/.codex/config.toml. Validate the stream flag once, up
front, so the codex branch can reuse the parsed value instead of
re-deriving it inline in the constructor call.
---
 src/aituner/spec.py     |  5 ++++-
 tests/test_core_flow.py | 24 ++++++++++++++++++++++++
 2 files changed, 28 insertions(+), 1 deletion(-)

diff --git a/src/aituner/spec.py b/src/aituner/spec.py
index bd4d675..281531e 100644
--- a/src/aituner/spec.py
+++ b/src/aituner/spec.py
@@ -415,6 +415,7 @@ class LLMEndpointSpec:
         base_url = str(data.get("base_url") or "").strip()
         wire_api = str(data.get("wire_api") or "").strip()
         stream = data.get("stream")
+        stream_value = _require_bool(stream, context="llm.endpoint.stream") if stream is not None else False
         reasoning_effort = str(data.get("reasoning_effort") or "").strip()
         api_key_env = str(data.get("api_key_env") or "").strip()
         if provider == "codex":
@@ -428,6 +429,8 @@ class LLMEndpointSpec:
                 wire_api = resolved_wire_api
             if not reasoning_effort and resolved_reasoning_effort:
                 reasoning_effort = resolved_reasoning_effort
+            if stream_value:
+                wire_api = "chat.completions"
             if not api_key_env:
                 api_key_env = "OPENAI_API_KEY"
         elif provider == "bailian":
@@ -451,7 +454,7 @@ class LLMEndpointSpec:
             model=_require_str(data.get("model"), context="llm.endpoint.model"),
             provider=provider,
             wire_api=_require_str(wire_api, context="llm.endpoint.wire_api"),
-            stream=(_require_bool(stream, context="llm.endpoint.stream") if stream is not None else False),
+            stream=stream_value,
             reasoning_effort=reasoning_effort or None,
             api_key_env=_require_str(api_key_env, context="llm.endpoint.api_key_env"),
             timeout_s=_require_float(
diff --git a/tests/test_core_flow.py b/tests/test_core_flow.py
index 6218f2c..0f25178 100644
--- a/tests/test_core_flow.py
+++ b/tests/test_core_flow.py
@@ -288,6 +288,30 @@ class CoreFlowTests(unittest.TestCase):
         self.assertEqual(endpoint.reasoning_effort, "high")
         self.assertEqual(endpoint.api_key_env, "OPENAI_API_KEY")
 
+    def test_codex_stream_forces_chat_completions_wire_api(self) -> None:
+        with tempfile.TemporaryDirectory() as tmp:
+            tmp_path = Path(tmp)
+            codex_dir = tmp_path / ".codex"
+            codex_dir.mkdir(parents=True)
+            (codex_dir / "config.toml").write_text(
+                '\n'.join(
+                    [
+                        'model_provider = "ipads"',
+                        "",
+                        "[model_providers.ipads]",
+                        'base_url = "http://codex.example/v1"',
+                        'wire_api = "responses"',
+                    ]
+                ),
+                encoding="utf-8",
+            )
+            with mock.patch.dict(os.environ, {"HOME": str(tmp_path)}, clear=True):
+                endpoint = LLMEndpointSpec.from_dict(
+                    {"provider": "codex", "model": "gpt-5.4", "stream": True}
+                )
+            self.assertTrue(endpoint.stream)
+            self.assertEqual(endpoint.wire_api, "chat.completions")
+
     def test_endpoint_stream_flag(self) -> None:
         endpoint = LLMEndpointSpec.from_dict(
             {
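
Not part of the commit: a minimal usage sketch of the behavior this patch
enforces, assuming the package imports as aituner.spec (src layout) and that
from_dict resolves ~/.codex/config.toml via HOME as in the test above. The
config contents and model name mirror the test and are illustrative only.

    # Python sketch; assumes a ~/.codex/config.toml like the one the test writes,
    # i.e. the "ipads" provider with wire_api = "responses".
    from aituner.spec import LLMEndpointSpec  # assumed import path

    # A streaming codex endpoint is forced onto the chat-completions wire API,
    # even though the codex config resolves wire_api to "responses":
    streaming = LLMEndpointSpec.from_dict(
        {"provider": "codex", "model": "gpt-5.4", "stream": True}
    )
    assert streaming.stream
    assert streaming.wire_api == "chat.completions"

    # Without stream (it defaults to False), the wire_api resolved from the
    # codex config is kept as-is:
    non_streaming = LLMEndpointSpec.from_dict(
        {"provider": "codex", "model": "gpt-5.4"}
    )
    assert not non_streaming.stream
    assert non_streaming.wire_api == "responses"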