Force codex stream to use chat completions

This commit is contained in:
2026-04-09 14:49:40 +08:00
parent 581ef7ccea
commit 7371d6635c
2 changed files with 28 additions and 1 deletion

View File

@@ -288,6 +288,30 @@ class CoreFlowTests(unittest.TestCase):
self.assertEqual(endpoint.reasoning_effort, "high")
self.assertEqual(endpoint.api_key_env, "OPENAI_API_KEY")
def test_codex_stream_forces_chat_completions_wire_api(self) -> None:
    """A codex endpoint requested with stream=True must be forced onto the
    chat.completions wire API, even when the user's ~/.codex/config.toml
    declares wire_api = "responses" for the active provider."""
    with tempfile.TemporaryDirectory() as tmp:
        home = Path(tmp)
        # Build a fake ~/.codex/config.toml under a throwaway HOME so the
        # loader resolves the "ipads" provider with the responses wire API.
        config_dir = home / ".codex"
        config_dir.mkdir(parents=True)
        config_lines = [
            'model_provider = "ipads"',
            "",
            "[model_providers.ipads]",
            'base_url = "http://codex.example/v1"',
            'wire_api = "responses"',
        ]
        (config_dir / "config.toml").write_text(
            '\n'.join(config_lines),
            encoding="utf-8",
        )
        # clear=True isolates the test from the real environment entirely.
        with mock.patch.dict(os.environ, {"HOME": str(home)}, clear=True):
            endpoint = LLMEndpointSpec.from_dict(
                {"provider": "codex", "model": "gpt-5.4", "stream": True}
            )
            self.assertTrue(endpoint.stream)
            # Streaming must override the configured "responses" wire API.
            self.assertEqual(endpoint.wire_api, "chat.completions")
def test_endpoint_stream_flag(self) -> None:
endpoint = LLMEndpointSpec.from_dict(
{