Force codex stream to use chat completions
This commit is contained in:
@@ -288,6 +288,30 @@ class CoreFlowTests(unittest.TestCase):
|
||||
self.assertEqual(endpoint.reasoning_effort, "high")
|
||||
self.assertEqual(endpoint.api_key_env, "OPENAI_API_KEY")
|
||||
|
||||
def test_codex_stream_forces_chat_completions_wire_api(self) -> None:
    """Streaming codex endpoints must ignore a configured "responses"
    wire API and force "chat.completions" instead.

    Builds a fake $HOME containing a ~/.codex/config.toml whose provider
    declares ``wire_api = "responses"``, then constructs an endpoint with
    ``stream: True`` and checks the wire API was overridden.
    """
    with tempfile.TemporaryDirectory() as fake_home:
        home = Path(fake_home)
        config_dir = home / ".codex"
        config_dir.mkdir(parents=True)
        # Same bytes the original '\n'.join produced: provider table with
        # a "responses" wire API, no trailing newline.
        config_body = (
            'model_provider = "ipads"\n'
            "\n"
            "[model_providers.ipads]\n"
            'base_url = "http://codex.example/v1"\n'
            'wire_api = "responses"'
        )
        (config_dir / "config.toml").write_text(config_body, encoding="utf-8")
        # clear=True isolates the test from the real environment so only
        # HOME is visible while the config is resolved.
        with mock.patch.dict(os.environ, {"HOME": str(home)}, clear=True):
            endpoint = LLMEndpointSpec.from_dict(
                {"provider": "codex", "model": "gpt-5.4", "stream": True}
            )
            self.assertTrue(endpoint.stream)
            self.assertEqual(endpoint.wire_api, "chat.completions")
|
||||
|
||||
def test_endpoint_stream_flag(self) -> None:
|
||||
endpoint = LLMEndpointSpec.from_dict(
|
||||
{
|
||||
|
||||
Reference in New Issue
Block a user