Add streaming LLM proposal support

This commit is contained in:
2026-04-09 01:06:45 +08:00
parent 46151512cd
commit 96140b79bb
4 changed files with 90 additions and 1 deletion

View File

@@ -253,9 +253,23 @@ class CoreFlowTests(unittest.TestCase):
self.assertEqual(endpoint.provider, "codex")
self.assertEqual(endpoint.base_url, "http://codex.example/v1")
self.assertEqual(endpoint.wire_api, "responses")
self.assertFalse(endpoint.stream)
self.assertEqual(endpoint.reasoning_effort, "high")
self.assertEqual(endpoint.api_key_env, "OPENAI_API_KEY")
def test_endpoint_stream_flag(self) -> None:
    """Verify that ``LLMEndpointSpec.from_dict`` honours an explicit ``stream: True``."""
    # Minimal spec payload with streaming enabled; every other field is
    # boilerplate required by the parser.
    spec_payload = {
        "provider": "custom",
        "base_url": "http://example/v1",
        "wire_api": "chat.completions",
        "stream": True,
        "model": "x",
        "api_key_env": "OPENAI_API_KEY",
    }
    parsed = LLMEndpointSpec.from_dict(spec_payload)
    self.assertTrue(parsed.stream)
def test_extract_response_text_supports_responses_api_output(self) -> None:
text = _extract_response_text(
{