Support codex responses API

This commit is contained in:
2026-04-09 00:55:05 +08:00
parent 79ba8a50c8
commit 0990a3771e
4 changed files with 93 additions and 29 deletions

View File

@@ -11,7 +11,7 @@ from unittest import mock
from aituner.cli import main as cli_main
from aituner.http_client import _auth_headers, _openai_url, _should_bypass_proxy
from aituner.job import append_job, build_trial_job
from aituner.llm import build_prompt, parse_proposal_text
from aituner.llm import _extract_response_text, build_prompt, parse_proposal_text
from aituner.search import ThresholdProbe, binary_search_max_feasible
from aituner.slo import RequestOutcome, evaluate_request, summarize_evaluations
from aituner.spec import (
@@ -242,6 +242,7 @@ class CoreFlowTests(unittest.TestCase):
"",
"[model_providers.ipads]",
'base_url = "http://codex.example/v1"',
'wire_api = "responses"',
]
),
encoding="utf-8",
@@ -250,8 +251,24 @@ class CoreFlowTests(unittest.TestCase):
endpoint = LLMEndpointSpec.from_dict({"provider": "codex", "model": "gpt-5.4"})
self.assertEqual(endpoint.provider, "codex")
self.assertEqual(endpoint.base_url, "http://codex.example/v1")
self.assertEqual(endpoint.wire_api, "responses")
self.assertEqual(endpoint.api_key_env, "OPENAI_API_KEY")
def test_extract_response_text_supports_responses_api_output(self) -> None:
    """Responses-API payloads (`output` -> message -> output_text) yield the raw text."""
    # Minimal Responses-API-shaped payload: one message item carrying a
    # single output_text content part.
    payload = {
        "output": [
            {
                "type": "message",
                "content": [
                    {"type": "output_text", "text": '{"diagnosis":"ok"}'},
                ],
            },
        ],
    }
    extracted = _extract_response_text(payload)
    self.assertEqual(extracted, '{"diagnosis":"ok"}')
def test_auth_headers_load_bailian_key_from_dotenv(self) -> None:
with tempfile.TemporaryDirectory() as tmp:
tmp_path = Path(tmp)