Add codex and bailian LLM provider presets

This commit is contained in:
2026-04-07 11:31:26 +08:00
parent f73a8a5767
commit 94c89e1103
8 changed files with 236 additions and 11 deletions

View File

@@ -1,6 +1,7 @@
from __future__ import annotations
import json
import os
import subprocess
import tempfile
import unittest
@@ -8,12 +9,19 @@ from pathlib import Path
from unittest import mock
from aituner.cli import main as cli_main
from aituner.http_client import _openai_url, _should_bypass_proxy
from aituner.http_client import _auth_headers, _openai_url, _should_bypass_proxy
from aituner.job import append_job, build_trial_job
from aituner.llm import build_prompt, parse_proposal_text
from aituner.search import ThresholdProbe, binary_search_max_feasible
from aituner.slo import RequestOutcome, evaluate_request, summarize_evaluations
from aituner.spec import Proposal, SpecError, StudyState, TrialSummary, load_study_spec
from aituner.spec import (
LLMEndpointSpec,
Proposal,
SpecError,
StudyState,
TrialSummary,
load_study_spec,
)
from aituner.store import StudyStore
from aituner.trace import load_trace_requests, summarize_window
from aituner.worker import (
@@ -214,6 +222,71 @@ class CoreFlowTests(unittest.TestCase):
with self.assertRaisesRegex(SpecError, "min_input_tokens must be <="):
load_study_spec(study_path)
def test_bailian_endpoint_defaults(self) -> None:
    """The 'bailian' preset fills in DashScope's base URL and API-key env var."""
    spec = LLMEndpointSpec.from_dict({"provider": "bailian", "model": "qwen-plus"})
    # Provider name is preserved verbatim on the parsed spec.
    self.assertEqual(spec.provider, "bailian")
    # Preset defaults supplied when the spec dict omits them.
    self.assertEqual(spec.api_key_env, "DASHSCOPE_API_KEY")
    self.assertEqual(
        spec.base_url, "https://dashscope.aliyuncs.com/compatible-mode/v1"
    )
def test_codex_endpoint_resolves_base_url_from_codex_config(self) -> None:
    """The 'codex' preset resolves base_url from the active provider in ~/.codex/config.toml."""
    with tempfile.TemporaryDirectory() as tmp:
        home = Path(tmp)
        codex_home = home / ".codex"
        codex_home.mkdir(parents=True)
        config_lines = [
            'model_provider = "ipads"',
            "",
            "[model_providers.ipads]",
            'base_url = "http://codex.example/v1"',
        ]
        (codex_home / "config.toml").write_text(
            "\n".join(config_lines), encoding="utf-8"
        )
        # Point HOME at the temp dir so the preset discovers our config.toml.
        with mock.patch.dict(os.environ, {"HOME": str(home)}, clear=True):
            endpoint = LLMEndpointSpec.from_dict(
                {"provider": "codex", "model": "gpt-5.4"}
            )
            self.assertEqual(endpoint.provider, "codex")
            self.assertEqual(endpoint.base_url, "http://codex.example/v1")
            self.assertEqual(endpoint.api_key_env, "OPENAI_API_KEY")
def test_auth_headers_load_bailian_key_from_dotenv(self) -> None:
    """A DashScope key in ./.env is picked up when the env var itself is unset."""
    with tempfile.TemporaryDirectory() as tmp:
        workdir = Path(tmp)
        (workdir / ".env").write_text(
            'DASHSCOPE_API_KEY="dash-key"\n', encoding="utf-8"
        )
        # Empty environment forces the dotenv fallback; cwd patched to the
        # temp dir so _auth_headers finds our .env file.
        with mock.patch.dict(os.environ, {}, clear=True), mock.patch(
            "pathlib.Path.cwd", return_value=workdir
        ):
            headers = _auth_headers("DASHSCOPE_API_KEY", "bailian")
            self.assertEqual(headers["Authorization"], "Bearer dash-key")
def test_auth_headers_load_codex_auth_and_proxy(self) -> None:
    """Codex provider reads the key from ~/.codex/auth.json and exports proxy vars."""
    with tempfile.TemporaryDirectory() as tmp:
        home = Path(tmp)
        codex_home = home / ".codex"
        codex_home.mkdir(parents=True)
        proxy_toml = "\n".join(
            [
                "[network]",
                'http_proxy = "http://proxy.example:3128"',
                'https_proxy = "http://proxy.example:3128"',
            ]
        )
        (codex_home / "config.toml").write_text(proxy_toml, encoding="utf-8")
        (codex_home / "auth.json").write_text(
            json.dumps({"OPENAI_API_KEY": "sk-codex-test"}), encoding="utf-8"
        )
        with mock.patch.dict(os.environ, {"HOME": str(home)}, clear=True):
            with mock.patch("pathlib.Path.cwd", return_value=home):
                headers = _auth_headers("OPENAI_API_KEY", "codex")
                # _auth_headers is expected to export the [network] proxy
                # settings into os.environ as a side effect; assert while
                # patch.dict is still active (it restores env on exit).
                self.assertEqual(
                    os.environ["http_proxy"], "http://proxy.example:3128"
                )
                self.assertEqual(
                    os.environ["HTTP_PROXY"], "http://proxy.example:3128"
                )
                self.assertEqual(headers["Authorization"], "Bearer sk-codex-test")
def test_prompt_includes_failed_trial_context(self) -> None:
with tempfile.TemporaryDirectory() as tmp:
tmp_path = Path(tmp)