Add codex and bailian LLM provider presets

This commit is contained in:
2026-04-07 11:31:26 +08:00
parent f73a8a5767
commit 94c89e1103
8 changed files with 236 additions and 11 deletions

View File

@@ -90,7 +90,8 @@
 "system_prompt": "Propose a single engine config patch that increases the maximum feasible sampling_u under the SLO target.",
 "max_history_trials": 8,
 "endpoint": {
-"base_url": "https://example-openai-compatible-endpoint",
+"provider": "custom",
+"base_url": "https://example-openai-compatible-endpoint/v1",
 "model": "gpt-4.1-mini",
 "api_key_env": "OPENAI_API_KEY",
 "timeout_s": 120