Record failed trial context

Commit 7632de8dad (parent 8b024c72f1), authored 2026-04-04 23:35:07 +08:00.
5 changed files with 141 additions and 11 deletions.

View File

@@ -13,10 +13,10 @@ from aituner.job import append_job, build_trial_job
from aituner.llm import build_prompt, parse_proposal_text
from aituner.search import ThresholdProbe, binary_search_max_feasible
from aituner.slo import RequestOutcome, summarize_evaluations
from aituner.spec import Proposal, load_study_spec
from aituner.spec import Proposal, StudyState, TrialSummary, load_study_spec
from aituner.store import StudyStore
from aituner.trace import load_trace_requests, summarize_window
from aituner.worker import _replay_requests
from aituner.worker import _replay_requests, _wait_for_server_or_exit
from aituner.trace import TraceRequest
@@ -159,6 +159,36 @@ class CoreFlowTests(unittest.TestCase):
self.assertIn("queueing_knee_by_bucket", prompt)
self.assertTrue(study_root.exists())
def test_prompt_includes_failed_trial_context(self) -> None:
    """A prior failed trial must surface in the LLM prompt: its status,
    failure reason, and the config patch it attempted."""
    with tempfile.TemporaryDirectory() as tmp:
        study_path = _write_study_assets(Path(tmp))
        study = load_study_spec(study_path)
        window, requests = load_trace_requests(study, study_spec_path=study_path)
        # One failed trial in the study state is enough to exercise the
        # failure-context section of the prompt.
        failed_trial = TrialSummary(
            trial_id="trial-0001",
            status="failed",
            diagnosis="flashinfer looked promising",
            config_patch={
                "env_patch": {"VLLM_ATTENTION_BACKEND": "FLASHINFER"},
                "flag_patch": {"tensor-parallel-size": 4},
            },
            failure_reason="engine_process_exited_before_ready exit_code=1",
        )
        prompt = build_prompt(
            study=study,
            window_summary=summarize_window(requests, window),
            state=StudyState(study_id=study.study_id, trials=[failed_trial]),
            capability_profile=None,
        )
        # Each fragment must be rendered verbatim into the prompt text.
        for fragment in (
            '"status": "failed"',
            '"failure_reason": "engine_process_exited_before_ready exit_code=1"',
            '"VLLM_ATTENTION_BACKEND": "FLASHINFER"',
        ):
            self.assertIn(fragment, prompt)
def test_length_only_trace_rows_are_synthesized(self) -> None:
with tempfile.TemporaryDirectory() as tmp:
tmp_path = Path(tmp)
@@ -594,6 +624,42 @@ class CoreFlowTests(unittest.TestCase):
self.assertEqual(next_state.best_trial_id, trial.trial_id)
self.assertEqual(next_state.best_request_rate, 12.5)
def test_ingest_trial_results_records_failure_reason(self) -> None:
    """A failed result file on disk is ingested with both its status and
    its failure_reason carried into the updated study state."""
    with tempfile.TemporaryDirectory() as tmp:
        root = Path(tmp)
        study_path = _write_study_assets(root)
        study = load_study_spec(study_path)
        store = StudyStore(root / ".aituner" / "studies")
        store.init_study(spec_path=study_path, study=study)
        state = store.load_state(study.study_id)
        proposal = Proposal.from_dict(
            {
                "observation": "Obs",
                "diagnosis": "Diag",
                "config_patch": {"env_patch": {}, "flag_patch": {"tensor-parallel-size": 4}},
                "expected_effects": ["raise rate"],
            }
        )
        trial, _ = store.materialize_trial(study=study, state=state, proposal=proposal)
        # Simulate a worker reporting a failed trial by writing the result
        # payload directly to the trial's result path.
        result_payload = {
            "study_id": study.study_id,
            "trial_id": trial.trial_id,
            "status": "failed",
            "failure_reason": "engine_process_exited_before_ready exit_code=1",
            "probes": [],
        }
        Path(trial.result_path).write_text(json.dumps(result_payload), encoding="utf-8")
        next_state = store.ingest_trial_results(study.study_id)
        ingested = next_state.trials[0]
        self.assertEqual(ingested.status, "failed")
        self.assertEqual(
            ingested.failure_reason,
            "engine_process_exited_before_ready exit_code=1",
        )
def test_cli_tune_runs_multiple_manual_proposals(self) -> None:
with tempfile.TemporaryDirectory() as tmp:
tmp_path = Path(tmp)
@@ -746,6 +812,17 @@ class CoreFlowTests(unittest.TestCase):
self.assertEqual(len(replayed), 3)
self.assertEqual(replayed[1].error, "slo_pass_rate_unrecoverable")
def test_wait_for_server_or_exit_fails_fast_when_process_exits(self) -> None:
    """If the engine process has already exited before the server becomes
    ready, the wait helper must raise immediately with the exit code."""
    dead_process = mock.Mock()
    # poll() returning a non-None exit code means the process is gone.
    dead_process.poll.return_value = 17
    with self.assertRaisesRegex(
        RuntimeError, "engine_process_exited_before_ready exit_code=17"
    ):
        _wait_for_server_or_exit(
            dead_process,
            base_url="http://127.0.0.1:8000",
            healthcheck_path="/v1/models",
            ready_timeout_s=10.0,
        )
def test_openai_url_avoids_double_v1(self) -> None:
self.assertEqual(
_openai_url("http://example.com", "/v1/chat/completions"),