Add SLO-driven topology frontier harness guard

This commit is contained in:
2026-05-12 21:00:49 +08:00
parent e1125475ae
commit 2d03b1cd4c
2 changed files with 340 additions and 0 deletions

View File

@@ -826,6 +826,189 @@ class CoreFlowTests(unittest.TestCase):
},
)
def test_harness_validates_unmeasured_tp_frontier_before_runtime_refinement(self) -> None:
    """When a trial at TP=2 shows SLO pressure, the harness should propose
    probing the next allowed tensor-parallel size (4) before falling back
    to runtime refinement."""
    with tempfile.TemporaryDirectory() as tmp:
        workdir = Path(tmp)
        spec_path = _write_study_assets(
            workdir,
            engine_overrides={
                "tunable_flags": [
                    "tensor-parallel-size",
                    "max-num-batched-tokens",
                    "enable-chunked-prefill",
                ],
                "topology_constraints": {
                    "allowed_tensor_parallel_sizes": [1, 2, 4],
                    "allowed_tp_dp_products": [1, 2, 4],
                },
            },
        )
        study = load_study_spec(spec_path)

        # Trial result fixture: one feasible probe at u=0.5 and one
        # SLO-infeasible probe at u=0.75 that failed on TPOT latency.
        feasible_probe = {
            "threshold": 0.5,
            "feasible": True,
            "payload": {
                "request_count": 100,
                "pass_rate": 0.96,
                "request_rate": 2.0,
                "latency_summary": {"failed_reason_counts": {}},
            },
        }
        infeasible_probe = {
            "threshold": 0.75,
            "feasible": False,
            "payload": {
                "request_count": 100,
                "pass_rate": 0.6,
                "request_rate": 3.0,
                "early_stop_reason": "slo_pass_rate_unrecoverable",
                "latency_summary": {
                    "failed_reason_counts": {"tpot_ms>25.0": 40}
                },
            },
        }
        trial_result_path = workdir / "trial-0002.json"
        trial_result_path.write_text(
            json.dumps(
                {
                    "status": "completed",
                    "best_sampling_u": 0.5,
                    "best_request_rate": 2.0,
                    "best_pass_rate": 0.96,
                    "probes": [feasible_probe, infeasible_probe],
                }
            ),
            encoding="utf-8",
        )

        # Study state: a baseline trial plus the best (TP=2) trial whose
        # probes exhibit the SLO pressure above.
        baseline_trial = TrialSummary(
            trial_id="trial-0001",
            status="completed",
            best_request_rate=0.5,
            best_request_rate_per_gpu=0.5,
            config_patch={"env_patch": {}, "flag_patch": {}},
        )
        tp2_trial = TrialSummary(
            trial_id="trial-0002",
            status="completed",
            best_request_rate=2.0,
            best_request_rate_per_gpu=1.0,
            result_path=str(trial_result_path),
            config_patch={
                "env_patch": {},
                "flag_patch": {"tensor-parallel-size": 2},
            },
        )
        state = StudyState(
            study_id=study.study_id,
            best_trial_id="trial-0002",
            best_request_rate=2.0,
            best_request_rate_per_gpu=1.0,
            trials=[baseline_trial, tp2_trial],
        )

        context = build_harness_context(
            study=study,
            window_summary={"prompt_tokens_p95": 7628, "prompt_tail_ratio_p95_p50": 3.8},
            state=state,
        )
        proposal = build_harness_guided_proposal(context)

        # The proposal must target the next unmeasured allowed TP value.
        self.assertIsNotNone(proposal)
        self.assertEqual(proposal.config_patch.flag_patch, {"tensor-parallel-size": 4})
        self.assertEqual(
            context["harness_proposal"]["reason"],
            "topology_frontier_probe_for_slo_pressure",
        )
def test_harness_stop_blocked_until_slo_driven_topology_frontier_is_measured(self) -> None:
    """The harness must refuse to stop the study while an SLO-pressured
    topology frontier point (next allowed TP size) remains unmeasured,
    even after non-topology refinements have plateaued."""
    with tempfile.TemporaryDirectory() as tmp:
        workdir = Path(tmp)
        spec_path = _write_study_assets(
            workdir,
            engine_overrides={
                "tunable_flags": ["tensor-parallel-size", "max-num-seqs"],
                "topology_constraints": {
                    "allowed_tensor_parallel_sizes": [1, 2, 4],
                    "allowed_tp_dp_products": [1, 2, 4],
                },
            },
        )
        study = load_study_spec(spec_path)

        # Trial result fixture: a single SLO-infeasible probe at u=0.75
        # that failed on TPOT latency.
        infeasible_probe = {
            "threshold": 0.75,
            "feasible": False,
            "payload": {
                "request_count": 100,
                "pass_rate": 0.6,
                "request_rate": 3.0,
                "early_stop_reason": "slo_pass_rate_unrecoverable",
                "latency_summary": {
                    "failed_reason_counts": {"tpot_ms>25.0": 40}
                },
            },
        }
        trial_result_path = workdir / "trial-0002.json"
        trial_result_path.write_text(
            json.dumps(
                {
                    "status": "completed",
                    "best_sampling_u": 0.5,
                    "best_request_rate": 2.0,
                    "best_pass_rate": 0.96,
                    "probes": [infeasible_probe],
                }
            ),
            encoding="utf-8",
        )

        # Study state: baseline, the best TP=2 trial under SLO pressure,
        # and two plateaued max-num-seqs refinement trials.
        baseline_trial = TrialSummary(
            trial_id="trial-0001",
            status="completed",
            best_request_rate=0.5,
            best_request_rate_per_gpu=0.5,
            config_patch={"env_patch": {}, "flag_patch": {}},
        )
        tp2_trial = TrialSummary(
            trial_id="trial-0002",
            status="completed",
            best_request_rate=2.0,
            best_request_rate_per_gpu=1.0,
            result_path=str(trial_result_path),
            config_patch={
                "env_patch": {},
                "flag_patch": {"tensor-parallel-size": 2},
            },
        )
        seqs8_trial = TrialSummary(
            trial_id="trial-0003",
            status="completed",
            best_request_rate=1.98,
            best_request_rate_per_gpu=0.99,
            config_patch={"env_patch": {}, "flag_patch": {"max-num-seqs": 8}},
        )
        seqs16_trial = TrialSummary(
            trial_id="trial-0004",
            status="completed",
            best_request_rate=1.98,
            best_request_rate_per_gpu=0.99,
            config_patch={"env_patch": {}, "flag_patch": {"max-num-seqs": 16}},
        )
        state = StudyState(
            study_id=study.study_id,
            best_trial_id="trial-0002",
            best_request_rate=2.0,
            best_request_rate_per_gpu=1.0,
            trials=[baseline_trial, tp2_trial, seqs8_trial, seqs16_trial],
        )

        context = build_harness_context(
            study=study,
            window_summary={"prompt_tokens_p95": 7628, "prompt_tail_ratio_p95_p50": 3.8},
            state=state,
        )

        # Stopping must be blocked until TP=4 has been probed.
        self.assertFalse(context["harness_stop"]["should_stop"])
        self.assertEqual(context["harness_stop"]["reason"], "topology_frontier_requires_probe")
def test_trace_input_length_filter_keeps_only_matching_rows(self) -> None:
with tempfile.TemporaryDirectory() as tmp:
tmp_path = Path(tmp)