Constrain harness topology by visible GPUs

This commit is contained in:
2026-05-13 01:25:31 +08:00
parent fb6d74a18c
commit 5c2958e6c1
3 changed files with 143 additions and 7 deletions

View File

@@ -1175,6 +1175,100 @@ class CoreFlowTests(unittest.TestCase):
self.assertIsNotNone(proposal)
self.assertEqual(proposal.config_patch.flag_patch, {"tensor-parallel-size": 2})
def test_harness_excludes_topology_above_visible_gpu_count(self) -> None:
    """Topology candidates must respect the visible-GPU budget.

    The study nominally allows tensor-parallel sizes up to 8, but only
    seven GPUs are exposed through CUDA_VISIBLE_DEVICES, so neither the
    candidate actions nor the guided proposal may request tp=8.
    """
    with tempfile.TemporaryDirectory() as tmp:
        root = Path(tmp)
        # Study permits tp in {1, 2, 4, 8}; visibility caps it below 8.
        study_path = _write_study_assets(
            root,
            engine_overrides={
                "base_envs": {"CUDA_VISIBLE_DEVICES": "0,1,2,4,5,6,7"},
                "tunable_flags": ["tensor-parallel-size"],
                "topology_constraints": {
                    "allowed_tensor_parallel_sizes": [1, 2, 4, 8],
                    "allowed_tp_dp_products": [1, 2, 4, 8],
                },
            },
        )
        # Persist the best trial's raw result with an SLO-failing probe.
        probe_payload = {
            "request_count": 100,
            "pass_rate": 0.8,
            "request_rate": 1.10,
            "early_stop_reason": "slo_pass_rate_unrecoverable",
            "latency_summary": {
                "failed_reason_counts": {"tpot_ms>25.0": 20}
            },
        }
        result_doc = {
            "status": "completed",
            "best_request_rate": 1.078,
            "best_pass_rate": 0.958,
            "probes": [
                {
                    "threshold": 0.039,
                    "feasible": False,
                    "payload": probe_payload,
                }
            ],
        }
        result_path = root / "trial-0003.json"
        result_path.write_text(json.dumps(result_doc), encoding="utf-8")
        # Trial history: tp=1, tp=2, then the best trial at tp=4.
        trial_history = [
            TrialSummary(
                trial_id="trial-0001",
                status="completed",
                best_request_rate=0.065,
                best_request_rate_per_gpu=0.065,
                config_patch={"env_patch": {}, "flag_patch": {}},
            ),
            TrialSummary(
                trial_id="trial-0002",
                status="completed",
                best_request_rate=0.398,
                best_request_rate_per_gpu=0.199,
                config_patch={
                    "env_patch": {},
                    "flag_patch": {"tensor-parallel-size": 2},
                },
            ),
            TrialSummary(
                trial_id="trial-0003",
                status="completed",
                best_request_rate=1.078,
                best_request_rate_per_gpu=0.2695,
                result_path=str(result_path),
                config_patch={
                    "env_patch": {},
                    "flag_patch": {"tensor-parallel-size": 4},
                },
            ),
        ]
        study = load_study_spec(study_path)
        state = StudyState(
            study_id=study.study_id,
            best_trial_id="trial-0003",
            best_request_rate=1.078,
            best_request_rate_per_gpu=0.2695,
            trials=trial_history,
        )
        context = build_harness_context(
            study=study,
            window_summary={
                "prompt_tokens_p95": 7628,
                "prompt_tail_ratio_p95_p50": 3.8,
            },
            state=state,
        )
        # Every candidate must stay within the 7 visible GPUs.
        for action in context["candidate_actions"]:
            tp = action["config_patch"]["flag_patch"].get("tensor-parallel-size")
            self.assertNotEqual(tp, 8)
        # The guided proposal (if any) must also avoid tp=8.
        proposal = build_harness_guided_proposal(context)
        if proposal is not None:
            self.assertNotEqual(
                proposal.config_patch.flag_patch.get("tensor-parallel-size"),
                8,
            )
def test_harness_stop_blocked_until_slo_driven_topology_frontier_is_measured(self) -> None:
with tempfile.TemporaryDirectory() as tmp:
tmp_path = Path(tmp)