Add harness guided first topology probe
This commit is contained in:
@@ -13,7 +13,11 @@ from aituner.compare import load_compare_spec, run_compare
|
||||
from aituner.engine import build_launch_recipe
|
||||
from aituner.http_client import _auth_headers, _openai_url, _should_bypass_proxy
|
||||
from aituner.job import append_job, build_trial_job
|
||||
from aituner.harness import build_harness_context, build_harness_stop_proposal
|
||||
from aituner.harness import (
|
||||
build_harness_context,
|
||||
build_harness_guided_proposal,
|
||||
build_harness_stop_proposal,
|
||||
)
|
||||
from aituner.llm import _extract_response_text, build_prompt, parse_proposal_text, validate_proposal
|
||||
from aituner.search import ThresholdProbe, binary_search_max_feasible
|
||||
from aituner.slo import RequestOutcome, evaluate_request, summarize_evaluations
|
||||
@@ -596,6 +600,75 @@ class CoreFlowTests(unittest.TestCase):
|
||||
self.assertIsNotNone(proposal)
|
||||
self.assertTrue(proposal.should_stop)
|
||||
|
||||
def test_harness_guided_first_tp_probe_for_latency_bottleneck(self) -> None:
    """A TPOT-latency-dominated failed probe should yield a TP=2 (not stop) proposal.

    Builds a study whose topology constraints allow TP in {1, 2, 4} and DP in
    {1, 2}, records one infeasible probe whose failures are all TPOT-latency
    violations, and checks that the harness-guided proposal escalates
    tensor parallelism to 2 rather than stopping the study.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        workdir = Path(tmpdir)

        study_path = _write_study_assets(
            workdir,
            engine_overrides={
                "tunable_flags": ["tensor-parallel-size", "data-parallel-size"],
                "topology_constraints": {
                    "allowed_tensor_parallel_sizes": [1, 2, 4],
                    "allowed_data_parallel_sizes": [1, 2],
                    "allowed_tp_dp_products": [1, 2, 4],
                },
            },
        )
        study = load_study_spec(study_path)

        # One infeasible probe whose failures are entirely TPOT-latency
        # violations — the signal that TP should be increased.
        failed_probe = {
            "threshold": 0.5,
            "feasible": False,
            "payload": {
                "request_count": 100,
                "pass_rate": 0.6,
                "request_rate": 4.0,
                "early_stopped": True,
                "early_stop_reason": "slo_pass_rate_unrecoverable",
                "latency_summary": {
                    "failed_reason_counts": {"tpot_ms>50.0": 40},
                },
            },
        }
        trial_result = {
            "status": "completed",
            "best_sampling_u": 0.25,
            "best_request_rate": 2.0,
            "best_pass_rate": 1.0,
            "probes": [failed_probe],
        }
        result_path = workdir / "trial-0001.json"
        result_path.write_text(json.dumps(trial_result), encoding="utf-8")

        state = StudyState(
            study_id=study.study_id,
            best_trial_id="trial-0001",
            best_request_rate=2.0,
            best_request_rate_per_gpu=2.0,
            trials=[
                TrialSummary(
                    trial_id="trial-0001",
                    status="completed",
                    best_request_rate=2.0,
                    best_request_rate_per_gpu=2.0,
                    result_path=str(result_path),
                    config_patch={"env_patch": {}, "flag_patch": {}},
                )
            ],
        )
        context = build_harness_context(
            study=study,
            window_summary={"prompt_tokens_p95": 2048},
            state=state,
        )

        proposal = build_harness_guided_proposal(context)

        self.assertIsNotNone(proposal)
        # Latency bottleneck ⇒ first topology move is TP=2, and the study continues.
        self.assertEqual(proposal.config_patch.flag_patch, {"tensor-parallel-size": 2})
        self.assertFalse(proposal.should_stop)
|
||||
|
||||
def test_trace_input_length_filter_keeps_only_matching_rows(self) -> None:
|
||||
with tempfile.TemporaryDirectory() as tmp:
|
||||
tmp_path = Path(tmp)
|
||||
|
||||
Reference in New Issue
Block a user