Fix list flag serialization for engine launch
This commit is contained in:
@@ -9,12 +9,14 @@ from pathlib import Path
|
||||
from unittest import mock
|
||||
|
||||
from aituner.cli import main as cli_main
|
||||
from aituner.engine import build_launch_recipe
|
||||
from aituner.http_client import _auth_headers, _openai_url, _should_bypass_proxy
|
||||
from aituner.job import append_job, build_trial_job
|
||||
from aituner.llm import _extract_response_text, build_prompt, parse_proposal_text
|
||||
from aituner.search import ThresholdProbe, binary_search_max_feasible
|
||||
from aituner.slo import RequestOutcome, evaluate_request, summarize_evaluations
|
||||
from aituner.spec import (
|
||||
ConfigPatch,
|
||||
LLMEndpointSpec,
|
||||
Proposal,
|
||||
SpecError,
|
||||
@@ -544,6 +546,22 @@ class CoreFlowTests(unittest.TestCase):
|
||||
self.assertEqual(summary["slo_pass_rate"], 0.5)
|
||||
self.assertFalse(summary["feasible"])
|
||||
|
||||
def test_build_launch_recipe_serializes_list_flags_once(self) -> None:
    """A list-valued flag patch must emit the flag name exactly once,
    followed by one argv token per list element (stringified, in order)."""
    flag = "--cuda-graph-sizes"
    with tempfile.TemporaryDirectory() as tmp:
        study = load_study_spec(_write_study_assets(Path(tmp)))
        patch = ConfigPatch(flag_patch={"cuda-graph-sizes": [1, 2, 4]})
        recipe = build_launch_recipe(study.engine, patch)
        # The flag appears, each element is its own token, and it is not duplicated.
        self.assertIn(flag, recipe.argv)
        pos = recipe.argv.index(flag)
        self.assertEqual(["1", "2", "4"], recipe.argv[pos + 1 : pos + 4])
        self.assertEqual(1, recipe.argv.count(flag))
|
||||
|
||||
def test_prepare_trace_windows_materializes_repo_local_assets(self) -> None:
|
||||
with tempfile.TemporaryDirectory() as tmp:
|
||||
tmp_path = Path(tmp)
|
||||
|
||||
Reference in New Issue
Block a user