Handle missing streamed token metrics
This commit is contained in:
@@ -11,7 +11,13 @@ from unittest import mock
|
||||
from aituner.cli import main as cli_main
|
||||
from aituner.compare import _aggregate_summary, load_compare_spec, run_compare
|
||||
from aituner.engine import build_launch_recipe
|
||||
from aituner.http_client import StreamMetrics, _auth_headers, _openai_url, _should_bypass_proxy
|
||||
from aituner.http_client import (
|
||||
StreamMetrics,
|
||||
_auth_headers,
|
||||
_openai_url,
|
||||
_should_bypass_proxy,
|
||||
stream_chat_completion,
|
||||
)
|
||||
from aituner.job import append_job, build_trial_job
|
||||
from aituner.harness import (
|
||||
build_harness_context,
|
||||
@@ -3767,6 +3773,29 @@ class CoreFlowTests(unittest.TestCase):
|
||||
"http://example.com/v1/chat/completions",
|
||||
)
|
||||
|
||||
def test_stream_chat_completion_handles_missing_usage_and_chunks(self) -> None:
    """A stream with no content chunks and no usage payload yields all-None metrics."""

    class _StubResponse:
        """Minimal context-manager/iterable standing in for the HTTP response."""

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc, traceback):
            # Never suppress exceptions raised inside the `with` body.
            return False

        def __iter__(self):
            # One empty-choices SSE frame, then the stream terminator.
            frames = [b"data: {\"choices\": []}\n", b"data: [DONE]\n"]
            return iter(frames)

    patch_target = "aituner.http_client._urlopen"
    with mock.patch(patch_target, return_value=_StubResponse()):
        metrics = stream_chat_completion(
            base_url="http://127.0.0.1:8000",
            body={"model": "m", "messages": [{"role": "user", "content": "x"}]},
            timeout_s=1.0,
        )

    # Without any token chunks or usage block, every timing/count field is None.
    for value in (metrics.ttft_ms, metrics.tpot_ms, metrics.completion_tokens):
        self.assertIsNone(value)
    self.assertEqual(metrics.completion_tokens_source, "none")
||||
def test_loopback_urls_bypass_proxy(self) -> None:
    """Requests to loopback hosts must skip any configured HTTP proxy."""
    loopback_urls = (
        "http://127.0.0.1:8000/v1/models",
        "http://localhost:8000/health",
    )
    for url in loopback_urls:
        self.assertTrue(_should_bypass_proxy(url))
|
||||
Reference in New Issue
Block a user