# Source: ali-trace-tools/trace_analyzer/models.py
# Retrieved: 2026-04-21 15:44:47 +00:00 (95 lines, 2.1 KiB, Python)
from dataclasses import dataclass, field
@dataclass
class MessageEvent:
    """A single message observed in a traced LLM request.

    Captures only lightweight, privacy-safe features of the message
    (role, content kind, and length) rather than the message text itself.
    """

    role: str          # message role, e.g. "user"/"assistant" — values not constrained here; TODO confirm against producer
    content_type: str  # kind of content carried by the message — schema defined by the trace producer
    text_len: int      # length of the textual content (presumably characters — confirm)
    has_cache_control: bool = False  # True if the message carried a cache-control marker — TODO confirm semantics
    item_count: int = 0              # number of content items in the message, 0 when not applicable
@dataclass
class ToolSpec:
    """A tool declared in a traced request's tool list."""

    name: str       # tool name as declared in the request
    tool_type: str  # tool category/type string — schema defined by the trace producer
@dataclass
class UsageStats:
    """Token-usage counters reported for one traced request.

    All counters default to 0 so a record with missing usage data is
    still constructible.
    """

    input_tokens: int = 0      # prompt-side tokens
    output_tokens: int = 0     # completion-side tokens
    total_tokens: int = 0      # reported total — not derived here; may or may not equal input + output
    reasoning_tokens: int = 0  # reasoning/thinking tokens, when the provider reports them
    cached_tokens: int = 0     # prompt tokens served from cache, when reported
@dataclass
class RequestMeta:
    """Identity, status, and timing metadata for one traced request."""

    provider: str         # upstream provider identifier
    line_number: int      # line in the source trace file this record came from
    request_id: str       # unique id of the request
    session_id: str       # id grouping requests into a session/conversation
    request_model: str    # model name sent in the request
    time: str             # timestamp as recorded in the trace (kept as string; format not parsed here)
    status_code: str      # response status code — stored as string, matching the trace format
    status_name: str      # human-readable status label
    request_ready_time_ms: int  # timing fields in milliseconds — exact reference points defined by the trace producer; TODO confirm
    request_end_time_ms: int
    total_cost_time_ms: int
    backend_first_request_time_ms: int = 0   # 0 when the backend timing was not recorded
    backend_first_response_time_ms: int = 0  # 0 when the backend timing was not recorded
@dataclass
class TraceRecord:
    """One fully parsed trace entry: metadata plus extracted content.

    Aggregates the request's metadata, its message stream (both as
    lightweight MessageEvent features and as raw dicts), the declared
    tools, and the reported token usage.
    """

    meta: RequestMeta                   # identity/status/timing — the only required field
    canonical_prompt: str = ""          # normalized prompt text; empty when not extracted
    messages: list[MessageEvent] = field(default_factory=list)   # per-message features, in order
    role_sequence: list[str] = field(default_factory=list)       # roles in message order (presumably parallels `messages` — confirm)
    declared_tools: list[ToolSpec] = field(default_factory=list) # tools declared in the request
    usage: UsageStats = field(default_factory=UsageStats)        # zeroed UsageStats when no usage was reported
    raw_messages: list[dict] = field(default_factory=list)       # original message payloads, unprocessed
@dataclass
class TraceFeatures:
    """Flat feature vector derived from one TraceRecord, for analysis/export.

    All fields are plain scalars (plus one label list) so a row can be
    written directly to tabular output. Boolean-like flags are stored as
    int (0/1), matching the rest of the pipeline's numeric schema.
    """

    # --- identity ---
    request_id: str
    session_id: str
    model: str
    status_code: str  # kept as string, consistent with RequestMeta.status_code
    time: str

    # --- conversation shape ---
    message_count: int
    conversation_depth: int
    declared_tool_count: int
    assistant_msg_count: int
    tool_msg_count: int
    user_msg_count: int
    system_msg_count: int

    # --- role-transition counts (adjacent-pair counts in the role sequence — confirm) ---
    assistant_to_tool_count: int
    tool_to_assistant_count: int
    tool_to_tool_count: int
    assistant_to_user_count: int
    user_to_assistant_count: int

    # --- tool-burst features ---
    max_consecutive_tool_msgs: int
    avg_tool_burst_len: float
    has_tool_loop: int  # 0/1 flag

    # --- token usage ---
    input_tokens: int
    output_tokens: int
    total_tokens: int
    reasoning_tokens: int
    cached_tokens: int
    cache_hit_ratio: float
    uncached_prompt_tokens: int
    output_input_ratio: float

    # --- latency ---
    latency_ms: int
    ms_per_input_token: float
    ms_per_output_token: float

    # --- alert flags (0/1; thresholds defined by the feature extractor) ---
    long_context: int
    high_cache: int
    tool_burst_alert: int
    tool_loop_alert: int
    slow_request: int = 0

    # free-form pattern labels attached by downstream classification
    pattern_labels: list[str] = field(default_factory=list)