## LLM call metrics

The `LLMMetrics` interface describes the telemetry captured for a single LLM call: trace identifiers, token usage, latency, cost, outcome, and the sampling parameters in effect.

```typescript
interface LLMMetrics {
    traceId: string;
    spanId: string;
    provider: string;
    model: string;
    inputTokens: number;
    outputTokens: number;
    totalTokens: number;
    latencyMs: number;
    cost: number;
    success: boolean;
    error?: string;
    temperature?: number;
    maxTokens?: number;
    topP?: number;
    streaming?: boolean;
    timestamp: Date;
}
```
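
As a rough sketch of how a record might be produced, the following wraps a call and fills in every field. `callModel`, its response shape, and the per-token prices are hypothetical stand-ins for illustration, not part of this interface's contract:

```typescript
// Hypothetical provider client; stands in for whatever SDK actually makes the call.
declare function callModel(req: {
    model: string;
    prompt: string;
    temperature?: number;
}): Promise<{
    traceId: string;
    spanId: string;
    text: string;
    usage: { inputTokens: number; outputTokens: number };
}>;

// Assumed per-token prices, for illustration only.
const INPUT_PRICE = 0.15e-6;  // $ per input token
const OUTPUT_PRICE = 0.6e-6;  // $ per output token

async function callWithMetrics(prompt: string): Promise<LLMMetrics> {
    const start = Date.now();
    const base = {
        provider: "openai",
        model: "gpt-4o-mini",
        temperature: 0.2,
        streaming: false,
        timestamp: new Date(),
    };
    try {
        const res = await callModel({ model: base.model, prompt, temperature: base.temperature });
        const { inputTokens, outputTokens } = res.usage;
        return {
            ...base,
            traceId: res.traceId,
            spanId: res.spanId,
            inputTokens,
            outputTokens,
            totalTokens: inputTokens + outputTokens,
            latencyMs: Date.now() - start,
            cost: inputTokens * INPUT_PRICE + outputTokens * OUTPUT_PRICE,
            success: true,
        };
    } catch (err) {
        return {
            ...base,
            traceId: "",  // no trace context available on failure in this sketch
            spanId: "",
            inputTokens: 0,
            outputTokens: 0,
            totalTokens: 0,
            latencyMs: Date.now() - start,
            cost: 0,
            success: false,
            error: err instanceof Error ? err.message : String(err),
        };
    }
}
```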

### Properties

- `traceId: string` - Identifier of the trace this call belongs to.
- `spanId: string` - Identifier of the span representing this individual call within the trace.
- `provider: string` - Name of the LLM provider (for example `openai` or `anthropic`).
- `model: string` - Identifier of the model that served the call.
- `inputTokens: number` - Number of tokens in the prompt sent to the model.
- `outputTokens: number` - Number of tokens generated in the response.
- `totalTokens: number` - Sum of `inputTokens` and `outputTokens`.
- `latencyMs: number` - End-to-end latency of the call, in milliseconds.
- `cost: number` - Monetary cost attributed to the call.
- `success: boolean` - Whether the call completed without an error.
- `error?: string` - Error message, present only when `success` is `false`.
- `temperature?: number` - Sampling temperature requested, if one was set.
- `maxTokens?: number` - Requested cap on output tokens, if one was set.
- `topP?: number` - Nucleus-sampling (top-p) value requested, if one was set.
- `streaming?: boolean` - Whether the response was streamed.
- `timestamp: Date` - Time at which the metric record was created.
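
Once collected, records like these lend themselves to simple aggregation. The sketch below is an illustrative consumer, not something the interface prescribes; it groups metrics by provider and model and keeps running totals:

```typescript
// Illustrative summary shape; an assumption for this example only.
interface ModelSummary {
    calls: number;
    failures: number;
    totalTokens: number;
    totalCost: number;
    avgLatencyMs: number;
}

function summarizeByModel(metrics: LLMMetrics[]): Map<string, ModelSummary> {
    const out = new Map<string, ModelSummary>();
    for (const m of metrics) {
        const key = `${m.provider}/${m.model}`;
        const s = out.get(key) ?? {
            calls: 0, failures: 0, totalTokens: 0, totalCost: 0, avgLatencyMs: 0,
        };
        // Update the running average latency before incrementing the call count.
        s.avgLatencyMs = (s.avgLatencyMs * s.calls + m.latencyMs) / (s.calls + 1);
        s.calls += 1;
        if (!m.success) s.failures += 1;
        s.totalTokens += m.totalTokens;
        s.totalCost += m.cost;
        out.set(key, s);
    }
    return out;
}
```

Keying on `provider/model` keeps figures for the same model name served by different providers separate, and because `totalTokens` is carried on each record, aggregators need not recompute `inputTokens + outputTokens`.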