feat: comprehensive token usage tracking for V2
Add provider-specific usage details, fix streaming usage, and return usage from all high-level APIs (Chat.Send, Generate[T], Agent.Run).

Breaking changes:
- Chat.Send/SendMessage/SendWithImages now return (string, *Usage, error)
- Generate[T]/GenerateWith[T] now return (T, *Usage, error)
- Agent.Run/RunMessages now return (string, *Usage, error)

New features:
- Usage.Details map for provider-specific token breakdowns (reasoning, cached, audio, thoughts tokens)
- OpenAI streaming now captures usage via StreamOptions.IncludeUsage
- Google streaming now captures UsageMetadata from the final chunk
- UsageTracker.Details() for accumulated detail totals
- ModelPricing and PricingRegistry for cost computation

Closes #2

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -280,3 +280,80 @@ func TestWithLogging_Error(t *testing.T) {
|
||||
t.Errorf("expected provider error, got %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUsageTracker_Details(t *testing.T) {
|
||||
tracker := &UsageTracker{}
|
||||
|
||||
tracker.Add(&Usage{
|
||||
InputTokens: 100,
|
||||
OutputTokens: 50,
|
||||
TotalTokens: 150,
|
||||
Details: map[string]int{
|
||||
"cached_input_tokens": 20,
|
||||
"reasoning_tokens": 10,
|
||||
},
|
||||
})
|
||||
|
||||
tracker.Add(&Usage{
|
||||
InputTokens: 80,
|
||||
OutputTokens: 40,
|
||||
TotalTokens: 120,
|
||||
Details: map[string]int{
|
||||
"cached_input_tokens": 15,
|
||||
},
|
||||
})
|
||||
|
||||
details := tracker.Details()
|
||||
if details == nil {
|
||||
t.Fatal("expected details, got nil")
|
||||
}
|
||||
if details["cached_input_tokens"] != 35 {
|
||||
t.Errorf("expected cached_input_tokens=35, got %d", details["cached_input_tokens"])
|
||||
}
|
||||
if details["reasoning_tokens"] != 10 {
|
||||
t.Errorf("expected reasoning_tokens=10, got %d", details["reasoning_tokens"])
|
||||
}
|
||||
|
||||
// Verify returned map is a copy
|
||||
details["cached_input_tokens"] = 999
|
||||
fresh := tracker.Details()
|
||||
if fresh["cached_input_tokens"] != 35 {
|
||||
t.Error("Details() did not return a copy")
|
||||
}
|
||||
}
|
||||
|
||||
func TestUsageTracker_Details_Nil(t *testing.T) {
|
||||
tracker := &UsageTracker{}
|
||||
tracker.Add(&Usage{InputTokens: 10, OutputTokens: 5, TotalTokens: 15})
|
||||
|
||||
details := tracker.Details()
|
||||
if details != nil {
|
||||
t.Errorf("expected nil details for usage without details, got %v", details)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWithUsageTracking_WithDetails(t *testing.T) {
|
||||
mp := newMockProvider(provider.Response{
|
||||
Text: "ok",
|
||||
Usage: &provider.Usage{
|
||||
InputTokens: 100,
|
||||
OutputTokens: 50,
|
||||
TotalTokens: 150,
|
||||
Details: map[string]int{
|
||||
"cached_input_tokens": 30,
|
||||
},
|
||||
},
|
||||
})
|
||||
tracker := &UsageTracker{}
|
||||
model := newMockModel(mp).WithMiddleware(WithUsageTracking(tracker))
|
||||
|
||||
_, err := model.Complete(context.Background(), []Message{UserMessage("test")})
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
|
||||
details := tracker.Details()
|
||||
if details["cached_input_tokens"] != 30 {
|
||||
t.Errorf("expected cached_input_tokens=30, got %d", details["cached_input_tokens"])
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user