feat: comprehensive token usage tracking for V2
Add provider-specific usage details, fix streaming usage, and return usage from all high-level APIs (Chat.Send, Generate[T], Agent.Run).

Breaking changes:
- Chat.Send/SendMessage/SendWithImages now return (string, *Usage, error)
- Generate[T]/GenerateWith[T] now return (T, *Usage, error)
- Agent.Run/RunMessages now return (string, *Usage, error)

New features:
- Usage.Details map for provider-specific token breakdowns (reasoning, cached, audio, thoughts tokens)
- OpenAI streaming now captures usage via StreamOptions.IncludeUsage
- Google streaming now captures UsageMetadata from final chunk
- UsageTracker.Details() for accumulated detail totals
- ModelPricing and PricingRegistry for cost computation

Closes #2

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -18,7 +18,7 @@
|
||||
// coder.AsTool("code", "Write and run code"),
|
||||
// )),
|
||||
// )
|
||||
// result, err := orchestrator.Run(ctx, "Build a fibonacci function in Go")
|
||||
// result, _, err := orchestrator.Run(ctx, "Build a fibonacci function in Go")
|
||||
package agent
|
||||
|
||||
import (
|
||||
@@ -64,13 +64,15 @@ func New(model *llm.Model, system string, opts ...Option) *Agent {
|
||||
|
||||
// Run executes the agent with a user prompt. Each call is a fresh conversation.
|
||||
// The agent loops tool calls automatically until it produces a text response.
|
||||
func (a *Agent) Run(ctx context.Context, prompt string) (string, error) {
|
||||
// Returns the text response, accumulated token usage, and any error.
|
||||
func (a *Agent) Run(ctx context.Context, prompt string) (string, *llm.Usage, error) {
|
||||
return a.RunMessages(ctx, []llm.Message{llm.UserMessage(prompt)})
|
||||
}
|
||||
|
||||
// RunMessages executes the agent with full message control.
|
||||
// Each call is a fresh conversation. The agent loops tool calls automatically.
|
||||
func (a *Agent) RunMessages(ctx context.Context, messages []llm.Message) (string, error) {
|
||||
// Returns the text response, accumulated token usage, and any error.
|
||||
func (a *Agent) RunMessages(ctx context.Context, messages []llm.Message) (string, *llm.Usage, error) {
|
||||
chat := llm.NewChat(a.model, a.reqOpts...)
|
||||
if a.system != "" {
|
||||
chat.SetSystem(a.system)
|
||||
@@ -107,7 +109,8 @@ type delegateParams struct {
|
||||
func (a *Agent) AsTool(name, description string) llm.Tool {
|
||||
return llm.Define[delegateParams](name, description,
|
||||
func(ctx context.Context, p delegateParams) (string, error) {
|
||||
return a.Run(ctx, p.Input)
|
||||
text, _, err := a.Run(ctx, p.Input)
|
||||
return text, err
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user