Files
go-llm/v2/agent/agent.go
Steve Dudenhoeffer 5b687839b2
All checks were successful
CI / Lint (pull_request) Successful in 10m18s
CI / Root Module (pull_request) Successful in 11m4s
CI / V2 Module (pull_request) Successful in 11m5s
feat: comprehensive token usage tracking for V2
Add provider-specific usage details, fix streaming usage, and return
usage from all high-level APIs (Chat.Send, Generate[T], Agent.Run).

Breaking changes:
- Chat.Send/SendMessage/SendWithImages now return (string, *Usage, error)
- Generate[T]/GenerateWith[T] now return (T, *Usage, error)
- Agent.Run/RunMessages now return (string, *Usage, error)

New features:
- Usage.Details map for provider-specific token breakdowns
  (reasoning, cached, audio, thoughts tokens)
- OpenAI streaming now captures usage via StreamOptions.IncludeUsage
- Google streaming now captures UsageMetadata from final chunk
- UsageTracker.Details() for accumulated detail totals
- ModelPricing and PricingRegistry for cost computation

Closes #2

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-02 04:33:18 +00:00

117 lines
3.9 KiB
Go

// Package agent provides a simple agent abstraction built on top of go-llm.
//
// An Agent wraps a model, system prompt, and tools into a reusable unit.
// Agents can be turned into tools via AsTool, enabling parent agents to
// delegate work to specialized sub-agents through the normal tool-call loop.
//
// Example — orchestrator with sub-agents:
//
//	researcher := agent.New(model, "You research topics via web search.",
//		agent.WithTools(llm.NewToolBox(tools.WebSearch(apiKey))),
//	)
//	coder := agent.New(model, "You write and run code.",
//		agent.WithTools(llm.NewToolBox(tools.Exec())),
//	)
//	orchestrator := agent.New(model, "You coordinate research and coding tasks.",
//		agent.WithTools(llm.NewToolBox(
//			researcher.AsTool("research", "Research a topic"),
//			coder.AsTool("code", "Write and run code"),
//		)),
//	)
//	result, _, err := orchestrator.Run(ctx, "Build a fibonacci function in Go")
package agent
import (
"context"
llm "gitea.stevedudenhoeffer.com/steve/go-llm/v2"
)
// Agent is a configured LLM agent with a system prompt and tools.
// Each call to Run creates a fresh conversation (no state is carried between runs).
type Agent struct {
	model   *llm.Model          // model used for every completion call
	system  string              // system prompt; applied only when non-empty
	tools   *llm.ToolBox        // tools available to the agent; nil means no tools
	reqOpts []llm.RequestOption // default request options applied to each completion
}

// Option configures an Agent. Options are applied in order by New.
type Option func(*Agent)
// WithTools sets the tools available to the agent.
func WithTools(tb *llm.ToolBox) Option {
	return func(ag *Agent) {
		ag.tools = tb
	}
}
// WithRequestOptions sets default request options (temperature, max tokens, etc.)
// applied to every completion call the agent makes.
func WithRequestOptions(opts ...llm.RequestOption) Option {
	return func(ag *Agent) {
		ag.reqOpts = opts
	}
}
// New creates an agent with the given model and system prompt.
// Any options are applied in the order given.
func New(model *llm.Model, system string, opts ...Option) *Agent {
	ag := &Agent{model: model, system: system}
	for _, apply := range opts {
		apply(ag)
	}
	return ag
}
// Run executes the agent with a user prompt. Each call is a fresh conversation.
// The agent loops tool calls automatically until it produces a text response.
// Returns the text response, accumulated token usage, and any error.
func (a *Agent) Run(ctx context.Context, prompt string) (string, *llm.Usage, error) {
	// A single-prompt run is just a one-message conversation.
	msgs := []llm.Message{llm.UserMessage(prompt)}
	return a.RunMessages(ctx, msgs)
}
// RunMessages executes the agent with full message control.
// Each call is a fresh conversation. The agent loops tool calls automatically.
// Returns the text response, accumulated token usage, and any error.
func (a *Agent) RunMessages(ctx context.Context, messages []llm.Message) (string, *llm.Usage, error) {
	chat := llm.NewChat(a.model, a.reqOpts...)
	if a.system != "" {
		chat.SetSystem(a.system)
	}
	if a.tools != nil {
		chat.SetTools(a.tools)
	}

	// No input at all — kick off the completion loop with an empty user message.
	if len(messages) == 0 {
		return chat.Send(ctx, "")
	}

	// Everything except the final message is appended as conversation context;
	// the final message is the one that triggers the completion loop.
	last := len(messages) - 1
	for _, msg := range messages[:last] {
		chat.AddToolResults(msg) // AddToolResults just appends to history
	}
	return chat.SendMessage(ctx, messages[last])
}
// delegateParams is the parameter struct for the tool created by AsTool.
// The single Input field becomes the tool's JSON argument schema.
type delegateParams struct {
	// Input is the prompt forwarded verbatim to the delegated agent's Run.
	Input string `json:"input" description:"The task or question to delegate to this agent"`
}
// AsTool creates a llm.Tool that delegates to this agent.
// When a parent agent calls this tool, it runs the agent with the provided input
// as the prompt and returns the agent's text response as the tool result.
//
// This enables sub-agent patterns where a parent agent can spawn specialized
// child agents through the normal tool-call mechanism.
func (a *Agent) AsTool(name, description string) llm.Tool {
	// Usage from the delegated run is discarded; only the text flows back
	// to the parent as the tool result.
	handler := func(ctx context.Context, p delegateParams) (string, error) {
		text, _, err := a.Run(ctx, p.Input)
		return text, err
	}
	return llm.Define[delegateParams](name, description, handler)
}