Add agent sub-package for composable LLM agents
Introduces v2/agent with a minimal API: Agent, New(), Run(), and AsTool(). Agents wrap a model + system prompt + tools. AsTool() turns an agent into a llm.Tool, enabling parent agents to delegate to sub-agents through the normal tool-call loop — no channels, pools, or orchestration needed. Also exports NewClient(provider.Provider) for custom provider integration. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
113
v2/agent/agent.go
Normal file
113
v2/agent/agent.go
Normal file
@@ -0,0 +1,113 @@
// Package agent provides a simple agent abstraction built on top of go-llm.
//
// An Agent wraps a model, system prompt, and tools into a reusable unit.
// Agents can be turned into tools via AsTool, enabling parent agents to
// delegate work to specialized sub-agents through the normal tool-call loop.
//
// Example — orchestrator with sub-agents:
//
//	researcher := agent.New(model, "You research topics via web search.",
//		agent.WithTools(llm.NewToolBox(tools.WebSearch(apiKey))),
//	)
//	coder := agent.New(model, "You write and run code.",
//		agent.WithTools(llm.NewToolBox(tools.Exec())),
//	)
//	orchestrator := agent.New(model, "You coordinate research and coding tasks.",
//		agent.WithTools(llm.NewToolBox(
//			researcher.AsTool("research", "Research a topic"),
//			coder.AsTool("code", "Write and run code"),
//		)),
//	)
//	result, err := orchestrator.Run(ctx, "Build a fibonacci function in Go")
package agent
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
llm "gitea.stevedudenhoeffer.com/steve/go-llm/v2"
|
||||
)
|
||||
|
||||
// Agent is a configured LLM agent with a system prompt and tools.
// Each call to Run creates a fresh conversation (no state is carried between runs).
type Agent struct {
	model   *llm.Model          // model used for every completion call
	system  string              // system prompt applied to each fresh chat; "" means none
	tools   *llm.ToolBox        // tools exposed to the model; nil means no tools
	reqOpts []llm.RequestOption // default request options passed to every chat
}
|
||||
|
||||
// Option configures an Agent. Options are applied in order by New.
type Option func(*Agent)
|
||||
|
||||
// WithTools sets the tools available to the agent.
|
||||
func WithTools(tb *llm.ToolBox) Option {
|
||||
return func(a *Agent) { a.tools = tb }
|
||||
}
|
||||
|
||||
// WithRequestOptions sets default request options (temperature, max tokens, etc.)
|
||||
// applied to every completion call the agent makes.
|
||||
func WithRequestOptions(opts ...llm.RequestOption) Option {
|
||||
return func(a *Agent) { a.reqOpts = opts }
|
||||
}
|
||||
|
||||
// New creates an agent with the given model and system prompt.
|
||||
func New(model *llm.Model, system string, opts ...Option) *Agent {
|
||||
a := &Agent{
|
||||
model: model,
|
||||
system: system,
|
||||
}
|
||||
for _, opt := range opts {
|
||||
opt(a)
|
||||
}
|
||||
return a
|
||||
}
|
||||
|
||||
// Run executes the agent with a user prompt. Each call is a fresh conversation.
|
||||
// The agent loops tool calls automatically until it produces a text response.
|
||||
func (a *Agent) Run(ctx context.Context, prompt string) (string, error) {
|
||||
return a.RunMessages(ctx, []llm.Message{llm.UserMessage(prompt)})
|
||||
}
|
||||
|
||||
// RunMessages executes the agent with full message control.
|
||||
// Each call is a fresh conversation. The agent loops tool calls automatically.
|
||||
func (a *Agent) RunMessages(ctx context.Context, messages []llm.Message) (string, error) {
|
||||
chat := llm.NewChat(a.model, a.reqOpts...)
|
||||
if a.system != "" {
|
||||
chat.SetSystem(a.system)
|
||||
}
|
||||
if a.tools != nil {
|
||||
chat.SetTools(a.tools)
|
||||
}
|
||||
|
||||
// Send each message; the last one triggers the completion loop.
|
||||
// All but the last are added as context.
|
||||
for i, msg := range messages {
|
||||
if i < len(messages)-1 {
|
||||
chat.AddToolResults(msg) // AddToolResults just appends to history
|
||||
continue
|
||||
}
|
||||
return chat.SendMessage(ctx, msg)
|
||||
}
|
||||
|
||||
// Empty messages — send an empty user message
|
||||
return chat.Send(ctx, "")
|
||||
}
|
||||
|
||||
// delegateParams is the parameter struct for the tool created by AsTool.
type delegateParams struct {
	// Input is the task or question forwarded to the sub-agent as its prompt.
	Input string `json:"input" description:"The task or question to delegate to this agent"`
}
|
||||
|
||||
// AsTool creates a llm.Tool that delegates to this agent.
|
||||
// When a parent agent calls this tool, it runs the agent with the provided input
|
||||
// as the prompt and returns the agent's text response as the tool result.
|
||||
//
|
||||
// This enables sub-agent patterns where a parent agent can spawn specialized
|
||||
// child agents through the normal tool-call mechanism.
|
||||
func (a *Agent) AsTool(name, description string) llm.Tool {
|
||||
return llm.Define[delegateParams](name, description,
|
||||
func(ctx context.Context, p delegateParams) (string, error) {
|
||||
return a.Run(ctx, p.Input)
|
||||
},
|
||||
)
|
||||
}
|
||||
Reference in New Issue
Block a user