Add agent sub-package for composable LLM agents

Introduces v2/agent with a minimal API: Agent, New(), Run(), and AsTool().
Agents wrap a model + system prompt + tools. AsTool() turns an agent into
an llm.Tool, enabling parent agents to delegate to sub-agents through the
normal tool-call loop — no channels, pools, or orchestration needed.

Also exports NewClient(provider.Provider) for custom provider integration.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-07 23:17:19 -05:00
parent be572a76f4
commit 87ec56a2be
6 changed files with 472 additions and 7 deletions
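
For orientation, a minimal end-to-end sketch of the API this commit introduces (provider, model name, and prompt are illustrative; the package doc below carries the fuller orchestrator example):

	// Minimal single-agent usage sketch; names here are illustrative.
	package main

	import (
		"context"
		"fmt"
		"os"

		llm "gitea.stevedudenhoeffer.com/steve/go-llm/v2"
		"gitea.stevedudenhoeffer.com/steve/go-llm/v2/agent"
	)

	func main() {
		model := llm.OpenAI(os.Getenv("OPENAI_API_KEY")).Model("gpt-4o")
		assistant := agent.New(model, "You are a concise assistant.",
			agent.WithRequestOptions(llm.WithTemperature(0.2)),
		)
		answer, err := assistant.Run(context.Background(), "Summarize what the agent package does.")
		if err != nil {
			fmt.Println("Error:", err)
			return
		}
		fmt.Println(answer)
	}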

v2/agent/agent.go (new file, 113 lines)

@@ -0,0 +1,113 @@
// Package agent provides a simple agent abstraction built on top of go-llm.
//
// An Agent wraps a model, system prompt, and tools into a reusable unit.
// Agents can be turned into tools via AsTool, enabling parent agents to
// delegate work to specialized sub-agents through the normal tool-call loop.
//
// Example — orchestrator with sub-agents:
//
// researcher := agent.New(model, "You research topics via web search.",
// agent.WithTools(llm.NewToolBox(tools.WebSearch(apiKey))),
// )
// coder := agent.New(model, "You write and run code.",
// agent.WithTools(llm.NewToolBox(tools.Exec())),
// )
// orchestrator := agent.New(model, "You coordinate research and coding tasks.",
// agent.WithTools(llm.NewToolBox(
// researcher.AsTool("research", "Research a topic"),
// coder.AsTool("code", "Write and run code"),
// )),
// )
// result, err := orchestrator.Run(ctx, "Build a fibonacci function in Go")
package agent
import (
"context"
llm "gitea.stevedudenhoeffer.com/steve/go-llm/v2"
)
// Agent is a configured LLM agent with a system prompt and tools.
// Each call to Run creates a fresh conversation (no state is carried between runs).
type Agent struct {
model *llm.Model
system string
tools *llm.ToolBox
reqOpts []llm.RequestOption
}
// Option configures an Agent.
type Option func(*Agent)
// WithTools sets the tools available to the agent.
func WithTools(tb *llm.ToolBox) Option {
return func(a *Agent) { a.tools = tb }
}
// WithRequestOptions sets default request options (temperature, max tokens, etc.)
// applied to every completion call the agent makes.
func WithRequestOptions(opts ...llm.RequestOption) Option {
return func(a *Agent) { a.reqOpts = opts }
}
// New creates an agent with the given model and system prompt.
func New(model *llm.Model, system string, opts ...Option) *Agent {
a := &Agent{
model: model,
system: system,
}
for _, opt := range opts {
opt(a)
}
return a
}
// Run executes the agent with a user prompt. Each call is a fresh conversation.
// The agent loops tool calls automatically until it produces a text response.
func (a *Agent) Run(ctx context.Context, prompt string) (string, error) {
return a.RunMessages(ctx, []llm.Message{llm.UserMessage(prompt)})
}
// RunMessages executes the agent with full message control.
// Each call is a fresh conversation. The agent loops tool calls automatically.
func (a *Agent) RunMessages(ctx context.Context, messages []llm.Message) (string, error) {
chat := llm.NewChat(a.model, a.reqOpts...)
if a.system != "" {
chat.SetSystem(a.system)
}
if a.tools != nil {
chat.SetTools(a.tools)
}
// Send each message; the last one triggers the completion loop.
// All but the last are added as context.
for i, msg := range messages {
if i < len(messages)-1 {
chat.AddToolResults(msg) // AddToolResults just appends to history
continue
}
return chat.SendMessage(ctx, msg)
}
// Empty messages — send an empty user message
return chat.Send(ctx, "")
}
// delegateParams is the parameter struct for the tool created by AsTool.
type delegateParams struct {
Input string `json:"input" description:"The task or question to delegate to this agent"`
}
// AsTool creates an llm.Tool that delegates to this agent.
// When a parent agent calls this tool, it runs the agent with the provided input
// as the prompt and returns the agent's text response as the tool result.
//
// This enables sub-agent patterns where a parent agent can spawn specialized
// child agents through the normal tool-call mechanism.
func (a *Agent) AsTool(name, description string) llm.Tool {
return llm.Define[delegateParams](name, description,
func(ctx context.Context, p delegateParams) (string, error) {
return a.Run(ctx, p.Input)
},
)
}

v2/agent/agent_test.go (new file, 244 lines)

@@ -0,0 +1,244 @@
package agent
import (
"context"
"errors"
"sync"
"testing"
llm "gitea.stevedudenhoeffer.com/steve/go-llm/v2"
"gitea.stevedudenhoeffer.com/steve/go-llm/v2/provider"
)
// mockProvider is a test helper that implements provider.Provider.
type mockProvider struct {
mu sync.Mutex
completeFunc func(ctx context.Context, req provider.Request) (provider.Response, error)
requests []provider.Request
}
func (m *mockProvider) Complete(ctx context.Context, req provider.Request) (provider.Response, error) {
m.mu.Lock()
m.requests = append(m.requests, req)
m.mu.Unlock()
return m.completeFunc(ctx, req)
}
func (m *mockProvider) Stream(ctx context.Context, req provider.Request, events chan<- provider.StreamEvent) error {
close(events)
return nil
}
func (m *mockProvider) lastRequest() provider.Request {
m.mu.Lock()
defer m.mu.Unlock()
if len(m.requests) == 0 {
return provider.Request{}
}
return m.requests[len(m.requests)-1]
}
func newMockModel(fn func(ctx context.Context, req provider.Request) (provider.Response, error)) *llm.Model {
mp := &mockProvider{completeFunc: fn}
return llm.NewClient(mp).Model("mock-model")
}
func newSimpleMockModel(text string) *llm.Model {
return newMockModel(func(ctx context.Context, req provider.Request) (provider.Response, error) {
return provider.Response{Text: text}, nil
})
}
func TestAgent_Run(t *testing.T) {
model := newSimpleMockModel("Hello from agent!")
a := New(model, "You are a helpful assistant.")
result, err := a.Run(context.Background(), "Say hello")
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
if result != "Hello from agent!" {
t.Errorf("expected 'Hello from agent!', got %q", result)
}
}
func TestAgent_Run_WithTools(t *testing.T) {
callCount := 0
model := newMockModel(func(ctx context.Context, req provider.Request) (provider.Response, error) {
callCount++
if callCount == 1 {
// First call: model requests a tool call
return provider.Response{
ToolCalls: []provider.ToolCall{
{ID: "tc1", Name: "greet", Arguments: `{}`},
},
}, nil
}
// Second call: model returns text after seeing tool result
return provider.Response{Text: "Tool said: hello!"}, nil
})
tool := llm.DefineSimple("greet", "Says hello", func(ctx context.Context) (string, error) {
return "hello!", nil
})
a := New(model, "You are helpful.", WithTools(llm.NewToolBox(tool)))
result, err := a.Run(context.Background(), "Use the greet tool")
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
if result != "Tool said: hello!" {
t.Errorf("expected 'Tool said: hello!', got %q", result)
}
if callCount != 2 {
t.Errorf("expected 2 calls (tool loop), got %d", callCount)
}
}
func TestAgent_AsTool(t *testing.T) {
// Create a child agent
childModel := newSimpleMockModel("child result: 42")
child := New(childModel, "You compute things.")
// Create the tool from the child agent
childTool := child.AsTool("compute", "Delegate computation to child agent")
// Verify tool metadata
if childTool.Name != "compute" {
t.Errorf("expected tool name 'compute', got %q", childTool.Name)
}
if childTool.Description != "Delegate computation to child agent" {
t.Errorf("expected correct description, got %q", childTool.Description)
}
// Execute the tool directly (simulating what the parent's Chat.Send loop does)
result, err := childTool.Execute(context.Background(), `{"input":"what is 6*7?"}`)
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
if result != "child result: 42" {
t.Errorf("expected 'child result: 42', got %q", result)
}
}
func TestAgent_AsTool_ParentChild(t *testing.T) {
// Child agent that always returns a fixed result
childModel := newSimpleMockModel("researched: Go generics are great")
child := New(childModel, "You are a researcher.")
// Parent agent: first call returns tool call, second returns text
parentCallCount := 0
parentModel := newMockModel(func(ctx context.Context, req provider.Request) (provider.Response, error) {
parentCallCount++
if parentCallCount == 1 {
return provider.Response{
ToolCalls: []provider.ToolCall{
{ID: "tc1", Name: "research", Arguments: `{"input":"Tell me about Go generics"}`},
},
}, nil
}
// After getting tool result, parent synthesizes final answer
return provider.Response{Text: "Based on research: Go generics are great"}, nil
})
parent := New(parentModel, "You coordinate tasks.",
WithTools(llm.NewToolBox(
child.AsTool("research", "Research a topic"),
)),
)
result, err := parent.Run(context.Background(), "Tell me about Go generics")
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
if result != "Based on research: Go generics are great" {
t.Errorf("expected synthesized result, got %q", result)
}
if parentCallCount != 2 {
t.Errorf("expected 2 parent calls (tool loop), got %d", parentCallCount)
}
}
func TestAgent_RunMessages(t *testing.T) {
model := newSimpleMockModel("I see the system and user messages")
a := New(model, "You are helpful.")
messages := []llm.Message{
llm.UserMessage("First question"),
llm.AssistantMessage("First answer"),
llm.UserMessage("Follow up"),
}
result, err := a.RunMessages(context.Background(), messages)
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
if result != "I see the system and user messages" {
t.Errorf("unexpected result: %q", result)
}
}
func TestAgent_ContextCancellation(t *testing.T) {
model := newMockModel(func(ctx context.Context, req provider.Request) (provider.Response, error) {
return provider.Response{}, ctx.Err()
})
a := New(model, "You are helpful.")
ctx, cancel := context.WithCancel(context.Background())
cancel() // Cancel immediately
_, err := a.Run(ctx, "This should fail")
if err == nil {
t.Fatal("expected error from cancelled context")
}
}
func TestAgent_WithRequestOptions(t *testing.T) {
var capturedReq provider.Request
model := newMockModel(func(ctx context.Context, req provider.Request) (provider.Response, error) {
capturedReq = req
return provider.Response{Text: "ok"}, nil
})
a := New(model, "You are helpful.",
WithRequestOptions(llm.WithTemperature(0.3), llm.WithMaxTokens(100)),
)
_, err := a.Run(context.Background(), "test")
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
if capturedReq.Temperature == nil || *capturedReq.Temperature != 0.3 {
t.Errorf("expected temperature 0.3, got %v", capturedReq.Temperature)
}
if capturedReq.MaxTokens == nil || *capturedReq.MaxTokens != 100 {
t.Errorf("expected maxTokens 100, got %v", capturedReq.MaxTokens)
}
}
func TestAgent_Run_Error(t *testing.T) {
wantErr := errors.New("model failed")
model := newMockModel(func(ctx context.Context, req provider.Request) (provider.Response, error) {
return provider.Response{}, wantErr
})
a := New(model, "You are helpful.")
_, err := a.Run(context.Background(), "test")
if err == nil {
t.Fatal("expected error, got nil")
}
}
func TestAgent_EmptySystem(t *testing.T) {
model := newSimpleMockModel("no system prompt")
a := New(model, "") // Empty system prompt
result, err := a.Run(context.Background(), "test")
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
if result != "no system prompt" {
t.Errorf("unexpected result: %q", result)
}
}

v2/agent/example_test.go (new file, 107 lines)

@@ -0,0 +1,107 @@
package agent_test
import (
"context"
"fmt"
"os"
llm "gitea.stevedudenhoeffer.com/steve/go-llm/v2"
"gitea.stevedudenhoeffer.com/steve/go-llm/v2/agent"
"gitea.stevedudenhoeffer.com/steve/go-llm/v2/tools"
)
// A researcher agent that can search the web and browse pages.
func Example_researcher() {
model := llm.OpenAI(os.Getenv("OPENAI_API_KEY")).Model("gpt-4o")
researcher := agent.New(model,
"You are a research assistant. Use web search to find information, "+
"then use the browser to read full articles when needed. "+
"Provide a concise summary of your findings.",
agent.WithTools(llm.NewToolBox(
tools.WebSearch(os.Getenv("BRAVE_API_KEY")),
tools.Browser(),
)),
agent.WithRequestOptions(llm.WithTemperature(0.3)),
)
result, err := researcher.Run(context.Background(), "What are the latest developments in Go generics?")
if err != nil {
fmt.Println("Error:", err)
return
}
fmt.Println(result)
}
// A coder agent that can read, write, and execute code.
func Example_coder() {
model := llm.OpenAI(os.Getenv("OPENAI_API_KEY")).Model("gpt-4o")
coder := agent.New(model,
"You are a coding assistant. You can read files, write files, and execute commands. "+
"When asked to create a program, write the code to a file and then run it to verify it works.",
agent.WithTools(llm.NewToolBox(
tools.ReadFile(),
tools.WriteFile(),
tools.Exec(
tools.WithAllowedCommands([]string{"go", "python", "node", "cat", "ls"}),
tools.WithWorkDir(os.TempDir()),
),
)),
)
result, err := coder.Run(context.Background(),
"Create a Go program that prints the first 10 Fibonacci numbers. Save it and run it.")
if err != nil {
fmt.Println("Error:", err)
return
}
fmt.Println(result)
}
// An orchestrator agent that delegates to specialized sub-agents.
// The orchestrator breaks a complex task into subtasks and dispatches them
// to the appropriate sub-agent via tool calls.
func Example_orchestrator() {
model := llm.OpenAI(os.Getenv("OPENAI_API_KEY")).Model("gpt-4o")
// Specialized sub-agents
researcher := agent.New(model,
"You are a research assistant. Search the web for information on the given topic "+
"and return a concise summary.",
agent.WithTools(llm.NewToolBox(
tools.WebSearch(os.Getenv("BRAVE_API_KEY")),
)),
)
coder := agent.New(model,
"You are a coding assistant. Write and test code as requested. "+
"Save files and run them to verify they work.",
agent.WithTools(llm.NewToolBox(
tools.ReadFile(),
tools.WriteFile(),
tools.Exec(tools.WithAllowedCommands([]string{"go", "python"})),
)),
)
// Orchestrator can delegate to both sub-agents
orchestrator := agent.New(model,
"You are a project manager. Break complex tasks into research and coding subtasks. "+
"Use delegate_research for information gathering and delegate_coding for implementation. "+
"Synthesize the results into a final answer.",
agent.WithTools(llm.NewToolBox(
researcher.AsTool("delegate_research",
"Delegate a research task. Provide a clear question or topic to research."),
coder.AsTool("delegate_coding",
"Delegate a coding task. Provide clear requirements for what to implement."),
)),
)
result, err := orchestrator.Run(context.Background(),
"Research how to implement a binary search tree in Go, then create one with insert and search operations.")
if err != nil {
fmt.Println("Error:", err)
return
}
fmt.Println(result)
}


@@ -16,7 +16,7 @@ func OpenAI(apiKey string, opts ...ClientOption) *Client {
for _, opt := range opts {
opt(cfg)
}
-return newClient(openaiProvider.New(apiKey, cfg.baseURL))
+return NewClient(openaiProvider.New(apiKey, cfg.baseURL))
}
// Anthropic creates an Anthropic client.
@@ -30,7 +30,7 @@ func Anthropic(apiKey string, opts ...ClientOption) *Client {
opt(cfg)
}
_ = cfg // Anthropic doesn't support custom base URL in the SDK
-return newClient(anthProvider.New(apiKey))
+return NewClient(anthProvider.New(apiKey))
}
// Google creates a Google (Gemini) client.
@@ -44,5 +44,5 @@ func Google(apiKey string, opts ...ClientOption) *Client {
opt(cfg)
}
_ = cfg // Google doesn't support custom base URL in the SDK
-return newClient(googleProvider.New(apiKey))
+return NewClient(googleProvider.New(apiKey))
}


@@ -12,8 +12,9 @@ type Client struct {
middleware []Middleware
}
-// newClient creates a Client backed by the given provider.
-func newClient(p provider.Provider) *Client {
+// NewClient creates a Client backed by the given provider.
+// Use this to integrate custom provider implementations or for testing.
+func NewClient(p provider.Provider) *Client {
return &Client{p: p}
}
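
With NewClient exported, a custom backend only needs to satisfy provider.Provider. A minimal sketch, assuming the interface is the Complete/Stream pair that the mockProvider in agent_test.go implements; echoProvider and its canned reply are illustrative only:

	// Sketch: a trivial custom provider wired in through the exported NewClient.
	package main

	import (
		"context"
		"fmt"

		llm "gitea.stevedudenhoeffer.com/steve/go-llm/v2"
		"gitea.stevedudenhoeffer.com/steve/go-llm/v2/agent"
		"gitea.stevedudenhoeffer.com/steve/go-llm/v2/provider"
	)

	type echoProvider struct{}

	func (echoProvider) Complete(ctx context.Context, req provider.Request) (provider.Response, error) {
		// A real provider would call its API here; this one returns a canned reply.
		return provider.Response{Text: "echo"}, nil
	}

	func (echoProvider) Stream(ctx context.Context, req provider.Request, events chan<- provider.StreamEvent) error {
		close(events) // streaming not supported in this sketch
		return nil
	}

	func main() {
		model := llm.NewClient(echoProvider{}).Model("echo-model")
		out, err := agent.New(model, "You are a test agent.").Run(context.Background(), "ping")
		if err != nil {
			fmt.Println("Error:", err)
			return
		}
		fmt.Println(out) // prints "echo"
	}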


@@ -117,7 +117,7 @@ func TestModel_Complete_WithTools(t *testing.T) {
func TestClient_Model(t *testing.T) {
mp := newMockProvider(provider.Response{Text: "hi"})
-client := newClient(mp)
+client := NewClient(mp)
model := client.Model("test-model")
resp, err := model.Complete(context.Background(), []Message{UserMessage("test")})
@@ -144,7 +144,7 @@ func TestClient_WithMiddleware(t *testing.T) {
}
mp := newMockProvider(provider.Response{Text: "ok"})
-client := newClient(mp).WithMiddleware(mw)
+client := NewClient(mp).WithMiddleware(mw)
model := client.Model("test-model")
_, err := model.Complete(context.Background(), []Message{UserMessage("test")})