v2 is a new Go module (v2/) with a dramatically simpler API:

- Unified Message type (no more Input marker interface)
- Define[T] for ergonomic tool creation with standard context.Context
- Chat session with automatic tool-call loop (agent loop)
- Streaming via pull-based StreamReader
- MCP one-call connect (MCPStdioServer, MCPHTTPServer, MCPSSEServer)
- Middleware support (logging, retry, timeout, usage tracking)
- Decoupled JSON Schema (map[string]any, no provider coupling)
- Sample tools: WebSearch, Browser, Exec, ReadFile, WriteFile, HTTP
- Providers: OpenAI, Anthropic, Google (all with streaming)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
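A minimal usage sketch of the Chat flow listed in chat.go below. It only uses functions defined in this file and assumes a *Model and a *ToolBox have already been constructed elsewhere in the module (their constructors are not shown here); the function name and prompts are placeholders.

// Usage sketch (not part of chat.go): m and tb are assumed to come from
// the module's model/tool constructors, which live outside this file.
func askWithTools(ctx context.Context, m *Model, tb *ToolBox) (string, error) {
	chat := NewChat(m)
	chat.SetSystem("You are a concise assistant.")
	// With a ToolBox set, Send runs the agent loop: tool calls are executed
	// automatically until the model returns plain text.
	chat.SetTools(tb)
	return chat.Send(ctx, "Summarize the repository layout.")
}

With no ToolBox configured, a tool-calling response instead surfaces as ErrNoToolsConfigured, as SendMessage below shows.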
package llm

import (
	"context"
	"fmt"
)

// Chat manages a multi-turn conversation with automatic history tracking
// and optional automatic tool-call execution.
type Chat struct {
	model    *Model
	messages []Message
	tools    *ToolBox
	opts     []RequestOption
}

// NewChat creates a new conversation with the given model.
func NewChat(model *Model, opts ...RequestOption) *Chat {
	return &Chat{
		model: model,
		opts:  opts,
	}
}

// SetSystem sets or replaces the system message.
func (c *Chat) SetSystem(text string) {
	filtered := make([]Message, 0, len(c.messages)+1)
	for _, m := range c.messages {
		if m.Role != RoleSystem {
			filtered = append(filtered, m)
		}
	}
	c.messages = append([]Message{SystemMessage(text)}, filtered...)
}

// SetTools configures the tools available for this chat.
func (c *Chat) SetTools(tb *ToolBox) {
	c.tools = tb
}

// Send sends a user message and returns the assistant's text response.
// If the model calls tools, they are executed automatically and the loop
// continues until the model produces a text response (the "agent loop").
func (c *Chat) Send(ctx context.Context, text string) (string, error) {
	return c.SendMessage(ctx, UserMessage(text))
}

// SendWithImages sends a user message with images attached.
func (c *Chat) SendWithImages(ctx context.Context, text string, images ...Image) (string, error) {
	return c.SendMessage(ctx, UserMessageWithImages(text, images...))
}

// SendMessage sends an arbitrary message and returns the final text response.
// Handles the full tool-call loop automatically.
func (c *Chat) SendMessage(ctx context.Context, msg Message) (string, error) {
	c.messages = append(c.messages, msg)

	opts := c.buildOpts()

	for {
		resp, err := c.model.Complete(ctx, c.messages, opts...)
		if err != nil {
			return "", fmt.Errorf("completion failed: %w", err)
		}

		c.messages = append(c.messages, resp.Message())

		if !resp.HasToolCalls() {
			return resp.Text, nil
		}

		if c.tools == nil {
			return "", ErrNoToolsConfigured
		}

		toolResults, err := c.tools.ExecuteAll(ctx, resp.ToolCalls)
		if err != nil {
			return "", fmt.Errorf("tool execution failed: %w", err)
		}

		c.messages = append(c.messages, toolResults...)
	}
}

// SendRaw sends a message and returns the raw Response without automatic tool execution.
// Useful when you want to handle tool calls manually.
func (c *Chat) SendRaw(ctx context.Context, msg Message) (Response, error) {
	c.messages = append(c.messages, msg)

	opts := c.buildOpts()

	resp, err := c.model.Complete(ctx, c.messages, opts...)
	if err != nil {
		return Response{}, err
	}

	c.messages = append(c.messages, resp.Message())
	return resp, nil
}

// SendStream sends a user message and returns a StreamReader for streaming responses.
func (c *Chat) SendStream(ctx context.Context, text string) (*StreamReader, error) {
	c.messages = append(c.messages, UserMessage(text))

	opts := c.buildOpts()

	cfg := &requestConfig{}
	for _, opt := range opts {
		opt(cfg)
	}

	req := buildProviderRequest(c.model.model, c.messages, cfg)
	return newStreamReader(ctx, c.model.provider, req)
}

// AddToolResults manually adds tool results to the conversation.
// Use with SendRaw when handling tool calls manually.
func (c *Chat) AddToolResults(results ...Message) {
	c.messages = append(c.messages, results...)
}
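
// handleOneToolRound is a usage sketch added for illustration (not part of
// the original file). It shows how SendRaw, a ToolBox, and AddToolResults
// combine when handling tool calls manually, as the doc comments above
// describe: one model turn, execute any requested tools yourself, and
// record the results so a follow-up Send*/SendRaw call can continue.
func handleOneToolRound(ctx context.Context, c *Chat, tb *ToolBox, prompt string) (Response, error) {
	// Ask for a single completion without the automatic agent loop.
	resp, err := c.SendRaw(ctx, UserMessage(prompt))
	if err != nil {
		return Response{}, err
	}
	if !resp.HasToolCalls() {
		return resp, nil // plain text answer, nothing to execute
	}
	// Run the requested tools (here via ToolBox, as SendMessage does) and
	// append their results to the conversation history.
	results, err := tb.ExecuteAll(ctx, resp.ToolCalls)
	if err != nil {
		return Response{}, err
	}
	c.AddToolResults(results...)
	return resp, nil
}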

// Messages returns the current conversation history (read-only copy).
func (c *Chat) Messages() []Message {
	cp := make([]Message, len(c.messages))
	copy(cp, c.messages)
	return cp
}

// Reset clears the conversation history.
func (c *Chat) Reset() {
	c.messages = nil
}

// Fork creates a copy of this chat with identical history, for branching conversations.
func (c *Chat) Fork() *Chat {
	c2 := &Chat{
		model:    c.model,
		messages: make([]Message, len(c.messages)),
		tools:    c.tools,
		opts:     c.opts,
	}
	copy(c2.messages, c.messages)
	return c2
}
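
// forkBranches is a usage sketch added for illustration (not part of the
// original file). It shows the branching pattern Fork is meant for: the
// fork receives a copy of the history, so the two follow-up questions do
// not see each other's answers. The prompts are placeholders.
func forkBranches(ctx context.Context, c *Chat) (string, string, error) {
	branch := c.Fork()
	short, err := c.Send(ctx, "Summarize that in one sentence.")
	if err != nil {
		return "", "", err
	}
	long, err := branch.Send(ctx, "Explain that in detail instead.")
	if err != nil {
		return "", "", err
	}
	return short, long, nil
}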

func (c *Chat) buildOpts() []RequestOption {
	opts := make([]RequestOption, len(c.opts))
	copy(opts, c.opts)
	if c.tools != nil {
		opts = append(opts, WithTools(c.tools))
	}
	return opts
}