Compare commits
5 Commits
1927f4d187
...
9c1b4f7e9f
| Author | SHA1 | Date | |
|---|---|---|---|
| 9c1b4f7e9f | |||
| 2cf75ae07d | |||
| 97d54c10ae | |||
| bf7c86ab2a | |||
| be99af3597 |
4
.gitignore
vendored
Normal file
4
.gitignore
vendored
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
.claude
|
||||||
|
.idea
|
||||||
|
*.exe
|
||||||
|
.env
|
||||||
88
CLAUDE.md
Normal file
88
CLAUDE.md
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
# CLAUDE.md for go-llm
|
||||||
|
|
||||||
|
## Build and Test Commands
|
||||||
|
- Build project: `go build ./...`
|
||||||
|
- Run all tests: `go test ./...`
|
||||||
|
- Run specific test: `go test -v -run <TestName> ./...`
|
||||||
|
- Tidy dependencies: `go mod tidy`
|
||||||
|
|
||||||
|
## Code Style Guidelines
|
||||||
|
- **Indentation**: Use standard Go tabs for indentation.
|
||||||
|
- **Naming**:
|
||||||
|
- Use `camelCase` for internal/private variables and functions.
|
||||||
|
- Use `PascalCase` for exported types, functions, and struct fields.
|
||||||
|
- Interface names should be concise (e.g., `LLM`, `ChatCompletion`).
|
||||||
|
- **Error Handling**:
|
||||||
|
- Always check and handle errors immediately.
|
||||||
|
- Wrap errors with context using `fmt.Errorf("%w: ...", err)`.
|
||||||
|
- Use the project's internal `Error` struct in `error.go` when differentiating between error types is needed.
|
||||||
|
- **Project Structure**:
|
||||||
|
- `llm.go`: Contains core interfaces (`LLM`, `ChatCompletion`) and shared types (`Message`, `Role`, `Image`).
|
||||||
|
- Provider implementations are in `openai.go`, `anthropic.go`, and `google.go`.
|
||||||
|
- Schema definitions for tool calling are in the `schema/` directory.
|
||||||
|
- `mcp.go`: MCP (Model Context Protocol) client integration for connecting to MCP servers.
|
||||||
|
- **Imports**: Organize imports into groups: standard library, then third-party libraries.
|
||||||
|
- **Documentation**: Use standard Go doc comments for exported symbols.
|
||||||
|
- **README.md**: The README.md file should always be kept up to date with any significant changes to the project.
|
||||||
|
|
||||||
|
## CLI Tool
|
||||||
|
- Build CLI: `go build ./cmd/llm`
|
||||||
|
- Run CLI: `./llm` (or `llm.exe` on Windows)
|
||||||
|
- Run without building: `go run ./cmd/llm`
|
||||||
|
|
||||||
|
### CLI Features
|
||||||
|
- Interactive TUI for testing all go-llm features
|
||||||
|
- Support for OpenAI, Anthropic, and Google providers
|
||||||
|
- Image input (file path, URL, or base64)
|
||||||
|
- Tool/function calling with demo tools
|
||||||
|
- Temperature control and settings
|
||||||
|
|
||||||
|
### Key Bindings
|
||||||
|
- `Enter` - Send message
|
||||||
|
- `Ctrl+I` - Add image
|
||||||
|
- `Ctrl+T` - Toggle tools panel
|
||||||
|
- `Ctrl+P` - Change provider
|
||||||
|
- `Ctrl+M` - Change model
|
||||||
|
- `Ctrl+S` - Settings
|
||||||
|
- `Ctrl+N` - New conversation
|
||||||
|
- `Esc` - Exit/Cancel
|
||||||
|
|
||||||
|
## MCP (Model Context Protocol) Support
|
||||||
|
|
||||||
|
The library supports connecting to MCP servers to use their tools. MCP servers can be connected via:
|
||||||
|
- **stdio**: Run a command as a subprocess
|
||||||
|
- **sse**: Connect to an SSE endpoint
|
||||||
|
- **http**: Connect to a streamable HTTP endpoint
|
||||||
|
|
||||||
|
### Usage Example
|
||||||
|
```go
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
// Create and connect to an MCP server
|
||||||
|
server := &llm.MCPServer{
|
||||||
|
Name: "my-server",
|
||||||
|
Command: "my-mcp-server",
|
||||||
|
Args: []string{"--some-flag"},
|
||||||
|
}
|
||||||
|
if err := server.Connect(ctx); err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
defer server.Close()
|
||||||
|
|
||||||
|
// Add the server to a toolbox
|
||||||
|
toolbox := llm.NewToolBox().WithMCPServer(server)
|
||||||
|
|
||||||
|
// Use the toolbox in requests - MCP tools are automatically available
|
||||||
|
req := llm.Request{
|
||||||
|
Messages: []llm.Message{{Role: llm.RoleUser, Text: "Use the MCP tool"}},
|
||||||
|
Toolbox: toolbox,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### MCPServer Options
|
||||||
|
- `Name`: Friendly name for logging
|
||||||
|
- `Command`: Command to run (for stdio transport)
|
||||||
|
- `Args`: Command arguments
|
||||||
|
- `Env`: Additional environment variables
|
||||||
|
- `URL`: Endpoint URL (for sse/http transport)
|
||||||
|
- `Transport`: "stdio" (default), "sse", or "http"
|
||||||
20
anthropic.go
20
anthropic.go
@@ -1,4 +1,4 @@
|
|||||||
package go_llm
|
package llm
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
@@ -10,19 +10,19 @@ import (
|
|||||||
"log/slog"
|
"log/slog"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"gitea.stevedudenhoeffer.com/steve/go-llm/utils"
|
"gitea.stevedudenhoeffer.com/steve/go-llm/internal/imageutil"
|
||||||
|
|
||||||
anth "github.com/liushuangls/go-anthropic/v2"
|
anth "github.com/liushuangls/go-anthropic/v2"
|
||||||
)
|
)
|
||||||
|
|
||||||
type anthropic struct {
|
type anthropicImpl struct {
|
||||||
key string
|
key string
|
||||||
model string
|
model string
|
||||||
}
|
}
|
||||||
|
|
||||||
var _ LLM = anthropic{}
|
var _ LLM = anthropicImpl{}
|
||||||
|
|
||||||
func (a anthropic) ModelVersion(modelVersion string) (ChatCompletion, error) {
|
func (a anthropicImpl) ModelVersion(modelVersion string) (ChatCompletion, error) {
|
||||||
a.model = modelVersion
|
a.model = modelVersion
|
||||||
|
|
||||||
// TODO: model verification?
|
// TODO: model verification?
|
||||||
@@ -36,7 +36,7 @@ func deferClose(c io.Closer) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a anthropic) requestToAnthropicRequest(req Request) anth.MessagesRequest {
|
func (a anthropicImpl) requestToAnthropicRequest(req Request) anth.MessagesRequest {
|
||||||
res := anth.MessagesRequest{
|
res := anth.MessagesRequest{
|
||||||
Model: anth.Model(a.model),
|
Model: anth.Model(a.model),
|
||||||
MaxTokens: 1000,
|
MaxTokens: 1000,
|
||||||
@@ -90,7 +90,7 @@ func (a anthropic) requestToAnthropicRequest(req Request) anth.MessagesRequest {
|
|||||||
// Check if image size exceeds 5MiB (5242880 bytes)
|
// Check if image size exceeds 5MiB (5242880 bytes)
|
||||||
if len(raw) >= 5242880 {
|
if len(raw) >= 5242880 {
|
||||||
|
|
||||||
compressed, mime, err := utils.CompressImage(img.Base64, 5*1024*1024)
|
compressed, mime, err := imageutil.CompressImage(img.Base64, 5*1024*1024)
|
||||||
|
|
||||||
// just replace the image with the compressed one
|
// just replace the image with the compressed one
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -157,7 +157,7 @@ func (a anthropic) requestToAnthropicRequest(req Request) anth.MessagesRequest {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, tool := range req.Toolbox.functions {
|
for _, tool := range req.Toolbox.Functions() {
|
||||||
res.Tools = append(res.Tools, anth.ToolDefinition{
|
res.Tools = append(res.Tools, anth.ToolDefinition{
|
||||||
Name: tool.Name,
|
Name: tool.Name,
|
||||||
Description: tool.Description,
|
Description: tool.Description,
|
||||||
@@ -177,7 +177,7 @@ func (a anthropic) requestToAnthropicRequest(req Request) anth.MessagesRequest {
|
|||||||
return res
|
return res
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a anthropic) responseToLLMResponse(in anth.MessagesResponse) Response {
|
func (a anthropicImpl) responseToLLMResponse(in anth.MessagesResponse) Response {
|
||||||
choice := ResponseChoice{}
|
choice := ResponseChoice{}
|
||||||
for _, msg := range in.Content {
|
for _, msg := range in.Content {
|
||||||
|
|
||||||
@@ -212,7 +212,7 @@ func (a anthropic) responseToLLMResponse(in anth.MessagesResponse) Response {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a anthropic) ChatComplete(ctx context.Context, req Request) (Response, error) {
|
func (a anthropicImpl) ChatComplete(ctx context.Context, req Request) (Response, error) {
|
||||||
cl := anth.NewClient(a.key)
|
cl := anth.NewClient(a.key)
|
||||||
|
|
||||||
res, err := cl.CreateMessages(ctx, a.requestToAnthropicRequest(req))
|
res, err := cl.CreateMessages(ctx, a.requestToAnthropicRequest(req))
|
||||||
|
|||||||
11
cmd/llm/.env.example
Normal file
11
cmd/llm/.env.example
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
# go-llm CLI Environment Variables
|
||||||
|
# Copy this file to .env and fill in your API keys
|
||||||
|
|
||||||
|
# OpenAI API Key (https://platform.openai.com/api-keys)
|
||||||
|
OPENAI_API_KEY=
|
||||||
|
|
||||||
|
# Anthropic API Key (https://console.anthropic.com/settings/keys)
|
||||||
|
ANTHROPIC_API_KEY=
|
||||||
|
|
||||||
|
# Google AI API Key (https://aistudio.google.com/apikey)
|
||||||
|
GOOGLE_API_KEY=
|
||||||
182
cmd/llm/commands.go
Normal file
182
cmd/llm/commands.go
Normal file
@@ -0,0 +1,182 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/base64"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
tea "github.com/charmbracelet/bubbletea"
|
||||||
|
|
||||||
|
llm "gitea.stevedudenhoeffer.com/steve/go-llm"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Message types for async operations
|
||||||
|
|
||||||
|
// ChatResponseMsg contains the response from a chat completion
|
||||||
|
type ChatResponseMsg struct {
|
||||||
|
Response llm.Response
|
||||||
|
Err error
|
||||||
|
}
|
||||||
|
|
||||||
|
// ToolExecutionMsg contains results from tool execution
|
||||||
|
type ToolExecutionMsg struct {
|
||||||
|
Results []llm.ToolCallResponse
|
||||||
|
Err error
|
||||||
|
}
|
||||||
|
|
||||||
|
// ImageLoadedMsg contains a loaded image
|
||||||
|
type ImageLoadedMsg struct {
|
||||||
|
Image llm.Image
|
||||||
|
Err error
|
||||||
|
}
|
||||||
|
|
||||||
|
// sendChatRequest sends a chat completion request
|
||||||
|
func sendChatRequest(chat llm.ChatCompletion, req llm.Request) tea.Cmd {
|
||||||
|
return func() tea.Msg {
|
||||||
|
resp, err := chat.ChatComplete(context.Background(), req)
|
||||||
|
return ChatResponseMsg{Response: resp, Err: err}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// executeTools executes tool calls and returns results
|
||||||
|
func executeTools(toolbox llm.ToolBox, req llm.Request, resp llm.ResponseChoice) tea.Cmd {
|
||||||
|
return func() tea.Msg {
|
||||||
|
ctx := llm.NewContext(context.Background(), req, &resp, nil)
|
||||||
|
var results []llm.ToolCallResponse
|
||||||
|
|
||||||
|
for _, call := range resp.Calls {
|
||||||
|
result, err := toolbox.Execute(ctx, call)
|
||||||
|
results = append(results, llm.ToolCallResponse{
|
||||||
|
ID: call.ID,
|
||||||
|
Result: result,
|
||||||
|
Error: err,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return ToolExecutionMsg{Results: results, Err: nil}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// loadImageFromPath loads an image from a file path
|
||||||
|
func loadImageFromPath(path string) tea.Cmd {
|
||||||
|
return func() tea.Msg {
|
||||||
|
// Clean up the path
|
||||||
|
path = strings.TrimSpace(path)
|
||||||
|
path = strings.Trim(path, "\"'")
|
||||||
|
|
||||||
|
// Read the file
|
||||||
|
data, err := os.ReadFile(path)
|
||||||
|
if err != nil {
|
||||||
|
return ImageLoadedMsg{Err: fmt.Errorf("failed to read image file: %w", err)}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Detect content type
|
||||||
|
contentType := http.DetectContentType(data)
|
||||||
|
if !strings.HasPrefix(contentType, "image/") {
|
||||||
|
return ImageLoadedMsg{Err: fmt.Errorf("file is not an image: %s", contentType)}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Base64 encode
|
||||||
|
encoded := base64.StdEncoding.EncodeToString(data)
|
||||||
|
|
||||||
|
return ImageLoadedMsg{
|
||||||
|
Image: llm.Image{
|
||||||
|
Base64: encoded,
|
||||||
|
ContentType: contentType,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// loadImageFromURL loads an image from a URL
|
||||||
|
func loadImageFromURL(url string) tea.Cmd {
|
||||||
|
return func() tea.Msg {
|
||||||
|
url = strings.TrimSpace(url)
|
||||||
|
|
||||||
|
// For URL images, we can just use the URL directly
|
||||||
|
return ImageLoadedMsg{
|
||||||
|
Image: llm.Image{
|
||||||
|
Url: url,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// loadImageFromBase64 loads an image from base64 data
|
||||||
|
func loadImageFromBase64(data string) tea.Cmd {
|
||||||
|
return func() tea.Msg {
|
||||||
|
data = strings.TrimSpace(data)
|
||||||
|
|
||||||
|
// Check if it's a data URL
|
||||||
|
if strings.HasPrefix(data, "data:") {
|
||||||
|
// Parse data URL: data:image/png;base64,....
|
||||||
|
parts := strings.SplitN(data, ",", 2)
|
||||||
|
if len(parts) != 2 {
|
||||||
|
return ImageLoadedMsg{Err: fmt.Errorf("invalid data URL format")}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract content type from first part
|
||||||
|
mediaType := strings.TrimPrefix(parts[0], "data:")
|
||||||
|
mediaType = strings.TrimSuffix(mediaType, ";base64")
|
||||||
|
|
||||||
|
return ImageLoadedMsg{
|
||||||
|
Image: llm.Image{
|
||||||
|
Base64: parts[1],
|
||||||
|
ContentType: mediaType,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Assume it's raw base64, try to detect content type
|
||||||
|
decoded, err := base64.StdEncoding.DecodeString(data)
|
||||||
|
if err != nil {
|
||||||
|
return ImageLoadedMsg{Err: fmt.Errorf("invalid base64 data: %w", err)}
|
||||||
|
}
|
||||||
|
|
||||||
|
contentType := http.DetectContentType(decoded)
|
||||||
|
if !strings.HasPrefix(contentType, "image/") {
|
||||||
|
return ImageLoadedMsg{Err: fmt.Errorf("data is not an image: %s", contentType)}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ImageLoadedMsg{
|
||||||
|
Image: llm.Image{
|
||||||
|
Base64: data,
|
||||||
|
ContentType: contentType,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildRequest builds a chat request from the current state
|
||||||
|
func buildRequest(m *Model, userText string) llm.Request {
|
||||||
|
// Create the user message with any pending images
|
||||||
|
userMsg := llm.Message{
|
||||||
|
Role: llm.RoleUser,
|
||||||
|
Text: userText,
|
||||||
|
Images: m.pendingImages,
|
||||||
|
}
|
||||||
|
|
||||||
|
req := llm.Request{
|
||||||
|
Conversation: m.conversation,
|
||||||
|
Messages: []llm.Message{
|
||||||
|
{Role: llm.RoleSystem, Text: m.systemPrompt},
|
||||||
|
userMsg,
|
||||||
|
},
|
||||||
|
Temperature: m.temperature,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add toolbox if enabled
|
||||||
|
if m.toolsEnabled && len(m.toolbox.Functions()) > 0 {
|
||||||
|
req.Toolbox = m.toolbox.WithRequireTool(false)
|
||||||
|
}
|
||||||
|
|
||||||
|
return req
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildFollowUpRequest builds a follow-up request after tool execution
|
||||||
|
func buildFollowUpRequest(m *Model, previousReq llm.Request, resp llm.ResponseChoice, toolResults []llm.ToolCallResponse) llm.Request {
|
||||||
|
return previousReq.NextRequest(resp, toolResults)
|
||||||
|
}
|
||||||
25
cmd/llm/main.go
Normal file
25
cmd/llm/main.go
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
tea "github.com/charmbracelet/bubbletea"
|
||||||
|
"github.com/joho/godotenv"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Load .env file if it exists (ignore error if not found)
|
||||||
|
_ = godotenv.Load()
|
||||||
|
|
||||||
|
p := tea.NewProgram(
|
||||||
|
InitialModel(),
|
||||||
|
tea.WithAltScreen(),
|
||||||
|
tea.WithMouseCellMotion(),
|
||||||
|
)
|
||||||
|
|
||||||
|
if _, err := p.Run(); err != nil {
|
||||||
|
fmt.Printf("Error running program: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
295
cmd/llm/model.go
Normal file
295
cmd/llm/model.go
Normal file
@@ -0,0 +1,295 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/charmbracelet/bubbles/textinput"
|
||||||
|
"github.com/charmbracelet/bubbles/viewport"
|
||||||
|
tea "github.com/charmbracelet/bubbletea"
|
||||||
|
|
||||||
|
llm "gitea.stevedudenhoeffer.com/steve/go-llm"
|
||||||
|
)
|
||||||
|
|
||||||
|
// State represents the current view/screen of the application
|
||||||
|
type State int
|
||||||
|
|
||||||
|
const (
|
||||||
|
StateChat State = iota
|
||||||
|
StateProviderSelect
|
||||||
|
StateModelSelect
|
||||||
|
StateImageInput
|
||||||
|
StateToolsPanel
|
||||||
|
StateSettings
|
||||||
|
StateAPIKeyInput
|
||||||
|
)
|
||||||
|
|
||||||
|
// DisplayMessage represents a message for display in the UI
|
||||||
|
type DisplayMessage struct {
|
||||||
|
Role llm.Role
|
||||||
|
Content string
|
||||||
|
Images int // number of images attached
|
||||||
|
}
|
||||||
|
|
||||||
|
// ProviderInfo contains information about a provider
|
||||||
|
type ProviderInfo struct {
|
||||||
|
Name string
|
||||||
|
EnvVar string
|
||||||
|
Models []string
|
||||||
|
HasAPIKey bool
|
||||||
|
ModelIndex int
|
||||||
|
}
|
||||||
|
|
||||||
|
// Model is the main Bubble Tea model
|
||||||
|
type Model struct {
|
||||||
|
// State
|
||||||
|
state State
|
||||||
|
previousState State
|
||||||
|
|
||||||
|
// Provider
|
||||||
|
provider llm.LLM
|
||||||
|
providerName string
|
||||||
|
chat llm.ChatCompletion
|
||||||
|
modelName string
|
||||||
|
apiKeys map[string]string
|
||||||
|
providers []ProviderInfo
|
||||||
|
providerIndex int
|
||||||
|
|
||||||
|
// Conversation
|
||||||
|
conversation []llm.Input
|
||||||
|
messages []DisplayMessage
|
||||||
|
|
||||||
|
// Tools
|
||||||
|
toolbox llm.ToolBox
|
||||||
|
toolsEnabled bool
|
||||||
|
|
||||||
|
// Settings
|
||||||
|
systemPrompt string
|
||||||
|
temperature *float64
|
||||||
|
|
||||||
|
// Pending images
|
||||||
|
pendingImages []llm.Image
|
||||||
|
|
||||||
|
// UI Components
|
||||||
|
input textinput.Model
|
||||||
|
viewport viewport.Model
|
||||||
|
viewportReady bool
|
||||||
|
|
||||||
|
// Selection state (for lists)
|
||||||
|
listIndex int
|
||||||
|
listItems []string
|
||||||
|
|
||||||
|
// Dimensions
|
||||||
|
width int
|
||||||
|
height int
|
||||||
|
|
||||||
|
// Loading state
|
||||||
|
loading bool
|
||||||
|
err error
|
||||||
|
|
||||||
|
// For API key input
|
||||||
|
apiKeyInput textinput.Model
|
||||||
|
}
|
||||||
|
|
||||||
|
// InitialModel creates and returns the initial model
|
||||||
|
func InitialModel() Model {
|
||||||
|
ti := textinput.New()
|
||||||
|
ti.Placeholder = "Type your message..."
|
||||||
|
ti.Focus()
|
||||||
|
ti.CharLimit = 4096
|
||||||
|
ti.Width = 60
|
||||||
|
|
||||||
|
aki := textinput.New()
|
||||||
|
aki.Placeholder = "Enter API key..."
|
||||||
|
aki.CharLimit = 256
|
||||||
|
aki.Width = 60
|
||||||
|
aki.EchoMode = textinput.EchoPassword
|
||||||
|
|
||||||
|
// Initialize providers with environment variable checks
|
||||||
|
providers := []ProviderInfo{
|
||||||
|
{
|
||||||
|
Name: "OpenAI",
|
||||||
|
EnvVar: "OPENAI_API_KEY",
|
||||||
|
Models: []string{
|
||||||
|
"gpt-4.1",
|
||||||
|
"gpt-4.1-mini",
|
||||||
|
"gpt-4.1-nano",
|
||||||
|
"gpt-4o",
|
||||||
|
"gpt-4o-mini",
|
||||||
|
"gpt-4-turbo",
|
||||||
|
"gpt-3.5-turbo",
|
||||||
|
"o1",
|
||||||
|
"o1-mini",
|
||||||
|
"o1-preview",
|
||||||
|
"o3-mini",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Anthropic",
|
||||||
|
EnvVar: "ANTHROPIC_API_KEY",
|
||||||
|
Models: []string{
|
||||||
|
"claude-sonnet-4-20250514",
|
||||||
|
"claude-opus-4-20250514",
|
||||||
|
"claude-3-7-sonnet-20250219",
|
||||||
|
"claude-3-5-sonnet-20241022",
|
||||||
|
"claude-3-5-haiku-20241022",
|
||||||
|
"claude-3-opus-20240229",
|
||||||
|
"claude-3-sonnet-20240229",
|
||||||
|
"claude-3-haiku-20240307",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "Google",
|
||||||
|
EnvVar: "GOOGLE_API_KEY",
|
||||||
|
Models: []string{
|
||||||
|
"gemini-2.0-flash",
|
||||||
|
"gemini-2.0-flash-lite",
|
||||||
|
"gemini-1.5-pro",
|
||||||
|
"gemini-1.5-flash",
|
||||||
|
"gemini-1.5-flash-8b",
|
||||||
|
"gemini-1.0-pro",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for API keys in environment
|
||||||
|
apiKeys := make(map[string]string)
|
||||||
|
for i := range providers {
|
||||||
|
if key := os.Getenv(providers[i].EnvVar); key != "" {
|
||||||
|
apiKeys[providers[i].Name] = key
|
||||||
|
providers[i].HasAPIKey = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
m := Model{
|
||||||
|
state: StateProviderSelect,
|
||||||
|
input: ti,
|
||||||
|
apiKeyInput: aki,
|
||||||
|
apiKeys: apiKeys,
|
||||||
|
providers: providers,
|
||||||
|
systemPrompt: "You are a helpful assistant.",
|
||||||
|
toolbox: createDemoToolbox(),
|
||||||
|
toolsEnabled: false,
|
||||||
|
messages: []DisplayMessage{},
|
||||||
|
conversation: []llm.Input{},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build list items for provider selection
|
||||||
|
m.listItems = make([]string, len(providers))
|
||||||
|
for i, p := range providers {
|
||||||
|
status := " (no key)"
|
||||||
|
if p.HasAPIKey {
|
||||||
|
status = " (ready)"
|
||||||
|
}
|
||||||
|
m.listItems[i] = p.Name + status
|
||||||
|
}
|
||||||
|
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
|
||||||
|
// Init initializes the model
|
||||||
|
func (m Model) Init() tea.Cmd {
|
||||||
|
return textinput.Blink
|
||||||
|
}
|
||||||
|
|
||||||
|
// selectProvider sets up the selected provider
|
||||||
|
func (m *Model) selectProvider(index int) error {
|
||||||
|
if index < 0 || index >= len(m.providers) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
p := m.providers[index]
|
||||||
|
key, ok := m.apiKeys[p.Name]
|
||||||
|
if !ok || key == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
m.providerName = p.Name
|
||||||
|
m.providerIndex = index
|
||||||
|
|
||||||
|
switch p.Name {
|
||||||
|
case "OpenAI":
|
||||||
|
m.provider = llm.OpenAI(key)
|
||||||
|
case "Anthropic":
|
||||||
|
m.provider = llm.Anthropic(key)
|
||||||
|
case "Google":
|
||||||
|
m.provider = llm.Google(key)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Select default model
|
||||||
|
if len(p.Models) > 0 {
|
||||||
|
return m.selectModel(p.ModelIndex)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// selectModel sets the current model
|
||||||
|
func (m *Model) selectModel(index int) error {
|
||||||
|
if m.provider == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
p := m.providers[m.providerIndex]
|
||||||
|
if index < 0 || index >= len(p.Models) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
modelName := p.Models[index]
|
||||||
|
chat, err := m.provider.ModelVersion(modelName)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
m.chat = chat
|
||||||
|
m.modelName = modelName
|
||||||
|
m.providers[m.providerIndex].ModelIndex = index
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// newConversation resets the conversation
|
||||||
|
func (m *Model) newConversation() {
|
||||||
|
m.conversation = []llm.Input{}
|
||||||
|
m.messages = []DisplayMessage{}
|
||||||
|
m.pendingImages = []llm.Image{}
|
||||||
|
m.err = nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// addUserMessage adds a user message to the conversation
|
||||||
|
func (m *Model) addUserMessage(text string, images []llm.Image) {
|
||||||
|
msg := llm.Message{
|
||||||
|
Role: llm.RoleUser,
|
||||||
|
Text: text,
|
||||||
|
Images: images,
|
||||||
|
}
|
||||||
|
m.conversation = append(m.conversation, msg)
|
||||||
|
m.messages = append(m.messages, DisplayMessage{
|
||||||
|
Role: llm.RoleUser,
|
||||||
|
Content: text,
|
||||||
|
Images: len(images),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// addAssistantMessage adds an assistant message to the conversation
|
||||||
|
func (m *Model) addAssistantMessage(content string) {
|
||||||
|
m.messages = append(m.messages, DisplayMessage{
|
||||||
|
Role: llm.RoleAssistant,
|
||||||
|
Content: content,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// addToolCallMessage adds a tool call message to display
|
||||||
|
func (m *Model) addToolCallMessage(name string, args string) {
|
||||||
|
m.messages = append(m.messages, DisplayMessage{
|
||||||
|
Role: llm.Role("tool_call"),
|
||||||
|
Content: name + ": " + args,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// addToolResultMessage adds a tool result message to display
|
||||||
|
func (m *Model) addToolResultMessage(name string, result string) {
|
||||||
|
m.messages = append(m.messages, DisplayMessage{
|
||||||
|
Role: llm.Role("tool_result"),
|
||||||
|
Content: name + " -> " + result,
|
||||||
|
})
|
||||||
|
}
|
||||||
113
cmd/llm/styles.go
Normal file
113
cmd/llm/styles.go
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/charmbracelet/lipgloss"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
// Colors
|
||||||
|
primaryColor = lipgloss.Color("205")
|
||||||
|
secondaryColor = lipgloss.Color("39")
|
||||||
|
accentColor = lipgloss.Color("212")
|
||||||
|
mutedColor = lipgloss.Color("241")
|
||||||
|
errorColor = lipgloss.Color("196")
|
||||||
|
successColor = lipgloss.Color("82")
|
||||||
|
|
||||||
|
// App styles
|
||||||
|
appStyle = lipgloss.NewStyle().Padding(1, 2)
|
||||||
|
|
||||||
|
// Header
|
||||||
|
headerStyle = lipgloss.NewStyle().
|
||||||
|
Bold(true).
|
||||||
|
Foreground(primaryColor).
|
||||||
|
BorderStyle(lipgloss.NormalBorder()).
|
||||||
|
BorderBottom(true).
|
||||||
|
BorderForeground(mutedColor).
|
||||||
|
Padding(0, 1)
|
||||||
|
|
||||||
|
// Provider badge
|
||||||
|
providerBadgeStyle = lipgloss.NewStyle().
|
||||||
|
Background(secondaryColor).
|
||||||
|
Foreground(lipgloss.Color("0")).
|
||||||
|
Padding(0, 1).
|
||||||
|
Bold(true)
|
||||||
|
|
||||||
|
// Messages
|
||||||
|
systemMsgStyle = lipgloss.NewStyle().
|
||||||
|
Foreground(mutedColor).
|
||||||
|
Italic(true).
|
||||||
|
Padding(0, 1)
|
||||||
|
|
||||||
|
userMsgStyle = lipgloss.NewStyle().
|
||||||
|
Foreground(secondaryColor).
|
||||||
|
Padding(0, 1)
|
||||||
|
|
||||||
|
assistantMsgStyle = lipgloss.NewStyle().
|
||||||
|
Foreground(lipgloss.Color("255")).
|
||||||
|
Padding(0, 1)
|
||||||
|
|
||||||
|
roleLabelStyle = lipgloss.NewStyle().
|
||||||
|
Bold(true).
|
||||||
|
Width(12)
|
||||||
|
|
||||||
|
// Tool calls
|
||||||
|
toolCallStyle = lipgloss.NewStyle().
|
||||||
|
Foreground(accentColor).
|
||||||
|
Italic(true).
|
||||||
|
Padding(0, 1)
|
||||||
|
|
||||||
|
toolResultStyle = lipgloss.NewStyle().
|
||||||
|
Foreground(successColor).
|
||||||
|
Padding(0, 1)
|
||||||
|
|
||||||
|
// Input area
|
||||||
|
inputStyle = lipgloss.NewStyle().
|
||||||
|
BorderStyle(lipgloss.RoundedBorder()).
|
||||||
|
BorderForeground(primaryColor).
|
||||||
|
Padding(0, 1)
|
||||||
|
|
||||||
|
inputHelpStyle = lipgloss.NewStyle().
|
||||||
|
Foreground(mutedColor).
|
||||||
|
Italic(true)
|
||||||
|
|
||||||
|
// Error
|
||||||
|
errorStyle = lipgloss.NewStyle().
|
||||||
|
Foreground(errorColor).
|
||||||
|
Bold(true)
|
||||||
|
|
||||||
|
// Loading
|
||||||
|
loadingStyle = lipgloss.NewStyle().
|
||||||
|
Foreground(accentColor).
|
||||||
|
Italic(true)
|
||||||
|
|
||||||
|
// List selection
|
||||||
|
selectedItemStyle = lipgloss.NewStyle().
|
||||||
|
Foreground(primaryColor).
|
||||||
|
Bold(true)
|
||||||
|
|
||||||
|
normalItemStyle = lipgloss.NewStyle().
|
||||||
|
Foreground(lipgloss.Color("255"))
|
||||||
|
|
||||||
|
// Settings panel
|
||||||
|
settingLabelStyle = lipgloss.NewStyle().
|
||||||
|
Foreground(secondaryColor).
|
||||||
|
Width(15)
|
||||||
|
|
||||||
|
settingValueStyle = lipgloss.NewStyle().
|
||||||
|
Foreground(lipgloss.Color("255"))
|
||||||
|
|
||||||
|
// Help text
|
||||||
|
helpStyle = lipgloss.NewStyle().
|
||||||
|
Foreground(mutedColor).
|
||||||
|
Padding(1, 0)
|
||||||
|
|
||||||
|
// Image indicator
|
||||||
|
imageIndicatorStyle = lipgloss.NewStyle().
|
||||||
|
Foreground(accentColor).
|
||||||
|
Bold(true)
|
||||||
|
|
||||||
|
// Viewport
|
||||||
|
viewportStyle = lipgloss.NewStyle().
|
||||||
|
BorderStyle(lipgloss.NormalBorder()).
|
||||||
|
BorderForeground(mutedColor)
|
||||||
|
)
|
||||||
105
cmd/llm/tools.go
Normal file
105
cmd/llm/tools.go
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"math"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
llm "gitea.stevedudenhoeffer.com/steve/go-llm"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TimeParams is the parameter struct for the GetTime function
|
||||||
|
type TimeParams struct{}
|
||||||
|
|
||||||
|
// GetTime returns the current time
|
||||||
|
func GetTime(_ *llm.Context, _ TimeParams) (any, error) {
|
||||||
|
return time.Now().Format("Monday, January 2, 2006 3:04:05 PM MST"), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// CalcParams is the parameter struct for the Calculate function
|
||||||
|
type CalcParams struct {
|
||||||
|
A float64 `json:"a" description:"First number"`
|
||||||
|
B float64 `json:"b" description:"Second number"`
|
||||||
|
Op string `json:"op" description:"Operation: add, subtract, multiply, divide, power, sqrt, mod"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate performs basic math operations
|
||||||
|
func Calculate(_ *llm.Context, params CalcParams) (any, error) {
|
||||||
|
switch strings.ToLower(params.Op) {
|
||||||
|
case "add", "+":
|
||||||
|
return params.A + params.B, nil
|
||||||
|
case "subtract", "sub", "-":
|
||||||
|
return params.A - params.B, nil
|
||||||
|
case "multiply", "mul", "*":
|
||||||
|
return params.A * params.B, nil
|
||||||
|
case "divide", "div", "/":
|
||||||
|
if params.B == 0 {
|
||||||
|
return nil, fmt.Errorf("division by zero")
|
||||||
|
}
|
||||||
|
return params.A / params.B, nil
|
||||||
|
case "power", "pow", "^":
|
||||||
|
return math.Pow(params.A, params.B), nil
|
||||||
|
case "sqrt":
|
||||||
|
if params.A < 0 {
|
||||||
|
return nil, fmt.Errorf("cannot take square root of negative number")
|
||||||
|
}
|
||||||
|
return math.Sqrt(params.A), nil
|
||||||
|
case "mod", "%":
|
||||||
|
return math.Mod(params.A, params.B), nil
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("unknown operation: %s", params.Op)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WeatherParams is the parameter struct for the GetWeather function
|
||||||
|
type WeatherParams struct {
|
||||||
|
Location string `json:"location" description:"City name or location"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetWeather returns mock weather data (for demo purposes)
|
||||||
|
func GetWeather(_ *llm.Context, params WeatherParams) (any, error) {
|
||||||
|
// This is a demo function - returns mock data
|
||||||
|
weathers := []string{"sunny", "cloudy", "rainy", "partly cloudy", "windy"}
|
||||||
|
temps := []int{65, 72, 58, 80, 45}
|
||||||
|
|
||||||
|
// Use location string to deterministically pick weather
|
||||||
|
idx := len(params.Location) % len(weathers)
|
||||||
|
|
||||||
|
return map[string]any{
|
||||||
|
"location": params.Location,
|
||||||
|
"temperature": strconv.Itoa(temps[idx]) + "F",
|
||||||
|
"condition": weathers[idx],
|
||||||
|
"humidity": "45%",
|
||||||
|
"note": "This is mock data for demonstration purposes",
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// RandomNumberParams is the parameter struct for the RandomNumber function
|
||||||
|
type RandomNumberParams struct {
|
||||||
|
Min int `json:"min" description:"Minimum value (inclusive)"`
|
||||||
|
Max int `json:"max" description:"Maximum value (inclusive)"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// RandomNumber generates a pseudo-random number (using current time nanoseconds)
|
||||||
|
func RandomNumber(_ *llm.Context, params RandomNumberParams) (any, error) {
|
||||||
|
if params.Min > params.Max {
|
||||||
|
return nil, fmt.Errorf("min cannot be greater than max")
|
||||||
|
}
|
||||||
|
// Simple pseudo-random using time
|
||||||
|
n := time.Now().UnixNano()
|
||||||
|
rangeSize := params.Max - params.Min + 1
|
||||||
|
result := params.Min + int(n%int64(rangeSize))
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// createDemoToolbox creates a toolbox with demo tools for testing
|
||||||
|
func createDemoToolbox() llm.ToolBox {
|
||||||
|
return llm.NewToolBox(
|
||||||
|
llm.NewFunction("get_time", "Get the current date and time", GetTime),
|
||||||
|
llm.NewFunction("calculate", "Perform basic math operations (add, subtract, multiply, divide, power, sqrt, mod)", Calculate),
|
||||||
|
llm.NewFunction("get_weather", "Get weather information for a location (demo data)", GetWeather),
|
||||||
|
llm.NewFunction("random_number", "Generate a random number between min and max", RandomNumber),
|
||||||
|
)
|
||||||
|
}
|
||||||
435
cmd/llm/update.go
Normal file
435
cmd/llm/update.go
Normal file
@@ -0,0 +1,435 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/charmbracelet/bubbles/textinput"
|
||||||
|
"github.com/charmbracelet/bubbles/viewport"
|
||||||
|
tea "github.com/charmbracelet/bubbletea"
|
||||||
|
|
||||||
|
llm "gitea.stevedudenhoeffer.com/steve/go-llm"
|
||||||
|
)
|
||||||
|
|
||||||
|
// pendingRequest stores the request being processed for follow-up
|
||||||
|
var pendingRequest llm.Request
|
||||||
|
var pendingResponse llm.ResponseChoice
|
||||||
|
|
||||||
|
// Update handles messages and updates the model
|
||||||
|
func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
|
||||||
|
var cmd tea.Cmd
|
||||||
|
var cmds []tea.Cmd
|
||||||
|
|
||||||
|
switch msg := msg.(type) {
|
||||||
|
case tea.KeyMsg:
|
||||||
|
return m.handleKeyMsg(msg)
|
||||||
|
|
||||||
|
case tea.WindowSizeMsg:
|
||||||
|
m.width = msg.Width
|
||||||
|
m.height = msg.Height
|
||||||
|
|
||||||
|
headerHeight := 3
|
||||||
|
footerHeight := 4
|
||||||
|
verticalMargins := headerHeight + footerHeight
|
||||||
|
|
||||||
|
if !m.viewportReady {
|
||||||
|
m.viewport = viewport.New(msg.Width-4, msg.Height-verticalMargins)
|
||||||
|
m.viewport.HighPerformanceRendering = false
|
||||||
|
m.viewportReady = true
|
||||||
|
} else {
|
||||||
|
m.viewport.Width = msg.Width - 4
|
||||||
|
m.viewport.Height = msg.Height - verticalMargins
|
||||||
|
}
|
||||||
|
|
||||||
|
m.input.Width = msg.Width - 6
|
||||||
|
m.apiKeyInput.Width = msg.Width - 6
|
||||||
|
|
||||||
|
m.viewport.SetContent(m.renderMessages())
|
||||||
|
|
||||||
|
case ChatResponseMsg:
|
||||||
|
m.loading = false
|
||||||
|
if msg.Err != nil {
|
||||||
|
m.err = msg.Err
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(msg.Response.Choices) == 0 {
|
||||||
|
m.err = fmt.Errorf("no response choices returned")
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
choice := msg.Response.Choices[0]
|
||||||
|
|
||||||
|
// Check for tool calls
|
||||||
|
if len(choice.Calls) > 0 && m.toolsEnabled {
|
||||||
|
// Store for follow-up
|
||||||
|
pendingResponse = choice
|
||||||
|
|
||||||
|
// Add assistant's response to conversation if there's content
|
||||||
|
if choice.Content != "" {
|
||||||
|
m.addAssistantMessage(choice.Content)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Display tool calls
|
||||||
|
for _, call := range choice.Calls {
|
||||||
|
m.addToolCallMessage(call.FunctionCall.Name, call.FunctionCall.Arguments)
|
||||||
|
}
|
||||||
|
|
||||||
|
m.viewport.SetContent(m.renderMessages())
|
||||||
|
m.viewport.GotoBottom()
|
||||||
|
|
||||||
|
// Execute tools
|
||||||
|
m.loading = true
|
||||||
|
return m, executeTools(m.toolbox, pendingRequest, choice)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Regular response - add to conversation and display
|
||||||
|
m.conversation = append(m.conversation, choice)
|
||||||
|
m.addAssistantMessage(choice.Content)
|
||||||
|
|
||||||
|
m.viewport.SetContent(m.renderMessages())
|
||||||
|
m.viewport.GotoBottom()
|
||||||
|
|
||||||
|
case ToolExecutionMsg:
|
||||||
|
if msg.Err != nil {
|
||||||
|
m.loading = false
|
||||||
|
m.err = msg.Err
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Display tool results
|
||||||
|
for i, result := range msg.Results {
|
||||||
|
name := pendingResponse.Calls[i].FunctionCall.Name
|
||||||
|
resultStr := fmt.Sprintf("%v", result.Result)
|
||||||
|
if result.Error != nil {
|
||||||
|
resultStr = "Error: " + result.Error.Error()
|
||||||
|
}
|
||||||
|
m.addToolResultMessage(name, resultStr)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add tool call responses to conversation
|
||||||
|
for _, result := range msg.Results {
|
||||||
|
m.conversation = append(m.conversation, result)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add the assistant's response to conversation
|
||||||
|
m.conversation = append(m.conversation, pendingResponse)
|
||||||
|
|
||||||
|
m.viewport.SetContent(m.renderMessages())
|
||||||
|
m.viewport.GotoBottom()
|
||||||
|
|
||||||
|
// Send follow-up request
|
||||||
|
followUp := buildFollowUpRequest(&m, pendingRequest, pendingResponse, msg.Results)
|
||||||
|
pendingRequest = followUp
|
||||||
|
return m, sendChatRequest(m.chat, followUp)
|
||||||
|
|
||||||
|
case ImageLoadedMsg:
|
||||||
|
if msg.Err != nil {
|
||||||
|
m.err = msg.Err
|
||||||
|
m.state = m.previousState
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
m.pendingImages = append(m.pendingImages, msg.Image)
|
||||||
|
m.state = m.previousState
|
||||||
|
m.err = nil
|
||||||
|
|
||||||
|
default:
|
||||||
|
// Update text input
|
||||||
|
if m.state == StateChat {
|
||||||
|
m.input, cmd = m.input.Update(msg)
|
||||||
|
cmds = append(cmds, cmd)
|
||||||
|
} else if m.state == StateAPIKeyInput {
|
||||||
|
m.apiKeyInput, cmd = m.apiKeyInput.Update(msg)
|
||||||
|
cmds = append(cmds, cmd)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return m, tea.Batch(cmds...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleKeyMsg handles keyboard input
|
||||||
|
func (m Model) handleKeyMsg(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
|
||||||
|
// Global key handling
|
||||||
|
switch msg.String() {
|
||||||
|
case "ctrl+c":
|
||||||
|
return m, tea.Quit
|
||||||
|
|
||||||
|
case "esc":
|
||||||
|
if m.state != StateChat {
|
||||||
|
m.state = StateChat
|
||||||
|
m.input.Focus()
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
return m, tea.Quit
|
||||||
|
}
|
||||||
|
|
||||||
|
// State-specific key handling
|
||||||
|
switch m.state {
|
||||||
|
case StateChat:
|
||||||
|
return m.handleChatKeys(msg)
|
||||||
|
case StateProviderSelect:
|
||||||
|
return m.handleProviderSelectKeys(msg)
|
||||||
|
case StateModelSelect:
|
||||||
|
return m.handleModelSelectKeys(msg)
|
||||||
|
case StateImageInput:
|
||||||
|
return m.handleImageInputKeys(msg)
|
||||||
|
case StateToolsPanel:
|
||||||
|
return m.handleToolsPanelKeys(msg)
|
||||||
|
case StateSettings:
|
||||||
|
return m.handleSettingsKeys(msg)
|
||||||
|
case StateAPIKeyInput:
|
||||||
|
return m.handleAPIKeyInputKeys(msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleChatKeys handles keys in chat state
|
||||||
|
func (m Model) handleChatKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
|
||||||
|
switch msg.String() {
|
||||||
|
case "enter":
|
||||||
|
if m.loading {
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
text := strings.TrimSpace(m.input.Value())
|
||||||
|
if text == "" {
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if m.chat == nil {
|
||||||
|
m.err = fmt.Errorf("no model selected - press Ctrl+P to select a provider")
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build and send request
|
||||||
|
req := buildRequest(&m, text)
|
||||||
|
pendingRequest = req
|
||||||
|
|
||||||
|
// Add user message to display
|
||||||
|
m.addUserMessage(text, m.pendingImages)
|
||||||
|
|
||||||
|
// Clear input and pending images
|
||||||
|
m.input.Reset()
|
||||||
|
m.pendingImages = nil
|
||||||
|
m.err = nil
|
||||||
|
m.loading = true
|
||||||
|
|
||||||
|
m.viewport.SetContent(m.renderMessages())
|
||||||
|
m.viewport.GotoBottom()
|
||||||
|
|
||||||
|
return m, sendChatRequest(m.chat, req)
|
||||||
|
|
||||||
|
case "ctrl+i":
|
||||||
|
m.previousState = StateChat
|
||||||
|
m.state = StateImageInput
|
||||||
|
m.input.SetValue("")
|
||||||
|
m.input.Placeholder = "Enter image path or URL..."
|
||||||
|
return m, nil
|
||||||
|
|
||||||
|
case "ctrl+t":
|
||||||
|
m.state = StateToolsPanel
|
||||||
|
return m, nil
|
||||||
|
|
||||||
|
case "ctrl+p":
|
||||||
|
m.state = StateProviderSelect
|
||||||
|
m.listIndex = m.providerIndex
|
||||||
|
return m, nil
|
||||||
|
|
||||||
|
case "ctrl+m":
|
||||||
|
if m.provider == nil {
|
||||||
|
m.err = fmt.Errorf("select a provider first")
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
m.state = StateModelSelect
|
||||||
|
m.listItems = m.providers[m.providerIndex].Models
|
||||||
|
m.listIndex = m.providers[m.providerIndex].ModelIndex
|
||||||
|
return m, nil
|
||||||
|
|
||||||
|
case "ctrl+s":
|
||||||
|
m.state = StateSettings
|
||||||
|
return m, nil
|
||||||
|
|
||||||
|
case "ctrl+n":
|
||||||
|
m.newConversation()
|
||||||
|
m.viewport.SetContent(m.renderMessages())
|
||||||
|
return m, nil
|
||||||
|
|
||||||
|
case "up", "down", "pgup", "pgdown":
|
||||||
|
var cmd tea.Cmd
|
||||||
|
m.viewport, cmd = m.viewport.Update(msg)
|
||||||
|
return m, cmd
|
||||||
|
|
||||||
|
default:
|
||||||
|
var cmd tea.Cmd
|
||||||
|
m.input, cmd = m.input.Update(msg)
|
||||||
|
return m, cmd
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleProviderSelectKeys handles keys in provider selection state
|
||||||
|
func (m Model) handleProviderSelectKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
|
||||||
|
switch msg.String() {
|
||||||
|
case "up", "k":
|
||||||
|
if m.listIndex > 0 {
|
||||||
|
m.listIndex--
|
||||||
|
}
|
||||||
|
case "down", "j":
|
||||||
|
if m.listIndex < len(m.providers)-1 {
|
||||||
|
m.listIndex++
|
||||||
|
}
|
||||||
|
case "enter":
|
||||||
|
p := m.providers[m.listIndex]
|
||||||
|
if !p.HasAPIKey {
|
||||||
|
// Need to get API key
|
||||||
|
m.state = StateAPIKeyInput
|
||||||
|
m.apiKeyInput.Focus()
|
||||||
|
m.apiKeyInput.SetValue("")
|
||||||
|
return m, textinput.Blink
|
||||||
|
}
|
||||||
|
|
||||||
|
err := m.selectProvider(m.listIndex)
|
||||||
|
if err != nil {
|
||||||
|
m.err = err
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
m.state = StateChat
|
||||||
|
m.input.Focus()
|
||||||
|
m.newConversation()
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleAPIKeyInputKeys handles keys in API key input state
|
||||||
|
func (m Model) handleAPIKeyInputKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
|
||||||
|
switch msg.String() {
|
||||||
|
case "enter":
|
||||||
|
key := strings.TrimSpace(m.apiKeyInput.Value())
|
||||||
|
if key == "" {
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store the API key
|
||||||
|
p := m.providers[m.listIndex]
|
||||||
|
m.apiKeys[p.Name] = key
|
||||||
|
m.providers[m.listIndex].HasAPIKey = true
|
||||||
|
|
||||||
|
// Update list items
|
||||||
|
for i, prov := range m.providers {
|
||||||
|
status := " (no key)"
|
||||||
|
if prov.HasAPIKey {
|
||||||
|
status = " (ready)"
|
||||||
|
}
|
||||||
|
m.listItems[i] = prov.Name + status
|
||||||
|
}
|
||||||
|
|
||||||
|
// Select the provider
|
||||||
|
err := m.selectProvider(m.listIndex)
|
||||||
|
if err != nil {
|
||||||
|
m.err = err
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
m.state = StateChat
|
||||||
|
m.input.Focus()
|
||||||
|
m.newConversation()
|
||||||
|
return m, nil
|
||||||
|
|
||||||
|
default:
|
||||||
|
var cmd tea.Cmd
|
||||||
|
m.apiKeyInput, cmd = m.apiKeyInput.Update(msg)
|
||||||
|
return m, cmd
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleModelSelectKeys handles keys in model selection state
|
||||||
|
func (m Model) handleModelSelectKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
|
||||||
|
switch msg.String() {
|
||||||
|
case "up", "k":
|
||||||
|
if m.listIndex > 0 {
|
||||||
|
m.listIndex--
|
||||||
|
}
|
||||||
|
case "down", "j":
|
||||||
|
if m.listIndex < len(m.listItems)-1 {
|
||||||
|
m.listIndex++
|
||||||
|
}
|
||||||
|
case "enter":
|
||||||
|
err := m.selectModel(m.listIndex)
|
||||||
|
if err != nil {
|
||||||
|
m.err = err
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
m.state = StateChat
|
||||||
|
m.input.Focus()
|
||||||
|
}
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleImageInputKeys handles keys in image input state
|
||||||
|
func (m Model) handleImageInputKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
|
||||||
|
switch msg.String() {
|
||||||
|
case "enter":
|
||||||
|
input := strings.TrimSpace(m.input.Value())
|
||||||
|
if input == "" {
|
||||||
|
m.state = m.previousState
|
||||||
|
m.input.Placeholder = "Type your message..."
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
m.input.Placeholder = "Type your message..."
|
||||||
|
|
||||||
|
// Determine input type and load
|
||||||
|
if strings.HasPrefix(input, "http://") || strings.HasPrefix(input, "https://") {
|
||||||
|
return m, loadImageFromURL(input)
|
||||||
|
} else if strings.HasPrefix(input, "data:") || len(input) > 100 && !strings.Contains(input, "/") && !strings.Contains(input, "\\") {
|
||||||
|
return m, loadImageFromBase64(input)
|
||||||
|
} else {
|
||||||
|
return m, loadImageFromPath(input)
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
var cmd tea.Cmd
|
||||||
|
m.input, cmd = m.input.Update(msg)
|
||||||
|
return m, cmd
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleToolsPanelKeys handles keys in tools panel state
|
||||||
|
func (m Model) handleToolsPanelKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
|
||||||
|
switch msg.String() {
|
||||||
|
case "t":
|
||||||
|
m.toolsEnabled = !m.toolsEnabled
|
||||||
|
case "enter", "q":
|
||||||
|
m.state = StateChat
|
||||||
|
m.input.Focus()
|
||||||
|
}
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleSettingsKeys handles keys in settings state
|
||||||
|
func (m Model) handleSettingsKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
|
||||||
|
switch msg.String() {
|
||||||
|
case "1":
|
||||||
|
// Set temperature to nil (default)
|
||||||
|
m.temperature = nil
|
||||||
|
case "2":
|
||||||
|
t := 0.0
|
||||||
|
m.temperature = &t
|
||||||
|
case "3":
|
||||||
|
t := 0.5
|
||||||
|
m.temperature = &t
|
||||||
|
case "4":
|
||||||
|
t := 0.7
|
||||||
|
m.temperature = &t
|
||||||
|
case "5":
|
||||||
|
t := 1.0
|
||||||
|
m.temperature = &t
|
||||||
|
case "enter", "q":
|
||||||
|
m.state = StateChat
|
||||||
|
m.input.Focus()
|
||||||
|
}
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
296
cmd/llm/view.go
Normal file
296
cmd/llm/view.go
Normal file
@@ -0,0 +1,296 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/charmbracelet/lipgloss"
|
||||||
|
|
||||||
|
llm "gitea.stevedudenhoeffer.com/steve/go-llm"
|
||||||
|
)
|
||||||
|
|
||||||
|
// View renders the current state
|
||||||
|
func (m Model) View() string {
|
||||||
|
switch m.state {
|
||||||
|
case StateProviderSelect:
|
||||||
|
return m.renderProviderSelect()
|
||||||
|
case StateModelSelect:
|
||||||
|
return m.renderModelSelect()
|
||||||
|
case StateImageInput:
|
||||||
|
return m.renderImageInput()
|
||||||
|
case StateToolsPanel:
|
||||||
|
return m.renderToolsPanel()
|
||||||
|
case StateSettings:
|
||||||
|
return m.renderSettings()
|
||||||
|
case StateAPIKeyInput:
|
||||||
|
return m.renderAPIKeyInput()
|
||||||
|
default:
|
||||||
|
return m.renderChat()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// renderChat renders the main chat view
|
||||||
|
func (m Model) renderChat() string {
|
||||||
|
var b strings.Builder
|
||||||
|
|
||||||
|
// Header
|
||||||
|
provider := m.providerName
|
||||||
|
if provider == "" {
|
||||||
|
provider = "None"
|
||||||
|
}
|
||||||
|
model := m.modelName
|
||||||
|
if model == "" {
|
||||||
|
model = "None"
|
||||||
|
}
|
||||||
|
|
||||||
|
header := headerStyle.Render(fmt.Sprintf("go-llm CLI %s",
|
||||||
|
providerBadgeStyle.Render(fmt.Sprintf("%s/%s", provider, model))))
|
||||||
|
|
||||||
|
b.WriteString(header)
|
||||||
|
b.WriteString("\n")
|
||||||
|
|
||||||
|
// Messages viewport
|
||||||
|
if m.viewportReady {
|
||||||
|
b.WriteString(m.viewport.View())
|
||||||
|
b.WriteString("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Image indicator
|
||||||
|
if len(m.pendingImages) > 0 {
|
||||||
|
b.WriteString(imageIndicatorStyle.Render(fmt.Sprintf(" [%d image(s) attached]", len(m.pendingImages))))
|
||||||
|
b.WriteString("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Error
|
||||||
|
if m.err != nil {
|
||||||
|
b.WriteString(errorStyle.Render(" Error: " + m.err.Error()))
|
||||||
|
b.WriteString("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Loading
|
||||||
|
if m.loading {
|
||||||
|
b.WriteString(loadingStyle.Render(" Thinking..."))
|
||||||
|
b.WriteString("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Input
|
||||||
|
inputBox := inputStyle.Render(m.input.View())
|
||||||
|
b.WriteString(inputBox)
|
||||||
|
b.WriteString("\n")
|
||||||
|
|
||||||
|
// Help
|
||||||
|
help := inputHelpStyle.Render("Enter: send | Ctrl+I: image | Ctrl+T: tools | Ctrl+P: provider | Ctrl+M: model | Ctrl+S: settings | Ctrl+N: new | Esc: quit")
|
||||||
|
b.WriteString(help)
|
||||||
|
|
||||||
|
return appStyle.Render(b.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
// renderMessages renders all messages for the viewport
|
||||||
|
func (m Model) renderMessages() string {
|
||||||
|
var b strings.Builder
|
||||||
|
|
||||||
|
if len(m.messages) == 0 {
|
||||||
|
b.WriteString(systemMsgStyle.Render("[System] " + m.systemPrompt))
|
||||||
|
b.WriteString("\n\n")
|
||||||
|
b.WriteString(lipgloss.NewStyle().Foreground(mutedColor).Render("Start a conversation by typing a message below."))
|
||||||
|
return b.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
b.WriteString(systemMsgStyle.Render("[System] " + m.systemPrompt))
|
||||||
|
b.WriteString("\n\n")
|
||||||
|
|
||||||
|
for _, msg := range m.messages {
|
||||||
|
var content string
|
||||||
|
var style lipgloss.Style
|
||||||
|
|
||||||
|
switch msg.Role {
|
||||||
|
case llm.RoleUser:
|
||||||
|
style = userMsgStyle
|
||||||
|
label := roleLabelStyle.Foreground(secondaryColor).Render("[User]")
|
||||||
|
content = label + " " + msg.Content
|
||||||
|
if msg.Images > 0 {
|
||||||
|
content += imageIndicatorStyle.Render(fmt.Sprintf(" [%d image(s)]", msg.Images))
|
||||||
|
}
|
||||||
|
case llm.RoleAssistant:
|
||||||
|
style = assistantMsgStyle
|
||||||
|
label := roleLabelStyle.Foreground(lipgloss.Color("255")).Render("[Assistant]")
|
||||||
|
content = label + " " + msg.Content
|
||||||
|
case llm.Role("tool_call"):
|
||||||
|
style = toolCallStyle
|
||||||
|
content = " -> Calling: " + msg.Content
|
||||||
|
case llm.Role("tool_result"):
|
||||||
|
style = toolResultStyle
|
||||||
|
content = " <- Result: " + msg.Content
|
||||||
|
default:
|
||||||
|
style = assistantMsgStyle
|
||||||
|
content = msg.Content
|
||||||
|
}
|
||||||
|
|
||||||
|
b.WriteString(style.Render(content))
|
||||||
|
b.WriteString("\n\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
return b.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// renderProviderSelect renders the provider selection view
|
||||||
|
func (m Model) renderProviderSelect() string {
|
||||||
|
var b strings.Builder
|
||||||
|
|
||||||
|
b.WriteString(headerStyle.Render("Select Provider"))
|
||||||
|
b.WriteString("\n\n")
|
||||||
|
|
||||||
|
for i, item := range m.listItems {
|
||||||
|
cursor := " "
|
||||||
|
style := normalItemStyle
|
||||||
|
if i == m.listIndex {
|
||||||
|
cursor = "> "
|
||||||
|
style = selectedItemStyle
|
||||||
|
}
|
||||||
|
b.WriteString(style.Render(cursor + item))
|
||||||
|
b.WriteString("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
b.WriteString("\n")
|
||||||
|
b.WriteString(helpStyle.Render("Use arrow keys or j/k to navigate, Enter to select, Esc to cancel"))
|
||||||
|
|
||||||
|
return appStyle.Render(b.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
// renderAPIKeyInput renders the API key input view
|
||||||
|
func (m Model) renderAPIKeyInput() string {
|
||||||
|
var b strings.Builder
|
||||||
|
|
||||||
|
provider := m.providers[m.listIndex]
|
||||||
|
|
||||||
|
b.WriteString(headerStyle.Render(fmt.Sprintf("Enter API Key for %s", provider.Name)))
|
||||||
|
b.WriteString("\n\n")
|
||||||
|
|
||||||
|
b.WriteString(fmt.Sprintf("Environment variable: %s\n\n", provider.EnvVar))
|
||||||
|
b.WriteString("Enter your API key below (it will be hidden):\n\n")
|
||||||
|
|
||||||
|
inputBox := inputStyle.Render(m.apiKeyInput.View())
|
||||||
|
b.WriteString(inputBox)
|
||||||
|
b.WriteString("\n\n")
|
||||||
|
|
||||||
|
b.WriteString(helpStyle.Render("Enter to confirm, Esc to cancel"))
|
||||||
|
|
||||||
|
return appStyle.Render(b.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
// renderModelSelect renders the model selection view
|
||||||
|
func (m Model) renderModelSelect() string {
|
||||||
|
var b strings.Builder
|
||||||
|
|
||||||
|
b.WriteString(headerStyle.Render(fmt.Sprintf("Select Model (%s)", m.providerName)))
|
	b.WriteString("\n\n")

	for i, item := range m.listItems {
		cursor := " "
		style := normalItemStyle
		if i == m.listIndex {
			cursor = "> "
			style = selectedItemStyle
		}
		if item == m.modelName {
			item += " (current)"
		}
		b.WriteString(style.Render(cursor + item))
		b.WriteString("\n")
	}

	b.WriteString("\n")
	b.WriteString(helpStyle.Render("Use arrow keys or j/k to navigate, Enter to select, Esc to cancel"))

	return appStyle.Render(b.String())
}

// renderImageInput renders the image input view
func (m Model) renderImageInput() string {
	var b strings.Builder

	b.WriteString(headerStyle.Render("Add Image"))
	b.WriteString("\n\n")

	b.WriteString("Enter an image source:\n")
	b.WriteString(" - File path (e.g., /path/to/image.png)\n")
	b.WriteString(" - URL (e.g., https://example.com/image.jpg)\n")
	b.WriteString(" - Base64 data or data URL\n\n")

	if len(m.pendingImages) > 0 {
		b.WriteString(imageIndicatorStyle.Render(fmt.Sprintf("Currently attached: %d image(s)\n\n", len(m.pendingImages))))
	}

	inputBox := inputStyle.Render(m.input.View())
	b.WriteString(inputBox)
	b.WriteString("\n\n")

	b.WriteString(helpStyle.Render("Enter to add image, Esc to cancel"))

	return appStyle.Render(b.String())
}

// renderToolsPanel renders the tools panel
func (m Model) renderToolsPanel() string {
	var b strings.Builder

	b.WriteString(headerStyle.Render("Tools / Function Calling"))
	b.WriteString("\n\n")

	status := "DISABLED"
	statusStyle := errorStyle
	if m.toolsEnabled {
		status = "ENABLED"
		statusStyle = lipgloss.NewStyle().Foreground(successColor).Bold(true)
	}

	b.WriteString(settingLabelStyle.Render("Tools Status:"))
	b.WriteString(statusStyle.Render(status))
	b.WriteString("\n\n")

	b.WriteString("Available tools:\n")
	for _, fn := range m.toolbox.Functions() {
		b.WriteString(fmt.Sprintf(" - %s: %s\n", selectedItemStyle.Render(fn.Name), fn.Description))
	}

	b.WriteString("\n")
	b.WriteString(helpStyle.Render("Press 't' to toggle tools, Enter or 'q' to close"))

	return appStyle.Render(b.String())
}

// renderSettings renders the settings view
func (m Model) renderSettings() string {
	var b strings.Builder

	b.WriteString(headerStyle.Render("Settings"))
	b.WriteString("\n\n")

	// Temperature
	tempStr := "default"
	if m.temperature != nil {
		tempStr = fmt.Sprintf("%.1f", *m.temperature)
	}
	b.WriteString(settingLabelStyle.Render("Temperature:"))
	b.WriteString(settingValueStyle.Render(tempStr))
	b.WriteString("\n\n")

	b.WriteString("Press a key to set temperature:\n")
	b.WriteString(" 1 - Default (model decides)\n")
	b.WriteString(" 2 - 0.0 (deterministic)\n")
	b.WriteString(" 3 - 0.5 (balanced)\n")
	b.WriteString(" 4 - 0.7 (creative)\n")
	b.WriteString(" 5 - 1.0 (very creative)\n")

	b.WriteString("\n")

	// System prompt
	b.WriteString(settingLabelStyle.Render("System Prompt:"))
	b.WriteString("\n")
	b.WriteString(settingValueStyle.Render(" " + m.systemPrompt))
	b.WriteString("\n\n")

	b.WriteString(helpStyle.Render("Enter or 'q' to close"))

	return appStyle.Render(b.String())
}
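The renderers above rely on a set of shared lipgloss styles (appStyle, headerStyle, helpStyle, selectedItemStyle, and so on) that are declared elsewhere in the CLI package and are not part of this hunk. As a rough sketch of what such declarations look like with lipgloss — the variable names match the code above, but the concrete colors, borders, and padding are illustrative assumptions, not the values used by the actual file:

```go
package main

import "github.com/charmbracelet/lipgloss"

// Illustrative style palette; the real CLI defines its own values.
var (
	appStyle            = lipgloss.NewStyle().Padding(1, 2)
	headerStyle         = lipgloss.NewStyle().Bold(true).Foreground(lipgloss.Color("62"))
	helpStyle           = lipgloss.NewStyle().Faint(true)
	normalItemStyle     = lipgloss.NewStyle()
	selectedItemStyle   = lipgloss.NewStyle().Bold(true).Foreground(lipgloss.Color("205"))
	errorStyle          = lipgloss.NewStyle().Foreground(lipgloss.Color("196"))
	inputStyle          = lipgloss.NewStyle().Border(lipgloss.RoundedBorder()).Padding(0, 1)
	settingLabelStyle   = lipgloss.NewStyle().Bold(true)
	settingValueStyle   = lipgloss.NewStyle()
	imageIndicatorStyle = lipgloss.NewStyle().Foreground(lipgloss.Color("42"))

	successColor = lipgloss.Color("42")
)
```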
@@ -1,4 +1,4 @@
-package go_llm
+package llm
 
 import (
 	"context"
@@ -1,4 +1,4 @@
-package go_llm
+package llm
 
 import (
 	"context"
@@ -1,4 +1,4 @@
-package go_llm
+package llm
 
 import (
 	"reflect"
87 go.mod
@@ -1,48 +1,67 @@
 module gitea.stevedudenhoeffer.com/steve/go-llm
 
-go 1.23.1
+go 1.24.0
 
+toolchain go1.24.2
+
 require (
-	github.com/google/generative-ai-go v0.19.0
-	github.com/liushuangls/go-anthropic/v2 v2.15.0
-	github.com/openai/openai-go v0.1.0-beta.9
-	golang.org/x/image v0.29.0
-	google.golang.org/api v0.228.0
+	github.com/charmbracelet/bubbles v0.21.0
+	github.com/charmbracelet/bubbletea v1.3.10
+	github.com/charmbracelet/lipgloss v1.1.0
+	github.com/joho/godotenv v1.5.1
+	github.com/liushuangls/go-anthropic/v2 v2.17.0
+	github.com/modelcontextprotocol/go-sdk v1.2.0
+	github.com/openai/openai-go v1.12.0
+	golang.org/x/image v0.35.0
+	google.golang.org/genai v1.43.0
 )
 
 require (
-	cloud.google.com/go v0.120.0 // indirect
-	cloud.google.com/go/ai v0.10.1 // indirect
-	cloud.google.com/go/auth v0.15.0 // indirect
-	cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
-	cloud.google.com/go/compute/metadata v0.6.0 // indirect
-	cloud.google.com/go/longrunning v0.6.6 // indirect
+	cloud.google.com/go v0.123.0 // indirect
+	cloud.google.com/go/auth v0.18.1 // indirect
+	cloud.google.com/go/compute/metadata v0.9.0 // indirect
+	github.com/atotto/clipboard v0.1.4 // indirect
+	github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
+	github.com/cespare/xxhash/v2 v2.3.0 // indirect
+	github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
+	github.com/charmbracelet/x/ansi v0.10.1 // indirect
+	github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
+	github.com/charmbracelet/x/term v0.2.1 // indirect
+	github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
 	github.com/felixge/httpsnoop v1.0.4 // indirect
-	github.com/go-logr/logr v1.4.2 // indirect
+	github.com/go-logr/logr v1.4.3 // indirect
 	github.com/go-logr/stdr v1.2.2 // indirect
+	github.com/google/go-cmp v0.7.0 // indirect
+	github.com/google/jsonschema-go v0.3.0 // indirect
 	github.com/google/s2a-go v0.1.9 // indirect
-	github.com/google/uuid v1.6.0 // indirect
-	github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
-	github.com/googleapis/gax-go/v2 v2.14.1 // indirect
+	github.com/googleapis/enterprise-certificate-proxy v0.3.11 // indirect
+	github.com/googleapis/gax-go/v2 v2.16.0 // indirect
+	github.com/gorilla/websocket v1.5.3 // indirect
+	github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
+	github.com/mattn/go-isatty v0.0.20 // indirect
+	github.com/mattn/go-localereader v0.0.1 // indirect
+	github.com/mattn/go-runewidth v0.0.16 // indirect
+	github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect
+	github.com/muesli/cancelreader v0.2.2 // indirect
+	github.com/muesli/termenv v0.16.0 // indirect
+	github.com/rivo/uniseg v0.4.7 // indirect
 	github.com/tidwall/gjson v1.18.0 // indirect
-	github.com/tidwall/match v1.1.1 // indirect
+	github.com/tidwall/match v1.2.0 // indirect
 	github.com/tidwall/pretty v1.2.1 // indirect
 	github.com/tidwall/sjson v1.2.5 // indirect
-	go.opentelemetry.io/auto/sdk v1.1.0 // indirect
-	go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 // indirect
-	go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 // indirect
-	go.opentelemetry.io/otel v1.35.0 // indirect
-	go.opentelemetry.io/otel/metric v1.35.0 // indirect
-	go.opentelemetry.io/otel/trace v1.35.0 // indirect
-	golang.org/x/crypto v0.37.0 // indirect
-	golang.org/x/net v0.39.0 // indirect
-	golang.org/x/oauth2 v0.29.0 // indirect
-	golang.org/x/sync v0.16.0 // indirect
-	golang.org/x/sys v0.32.0 // indirect
-	golang.org/x/text v0.27.0 // indirect
-	golang.org/x/time v0.11.0 // indirect
-	google.golang.org/genproto/googleapis/api v0.0.0-20250409194420-de1ac958c67a // indirect
-	google.golang.org/genproto/googleapis/rpc v0.0.0-20250409194420-de1ac958c67a // indirect
-	google.golang.org/grpc v1.71.1 // indirect
-	google.golang.org/protobuf v1.36.6 // indirect
+	github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
+	github.com/yosida95/uritemplate/v3 v3.0.2 // indirect
+	go.opentelemetry.io/auto/sdk v1.2.1 // indirect
+	go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.64.0 // indirect
+	go.opentelemetry.io/otel v1.39.0 // indirect
+	go.opentelemetry.io/otel/metric v1.39.0 // indirect
+	go.opentelemetry.io/otel/trace v1.39.0 // indirect
+	golang.org/x/crypto v0.47.0 // indirect
+	golang.org/x/net v0.49.0 // indirect
+	golang.org/x/oauth2 v0.32.0 // indirect
+	golang.org/x/sys v0.40.0 // indirect
+	golang.org/x/text v0.33.0 // indirect
+	google.golang.org/genproto/googleapis/rpc v0.0.0-20260122232226-8e98ce8d340d // indirect
+	google.golang.org/grpc v1.78.0 // indirect
+	google.golang.org/protobuf v1.36.11 // indirect
 )
186 go.sum
@@ -1,97 +1,145 @@
103 google.go
@@ -1,4 +1,4 @@
-package go_llm
+package llm
 
 import (
 	"context"
@@ -8,26 +8,28 @@ import (
 	"io"
 	"net/http"
 
-	"github.com/google/generative-ai-go/genai"
-	"google.golang.org/api/option"
+	"google.golang.org/genai"
 )
 
-type google struct {
+type googleImpl struct {
 	key   string
 	model string
 }
 
-func (g google) ModelVersion(modelVersion string) (ChatCompletion, error) {
+var _ LLM = googleImpl{}
+
+func (g googleImpl) ModelVersion(modelVersion string) (ChatCompletion, error) {
 	g.model = modelVersion
 
 	return g, nil
 }
 
-func (g google) requestToChatHistory(in Request, model *genai.GenerativeModel) (*genai.GenerativeModel, *genai.ChatSession, []genai.Part) {
-	res := *model
+func (g googleImpl) requestToContents(in Request) ([]*genai.Content, *genai.GenerateContentConfig) {
+	var contents []*genai.Content
+	var cfg genai.GenerateContentConfig
 
-	for _, tool := range in.Toolbox.functions {
-		res.Tools = append(res.Tools, &genai.Tool{
+	for _, tool := range in.Toolbox.Functions() {
+		cfg.Tools = append(cfg.Tools, &genai.Tool{
 			FunctionDeclarations: []*genai.FunctionDeclaration{
 				{
 					Name: tool.Name,
@@ -38,48 +40,44 @@ func (g google) requestToChatHistory(in Request, model *genai.GenerativeModel) (
 		})
 	}
 
-	if !in.Toolbox.RequiresTool() {
-		res.ToolConfig = &genai.ToolConfig{FunctionCallingConfig: &genai.FunctionCallingConfig{
-			Mode: genai.FunctionCallingAny,
+	if in.Toolbox.RequiresTool() {
+		cfg.ToolConfig = &genai.ToolConfig{FunctionCallingConfig: &genai.FunctionCallingConfig{
+			Mode: genai.FunctionCallingConfigModeAny,
 		}}
 	}
 
-	cs := res.StartChat()
-
-	for i, c := range in.Messages {
-		content := genai.NewUserContent(genai.Text(c.Text))
+	for _, c := range in.Messages {
+		var role genai.Role
 
 		switch c.Role {
 		case RoleAssistant, RoleSystem:
-			content.Role = "model"
+			role = genai.RoleModel
 
 		case RoleUser:
-			content.Role = "user"
+			role = genai.RoleUser
+		}
+
+		var parts []*genai.Part
+		if c.Text != "" {
+			parts = append(parts, genai.NewPartFromText(c.Text))
 		}
 
 		for _, img := range c.Images {
 			if img.Url != "" {
 				// gemini does not support URLs, so we need to download the image and convert it to a blob
 
-				// Download the image from the URL
 				resp, err := http.Get(img.Url)
 				if err != nil {
 					panic(fmt.Sprintf("error downloading image: %v", err))
 				}
 				defer resp.Body.Close()
 
-				// Check the Content-Length to ensure it's not over 20MB
 				if resp.ContentLength > 20*1024*1024 {
 					panic(fmt.Sprintf("image size exceeds 20MB: %d bytes", resp.ContentLength))
 				}
 
-				// Read the content into a byte slice
 				data, err := io.ReadAll(resp.Body)
 				if err != nil {
 					panic(fmt.Sprintf("error reading image data: %v", err))
 				}
 
-				// Ensure the MIME type is appropriate
 				mimeType := http.DetectContentType(data)
 				switch mimeType {
 				case "image/jpeg", "image/png", "image/gif":
@@ -88,38 +86,24 @@ func (g google) requestToChatHistory(in Request, model *genai.GenerativeModel) (
 					panic(fmt.Sprintf("unsupported image MIME type: %s", mimeType))
 				}
 
-				// Create a genai.Blob using the validated image data
-				content.Parts = append(content.Parts, genai.Blob{
-					MIMEType: mimeType,
-					Data:     data,
-				})
+				parts = append(parts, genai.NewPartFromBytes(data, mimeType))
 
 			} else {
-				// convert base64 to blob
 				b, e := base64.StdEncoding.DecodeString(img.Base64)
 				if e != nil {
 					panic(fmt.Sprintf("error decoding base64: %v", e))
 				}
 
-				content.Parts = append(content.Parts, genai.Blob{
-					MIMEType: img.ContentType,
-					Data:     b,
-				})
+				parts = append(parts, genai.NewPartFromBytes(b, img.ContentType))
 			}
 		}
 
-		// if this is the last message, we want to add to history, we want it to be the parts
-		if i == len(in.Messages)-1 {
-			return &res, cs, content.Parts
-		}
-
-		cs.History = append(cs.History, content)
+		contents = append(contents, genai.NewContentFromParts(parts, role))
 	}
 
-	return &res, cs, nil
+	return contents, &cfg
 }
 
-func (g google) responseToLLMResponse(in *genai.GenerateContentResponse) (Response, error) {
+func (g googleImpl) responseToLLMResponse(in *genai.GenerateContentResponse) (Response, error) {
 	res := Response{}
 
 	for _, c := range in.Candidates {
@@ -127,15 +111,12 @@ func (g google) responseToLLMResponse(in *genai.GenerateContentResponse) (Respon
 		var set = false
 		if c.Content != nil {
 			for _, p := range c.Content.Parts {
-				switch p.(type) {
-				case genai.Text:
-					choice.Content = string(p.(genai.Text))
+				if p.Text != "" {
 					set = true
-				case genai.FunctionCall:
-					v := p.(genai.FunctionCall)
+					choice.Content = p.Text
+				} else if p.FunctionCall != nil {
+					v := p.FunctionCall
 					b, e := json.Marshal(v.Args)
 
 					if e != nil {
 						return Response{}, fmt.Errorf("error marshalling args: %w", e)
 					}
@@ -150,8 +131,6 @@ func (g google) responseToLLMResponse(in *genai.GenerateContentResponse) (Respon
 
 					choice.Calls = append(choice.Calls, call)
 					set = true
-				default:
-					return Response{}, fmt.Errorf("unknown part type: %T", p)
 				}
 			}
 		}
@@ -165,23 +144,19 @@ func (g google) responseToLLMResponse(in *genai.GenerateContentResponse) (Respon
 	return res, nil
 }
 
-func (g google) ChatComplete(ctx context.Context, req Request) (Response, error) {
-	cl, err := genai.NewClient(ctx, option.WithAPIKey(g.key))
+func (g googleImpl) ChatComplete(ctx context.Context, req Request) (Response, error) {
+	cl, err := genai.NewClient(ctx, &genai.ClientConfig{
+		APIKey:  g.key,
+		Backend: genai.BackendGeminiAPI,
+	})
 
 	if err != nil {
 		return Response{}, fmt.Errorf("error creating genai client: %w", err)
 	}
 
-	model := cl.GenerativeModel(g.model)
-
-	_, cs, parts := g.requestToChatHistory(req, model)
-
-	resp, err := cs.SendMessage(ctx, parts...)
-
-	//parts := g.requestToGoogleRequest(req, model)
-
-	//resp, err := model.GenerateContent(ctx, parts...)
+	contents, cfg := g.requestToContents(req)
 
+	resp, err := cl.Models.GenerateContent(ctx, g.model, contents, cfg)
 	if err != nil {
 		return Response{}, fmt.Errorf("error generating content: %w", err)
 	}
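With this change, ChatComplete builds the conversation with requestToContents and sends it through cl.Models.GenerateContent from google.golang.org/genai; from the caller's side nothing changes, because everything still goes through the Google constructor and the ChatCompletion interface. Below is a minimal sketch of that call path. It assumes Request exposes the Messages slice (with Role, Text, and Images fields) that requestToContents consumes, and the model name and image URL are illustrative placeholders:

```go
package main

import (
	"context"
	"fmt"
	"os"

	llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

func main() {
	ctx := context.Background()

	// Model name is illustrative; pass whichever Gemini model you want to target.
	model, err := llm.Google(os.Getenv("GOOGLE_API_KEY")).ModelVersion("gemini-2.0-flash")
	if err != nil {
		panic(err)
	}

	// Image URLs are downloaded and converted to blobs by the google provider,
	// as shown in the diff above.
	resp, err := model.ChatComplete(ctx, llm.Request{
		Messages: []llm.Message{
			{
				Role:   llm.RoleUser,
				Text:   "Describe this image in one sentence.",
				Images: []llm.Image{{Url: "https://example.com/cat.jpg"}},
			},
		},
	})
	if err != nil {
		panic(err)
	}

	fmt.Printf("%+v\n", resp)
}
```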
@@ -1,4 +1,4 @@
-package utils
+package imageutil
 
 import (
 	"bytes"
@@ -12,8 +12,8 @@ import (
 	"golang.org/x/image/draw"
 )
 
-// CompressImage takes a base‑64‑encoded image (JPEG, PNG or GIF) and returns
-// a base‑64‑encoded version that is at most maxLength in size, or an error.
+// CompressImage takes a base-64-encoded image (JPEG, PNG or GIF) and returns
+// a base-64-encoded version that is at most maxLength in size, or an error.
 func CompressImage(b64 string, maxLength int) (string, string, error) {
 	raw, err := base64.StdEncoding.DecodeString(b64)
 	if err != nil {
@@ -29,12 +29,12 @@ func CompressImage(b64 string, maxLength int) (string, string, error) {
 	case "image/gif":
 		return compressGIF(raw, maxLength)
 
-	default: // jpeg, png, webp, etc. → treat as raster
+	default: // jpeg, png, webp, etc. -> treat as raster
 		return compressRaster(raw, maxLength)
 	}
 }
 
-// ---------- Raster path (jpeg / png / single‑frame gif) ----------
+// ---------- Raster path (jpeg / png / single-frame gif) ----------
 
 func compressRaster(src []byte, maxLength int) (string, string, error) {
 	img, _, err := image.Decode(bytes.NewReader(src))
@@ -57,7 +57,7 @@ func compressRaster(src []byte, maxLength int) (string, string, error) {
 			continue
 		}
 
-		// down‑scale 80%
+		// down-scale 80%
 		b := img.Bounds()
 		if b.Dx() < 100 || b.Dy() < 100 {
 			return "", "", fmt.Errorf("cannot compress below %.02fMiB without destroying image", float64(maxLength)/1048576.0)
@@ -86,7 +86,7 @@ func compressGIF(src []byte, maxLength int) (string, string, error) {
 		return base64.StdEncoding.EncodeToString(buf.Bytes()), "image/gif", nil
 	}
 
-	// down‑scale every frame by 80%
+	// down-scale every frame by 80%
 	w, h := g.Config.Width, g.Config.Height
 	if w < 100 || h < 100 {
 		return "", "", fmt.Errorf("cannot compress animated GIF below 5 MiB without excessive quality loss")
@@ -94,7 +94,7 @@ func compressGIF(src []byte, maxLength int) (string, string, error) {
 
 	nw, nh := int(float64(w)*0.8), int(float64(h)*0.8)
 	for i, frm := range g.Image {
-		// convert paletted frame → RGBA for scaling
+		// convert paletted frame -> RGBA for scaling
 		rgba := image.NewRGBA(frm.Bounds())
 		draw.Draw(rgba, rgba.Bounds(), frm, frm.Bounds().Min, draw.Src)
 
@@ -109,6 +109,6 @@ func compressGIF(src []byte, maxLength int) (string, string, error) {
 		g.Image[i] = paletted
 	}
 	g.Config.Width, g.Config.Height = nw, nh
-	// loop back and test size again …
+	// loop back and test size again ...
 	}
 }
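CompressImage decodes the base64 input, re-encodes it, and keeps downscaling by 80% until the result fits under maxLength, returning the compressed data together with its MIME type. A small usage sketch follows; the import path is an assumption based on the utils -> imageutil rename, the file name is a placeholder, and the 5 MiB budget is just an example value:

```go
package main

import (
	"encoding/base64"
	"fmt"
	"os"

	// Assumed import path after the utils -> imageutil rename.
	"gitea.stevedudenhoeffer.com/steve/go-llm/imageutil"
)

func main() {
	raw, err := os.ReadFile("photo.png")
	if err != nil {
		panic(err)
	}

	// Ask for a version that fits within roughly 5 MiB.
	b64, mimeType, err := imageutil.CompressImage(base64.StdEncoding.EncodeToString(raw), 5*1024*1024)
	if err != nil {
		panic(err)
	}

	fmt.Printf("compressed to %d base64 characters (%s)\n", len(b64), mimeType)
}
```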
272 llm.go
@@ -1,286 +1,30 @@
-package go_llm
+package llm
 
 import (
 	"context"
-	"fmt"
-	"strings"
-
-	"github.com/openai/openai-go"
-	"github.com/openai/openai-go/packages/param"
 )
 
-type Role string
-
-const (
-	RoleSystem    Role = "system"
-	RoleUser      Role = "user"
-	RoleAssistant Role = "assistant"
-)
-
-type Image struct {
-	Base64      string
-	ContentType string
-	Url         string
-}
-
-func (i Image) toRaw() map[string]any {
-	res := map[string]any{
-		"base64":      i.Base64,
-		"contenttype": i.ContentType,
-		"url":         i.Url,
-	}
-
-	return res
-}
-
-func (i *Image) fromRaw(raw map[string]any) Image {
-	var res Image
-
-	res.Base64 = raw["base64"].(string)
-	res.ContentType = raw["contenttype"].(string)
-	res.Url = raw["url"].(string)
-
-	return res
-}
-
-type Message struct {
-	Role   Role
-	Name   string
-	Text   string
-	Images []Image
-}
-
-func (m Message) toRaw() map[string]any {
-	res := map[string]any{
-		"role": m.Role,
-		"name": m.Name,
-		"text": m.Text,
-	}
-
-	images := make([]map[string]any, 0, len(m.Images))
-	for _, img := range m.Images {
-		images = append(images, img.toRaw())
-	}
-
-	res["images"] = images
-
-	return res
-}
-
-func (m *Message) fromRaw(raw map[string]any) Message {
-	var res Message
-
-	res.Role = Role(raw["role"].(string))
-	res.Name = raw["name"].(string)
-	res.Text = raw["text"].(string)
-
-	images := raw["images"].([]map[string]any)
-	for _, img := range images {
-		var i Image
-
-		res.Images = append(res.Images, i.fromRaw(img))
-	}
-
-	return res
-}
-
-func (m Message) toChatCompletionMessages(model string) []openai.ChatCompletionMessageParamUnion {
-	var res openai.ChatCompletionMessageParamUnion
-
-	var arrayOfContentParts []openai.ChatCompletionContentPartUnionParam
-	var textContent param.Opt[string]
-
-	for _, img := range m.Images {
-		if img.Base64 != "" {
-			arrayOfContentParts = append(arrayOfContentParts,
-				openai.ChatCompletionContentPartUnionParam{
-					OfImageURL: &openai.ChatCompletionContentPartImageParam{
-						ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
-							URL: "data:" + img.ContentType + ";base64," + img.Base64,
-						},
-					},
-				},
-			)
-		} else if img.Url != "" {
-			arrayOfContentParts = append(arrayOfContentParts,
-				openai.ChatCompletionContentPartUnionParam{
-					OfImageURL: &openai.ChatCompletionContentPartImageParam{
-						ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
-							URL: img.Url,
-						},
-					},
-				},
-			)
-		}
-	}
-
-	if m.Text != "" {
-		if len(arrayOfContentParts) > 0 {
-			arrayOfContentParts = append(arrayOfContentParts,
-				openai.ChatCompletionContentPartUnionParam{
-					OfText: &openai.ChatCompletionContentPartTextParam{
-						Text: "\n",
-					},
-				},
-			)
-		} else {
-			textContent = openai.String(m.Text)
-		}
-	}
-
-	a := strings.Split(model, "-")
-
-	useSystemInsteadOfDeveloper := true
-	if len(a) > 1 && a[0][0] == 'o' {
-		useSystemInsteadOfDeveloper = false
-	}
-
-	switch m.Role {
-	case RoleSystem:
-		if useSystemInsteadOfDeveloper {
-			res = openai.ChatCompletionMessageParamUnion{
-				OfSystem: &openai.ChatCompletionSystemMessageParam{
-					Content: openai.ChatCompletionSystemMessageParamContentUnion{
-						OfString: textContent,
-					},
-				},
-			}
-		} else {
-			res = openai.ChatCompletionMessageParamUnion{
-				OfDeveloper: &openai.ChatCompletionDeveloperMessageParam{
-					Content: openai.ChatCompletionDeveloperMessageParamContentUnion{
-						OfString: textContent,
-					},
-				},
-			}
-		}
-
-	case RoleUser:
-		var name param.Opt[string]
-		if m.Name != "" {
-			name = openai.String(m.Name)
-		}
-
-		res = openai.ChatCompletionMessageParamUnion{
-			OfUser: &openai.ChatCompletionUserMessageParam{
-				Name: name,
-				Content: openai.ChatCompletionUserMessageParamContentUnion{
-					OfString:              textContent,
-					OfArrayOfContentParts: arrayOfContentParts,
-				},
-			},
-		}
-
-	case RoleAssistant:
-		var name param.Opt[string]
-		if m.Name != "" {
-			name = openai.String(m.Name)
-		}
-
-		res = openai.ChatCompletionMessageParamUnion{
-			OfAssistant: &openai.ChatCompletionAssistantMessageParam{
-				Name: name,
-				Content: openai.ChatCompletionAssistantMessageParamContentUnion{
-					OfString: textContent,
-				},
-			},
-		}
-
-	}
-
-	return []openai.ChatCompletionMessageParamUnion{res}
-}
-
-type ToolCall struct {
-	ID           string
-	FunctionCall FunctionCall
-}
-
-func (t ToolCall) toRaw() map[string]any {
-	res := map[string]any{
-		"id": t.ID,
-	}
-
-	res["function"] = t.FunctionCall.toRaw()
-
-	return res
-}
-
-func (t ToolCall) toChatCompletionMessages(_ string) []openai.ChatCompletionMessageParamUnion {
-	return []openai.ChatCompletionMessageParamUnion{{
-		OfAssistant: &openai.ChatCompletionAssistantMessageParam{
-			ToolCalls: []openai.ChatCompletionMessageToolCallParam{
-				{
-					ID: t.ID,
-					Function: openai.ChatCompletionMessageToolCallFunctionParam{
-						Name:      t.FunctionCall.Name,
-						Arguments: t.FunctionCall.Arguments,
-					},
-				},
-			},
-		},
-	}}
-}
-
-type ToolCallResponse struct {
-	ID     string
-	Result any
-	Error  error
-}
-
-func (t ToolCallResponse) toRaw() map[string]any {
-	res := map[string]any{
-		"id":     t.ID,
-		"result": t.Result,
-	}
-
-	if t.Error != nil {
-		res["error"] = t.Error.Error()
-	}
-
-	return res
-}
-
-func (t ToolCallResponse) toChatCompletionMessages(_ string) []openai.ChatCompletionMessageParamUnion {
-	var refusal string
-	if t.Error != nil {
-		refusal = t.Error.Error()
-	}
-
-	if refusal != "" {
-		if t.Result != "" {
-			t.Result = fmt.Sprint(t.Result) + " (error in execution: " + refusal + ")"
-		} else {
-			t.Result = "error in execution:" + refusal
-		}
-	}
-
-	return []openai.ChatCompletionMessageParamUnion{{
-		OfTool: &openai.ChatCompletionToolMessageParam{
-			ToolCallID: t.ID,
-			Content: openai.ChatCompletionToolMessageParamContentUnion{
-				OfString: openai.String(fmt.Sprint(t.Result)),
-			},
-		},
-	}}
-}
-
+// ChatCompletion is the interface for chat completion.
 type ChatCompletion interface {
 	ChatComplete(ctx context.Context, req Request) (Response, error)
 }
 
+// LLM is the interface for language model providers.
 type LLM interface {
 	ModelVersion(modelVersion string) (ChatCompletion, error)
 }
 
+// OpenAI creates a new OpenAI LLM provider with the given API key.
 func OpenAI(key string) LLM {
 	return openaiImpl{key: key}
 }
 
+// Anthropic creates a new Anthropic LLM provider with the given API key.
 func Anthropic(key string) LLM {
-	return anthropic{key: key}
+	return anthropicImpl{key: key}
}
 
+// Google creates a new Google LLM provider with the given API key.
 func Google(key string) LLM {
-	return google{key: key}
+	return googleImpl{key: key}
 }
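With llm.go reduced to the LLM and ChatCompletion interfaces plus the three provider constructors, calling code can stay provider-agnostic. Below is a minimal sketch under the assumption that Request carries a Messages slice of Message values (the shape consumed by the provider implementations above); the API key and model name are placeholders:

```go
package main

import (
	"context"
	"fmt"

	llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// ask only depends on the LLM and ChatCompletion interfaces, so any provider
// returned by OpenAI, Anthropic, or Google can be passed in unchanged.
func ask(ctx context.Context, provider llm.LLM, model, prompt string) (llm.Response, error) {
	cc, err := provider.ModelVersion(model)
	if err != nil {
		return llm.Response{}, err
	}

	return cc.ChatComplete(ctx, llm.Request{
		Messages: []llm.Message{{Role: llm.RoleUser, Text: prompt}},
	})
}

func main() {
	ctx := context.Background()

	// Placeholder key and model name.
	resp, err := ask(ctx, llm.OpenAI("sk-..."), "gpt-4o-mini", "Hello!")
	if err != nil {
		panic(err)
	}

	fmt.Printf("%+v\n", resp)
}
```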
238 mcp.go Normal file
@@ -0,0 +1,238 @@
package llm

import (
	"context"
	"encoding/json"
	"fmt"
	"os"
	"os/exec"
	"sync"

	"github.com/modelcontextprotocol/go-sdk/mcp"

	"gitea.stevedudenhoeffer.com/steve/go-llm/schema"
)

// MCPServer represents a connection to an MCP server.
// It manages the lifecycle of the connection and provides access to the server's tools.
type MCPServer struct {
	// Name is a friendly name for this server (used for logging/identification)
	Name string

	// Command is the command to run the MCP server (for stdio transport)
	Command string

	// Args are arguments to pass to the command
	Args []string

	// Env are environment variables to set for the command (in addition to current environment)
	Env []string

	// URL is the URL for SSE or HTTP transport (alternative to Command)
	URL string

	// Transport specifies the transport type: "stdio" (default), "sse", or "http"
	Transport string

	client  *mcp.Client
	session *mcp.ClientSession
	tools   map[string]*mcp.Tool // tool name -> tool definition
	mu      sync.RWMutex
}

// Connect establishes a connection to the MCP server.
func (m *MCPServer) Connect(ctx context.Context) error {
	m.mu.Lock()
	defer m.mu.Unlock()

	if m.session != nil {
		return nil // Already connected
	}

	m.client = mcp.NewClient(&mcp.Implementation{
		Name:    "go-llm",
		Version: "1.0.0",
	}, nil)

	var transport mcp.Transport

	switch m.Transport {
	case "sse":
		transport = &mcp.SSEClientTransport{
			Endpoint: m.URL,
		}
	case "http":
		transport = &mcp.StreamableClientTransport{
			Endpoint: m.URL,
		}
	default: // "stdio" or empty
		cmd := exec.Command(m.Command, m.Args...)
		cmd.Env = append(os.Environ(), m.Env...)
		transport = &mcp.CommandTransport{
			Command: cmd,
		}
	}

	session, err := m.client.Connect(ctx, transport, nil)
	if err != nil {
		return fmt.Errorf("failed to connect to MCP server %s: %w", m.Name, err)
	}

	m.session = session

	// Load tools
	m.tools = make(map[string]*mcp.Tool)
	for tool, err := range session.Tools(ctx, nil) {
		if err != nil {
			m.session.Close()
			m.session = nil
			return fmt.Errorf("failed to list tools from %s: %w", m.Name, err)
		}
		m.tools[tool.Name] = tool
	}

	return nil
}

// Close closes the connection to the MCP server.
func (m *MCPServer) Close() error {
	m.mu.Lock()
	defer m.mu.Unlock()

	if m.session == nil {
		return nil
	}

	err := m.session.Close()
	m.session = nil
	m.tools = nil
	return err
}

// IsConnected returns true if the server is connected.
func (m *MCPServer) IsConnected() bool {
	m.mu.RLock()
	defer m.mu.RUnlock()
	return m.session != nil
}

// Tools returns the list of tool names available from this server.
func (m *MCPServer) Tools() []string {
	m.mu.RLock()
	defer m.mu.RUnlock()

	var names []string
	for name := range m.tools {
		names = append(names, name)
	}
	return names
}

// HasTool returns true if this server provides the named tool.
func (m *MCPServer) HasTool(name string) bool {
	m.mu.RLock()
	defer m.mu.RUnlock()
	_, ok := m.tools[name]
	return ok
}

// CallTool calls a tool on the MCP server.
func (m *MCPServer) CallTool(ctx context.Context, name string, arguments map[string]any) (any, error) {
	m.mu.RLock()
	session := m.session
	m.mu.RUnlock()

	if session == nil {
		return nil, fmt.Errorf("not connected to MCP server %s", m.Name)
	}

	result, err := session.CallTool(ctx, &mcp.CallToolParams{
		Name:      name,
		Arguments: arguments,
	})
	if err != nil {
		return nil, err
	}

	// Process the result content
	if len(result.Content) == 0 {
		return nil, nil
	}

	// If there's a single text content, return it as a string
	if len(result.Content) == 1 {
		if textContent, ok := result.Content[0].(*mcp.TextContent); ok {
			return textContent.Text, nil
		}
	}

	// For multiple contents or non-text, serialize to string
	return contentToString(result.Content), nil
}

// toFunction converts an MCP tool to a go-llm Function (for schema purposes only).
func (m *MCPServer) toFunction(tool *mcp.Tool) Function {
	var inputSchema map[string]any
	if tool.InputSchema != nil {
		data, err := json.Marshal(tool.InputSchema)
		if err == nil {
			_ = json.Unmarshal(data, &inputSchema)
		}
	}

	if inputSchema == nil {
		inputSchema = map[string]any{
			"type":       "object",
			"properties": map[string]any{},
		}
	}

	return Function{
		Name:        tool.Name,
		Description: tool.Description,
		Parameters:  schema.NewRaw(inputSchema),
	}
}

// contentToString converts MCP content to a string representation.
func contentToString(content []mcp.Content) string {
	var parts []string
	for _, c := range content {
		switch tc := c.(type) {
		case *mcp.TextContent:
			parts = append(parts, tc.Text)
		default:
			if data, err := json.Marshal(c); err == nil {
				parts = append(parts, string(data))
			}
		}
	}
	if len(parts) == 1 {
		return parts[0]
	}
	data, _ := json.Marshal(parts)
	return string(data)
}

// WithMCPServer adds an MCP server to the toolbox.
// The server must already be connected. Tools from the server will be available
// for use, and tool calls will be routed to the appropriate server.
func (t ToolBox) WithMCPServer(server *MCPServer) ToolBox {
	if t.mcpServers == nil {
		t.mcpServers = make(map[string]*MCPServer)
	}

	server.mu.RLock()
	defer server.mu.RUnlock()
|
||||||
|
|
||||||
|
for name, tool := range server.tools {
|
||||||
|
// Add the function definition (for schema)
|
||||||
|
fn := server.toFunction(tool)
|
||||||
|
t.functions[name] = fn
|
||||||
|
|
||||||
|
// Track which server owns this tool
|
||||||
|
t.mcpServers[name] = server
|
||||||
|
}
|
||||||
|
|
||||||
|
return t
|
||||||
|
}
|
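For orientation, a rough usage sketch of the `MCPServer` API added above (not part of this change; the `my-mcp-server` binary and the `read_file` tool are placeholders):

```go
package main

import (
	"context"
	"fmt"
	"log"

	llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

func main() {
	ctx := context.Background()

	// Stdio transport: the library spawns the server binary as a subprocess.
	server := &llm.MCPServer{
		Name:    "files",         // friendly name used in error messages
		Command: "my-mcp-server", // placeholder binary
		Args:    []string{"--root", "."},
	}

	if err := server.Connect(ctx); err != nil {
		log.Fatal(err)
	}
	defer server.Close()

	fmt.Println("tools:", server.Tools())

	if server.HasTool("read_file") { // placeholder tool name
		out, err := server.CallTool(ctx, "read_file", map[string]any{"path": "README.md"})
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(out)
	}
}
```

From there, `ToolBox.WithMCPServer(server)` above merges the server's tools into an existing toolbox and routes calls back to the owning server.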
115 message.go Normal file
@@ -0,0 +1,115 @@
package llm

// Role represents the role of a message in a conversation.
type Role string

const (
	RoleSystem    Role = "system"
	RoleUser      Role = "user"
	RoleAssistant Role = "assistant"
)

// Image represents an image that can be included in a message.
type Image struct {
	Base64      string
	ContentType string
	Url         string
}

func (i Image) toRaw() map[string]any {
	res := map[string]any{
		"base64":      i.Base64,
		"contenttype": i.ContentType,
		"url":         i.Url,
	}

	return res
}

func (i *Image) fromRaw(raw map[string]any) Image {
	var res Image

	res.Base64 = raw["base64"].(string)
	res.ContentType = raw["contenttype"].(string)
	res.Url = raw["url"].(string)

	return res
}

// Message represents a message in a conversation.
type Message struct {
	Role   Role
	Name   string
	Text   string
	Images []Image
}

func (m Message) toRaw() map[string]any {
	res := map[string]any{
		"role": m.Role,
		"name": m.Name,
		"text": m.Text,
	}

	images := make([]map[string]any, 0, len(m.Images))
	for _, img := range m.Images {
		images = append(images, img.toRaw())
	}

	res["images"] = images

	return res
}

func (m *Message) fromRaw(raw map[string]any) Message {
	var res Message

	res.Role = Role(raw["role"].(string))
	res.Name = raw["name"].(string)
	res.Text = raw["text"].(string)

	images := raw["images"].([]map[string]any)
	for _, img := range images {
		var i Image

		res.Images = append(res.Images, i.fromRaw(img))
	}

	return res
}

// ToolCall represents a tool call made by an assistant.
type ToolCall struct {
	ID           string
	FunctionCall FunctionCall
}

func (t ToolCall) toRaw() map[string]any {
	res := map[string]any{
		"id": t.ID,
	}

	res["function"] = t.FunctionCall.toRaw()

	return res
}

// ToolCallResponse represents the response to a tool call.
type ToolCallResponse struct {
	ID     string
	Result any
	Error  error
}

func (t ToolCallResponse) toRaw() map[string]any {
	res := map[string]any{
		"id":     t.ID,
		"result": t.Result,
	}

	if t.Error != nil {
		res["error"] = t.Error.Error()
	}

	return res
}
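A small illustrative sketch of the message types above (the image URL is a placeholder, not part of this change):

```go
package example

import llm "gitea.stevedudenhoeffer.com/steve/go-llm"

// photoQuestion builds a user message that carries both text and an image by URL.
// A base64-encoded image would instead set Base64 and ContentType.
func photoQuestion(url string) llm.Message {
	return llm.Message{
		Role: llm.RoleUser,
		Text: "What is in this picture?",
		Images: []llm.Image{
			{Url: url},
		},
	}
}
```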
206 openai.go
@@ -1,4 +1,4 @@
-package go_llm
+package llm

 import (
 	"context"
@@ -7,6 +7,7 @@ import (
 	"github.com/openai/openai-go"
 	"github.com/openai/openai-go/option"
+	"github.com/openai/openai-go/packages/param"
 	"github.com/openai/openai-go/shared"
 )
@@ -24,14 +25,14 @@ func (o openaiImpl) newRequestToOpenAIRequest(request Request) openai.ChatComple
 	}

 	for _, i := range request.Conversation {
-		res.Messages = append(res.Messages, i.toChatCompletionMessages(o.model)...)
+		res.Messages = append(res.Messages, inputToChatCompletionMessages(i, o.model)...)
 	}

 	for _, msg := range request.Messages {
-		res.Messages = append(res.Messages, msg.toChatCompletionMessages(o.model)...)
+		res.Messages = append(res.Messages, messageToChatCompletionMessages(msg, o.model)...)
 	}

-	for _, tool := range request.Toolbox.functions {
+	for _, tool := range request.Toolbox.Functions() {
 		res.Tools = append(res.Tools, openai.ChatCompletionToolParam{
 			Type: "function",
 			Function: shared.FunctionDefinitionParam{
@@ -111,10 +112,9 @@ func (o openaiImpl) ChatComplete(ctx context.Context, request Request) (Response
 	req := o.newRequestToOpenAIRequest(request)

 	resp, err := cl.Chat.Completions.New(ctx, req)
-	//resp, err := cl.CreateChatCompletion(ctx, req)

 	if err != nil {
-		return Response{}, fmt.Errorf("unhandled openaiImpl error: %w", err)
+		return Response{}, fmt.Errorf("unhandled openai error: %w", err)
 	}

 	return o.responseToLLMResponse(resp), nil
@@ -124,5 +124,199 @@ func (o openaiImpl) ModelVersion(modelVersion string) (ChatCompletion, error) {
 	return openaiImpl{
 		key:   o.key,
 		model: modelVersion,
+		baseUrl: o.baseUrl,
 	}, nil
 }
+
+// inputToChatCompletionMessages converts an Input to OpenAI chat completion messages.
+func inputToChatCompletionMessages(input Input, model string) []openai.ChatCompletionMessageParamUnion {
+	switch v := input.(type) {
+	case Message:
+		return messageToChatCompletionMessages(v, model)
+	case ToolCall:
+		return toolCallToChatCompletionMessages(v)
+	case ToolCallResponse:
+		return toolCallResponseToChatCompletionMessages(v)
+	case ResponseChoice:
+		return responseChoiceToChatCompletionMessages(v)
+	default:
+		return nil
+	}
+}
+
+func messageToChatCompletionMessages(m Message, model string) []openai.ChatCompletionMessageParamUnion {
+	var res openai.ChatCompletionMessageParamUnion
+
+	var arrayOfContentParts []openai.ChatCompletionContentPartUnionParam
+	var textContent param.Opt[string]
+
+	for _, img := range m.Images {
+		if img.Base64 != "" {
+			arrayOfContentParts = append(arrayOfContentParts,
+				openai.ChatCompletionContentPartUnionParam{
+					OfImageURL: &openai.ChatCompletionContentPartImageParam{
+						ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
+							URL: "data:" + img.ContentType + ";base64," + img.Base64,
+						},
+					},
+				},
+			)
+		} else if img.Url != "" {
+			arrayOfContentParts = append(arrayOfContentParts,
+				openai.ChatCompletionContentPartUnionParam{
+					OfImageURL: &openai.ChatCompletionContentPartImageParam{
+						ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
+							URL: img.Url,
+						},
+					},
+				},
+			)
+		}
+	}
+
+	if m.Text != "" {
+		if len(arrayOfContentParts) > 0 {
+			arrayOfContentParts = append(arrayOfContentParts,
+				openai.ChatCompletionContentPartUnionParam{
+					OfText: &openai.ChatCompletionContentPartTextParam{
+						Text: "\n",
+					},
+				},
+			)
+		} else {
+			textContent = openai.String(m.Text)
+		}
+	}
+
+	a := strings.Split(model, "-")
+
+	useSystemInsteadOfDeveloper := true
+	if len(a) > 1 && a[0][0] == 'o' {
+		useSystemInsteadOfDeveloper = false
+	}
+
+	switch m.Role {
+	case RoleSystem:
+		if useSystemInsteadOfDeveloper {
+			res = openai.ChatCompletionMessageParamUnion{
+				OfSystem: &openai.ChatCompletionSystemMessageParam{
+					Content: openai.ChatCompletionSystemMessageParamContentUnion{
+						OfString: textContent,
+					},
+				},
+			}
+		} else {
+			res = openai.ChatCompletionMessageParamUnion{
+				OfDeveloper: &openai.ChatCompletionDeveloperMessageParam{
+					Content: openai.ChatCompletionDeveloperMessageParamContentUnion{
+						OfString: textContent,
+					},
+				},
+			}
+		}
+
+	case RoleUser:
+		var name param.Opt[string]
+		if m.Name != "" {
+			name = openai.String(m.Name)
+		}
+
+		res = openai.ChatCompletionMessageParamUnion{
+			OfUser: &openai.ChatCompletionUserMessageParam{
+				Name: name,
+				Content: openai.ChatCompletionUserMessageParamContentUnion{
+					OfString:              textContent,
+					OfArrayOfContentParts: arrayOfContentParts,
+				},
+			},
+		}
+
+	case RoleAssistant:
+		var name param.Opt[string]
+		if m.Name != "" {
+			name = openai.String(m.Name)
+		}
+
+		res = openai.ChatCompletionMessageParamUnion{
+			OfAssistant: &openai.ChatCompletionAssistantMessageParam{
+				Name: name,
+				Content: openai.ChatCompletionAssistantMessageParamContentUnion{
+					OfString: textContent,
+				},
+			},
+		}
+	}
+
+	return []openai.ChatCompletionMessageParamUnion{res}
+}
+
+func toolCallToChatCompletionMessages(t ToolCall) []openai.ChatCompletionMessageParamUnion {
+	return []openai.ChatCompletionMessageParamUnion{{
+		OfAssistant: &openai.ChatCompletionAssistantMessageParam{
+			ToolCalls: []openai.ChatCompletionMessageToolCallParam{
+				{
+					ID: t.ID,
+					Function: openai.ChatCompletionMessageToolCallFunctionParam{
+						Name:      t.FunctionCall.Name,
+						Arguments: t.FunctionCall.Arguments,
+					},
+				},
+			},
+		},
+	}}
+}
+
+func toolCallResponseToChatCompletionMessages(t ToolCallResponse) []openai.ChatCompletionMessageParamUnion {
+	var refusal string
+	if t.Error != nil {
+		refusal = t.Error.Error()
+	}
+
+	result := t.Result
+	if refusal != "" {
+		if result != "" {
+			result = fmt.Sprint(result) + " (error in execution: " + refusal + ")"
+		} else {
+			result = "error in execution:" + refusal
+		}
+	}
+
+	return []openai.ChatCompletionMessageParamUnion{{
+		OfTool: &openai.ChatCompletionToolMessageParam{
+			ToolCallID: t.ID,
+			Content: openai.ChatCompletionToolMessageParamContentUnion{
+				OfString: openai.String(fmt.Sprint(result)),
+			},
+		},
+	}}
+}
+
+func responseChoiceToChatCompletionMessages(r ResponseChoice) []openai.ChatCompletionMessageParamUnion {
+	var as openai.ChatCompletionAssistantMessageParam
+
+	if r.Name != "" {
+		as.Name = openai.String(r.Name)
+	}
+	if r.Refusal != "" {
+		as.Refusal = openai.String(r.Refusal)
+	}
+
+	if r.Content != "" {
+		as.Content.OfString = openai.String(r.Content)
+	}
+
+	for _, call := range r.Calls {
+		as.ToolCalls = append(as.ToolCalls, openai.ChatCompletionMessageToolCallParam{
+			ID: call.ID,
+			Function: openai.ChatCompletionMessageToolCallFunctionParam{
+				Name:      call.FunctionCall.Name,
+				Arguments: call.FunctionCall.Arguments,
+			},
+		})
+	}
+	return []openai.ChatCompletionMessageParamUnion{
+		{
+			OfAssistant: &as,
+		},
+	}
+}
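The system-versus-developer choice in `messageToChatCompletionMessages` keys off the model name's first dash-separated token. A small sketch of that heuristic in isolation (illustrative only, with an extra empty-string guard that the diff itself does not have):

```go
package example

import "strings"

// usesDeveloperRole mirrors the heuristic above: models whose first token
// starts with 'o' (e.g. "o1-mini") get the developer role for system prompts,
// while e.g. "gpt-4o" keeps the classic system role (first token is "gpt").
func usesDeveloperRole(model string) bool {
	parts := strings.Split(model, "-")
	return len(parts) > 1 && len(parts[0]) > 0 && parts[0][0] == 'o'
}
```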
2 parse.go
@@ -1,4 +1,4 @@
-package go_llm
+package llm

 import (
 	"strings"
11 provider/anthropic/anthropic.go Normal file
@@ -0,0 +1,11 @@
// Package anthropic provides the Anthropic LLM provider.
package anthropic

import (
	llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// New creates a new Anthropic LLM provider with the given API key.
func New(key string) llm.LLM {
	return llm.Anthropic(key)
}

11 provider/google/google.go Normal file
@@ -0,0 +1,11 @@
// Package google provides the Google LLM provider.
package google

import (
	llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// New creates a new Google LLM provider with the given API key.
func New(key string) llm.LLM {
	return llm.Google(key)
}

11 provider/openai/openai.go Normal file
@@ -0,0 +1,11 @@
// Package openai provides the OpenAI LLM provider.
package openai

import (
	llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// New creates a new OpenAI LLM provider with the given API key.
func New(key string) llm.LLM {
	return llm.OpenAI(key)
}
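A brief sketch of wiring the thin provider packages above into a backend switch (the environment variable names are assumptions, not part of this change):

```go
package example

import (
	"os"

	llm "gitea.stevedudenhoeffer.com/steve/go-llm"
	"gitea.stevedudenhoeffer.com/steve/go-llm/provider/anthropic"
	"gitea.stevedudenhoeffer.com/steve/go-llm/provider/google"
	"gitea.stevedudenhoeffer.com/steve/go-llm/provider/openai"
)

// providerFor picks a backend by name; each New simply wraps the matching
// constructor in the root package.
func providerFor(name string) llm.LLM {
	switch name {
	case "anthropic":
		return anthropic.New(os.Getenv("ANTHROPIC_API_KEY"))
	case "google":
		return google.New(os.Getenv("GOOGLE_API_KEY"))
	default:
		return openai.New(os.Getenv("OPENAI_API_KEY"))
	}
}
```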
25 request.go
@@ -1,17 +1,20 @@
-package go_llm
+package llm

-import (
-	"github.com/openai/openai-go"
-)
-
-type rawAble interface {
-	toRaw() map[string]any
-	fromRaw(raw map[string]any) Input
-}
-
+// Input is the interface for conversation inputs.
+// Types that implement this interface can be part of a conversation:
+// Message, ToolCall, ToolCallResponse, and ResponseChoice.
 type Input interface {
-	toChatCompletionMessages(model string) []openai.ChatCompletionMessageParamUnion
+	// isInput is a marker method to ensure only valid types implement this interface.
+	isInput()
 }

+// Implement Input interface for all valid input types.
+func (Message) isInput() {}
+func (ToolCall) isInput() {}
+func (ToolCallResponse) isInput() {}
+func (ResponseChoice) isInput() {}
+
+// Request represents a request to a language model.
 type Request struct {
 	Conversation []Input
 	Messages     []Message
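A minimal sketch of assembling a `Request` with the new marker-interface `Input` (illustrative only; the toolbox and any provider-specific options are omitted):

```go
package example

import llm "gitea.stevedudenhoeffer.com/steve/go-llm"

// newRequest carries prior turns in Conversation (Message, ToolCall,
// ToolCallResponse, and ResponseChoice values all satisfy Input) and adds a
// fresh user message.
func newRequest(history []llm.Input, question string) llm.Request {
	return llm.Request{
		Conversation: history,
		Messages: []llm.Message{
			{Role: llm.RoleUser, Text: question},
		},
	}
}
```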
38 response.go
@@ -1,9 +1,6 @@
-package go_llm
+package llm

-import (
-	"github.com/openai/openai-go"
-)
-
+// ResponseChoice represents a single choice in a response.
 type ResponseChoice struct {
 	Index int
 	Role  Role
@@ -32,36 +29,6 @@ func (r ResponseChoice) toRaw() map[string]any {
 	return res
 }

-func (r ResponseChoice) toChatCompletionMessages(_ string) []openai.ChatCompletionMessageParamUnion {
-	var as openai.ChatCompletionAssistantMessageParam
-
-	if r.Name != "" {
-		as.Name = openai.String(r.Name)
-	}
-	if r.Refusal != "" {
-		as.Refusal = openai.String(r.Refusal)
-	}
-
-	if r.Content != "" {
-		as.Content.OfString = openai.String(r.Content)
-	}
-
-	for _, call := range r.Calls {
-		as.ToolCalls = append(as.ToolCalls, openai.ChatCompletionMessageToolCallParam{
-			ID: call.ID,
-			Function: openai.ChatCompletionMessageToolCallFunctionParam{
-				Name:      call.FunctionCall.Name,
-				Arguments: call.FunctionCall.Arguments,
-			},
-		})
-	}
-	return []openai.ChatCompletionMessageParamUnion{
-		{
-			OfAssistant: &as,
-		},
-	}
-}
-
 func (r ResponseChoice) toInput() []Input {
 	var res []Input

@@ -79,6 +46,7 @@ func (r ResponseChoice) toInput() []Input {
 	return res
 }

+// Response represents a response from a language model.
 type Response struct {
 	Choices []ResponseChoice
 }
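A sketch of walking a `Response` once it comes back from a provider's `ChatComplete` (field names follow the structs above; nothing here is prescribed by the diff):

```go
package example

import (
	"fmt"

	llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// printChoices prints each choice's text, any refusal, and any tool calls the
// model requested.
func printChoices(resp llm.Response) {
	for _, choice := range resp.Choices {
		if choice.Content != "" {
			fmt.Println("assistant:", choice.Content)
		}
		if choice.Refusal != "" {
			fmt.Println("refusal:", choice.Refusal)
		}
		for _, call := range choice.Calls {
			fmt.Printf("tool call %s: %s(%s)\n", call.ID, call.FunctionCall.Name, call.FunctionCall.Arguments)
		}
	}
}
```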
@@ -4,8 +4,8 @@ import (
 	"errors"
 	"reflect"

-	"github.com/google/generative-ai-go/genai"
 	"github.com/openai/openai-go"
+	"google.golang.org/genai"
 )

 type array struct {
@@ -5,8 +5,8 @@ import (
 	"reflect"
 	"strconv"

-	"github.com/google/generative-ai-go/genai"
 	"github.com/openai/openai-go"
+	"google.golang.org/genai"
 )

 // just enforcing that basic implements Type
@@ -5,8 +5,8 @@ import (
 	"reflect"
 	"slices"

-	"github.com/google/generative-ai-go/genai"
 	"github.com/openai/openai-go"
+	"google.golang.org/genai"
 )

 type enum struct {
@@ -4,8 +4,8 @@ import (
 	"errors"
 	"reflect"

-	"github.com/google/generative-ai-go/genai"
 	"github.com/openai/openai-go"
+	"google.golang.org/genai"
 )

 const (
134 schema/raw.go Normal file
@@ -0,0 +1,134 @@
package schema

import (
	"encoding/json"
	"fmt"
	"reflect"

	"github.com/openai/openai-go"
	"google.golang.org/genai"
)

// Raw represents a raw JSON schema that is passed through directly.
// This is used for MCP tools where we receive the schema from the server.
type Raw struct {
	schema map[string]any
}

// NewRaw creates a new Raw schema from a map.
func NewRaw(schema map[string]any) Raw {
	if schema == nil {
		schema = map[string]any{
			"type":       "object",
			"properties": map[string]any{},
		}
	}
	return Raw{schema: schema}
}

// NewRawFromJSON creates a new Raw schema from JSON bytes.
func NewRawFromJSON(data []byte) (Raw, error) {
	var schema map[string]any
	if err := json.Unmarshal(data, &schema); err != nil {
		return Raw{}, fmt.Errorf("failed to parse JSON schema: %w", err)
	}
	return NewRaw(schema), nil
}

func (r Raw) OpenAIParameters() openai.FunctionParameters {
	return openai.FunctionParameters(r.schema)
}

func (r Raw) GoogleParameters() *genai.Schema {
	return mapToGenaiSchema(r.schema)
}

func (r Raw) AnthropicInputSchema() map[string]any {
	return r.schema
}

func (r Raw) Required() bool {
	return false
}

func (r Raw) Description() string {
	if desc, ok := r.schema["description"].(string); ok {
		return desc
	}
	return ""
}

func (r Raw) FromAny(val any) (reflect.Value, error) {
	return reflect.ValueOf(val), nil
}

func (r Raw) SetValueOnField(obj reflect.Value, val reflect.Value) {
	// No-op for raw schemas
}

// mapToGenaiSchema converts a map[string]any JSON schema to genai.Schema
func mapToGenaiSchema(m map[string]any) *genai.Schema {
	if m == nil {
		return nil
	}

	schema := &genai.Schema{}

	// Type
	if t, ok := m["type"].(string); ok {
		switch t {
		case "string":
			schema.Type = genai.TypeString
		case "number":
			schema.Type = genai.TypeNumber
		case "integer":
			schema.Type = genai.TypeInteger
		case "boolean":
			schema.Type = genai.TypeBoolean
		case "array":
			schema.Type = genai.TypeArray
		case "object":
			schema.Type = genai.TypeObject
		}
	}

	// Description
	if desc, ok := m["description"].(string); ok {
		schema.Description = desc
	}

	// Enum
	if enum, ok := m["enum"].([]any); ok {
		for _, e := range enum {
			if s, ok := e.(string); ok {
				schema.Enum = append(schema.Enum, s)
			}
		}
	}

	// Properties (for objects)
	if props, ok := m["properties"].(map[string]any); ok {
		schema.Properties = make(map[string]*genai.Schema)
		for k, v := range props {
			if vm, ok := v.(map[string]any); ok {
				schema.Properties[k] = mapToGenaiSchema(vm)
			}
		}
	}

	// Required
	if req, ok := m["required"].([]any); ok {
		for _, r := range req {
			if s, ok := r.(string); ok {
				schema.Required = append(schema.Required, s)
			}
		}
	}

	// Items (for arrays)
	if items, ok := m["items"].(map[string]any); ok {
		schema.Items = mapToGenaiSchema(items)
	}

	return schema
}
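A short sketch of `Raw` in use, taking a JSON Schema as an MCP server might advertise it and handing it to each provider in its native form (the schema content itself is made up for illustration):

```go
package main

import (
	"fmt"
	"log"

	"gitea.stevedudenhoeffer.com/steve/go-llm/schema"
)

func main() {
	raw, err := schema.NewRawFromJSON([]byte(`{
		"type": "object",
		"description": "read a file from disk",
		"properties": {
			"path": {"type": "string", "description": "file path to read"}
		},
		"required": ["path"]
	}`))
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(raw.Description()) // "read a file from disk"

	_ = raw.OpenAIParameters()     // openai.FunctionParameters (map passed through)
	_ = raw.GoogleParameters()     // *genai.Schema built by mapToGenaiSchema
	_ = raw.AnthropicInputSchema() // map[string]any passed through unchanged
}
```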
@@ -3,8 +3,8 @@ package schema
 import (
 	"reflect"

-	"github.com/google/generative-ai-go/genai"
 	"github.com/openai/openai-go"
+	"google.golang.org/genai"
 )

 type Type interface {
16 toolbox.go
@@ -1,7 +1,8 @@
-package go_llm
+package llm

 import (
 	"context"
+	"encoding/json"
 	"errors"
 	"fmt"
 )
@@ -11,6 +12,7 @@ import (
 // the correct parameters.
 type ToolBox struct {
 	functions       map[string]Function
+	mcpServers      map[string]*MCPServer // tool name -> MCP server that provides it
 	dontRequireTool bool
 }
@@ -91,6 +93,18 @@ var (
 )

 func (t ToolBox) executeFunction(ctx *Context, functionName string, params string) (any, error) {
+	// Check if this is an MCP tool
+	if server, ok := t.mcpServers[functionName]; ok {
+		var args map[string]any
+		if params != "" {
+			if err := json.Unmarshal([]byte(params), &args); err != nil {
+				return nil, fmt.Errorf("failed to parse MCP tool arguments: %w", err)
+			}
+		}
+		return server.CallTool(ctx, functionName, args)
+	}
+
+	// Regular function
 	f, ok := t.functions[functionName]

 	if !ok {