Files
go-llm/cmd/llm/commands.go
Steve Dudenhoeffer 97d54c10ae Implement interactive CLI for LLM providers with chat, tools, and image support
- Add Bubble Tea-based CLI interface for LLM interactions.
- Implement `.env.example` for environment variable setup.
- Add provider, model, and tool selection screens.
- Include support for API key configuration.
- Enable chat interactions with optional image and tool support.
- Introduce core utility functions: image handling, tool execution, chat request management, and response rendering.
- Implement style customization with Lip Gloss.
2026-01-24 15:53:36 -05:00

183 lines
4.4 KiB
Go

package main
import (
"context"
"encoding/base64"
"fmt"
"net/http"
"os"
"strings"
tea "github.com/charmbracelet/bubbletea"
llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)
// Message types for async operations
// ChatResponseMsg is delivered to the Bubble Tea update loop when an async
// chat completion finishes. Exactly one of Response/Err is meaningful:
// check Err before using Response.
type ChatResponseMsg struct {
Response llm.Response
Err error
}
// ToolExecutionMsg is delivered when a batch of tool calls has been executed.
// Results holds one entry per call (including per-call errors); Err is the
// batch-level error and is always nil as currently produced by executeTools.
type ToolExecutionMsg struct {
Results []llm.ToolCallResponse
Err error
}
// ImageLoadedMsg is delivered when an image has been loaded from a path, URL,
// or base64 string. On failure Err is non-nil and Image is the zero value.
type ImageLoadedMsg struct {
Image llm.Image
Err error
}
// sendChatRequest returns a command that runs the chat completion in the
// background and delivers the outcome to the update loop as a ChatResponseMsg.
func sendChatRequest(chat llm.ChatCompletion, req llm.Request) tea.Cmd {
	return func() tea.Msg {
		response, err := chat.ChatComplete(context.Background(), req)
		return ChatResponseMsg{
			Response: response,
			Err:      err,
		}
	}
}
// executeTools returns a command that runs every tool call in resp through
// the toolbox, collecting each result (and per-call error) into a
// ToolExecutionMsg. The batch-level Err is always nil; failures are
// reported per call via ToolCallResponse.Error.
func executeTools(toolbox llm.ToolBox, req llm.Request, resp llm.ResponseChoice) tea.Cmd {
	return func() tea.Msg {
		toolCtx := llm.NewContext(context.Background(), req, &resp, nil)

		var results []llm.ToolCallResponse
		for _, call := range resp.Calls {
			output, callErr := toolbox.Execute(toolCtx, call)
			results = append(results, llm.ToolCallResponse{
				ID:     call.ID,
				Result: output,
				Error:  callErr,
			})
		}

		return ToolExecutionMsg{Results: results, Err: nil}
	}
}
// loadImageFromPath returns a command that reads an image file from disk,
// verifies via MIME sniffing that it really is an image, and delivers it
// base64-encoded in an ImageLoadedMsg.
func loadImageFromPath(path string) tea.Cmd {
	return func() tea.Msg {
		// Normalize the user-supplied path: strip whitespace and any
		// surrounding single or double quotes (common when pasting).
		cleaned := strings.Trim(strings.TrimSpace(path), "\"'")

		data, err := os.ReadFile(cleaned)
		if err != nil {
			return ImageLoadedMsg{Err: fmt.Errorf("failed to read image file: %w", err)}
		}

		// Sniff the MIME type from the file contents and reject non-images.
		mimeType := http.DetectContentType(data)
		if !strings.HasPrefix(mimeType, "image/") {
			return ImageLoadedMsg{Err: fmt.Errorf("file is not an image: %s", mimeType)}
		}

		return ImageLoadedMsg{
			Image: llm.Image{
				Base64:      base64.StdEncoding.EncodeToString(data),
				ContentType: mimeType,
			},
		}
	}
}
// loadImageFromURL returns a command that wraps a URL-referenced image in an
// ImageLoadedMsg. The URL is passed through untouched (apart from whitespace
// trimming) — no download happens here; the provider fetches it itself.
func loadImageFromURL(url string) tea.Cmd {
	return func() tea.Msg {
		trimmed := strings.TrimSpace(url)
		return ImageLoadedMsg{
			Image: llm.Image{Url: trimmed},
		}
	}
}
// loadImageFromBase64 returns a command that parses base64 image data and
// delivers it as an ImageLoadedMsg. Two input forms are accepted:
//
//   - a data URL ("data:image/png;base64,...."), whose media type is taken
//     from the header; or
//   - a raw base64 string, whose content type is sniffed from the decoded
//     bytes.
//
// On any parse failure the message carries a descriptive Err instead.
func loadImageFromBase64(data string) tea.Cmd {
	return func() tea.Msg {
		data = strings.TrimSpace(data)

		// Data-URL form: header before the first comma, payload after.
		if strings.HasPrefix(data, "data:") {
			meta, payload, found := strings.Cut(data, ",")
			if !found {
				return ImageLoadedMsg{Err: fmt.Errorf("invalid data URL format")}
			}
			mediaType := strings.TrimPrefix(meta, "data:")
			// Per RFC 2397, a data URL without the ";base64" marker carries
			// percent-encoded text, not base64 — passing it through as
			// base64 would silently corrupt the image, so reject it.
			if !strings.HasSuffix(mediaType, ";base64") {
				return ImageLoadedMsg{Err: fmt.Errorf("data URL is not base64-encoded: %s", meta)}
			}
			mediaType = strings.TrimSuffix(mediaType, ";base64")
			return ImageLoadedMsg{
				Image: llm.Image{
					Base64:      payload,
					ContentType: mediaType,
				},
			}
		}

		// Raw base64: decode so we can sniff the content type from the bytes.
		decoded, err := base64.StdEncoding.DecodeString(data)
		if err != nil {
			return ImageLoadedMsg{Err: fmt.Errorf("invalid base64 data: %w", err)}
		}
		contentType := http.DetectContentType(decoded)
		if !strings.HasPrefix(contentType, "image/") {
			return ImageLoadedMsg{Err: fmt.Errorf("data is not an image: %s", contentType)}
		}
		return ImageLoadedMsg{
			Image: llm.Image{
				Base64:      data,
				ContentType: contentType,
			},
		}
	}
}
// buildRequest assembles a chat request from the model state and the user's
// input text: a system prompt, the user message with any pending images, the
// configured temperature, and — when tools are enabled — the toolbox.
func buildRequest(m *Model, userText string) llm.Request {
	req := llm.Request{
		Conversation: m.conversation,
		Messages: []llm.Message{
			{Role: llm.RoleSystem, Text: m.systemPrompt},
			{
				Role:   llm.RoleUser,
				Text:   userText,
				Images: m.pendingImages,
			},
		},
		Temperature: m.temperature,
	}

	// Only attach the toolbox when tools are enabled and at least one
	// function is registered. WithRequireTool(false) presumably lets the
	// model answer without invoking a tool — confirm against go-llm docs.
	if m.toolsEnabled && len(m.toolbox.Functions()) > 0 {
		req.Toolbox = m.toolbox.WithRequireTool(false)
	}

	return req
}
// buildFollowUpRequest builds a follow-up request after tool execution by
// delegating to Request.NextRequest with the assistant's response choice and
// the collected tool results.
//
// NOTE(review): the Model parameter is unused here — presumably kept for
// signature symmetry with buildRequest; confirm before removing.
func buildFollowUpRequest(m *Model, previousReq llm.Request, resp llm.ResponseChoice, toolResults []llm.ToolCallResponse) llm.Request {
return previousReq.NextRequest(resp, toolResults)
}