Implement interactive CLI for LLM providers with chat, tools, and image support

- Add Bubble Tea-based CLI interface for LLM interactions.
- Add `.env.example` for environment variable setup.
- Add provider, model, and tool selection screens.
- Include support for API key configuration.
- Enable chat interactions with optional image and tool support.
- Introduce core utility functions for image handling, tool execution, chat request management, and response rendering (see the sketch below).
- Add Lip Gloss-based styling for the interface.
2026-01-24 15:53:36 -05:00
parent bf7c86ab2a
commit 97d54c10ae
12 changed files with 1550 additions and 0 deletions
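
The files below wire this flow into Bubble Tea. As a point of reference, here is a minimal non-TUI sketch of the same go-llm calls (provider constructor, `ModelVersion`, `ChatComplete`), written under the assumption that the library behaves as it is used in this commit; the model name is only an example:

```go
package main

import (
	"context"
	"fmt"
	"os"

	llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

func main() {
	// Pick a provider from an API key, as selectProvider does in model.go below.
	provider := llm.OpenAI(os.Getenv("OPENAI_API_KEY"))

	// Bind a concrete model, as selectModel does.
	chat, err := provider.ModelVersion("gpt-4o-mini")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	// Build and send a plain chat request, mirroring buildRequest and sendChatRequest.
	req := llm.Request{
		Messages: []llm.Message{
			{Role: llm.RoleSystem, Text: "You are a helpful assistant."},
			{Role: llm.RoleUser, Text: "Say hello in one sentence."},
		},
	}
	resp, err := chat.ChatComplete(context.Background(), req)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	if len(resp.Choices) > 0 {
		fmt.Println(resp.Choices[0].Content)
	}
}
```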

cmd/llm/.env.example Normal file (+11 lines)

@@ -0,0 +1,11 @@
# go-llm CLI Environment Variables
# Copy this file to .env and fill in your API keys
# OpenAI API Key (https://platform.openai.com/api-keys)
OPENAI_API_KEY=
# Anthropic API Key (https://console.anthropic.com/settings/keys)
ANTHROPIC_API_KEY=
# Google AI API Key (https://aistudio.google.com/apikey)
GOOGLE_API_KEY=
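
These variables are read at startup via godotenv. A small sketch of the lookup, mirroring main.go and InitialModel below; `godotenv.Load` returns an error you can ignore when no `.env` file exists and does not override variables already exported in the shell:

```go
package main

import (
	"fmt"
	"os"

	"github.com/joho/godotenv"
)

func main() {
	// Load .env if present; variables already exported in the shell take precedence.
	_ = godotenv.Load()

	for _, v := range []string{"OPENAI_API_KEY", "ANTHROPIC_API_KEY", "GOOGLE_API_KEY"} {
		if os.Getenv(v) == "" {
			fmt.Printf("%s is not set; the CLI will prompt for it on the API key screen\n", v)
		}
	}
}
```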

cmd/llm/commands.go Normal file (+182 lines)

@@ -0,0 +1,182 @@
package main
import (
"context"
"encoding/base64"
"fmt"
"net/http"
"os"
"strings"
tea "github.com/charmbracelet/bubbletea"
llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)
// Message types for async operations
// ChatResponseMsg contains the response from a chat completion
type ChatResponseMsg struct {
Response llm.Response
Err error
}
// ToolExecutionMsg contains results from tool execution
type ToolExecutionMsg struct {
Results []llm.ToolCallResponse
Err error
}
// ImageLoadedMsg contains a loaded image
type ImageLoadedMsg struct {
Image llm.Image
Err error
}
// sendChatRequest sends a chat completion request
func sendChatRequest(chat llm.ChatCompletion, req llm.Request) tea.Cmd {
return func() tea.Msg {
resp, err := chat.ChatComplete(context.Background(), req)
return ChatResponseMsg{Response: resp, Err: err}
}
}
// executeTools executes tool calls and returns results
func executeTools(toolbox llm.ToolBox, req llm.Request, resp llm.ResponseChoice) tea.Cmd {
return func() tea.Msg {
ctx := llm.NewContext(context.Background(), req, &resp, nil)
var results []llm.ToolCallResponse
for _, call := range resp.Calls {
result, err := toolbox.Execute(ctx, call)
results = append(results, llm.ToolCallResponse{
ID: call.ID,
Result: result,
Error: err,
})
}
return ToolExecutionMsg{Results: results, Err: nil}
}
}
// loadImageFromPath loads an image from a file path
func loadImageFromPath(path string) tea.Cmd {
return func() tea.Msg {
// Clean up the path
path = strings.TrimSpace(path)
path = strings.Trim(path, "\"'")
// Read the file
data, err := os.ReadFile(path)
if err != nil {
return ImageLoadedMsg{Err: fmt.Errorf("failed to read image file: %w", err)}
}
// Detect content type
contentType := http.DetectContentType(data)
if !strings.HasPrefix(contentType, "image/") {
return ImageLoadedMsg{Err: fmt.Errorf("file is not an image: %s", contentType)}
}
// Base64 encode
encoded := base64.StdEncoding.EncodeToString(data)
return ImageLoadedMsg{
Image: llm.Image{
Base64: encoded,
ContentType: contentType,
},
}
}
}
// loadImageFromURL loads an image from a URL
func loadImageFromURL(url string) tea.Cmd {
return func() tea.Msg {
url = strings.TrimSpace(url)
// For URL images, we can just use the URL directly
return ImageLoadedMsg{
Image: llm.Image{
Url: url,
},
}
}
}
// loadImageFromBase64 loads an image from base64 data
func loadImageFromBase64(data string) tea.Cmd {
return func() tea.Msg {
data = strings.TrimSpace(data)
// Check if it's a data URL
if strings.HasPrefix(data, "data:") {
// Parse data URL: data:image/png;base64,....
parts := strings.SplitN(data, ",", 2)
if len(parts) != 2 {
return ImageLoadedMsg{Err: fmt.Errorf("invalid data URL format")}
}
// Extract content type from first part
mediaType := strings.TrimPrefix(parts[0], "data:")
mediaType = strings.TrimSuffix(mediaType, ";base64")
return ImageLoadedMsg{
Image: llm.Image{
Base64: parts[1],
ContentType: mediaType,
},
}
}
// Assume it's raw base64, try to detect content type
decoded, err := base64.StdEncoding.DecodeString(data)
if err != nil {
return ImageLoadedMsg{Err: fmt.Errorf("invalid base64 data: %w", err)}
}
contentType := http.DetectContentType(decoded)
if !strings.HasPrefix(contentType, "image/") {
return ImageLoadedMsg{Err: fmt.Errorf("data is not an image: %s", contentType)}
}
return ImageLoadedMsg{
Image: llm.Image{
Base64: data,
ContentType: contentType,
},
}
}
}
// buildRequest builds a chat request from the current state
func buildRequest(m *Model, userText string) llm.Request {
// Create the user message with any pending images
userMsg := llm.Message{
Role: llm.RoleUser,
Text: userText,
Images: m.pendingImages,
}
req := llm.Request{
Conversation: m.conversation,
Messages: []llm.Message{
{Role: llm.RoleSystem, Text: m.systemPrompt},
userMsg,
},
Temperature: m.temperature,
}
// Add toolbox if enabled
if m.toolsEnabled && len(m.toolbox.Functions()) > 0 {
req.Toolbox = m.toolbox.WithRequireTool(false)
}
return req
}
// buildFollowUpRequest builds a follow-up request after tool execution
func buildFollowUpRequest(m *Model, previousReq llm.Request, resp llm.ResponseChoice, toolResults []llm.ToolCallResponse) llm.Request {
return previousReq.NextRequest(resp, toolResults)
}
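
For context, the same helpers compose into a simple synchronous loop outside Bubble Tea. This is a sketch under the assumption that go-llm behaves as it is used above (tool calls arrive on a response choice, and `Request.NextRequest` folds the results into a follow-up request); `runWithTools` is a hypothetical helper, not part of this commit:

```go
package main

import (
	"context"
	"fmt"

	llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// runWithTools sends req, executes any requested tool calls, and loops until the
// model returns plain text. Error handling is abbreviated compared to the TUI code.
func runWithTools(ctx context.Context, chat llm.ChatCompletion, toolbox llm.ToolBox, req llm.Request) (string, error) {
	for {
		resp, err := chat.ChatComplete(ctx, req)
		if err != nil {
			return "", err
		}
		if len(resp.Choices) == 0 {
			return "", fmt.Errorf("no response choices returned")
		}
		choice := resp.Choices[0]
		if len(choice.Calls) == 0 {
			return choice.Content, nil
		}

		// Execute each requested tool, mirroring executeTools above.
		llmCtx := llm.NewContext(ctx, req, &choice, nil)
		var results []llm.ToolCallResponse
		for _, call := range choice.Calls {
			result, err := toolbox.Execute(llmCtx, call)
			results = append(results, llm.ToolCallResponse{ID: call.ID, Result: result, Error: err})
		}

		// Fold the results into a follow-up request, as buildFollowUpRequest does.
		req = req.NextRequest(choice, results)
	}
}
```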

cmd/llm/main.go Normal file (+25 lines)

@@ -0,0 +1,25 @@
package main
import (
"fmt"
"os"
tea "github.com/charmbracelet/bubbletea"
"github.com/joho/godotenv"
)
func main() {
// Load .env file if it exists (ignore error if not found)
_ = godotenv.Load()
p := tea.NewProgram(
InitialModel(),
tea.WithAltScreen(),
tea.WithMouseCellMotion(),
)
if _, err := p.Run(); err != nil {
fmt.Printf("Error running program: %v\n", err)
os.Exit(1)
}
}

cmd/llm/model.go Normal file (+295 lines)

@@ -0,0 +1,295 @@
package main
import (
"os"
"github.com/charmbracelet/bubbles/textinput"
"github.com/charmbracelet/bubbles/viewport"
tea "github.com/charmbracelet/bubbletea"
llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)
// State represents the current view/screen of the application
type State int
const (
StateChat State = iota
StateProviderSelect
StateModelSelect
StateImageInput
StateToolsPanel
StateSettings
StateAPIKeyInput
)
// DisplayMessage represents a message for display in the UI
type DisplayMessage struct {
Role llm.Role
Content string
Images int // number of images attached
}
// ProviderInfo contains information about a provider
type ProviderInfo struct {
Name string
EnvVar string
Models []string
HasAPIKey bool
ModelIndex int
}
// Model is the main Bubble Tea model
type Model struct {
// State
state State
previousState State
// Provider
provider llm.LLM
providerName string
chat llm.ChatCompletion
modelName string
apiKeys map[string]string
providers []ProviderInfo
providerIndex int
// Conversation
conversation []llm.Input
messages []DisplayMessage
// Tools
toolbox llm.ToolBox
toolsEnabled bool
// Settings
systemPrompt string
temperature *float64
// Pending images
pendingImages []llm.Image
// UI Components
input textinput.Model
viewport viewport.Model
viewportReady bool
// Selection state (for lists)
listIndex int
listItems []string
// Dimensions
width int
height int
// Loading state
loading bool
err error
// For API key input
apiKeyInput textinput.Model
}
// InitialModel creates and returns the initial model
func InitialModel() Model {
ti := textinput.New()
ti.Placeholder = "Type your message..."
ti.Focus()
ti.CharLimit = 4096
ti.Width = 60
aki := textinput.New()
aki.Placeholder = "Enter API key..."
aki.CharLimit = 256
aki.Width = 60
aki.EchoMode = textinput.EchoPassword
// Initialize providers with environment variable checks
providers := []ProviderInfo{
{
Name: "OpenAI",
EnvVar: "OPENAI_API_KEY",
Models: []string{
"gpt-4.1",
"gpt-4.1-mini",
"gpt-4.1-nano",
"gpt-4o",
"gpt-4o-mini",
"gpt-4-turbo",
"gpt-3.5-turbo",
"o1",
"o1-mini",
"o1-preview",
"o3-mini",
},
},
{
Name: "Anthropic",
EnvVar: "ANTHROPIC_API_KEY",
Models: []string{
"claude-sonnet-4-20250514",
"claude-opus-4-20250514",
"claude-3-7-sonnet-20250219",
"claude-3-5-sonnet-20241022",
"claude-3-5-haiku-20241022",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307",
},
},
{
Name: "Google",
EnvVar: "GOOGLE_API_KEY",
Models: []string{
"gemini-2.0-flash",
"gemini-2.0-flash-lite",
"gemini-1.5-pro",
"gemini-1.5-flash",
"gemini-1.5-flash-8b",
"gemini-1.0-pro",
},
},
}
// Check for API keys in environment
apiKeys := make(map[string]string)
for i := range providers {
if key := os.Getenv(providers[i].EnvVar); key != "" {
apiKeys[providers[i].Name] = key
providers[i].HasAPIKey = true
}
}
m := Model{
state: StateProviderSelect,
input: ti,
apiKeyInput: aki,
apiKeys: apiKeys,
providers: providers,
systemPrompt: "You are a helpful assistant.",
toolbox: createDemoToolbox(),
toolsEnabled: false,
messages: []DisplayMessage{},
conversation: []llm.Input{},
}
// Build list items for provider selection
m.listItems = make([]string, len(providers))
for i, p := range providers {
status := " (no key)"
if p.HasAPIKey {
status = " (ready)"
}
m.listItems[i] = p.Name + status
}
return m
}
// Init initializes the model
func (m Model) Init() tea.Cmd {
return textinput.Blink
}
// selectProvider sets up the selected provider
func (m *Model) selectProvider(index int) error {
if index < 0 || index >= len(m.providers) {
return nil
}
p := m.providers[index]
key, ok := m.apiKeys[p.Name]
if !ok || key == "" {
return nil
}
m.providerName = p.Name
m.providerIndex = index
switch p.Name {
case "OpenAI":
m.provider = llm.OpenAI(key)
case "Anthropic":
m.provider = llm.Anthropic(key)
case "Google":
m.provider = llm.Google(key)
}
// Select default model
if len(p.Models) > 0 {
return m.selectModel(p.ModelIndex)
}
return nil
}
// selectModel sets the current model
func (m *Model) selectModel(index int) error {
if m.provider == nil {
return nil
}
p := m.providers[m.providerIndex]
if index < 0 || index >= len(p.Models) {
return nil
}
modelName := p.Models[index]
chat, err := m.provider.ModelVersion(modelName)
if err != nil {
return err
}
m.chat = chat
m.modelName = modelName
m.providers[m.providerIndex].ModelIndex = index
return nil
}
// newConversation resets the conversation
func (m *Model) newConversation() {
m.conversation = []llm.Input{}
m.messages = []DisplayMessage{}
m.pendingImages = []llm.Image{}
m.err = nil
}
// addUserMessage adds a user message to the conversation
func (m *Model) addUserMessage(text string, images []llm.Image) {
msg := llm.Message{
Role: llm.RoleUser,
Text: text,
Images: images,
}
m.conversation = append(m.conversation, msg)
m.messages = append(m.messages, DisplayMessage{
Role: llm.RoleUser,
Content: text,
Images: len(images),
})
}
// addAssistantMessage adds an assistant message to the conversation
func (m *Model) addAssistantMessage(content string) {
m.messages = append(m.messages, DisplayMessage{
Role: llm.RoleAssistant,
Content: content,
})
}
// addToolCallMessage adds a tool call message to display
func (m *Model) addToolCallMessage(name string, args string) {
m.messages = append(m.messages, DisplayMessage{
Role: llm.Role("tool_call"),
Content: name + ": " + args,
})
}
// addToolResultMessage adds a tool result message to display
func (m *Model) addToolResultMessage(name string, result string) {
m.messages = append(m.messages, DisplayMessage{
Role: llm.Role("tool_result"),
Content: name + " -> " + result,
})
}

cmd/llm/styles.go Normal file (+113 lines)

@@ -0,0 +1,113 @@
package main
import (
"github.com/charmbracelet/lipgloss"
)
var (
// Colors
primaryColor = lipgloss.Color("205")
secondaryColor = lipgloss.Color("39")
accentColor = lipgloss.Color("212")
mutedColor = lipgloss.Color("241")
errorColor = lipgloss.Color("196")
successColor = lipgloss.Color("82")
// App styles
appStyle = lipgloss.NewStyle().Padding(1, 2)
// Header
headerStyle = lipgloss.NewStyle().
Bold(true).
Foreground(primaryColor).
BorderStyle(lipgloss.NormalBorder()).
BorderBottom(true).
BorderForeground(mutedColor).
Padding(0, 1)
// Provider badge
providerBadgeStyle = lipgloss.NewStyle().
Background(secondaryColor).
Foreground(lipgloss.Color("0")).
Padding(0, 1).
Bold(true)
// Messages
systemMsgStyle = lipgloss.NewStyle().
Foreground(mutedColor).
Italic(true).
Padding(0, 1)
userMsgStyle = lipgloss.NewStyle().
Foreground(secondaryColor).
Padding(0, 1)
assistantMsgStyle = lipgloss.NewStyle().
Foreground(lipgloss.Color("255")).
Padding(0, 1)
roleLabelStyle = lipgloss.NewStyle().
Bold(true).
Width(12)
// Tool calls
toolCallStyle = lipgloss.NewStyle().
Foreground(accentColor).
Italic(true).
Padding(0, 1)
toolResultStyle = lipgloss.NewStyle().
Foreground(successColor).
Padding(0, 1)
// Input area
inputStyle = lipgloss.NewStyle().
BorderStyle(lipgloss.RoundedBorder()).
BorderForeground(primaryColor).
Padding(0, 1)
inputHelpStyle = lipgloss.NewStyle().
Foreground(mutedColor).
Italic(true)
// Error
errorStyle = lipgloss.NewStyle().
Foreground(errorColor).
Bold(true)
// Loading
loadingStyle = lipgloss.NewStyle().
Foreground(accentColor).
Italic(true)
// List selection
selectedItemStyle = lipgloss.NewStyle().
Foreground(primaryColor).
Bold(true)
normalItemStyle = lipgloss.NewStyle().
Foreground(lipgloss.Color("255"))
// Settings panel
settingLabelStyle = lipgloss.NewStyle().
Foreground(secondaryColor).
Width(15)
settingValueStyle = lipgloss.NewStyle().
Foreground(lipgloss.Color("255"))
// Help text
helpStyle = lipgloss.NewStyle().
Foreground(mutedColor).
Padding(1, 0)
// Image indicator
imageIndicatorStyle = lipgloss.NewStyle().
Foreground(accentColor).
Bold(true)
// Viewport
viewportStyle = lipgloss.NewStyle().
BorderStyle(lipgloss.NormalBorder()).
BorderForeground(mutedColor)
)

cmd/llm/tools.go Normal file (+105 lines)

@@ -0,0 +1,105 @@
package main
import (
"fmt"
"math"
"strconv"
"strings"
"time"
llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)
// TimeParams is the parameter struct for the GetTime function
type TimeParams struct{}
// GetTime returns the current time
func GetTime(_ *llm.Context, _ TimeParams) (any, error) {
return time.Now().Format("Monday, January 2, 2006 3:04:05 PM MST"), nil
}
// CalcParams is the parameter struct for the Calculate function
type CalcParams struct {
A float64 `json:"a" description:"First number"`
B float64 `json:"b" description:"Second number"`
Op string `json:"op" description:"Operation: add, subtract, multiply, divide, power, sqrt, mod"`
}
// Calculate performs basic math operations
func Calculate(_ *llm.Context, params CalcParams) (any, error) {
switch strings.ToLower(params.Op) {
case "add", "+":
return params.A + params.B, nil
case "subtract", "sub", "-":
return params.A - params.B, nil
case "multiply", "mul", "*":
return params.A * params.B, nil
case "divide", "div", "/":
if params.B == 0 {
return nil, fmt.Errorf("division by zero")
}
return params.A / params.B, nil
case "power", "pow", "^":
return math.Pow(params.A, params.B), nil
case "sqrt":
if params.A < 0 {
return nil, fmt.Errorf("cannot take square root of negative number")
}
return math.Sqrt(params.A), nil
case "mod", "%":
if params.B == 0 {
return nil, fmt.Errorf("modulo by zero")
}
return math.Mod(params.A, params.B), nil
default:
return nil, fmt.Errorf("unknown operation: %s", params.Op)
}
}
// WeatherParams is the parameter struct for the GetWeather function
type WeatherParams struct {
Location string `json:"location" description:"City name or location"`
}
// GetWeather returns mock weather data (for demo purposes)
func GetWeather(_ *llm.Context, params WeatherParams) (any, error) {
// This is a demo function - returns mock data
weathers := []string{"sunny", "cloudy", "rainy", "partly cloudy", "windy"}
temps := []int{65, 72, 58, 80, 45}
// Use location string to deterministically pick weather
idx := len(params.Location) % len(weathers)
return map[string]any{
"location": params.Location,
"temperature": strconv.Itoa(temps[idx]) + "F",
"condition": weathers[idx],
"humidity": "45%",
"note": "This is mock data for demonstration purposes",
}, nil
}
// RandomNumberParams is the parameter struct for the RandomNumber function
type RandomNumberParams struct {
Min int `json:"min" description:"Minimum value (inclusive)"`
Max int `json:"max" description:"Maximum value (inclusive)"`
}
// RandomNumber generates a pseudo-random number (using current time nanoseconds)
func RandomNumber(_ *llm.Context, params RandomNumberParams) (any, error) {
if params.Min > params.Max {
return nil, fmt.Errorf("min cannot be greater than max")
}
// Simple pseudo-random using time
n := time.Now().UnixNano()
rangeSize := params.Max - params.Min + 1
result := params.Min + int(n%int64(rangeSize))
return result, nil
}
// createDemoToolbox creates a toolbox with demo tools for testing
func createDemoToolbox() llm.ToolBox {
return llm.NewToolBox(
llm.NewFunction("get_time", "Get the current date and time", GetTime),
llm.NewFunction("calculate", "Perform basic math operations (add, subtract, multiply, divide, power, sqrt, mod)", Calculate),
llm.NewFunction("get_weather", "Get weather information for a location (demo data)", GetWeather),
llm.NewFunction("random_number", "Generate a random number between min and max", RandomNumber),
)
}
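
Extending the toolbox follows the same pattern: a parameter struct with `json` and `description` tags, a `func(*llm.Context, Params) (any, error)`, and a registration via `llm.NewFunction`. A sketch with a hypothetical `echo` tool (not part of this commit):

```go
// EchoParams is the parameter struct for the hypothetical Echo tool.
type EchoParams struct {
	Text string `json:"text" description:"Text to echo back unchanged"`
}

// Echo returns its input unchanged; handy for verifying that tool calling works end to end.
func Echo(_ *llm.Context, params EchoParams) (any, error) {
	return params.Text, nil
}

// createExtendedToolbox registers some of the demo tools plus the hypothetical echo tool.
func createExtendedToolbox() llm.ToolBox {
	return llm.NewToolBox(
		llm.NewFunction("get_time", "Get the current date and time", GetTime),
		llm.NewFunction("calculate", "Perform basic math operations", Calculate),
		llm.NewFunction("echo", "Echo the provided text back to the model", Echo),
	)
}
```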

cmd/llm/update.go Normal file (+435 lines)

@@ -0,0 +1,435 @@
package main
import (
"fmt"
"strings"
"github.com/charmbracelet/bubbles/textinput"
"github.com/charmbracelet/bubbles/viewport"
tea "github.com/charmbracelet/bubbletea"
llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)
// pendingRequest stores the request being processed for follow-up
var pendingRequest llm.Request
var pendingResponse llm.ResponseChoice
// Update handles messages and updates the model
func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
var cmd tea.Cmd
var cmds []tea.Cmd
switch msg := msg.(type) {
case tea.KeyMsg:
return m.handleKeyMsg(msg)
case tea.WindowSizeMsg:
m.width = msg.Width
m.height = msg.Height
headerHeight := 3
footerHeight := 4
verticalMargins := headerHeight + footerHeight
if !m.viewportReady {
m.viewport = viewport.New(msg.Width-4, msg.Height-verticalMargins)
m.viewport.HighPerformanceRendering = false
m.viewportReady = true
} else {
m.viewport.Width = msg.Width - 4
m.viewport.Height = msg.Height - verticalMargins
}
m.input.Width = msg.Width - 6
m.apiKeyInput.Width = msg.Width - 6
m.viewport.SetContent(m.renderMessages())
case ChatResponseMsg:
m.loading = false
if msg.Err != nil {
m.err = msg.Err
return m, nil
}
if len(msg.Response.Choices) == 0 {
m.err = fmt.Errorf("no response choices returned")
return m, nil
}
choice := msg.Response.Choices[0]
// Check for tool calls
if len(choice.Calls) > 0 && m.toolsEnabled {
// Store for follow-up
pendingResponse = choice
// Add assistant's response to conversation if there's content
if choice.Content != "" {
m.addAssistantMessage(choice.Content)
}
// Display tool calls
for _, call := range choice.Calls {
m.addToolCallMessage(call.FunctionCall.Name, call.FunctionCall.Arguments)
}
m.viewport.SetContent(m.renderMessages())
m.viewport.GotoBottom()
// Execute tools
m.loading = true
return m, executeTools(m.toolbox, pendingRequest, choice)
}
// Regular response - add to conversation and display
m.conversation = append(m.conversation, choice)
m.addAssistantMessage(choice.Content)
m.viewport.SetContent(m.renderMessages())
m.viewport.GotoBottom()
case ToolExecutionMsg:
if msg.Err != nil {
m.loading = false
m.err = msg.Err
return m, nil
}
// Display tool results
for i, result := range msg.Results {
name := pendingResponse.Calls[i].FunctionCall.Name
resultStr := fmt.Sprintf("%v", result.Result)
if result.Error != nil {
resultStr = "Error: " + result.Error.Error()
}
m.addToolResultMessage(name, resultStr)
}
// Add tool call responses to conversation
for _, result := range msg.Results {
m.conversation = append(m.conversation, result)
}
// Add the assistant's response to conversation
m.conversation = append(m.conversation, pendingResponse)
m.viewport.SetContent(m.renderMessages())
m.viewport.GotoBottom()
// Send follow-up request
followUp := buildFollowUpRequest(&m, pendingRequest, pendingResponse, msg.Results)
pendingRequest = followUp
return m, sendChatRequest(m.chat, followUp)
case ImageLoadedMsg:
if msg.Err != nil {
m.err = msg.Err
m.state = m.previousState
return m, nil
}
m.pendingImages = append(m.pendingImages, msg.Image)
m.state = m.previousState
m.err = nil
default:
// Update text input
if m.state == StateChat {
m.input, cmd = m.input.Update(msg)
cmds = append(cmds, cmd)
} else if m.state == StateAPIKeyInput {
m.apiKeyInput, cmd = m.apiKeyInput.Update(msg)
cmds = append(cmds, cmd)
}
}
return m, tea.Batch(cmds...)
}
// handleKeyMsg handles keyboard input
func (m Model) handleKeyMsg(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
// Global key handling
switch msg.String() {
case "ctrl+c":
return m, tea.Quit
case "esc":
if m.state != StateChat {
m.state = StateChat
m.input.Focus()
return m, nil
}
return m, tea.Quit
}
// State-specific key handling
switch m.state {
case StateChat:
return m.handleChatKeys(msg)
case StateProviderSelect:
return m.handleProviderSelectKeys(msg)
case StateModelSelect:
return m.handleModelSelectKeys(msg)
case StateImageInput:
return m.handleImageInputKeys(msg)
case StateToolsPanel:
return m.handleToolsPanelKeys(msg)
case StateSettings:
return m.handleSettingsKeys(msg)
case StateAPIKeyInput:
return m.handleAPIKeyInputKeys(msg)
}
return m, nil
}
// handleChatKeys handles keys in chat state
func (m Model) handleChatKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
switch msg.String() {
case "enter":
if m.loading {
return m, nil
}
text := strings.TrimSpace(m.input.Value())
if text == "" {
return m, nil
}
if m.chat == nil {
m.err = fmt.Errorf("no model selected - press Ctrl+P to select a provider")
return m, nil
}
// Build and send request
req := buildRequest(&m, text)
pendingRequest = req
// Add user message to display
m.addUserMessage(text, m.pendingImages)
// Clear input and pending images
m.input.Reset()
m.pendingImages = nil
m.err = nil
m.loading = true
m.viewport.SetContent(m.renderMessages())
m.viewport.GotoBottom()
return m, sendChatRequest(m.chat, req)
case "ctrl+i":
m.previousState = StateChat
m.state = StateImageInput
m.input.SetValue("")
m.input.Placeholder = "Enter image path or URL..."
return m, nil
case "ctrl+t":
m.state = StateToolsPanel
return m, nil
case "ctrl+p":
m.state = StateProviderSelect
m.listIndex = m.providerIndex
return m, nil
case "ctrl+m":
if m.provider == nil {
m.err = fmt.Errorf("select a provider first")
return m, nil
}
m.state = StateModelSelect
m.listItems = m.providers[m.providerIndex].Models
m.listIndex = m.providers[m.providerIndex].ModelIndex
return m, nil
case "ctrl+s":
m.state = StateSettings
return m, nil
case "ctrl+n":
m.newConversation()
m.viewport.SetContent(m.renderMessages())
return m, nil
case "up", "down", "pgup", "pgdown":
var cmd tea.Cmd
m.viewport, cmd = m.viewport.Update(msg)
return m, cmd
default:
var cmd tea.Cmd
m.input, cmd = m.input.Update(msg)
return m, cmd
}
}
// handleProviderSelectKeys handles keys in provider selection state
func (m Model) handleProviderSelectKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
switch msg.String() {
case "up", "k":
if m.listIndex > 0 {
m.listIndex--
}
case "down", "j":
if m.listIndex < len(m.providers)-1 {
m.listIndex++
}
case "enter":
p := m.providers[m.listIndex]
if !p.HasAPIKey {
// Need to get API key
m.state = StateAPIKeyInput
m.apiKeyInput.Focus()
m.apiKeyInput.SetValue("")
return m, textinput.Blink
}
err := m.selectProvider(m.listIndex)
if err != nil {
m.err = err
return m, nil
}
m.state = StateChat
m.input.Focus()
m.newConversation()
return m, nil
}
return m, nil
}
// handleAPIKeyInputKeys handles keys in API key input state
func (m Model) handleAPIKeyInputKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
switch msg.String() {
case "enter":
key := strings.TrimSpace(m.apiKeyInput.Value())
if key == "" {
return m, nil
}
// Store the API key
p := m.providers[m.listIndex]
m.apiKeys[p.Name] = key
m.providers[m.listIndex].HasAPIKey = true
// Update list items
for i, prov := range m.providers {
status := " (no key)"
if prov.HasAPIKey {
status = " (ready)"
}
m.listItems[i] = prov.Name + status
}
// Select the provider
err := m.selectProvider(m.listIndex)
if err != nil {
m.err = err
return m, nil
}
m.state = StateChat
m.input.Focus()
m.newConversation()
return m, nil
default:
var cmd tea.Cmd
m.apiKeyInput, cmd = m.apiKeyInput.Update(msg)
return m, cmd
}
}
// handleModelSelectKeys handles keys in model selection state
func (m Model) handleModelSelectKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
switch msg.String() {
case "up", "k":
if m.listIndex > 0 {
m.listIndex--
}
case "down", "j":
if m.listIndex < len(m.listItems)-1 {
m.listIndex++
}
case "enter":
err := m.selectModel(m.listIndex)
if err != nil {
m.err = err
return m, nil
}
m.state = StateChat
m.input.Focus()
}
return m, nil
}
// handleImageInputKeys handles keys in image input state
func (m Model) handleImageInputKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
switch msg.String() {
case "enter":
input := strings.TrimSpace(m.input.Value())
if input == "" {
m.state = m.previousState
m.input.Placeholder = "Type your message..."
return m, nil
}
m.input.Placeholder = "Type your message..."
m.input.Reset()
// Determine input type and load
if strings.HasPrefix(input, "http://") || strings.HasPrefix(input, "https://") {
return m, loadImageFromURL(input)
} else if strings.HasPrefix(input, "data:") || (len(input) > 100 && !strings.Contains(input, "/") && !strings.Contains(input, "\\")) {
return m, loadImageFromBase64(input)
} else {
return m, loadImageFromPath(input)
}
default:
var cmd tea.Cmd
m.input, cmd = m.input.Update(msg)
return m, cmd
}
}
// handleToolsPanelKeys handles keys in tools panel state
func (m Model) handleToolsPanelKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
switch msg.String() {
case "t":
m.toolsEnabled = !m.toolsEnabled
case "enter", "q":
m.state = StateChat
m.input.Focus()
}
return m, nil
}
// handleSettingsKeys handles keys in settings state
func (m Model) handleSettingsKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
switch msg.String() {
case "1":
// Set temperature to nil (default)
m.temperature = nil
case "2":
t := 0.0
m.temperature = &t
case "3":
t := 0.5
m.temperature = &t
case "4":
t := 0.7
m.temperature = &t
case "5":
t := 1.0
m.temperature = &t
case "enter", "q":
m.state = StateChat
m.input.Focus()
}
return m, nil
}

cmd/llm/view.go Normal file (+296 lines)

@@ -0,0 +1,296 @@
package main
import (
"fmt"
"strings"
"github.com/charmbracelet/lipgloss"
llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)
// View renders the current state
func (m Model) View() string {
switch m.state {
case StateProviderSelect:
return m.renderProviderSelect()
case StateModelSelect:
return m.renderModelSelect()
case StateImageInput:
return m.renderImageInput()
case StateToolsPanel:
return m.renderToolsPanel()
case StateSettings:
return m.renderSettings()
case StateAPIKeyInput:
return m.renderAPIKeyInput()
default:
return m.renderChat()
}
}
// renderChat renders the main chat view
func (m Model) renderChat() string {
var b strings.Builder
// Header
provider := m.providerName
if provider == "" {
provider = "None"
}
model := m.modelName
if model == "" {
model = "None"
}
header := headerStyle.Render(fmt.Sprintf("go-llm CLI %s",
providerBadgeStyle.Render(fmt.Sprintf("%s/%s", provider, model))))
b.WriteString(header)
b.WriteString("\n")
// Messages viewport
if m.viewportReady {
b.WriteString(m.viewport.View())
b.WriteString("\n")
}
// Image indicator
if len(m.pendingImages) > 0 {
b.WriteString(imageIndicatorStyle.Render(fmt.Sprintf(" [%d image(s) attached]", len(m.pendingImages))))
b.WriteString("\n")
}
// Error
if m.err != nil {
b.WriteString(errorStyle.Render(" Error: " + m.err.Error()))
b.WriteString("\n")
}
// Loading
if m.loading {
b.WriteString(loadingStyle.Render(" Thinking..."))
b.WriteString("\n")
}
// Input
inputBox := inputStyle.Render(m.input.View())
b.WriteString(inputBox)
b.WriteString("\n")
// Help
help := inputHelpStyle.Render("Enter: send | Ctrl+I: image | Ctrl+T: tools | Ctrl+P: provider | Ctrl+M: model | Ctrl+S: settings | Ctrl+N: new | Esc: quit")
b.WriteString(help)
return appStyle.Render(b.String())
}
// renderMessages renders all messages for the viewport
func (m Model) renderMessages() string {
var b strings.Builder
b.WriteString(systemMsgStyle.Render("[System] " + m.systemPrompt))
b.WriteString("\n\n")
if len(m.messages) == 0 {
b.WriteString(lipgloss.NewStyle().Foreground(mutedColor).Render("Start a conversation by typing a message below."))
return b.String()
}
for _, msg := range m.messages {
var content string
var style lipgloss.Style
switch msg.Role {
case llm.RoleUser:
style = userMsgStyle
label := roleLabelStyle.Foreground(secondaryColor).Render("[User]")
content = label + " " + msg.Content
if msg.Images > 0 {
content += imageIndicatorStyle.Render(fmt.Sprintf(" [%d image(s)]", msg.Images))
}
case llm.RoleAssistant:
style = assistantMsgStyle
label := roleLabelStyle.Foreground(lipgloss.Color("255")).Render("[Assistant]")
content = label + " " + msg.Content
case llm.Role("tool_call"):
style = toolCallStyle
content = " -> Calling: " + msg.Content
case llm.Role("tool_result"):
style = toolResultStyle
content = " <- Result: " + msg.Content
default:
style = assistantMsgStyle
content = msg.Content
}
b.WriteString(style.Render(content))
b.WriteString("\n\n")
}
return b.String()
}
// renderProviderSelect renders the provider selection view
func (m Model) renderProviderSelect() string {
var b strings.Builder
b.WriteString(headerStyle.Render("Select Provider"))
b.WriteString("\n\n")
for i, item := range m.listItems {
cursor := " "
style := normalItemStyle
if i == m.listIndex {
cursor = "> "
style = selectedItemStyle
}
b.WriteString(style.Render(cursor + item))
b.WriteString("\n")
}
b.WriteString("\n")
b.WriteString(helpStyle.Render("Use arrow keys or j/k to navigate, Enter to select, Esc to cancel"))
return appStyle.Render(b.String())
}
// renderAPIKeyInput renders the API key input view
func (m Model) renderAPIKeyInput() string {
var b strings.Builder
provider := m.providers[m.listIndex]
b.WriteString(headerStyle.Render(fmt.Sprintf("Enter API Key for %s", provider.Name)))
b.WriteString("\n\n")
b.WriteString(fmt.Sprintf("Environment variable: %s\n\n", provider.EnvVar))
b.WriteString("Enter your API key below (it will be hidden):\n\n")
inputBox := inputStyle.Render(m.apiKeyInput.View())
b.WriteString(inputBox)
b.WriteString("\n\n")
b.WriteString(helpStyle.Render("Enter to confirm, Esc to cancel"))
return appStyle.Render(b.String())
}
// renderModelSelect renders the model selection view
func (m Model) renderModelSelect() string {
var b strings.Builder
b.WriteString(headerStyle.Render(fmt.Sprintf("Select Model (%s)", m.providerName)))
b.WriteString("\n\n")
for i, item := range m.listItems {
cursor := " "
style := normalItemStyle
if i == m.listIndex {
cursor = "> "
style = selectedItemStyle
}
if item == m.modelName {
item += " (current)"
}
b.WriteString(style.Render(cursor + item))
b.WriteString("\n")
}
b.WriteString("\n")
b.WriteString(helpStyle.Render("Use arrow keys or j/k to navigate, Enter to select, Esc to cancel"))
return appStyle.Render(b.String())
}
// renderImageInput renders the image input view
func (m Model) renderImageInput() string {
var b strings.Builder
b.WriteString(headerStyle.Render("Add Image"))
b.WriteString("\n\n")
b.WriteString("Enter an image source:\n")
b.WriteString(" - File path (e.g., /path/to/image.png)\n")
b.WriteString(" - URL (e.g., https://example.com/image.jpg)\n")
b.WriteString(" - Base64 data or data URL\n\n")
if len(m.pendingImages) > 0 {
b.WriteString(imageIndicatorStyle.Render(fmt.Sprintf("Currently attached: %d image(s)\n\n", len(m.pendingImages))))
}
inputBox := inputStyle.Render(m.input.View())
b.WriteString(inputBox)
b.WriteString("\n\n")
b.WriteString(helpStyle.Render("Enter to add image, Esc to cancel"))
return appStyle.Render(b.String())
}
// renderToolsPanel renders the tools panel
func (m Model) renderToolsPanel() string {
var b strings.Builder
b.WriteString(headerStyle.Render("Tools / Function Calling"))
b.WriteString("\n\n")
status := "DISABLED"
statusStyle := errorStyle
if m.toolsEnabled {
status = "ENABLED"
statusStyle = lipgloss.NewStyle().Foreground(successColor).Bold(true)
}
b.WriteString(settingLabelStyle.Render("Tools Status:"))
b.WriteString(statusStyle.Render(status))
b.WriteString("\n\n")
b.WriteString("Available tools:\n")
for _, fn := range m.toolbox.Functions() {
b.WriteString(fmt.Sprintf(" - %s: %s\n", selectedItemStyle.Render(fn.Name), fn.Description))
}
b.WriteString("\n")
b.WriteString(helpStyle.Render("Press 't' to toggle tools, Enter or 'q' to close"))
return appStyle.Render(b.String())
}
// renderSettings renders the settings view
func (m Model) renderSettings() string {
var b strings.Builder
b.WriteString(headerStyle.Render("Settings"))
b.WriteString("\n\n")
// Temperature
tempStr := "default"
if m.temperature != nil {
tempStr = fmt.Sprintf("%.1f", *m.temperature)
}
b.WriteString(settingLabelStyle.Render("Temperature:"))
b.WriteString(settingValueStyle.Render(tempStr))
b.WriteString("\n\n")
b.WriteString("Press a key to set temperature:\n")
b.WriteString(" 1 - Default (model decides)\n")
b.WriteString(" 2 - 0.0 (deterministic)\n")
b.WriteString(" 3 - 0.5 (balanced)\n")
b.WriteString(" 4 - 0.7 (creative)\n")
b.WriteString(" 5 - 1.0 (very creative)\n")
b.WriteString("\n")
// System prompt
b.WriteString(settingLabelStyle.Render("System Prompt:"))
b.WriteString("\n")
b.WriteString(settingValueStyle.Render(" " + m.systemPrompt))
b.WriteString("\n\n")
b.WriteString(helpStyle.Render("Enter or 'q' to close"))
return appStyle.Render(b.String())
}