go-llm/cmd/llm/model.go
Steve Dudenhoeffer 97d54c10ae Implement interactive CLI for LLM providers with chat, tools, and image support
- Add Bubble Tea-based CLI interface for LLM interactions.
- Implement `.env.example` for environment variable setup (a sketch of its likely contents follows below).
- Add provider, model, and tool selection screens.
- Include support for API key configuration.
- Enable chat interactions with optional image and tool support.
- Introduce core utility functions: image handling, tool execution, chat request management, and response rendering.
- Implement style customization with Lip Gloss.
2026-01-24 15:53:36 -05:00
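The `.env.example` file itself isn't shown on this page. Judging from the environment variables model.go reads below, it presumably contains placeholder entries along these lines:

    OPENAI_API_KEY=
    ANTHROPIC_API_KEY=
    GOOGLE_API_KEY=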


package main

import (
	"os"

	"github.com/charmbracelet/bubbles/textinput"
	"github.com/charmbracelet/bubbles/viewport"
	tea "github.com/charmbracelet/bubbletea"

	llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)
// State represents the current view/screen of the application.
type State int

const (
	StateChat State = iota
	StateProviderSelect
	StateModelSelect
	StateImageInput
	StateToolsPanel
	StateSettings
	StateAPIKeyInput
)
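
// A Stringer is handy when rendering the current screen name in a status
// bar or debug output. This method is not part of the original file; it is
// an illustrative sketch built from the states defined above.
func (s State) String() string {
	switch s {
	case StateChat:
		return "chat"
	case StateProviderSelect:
		return "provider select"
	case StateModelSelect:
		return "model select"
	case StateImageInput:
		return "image input"
	case StateToolsPanel:
		return "tools panel"
	case StateSettings:
		return "settings"
	case StateAPIKeyInput:
		return "API key input"
	default:
		return "unknown"
	}
}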
// DisplayMessage represents a message for display in the UI.
type DisplayMessage struct {
	Role    llm.Role
	Content string
	Images  int // number of images attached
}

// ProviderInfo contains information about a provider.
type ProviderInfo struct {
	Name       string
	EnvVar     string
	Models     []string
	HasAPIKey  bool
	ModelIndex int
}
// Model is the main Bubble Tea model.
type Model struct {
	// State
	state         State
	previousState State

	// Provider
	provider      llm.LLM
	providerName  string
	chat          llm.ChatCompletion
	modelName     string
	apiKeys       map[string]string
	providers     []ProviderInfo
	providerIndex int

	// Conversation: conversation holds the inputs sent to the provider,
	// while messages is the parallel transcript rendered in the UI.
	conversation []llm.Input
	messages     []DisplayMessage

	// Tools
	toolbox      llm.ToolBox
	toolsEnabled bool

	// Settings
	systemPrompt string
	temperature  *float64

	// Pending images
	pendingImages []llm.Image

	// UI components
	input         textinput.Model
	viewport      viewport.Model
	viewportReady bool

	// Selection state (for lists)
	listIndex int
	listItems []string

	// Dimensions
	width  int
	height int

	// Loading state
	loading bool
	err     error

	// For API key input
	apiKeyInput textinput.Model
}
// InitialModel creates and returns the initial model.
func InitialModel() Model {
	ti := textinput.New()
	ti.Placeholder = "Type your message..."
	ti.Focus()
	ti.CharLimit = 4096
	ti.Width = 60

	aki := textinput.New()
	aki.Placeholder = "Enter API key..."
	aki.CharLimit = 256
	aki.Width = 60
	aki.EchoMode = textinput.EchoPassword

	// Initialize providers with environment variable checks.
	providers := []ProviderInfo{
		{
			Name:   "OpenAI",
			EnvVar: "OPENAI_API_KEY",
			Models: []string{
				"gpt-4.1",
				"gpt-4.1-mini",
				"gpt-4.1-nano",
				"gpt-4o",
				"gpt-4o-mini",
				"gpt-4-turbo",
				"gpt-3.5-turbo",
				"o1",
				"o1-mini",
				"o1-preview",
				"o3-mini",
			},
		},
		{
			Name:   "Anthropic",
			EnvVar: "ANTHROPIC_API_KEY",
			Models: []string{
				"claude-sonnet-4-20250514",
				"claude-opus-4-20250514",
				"claude-3-7-sonnet-20250219",
				"claude-3-5-sonnet-20241022",
				"claude-3-5-haiku-20241022",
				"claude-3-opus-20240229",
				"claude-3-sonnet-20240229",
				"claude-3-haiku-20240307",
			},
		},
		{
			Name:   "Google",
			EnvVar: "GOOGLE_API_KEY",
			Models: []string{
				"gemini-2.0-flash",
				"gemini-2.0-flash-lite",
				"gemini-1.5-pro",
				"gemini-1.5-flash",
				"gemini-1.5-flash-8b",
				"gemini-1.0-pro",
			},
		},
	}

	// Check for API keys in the environment.
	apiKeys := make(map[string]string)
	for i := range providers {
		if key := os.Getenv(providers[i].EnvVar); key != "" {
			apiKeys[providers[i].Name] = key
			providers[i].HasAPIKey = true
		}
	}

	m := Model{
		state:        StateProviderSelect,
		input:        ti,
		apiKeyInput:  aki,
		apiKeys:      apiKeys,
		providers:    providers,
		systemPrompt: "You are a helpful assistant.",
		toolbox:      createDemoToolbox(),
		toolsEnabled: false,
		messages:     []DisplayMessage{},
		conversation: []llm.Input{},
	}

	// Build list items for provider selection.
	m.listItems = make([]string, len(providers))
	for i, p := range providers {
		status := " (no key)"
		if p.HasAPIKey {
			status = " (ready)"
		}
		m.listItems[i] = p.Name + status
	}

	return m
}
// Init initializes the model.
func (m Model) Init() tea.Cmd {
	return textinput.Blink
}
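
// The Update and View methods that complete the tea.Model interface live in
// sibling files of this package. As a rough sketch, the program is
// presumably driven along these lines (the actual entry point may differ):
//
//	func main() {
//		p := tea.NewProgram(InitialModel(), tea.WithAltScreen())
//		if _, err := p.Run(); err != nil {
//			fmt.Fprintln(os.Stderr, "error:", err)
//			os.Exit(1)
//		}
//	}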
// selectProvider sets up the selected provider. Out-of-range indexes and
// providers without a configured API key are silently ignored.
func (m *Model) selectProvider(index int) error {
	if index < 0 || index >= len(m.providers) {
		return nil
	}

	p := m.providers[index]
	key, ok := m.apiKeys[p.Name]
	if !ok || key == "" {
		return nil
	}

	m.providerName = p.Name
	m.providerIndex = index

	switch p.Name {
	case "OpenAI":
		m.provider = llm.OpenAI(key)
	case "Anthropic":
		m.provider = llm.Anthropic(key)
	case "Google":
		m.provider = llm.Google(key)
	}

	// Select the provider's default model.
	if len(p.Models) > 0 {
		return m.selectModel(p.ModelIndex)
	}
	return nil
}
// selectModel sets the current model.
func (m *Model) selectModel(index int) error {
	if m.provider == nil {
		return nil
	}

	p := m.providers[m.providerIndex]
	if index < 0 || index >= len(p.Models) {
		return nil
	}

	modelName := p.Models[index]
	chat, err := m.provider.ModelVersion(modelName)
	if err != nil {
		return err
	}

	m.chat = chat
	m.modelName = modelName
	m.providers[m.providerIndex].ModelIndex = index
	return nil
}
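
// Illustrative use from a selection screen's key handler (the actual update
// logic lives elsewhere in this package): confirming a choice on the
// provider list would run something like
//
//	if err := m.selectProvider(m.listIndex); err != nil {
//		m.err = err
//	} else {
//		m.state = StateModelSelect
//	}
//
// Note that selectProvider already falls back to the provider's stored
// default model via selectModel, so the model screen is optional here.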
// newConversation resets the conversation.
func (m *Model) newConversation() {
	m.conversation = []llm.Input{}
	m.messages = []DisplayMessage{}
	m.pendingImages = []llm.Image{}
	m.err = nil
}
// addUserMessage appends a user message to both the provider-facing
// conversation and the display transcript.
func (m *Model) addUserMessage(text string, images []llm.Image) {
	msg := llm.Message{
		Role:   llm.RoleUser,
		Text:   text,
		Images: images,
	}
	m.conversation = append(m.conversation, msg)

	m.messages = append(m.messages, DisplayMessage{
		Role:    llm.RoleUser,
		Content: text,
		Images:  len(images),
	})
}
// addAssistantMessage appends an assistant message to the display
// transcript only; it does not touch the provider-facing conversation.
func (m *Model) addAssistantMessage(content string) {
	m.messages = append(m.messages, DisplayMessage{
		Role:    llm.RoleAssistant,
		Content: content,
	})
}
// addToolCallMessage adds a tool call message to the display transcript.
func (m *Model) addToolCallMessage(name string, args string) {
	m.messages = append(m.messages, DisplayMessage{
		Role:    llm.Role("tool_call"),
		Content: name + ": " + args,
	})
}

// addToolResultMessage adds a tool result message to the display transcript.
func (m *Model) addToolResultMessage(name string, result string) {
	m.messages = append(m.messages, DisplayMessage{
		Role:    llm.Role("tool_result"),
		Content: name + " -> " + result,
	})
}
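
// Example of how one tool round appears in the transcript (the tool name
// and payloads here are hypothetical):
//
//	m.addToolCallMessage("get_weather", `{"city":"Boston"}`)
//	m.addToolResultMessage("get_weather", `{"temp":62}`)
//
// renders as:
//
//	get_weather: {"city":"Boston"}
//	get_weather -> {"temp":62}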