feat: add DeepSeek, Moonshot, xAI, Groq, Ollama; drop v1; migrate TUI to v2
CI / Root Module (push) Failing after 30s
CI / Lint (push) Failing after 50s
CI / V2 Module (push) Successful in 2m14s

Five OpenAI-compatible providers join the library as first-class constructors
(llm.DeepSeek, llm.Moonshot, llm.XAI, llm.Groq, llm.Ollama). Their wire-level
implementation is shared via a new v2/openaicompat package which is the
extracted guts of the old v2/openai provider; each provider supplies its own
Rules value to declare per-model constraints (e.g., DeepSeek Reasoner rejects
tools and temperature, Moonshot/xAI accept images only on *-vision* models,
Groq rejects audio input). v2/openai itself becomes a thin wrapper that sets
RestrictTemperature for o-series and gpt-5 models.

A new provider registry (v2/registry.go) exposes llm.Providers() and drives
the TUI's provider picker so adding a provider in future is a single-file
change.

The TUI at cmd/llm was migrated from v1 to v2 and moved to v2/cmd/llm. With
nothing else depending on v1, the v1 code at the repo root (all .go files,
schema/, internal/, provider/, root go.mod/go.sum) is deleted.

Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
This commit is contained in:
2026-04-24 13:34:39 +00:00
parent 9b91b2f794
commit 34119e5a00
58 changed files with 1921 additions and 4242 deletions
+27
View File
@@ -0,0 +1,27 @@
# go-llm CLI environment variables
# Copy this file to .env and fill in the keys for providers you use.
# OpenAI API Key (https://platform.openai.com/api-keys)
OPENAI_API_KEY=
# Anthropic API Key (https://console.anthropic.com/settings/keys)
ANTHROPIC_API_KEY=
# Google AI API Key (https://aistudio.google.com/apikey)
GOOGLE_API_KEY=
# DeepSeek API Key (https://platform.deepseek.com)
DEEPSEEK_API_KEY=
# Moonshot / Kimi API Key (https://platform.moonshot.ai)
MOONSHOT_API_KEY=
# xAI / Grok API Key (https://x.ai/api)
XAI_API_KEY=
# Groq API Key (https://console.groq.com/keys)
GROQ_API_KEY=
# Ollama runs locally with no API key required.
# Override the endpoint if you're not using localhost:11434.
# OLLAMA_BASE_URL=http://localhost:11434/v1
+136
View File
@@ -0,0 +1,136 @@
package main
import (
"context"
"encoding/base64"
"fmt"
"net/http"
"os"
"strings"
tea "github.com/charmbracelet/bubbletea"
llm "gitea.stevedudenhoeffer.com/steve/go-llm/v2"
)
// Message types for async operations. Each is produced by one of the
// tea.Cmd constructors below and delivered back to Model.Update.

// ChatResponseMsg contains the response from a chat completion.
type ChatResponseMsg struct {
	Response llm.Response // full response from Model.Complete
	Err      error        // non-nil if the request failed
}

// ToolExecutionMsg contains results from executing tool calls, one Message
// (RoleTool) per ToolCall, in the same order.
type ToolExecutionMsg struct {
	Results []llm.Message // RoleTool messages, parallel to the originating calls
	Err     error         // non-nil if execution failed
}

// ImageLoadedMsg contains a loaded image (from path, URL, or base64 input)
// to be staged for the next user message.
type ImageLoadedMsg struct {
	Image llm.Image // populated on success
	Err   error     // non-nil if loading or validation failed
}
// sendChatRequest returns a tea.Cmd that performs a completion request with
// the current conversation and yields a ChatResponseMsg when the provider
// responds (or fails).
func sendChatRequest(model *llm.Model, messages []llm.Message, toolbox *llm.ToolBox, toolsEnabled bool, temperature *float64) tea.Cmd {
	return func() tea.Msg {
		response, err := model.Complete(
			context.Background(),
			messages,
			buildOpts(toolbox, toolsEnabled, temperature)...,
		)
		return ChatResponseMsg{Response: response, Err: err}
	}
}
// executeTools returns a tea.Cmd that runs every tool call through the
// toolbox, producing a ToolExecutionMsg with one RoleTool Message per call,
// in the same order as the calls.
func executeTools(toolbox *llm.ToolBox, calls []llm.ToolCall) tea.Cmd {
	return func() tea.Msg {
		msgs, err := toolbox.ExecuteAll(context.Background(), calls)
		return ToolExecutionMsg{Results: msgs, Err: err}
	}
}
// buildOpts translates the CLI's current state (tool toggle, optional
// temperature override) into llm.RequestOptions for a completion call.
func buildOpts(toolbox *llm.ToolBox, toolsEnabled bool, temperature *float64) []llm.RequestOption {
	opts := make([]llm.RequestOption, 0, 2)
	haveTools := toolsEnabled && toolbox != nil && len(toolbox.AllTools()) > 0
	if haveTools {
		opts = append(opts, llm.WithTools(toolbox))
	}
	if temperature != nil {
		opts = append(opts, llm.WithTemperature(*temperature))
	}
	return opts
}
// loadImageFromPath returns a tea.Cmd that reads an image file from disk,
// verifies it is an image via content sniffing, and yields an ImageLoadedMsg
// with the base64-encoded payload.
func loadImageFromPath(path string) tea.Cmd {
	return func() tea.Msg {
		// Drop surrounding whitespace and any quotes from drag-and-drop paths.
		cleaned := strings.Trim(strings.TrimSpace(path), "\"'")
		raw, err := os.ReadFile(cleaned)
		if err != nil {
			return ImageLoadedMsg{Err: fmt.Errorf("failed to read image file: %w", err)}
		}
		mime := http.DetectContentType(raw)
		if !strings.HasPrefix(mime, "image/") {
			return ImageLoadedMsg{Err: fmt.Errorf("file is not an image: %s", mime)}
		}
		img := llm.Image{
			Base64:      base64.StdEncoding.EncodeToString(raw),
			ContentType: mime,
		}
		return ImageLoadedMsg{Image: img}
	}
}
// loadImageFromURL returns a tea.Cmd that attaches an image by URL. The URL
// is kept as-is (not fetched); the provider downloads it itself.
func loadImageFromURL(url string) tea.Cmd {
	return func() tea.Msg {
		trimmed := strings.TrimSpace(url)
		return ImageLoadedMsg{Image: llm.Image{URL: trimmed}}
	}
}
// loadImageFromBase64 returns a tea.Cmd that builds an image from base64
// input, accepting either a raw base64 string or a "data:" URL.
//
// Fix: the data-URL branch previously performed no validation at all, while
// the raw branch rejected non-decodable or non-image data. Both forms now
// require a decodable payload and an image/* media type, so a bad paste is
// reported immediately instead of failing later at the provider.
func loadImageFromBase64(data string) tea.Cmd {
	return func() tea.Msg {
		data = strings.TrimSpace(data)
		if strings.HasPrefix(data, "data:") {
			parts := strings.SplitN(data, ",", 2)
			if len(parts) != 2 {
				return ImageLoadedMsg{Err: fmt.Errorf("invalid data URL format")}
			}
			mediaType := strings.TrimPrefix(parts[0], "data:")
			mediaType = strings.TrimSuffix(mediaType, ";base64")
			// Trust the declared media type but insist it is an image.
			if !strings.HasPrefix(mediaType, "image/") {
				return ImageLoadedMsg{Err: fmt.Errorf("data is not an image: %s", mediaType)}
			}
			// Validate the payload actually decodes before accepting it.
			if _, err := base64.StdEncoding.DecodeString(parts[1]); err != nil {
				return ImageLoadedMsg{Err: fmt.Errorf("invalid base64 data: %w", err)}
			}
			return ImageLoadedMsg{
				Image: llm.Image{
					Base64:      parts[1],
					ContentType: mediaType,
				},
			}
		}
		decoded, err := base64.StdEncoding.DecodeString(data)
		if err != nil {
			return ImageLoadedMsg{Err: fmt.Errorf("invalid base64 data: %w", err)}
		}
		// No declared type for raw base64; sniff it from the bytes.
		contentType := http.DetectContentType(decoded)
		if !strings.HasPrefix(contentType, "image/") {
			return ImageLoadedMsg{Err: fmt.Errorf("data is not an image: %s", contentType)}
		}
		return ImageLoadedMsg{
			Image: llm.Image{
				Base64:      data,
				ContentType: contentType,
			},
		}
	}
}
+25
View File
@@ -0,0 +1,25 @@
package main
import (
"fmt"
"os"
tea "github.com/charmbracelet/bubbletea"
"github.com/joho/godotenv"
)
// main loads optional .env configuration and runs the Bubble Tea program in
// alt-screen mode with mouse support.
func main() {
	// Best-effort load of .env; a missing file is not an error.
	_ = godotenv.Load()

	program := tea.NewProgram(
		InitialModel(),
		tea.WithAltScreen(),
		tea.WithMouseCellMotion(),
	)
	if _, err := program.Run(); err != nil {
		fmt.Printf("Error running program: %v\n", err)
		os.Exit(1)
	}
}
+245
View File
@@ -0,0 +1,245 @@
package main
import (
"os"
"github.com/charmbracelet/bubbles/textinput"
"github.com/charmbracelet/bubbles/viewport"
tea "github.com/charmbracelet/bubbletea"
llm "gitea.stevedudenhoeffer.com/steve/go-llm/v2"
)
// State represents the current view/screen of the application. Update and
// View both dispatch on it.
type State int

const (
	StateChat           State = iota // main conversation screen (default)
	StateProviderSelect              // choosing an LLM provider
	StateModelSelect                 // choosing a model within the provider
	StateImageInput                  // entering an image path/URL/base64
	StateToolsPanel                  // viewing/toggling function calling
	StateSettings                    // temperature / system-prompt panel
	StateAPIKeyInput                 // prompting for a missing API key
)

// DisplayMessage represents a message for display in the UI. It is kept
// separate from the llm.Message history so display-only entries (tool call
// and tool result lines) are never sent back to the provider.
type DisplayMessage struct {
	Role    llm.Role
	Content string
	Images  int // number of images attached
}

// ProviderEntry is a CLI-local view of a registered provider, enriched with
// UI state (which model is currently chosen, whether we have a key, etc.).
type ProviderEntry struct {
	Info       llm.ProviderInfo // static registry data from llm.Providers()
	HasAPIKey  bool             // true when a key is known or none is needed
	ModelIndex int              // last model selected for this provider
}

// Model is the main Bubble Tea model holding all application state.
type Model struct {
	// State: the active screen, plus the screen to return to after a
	// transient flow (used by image input).
	state         State
	previousState State
	// Provider
	client        *llm.Client
	chat          *llm.Model // active model handle; nil until one is selected
	providerName  string
	modelName     string
	apiKeys       map[string]string // provider name -> API key
	providers     []ProviderEntry
	providerIndex int
	// Conversation: wire history (sent to provider) and display history.
	conversation []llm.Message
	messages     []DisplayMessage
	// Tools
	toolbox      *llm.ToolBox
	toolsEnabled bool
	// Settings
	systemPrompt string
	temperature  *float64 // nil means use the provider's default
	// Pending images staged for the next user message.
	pendingImages []llm.Image
	// UI Components
	input         textinput.Model
	viewport      viewport.Model
	viewportReady bool // viewport is created lazily on the first WindowSizeMsg
	// Selection state (for lists)
	listIndex int
	listItems []string
	// Dimensions of the terminal.
	width  int
	height int
	// Loading state for in-flight requests.
	loading bool
	err     error
	// For API key input
	apiKeyInput textinput.Model
}
// InitialModel creates and returns the initial model: configured text
// inputs, the provider list built from the go-llm registry (with API keys
// pulled from the environment), and the provider-select screen active.
func InitialModel() Model {
	chatInput := textinput.New()
	chatInput.Placeholder = "Type your message..."
	chatInput.Focus()
	chatInput.CharLimit = 4096
	chatInput.Width = 60

	keyInput := textinput.New()
	keyInput.Placeholder = "Enter API key..."
	keyInput.CharLimit = 256
	keyInput.Width = 60
	keyInput.EchoMode = textinput.EchoPassword

	// Build provider entries from the go-llm registry, marking which ones
	// are immediately usable.
	registry := llm.Providers()
	entries := make([]ProviderEntry, len(registry))
	keys := make(map[string]string)
	for i, info := range registry {
		entry := ProviderEntry{Info: info}
		if info.EnvKey == "" {
			// Key-less provider (e.g., Ollama) is always usable.
			entry.HasAPIKey = true
		} else if k := os.Getenv(info.EnvKey); k != "" {
			keys[info.Name] = k
			entry.HasAPIKey = true
		}
		entries[i] = entry
	}

	m := Model{
		state:        StateProviderSelect,
		input:        chatInput,
		apiKeyInput:  keyInput,
		apiKeys:      keys,
		providers:    entries,
		systemPrompt: "You are a helpful assistant.",
		toolbox:      createDemoToolbox(),
		toolsEnabled: false,
		messages:     []DisplayMessage{},
		conversation: []llm.Message{},
	}

	// Labels for the provider-selection list, annotated with key status.
	labels := make([]string, len(entries))
	for i, entry := range entries {
		suffix := " (no key)"
		if entry.HasAPIKey {
			if entry.Info.EnvKey == "" {
				suffix = " (local)"
			} else {
				suffix = " (ready)"
			}
		}
		labels[i] = entry.Info.DisplayName + suffix
	}
	m.listItems = labels
	return m
}
// Init implements tea.Model. The only startup command is the cursor blink
// for the text input.
func (m Model) Init() tea.Cmd {
	return textinput.Blink
}
// selectProvider activates the provider at index: creates its client and
// restores its last-chosen model. Out-of-range indexes and providers whose
// required key is missing are silently ignored (no error).
func (m *Model) selectProvider(index int) error {
	if index < 0 || index >= len(m.providers) {
		return nil
	}
	entry := m.providers[index]
	key := m.apiKeys[entry.Info.Name] // empty for key-less providers like Ollama
	if entry.Info.EnvKey != "" && key == "" {
		// A key is required but we don't have one; do nothing.
		return nil
	}
	m.providerName = entry.Info.DisplayName
	m.providerIndex = index
	m.client = entry.Info.New(key)
	if len(entry.Info.Models) == 0 {
		return nil
	}
	// Re-select whichever model was last chosen for this provider.
	return m.selectModel(entry.ModelIndex)
}
// selectModel makes the model at index (within the current provider's model
// list) the active chat model. No-ops if there is no client yet or the index
// is out of range.
func (m *Model) selectModel(index int) error {
	if m.client == nil {
		return nil
	}
	models := m.providers[m.providerIndex].Info.Models
	if index < 0 || index >= len(models) {
		return nil
	}
	name := models[index]
	m.chat = m.client.Model(name)
	m.modelName = name
	// Remember the choice so re-selecting the provider restores it.
	m.providers[m.providerIndex].ModelIndex = index
	return nil
}
// newConversation clears the wire history, the display history, any staged
// images, and the last error, starting a fresh chat.
func (m *Model) newConversation() {
	m.err = nil
	m.pendingImages = []llm.Image{}
	m.messages = []DisplayMessage{}
	m.conversation = []llm.Message{}
}
// addUserMessage appends a user turn (text plus attached images) to both the
// wire conversation and the display list.
func (m *Model) addUserMessage(text string, images []llm.Image) {
	m.conversation = append(m.conversation, llm.Message{
		Role:    llm.RoleUser,
		Content: llm.Content{Text: text, Images: images},
	})
	display := DisplayMessage{
		Role:    llm.RoleUser,
		Content: text,
		Images:  len(images),
	}
	m.messages = append(m.messages, display)
}
// addAssistantMessage appends an assistant turn to the display list only
// (the wire history gets the full response message elsewhere).
func (m *Model) addAssistantMessage(content string) {
	display := DisplayMessage{Role: llm.RoleAssistant, Content: content}
	m.messages = append(m.messages, display)
}
// addToolCallMessage appends a display-only entry for an outgoing tool call.
// The synthetic "tool_call" role exists purely for rendering.
func (m *Model) addToolCallMessage(name string, args string) {
	display := DisplayMessage{
		Role:    llm.Role("tool_call"),
		Content: name + ": " + args,
	}
	m.messages = append(m.messages, display)
}
// addToolResultMessage appends a display-only entry for a tool's result.
// The synthetic "tool_result" role exists purely for rendering.
func (m *Model) addToolResultMessage(name string, result string) {
	display := DisplayMessage{
		Role:    llm.Role("tool_result"),
		Content: name + " -> " + result,
	}
	m.messages = append(m.messages, display)
}
+113
View File
@@ -0,0 +1,113 @@
package main
import (
"github.com/charmbracelet/lipgloss"
)
// Centralized lipgloss styling for the TUI: the color palette first, then
// one style per UI element, so the look can be tweaked in a single place.
var (
	// Colors
	primaryColor   = lipgloss.Color("205")
	secondaryColor = lipgloss.Color("39")
	accentColor    = lipgloss.Color("212")
	mutedColor     = lipgloss.Color("241")
	errorColor     = lipgloss.Color("196")
	successColor   = lipgloss.Color("82")

	// App styles: outer padding around every screen.
	appStyle = lipgloss.NewStyle().Padding(1, 2)

	// Header bar shown at the top of each screen.
	headerStyle = lipgloss.NewStyle().
			Bold(true).
			Foreground(primaryColor).
			BorderStyle(lipgloss.NormalBorder()).
			BorderBottom(true).
			BorderForeground(mutedColor).
			Padding(0, 1)

	// Provider badge embedded in the chat header.
	providerBadgeStyle = lipgloss.NewStyle().
				Background(secondaryColor).
				Foreground(lipgloss.Color("0")).
				Padding(0, 1).
				Bold(true)

	// Messages
	systemMsgStyle = lipgloss.NewStyle().
			Foreground(mutedColor).
			Italic(true).
			Padding(0, 1)
	userMsgStyle = lipgloss.NewStyle().
			Foreground(secondaryColor).
			Padding(0, 1)
	assistantMsgStyle = lipgloss.NewStyle().
				Foreground(lipgloss.Color("255")).
				Padding(0, 1)
	roleLabelStyle = lipgloss.NewStyle().
			Bold(true).
			Width(12)

	// Tool calls
	toolCallStyle = lipgloss.NewStyle().
			Foreground(accentColor).
			Italic(true).
			Padding(0, 1)
	toolResultStyle = lipgloss.NewStyle().
			Foreground(successColor).
			Padding(0, 1)

	// Input area
	inputStyle = lipgloss.NewStyle().
			BorderStyle(lipgloss.RoundedBorder()).
			BorderForeground(primaryColor).
			Padding(0, 1)
	inputHelpStyle = lipgloss.NewStyle().
			Foreground(mutedColor).
			Italic(true)

	// Error banner.
	errorStyle = lipgloss.NewStyle().
			Foreground(errorColor).
			Bold(true)

	// Loading ("Thinking...") indicator.
	loadingStyle = lipgloss.NewStyle().
			Foreground(accentColor).
			Italic(true)

	// List selection
	selectedItemStyle = lipgloss.NewStyle().
				Foreground(primaryColor).
				Bold(true)
	normalItemStyle = lipgloss.NewStyle().
			Foreground(lipgloss.Color("255"))

	// Settings panel
	settingLabelStyle = lipgloss.NewStyle().
				Foreground(secondaryColor).
				Width(15)
	settingValueStyle = lipgloss.NewStyle().
				Foreground(lipgloss.Color("255"))

	// Help text
	helpStyle = lipgloss.NewStyle().
			Foreground(mutedColor).
			Padding(1, 0)

	// Image indicator
	imageIndicatorStyle = lipgloss.NewStyle().
				Foreground(accentColor).
				Bold(true)

	// Viewport border around the message list.
	viewportStyle = lipgloss.NewStyle().
			BorderStyle(lipgloss.NormalBorder()).
			BorderForeground(mutedColor)
)
+114
View File
@@ -0,0 +1,114 @@
package main
import (
"context"
"encoding/json"
"fmt"
"math"
"strconv"
"strings"
"time"
llm "gitea.stevedudenhoeffer.com/steve/go-llm/v2"
)
// TimeParams is the (empty) parameter struct for the GetTime tool.
type TimeParams struct{}

// GetTime reports the current local date and time in a human-readable form.
func GetTime(_ context.Context, _ TimeParams) (string, error) {
	now := time.Now()
	return now.Format("Monday, January 2, 2006 3:04:05 PM MST"), nil
}
// CalcParams is the parameter struct for the Calculate function.
type CalcParams struct {
	A  float64 `json:"a" description:"First number"`
	B  float64 `json:"b" description:"Second number"`
	Op string  `json:"op" description:"Operation: add, subtract, multiply, divide, power, sqrt, mod"`
}

// Calculate performs basic math operations. It returns the result formatted
// with strconv.FormatFloat, or an error for an unknown operation, division
// or modulo by zero, a negative sqrt operand, or any non-finite result.
//
// Fixes: "mod" with B == 0 previously returned the string "NaN" (math.Mod's
// result) instead of an error, and overflowing operations (e.g. huge powers)
// could report "+Inf"/"NaN" to the model.
func Calculate(_ context.Context, params CalcParams) (string, error) {
	var result float64
	switch strings.ToLower(params.Op) {
	case "add", "+":
		result = params.A + params.B
	case "subtract", "sub", "-":
		result = params.A - params.B
	case "multiply", "mul", "*":
		result = params.A * params.B
	case "divide", "div", "/":
		if params.B == 0 {
			return "", fmt.Errorf("division by zero")
		}
		result = params.A / params.B
	case "power", "pow", "^":
		result = math.Pow(params.A, params.B)
	case "sqrt":
		if params.A < 0 {
			return "", fmt.Errorf("cannot take square root of negative number")
		}
		result = math.Sqrt(params.A)
	case "mod", "%":
		// math.Mod(x, 0) yields NaN; report it like divide does.
		if params.B == 0 {
			return "", fmt.Errorf("division by zero")
		}
		result = math.Mod(params.A, params.B)
	default:
		return "", fmt.Errorf("unknown operation: %s", params.Op)
	}
	// Never hand a non-finite value back to the model as a string.
	if math.IsNaN(result) || math.IsInf(result, 0) {
		return "", fmt.Errorf("result is not a finite number")
	}
	return strconv.FormatFloat(result, 'f', -1, 64), nil
}
// WeatherParams is the parameter struct for the GetWeather function.
type WeatherParams struct {
	Location string `json:"location" description:"City name or location"`
}

// GetWeather returns mock weather data (for demo purposes). Output is
// deterministic: the location's length indexes fixed condition/temperature
// tables, and the result is a JSON object.
func GetWeather(_ context.Context, params WeatherParams) (string, error) {
	conditions := []string{"sunny", "cloudy", "rainy", "partly cloudy", "windy"}
	temps := []int{65, 72, 58, 80, 45}
	pick := len(params.Location) % len(conditions)
	payload := map[string]any{
		"location":    params.Location,
		"temperature": strconv.Itoa(temps[pick]) + "F",
		"condition":   conditions[pick],
		"humidity":    "45%",
		"note":        "This is mock data for demonstration purposes",
	}
	encoded, err := json.Marshal(payload)
	if err != nil {
		return "", err
	}
	return string(encoded), nil
}
// RandomNumberParams is the parameter struct for the RandomNumber function.
type RandomNumberParams struct {
	Min int `json:"min" description:"Minimum value (inclusive)"`
	Max int `json:"max" description:"Maximum value (inclusive)"`
}

// RandomNumber generates a pseudo-random number in [Min, Max], using the
// current time's nanoseconds as the entropy source (fine for a demo tool;
// not suitable for anything security-sensitive).
//
// Fix: Max-Min+1 previously overflowed int for very wide ranges (e.g.
// Min=math.MinInt, Max=math.MaxInt), producing a modulo-by-zero panic. The
// range is now computed in int64 and rejected if it cannot be represented.
func RandomNumber(_ context.Context, params RandomNumberParams) (string, error) {
	if params.Min > params.Max {
		return "", fmt.Errorf("min cannot be greater than max")
	}
	rangeSize := int64(params.Max) - int64(params.Min) + 1
	if rangeSize <= 0 {
		// Only reachable when the range spans the full int64 domain.
		return "", fmt.Errorf("range too large")
	}
	n := time.Now().UnixNano() // non-negative, so n % rangeSize is too
	result := int64(params.Min) + n%rangeSize
	return strconv.FormatInt(result, 10), nil
}
// createDemoToolbox assembles the demo tool set (time, calculator, mock
// weather, random number) exposed to the model for testing tool calls.
func createDemoToolbox() *llm.ToolBox {
	timeTool := llm.Define[TimeParams]("get_time", "Get the current date and time", GetTime)
	calcTool := llm.Define[CalcParams]("calculate",
		"Perform basic math operations (add, subtract, multiply, divide, power, sqrt, mod)",
		Calculate)
	weatherTool := llm.Define[WeatherParams]("get_weather",
		"Get weather information for a location (demo data)", GetWeather)
	randomTool := llm.Define[RandomNumberParams]("random_number",
		"Generate a random number between min and max", RandomNumber)
	return llm.NewToolBox(timeTool, calcTool, weatherTool, randomTool)
}
+409
View File
@@ -0,0 +1,409 @@
package main
import (
"fmt"
"strings"
"github.com/charmbracelet/bubbles/textinput"
"github.com/charmbracelet/bubbles/viewport"
tea "github.com/charmbracelet/bubbletea"
llm "gitea.stevedudenhoeffer.com/steve/go-llm/v2"
)
// pendingToolCalls stores the last response's tool calls so we can pair them
// with tool execution results for display.
//
// NOTE(review): this is package-level mutable state rather than a Model
// field. It works because only one chat request is in flight at a time, but
// consider moving it onto Model to keep all state in one place.
var pendingToolCalls []llm.ToolCall
// Update handles messages and updates the model (tea.Model interface).
//
// Message flow: key and resize events adjust UI state; a ChatResponseMsg
// may trigger tool execution; a ToolExecutionMsg feeds results into a
// follow-up completion request, forming the tool-use loop.
func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
	var cmd tea.Cmd
	var cmds []tea.Cmd
	switch msg := msg.(type) {
	case tea.KeyMsg:
		return m.handleKeyMsg(msg)
	case tea.WindowSizeMsg:
		m.width = msg.Width
		m.height = msg.Height
		// Rows reserved for the header and the input/help footer.
		headerHeight := 3
		footerHeight := 4
		verticalMargins := headerHeight + footerHeight
		if !m.viewportReady {
			// First resize event: create the viewport lazily.
			m.viewport = viewport.New(msg.Width-4, msg.Height-verticalMargins)
			m.viewport.HighPerformanceRendering = false
			m.viewportReady = true
		} else {
			m.viewport.Width = msg.Width - 4
			m.viewport.Height = msg.Height - verticalMargins
		}
		m.input.Width = msg.Width - 6
		m.apiKeyInput.Width = msg.Width - 6
		m.viewport.SetContent(m.renderMessages())
	case ChatResponseMsg:
		m.loading = false
		if msg.Err != nil {
			m.err = msg.Err
			return m, nil
		}
		resp := msg.Response
		// Add the assistant message to the conversation history.
		m.conversation = append(m.conversation, resp.Message())
		// Show any text the assistant produced alongside tool calls.
		if resp.Text != "" {
			m.addAssistantMessage(resp.Text)
		}
		if resp.HasToolCalls() && m.toolsEnabled {
			// Remember the calls so ToolExecutionMsg can pair results to
			// names by index.
			pendingToolCalls = resp.ToolCalls
			for _, call := range resp.ToolCalls {
				m.addToolCallMessage(call.Name, call.Arguments)
			}
			m.viewport.SetContent(m.renderMessages())
			m.viewport.GotoBottom()
			// Keep the loading indicator up while tools run.
			m.loading = true
			return m, executeTools(m.toolbox, resp.ToolCalls)
		}
		m.viewport.SetContent(m.renderMessages())
		m.viewport.GotoBottom()
	case ToolExecutionMsg:
		if msg.Err != nil {
			m.loading = false
			m.err = msg.Err
			return m, nil
		}
		// Display results paired with the tool calls that produced them.
		for i, result := range msg.Results {
			name := ""
			if i < len(pendingToolCalls) {
				name = pendingToolCalls[i].Name
			}
			m.addToolResultMessage(name, result.Content.Text)
		}
		// Append the raw tool result messages to the conversation so the
		// assistant can reference them on the next turn.
		m.conversation = append(m.conversation, msg.Results...)
		m.viewport.SetContent(m.renderMessages())
		m.viewport.GotoBottom()
		// Ask the model to continue given the tool results.
		return m, sendChatRequest(m.chat, m.conversation, m.toolbox, m.toolsEnabled, m.temperature)
	case ImageLoadedMsg:
		if msg.Err != nil {
			m.err = msg.Err
			m.state = m.previousState
			return m, nil
		}
		m.pendingImages = append(m.pendingImages, msg.Image)
		m.state = m.previousState
		m.err = nil
	default:
		// Forward everything else (e.g. cursor blink ticks) to whichever
		// text input is active.
		if m.state == StateChat {
			m.input, cmd = m.input.Update(msg)
			cmds = append(cmds, cmd)
		} else if m.state == StateAPIKeyInput {
			m.apiKeyInput, cmd = m.apiKeyInput.Update(msg)
			cmds = append(cmds, cmd)
		}
	}
	return m, tea.Batch(cmds...)
}
// handleKeyMsg routes keyboard input: global shortcuts (quit, escape) are
// handled first, then the handler for the active screen takes over.
func (m Model) handleKeyMsg(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
	key := msg.String()
	if key == "ctrl+c" {
		return m, tea.Quit
	}
	if key == "esc" {
		// Esc closes any secondary screen; from the chat screen it quits.
		if m.state == StateChat {
			return m, tea.Quit
		}
		m.state = StateChat
		m.input.Focus()
		return m, nil
	}
	switch m.state {
	case StateProviderSelect:
		return m.handleProviderSelectKeys(msg)
	case StateModelSelect:
		return m.handleModelSelectKeys(msg)
	case StateImageInput:
		return m.handleImageInputKeys(msg)
	case StateToolsPanel:
		return m.handleToolsPanelKeys(msg)
	case StateSettings:
		return m.handleSettingsKeys(msg)
	case StateAPIKeyInput:
		return m.handleAPIKeyInputKeys(msg)
	case StateChat:
		return m.handleChatKeys(msg)
	}
	return m, nil
}
// handleChatKeys handles keys while the chat screen is active: Enter sends
// the message, Ctrl-shortcuts open the secondary screens, arrows scroll the
// viewport, and everything else goes to the text input.
func (m Model) handleChatKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
	switch msg.String() {
	case "enter":
		if m.loading {
			return m, nil
		}
		text := strings.TrimSpace(m.input.Value())
		if text == "" {
			return m, nil
		}
		if m.chat == nil {
			m.err = fmt.Errorf("no model selected - press Ctrl+P to select a provider")
			return m, nil
		}
		// Seed the conversation with the system prompt on the first turn.
		if len(m.conversation) == 0 && m.systemPrompt != "" {
			m.conversation = append(m.conversation, llm.SystemMessage(m.systemPrompt))
		}
		m.addUserMessage(text, m.pendingImages)
		m.input.Reset()
		m.pendingImages = nil
		m.err = nil
		m.loading = true
		m.viewport.SetContent(m.renderMessages())
		m.viewport.GotoBottom()
		return m, sendChatRequest(m.chat, m.conversation, m.toolbox, m.toolsEnabled, m.temperature)

	case "ctrl+n":
		m.newConversation()
		m.viewport.SetContent(m.renderMessages())
		return m, nil

	case "ctrl+i":
		// Remember where to return after the image flow finishes.
		m.previousState = StateChat
		m.state = StateImageInput
		m.input.SetValue("")
		m.input.Placeholder = "Enter image path or URL..."
		return m, nil

	case "ctrl+t":
		m.state = StateToolsPanel
		return m, nil

	case "ctrl+p":
		m.state = StateProviderSelect
		m.listIndex = m.providerIndex
		return m, nil

	case "ctrl+m":
		if m.client == nil {
			m.err = fmt.Errorf("select a provider first")
			return m, nil
		}
		current := m.providers[m.providerIndex]
		m.state = StateModelSelect
		m.listItems = current.Info.Models
		m.listIndex = current.ModelIndex
		return m, nil

	case "ctrl+s":
		m.state = StateSettings
		return m, nil

	case "up", "down", "pgup", "pgdown":
		// Scroll the message viewport.
		var scrollCmd tea.Cmd
		m.viewport, scrollCmd = m.viewport.Update(msg)
		return m, scrollCmd

	default:
		var inputCmd tea.Cmd
		m.input, inputCmd = m.input.Update(msg)
		return m, inputCmd
	}
}
// handleProviderSelectKeys handles keys in provider selection state:
// up/down (or k/j) move the cursor, Enter activates the highlighted
// provider, detouring through API key entry if no key is known.
//
// Fix: "enter" now bounds-checks listIndex before indexing m.providers;
// previously an empty provider registry caused an index-out-of-range panic.
func (m Model) handleProviderSelectKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
	switch msg.String() {
	case "up", "k":
		if m.listIndex > 0 {
			m.listIndex--
		}
	case "down", "j":
		if m.listIndex < len(m.providers)-1 {
			m.listIndex++
		}
	case "enter":
		if m.listIndex < 0 || m.listIndex >= len(m.providers) {
			return m, nil
		}
		p := m.providers[m.listIndex]
		if !p.HasAPIKey {
			// Collect a key first; selection resumes after entry.
			m.state = StateAPIKeyInput
			m.apiKeyInput.Focus()
			m.apiKeyInput.SetValue("")
			return m, textinput.Blink
		}
		if err := m.selectProvider(m.listIndex); err != nil {
			m.err = err
			return m, nil
		}
		m.state = StateChat
		m.input.Focus()
		m.newConversation()
		return m, nil
	}
	return m, nil
}
// handleAPIKeyInputKeys handles keys in API key input state: Enter stores
// the key for the provider highlighted in the list, refreshes the list
// labels, and activates the provider; other keys edit the masked input.
//
// Fix: listIndex is bounds-checked before indexing m.providers; reaching
// this state with an out-of-range selection previously panicked.
func (m Model) handleAPIKeyInputKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
	switch msg.String() {
	case "enter":
		key := strings.TrimSpace(m.apiKeyInput.Value())
		if key == "" {
			return m, nil
		}
		if m.listIndex < 0 || m.listIndex >= len(m.providers) {
			return m, nil
		}
		p := m.providers[m.listIndex]
		m.apiKeys[p.Info.Name] = key
		m.providers[m.listIndex].HasAPIKey = true
		// Rebuild every list label so the new key state shows immediately.
		for i, prov := range m.providers {
			status := " (no key)"
			if prov.HasAPIKey {
				status = " (ready)"
				if prov.Info.EnvKey == "" {
					status = " (local)"
				}
			}
			m.listItems[i] = prov.Info.DisplayName + status
		}
		if err := m.selectProvider(m.listIndex); err != nil {
			m.err = err
			return m, nil
		}
		m.state = StateChat
		m.input.Focus()
		m.newConversation()
		return m, nil
	default:
		var cmd tea.Cmd
		m.apiKeyInput, cmd = m.apiKeyInput.Update(msg)
		return m, cmd
	}
}
// handleModelSelectKeys handles keys in model selection state: up/down (or
// k/j) move the cursor, Enter activates the highlighted model.
func (m Model) handleModelSelectKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
	switch msg.String() {
	case "up", "k":
		if m.listIndex > 0 {
			m.listIndex--
		}
	case "down", "j":
		if last := len(m.listItems) - 1; m.listIndex < last {
			m.listIndex++
		}
	case "enter":
		if err := m.selectModel(m.listIndex); err != nil {
			m.err = err
			return m, nil
		}
		m.state = StateChat
		m.input.Focus()
	}
	return m, nil
}
// handleImageInputKeys handles keys in image input state. Enter classifies
// the entered text as URL, base64 (explicit data: URL or a long string with
// no path separators), or file path, and dispatches the matching loader;
// other keys edit the input.
func (m Model) handleImageInputKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
	if msg.String() != "enter" {
		var cmd tea.Cmd
		m.input, cmd = m.input.Update(msg)
		return m, cmd
	}
	value := strings.TrimSpace(m.input.Value())
	// Restore the chat placeholder regardless of outcome.
	m.input.Placeholder = "Type your message..."
	if value == "" {
		m.state = m.previousState
		return m, nil
	}
	isURL := strings.HasPrefix(value, "http://") || strings.HasPrefix(value, "https://")
	looksBase64 := strings.HasPrefix(value, "data:") ||
		(len(value) > 100 && !strings.Contains(value, "/") && !strings.Contains(value, "\\"))
	switch {
	case isURL:
		return m, loadImageFromURL(value)
	case looksBase64:
		return m, loadImageFromBase64(value)
	default:
		return m, loadImageFromPath(value)
	}
}
// handleToolsPanelKeys handles keys in the tools panel: 't' toggles
// function calling, Enter or 'q' returns to the chat.
func (m Model) handleToolsPanelKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
	key := msg.String()
	if key == "t" {
		m.toolsEnabled = !m.toolsEnabled
	} else if key == "enter" || key == "q" {
		m.state = StateChat
		m.input.Focus()
	}
	return m, nil
}
// handleSettingsKeys handles keys in the settings panel: digits 1-5 pick a
// temperature preset (1 = provider default), Enter or 'q' closes the panel.
func (m Model) handleSettingsKeys(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
	presets := map[string]float64{
		"2": 0.0,
		"3": 0.5,
		"4": 0.7,
		"5": 1.0,
	}
	key := msg.String()
	switch {
	case key == "1":
		m.temperature = nil // let the model decide
	case key == "enter" || key == "q":
		m.state = StateChat
		m.input.Focus()
	default:
		if t, ok := presets[key]; ok {
			m.temperature = &t
		}
	}
	return m, nil
}
+291
View File
@@ -0,0 +1,291 @@
package main
import (
"fmt"
"strings"
"github.com/charmbracelet/lipgloss"
llm "gitea.stevedudenhoeffer.com/steve/go-llm/v2"
)
// View renders whichever screen the state machine is currently in, falling
// back to the chat view for StateChat (and any unknown state).
func (m Model) View() string {
	switch m.state {
	case StateProviderSelect:
		return m.renderProviderSelect()
	case StateModelSelect:
		return m.renderModelSelect()
	case StateImageInput:
		return m.renderImageInput()
	case StateToolsPanel:
		return m.renderToolsPanel()
	case StateSettings:
		return m.renderSettings()
	case StateAPIKeyInput:
		return m.renderAPIKeyInput()
	}
	return m.renderChat()
}
// renderChat renders the main chat view: header with provider/model badge,
// message viewport, status lines (pending images, error, loading), the
// input box, and the keyboard help line.
func (m Model) renderChat() string {
	var out strings.Builder

	providerLabel := m.providerName
	if providerLabel == "" {
		providerLabel = "None"
	}
	modelLabel := m.modelName
	if modelLabel == "" {
		modelLabel = "None"
	}
	badge := providerBadgeStyle.Render(fmt.Sprintf("%s/%s", providerLabel, modelLabel))
	out.WriteString(headerStyle.Render(fmt.Sprintf("go-llm CLI %s", badge)))
	out.WriteString("\n")

	if m.viewportReady {
		out.WriteString(m.viewport.View())
		out.WriteString("\n")
	}
	if n := len(m.pendingImages); n > 0 {
		out.WriteString(imageIndicatorStyle.Render(fmt.Sprintf(" [%d image(s) attached]", n)))
		out.WriteString("\n")
	}
	if m.err != nil {
		out.WriteString(errorStyle.Render(" Error: " + m.err.Error()))
		out.WriteString("\n")
	}
	if m.loading {
		out.WriteString(loadingStyle.Render(" Thinking..."))
		out.WriteString("\n")
	}

	out.WriteString(inputStyle.Render(m.input.View()))
	out.WriteString("\n")
	out.WriteString(inputHelpStyle.Render("Enter: send | Ctrl+I: image | Ctrl+T: tools | Ctrl+P: provider | Ctrl+M: model | Ctrl+S: settings | Ctrl+N: new | Esc: quit"))
	return appStyle.Render(out.String())
}
// renderMessages renders the viewport content: the system prompt first,
// then every display message styled by role (user, assistant, and the
// synthetic tool_call/tool_result roles). With no messages yet it shows a
// hint instead.
func (m Model) renderMessages() string {
	var out strings.Builder
	out.WriteString(systemMsgStyle.Render("[System] " + m.systemPrompt))
	out.WriteString("\n\n")
	if len(m.messages) == 0 {
		out.WriteString(lipgloss.NewStyle().Foreground(mutedColor).Render("Start a conversation by typing a message below."))
		return out.String()
	}
	for _, msg := range m.messages {
		var line string
		var lineStyle lipgloss.Style
		switch msg.Role {
		case llm.RoleUser:
			lineStyle = userMsgStyle
			label := roleLabelStyle.Foreground(secondaryColor).Render("[User]")
			line = label + " " + msg.Content
			if msg.Images > 0 {
				line += imageIndicatorStyle.Render(fmt.Sprintf(" [%d image(s)]", msg.Images))
			}
		case llm.RoleAssistant:
			lineStyle = assistantMsgStyle
			label := roleLabelStyle.Foreground(lipgloss.Color("255")).Render("[Assistant]")
			line = label + " " + msg.Content
		case llm.Role("tool_call"):
			lineStyle = toolCallStyle
			line = " -> Calling: " + msg.Content
		case llm.Role("tool_result"):
			lineStyle = toolResultStyle
			line = " <- Result: " + msg.Content
		default:
			lineStyle = assistantMsgStyle
			line = msg.Content
		}
		out.WriteString(lineStyle.Render(line))
		out.WriteString("\n\n")
	}
	return out.String()
}
// renderProviderSelect renders the provider selection list, marking the
// highlighted row with a "> " cursor.
func (m Model) renderProviderSelect() string {
	var out strings.Builder
	out.WriteString(headerStyle.Render("Select Provider"))
	out.WriteString("\n\n")
	for i, label := range m.listItems {
		line := "  " + label
		lineStyle := normalItemStyle
		if i == m.listIndex {
			line = "> " + label
			lineStyle = selectedItemStyle
		}
		out.WriteString(lineStyle.Render(line))
		out.WriteString("\n")
	}
	out.WriteString("\n")
	out.WriteString(helpStyle.Render("Use arrow keys or j/k to navigate, Enter to select, Esc to cancel"))
	return appStyle.Render(out.String())
}
// renderAPIKeyInput renders the API key entry screen for the provider
// currently highlighted in the provider list.
//
// Fix: m.listIndex is bounds-checked before indexing m.providers; rendering
// this state with an empty or stale provider list previously panicked. The
// fallback renders a generic header instead.
func (m Model) renderAPIKeyInput() string {
	var b strings.Builder
	if m.listIndex < 0 || m.listIndex >= len(m.providers) {
		b.WriteString(headerStyle.Render("Enter API Key"))
		b.WriteString("\n\n")
		b.WriteString("Enter your API key below (it will be hidden):\n\n")
		b.WriteString(inputStyle.Render(m.apiKeyInput.View()))
		b.WriteString("\n\n")
		b.WriteString(helpStyle.Render("Enter to confirm, Esc to cancel"))
		return appStyle.Render(b.String())
	}
	provider := m.providers[m.listIndex]
	b.WriteString(headerStyle.Render(fmt.Sprintf("Enter API Key for %s", provider.Info.DisplayName)))
	b.WriteString("\n\n")
	if provider.Info.EnvKey != "" {
		// Tell the user which env var would have avoided this prompt.
		b.WriteString(fmt.Sprintf("Environment variable: %s\n\n", provider.Info.EnvKey))
	}
	b.WriteString("Enter your API key below (it will be hidden):\n\n")
	inputBox := inputStyle.Render(m.apiKeyInput.View())
	b.WriteString(inputBox)
	b.WriteString("\n\n")
	b.WriteString(helpStyle.Render("Enter to confirm, Esc to cancel"))
	return appStyle.Render(b.String())
}
// renderModelSelect renders the model selection list for the active
// provider, tagging the currently active model with "(current)".
func (m Model) renderModelSelect() string {
	var out strings.Builder
	out.WriteString(headerStyle.Render(fmt.Sprintf("Select Model (%s)", m.providerName)))
	out.WriteString("\n\n")
	for i, name := range m.listItems {
		line := "  "
		lineStyle := normalItemStyle
		if i == m.listIndex {
			line = "> "
			lineStyle = selectedItemStyle
		}
		label := name
		if label == m.modelName {
			label += " (current)"
		}
		out.WriteString(lineStyle.Render(line + label))
		out.WriteString("\n")
	}
	out.WriteString("\n")
	out.WriteString(helpStyle.Render("Use arrow keys or j/k to navigate, Enter to select, Esc to cancel"))
	return appStyle.Render(out.String())
}
// renderImageInput renders the image attachment screen: accepted input
// forms, a count of already-staged images, and the input box.
func (m Model) renderImageInput() string {
	var out strings.Builder
	out.WriteString(headerStyle.Render("Add Image"))
	out.WriteString("\n\n")
	out.WriteString("Enter an image source:\n")
	out.WriteString(" - File path (e.g., /path/to/image.png)\n")
	out.WriteString(" - URL (e.g., https://example.com/image.jpg)\n")
	out.WriteString(" - Base64 data or data URL\n\n")
	if count := len(m.pendingImages); count > 0 {
		out.WriteString(imageIndicatorStyle.Render(fmt.Sprintf("Currently attached: %d image(s)\n\n", count)))
	}
	out.WriteString(inputStyle.Render(m.input.View()))
	out.WriteString("\n\n")
	out.WriteString(helpStyle.Render("Enter to add image, Esc to cancel"))
	return appStyle.Render(out.String())
}
// renderToolsPanel renders the tools panel: the enabled/disabled status and
// a list of each available tool's name and description.
func (m Model) renderToolsPanel() string {
	var out strings.Builder
	out.WriteString(headerStyle.Render("Tools / Function Calling"))
	out.WriteString("\n\n")
	status, statusStyle := "DISABLED", errorStyle
	if m.toolsEnabled {
		status = "ENABLED"
		statusStyle = lipgloss.NewStyle().Foreground(successColor).Bold(true)
	}
	out.WriteString(settingLabelStyle.Render("Tools Status:"))
	out.WriteString(statusStyle.Render(status))
	out.WriteString("\n\n")
	out.WriteString("Available tools:\n")
	if m.toolbox != nil {
		for _, tool := range m.toolbox.AllTools() {
			out.WriteString(fmt.Sprintf(" - %s: %s\n", selectedItemStyle.Render(tool.Name), tool.Description))
		}
	}
	out.WriteString("\n")
	out.WriteString(helpStyle.Render("Press 't' to toggle tools, Enter or 'q' to close"))
	return appStyle.Render(out.String())
}
// renderSettings renders the settings panel: the current temperature (or
// "default"), the numbered temperature presets, and the system prompt.
func (m Model) renderSettings() string {
	var out strings.Builder
	out.WriteString(headerStyle.Render("Settings"))
	out.WriteString("\n\n")
	temperatureLabel := "default"
	if m.temperature != nil {
		temperatureLabel = fmt.Sprintf("%.1f", *m.temperature)
	}
	out.WriteString(settingLabelStyle.Render("Temperature:"))
	out.WriteString(settingValueStyle.Render(temperatureLabel))
	out.WriteString("\n\n")
	out.WriteString("Press a key to set temperature:\n")
	out.WriteString(" 1 - Default (model decides)\n")
	out.WriteString(" 2 - 0.0 (deterministic)\n")
	out.WriteString(" 3 - 0.5 (balanced)\n")
	out.WriteString(" 4 - 0.7 (creative)\n")
	out.WriteString(" 5 - 1.0 (very creative)\n")
	out.WriteString("\n")
	out.WriteString(settingLabelStyle.Render("System Prompt:"))
	out.WriteString("\n")
	out.WriteString(settingValueStyle.Render(" " + m.systemPrompt))
	out.WriteString("\n\n")
	out.WriteString(helpStyle.Render("Enter or 'q' to close"))
	return appStyle.Render(out.String())
}