go-llm/v2/constructors.go
steve 34119e5a00
feat: add DeepSeek, Moonshot, xAI, Groq, Ollama; drop v1; migrate TUI to v2
Five OpenAI-compatible providers join the library as first-class constructors
(llm.DeepSeek, llm.Moonshot, llm.XAI, llm.Groq, llm.Ollama). Their wire-level
implementation is shared via a new v2/openaicompat package, which is the
extracted guts of the old v2/openai provider. Each provider supplies its own
Rules value to declare per-model constraints (e.g., DeepSeek Reasoner rejects
tools and temperature, Moonshot/xAI accept images only on *-vision* models,
Groq rejects audio input). v2/openai itself becomes a thin wrapper that sets
RestrictTemperature for o-series and gpt-5 models.
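
As a rough sketch only (the Rules field names below are assumptions, not the
actual v2/openaicompat API), a provider's constraint declaration could look
something like this:

    // Hypothetical sketch: field names are invented to illustrate the
    // per-model Rules idea; the real openaicompat types may differ.
    var deepseekRules = openaicompat.Rules{
        // deepseek-reasoner rejects tool calls and the temperature parameter.
        RejectsTools:       func(model string) bool { return model == "deepseek-reasoner" },
        RejectsTemperature: func(model string) bool { return model == "deepseek-reasoner" },
    }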

A new provider registry (v2/registry.go) exposes llm.Providers() and drives
the TUI's provider picker, so adding a provider in the future is a single-file
change.
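
A hedged sketch of how the registry might be consumed (the concrete return
type of llm.Providers() isn't shown in this commit, so the Name field is an
assumption):

    // Sketch: iterate the registry to build a picker; the entry's exact
    // shape (e.g. a Name field) is assumed, not confirmed by this file.
    for _, p := range llm.Providers() {
        fmt.Println(p.Name) // e.g. "openai", "deepseek", "ollama"
    }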

The TUI at cmd/llm was migrated from v1 to v2 and moved to v2/cmd/llm. With
nothing else depending on v1, the v1 code at the repo root (all .go files,
schema/, internal/, provider/, root go.mod/go.sum) is deleted.

Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
2026-04-24 13:34:39 +00:00

120 lines
3.2 KiB
Go

package llm

import (
	anthProvider "gitea.stevedudenhoeffer.com/steve/go-llm/v2/anthropic"
	deepseekProvider "gitea.stevedudenhoeffer.com/steve/go-llm/v2/deepseek"
	googleProvider "gitea.stevedudenhoeffer.com/steve/go-llm/v2/google"
	groqProvider "gitea.stevedudenhoeffer.com/steve/go-llm/v2/groq"
	moonshotProvider "gitea.stevedudenhoeffer.com/steve/go-llm/v2/moonshot"
	ollamaProvider "gitea.stevedudenhoeffer.com/steve/go-llm/v2/ollama"
	openaiProvider "gitea.stevedudenhoeffer.com/steve/go-llm/v2/openai"
	xaiProvider "gitea.stevedudenhoeffer.com/steve/go-llm/v2/xai"
)

// OpenAI creates an OpenAI client.
//
// Example:
//
//	model := llm.OpenAI("sk-...").Model("gpt-4o")
func OpenAI(apiKey string, opts ...ClientOption) *Client {
	cfg := &clientConfig{}
	for _, opt := range opts {
		opt(cfg)
	}
	return NewClient(openaiProvider.New(apiKey, cfg.baseURL))
}

// Anthropic creates an Anthropic client.
//
// Example:
//
//	model := llm.Anthropic("sk-ant-...").Model("claude-sonnet-4-20250514")
func Anthropic(apiKey string, opts ...ClientOption) *Client {
	cfg := &clientConfig{}
	for _, opt := range opts {
		opt(cfg)
	}
	_ = cfg // Anthropic doesn't support custom base URL in the SDK
	return NewClient(anthProvider.New(apiKey))
}

// Google creates a Google (Gemini) client.
//
// Example:
//
//	model := llm.Google("...").Model("gemini-2.0-flash")
func Google(apiKey string, opts ...ClientOption) *Client {
	cfg := &clientConfig{}
	for _, opt := range opts {
		opt(cfg)
	}
	_ = cfg // Google doesn't support custom base URL in the SDK
	return NewClient(googleProvider.New(apiKey))
}

// DeepSeek creates a DeepSeek client (OpenAI-compatible).
//
// Example:
//
//	model := llm.DeepSeek("sk-...").Model("deepseek-chat")
func DeepSeek(apiKey string, opts ...ClientOption) *Client {
	cfg := &clientConfig{}
	for _, opt := range opts {
		opt(cfg)
	}
	return NewClient(deepseekProvider.New(apiKey, cfg.baseURL))
}

// Moonshot creates a Moonshot AI (Kimi) client (OpenAI-compatible).
//
// Example:
//
//	model := llm.Moonshot("sk-...").Model("kimi-k2-0711-preview")
func Moonshot(apiKey string, opts ...ClientOption) *Client {
	cfg := &clientConfig{}
	for _, opt := range opts {
		opt(cfg)
	}
	return NewClient(moonshotProvider.New(apiKey, cfg.baseURL))
}

// XAI creates an xAI (Grok) client (OpenAI-compatible).
//
// Example:
//
//	model := llm.XAI("xai-...").Model("grok-2")
func XAI(apiKey string, opts ...ClientOption) *Client {
	cfg := &clientConfig{}
	for _, opt := range opts {
		opt(cfg)
	}
	return NewClient(xaiProvider.New(apiKey, cfg.baseURL))
}

// Groq creates a Groq client (OpenAI-compatible).
//
// Example:
//
//	model := llm.Groq("gsk-...").Model("llama-3.3-70b-versatile")
func Groq(apiKey string, opts ...ClientOption) *Client {
	cfg := &clientConfig{}
	for _, opt := range opts {
		opt(cfg)
	}
	return NewClient(groqProvider.New(apiKey, cfg.baseURL))
}

// Ollama creates a client for a local Ollama instance (OpenAI-compatible).
// No API key is required. Use WithBaseURL to point at a non-default host/port.
//
// Example:
//
//	model := llm.Ollama().Model("llama3.2")
func Ollama(opts ...ClientOption) *Client {
	cfg := &clientConfig{}
	for _, opt := range opts {
		opt(cfg)
	}
	return NewClient(ollamaProvider.New("", cfg.baseURL))
}
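
Typical usage of these constructors, e.g. pointing Ollama at a non-default
host (assuming WithBaseURL, referenced in the Ollama doc comment above, takes
the base URL as a string):

    // Local Ollama on the default host/port.
    model := llm.Ollama().Model("llama3.2")

    // Ollama on another machine (WithBaseURL signature assumed).
    remote := llm.Ollama(llm.WithBaseURL("http://192.168.1.50:11434")).Model("llama3.2")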