// Package ollama implements the go-llm v2 provider interface for Ollama
// (https://ollama.com), a local model runner that exposes an OpenAI Chat
// Completions-compatible endpoint. No API key is required; capability depends
// on whichever model the user has pulled locally, so Rules are intentionally
// empty: we trust the local user.
package ollama

import (
	"gitea.stevedudenhoeffer.com/steve/go-llm/v2/openaicompat"
)

// DefaultBaseURL points at a local Ollama instance on the default port.
const DefaultBaseURL = "http://localhost:11434/v1"

// Provider is a type alias over openaicompat.Provider.
type Provider = openaicompat.Provider

// New creates a new Ollama provider. An empty baseURL uses DefaultBaseURL.
// Ollama ignores the API key; callers may pass "".
func New(apiKey, baseURL string) *Provider {
	if baseURL == "" {
		baseURL = DefaultBaseURL
	}
	return openaicompat.New(apiKey, baseURL, openaicompat.Rules{})
}
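
// Usage sketch (illustrative comment only, not compiled API; the remote host
// below is hypothetical). It relies solely on what this file declares: an
// empty API key and base URL fall back to the local defaults, while a
// non-empty baseURL can target any other machine running Ollama.
//
//	local := ollama.New("", "")                                // http://localhost:11434/v1, no key
//	remote := ollama.New("", "http://ollama.internal:11434/v1") // hypothetical remote endpoint
//	_, _ = local, remote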