// Package groq implements the go-llm v2 provider interface for Groq
// (https://console.groq.com). Groq hosts open-source models behind an OpenAI
// Chat Completions-compatible endpoint, so this package is a thin wrapper over
// openaicompat with its own defaults and per-model Rules.
package groq

import (
	"strings"

	"gitea.stevedudenhoeffer.com/steve/go-llm/v2/openaicompat"
)

// DefaultBaseURL is the public Groq OpenAI-compatible endpoint.
const DefaultBaseURL = "https://api.groq.com/openai/v1"

// Provider is a type alias for openaicompat.Provider.
type Provider = openaicompat.Provider

// New creates a new Groq provider. An empty baseURL uses DefaultBaseURL.
func New(apiKey, baseURL string) *Provider {
	if baseURL == "" {
		baseURL = DefaultBaseURL
	}

	return openaicompat.New(apiKey, baseURL, openaicompat.Rules{
		// Only Groq-hosted vision variants (e.g. *-vision-preview) accept images.
		SupportsVision: func(m string) bool { return strings.Contains(m, "vision") },

		// The chat completions endpoint does not accept audio input; audio is
		// handled by dedicated transcription endpoints, which go-llm doesn't
		// cover here.
		SupportsAudio: func(string) bool { return false },

		// Reasoning models hosted on Groq (DeepSeek R1 distill family, qwen
		// reasoning variants, gpt-oss) accept reasoning_effort. Vanilla
		// Llama / Mixtral don't.
		SupportsReasoning: func(m string) bool {
			return strings.Contains(m, "deepseek-r1") ||
				strings.Contains(m, "qwen") ||
				strings.Contains(m, "gpt-oss")
		},
	})
}
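
// Below is a minimal usage sketch, kept comment-only so it cannot drift out
// of compile. The model IDs are illustrative assumptions (New does not
// validate model names); see https://console.groq.com/docs/models for the
// current catalogue.
//
//	p := groq.New(apiKey, "") // empty baseURL selects DefaultBaseURL
//
//	// What the Rules wired up in New imply for a few example model IDs:
//	//   "llama-3.2-11b-vision-preview"  -> SupportsVision: images accepted
//	//   "deepseek-r1-distill-llama-70b" -> SupportsReasoning: reasoning_effort accepted
//	//   "llama-3.1-8b-instant"          -> text-only chat, no reasoning_effort
//
// Because the predicates are plain substring checks, newly added Groq models
// whose names follow the same conventions are classified without code changes.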