Files
go-llm/parse.go
Steve Dudenhoeffer bf7c86ab2a Refactor: modularize and streamline LLM providers and utility functions
- Migrate `compress_image.go` to `internal/imageutil` for better encapsulation.
- Reorganize LLM provider implementations into distinct packages (`google`, `openai`, and `anthropic`).
- Replace `go_llm` package name with `llm`.
- Refactor internal APIs for improved clarity, including renaming `anthropic` to `anthropicImpl` and `google` to `googleImpl`.
- Add helper methods and restructure message handling for better separation of concerns.
2026-01-24 15:40:38 -05:00

51 lines
1.3 KiB
Go

package llm
import (
"strings"
)
// Providers maps a short provider name to its LLM implementation. The keys
// are the allowed shortcuts when parsing, e.g.: if you set
// { "openai": OpenAI("key") } that'll allow for the "openai" provider to be
// used when parsed. The reserved key "default" is consulted by Parse when the
// input contains no "provider/" prefix.
type Providers map[string]LLM
// Parse will parse the provided input and attempt to return a LLM chat
// completion interface.
// Input should be in the provided format:
//   - provider/modelname
//
// where provider is a key inside Providers, and the modelname being passed to
// the LLM interface's ModelVersion. Only the FIRST "/" separates provider
// from model, so model names may themselves contain slashes
// (e.g. "openrouter/meta-llama/llama-3").
//
// If the input contains no "/", the whole input is treated as the model name
// and the special "default" provider key is used.
//
// Parse panics on an unknown provider, on a bare model name with no "default"
// provider configured, or when the provider rejects the model name.
func (providers Providers) Parse(input string) ChatCompletion {
	providerName, modelVersion, found := strings.Cut(input, "/")
	if !found {
		// No separator: fall back to the "default" provider and treat the
		// entire input as the model name.
		providerName = "default"
		modelVersion = input
	}

	provider, ok := providers[providerName]
	if !ok {
		if !found {
			panic("expected format: \"provider/model\" or provide a \"default\" provider to the Parse callback")
		}
		// An explicit provider segment was given but is not registered.
		panic("unknown provider: " + providerName)
	}
	if provider == nil {
		// The key exists but maps to a nil LLM — a configuration error.
		panic("unknown provider: " + providerName)
	}

	res, err := provider.ModelVersion(modelVersion)
	if err != nil {
		panic(err)
	}
	return res
}