Add LLM parsing functionality

Introduce `Providers` type (a map of provider names to LLM implementations) to handle different language model providers. Implement `Parse` method to extract and validate the provider/model pair from an input string and return a chat completion interface. Panic with descriptive messages on invalid formats or unknown providers.
Steve Dudenhoeffer 2025-05-01 22:11:23 -04:00
parent 39ffb82237
commit e9baf7910e

parse.go (new file, 50 additions)

@@ -0,0 +1,50 @@
package go_llm

import (
	"strings"
)
// Providers maps provider shortcut names to LLM implementations, e.g. setting
// { "openai": OpenAI("key") } allows the "openai" provider to be used when parsing.
type Providers map[string]LLM
// Parse will parse the provided input and attempt to return an LLM chat completion interface.
// Input should be in the following format:
//   - provider/modelname
//
// where provider is a key inside Providers, and modelname is passed to the LLM interface's ModelVersion method.
func (providers Providers) Parse(input string) ChatCompletion {
	sections := strings.Split(input, "/")

	var provider LLM
	var ok bool
	var modelVersion string

	if len(sections) < 2 {
		// is there a default provider?
		provider, ok = providers["default"]

		if !ok {
			panic("expected format: \"provider/model\" or provide a \"default\" provider to the Parse callback")
		}

		modelVersion = sections[0]
	} else {
		provider, ok = providers[sections[0]]
		modelVersion = sections[1]
	}

	if !ok {
		panic("expected format: \"provider/model\" or provide a \"default\" provider to the Parse callback")
	}

	if provider == nil {
		panic("unknown provider: " + sections[0])
	}

	res, err := provider.ModelVersion(modelVersion)
	if err != nil {
		panic(err)
	}

	return res
}
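
A minimal usage sketch (not part of this commit): the `OpenAI("key")` constructor comes from the doc comment above, while the import path and the model name "gpt-4o" are assumptions for illustration only.

package main

import (
	go_llm "example.com/go-llm" // hypothetical import path for this module
)

func main() {
	// Register provider shortcuts; "default" is used when no provider prefix is given.
	providers := go_llm.Providers{
		"openai":  go_llm.OpenAI("key"),
		"default": go_llm.OpenAI("key"),
	}

	// "openai/gpt-4o" selects the "openai" entry and passes "gpt-4o" to its ModelVersion method.
	chat := providers.Parse("openai/gpt-4o")

	// A bare model name falls back to the "default" provider.
	fallback := providers.Parse("gpt-4o")

	_ = chat
	_ = fallback
}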