package go_llm

import (
	"strings"
)

// Providers are the allowed shortcuts in the providers, e.g.: if you set { "openai": OpenAI("key") } that'll allow
// for the "openai" provider to be used when parsed.
type Providers map[string]LLM

// Parse will parse the provided input and attempt to return a LLM chat completion interface.
// Input should be in the provided format:
//   - provider/modelname
//
// where provider is a key inside Providers, and the modelname being passed to the LLM interface's
// ModelVersion. If the input contains no "/", the whole input is treated as the model name and the
// "default" provider entry is used.
//
// Parse panics when the provider is missing/unknown or when the provider rejects the model version.
func (providers Providers) Parse(input string) ChatCompletion {
	// Cut (not Split) so model names that themselves contain "/" are kept intact,
	// e.g. "openrouter/meta-llama/llama-3" -> provider "openrouter", model "meta-llama/llama-3".
	providerName, modelVersion, found := strings.Cut(input, "/")
	if !found {
		// No separator: fall back to the "default" provider with the whole input as the model.
		providerName, modelVersion = "default", input
	}

	provider, ok := providers[providerName]
	if !ok {
		if !found {
			// No "provider/" prefix and no "default" entry to fall back on.
			panic("expected format: \"provider/model\" or provide a \"default\" provider to the Parse callback")
		}
		panic("unknown provider: " + providerName)
	}
	if provider == nil {
		// A key explicitly mapped to a nil LLM is as unusable as a missing one.
		panic("unknown provider: " + providerName)
	}

	res, err := provider.ModelVersion(modelVersion)
	if err != nil {
		panic(err)
	}
	return res
}