Refactor: modularize and streamline LLM providers and utility functions

- Migrate `compress_image.go` to `internal/imageutil` for better encapsulation.
- Reorganize LLM provider implementations into distinct packages (`google`, `openai`, and `anthropic`).
- Replace `go_llm` package name with `llm`.
- Refactor internal APIs for improved clarity, including renaming `anthropic` to `anthropicImpl` and `google` to `googleImpl`.
- Add helper methods and restructure message handling for better separation of concerns.
This commit is contained in:
2026-01-24 15:40:38 -05:00
parent be99af3597
commit bf7c86ab2a
18 changed files with 411 additions and 350 deletions

View File

@@ -1,9 +1,6 @@
package go_llm
import (
"github.com/openai/openai-go"
)
package llm
// ResponseChoice represents a single choice in a response.
type ResponseChoice struct {
Index int
Role Role
@@ -32,36 +29,6 @@ func (r ResponseChoice) toRaw() map[string]any {
return res
}
// toChatCompletionMessages converts this response choice into the OpenAI
// chat-completion wire format: a single assistant message carrying the
// choice's name, refusal, content, and any tool calls. The string
// argument is accepted for interface symmetry and is unused here.
func (r ResponseChoice) toChatCompletionMessages(_ string) []openai.ChatCompletionMessageParamUnion {
	assistant := openai.ChatCompletionAssistantMessageParam{}

	// Only populate optional fields that are actually set; the OpenAI
	// client treats zero-valued params as absent.
	if name := r.Name; name != "" {
		assistant.Name = openai.String(name)
	}
	if refusal := r.Refusal; refusal != "" {
		assistant.Refusal = openai.String(refusal)
	}
	if content := r.Content; content != "" {
		assistant.Content.OfString = openai.String(content)
	}

	// Mirror each tool call onto the assistant message.
	for _, c := range r.Calls {
		toolCall := openai.ChatCompletionMessageToolCallParam{
			ID: c.ID,
			Function: openai.ChatCompletionMessageToolCallFunctionParam{
				Name:      c.FunctionCall.Name,
				Arguments: c.FunctionCall.Arguments,
			},
		}
		assistant.ToolCalls = append(assistant.ToolCalls, toolCall)
	}

	return []openai.ChatCompletionMessageParamUnion{{OfAssistant: &assistant}}
}
func (r ResponseChoice) toInput() []Input {
var res []Input
@@ -79,6 +46,7 @@ func (r ResponseChoice) toInput() []Input {
return res
}
// Response represents a response from a language model.
type Response struct {
	// Choices holds the candidate completions returned by the model;
	// presumably most providers return exactly one — confirm against callers.
	Choices []ResponseChoice
}