- Migrate `compress_image.go` to `internal/imageutil` for better encapsulation.
- Reorganize LLM provider implementations into distinct packages (`google`, `openai`, and `anthropic`).
- Replace the `go_llm` package name with `llm`.
- Refactor internal APIs for improved clarity, including renaming `anthropic` to `anthropicImpl` and `google` to `googleImpl`.
- Add helper methods and restructure message handling for better separation of concerns.
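For context, here is a minimal sketch of how the reorganized `llm` package might be consumed by a caller. The constructor name `NewOpenAI` and the module import path are assumptions made for illustration (the provider type `openaiImpl` in the file below is unexported, so the package presumably exposes a constructor elsewhere); `Request`, `Message`, the role constants, and `ChatComplete` are taken from the file below, assuming `ChatComplete` is part of the `LLM` interface.

```go
package main

import (
	"context"
	"fmt"
	"log"

	"example.com/yourmodule/llm" // hypothetical import path for the renamed package
)

func main() {
	// NewOpenAI is a hypothetical constructor for the unexported openaiImpl provider.
	var client llm.LLM = llm.NewOpenAI("sk-...", "gpt-4o")

	// Toolbox and Conversation are left empty; their zero values are assumed to be usable here.
	resp, err := client.ChatComplete(context.Background(), llm.Request{
		Messages: []llm.Message{
			{Role: llm.RoleSystem, Text: "You are a terse assistant."},
			{Role: llm.RoleUser, Text: "Summarize the refactor in one sentence."},
		},
	})
	if err != nil {
		log.Fatal(err)
	}

	// Response carries one ResponseChoice per OpenAI choice; print the text content.
	for _, choice := range resp.Choices {
		fmt.Println(choice.Content)
	}
}
```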
package llm

import (
	"context"
	"fmt"
	"strings"

	"github.com/openai/openai-go"
	"github.com/openai/openai-go/option"
	"github.com/openai/openai-go/packages/param"
	"github.com/openai/openai-go/shared"
)

// openaiImpl is the OpenAI-backed implementation of the LLM interface.
type openaiImpl struct {
	key     string
	model   string
	baseUrl string
}

var _ LLM = openaiImpl{}

// newRequestToOpenAIRequest converts a Request into OpenAI chat completion parameters.
func (o openaiImpl) newRequestToOpenAIRequest(request Request) openai.ChatCompletionNewParams {
	res := openai.ChatCompletionNewParams{
		Model: o.model,
	}

	for _, i := range request.Conversation {
		res.Messages = append(res.Messages, inputToChatCompletionMessages(i, o.model)...)
	}

	for _, msg := range request.Messages {
		res.Messages = append(res.Messages, messageToChatCompletionMessages(msg, o.model)...)
	}

	for _, tool := range request.Toolbox.Functions() {
		res.Tools = append(res.Tools, openai.ChatCompletionToolParam{
			Type: "function",
			Function: shared.FunctionDefinitionParam{
				Name:        tool.Name,
				Description: openai.String(tool.Description),
				Strict:      openai.Bool(tool.Strict),
				Parameters:  tool.Parameters.OpenAIParameters(),
			},
		})
	}

	if request.Toolbox.RequiresTool() {
		res.ToolChoice = openai.ChatCompletionToolChoiceOptionUnionParam{
			OfAuto: openai.String("required"),
		}
	}

	if request.Temperature != nil {
		// o* and gpt-5* models do not support custom temperatures, so only
		// forward the temperature for other models.
		if !strings.HasPrefix(o.model, "o") && !strings.HasPrefix(o.model, "gpt-5") {
			res.Temperature = openai.Float(*request.Temperature)
		}
	}

	return res
}

// responseToLLMResponse converts an OpenAI chat completion into the package's Response type.
func (o openaiImpl) responseToLLMResponse(response *openai.ChatCompletion) Response {
	var res Response

	if response == nil || len(response.Choices) == 0 {
		return res
	}

	for _, choice := range response.Choices {
		var toolCalls []ToolCall
		for _, call := range choice.Message.ToolCalls {
			toolCalls = append(toolCalls, ToolCall{
				ID: call.ID,
				FunctionCall: FunctionCall{
					Name:      call.Function.Name,
					Arguments: strings.TrimSpace(call.Function.Arguments),
				},
			})
		}

		res.Choices = append(res.Choices, ResponseChoice{
			Content: choice.Message.Content,
			Role:    Role(choice.Message.Role),
			Refusal: choice.Message.Refusal,
			Calls:   toolCalls,
		})
	}

	return res
}

// ChatComplete sends the request to the OpenAI chat completions API and converts the result.
func (o openaiImpl) ChatComplete(ctx context.Context, request Request) (Response, error) {
	opts := []option.RequestOption{
		option.WithAPIKey(o.key),
	}

	if o.baseUrl != "" {
		opts = append(opts, option.WithBaseURL(o.baseUrl))
	}

	cl := openai.NewClient(opts...)

	req := o.newRequestToOpenAIRequest(request)

	resp, err := cl.Chat.Completions.New(ctx, req)
	if err != nil {
		return Response{}, fmt.Errorf("unhandled openai error: %w", err)
	}

	return o.responseToLLMResponse(resp), nil
}

// ModelVersion returns a copy of the provider configured for the given model version.
func (o openaiImpl) ModelVersion(modelVersion string) (ChatCompletion, error) {
	return openaiImpl{
		key:     o.key,
		model:   modelVersion,
		baseUrl: o.baseUrl,
	}, nil
}

// inputToChatCompletionMessages converts an Input to OpenAI chat completion messages.
func inputToChatCompletionMessages(input Input, model string) []openai.ChatCompletionMessageParamUnion {
	switch v := input.(type) {
	case Message:
		return messageToChatCompletionMessages(v, model)
	case ToolCall:
		return toolCallToChatCompletionMessages(v)
	case ToolCallResponse:
		return toolCallResponseToChatCompletionMessages(v)
	case ResponseChoice:
		return responseChoiceToChatCompletionMessages(v)
	default:
		return nil
	}
}

// messageToChatCompletionMessages converts a Message (text plus optional images)
// into an OpenAI chat completion message appropriate for the given model.
func messageToChatCompletionMessages(m Message, model string) []openai.ChatCompletionMessageParamUnion {
	var res openai.ChatCompletionMessageParamUnion

	var arrayOfContentParts []openai.ChatCompletionContentPartUnionParam
	var textContent param.Opt[string]

	for _, img := range m.Images {
		if img.Base64 != "" {
			arrayOfContentParts = append(arrayOfContentParts,
				openai.ChatCompletionContentPartUnionParam{
					OfImageURL: &openai.ChatCompletionContentPartImageParam{
						ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
							URL: "data:" + img.ContentType + ";base64," + img.Base64,
						},
					},
				},
			)
		} else if img.Url != "" {
			arrayOfContentParts = append(arrayOfContentParts,
				openai.ChatCompletionContentPartUnionParam{
					OfImageURL: &openai.ChatCompletionContentPartImageParam{
						ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
							URL: img.Url,
						},
					},
				},
			)
		}
	}

	if m.Text != "" {
		if len(arrayOfContentParts) > 0 {
			// When images are present, send the text as an additional content part.
			arrayOfContentParts = append(arrayOfContentParts,
				openai.ChatCompletionContentPartUnionParam{
					OfText: &openai.ChatCompletionContentPartTextParam{
						Text: m.Text,
					},
				},
			)
		} else {
			textContent = openai.String(m.Text)
		}
	}

	// o-series models expect the "developer" role in place of "system".
	a := strings.Split(model, "-")
	useSystemInsteadOfDeveloper := true
	if len(a) > 1 && a[0][0] == 'o' {
		useSystemInsteadOfDeveloper = false
	}

	switch m.Role {
	case RoleSystem:
		if useSystemInsteadOfDeveloper {
			res = openai.ChatCompletionMessageParamUnion{
				OfSystem: &openai.ChatCompletionSystemMessageParam{
					Content: openai.ChatCompletionSystemMessageParamContentUnion{
						OfString: textContent,
					},
				},
			}
		} else {
			res = openai.ChatCompletionMessageParamUnion{
				OfDeveloper: &openai.ChatCompletionDeveloperMessageParam{
					Content: openai.ChatCompletionDeveloperMessageParamContentUnion{
						OfString: textContent,
					},
				},
			}
		}

	case RoleUser:
		var name param.Opt[string]
		if m.Name != "" {
			name = openai.String(m.Name)
		}

		res = openai.ChatCompletionMessageParamUnion{
			OfUser: &openai.ChatCompletionUserMessageParam{
				Name: name,
				Content: openai.ChatCompletionUserMessageParamContentUnion{
					OfString:              textContent,
					OfArrayOfContentParts: arrayOfContentParts,
				},
			},
		}

	case RoleAssistant:
		var name param.Opt[string]
		if m.Name != "" {
			name = openai.String(m.Name)
		}

		res = openai.ChatCompletionMessageParamUnion{
			OfAssistant: &openai.ChatCompletionAssistantMessageParam{
				Name: name,
				Content: openai.ChatCompletionAssistantMessageParamContentUnion{
					OfString: textContent,
				},
			},
		}
	}

	return []openai.ChatCompletionMessageParamUnion{res}
}

// toolCallToChatCompletionMessages converts a ToolCall into an assistant message carrying the call.
func toolCallToChatCompletionMessages(t ToolCall) []openai.ChatCompletionMessageParamUnion {
	return []openai.ChatCompletionMessageParamUnion{{
		OfAssistant: &openai.ChatCompletionAssistantMessageParam{
			ToolCalls: []openai.ChatCompletionMessageToolCallParam{
				{
					ID: t.ID,
					Function: openai.ChatCompletionMessageToolCallFunctionParam{
						Name:      t.FunctionCall.Name,
						Arguments: t.FunctionCall.Arguments,
					},
				},
			},
		},
	}}
}

// toolCallResponseToChatCompletionMessages converts a ToolCallResponse into a tool message,
// folding any execution error into the result text.
func toolCallResponseToChatCompletionMessages(t ToolCallResponse) []openai.ChatCompletionMessageParamUnion {
	var refusal string
	if t.Error != nil {
		refusal = t.Error.Error()
	}

	result := t.Result
	if refusal != "" {
		if result != "" {
			result = fmt.Sprint(result) + " (error in execution: " + refusal + ")"
		} else {
			result = "error in execution: " + refusal
		}
	}

	return []openai.ChatCompletionMessageParamUnion{{
		OfTool: &openai.ChatCompletionToolMessageParam{
			ToolCallID: t.ID,
			Content: openai.ChatCompletionToolMessageParamContentUnion{
				OfString: openai.String(fmt.Sprint(result)),
			},
		},
	}}
}

// responseChoiceToChatCompletionMessages converts a ResponseChoice back into an assistant message.
func responseChoiceToChatCompletionMessages(r ResponseChoice) []openai.ChatCompletionMessageParamUnion {
	var as openai.ChatCompletionAssistantMessageParam

	if r.Name != "" {
		as.Name = openai.String(r.Name)
	}
	if r.Refusal != "" {
		as.Refusal = openai.String(r.Refusal)
	}
	if r.Content != "" {
		as.Content.OfString = openai.String(r.Content)
	}

	for _, call := range r.Calls {
		as.ToolCalls = append(as.ToolCalls, openai.ChatCompletionMessageToolCallParam{
			ID: call.ID,
			Function: openai.ChatCompletionMessageToolCallFunctionParam{
				Name:      call.FunctionCall.Name,
				Arguments: call.FunctionCall.Arguments,
			},
		})
	}

	return []openai.ChatCompletionMessageParamUnion{
		{
			OfAssistant: &as,
		},
	}
}