2024-10-06 21:02:26 -04:00
|
|
|
package go_llm
|
2024-10-06 20:01:01 -04:00
|
|
|
|
|
|
|
import (
|
|
|
|
"context"
|
|
|
|
"fmt"
|
|
|
|
oai "github.com/sashabaranov/go-openai"
|
|
|
|
)
|
|
|
|
|
|
|
|
// openai is an LLM implementation backed by the OpenAI chat-completions API
// via the sashabaranov/go-openai client.
type openai struct {
	key   string // API key used to authenticate requests
	model string // model identifier sent with each completion request
}
|
|
|
|
|
|
|
|
var _ LLM = openai{}
|
|
|
|
|
|
|
|
func (o openai) requestToOpenAIRequest(request Request) oai.ChatCompletionRequest {
|
|
|
|
res := oai.ChatCompletionRequest{
|
|
|
|
Model: o.model,
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, msg := range request.Messages {
|
2024-10-06 21:02:26 -04:00
|
|
|
m := oai.ChatCompletionMessage{
|
2024-10-06 20:01:01 -04:00
|
|
|
Content: msg.Text,
|
|
|
|
Role: string(msg.Role),
|
|
|
|
Name: msg.Name,
|
2024-10-06 21:02:26 -04:00
|
|
|
}
|
|
|
|
|
2024-10-07 16:33:57 -04:00
|
|
|
for _, img := range msg.Images {
|
|
|
|
if img.Base64 != "" {
|
|
|
|
m.MultiContent = append(m.MultiContent, oai.ChatMessagePart{
|
|
|
|
Type: "image_url",
|
|
|
|
ImageURL: &oai.ChatMessageImageURL{
|
|
|
|
URL: fmt.Sprintf("data:%s;base64,%s", img.ContentType, img.Base64),
|
|
|
|
},
|
|
|
|
})
|
|
|
|
} else if img.Url != "" {
|
|
|
|
m.MultiContent = append(m.MultiContent, oai.ChatMessagePart{
|
|
|
|
Type: "image_url",
|
|
|
|
ImageURL: &oai.ChatMessageImageURL{
|
|
|
|
URL: img.Url,
|
|
|
|
},
|
|
|
|
})
|
2024-10-06 21:02:26 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
res.Messages = append(res.Messages, m)
|
2024-10-06 20:01:01 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
for _, tool := range request.Toolbox {
|
|
|
|
res.Tools = append(res.Tools, oai.Tool{
|
|
|
|
Type: "function",
|
|
|
|
Function: &oai.FunctionDefinition{
|
|
|
|
Name: tool.Name,
|
|
|
|
Description: tool.Description,
|
|
|
|
Strict: tool.Strict,
|
|
|
|
Parameters: tool.Parameters,
|
|
|
|
},
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2024-10-06 21:02:26 -04:00
|
|
|
if request.Temperature != nil {
|
|
|
|
res.Temperature = *request.Temperature
|
|
|
|
}
|
|
|
|
|
2024-10-06 20:01:01 -04:00
|
|
|
return res
|
|
|
|
}
|
|
|
|
|
|
|
|
func (o openai) responseToLLMResponse(response oai.ChatCompletionResponse) Response {
|
|
|
|
res := Response{}
|
|
|
|
|
|
|
|
for _, choice := range response.Choices {
|
|
|
|
var tools []ToolCall
|
|
|
|
for _, call := range choice.Message.ToolCalls {
|
|
|
|
toolCall := ToolCall{
|
|
|
|
ID: call.ID,
|
|
|
|
FunctionCall: FunctionCall{
|
|
|
|
Name: call.Function.Name,
|
|
|
|
Arguments: call.Function.Arguments,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
tools = append(tools, toolCall)
|
|
|
|
|
|
|
|
}
|
|
|
|
res.Choices = append(res.Choices, ResponseChoice{
|
|
|
|
Content: choice.Message.Content,
|
|
|
|
Role: Role(choice.Message.Role),
|
|
|
|
Name: choice.Message.Name,
|
|
|
|
Refusal: choice.Message.Refusal,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
return res
|
|
|
|
}
|
|
|
|
|
|
|
|
func (o openai) ChatComplete(ctx context.Context, request Request) (Response, error) {
|
|
|
|
cl := oai.NewClient(o.key)
|
|
|
|
|
|
|
|
req := o.requestToOpenAIRequest(request)
|
|
|
|
|
|
|
|
resp, err := cl.CreateChatCompletion(ctx, req)
|
|
|
|
|
|
|
|
fmt.Println("resp:", fmt.Sprintf("%#v", resp))
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
return Response{}, fmt.Errorf("unhandled openai error: %w", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
return o.responseToLLMResponse(resp), nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (o openai) ModelVersion(modelVersion string) (ChatCompletion, error) {
|
|
|
|
return openai{
|
|
|
|
key: o.key,
|
|
|
|
model: modelVersion,
|
|
|
|
}, nil
|
|
|
|
}
|