Refactor the system to be context-aware so that conversation flow can be managed more easily
This commit is contained in:
46
openai.go
46
openai.go
@@ -3,6 +3,7 @@ package go_llm
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"strings"
|
||||
|
||||
oai "github.com/sashabaranov/go-openai"
|
||||
@@ -15,47 +16,17 @@ type openaiImpl struct {
|
||||
|
||||
var _ LLM = openaiImpl{}
|
||||
|
||||
func (o openaiImpl) requestToOpenAIRequest(request Request) oai.ChatCompletionRequest {
|
||||
func (o openaiImpl) newRequestToOpenAIRequest(request Request) oai.ChatCompletionRequest {
|
||||
res := oai.ChatCompletionRequest{
|
||||
Model: o.model,
|
||||
}
|
||||
|
||||
for _, i := range request.Conversation {
|
||||
res.Messages = append(res.Messages, i.toChatCompletionMessages()...)
|
||||
}
|
||||
|
||||
for _, msg := range request.Messages {
|
||||
m := oai.ChatCompletionMessage{
|
||||
Content: msg.Text,
|
||||
Role: string(msg.Role),
|
||||
Name: msg.Name,
|
||||
}
|
||||
|
||||
for _, img := range msg.Images {
|
||||
if img.Base64 != "" {
|
||||
m.MultiContent = append(m.MultiContent, oai.ChatMessagePart{
|
||||
Type: "image_url",
|
||||
ImageURL: &oai.ChatMessageImageURL{
|
||||
URL: fmt.Sprintf("data:%s;base64,%s", img.ContentType, img.Base64),
|
||||
},
|
||||
})
|
||||
} else if img.Url != "" {
|
||||
m.MultiContent = append(m.MultiContent, oai.ChatMessagePart{
|
||||
Type: "image_url",
|
||||
ImageURL: &oai.ChatMessageImageURL{
|
||||
URL: img.Url,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// openai does not allow Content and MultiContent to be set at the same time, so we need to check
|
||||
if len(m.MultiContent) > 0 && m.Content != "" {
|
||||
m.MultiContent = append([]oai.ChatMessagePart{{
|
||||
Type: "text",
|
||||
Text: m.Content,
|
||||
}}, m.MultiContent...)
|
||||
|
||||
m.Content = ""
|
||||
}
|
||||
|
||||
res.Messages = append(res.Messages, m)
|
||||
res.Messages = append(res.Messages, msg.toChatCompletionMessages()...)
|
||||
}
|
||||
|
||||
if request.Toolbox != nil {
|
||||
@@ -130,8 +101,9 @@ func (o openaiImpl) responseToLLMResponse(response oai.ChatCompletionResponse) R
|
||||
func (o openaiImpl) ChatComplete(ctx context.Context, request Request) (Response, error) {
|
||||
cl := oai.NewClient(o.key)
|
||||
|
||||
req := o.requestToOpenAIRequest(request)
|
||||
req := o.newRequestToOpenAIRequest(request)
|
||||
|
||||
slog.Info("openaiImpl.ChatComplete", "req", fmt.Sprintf("%#v", req))
|
||||
resp, err := cl.CreateChatCompletion(ctx, req)
|
||||
|
||||
fmt.Println("resp:", fmt.Sprintf("%#v", resp))
|
||||
|
Reference in New Issue
Block a user