Refactor the entire system to be more context-aware so that conversation flow can be managed more easily

This commit is contained in:
2025-03-16 22:38:58 -04:00
parent 0d909edd44
commit 7f5e34e437
9 changed files with 377 additions and 61 deletions

View File

@@ -3,6 +3,7 @@ package go_llm
import (
"context"
"fmt"
"log/slog"
"strings"
oai "github.com/sashabaranov/go-openai"
@@ -15,47 +16,17 @@ type openaiImpl struct {
var _ LLM = openaiImpl{}
func (o openaiImpl) requestToOpenAIRequest(request Request) oai.ChatCompletionRequest {
func (o openaiImpl) newRequestToOpenAIRequest(request Request) oai.ChatCompletionRequest {
res := oai.ChatCompletionRequest{
Model: o.model,
}
for _, i := range request.Conversation {
res.Messages = append(res.Messages, i.toChatCompletionMessages()...)
}
for _, msg := range request.Messages {
m := oai.ChatCompletionMessage{
Content: msg.Text,
Role: string(msg.Role),
Name: msg.Name,
}
for _, img := range msg.Images {
if img.Base64 != "" {
m.MultiContent = append(m.MultiContent, oai.ChatMessagePart{
Type: "image_url",
ImageURL: &oai.ChatMessageImageURL{
URL: fmt.Sprintf("data:%s;base64,%s", img.ContentType, img.Base64),
},
})
} else if img.Url != "" {
m.MultiContent = append(m.MultiContent, oai.ChatMessagePart{
Type: "image_url",
ImageURL: &oai.ChatMessageImageURL{
URL: img.Url,
},
})
}
}
// openai does not allow Content and MultiContent to be set at the same time, so we need to check
if len(m.MultiContent) > 0 && m.Content != "" {
m.MultiContent = append([]oai.ChatMessagePart{{
Type: "text",
Text: m.Content,
}}, m.MultiContent...)
m.Content = ""
}
res.Messages = append(res.Messages, m)
res.Messages = append(res.Messages, msg.toChatCompletionMessages()...)
}
if request.Toolbox != nil {
@@ -130,8 +101,9 @@ func (o openaiImpl) responseToLLMResponse(response oai.ChatCompletionResponse) R
func (o openaiImpl) ChatComplete(ctx context.Context, request Request) (Response, error) {
cl := oai.NewClient(o.key)
req := o.requestToOpenAIRequest(request)
req := o.newRequestToOpenAIRequest(request)
slog.Info("openaiImpl.ChatComplete", "req", fmt.Sprintf("%#v", req))
resp, err := cl.CreateChatCompletion(ctx, req)
fmt.Println("resp:", fmt.Sprintf("%#v", resp))