package go_llm

import "github.com/sashabaranov/go-openai"

// rawAble is implemented by inputs that can round-trip through a plain
// map[string]any representation.
type rawAble interface {
	toRaw() map[string]any
	fromRaw(raw map[string]any) Input
}

// Input is any conversation item that can be rendered as one or more OpenAI
// chat completion messages.
type Input interface {
	toChatCompletionMessages() []openai.ChatCompletionMessage
}

// Request describes a single chat completion call: the prior conversation,
// the new messages for this turn, the tools the model may call, and an
// optional sampling temperature.
type Request struct {
	Conversation []Input
	Messages     []Message
	Toolbox      *ToolBox
	Temperature  *float32
}

// NextRequest takes the current request's conversation, messages, the response, and any tool
// results, and returns a new request with the conversation updated to include the response
// and tool results.
func (req Request) NextRequest(resp ResponseChoice, toolResults []ToolCallResponse) Request {
	var res Request

	// Carry the call configuration forward unchanged.
	res.Toolbox = req.Toolbox
	res.Temperature = req.Temperature

	// Copy the existing conversation so the original request is not mutated.
	res.Conversation = make([]Input, len(req.Conversation))
	copy(res.Conversation, req.Conversation)

	// Convert every input message from this turn into an Input and add it to the conversation.
	for _, msg := range req.Messages {
		res.Conversation = append(res.Conversation, msg)
	}

	// If the response contains tool calls, add those to the conversation.
	for _, call := range resp.Calls {
		res.Conversation = append(res.Conversation, call)
	}

	// Record the assistant's reply (or refusal) as an assistant message.
	if resp.Content != "" || resp.Refusal != "" {
		res.Conversation = append(res.Conversation, Message{
			Role: RoleAssistant,
			Text: resp.Content,
		})
	}

	// Finally, append the results of any tool calls that were executed.
	for _, result := range toolResults {
		res.Conversation = append(res.Conversation, result)
	}

	return res
}
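
// The sketch below is an illustrative use of NextRequest, not part of the
// original file: the RoleUser constant, the toolbox value, and the
// callModelAndRunTools helper are assumptions and do not necessarily exist
// under these names in this package. It shows how a caller might thread one
// turn's response and tool results into the next request.
//
//	req := Request{
//		Toolbox:  toolbox,
//		Messages: []Message{{Role: RoleUser, Text: "What's the weather like?"}},
//	}
//	choice, toolResults := callModelAndRunTools(req) // hypothetical: returns (ResponseChoice, []ToolCallResponse)
//	next := req.NextRequest(choice, toolResults)
//	// next.Conversation now holds the prior turn, the assistant reply, and the tool results.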