// go-llm/request.go
package go_llm
import (
"github.com/openai/openai-go"
)
// rawAble is implemented by inputs that can round-trip through a generic
// map representation (e.g. for serialization/persistence).
type rawAble interface {
	// toRaw renders the value as a plain map.
	toRaw() map[string]any
	// fromRaw reconstructs an Input from a map previously produced by toRaw.
	fromRaw(raw map[string]any) Input
}
// Input is any item that can appear in a conversation and be converted into
// one or more OpenAI chat-completion messages for the given model.
type Input interface {
	toChatCompletionMessages(model string) []openai.ChatCompletionMessageParamUnion
}
// Request describes a single LLM call: the accumulated conversation history,
// new messages to send, available tools, and sampling options.
type Request struct {
	// Conversation holds the prior history (messages, responses, tool results).
	Conversation []Input
	// Messages are the new messages for this request; NextRequest folds them
	// into the conversation of the request it returns.
	Messages []Message
	// Toolbox lists the tools the model may call.
	Toolbox ToolBox
	// Temperature optionally overrides the sampling temperature; nil means
	// the provider default.
	Temperature *float64
}
// NextRequest will take the current request's conversation, messages, the response, and any tool results, and
// return a new request with the conversation updated to include the response and tool results.
// NextRequest will take the current request's conversation, messages, the response, and any tool results, and
// return a new request with the conversation updated to include the response and tool results.
//
// The returned request carries over the toolbox and temperature; its Messages
// field is left empty because the pending messages have been folded into
// Conversation.
func (req Request) NextRequest(resp ResponseChoice, toolResults []ToolCallResponse) Request {
	res := Request{
		Toolbox:     req.Toolbox,
		Temperature: req.Temperature,
	}

	// Reserve capacity up front: prior history + pending messages +
	// (at most one) response + tool results. Avoids repeated reallocation
	// as the conversation is assembled below.
	res.Conversation = make([]Input, 0, len(req.Conversation)+len(req.Messages)+1+len(toolResults))
	res.Conversation = append(res.Conversation, req.Conversation...)

	// Each pending message becomes part of the conversation history.
	// (A loop is required: Message -> Input is an interface conversion,
	// so req.Messages... cannot be appended directly.)
	for _, msg := range req.Messages {
		res.Conversation = append(res.Conversation, msg)
	}

	// Record the response only if it actually carries something: content,
	// a refusal, or tool calls.
	if resp.Content != "" || resp.Refusal != "" || len(resp.Calls) > 0 {
		res.Conversation = append(res.Conversation, resp)
	}

	// Tool results follow the response that requested them.
	for _, result := range toolResults {
		res.Conversation = append(res.Conversation, result)
	}

	return res
}