go-llm/response.go
package go_llm

import "github.com/sashabaranov/go-openai"
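
// ResponseChoice is a single completion choice returned by the model,
// carrying its content or refusal and any tool calls it requested.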
type ResponseChoice struct {
	Index   int
	Role    Role
	Content string
	Refusal string
	Name    string
	Calls   []ToolCall
}
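
// toRaw flattens the choice into a generic map, with the serialized tool
// calls stored under "tool_calls".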
func (r ResponseChoice) toRaw() map[string]any {
	res := map[string]any{
		"index":   r.Index,
		"role":    r.Role,
		"content": r.Content,
		"refusal": r.Refusal,
		"name":    r.Name,
	}

	calls := make([]map[string]any, 0, len(r.Calls))
	for _, call := range r.Calls {
		calls = append(calls, call.toRaw())
	}
	res["tool_calls"] = calls

	return res
}
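
// toChatCompletionMessages converts the choice into go-openai's message
// format as a single assistant message, carrying over the content, refusal,
// and tool calls.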
func (r ResponseChoice) toChatCompletionMessages() []openai.ChatCompletionMessage {
	var res = openai.ChatCompletionMessage{
		Role:    openai.ChatMessageRoleAssistant,
		Content: r.Content,
		Refusal: r.Refusal,
	}

	for _, call := range r.Calls {
		res.ToolCalls = append(res.ToolCalls, openai.ToolCall{
			ID:   call.ID,
			Type: openai.ToolTypeFunction,
			Function: openai.FunctionCall{
				Name:      call.FunctionCall.Name,
				Arguments: call.FunctionCall.Arguments,
			},
		})
	}

	return []openai.ChatCompletionMessage{res}
}
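
// toInput converts the choice back into conversation inputs: each tool call
// becomes its own input, followed by an assistant message when the choice
// carries content or a refusal.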
func (r ResponseChoice) toInput() []Input {
	var res []Input

	for _, call := range r.Calls {
		res = append(res, call)
	}

	if r.Content != "" || r.Refusal != "" {
		res = append(res, Message{
			Role: RoleAssistant,
			Text: r.Content,
		})
	}

	return res
}
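
// Response is the set of choices returned for a single completion request.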
type Response struct {
	Choices []ResponseChoice
}