go-llm/llm.go
package go_llm

import (
	"context"
	"fmt"
	"strings"

	"github.com/openai/openai-go"
	"github.com/openai/openai-go/packages/param"
)
// Role identifies the author of a message in a conversation.
type Role string

const (
	RoleSystem    Role = "system"
	RoleUser      Role = "user"
	RoleAssistant Role = "assistant"
)

// Image is an image attachment, supplied either inline (Base64 plus
// ContentType) or by URL.
type Image struct {
	Base64      string
	ContentType string
	Url         string
}
// toRaw converts the image into a plain map for serialization.
func (i Image) toRaw() map[string]any {
	res := map[string]any{
		"base64":      i.Base64,
		"contenttype": i.ContentType,
		"url":         i.Url,
	}

	return res
}

// fromRaw rebuilds an Image from a map produced by toRaw.
func (i *Image) fromRaw(raw map[string]any) Image {
	var res Image

	res.Base64 = raw["base64"].(string)
	res.ContentType = raw["contenttype"].(string)
	res.Url = raw["url"].(string)

	return res
}
// Message is a single chat message from the system, a user, or the assistant,
// optionally carrying image attachments.
type Message struct {
	Role   Role
	Name   string
	Text   string
	Images []Image
}

// toRaw converts the message and its images into a plain map.
func (m Message) toRaw() map[string]any {
	res := map[string]any{
		"role": m.Role,
		"name": m.Name,
		"text": m.Text,
	}

	images := make([]map[string]any, 0, len(m.Images))
	for _, img := range m.Images {
		images = append(images, img.toRaw())
	}

	res["images"] = images

	return res
}

// fromRaw rebuilds a Message from a map produced by toRaw.
func (m *Message) fromRaw(raw map[string]any) Message {
	var res Message

	res.Role = Role(raw["role"].(string))
	res.Name = raw["name"].(string)
	res.Text = raw["text"].(string)

	images := raw["images"].([]map[string]any)
	for _, img := range images {
		var i Image
		res.Images = append(res.Images, i.fromRaw(img))
	}

	return res
}
// toChatCompletionMessages converts the message into the OpenAI chat
// completion format. Images become image-URL content parts, either as data
// URLs (for inline base64 data) or as plain URLs.
func (m Message) toChatCompletionMessages(model string) []openai.ChatCompletionMessageParamUnion {
	var res openai.ChatCompletionMessageParamUnion

	var arrayOfContentParts []openai.ChatCompletionContentPartUnionParam
	var textContent param.Opt[string]

	for _, img := range m.Images {
		if img.Base64 != "" {
			arrayOfContentParts = append(arrayOfContentParts,
				openai.ChatCompletionContentPartUnionParam{
					OfImageURL: &openai.ChatCompletionContentPartImageParam{
						ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
							URL: "data:" + img.ContentType + ";base64," + img.Base64,
						},
					},
				},
			)
		} else if img.Url != "" {
			arrayOfContentParts = append(arrayOfContentParts,
				openai.ChatCompletionContentPartUnionParam{
					OfImageURL: &openai.ChatCompletionContentPartImageParam{
						ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
							URL: img.Url,
						},
					},
				},
			)
		}
	}
	if m.Text != "" {
		if len(arrayOfContentParts) > 0 {
			// Images are present, so the text must ride along as another
			// content part rather than as the plain string content.
			arrayOfContentParts = append(arrayOfContentParts,
				openai.ChatCompletionContentPartUnionParam{
					OfText: &openai.ChatCompletionContentPartTextParam{
						Text: m.Text,
					},
				},
			)
		} else {
			textContent = openai.String(m.Text)
		}
	}

	// Reasoning models (names starting with "o", e.g. "o1-mini") expect the
	// "developer" role in place of "system"; everything else uses "system".
	useSystemInsteadOfDeveloper := true
	if parts := strings.Split(model, "-"); len(parts) > 1 && strings.HasPrefix(parts[0], "o") {
		useSystemInsteadOfDeveloper = false
	}
	switch m.Role {
	case RoleSystem:
		if useSystemInsteadOfDeveloper {
			res = openai.ChatCompletionMessageParamUnion{
				OfSystem: &openai.ChatCompletionSystemMessageParam{
					Content: openai.ChatCompletionSystemMessageParamContentUnion{
						OfString: textContent,
					},
				},
			}
		} else {
			res = openai.ChatCompletionMessageParamUnion{
				OfDeveloper: &openai.ChatCompletionDeveloperMessageParam{
					Content: openai.ChatCompletionDeveloperMessageParamContentUnion{
						OfString: textContent,
					},
				},
			}
		}

	case RoleUser:
		var name param.Opt[string]
		if m.Name != "" {
			name = openai.String(m.Name)
		}

		res = openai.ChatCompletionMessageParamUnion{
			OfUser: &openai.ChatCompletionUserMessageParam{
				Name: name,
				Content: openai.ChatCompletionUserMessageParamContentUnion{
					OfString:              textContent,
					OfArrayOfContentParts: arrayOfContentParts,
				},
			},
		}

	case RoleAssistant:
		var name param.Opt[string]
		if m.Name != "" {
			name = openai.String(m.Name)
		}

		res = openai.ChatCompletionMessageParamUnion{
			OfAssistant: &openai.ChatCompletionAssistantMessageParam{
				Name: name,
				Content: openai.ChatCompletionAssistantMessageParamContentUnion{
					OfString: textContent,
				},
			},
		}
	}

	return []openai.ChatCompletionMessageParamUnion{res}
}
// ToolCall is a function invocation requested by the model, identified by the
// provider-assigned call ID.
type ToolCall struct {
	ID           string
	FunctionCall FunctionCall
}

func (t ToolCall) toRaw() map[string]any {
	res := map[string]any{
		"id": t.ID,
	}

	res["function"] = t.FunctionCall.toRaw()

	return res
}

func (t ToolCall) toChatCompletionMessages(_ string) []openai.ChatCompletionMessageParamUnion {
	return []openai.ChatCompletionMessageParamUnion{{
		OfAssistant: &openai.ChatCompletionAssistantMessageParam{
			ToolCalls: []openai.ChatCompletionMessageToolCallParam{
				{
					ID: t.ID,
					Function: openai.ChatCompletionMessageToolCallFunctionParam{
						Name:      t.FunctionCall.Name,
						Arguments: t.FunctionCall.Arguments,
					},
				},
			},
		},
	}}
}
// ToolCallResponse carries the result (or error) of executing a tool call,
// keyed by the ID of the originating ToolCall.
type ToolCallResponse struct {
	ID     string
	Result any
	Error  error
}

func (t ToolCallResponse) toRaw() map[string]any {
	res := map[string]any{
		"id":     t.ID,
		"result": t.Result,
	}

	if t.Error != nil {
		res["error"] = t.Error.Error()
	}

	return res
}

func (t ToolCallResponse) toChatCompletionMessages(_ string) []openai.ChatCompletionMessageParamUnion {
	// Tool messages have no separate error field, so any execution error is
	// folded into the result string.
	var refusal string
	if t.Error != nil {
		refusal = t.Error.Error()
	}

	if refusal != "" {
		if t.Result != "" {
			t.Result = fmt.Sprint(t.Result) + " (error in execution: " + refusal + ")"
		} else {
			t.Result = "error in execution: " + refusal
		}
	}

	return []openai.ChatCompletionMessageParamUnion{{
		OfTool: &openai.ChatCompletionToolMessageParam{
			ToolCallID: t.ID,
			Content: openai.ChatCompletionToolMessageParamContentUnion{
				OfString: openai.String(fmt.Sprint(t.Result)),
			},
		},
	}}
}
// ChatCompletion performs a single chat completion request against a specific
// model version.
type ChatCompletion interface {
	ChatComplete(ctx context.Context, req Request) (Response, error)
}

// LLM is a provider (OpenAI, Anthropic, Google) that resolves a model version
// string into a ChatCompletion client.
type LLM interface {
	ModelVersion(modelVersion string) (ChatCompletion, error)
}

// OpenAI returns an LLM backed by the OpenAI API using the given API key.
func OpenAI(key string) LLM {
	return openaiImpl{key: key}
}

// Anthropic returns an LLM backed by the Anthropic API using the given API key.
func Anthropic(key string) LLM {
	return anthropic{key: key}
}

// Google returns an LLM backed by the Google API using the given API key.
func Google(key string) LLM {
	return google{key: key}
}
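
// Usage sketch (illustrative only): Request and Response are defined elsewhere
// in this package, so exactly how the Message below is attached to the Request
// (the Messages field here) is an assumption made for this example, not part
// of the API shown in this file.
//
//	llm := OpenAI(os.Getenv("OPENAI_API_KEY"))
//	chat, err := llm.ModelVersion("gpt-4o-mini")
//	if err != nil {
//		log.Fatal(err)
//	}
//
//	msg := Message{Role: RoleUser, Text: "Hello!"}
//	resp, err := chat.ChatComplete(context.Background(), Request{
//		Messages: []Message{msg}, // hypothetical field; see note above
//	})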