Refactor: modularize and streamline LLM providers and utility functions
- Migrate `compress_image.go` to `internal/imageutil` for better encapsulation. - Reorganize LLM provider implementations into distinct packages (`google`, `openai`, and `anthropic`). - Replace `go_llm` package name with `llm`. - Refactor internal APIs for improved clarity, including renaming `anthropic` to `anthropicImpl` and `google` to `googleImpl`. - Add helper methods and restructure message handling for better separation of concerns.
This commit is contained in:
272
llm.go
272
llm.go
@@ -1,286 +1,30 @@
|
||||
package go_llm
|
||||
package llm
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/openai/openai-go"
|
||||
"github.com/openai/openai-go/packages/param"
|
||||
)
|
||||
|
||||
// Role identifies the author of a chat message (system, user, or assistant).
type Role string
||||
// Message roles understood by the providers.
const (
	RoleSystem    Role = "system"
	RoleUser      Role = "user"
	RoleAssistant Role = "assistant"
)
||||
// Image is an image attachment on a Message. Callers set either the
// Base64/ContentType pair or Url; toChatCompletionMessages prefers the
// base64 payload when both are present.
type Image struct {
	Base64      string // raw base64 payload, without any "data:" prefix
	ContentType string // MIME type (e.g. "image/png") paired with Base64
	Url         string // remote image URL, used when Base64 is empty
}
||||
func (i Image) toRaw() map[string]any {
|
||||
res := map[string]any{
|
||||
"base64": i.Base64,
|
||||
"contenttype": i.ContentType,
|
||||
"url": i.Url,
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
// fromRaw rebuilds an Image from the map produced by toRaw.
// NOTE(review): the receiver is never read or written — the decoded value is
// returned instead of being stored in *i.
// Panics if any expected key is missing or holds a non-string value; assumes
// the map came straight from toRaw.
func (i *Image) fromRaw(raw map[string]any) Image {
	var res Image

	res.Base64 = raw["base64"].(string)
	res.ContentType = raw["contenttype"].(string)
	res.Url = raw["url"].(string)

	return res
}
||||
// Message is a provider-agnostic chat message: a role, an optional
// participant name, text content, and any number of image attachments.
type Message struct {
	Role   Role    // who authored the message
	Name   string  // optional participant name (forwarded to OpenAI user/assistant messages)
	Text   string  // textual content
	Images []Image // image attachments, converted to content parts for OpenAI
}
||||
func (m Message) toRaw() map[string]any {
|
||||
res := map[string]any{
|
||||
"role": m.Role,
|
||||
"name": m.Name,
|
||||
"text": m.Text,
|
||||
}
|
||||
|
||||
images := make([]map[string]any, 0, len(m.Images))
|
||||
for _, img := range m.Images {
|
||||
images = append(images, img.toRaw())
|
||||
}
|
||||
|
||||
res["images"] = images
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
// fromRaw rebuilds a Message from the map produced by toRaw.
// NOTE(review): the receiver is never read or written — the decoded value is
// returned instead of being stored in *m.
// Panics if a key is missing or if "images" is not []map[string]any; a JSON
// round-trip would decode it as []any, so this assumes the map came straight
// from toRaw — confirm before feeding it deserialized data.
func (m *Message) fromRaw(raw map[string]any) Message {
	var res Message

	res.Role = Role(raw["role"].(string))
	res.Name = raw["name"].(string)
	res.Text = raw["text"].(string)

	images := raw["images"].([]map[string]any)
	for _, img := range images {
		var i Image

		res.Images = append(res.Images, i.fromRaw(img))
	}

	return res
}
||||
func (m Message) toChatCompletionMessages(model string) []openai.ChatCompletionMessageParamUnion {
|
||||
var res openai.ChatCompletionMessageParamUnion
|
||||
|
||||
var arrayOfContentParts []openai.ChatCompletionContentPartUnionParam
|
||||
var textContent param.Opt[string]
|
||||
|
||||
for _, img := range m.Images {
|
||||
if img.Base64 != "" {
|
||||
arrayOfContentParts = append(arrayOfContentParts,
|
||||
openai.ChatCompletionContentPartUnionParam{
|
||||
OfImageURL: &openai.ChatCompletionContentPartImageParam{
|
||||
ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
|
||||
URL: "data:" + img.ContentType + ";base64," + img.Base64,
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
} else if img.Url != "" {
|
||||
arrayOfContentParts = append(arrayOfContentParts,
|
||||
openai.ChatCompletionContentPartUnionParam{
|
||||
OfImageURL: &openai.ChatCompletionContentPartImageParam{
|
||||
ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
|
||||
URL: img.Url,
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if m.Text != "" {
|
||||
if len(arrayOfContentParts) > 0 {
|
||||
arrayOfContentParts = append(arrayOfContentParts,
|
||||
openai.ChatCompletionContentPartUnionParam{
|
||||
OfText: &openai.ChatCompletionContentPartTextParam{
|
||||
Text: "\n",
|
||||
},
|
||||
},
|
||||
)
|
||||
} else {
|
||||
textContent = openai.String(m.Text)
|
||||
}
|
||||
}
|
||||
|
||||
a := strings.Split(model, "-")
|
||||
|
||||
useSystemInsteadOfDeveloper := true
|
||||
if len(a) > 1 && a[0][0] == 'o' {
|
||||
useSystemInsteadOfDeveloper = false
|
||||
}
|
||||
|
||||
switch m.Role {
|
||||
case RoleSystem:
|
||||
if useSystemInsteadOfDeveloper {
|
||||
res = openai.ChatCompletionMessageParamUnion{
|
||||
OfSystem: &openai.ChatCompletionSystemMessageParam{
|
||||
Content: openai.ChatCompletionSystemMessageParamContentUnion{
|
||||
OfString: textContent,
|
||||
},
|
||||
},
|
||||
}
|
||||
} else {
|
||||
res = openai.ChatCompletionMessageParamUnion{
|
||||
OfDeveloper: &openai.ChatCompletionDeveloperMessageParam{
|
||||
Content: openai.ChatCompletionDeveloperMessageParamContentUnion{
|
||||
OfString: textContent,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
case RoleUser:
|
||||
var name param.Opt[string]
|
||||
if m.Name != "" {
|
||||
name = openai.String(m.Name)
|
||||
}
|
||||
|
||||
res = openai.ChatCompletionMessageParamUnion{
|
||||
OfUser: &openai.ChatCompletionUserMessageParam{
|
||||
Name: name,
|
||||
Content: openai.ChatCompletionUserMessageParamContentUnion{
|
||||
OfString: textContent,
|
||||
OfArrayOfContentParts: arrayOfContentParts,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
case RoleAssistant:
|
||||
var name param.Opt[string]
|
||||
if m.Name != "" {
|
||||
name = openai.String(m.Name)
|
||||
}
|
||||
|
||||
res = openai.ChatCompletionMessageParamUnion{
|
||||
OfAssistant: &openai.ChatCompletionAssistantMessageParam{
|
||||
Name: name,
|
||||
Content: openai.ChatCompletionAssistantMessageParamContentUnion{
|
||||
OfString: textContent,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return []openai.ChatCompletionMessageParamUnion{res}
|
||||
}
|
||||
|
||||
// ToolCall is a model-issued request to invoke a tool, pairing the
// provider-assigned call ID with the function name and arguments.
type ToolCall struct {
	ID           string       // provider-assigned identifier, echoed back in ToolCallResponse
	FunctionCall FunctionCall // the function to invoke and its serialized arguments
}
||||
func (t ToolCall) toRaw() map[string]any {
|
||||
res := map[string]any{
|
||||
"id": t.ID,
|
||||
}
|
||||
|
||||
res["function"] = t.FunctionCall.toRaw()
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
func (t ToolCall) toChatCompletionMessages(_ string) []openai.ChatCompletionMessageParamUnion {
|
||||
return []openai.ChatCompletionMessageParamUnion{{
|
||||
OfAssistant: &openai.ChatCompletionAssistantMessageParam{
|
||||
ToolCalls: []openai.ChatCompletionMessageToolCallParam{
|
||||
{
|
||||
ID: t.ID,
|
||||
Function: openai.ChatCompletionMessageToolCallFunctionParam{
|
||||
Name: t.FunctionCall.Name,
|
||||
Arguments: t.FunctionCall.Arguments,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}}
|
||||
}
|
||||
|
||||
// ToolCallResponse is the outcome of executing a ToolCall: the echoed call
// ID, the tool's result, and any execution error.
type ToolCallResponse struct {
	ID     string // matches ToolCall.ID so the provider can pair call and response
	Result any    // tool output, stringified with fmt.Sprint when sent to OpenAI
	Error  error  // non-nil when the tool failed; folded into the result text
}
||||
func (t ToolCallResponse) toRaw() map[string]any {
|
||||
res := map[string]any{
|
||||
"id": t.ID,
|
||||
"result": t.Result,
|
||||
}
|
||||
|
||||
if t.Error != nil {
|
||||
res["error"] = t.Error.Error()
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
func (t ToolCallResponse) toChatCompletionMessages(_ string) []openai.ChatCompletionMessageParamUnion {
|
||||
var refusal string
|
||||
if t.Error != nil {
|
||||
refusal = t.Error.Error()
|
||||
}
|
||||
|
||||
if refusal != "" {
|
||||
if t.Result != "" {
|
||||
t.Result = fmt.Sprint(t.Result) + " (error in execution: " + refusal + ")"
|
||||
} else {
|
||||
t.Result = "error in execution:" + refusal
|
||||
}
|
||||
}
|
||||
|
||||
return []openai.ChatCompletionMessageParamUnion{{
|
||||
OfTool: &openai.ChatCompletionToolMessageParam{
|
||||
ToolCallID: t.ID,
|
||||
Content: openai.ChatCompletionToolMessageParamContentUnion{
|
||||
OfString: openai.String(fmt.Sprint(t.Result)),
|
||||
},
|
||||
},
|
||||
}}
|
||||
}
|
||||
|
||||
// ChatCompletion is the interface for chat completion: a single model version
// that can answer a Request with a Response.
type ChatCompletion interface {
	// ChatComplete sends the request to the underlying provider and returns
	// its response; ctx governs cancellation and deadlines.
	ChatComplete(ctx context.Context, req Request) (Response, error)
}
||||
// LLM is the interface for language model providers (OpenAI, Anthropic,
// Google); it resolves a model-version string to a ChatCompletion.
type LLM interface {
	// ModelVersion returns a ChatCompletion bound to the named model version,
	// or an error if the provider does not support it.
	ModelVersion(modelVersion string) (ChatCompletion, error)
}
||||
// OpenAI creates a new OpenAI LLM provider with the given API key.
func OpenAI(key string) LLM {
	return openaiImpl{key: key}
}
||||
// Anthropic creates a new Anthropic LLM provider with the given API key.
|
||||
func Anthropic(key string) LLM {
|
||||
return anthropic{key: key}
|
||||
return anthropicImpl{key: key}
|
||||
}
|
||||
|
||||
// Google creates a new Google LLM provider with the given API key.
|
||||
func Google(key string) LLM {
|
||||
return google{key: key}
|
||||
return googleImpl{key: key}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user