Refactor: modularize and streamline LLM providers and utility functions

- Migrate `compress_image.go` to `internal/imageutil` for better encapsulation.
- Reorganize LLM provider implementations into distinct packages (`google`, `openai`, and `anthropic`).
- Replace `go_llm` package name with `llm`.
- Refactor internal APIs for improved clarity, including renaming `anthropic` to `anthropicImpl` and `google` to `googleImpl`.
- Add helper methods and restructure message handling for better separation of concerns.
This commit is contained in:
2026-01-24 15:40:38 -05:00
parent be99af3597
commit bf7c86ab2a
18 changed files with 411 additions and 350 deletions

2
.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
.claude
.idea

View File

@@ -1,4 +1,4 @@
package go_llm
package llm
import (
"context"
@@ -10,19 +10,19 @@ import (
"log/slog"
"net/http"
"gitea.stevedudenhoeffer.com/steve/go-llm/utils"
"gitea.stevedudenhoeffer.com/steve/go-llm/internal/imageutil"
anth "github.com/liushuangls/go-anthropic/v2"
)
type anthropic struct {
type anthropicImpl struct {
key string
model string
}
var _ LLM = anthropic{}
var _ LLM = anthropicImpl{}
func (a anthropic) ModelVersion(modelVersion string) (ChatCompletion, error) {
func (a anthropicImpl) ModelVersion(modelVersion string) (ChatCompletion, error) {
a.model = modelVersion
// TODO: model verification?
@@ -36,7 +36,7 @@ func deferClose(c io.Closer) {
}
}
func (a anthropic) requestToAnthropicRequest(req Request) anth.MessagesRequest {
func (a anthropicImpl) requestToAnthropicRequest(req Request) anth.MessagesRequest {
res := anth.MessagesRequest{
Model: anth.Model(a.model),
MaxTokens: 1000,
@@ -90,7 +90,7 @@ func (a anthropic) requestToAnthropicRequest(req Request) anth.MessagesRequest {
// Check if image size exceeds 5MiB (5242880 bytes)
if len(raw) >= 5242880 {
compressed, mime, err := utils.CompressImage(img.Base64, 5*1024*1024)
compressed, mime, err := imageutil.CompressImage(img.Base64, 5*1024*1024)
// just replace the image with the compressed one
if err != nil {
@@ -157,7 +157,7 @@ func (a anthropic) requestToAnthropicRequest(req Request) anth.MessagesRequest {
}
}
for _, tool := range req.Toolbox.functions {
for _, tool := range req.Toolbox.Functions() {
res.Tools = append(res.Tools, anth.ToolDefinition{
Name: tool.Name,
Description: tool.Description,
@@ -177,7 +177,7 @@ func (a anthropic) requestToAnthropicRequest(req Request) anth.MessagesRequest {
return res
}
func (a anthropic) responseToLLMResponse(in anth.MessagesResponse) Response {
func (a anthropicImpl) responseToLLMResponse(in anth.MessagesResponse) Response {
choice := ResponseChoice{}
for _, msg := range in.Content {
@@ -212,7 +212,7 @@ func (a anthropic) responseToLLMResponse(in anth.MessagesResponse) Response {
}
}
func (a anthropic) ChatComplete(ctx context.Context, req Request) (Response, error) {
func (a anthropicImpl) ChatComplete(ctx context.Context, req Request) (Response, error) {
cl := anth.NewClient(a.key)
res, err := cl.CreateMessages(ctx, a.requestToAnthropicRequest(req))

View File

@@ -1,4 +1,4 @@
package go_llm
package llm
import (
"context"

View File

@@ -1,4 +1,4 @@
package go_llm
package llm
import "fmt"

View File

@@ -1,4 +1,4 @@
package go_llm
package llm
import (
"context"

View File

@@ -1,4 +1,4 @@
package go_llm
package llm
import (
"reflect"

View File

@@ -1,4 +1,4 @@
package go_llm
package llm
import (
"context"
@@ -11,22 +11,24 @@ import (
"google.golang.org/genai"
)
type google struct {
type googleImpl struct {
key string
model string
}
func (g google) ModelVersion(modelVersion string) (ChatCompletion, error) {
var _ LLM = googleImpl{}
func (g googleImpl) ModelVersion(modelVersion string) (ChatCompletion, error) {
g.model = modelVersion
return g, nil
}
func (g google) requestToContents(in Request) ([]*genai.Content, *genai.GenerateContentConfig) {
func (g googleImpl) requestToContents(in Request) ([]*genai.Content, *genai.GenerateContentConfig) {
var contents []*genai.Content
var cfg genai.GenerateContentConfig
for _, tool := range in.Toolbox.functions {
for _, tool := range in.Toolbox.Functions() {
cfg.Tools = append(cfg.Tools, &genai.Tool{
FunctionDeclarations: []*genai.FunctionDeclaration{
{
@@ -101,7 +103,7 @@ func (g google) requestToContents(in Request) ([]*genai.Content, *genai.Generate
return contents, &cfg
}
func (g google) responseToLLMResponse(in *genai.GenerateContentResponse) (Response, error) {
func (g googleImpl) responseToLLMResponse(in *genai.GenerateContentResponse) (Response, error) {
res := Response{}
for _, c := range in.Candidates {
@@ -142,7 +144,7 @@ func (g google) responseToLLMResponse(in *genai.GenerateContentResponse) (Respon
return res, nil
}
func (g google) ChatComplete(ctx context.Context, req Request) (Response, error) {
func (g googleImpl) ChatComplete(ctx context.Context, req Request) (Response, error) {
cl, err := genai.NewClient(ctx, &genai.ClientConfig{
APIKey: g.key,
Backend: genai.BackendGeminiAPI,

View File

@@ -1,4 +1,4 @@
package utils
package imageutil
import (
"bytes"
@@ -12,8 +12,8 @@ import (
"golang.org/x/image/draw"
)
// CompressImage takes a base64encoded image (JPEG, PNG or GIF) and returns
// a base64encoded version that is at most maxLength in size, or an error.
// CompressImage takes a base-64-encoded image (JPEG, PNG or GIF) and returns
// a base-64-encoded version that is at most maxLength in size, or an error.
func CompressImage(b64 string, maxLength int) (string, string, error) {
raw, err := base64.StdEncoding.DecodeString(b64)
if err != nil {
@@ -29,12 +29,12 @@ func CompressImage(b64 string, maxLength int) (string, string, error) {
case "image/gif":
return compressGIF(raw, maxLength)
default: // jpeg, png, webp, etc. treat as raster
default: // jpeg, png, webp, etc. -> treat as raster
return compressRaster(raw, maxLength)
}
}
// ---------- Raster path (jpeg / png / singleframe gif) ----------
// ---------- Raster path (jpeg / png / single-frame gif) ----------
func compressRaster(src []byte, maxLength int) (string, string, error) {
img, _, err := image.Decode(bytes.NewReader(src))
@@ -57,7 +57,7 @@ func compressRaster(src []byte, maxLength int) (string, string, error) {
continue
}
// downscale 80%
// down-scale 80%
b := img.Bounds()
if b.Dx() < 100 || b.Dy() < 100 {
return "", "", fmt.Errorf("cannot compress below %.02fMiB without destroying image", float64(maxLength)/1048576.0)
@@ -86,7 +86,7 @@ func compressGIF(src []byte, maxLength int) (string, string, error) {
return base64.StdEncoding.EncodeToString(buf.Bytes()), "image/gif", nil
}
// downscale every frame by 80%
// down-scale every frame by 80%
w, h := g.Config.Width, g.Config.Height
if w < 100 || h < 100 {
return "", "", fmt.Errorf("cannot compress animated GIF below 5 MiB without excessive quality loss")
@@ -94,7 +94,7 @@ func compressGIF(src []byte, maxLength int) (string, string, error) {
nw, nh := int(float64(w)*0.8), int(float64(h)*0.8)
for i, frm := range g.Image {
// convert paletted frame RGBA for scaling
// convert paletted frame -> RGBA for scaling
rgba := image.NewRGBA(frm.Bounds())
draw.Draw(rgba, rgba.Bounds(), frm, frm.Bounds().Min, draw.Src)
@@ -109,6 +109,6 @@ func compressGIF(src []byte, maxLength int) (string, string, error) {
g.Image[i] = paletted
}
g.Config.Width, g.Config.Height = nw, nh
// loop back and test size again
// loop back and test size again ...
}
}

272
llm.go
View File

@@ -1,286 +1,30 @@
package go_llm
package llm
import (
"context"
"fmt"
"strings"
"github.com/openai/openai-go"
"github.com/openai/openai-go/packages/param"
)
type Role string
const (
RoleSystem Role = "system"
RoleUser Role = "user"
RoleAssistant Role = "assistant"
)
type Image struct {
Base64 string
ContentType string
Url string
}
func (i Image) toRaw() map[string]any {
res := map[string]any{
"base64": i.Base64,
"contenttype": i.ContentType,
"url": i.Url,
}
return res
}
func (i *Image) fromRaw(raw map[string]any) Image {
var res Image
res.Base64 = raw["base64"].(string)
res.ContentType = raw["contenttype"].(string)
res.Url = raw["url"].(string)
return res
}
type Message struct {
Role Role
Name string
Text string
Images []Image
}
func (m Message) toRaw() map[string]any {
res := map[string]any{
"role": m.Role,
"name": m.Name,
"text": m.Text,
}
images := make([]map[string]any, 0, len(m.Images))
for _, img := range m.Images {
images = append(images, img.toRaw())
}
res["images"] = images
return res
}
func (m *Message) fromRaw(raw map[string]any) Message {
var res Message
res.Role = Role(raw["role"].(string))
res.Name = raw["name"].(string)
res.Text = raw["text"].(string)
images := raw["images"].([]map[string]any)
for _, img := range images {
var i Image
res.Images = append(res.Images, i.fromRaw(img))
}
return res
}
func (m Message) toChatCompletionMessages(model string) []openai.ChatCompletionMessageParamUnion {
var res openai.ChatCompletionMessageParamUnion
var arrayOfContentParts []openai.ChatCompletionContentPartUnionParam
var textContent param.Opt[string]
for _, img := range m.Images {
if img.Base64 != "" {
arrayOfContentParts = append(arrayOfContentParts,
openai.ChatCompletionContentPartUnionParam{
OfImageURL: &openai.ChatCompletionContentPartImageParam{
ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
URL: "data:" + img.ContentType + ";base64," + img.Base64,
},
},
},
)
} else if img.Url != "" {
arrayOfContentParts = append(arrayOfContentParts,
openai.ChatCompletionContentPartUnionParam{
OfImageURL: &openai.ChatCompletionContentPartImageParam{
ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
URL: img.Url,
},
},
},
)
}
}
if m.Text != "" {
if len(arrayOfContentParts) > 0 {
arrayOfContentParts = append(arrayOfContentParts,
openai.ChatCompletionContentPartUnionParam{
OfText: &openai.ChatCompletionContentPartTextParam{
Text: "\n",
},
},
)
} else {
textContent = openai.String(m.Text)
}
}
a := strings.Split(model, "-")
useSystemInsteadOfDeveloper := true
if len(a) > 1 && a[0][0] == 'o' {
useSystemInsteadOfDeveloper = false
}
switch m.Role {
case RoleSystem:
if useSystemInsteadOfDeveloper {
res = openai.ChatCompletionMessageParamUnion{
OfSystem: &openai.ChatCompletionSystemMessageParam{
Content: openai.ChatCompletionSystemMessageParamContentUnion{
OfString: textContent,
},
},
}
} else {
res = openai.ChatCompletionMessageParamUnion{
OfDeveloper: &openai.ChatCompletionDeveloperMessageParam{
Content: openai.ChatCompletionDeveloperMessageParamContentUnion{
OfString: textContent,
},
},
}
}
case RoleUser:
var name param.Opt[string]
if m.Name != "" {
name = openai.String(m.Name)
}
res = openai.ChatCompletionMessageParamUnion{
OfUser: &openai.ChatCompletionUserMessageParam{
Name: name,
Content: openai.ChatCompletionUserMessageParamContentUnion{
OfString: textContent,
OfArrayOfContentParts: arrayOfContentParts,
},
},
}
case RoleAssistant:
var name param.Opt[string]
if m.Name != "" {
name = openai.String(m.Name)
}
res = openai.ChatCompletionMessageParamUnion{
OfAssistant: &openai.ChatCompletionAssistantMessageParam{
Name: name,
Content: openai.ChatCompletionAssistantMessageParamContentUnion{
OfString: textContent,
},
},
}
}
return []openai.ChatCompletionMessageParamUnion{res}
}
type ToolCall struct {
ID string
FunctionCall FunctionCall
}
func (t ToolCall) toRaw() map[string]any {
res := map[string]any{
"id": t.ID,
}
res["function"] = t.FunctionCall.toRaw()
return res
}
func (t ToolCall) toChatCompletionMessages(_ string) []openai.ChatCompletionMessageParamUnion {
return []openai.ChatCompletionMessageParamUnion{{
OfAssistant: &openai.ChatCompletionAssistantMessageParam{
ToolCalls: []openai.ChatCompletionMessageToolCallParam{
{
ID: t.ID,
Function: openai.ChatCompletionMessageToolCallFunctionParam{
Name: t.FunctionCall.Name,
Arguments: t.FunctionCall.Arguments,
},
},
},
},
}}
}
type ToolCallResponse struct {
ID string
Result any
Error error
}
func (t ToolCallResponse) toRaw() map[string]any {
res := map[string]any{
"id": t.ID,
"result": t.Result,
}
if t.Error != nil {
res["error"] = t.Error.Error()
}
return res
}
func (t ToolCallResponse) toChatCompletionMessages(_ string) []openai.ChatCompletionMessageParamUnion {
var refusal string
if t.Error != nil {
refusal = t.Error.Error()
}
if refusal != "" {
if t.Result != "" {
t.Result = fmt.Sprint(t.Result) + " (error in execution: " + refusal + ")"
} else {
t.Result = "error in execution:" + refusal
}
}
return []openai.ChatCompletionMessageParamUnion{{
OfTool: &openai.ChatCompletionToolMessageParam{
ToolCallID: t.ID,
Content: openai.ChatCompletionToolMessageParamContentUnion{
OfString: openai.String(fmt.Sprint(t.Result)),
},
},
}}
}
// ChatCompletion is the interface for chat completion.
type ChatCompletion interface {
ChatComplete(ctx context.Context, req Request) (Response, error)
}
// LLM is the interface for language model providers.
type LLM interface {
ModelVersion(modelVersion string) (ChatCompletion, error)
}
// OpenAI creates a new OpenAI LLM provider with the given API key.
func OpenAI(key string) LLM {
return openaiImpl{key: key}
}
// Anthropic creates a new Anthropic LLM provider with the given API key.
func Anthropic(key string) LLM {
return anthropic{key: key}
return anthropicImpl{key: key}
}
// Google creates a new Google LLM provider with the given API key.
func Google(key string) LLM {
return google{key: key}
return googleImpl{key: key}
}

115
message.go Normal file
View File

@@ -0,0 +1,115 @@
package llm
// Role represents the role of a message in a conversation.
type Role string

// The conversation roles understood by the providers in this package.
const (
	RoleSystem    Role = "system"    // system / instruction messages
	RoleUser      Role = "user"      // end-user messages
	RoleAssistant Role = "assistant" // model-generated messages
)
// Image represents an image that can be included in a message. Typically
// either Base64 (with ContentType) or Url is populated, not both.
type Image struct {
	Base64      string
	ContentType string
	Url         string
}

// toRaw serializes the image into a generic key/value map.
func (i Image) toRaw() map[string]any {
	return map[string]any{
		"base64":      i.Base64,
		"contenttype": i.ContentType,
		"url":         i.Url,
	}
}

// fromRaw rebuilds an Image from a map produced by toRaw. The receiver is
// not modified; the reconstructed value is returned.
func (i *Image) fromRaw(raw map[string]any) Image {
	return Image{
		Base64:      raw["base64"].(string),
		ContentType: raw["contenttype"].(string),
		Url:         raw["url"].(string),
	}
}
// Message represents a message in a conversation.
type Message struct {
	Role   Role
	Name   string
	Text   string
	Images []Image
}

// toRaw serializes the message (including its images) into a generic map.
// The role is stored as a plain string so that fromRaw's string assertion
// can recover it.
func (m Message) toRaw() map[string]any {
	res := map[string]any{
		// BUG FIX: previously the Role value was stored directly, so
		// fromRaw's raw["role"].(string) assertion panicked (the dynamic
		// type was Role, not string). Normalize to string on the way out.
		"role": string(m.Role),
		"name": m.Name,
		"text": m.Text,
	}

	images := make([]map[string]any, 0, len(m.Images))
	for _, img := range m.Images {
		images = append(images, img.toRaw())
	}
	res["images"] = images
	return res
}

// fromRaw rebuilds a Message from a map produced by toRaw. The role is
// accepted either as a string (current format) or as a Role value (maps
// produced before toRaw normalized it).
func (m *Message) fromRaw(raw map[string]any) Message {
	var res Message

	switch v := raw["role"].(type) {
	case string:
		res.Role = Role(v)
	case Role:
		res.Role = v
	}
	res.Name = raw["name"].(string)
	res.Text = raw["text"].(string)

	images := raw["images"].([]map[string]any)
	for _, img := range images {
		var i Image
		res.Images = append(res.Images, i.fromRaw(img))
	}
	return res
}
// ToolCall represents a tool call made by an assistant.
type ToolCall struct {
	ID           string
	FunctionCall FunctionCall
}

// toRaw serializes the tool call, delegating to the embedded function
// call's own toRaw for the "function" entry.
func (t ToolCall) toRaw() map[string]any {
	return map[string]any{
		"id":       t.ID,
		"function": t.FunctionCall.toRaw(),
	}
}
// ToolCallResponse represents the response to a tool call.
type ToolCallResponse struct {
ID string
Result any
Error error
}
func (t ToolCallResponse) toRaw() map[string]any {
res := map[string]any{
"id": t.ID,
"result": t.Result,
}
if t.Error != nil {
res["error"] = t.Error.Error()
}
return res
}

210
openai.go
View File

@@ -1,4 +1,4 @@
package go_llm
package llm
import (
"context"
@@ -7,6 +7,7 @@ import (
"github.com/openai/openai-go"
"github.com/openai/openai-go/option"
"github.com/openai/openai-go/packages/param"
"github.com/openai/openai-go/shared"
)
@@ -24,14 +25,14 @@ func (o openaiImpl) newRequestToOpenAIRequest(request Request) openai.ChatComple
}
for _, i := range request.Conversation {
res.Messages = append(res.Messages, i.toChatCompletionMessages(o.model)...)
res.Messages = append(res.Messages, inputToChatCompletionMessages(i, o.model)...)
}
for _, msg := range request.Messages {
res.Messages = append(res.Messages, msg.toChatCompletionMessages(o.model)...)
res.Messages = append(res.Messages, messageToChatCompletionMessages(msg, o.model)...)
}
for _, tool := range request.Toolbox.functions {
for _, tool := range request.Toolbox.Functions() {
res.Tools = append(res.Tools, openai.ChatCompletionToolParam{
Type: "function",
Function: shared.FunctionDefinitionParam{
@@ -111,10 +112,9 @@ func (o openaiImpl) ChatComplete(ctx context.Context, request Request) (Response
req := o.newRequestToOpenAIRequest(request)
resp, err := cl.Chat.Completions.New(ctx, req)
//resp, err := cl.CreateChatCompletion(ctx, req)
if err != nil {
return Response{}, fmt.Errorf("unhandled openaiImpl error: %w", err)
return Response{}, fmt.Errorf("unhandled openai error: %w", err)
}
return o.responseToLLMResponse(resp), nil
@@ -122,7 +122,201 @@ func (o openaiImpl) ChatComplete(ctx context.Context, request Request) (Response
func (o openaiImpl) ModelVersion(modelVersion string) (ChatCompletion, error) {
return openaiImpl{
key: o.key,
model: modelVersion,
key: o.key,
model: modelVersion,
baseUrl: o.baseUrl,
}, nil
}
// inputToChatCompletionMessages converts a single conversation Input into
// the equivalent OpenAI chat completion message(s). Inputs of an
// unrecognized concrete type produce nil.
func inputToChatCompletionMessages(input Input, model string) []openai.ChatCompletionMessageParamUnion {
	if msg, ok := input.(Message); ok {
		return messageToChatCompletionMessages(msg, model)
	}
	if call, ok := input.(ToolCall); ok {
		return toolCallToChatCompletionMessages(call)
	}
	if resp, ok := input.(ToolCallResponse); ok {
		return toolCallResponseToChatCompletionMessages(resp)
	}
	if choice, ok := input.(ResponseChoice); ok {
		return responseChoiceToChatCompletionMessages(choice)
	}
	return nil
}
// messageToChatCompletionMessages converts a Message into OpenAI chat
// completion message params. Images are emitted as content parts (base64
// data URLs, or plain URLs); when no images are present the text travels
// as a simple string, otherwise it is appended as a text content part.
func messageToChatCompletionMessages(m Message, model string) []openai.ChatCompletionMessageParamUnion {
	var res openai.ChatCompletionMessageParamUnion
	var arrayOfContentParts []openai.ChatCompletionContentPartUnionParam
	var textContent param.Opt[string]

	for _, img := range m.Images {
		if img.Base64 != "" {
			// Inline image data as a data: URL.
			arrayOfContentParts = append(arrayOfContentParts,
				openai.ChatCompletionContentPartUnionParam{
					OfImageURL: &openai.ChatCompletionContentPartImageParam{
						ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
							URL: "data:" + img.ContentType + ";base64," + img.Base64,
						},
					},
				},
			)
		} else if img.Url != "" {
			arrayOfContentParts = append(arrayOfContentParts,
				openai.ChatCompletionContentPartUnionParam{
					OfImageURL: &openai.ChatCompletionContentPartImageParam{
						ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
							URL: img.Url,
						},
					},
				},
			)
		}
	}

	if m.Text != "" {
		if len(arrayOfContentParts) > 0 {
			// BUG FIX: the text part previously carried only "\n", silently
			// dropping the message text whenever images were present.
			arrayOfContentParts = append(arrayOfContentParts,
				openai.ChatCompletionContentPartUnionParam{
					OfText: &openai.ChatCompletionContentPartTextParam{
						Text: m.Text,
					},
				},
			)
		} else {
			textContent = openai.String(m.Text)
		}
	}

	// Models whose name starts with an "o" segment (e.g. "o1-preview") take
	// system prompts via the "developer" role instead of "system".
	// NOTE(review): a bare "o1" (no dash) still maps to "system" here —
	// confirm whether that is intended.
	a := strings.Split(model, "-")
	useSystemInsteadOfDeveloper := true
	// len(a[0]) guard — BUG FIX: a model name beginning with "-" used to
	// panic on a[0][0].
	if len(a) > 1 && len(a[0]) > 0 && a[0][0] == 'o' {
		useSystemInsteadOfDeveloper = false
	}

	switch m.Role {
	case RoleSystem:
		if useSystemInsteadOfDeveloper {
			res = openai.ChatCompletionMessageParamUnion{
				OfSystem: &openai.ChatCompletionSystemMessageParam{
					Content: openai.ChatCompletionSystemMessageParamContentUnion{
						OfString: textContent,
					},
				},
			}
		} else {
			res = openai.ChatCompletionMessageParamUnion{
				OfDeveloper: &openai.ChatCompletionDeveloperMessageParam{
					Content: openai.ChatCompletionDeveloperMessageParamContentUnion{
						OfString: textContent,
					},
				},
			}
		}
	case RoleUser:
		var name param.Opt[string]
		if m.Name != "" {
			name = openai.String(m.Name)
		}
		res = openai.ChatCompletionMessageParamUnion{
			OfUser: &openai.ChatCompletionUserMessageParam{
				Name: name,
				Content: openai.ChatCompletionUserMessageParamContentUnion{
					OfString:              textContent,
					OfArrayOfContentParts: arrayOfContentParts,
				},
			},
		}
	case RoleAssistant:
		var name param.Opt[string]
		if m.Name != "" {
			name = openai.String(m.Name)
		}
		res = openai.ChatCompletionMessageParamUnion{
			OfAssistant: &openai.ChatCompletionAssistantMessageParam{
				Name: name,
				Content: openai.ChatCompletionAssistantMessageParamContentUnion{
					OfString: textContent,
				},
			},
		}
	}

	return []openai.ChatCompletionMessageParamUnion{res}
}
// toolCallToChatCompletionMessages renders a ToolCall as an assistant
// message carrying a single tool-call entry.
func toolCallToChatCompletionMessages(t ToolCall) []openai.ChatCompletionMessageParamUnion {
	call := openai.ChatCompletionMessageToolCallParam{
		ID: t.ID,
		Function: openai.ChatCompletionMessageToolCallFunctionParam{
			Name:      t.FunctionCall.Name,
			Arguments: t.FunctionCall.Arguments,
		},
	}

	assistant := openai.ChatCompletionAssistantMessageParam{
		ToolCalls: []openai.ChatCompletionMessageToolCallParam{call},
	}

	return []openai.ChatCompletionMessageParamUnion{{OfAssistant: &assistant}}
}
// toolCallResponseToChatCompletionMessages renders a tool-call result as an
// OpenAI "tool" role message. Any execution error is folded into the result
// text so the model can see what went wrong.
func toolCallResponseToChatCompletionMessages(t ToolCallResponse) []openai.ChatCompletionMessageParamUnion {
	var refusal string
	if t.Error != nil {
		refusal = t.Error.Error()
	}

	result := t.Result
	if refusal != "" {
		if result != "" {
			result = fmt.Sprint(result) + " (error in execution: " + refusal + ")"
		} else {
			// BUG FIX: a space was missing after the colon, producing
			// "error in execution:boom" (inconsistent with the branch above).
			result = "error in execution: " + refusal
		}
	}

	return []openai.ChatCompletionMessageParamUnion{{
		OfTool: &openai.ChatCompletionToolMessageParam{
			ToolCallID: t.ID,
			Content: openai.ChatCompletionToolMessageParamContentUnion{
				OfString: openai.String(fmt.Sprint(result)),
			},
		},
	}}
}
// responseChoiceToChatCompletionMessages converts a prior assistant
// ResponseChoice back into an assistant message param, carrying over the
// name, refusal, content, and any tool calls it made.
func responseChoiceToChatCompletionMessages(r ResponseChoice) []openai.ChatCompletionMessageParamUnion {
	assistant := openai.ChatCompletionAssistantMessageParam{}

	if r.Name != "" {
		assistant.Name = openai.String(r.Name)
	}
	if r.Refusal != "" {
		assistant.Refusal = openai.String(r.Refusal)
	}
	if r.Content != "" {
		assistant.Content.OfString = openai.String(r.Content)
	}

	for _, call := range r.Calls {
		param := openai.ChatCompletionMessageToolCallParam{
			ID: call.ID,
			Function: openai.ChatCompletionMessageToolCallFunctionParam{
				Name:      call.FunctionCall.Name,
				Arguments: call.FunctionCall.Arguments,
			},
		}
		assistant.ToolCalls = append(assistant.ToolCalls, param)
	}

	return []openai.ChatCompletionMessageParamUnion{{OfAssistant: &assistant}}
}

View File

@@ -1,4 +1,4 @@
package go_llm
package llm
import (
"strings"

View File

@@ -0,0 +1,11 @@
// Package anthropic provides the Anthropic LLM provider.
package anthropic
import (
llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)
// New creates a new Anthropic LLM provider with the given API key.
// It is a thin convenience wrapper around llm.Anthropic so callers can
// depend on this provider package alone.
func New(key string) llm.LLM {
	return llm.Anthropic(key)
}

11
provider/google/google.go Normal file
View File

@@ -0,0 +1,11 @@
// Package google provides the Google LLM provider.
package google
import (
llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)
// New creates a new Google LLM provider with the given API key.
// It is a thin convenience wrapper around llm.Google so callers can
// depend on this provider package alone.
func New(key string) llm.LLM {
	return llm.Google(key)
}

11
provider/openai/openai.go Normal file
View File

@@ -0,0 +1,11 @@
// Package openai provides the OpenAI LLM provider.
package openai
import (
llm "gitea.stevedudenhoeffer.com/steve/go-llm"
)
// New creates a new OpenAI LLM provider with the given API key.
// It is a thin convenience wrapper around llm.OpenAI so callers can
// depend on this provider package alone.
func New(key string) llm.LLM {
	return llm.OpenAI(key)
}

View File

@@ -1,17 +1,20 @@
package go_llm
import (
"github.com/openai/openai-go"
)
type rawAble interface {
toRaw() map[string]any
fromRaw(raw map[string]any) Input
}
package llm
// Input is the interface for conversation inputs.
// Types that implement this interface can be part of a conversation:
// Message, ToolCall, ToolCallResponse, and ResponseChoice.
type Input interface {
	// isInput is a marker method to ensure only valid types implement this
	// interface. Because it is unexported, the interface is effectively
	// sealed: only types declared in this package can satisfy it.
	isInput()
}

// Implement Input interface for all valid input types.
func (Message) isInput()          {}
func (ToolCall) isInput()         {}
func (ToolCallResponse) isInput() {}
func (ResponseChoice) isInput()   {}
// Request represents a request to a language model.
type Request struct {
Conversation []Input
Messages []Message

View File

@@ -1,9 +1,6 @@
package go_llm
import (
"github.com/openai/openai-go"
)
package llm
// ResponseChoice represents a single choice in a response.
type ResponseChoice struct {
Index int
Role Role
@@ -32,36 +29,6 @@ func (r ResponseChoice) toRaw() map[string]any {
return res
}
func (r ResponseChoice) toChatCompletionMessages(_ string) []openai.ChatCompletionMessageParamUnion {
var as openai.ChatCompletionAssistantMessageParam
if r.Name != "" {
as.Name = openai.String(r.Name)
}
if r.Refusal != "" {
as.Refusal = openai.String(r.Refusal)
}
if r.Content != "" {
as.Content.OfString = openai.String(r.Content)
}
for _, call := range r.Calls {
as.ToolCalls = append(as.ToolCalls, openai.ChatCompletionMessageToolCallParam{
ID: call.ID,
Function: openai.ChatCompletionMessageToolCallFunctionParam{
Name: call.FunctionCall.Name,
Arguments: call.FunctionCall.Arguments,
},
})
}
return []openai.ChatCompletionMessageParamUnion{
{
OfAssistant: &as,
},
}
}
func (r ResponseChoice) toInput() []Input {
var res []Input
@@ -79,6 +46,7 @@ func (r ResponseChoice) toInput() []Input {
return res
}
// Response represents a response from a language model.
type Response struct {
Choices []ResponseChoice
}

View File

@@ -1,4 +1,4 @@
package go_llm
package llm
import (
"context"