Refactor: modularize and streamline LLM providers and utility functions
- Migrate `compress_image.go` to `internal/imageutil` for better encapsulation.
- Reorganize LLM provider implementations into distinct packages (`google`, `openai`, and `anthropic`).
- Replace the `go_llm` package name with `llm`.
- Refactor internal APIs for improved clarity, including renaming `anthropic` to `anthropicImpl` and `google` to `googleImpl` (see the interface-assertion sketch after this list).
- Add helper methods and restructure message handling for better separation of concerns.
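The rename in the fourth bullet is kept safe by the `var _ LLM = anthropicImpl{}` line visible in the diff below: a compile-time assertion that the renamed concrete type still satisfies the `LLM` interface. The minimal sketch below illustrates only that idiom; the `Greeter`/`greeterImpl` names are invented for the example and are not part of go-llm.

```go
package main

import "fmt"

// Greeter stands in for an interface such as llm.LLM.
type Greeter interface {
	Greet(name string) string
}

// greeterImpl stands in for a renamed concrete type such as anthropicImpl.
type greeterImpl struct{}

// Compile-time assertion: if greeterImpl ever stops satisfying Greeter
// (for example after a rename or a signature change), the build fails
// right here instead of at a distant call site.
var _ Greeter = greeterImpl{}

func (greeterImpl) Greet(name string) string {
	return "hello, " + name
}

func main() {
	var g Greeter = greeterImpl{}
	fmt.Println(g.Greet("world"))
}
```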
anthropic.go (20 lines changed)
@@ -1,4 +1,4 @@
-package go_llm
+package llm
 
 import (
 	"context"
@@ -10,19 +10,19 @@ import (
 	"log/slog"
 	"net/http"
 
-	"gitea.stevedudenhoeffer.com/steve/go-llm/utils"
+	"gitea.stevedudenhoeffer.com/steve/go-llm/internal/imageutil"
 
 	anth "github.com/liushuangls/go-anthropic/v2"
 )
 
-type anthropic struct {
+type anthropicImpl struct {
 	key   string
 	model string
 }
 
-var _ LLM = anthropic{}
+var _ LLM = anthropicImpl{}
 
-func (a anthropic) ModelVersion(modelVersion string) (ChatCompletion, error) {
+func (a anthropicImpl) ModelVersion(modelVersion string) (ChatCompletion, error) {
 	a.model = modelVersion
 
 	// TODO: model verification?
@@ -36,7 +36,7 @@ func deferClose(c io.Closer) {
 	}
 }
 
-func (a anthropic) requestToAnthropicRequest(req Request) anth.MessagesRequest {
+func (a anthropicImpl) requestToAnthropicRequest(req Request) anth.MessagesRequest {
 	res := anth.MessagesRequest{
 		Model:     anth.Model(a.model),
 		MaxTokens: 1000,
@@ -90,7 +90,7 @@ func (a anthropic) requestToAnthropicRequest(req Request) anth.MessagesRequest {
 			// Check if image size exceeds 5MiB (5242880 bytes)
 			if len(raw) >= 5242880 {
 
-				compressed, mime, err := utils.CompressImage(img.Base64, 5*1024*1024)
+				compressed, mime, err := imageutil.CompressImage(img.Base64, 5*1024*1024)
 
 				// just replace the image with the compressed one
 				if err != nil {
@@ -157,7 +157,7 @@ func (a anthropic) requestToAnthropicRequest(req Request) anth.MessagesRequest {
 		}
 	}
 
-	for _, tool := range req.Toolbox.functions {
+	for _, tool := range req.Toolbox.Functions() {
 		res.Tools = append(res.Tools, anth.ToolDefinition{
 			Name:        tool.Name,
 			Description: tool.Description,
@@ -177,7 +177,7 @@ func (a anthropic) requestToAnthropicRequest(req Request) anth.MessagesRequest {
 	return res
 }
 
-func (a anthropic) responseToLLMResponse(in anth.MessagesResponse) Response {
+func (a anthropicImpl) responseToLLMResponse(in anth.MessagesResponse) Response {
 	choice := ResponseChoice{}
 	for _, msg := range in.Content {
 
@@ -212,7 +212,7 @@ func (a anthropic) responseToLLMResponse(in anth.MessagesResponse) Response {
 	}
 }
 
-func (a anthropic) ChatComplete(ctx context.Context, req Request) (Response, error) {
+func (a anthropicImpl) ChatComplete(ctx context.Context, req Request) (Response, error) {
 	cl := anth.NewClient(a.key)
 
 	res, err := cl.CreateMessages(ctx, a.requestToAnthropicRequest(req))
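For context on the `utils.CompressImage` → `imageutil.CompressImage` change above: the surrounding code treats 5 MiB (5242880 bytes) as the ceiling for an image payload and only re-encodes when the decoded data reaches that size. The sketch below shows that guard pattern in isolation; the `compressImage` stub's signature is merely inferred from the call site in the diff (base64 in, compressed base64 plus MIME type out) and is not the actual `internal/imageutil` implementation.

```go
package main

import (
	"encoding/base64"
	"fmt"
	"log"
)

// maxImageBytes mirrors the 5 MiB (5242880-byte) limit checked in the diff.
const maxImageBytes = 5 * 1024 * 1024

// compressImage is a placeholder whose signature is inferred from the diff's
// call site; the real internal/imageutil.CompressImage is not reproduced here.
func compressImage(b64 string, limit int) (string, string, error) {
	// A real implementation would re-encode or downscale until the payload
	// fits under limit; this stub simply returns its input unchanged.
	return b64, "image/jpeg", nil
}

// ensureUnderLimit applies the same guard as the diff: decode to measure the
// raw size, and only compress when the image is at or over the limit.
func ensureUnderLimit(b64 string) (string, string, error) {
	raw, err := base64.StdEncoding.DecodeString(b64)
	if err != nil {
		return "", "", fmt.Errorf("decode image: %w", err)
	}
	if len(raw) < maxImageBytes {
		return b64, "", nil // small enough, send as-is
	}
	return compressImage(b64, maxImageBytes)
}

func main() {
	if _, _, err := ensureUnderLimit("aGVsbG8gd29ybGQ="); err != nil {
		log.Fatal(err)
	}
	fmt.Println("image payload fits within the 5 MiB limit")
}
```

Keeping the helper under `internal/imageutil` (rather than a public `utils` package) also prevents modules outside this repository from importing it, which matches the encapsulation goal stated in the commit message.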