From a67ad54bcc7f68b230f33452ae502bf1d6f1e5fc Mon Sep 17 00:00:00 2001
From: Steve Dudenhoeffer
Date: Sun, 6 Oct 2024 20:01:01 -0400
Subject: [PATCH] initial commit

---
 go.mod           |  27 +++++++++++
 go.sum           |  47 ++++++++++++++++++++
 pkg/anthropic.go | 113 +++++++++++++++++++++++++++++++++++++++++++++++
 pkg/function.go  |  13 ++++++
 pkg/llm.go       |  57 ++++++++++++++++++++++++
 pkg/openai.go    |  93 ++++++++++++++++++++++++++++++++++++++
 6 files changed, 350 insertions(+)
 create mode 100644 go.mod
 create mode 100644 go.sum
 create mode 100644 pkg/anthropic.go
 create mode 100644 pkg/function.go
 create mode 100644 pkg/llm.go
 create mode 100644 pkg/openai.go

diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..03b8e42
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,27 @@
+module go-llm
+
+go 1.23.1
+
+require (
+	github.com/atotto/clipboard v0.1.4 // indirect
+	github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
+	github.com/charmbracelet/bubbles v0.20.0 // indirect
+	github.com/charmbracelet/bubbletea v1.1.1 // indirect
+	github.com/charmbracelet/lipgloss v0.13.0 // indirect
+	github.com/charmbracelet/x/ansi v0.2.3 // indirect
+	github.com/charmbracelet/x/term v0.2.0 // indirect
+	github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
+	github.com/liushuangls/go-anthropic/v2 v2.8.0 // indirect
+	github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
+	github.com/mattn/go-isatty v0.0.20 // indirect
+	github.com/mattn/go-localereader v0.0.1 // indirect
+	github.com/mattn/go-runewidth v0.0.16 // indirect
+	github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect
+	github.com/muesli/cancelreader v0.2.2 // indirect
+	github.com/muesli/termenv v0.15.2 // indirect
+	github.com/rivo/uniseg v0.4.7 // indirect
+	github.com/sashabaranov/go-openai v1.31.0 // indirect
+	golang.org/x/sync v0.8.0 // indirect
+	golang.org/x/sys v0.24.0 // indirect
+	golang.org/x/text v0.3.8 // indirect
+)
diff --git a/go.sum b/go.sum
new file mode 100644
index 0000000..f7d2d15
--- /dev/null
+++ b/go.sum
@@ -0,0 +1,47 @@
+github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
+github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
+github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
+github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
+github.com/charmbracelet/bubbles v0.20.0 h1:jSZu6qD8cRQ6k9OMfR1WlM+ruM8fkPWkHvQWD9LIutE=
+github.com/charmbracelet/bubbles v0.20.0/go.mod h1:39slydyswPy+uVOHZ5x/GjwVAFkCsV8IIVy+4MhzwwU=
+github.com/charmbracelet/bubbletea v1.1.1 h1:KJ2/DnmpfqFtDNVTvYZ6zpPFL9iRCRr0qqKOCvppbPY=
+github.com/charmbracelet/bubbletea v1.1.1/go.mod h1:9Ogk0HrdbHolIKHdjfFpyXJmiCzGwy+FesYkZr7hYU4=
+github.com/charmbracelet/lipgloss v0.13.0 h1:4X3PPeoWEDCMvzDvGmTajSyYPcZM4+y8sCA/SsA3cjw=
+github.com/charmbracelet/lipgloss v0.13.0/go.mod h1:nw4zy0SBX/F/eAO1cWdcvy6qnkDUxr8Lw7dvFrAIbbY=
+github.com/charmbracelet/x/ansi v0.2.3 h1:VfFN0NUpcjBRd4DnKfRaIRo53KRgey/nhOoEqosGDEY=
+github.com/charmbracelet/x/ansi v0.2.3/go.mod h1:dk73KoMTT5AX5BsX0KrqhsTqAnhZZoCBjs7dGWp4Ktw=
+github.com/charmbracelet/x/term v0.2.0 h1:cNB9Ot9q8I711MyZ7myUR5HFWL/lc3OpU8jZ4hwm0x0=
+github.com/charmbracelet/x/term v0.2.0/go.mod h1:GVxgxAbjUrmpvIINHIQnJJKpMlHiZ4cktEQCN6GWyF0=
+github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4=
+github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM=
+github.com/liushuangls/go-anthropic/v2 v2.8.0 h1:0zH2jDNycbrlszxnLrG+Gx8vVT0yJAPWU4s3ZTkWzgI=
+github.com/liushuangls/go-anthropic/v2 v2.8.0/go.mod h1:8BKv/fkeTaL5R9R9bGkaknYBueyw2WxY20o7bImbOek=
+github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
+github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
+github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
+github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4=
+github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
+github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
+github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
+github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
+github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
+github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI=
+github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
+github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
+github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
+github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo=
+github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8=
+github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
+github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
+github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
+github.com/sashabaranov/go-openai v1.31.0 h1:rGe77x7zUeCjtS2IS7NCY6Tp4bQviXNMhkQM6hz/UC4=
+github.com/sashabaranov/go-openai v1.31.0/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
+golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
+golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.24.0 h1:Twjiwq9dn6R1fQcyiK+wQyHWfaz/BJB+YIpzU/Cv3Xg=
+golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/text v0.3.8 h1:nAL+RVCQ9uMn3vJZbV+MRnydTJFPf8qqY42YiA6MrqY=
+golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
diff --git a/pkg/anthropic.go b/pkg/anthropic.go
new file mode 100644
index 0000000..b69b6ae
--- /dev/null
+++ b/pkg/anthropic.go
@@ -0,0 +1,113 @@
+package llm
+
+import (
+	"context"
+	"fmt"
+	anth "github.com/liushuangls/go-anthropic/v2"
+)
+
+type anthropic struct {
+	key   string
+	model string
+}
+
+var _ LLM = anthropic{}
+
+func (a anthropic) ModelVersion(modelVersion string) (ChatCompletion, error) {
+	a.model = modelVersion
+
+	// TODO: model verification?
+	return a, nil
+}
+
+func (a anthropic) requestToAnthropicRequest(req Request) anth.MessagesRequest {
+	res := anth.MessagesRequest{
+		Model:     anth.Model(a.model),
+		MaxTokens: 1000,
+	}
+
+	msgs := []anth.Message{}
+
+	// Convert the request messages into Anthropic messages. Anthropic has no
+	// "system" message role, so system messages are appended to the
+	// res.System field instead.
+
+	for _, msg := range req.Messages {
+		if msg.Role == RoleSystem {
+			if len(res.System) > 0 {
+				res.System += "\n"
+			}
+			res.System += msg.Text
+		} else {
+			role := anth.RoleUser
+
+			if msg.Role == RoleAssistant {
+				role = anth.RoleAssistant
+			}
+
+			msgs = append(msgs, anth.Message{
+				Role: role,
+				Content: []anth.MessageContent{
+					{
+						Type: anth.MessagesContentTypeText,
+						Text: &msg.Text,
+					},
+				},
+			})
+		}
+	}
+
+	for _, tool := range req.Toolbox {
+		res.Tools = append(res.Tools, anth.ToolDefinition{
+			Name:        tool.Name,
+			Description: tool.Description,
+			InputSchema: tool.Parameters,
+		})
+	}
+
+	res.Messages = msgs
+
+	return res
+}
+
+func (a anthropic) responseToLLMResponse(in anth.MessagesResponse) Response {
+	res := Response{}
+
+	for _, msg := range in.Content {
+		choice := ResponseChoice{}
+
+		switch msg.Type {
+		case anth.MessagesContentTypeText:
+			if msg.Text != nil {
+				choice.Content = *msg.Text
+			}
+
+		case anth.MessagesContentTypeToolUse:
+			if msg.MessageContentToolUse != nil {
+				choice.Calls = append(choice.Calls, ToolCall{
+					ID: msg.MessageContentToolUse.ID,
+					FunctionCall: FunctionCall{
+						Name:      msg.MessageContentToolUse.Name,
+						Arguments: msg.MessageContentToolUse.Input,
+					},
+				})
+			}
+		}
+
+		res.Choices = append(res.Choices, choice)
+	}
+
+	return res
+}
+
+func (a anthropic) ChatComplete(ctx context.Context, req Request) (Response, error) {
+	cl := anth.NewClient(a.key)
+
+	res, err := cl.CreateMessages(ctx, a.requestToAnthropicRequest(req))
+
+	if err != nil {
+		return Response{}, fmt.Errorf("failed to chat complete: %w", err)
+	}
+
+	return a.responseToLLMResponse(res), nil
+}
diff --git a/pkg/function.go b/pkg/function.go
new file mode 100644
index 0000000..1e00d2c
--- /dev/null
+++ b/pkg/function.go
@@ -0,0 +1,13 @@
+package llm
+
+type Function struct {
+	Name        string `json:"name"`
+	Description string `json:"description,omitempty"`
+	Strict      bool   `json:"strict,omitempty"`
+	Parameters  any    `json:"parameters"`
+}
+
+type FunctionCall struct {
+	Name      string `json:"name,omitempty"`
+	Arguments any    `json:"arguments,omitempty"`
+}
diff --git a/pkg/llm.go b/pkg/llm.go
new file mode 100644
index 0000000..6833e19
--- /dev/null
+++ b/pkg/llm.go
@@ -0,0 +1,57 @@
+package llm
+
+import (
+	"context"
+)
+
+type Role string
+
+const (
+	RoleSystem    Role = "system"
+	RoleUser      Role = "user"
+	RoleAssistant Role = "assistant"
+)
+
+type Message struct {
+	Role Role
+	Name string
+	Text string
+}
+
+type Request struct {
+	Messages []Message
+	Toolbox  []Function
+}
+
+type ToolCall struct {
+	ID           string
+	FunctionCall FunctionCall
+}
+
+type ResponseChoice struct {
+	Index   int
+	Role    Role
+	Content string
+	Refusal string
+	Name    string
+	Calls   []ToolCall
+}
+type Response struct {
+	Choices []ResponseChoice
+}
+
+type ChatCompletion interface {
+	ChatComplete(ctx context.Context, req Request) (Response, error)
+}
+
+type LLM interface {
+	ModelVersion(modelVersion string) (ChatCompletion, error)
+}
+
+func OpenAI(key string) LLM {
+	return openai{key: key}
+}
+
+func Anthropic(key string) LLM {
+	return anthropic{key: key}
+}
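
A usage sketch of the caller-facing types above. Illustrative only: the get_weather tool, its JSON-schema-style Parameters map, and the prompt text are assumptions, and the import path simply mirrors this commit's layout (module go-llm, package llm under pkg/).

package main

import (
	"encoding/json"
	"fmt"

	llm "go-llm/pkg"
)

func main() {
	// A hypothetical tool definition. Parameters is declared as `any` and is
	// passed through to the provider untouched, so a JSON-schema-style map is
	// the expected shape.
	weather := llm.Function{
		Name:        "get_weather",
		Description: "Look up the current weather for a city",
		Parameters: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"city": map[string]any{"type": "string"},
			},
			"required": []string{"city"},
		},
	}

	req := llm.Request{
		Messages: []llm.Message{
			{Role: llm.RoleSystem, Text: "You are a terse weather assistant."},
			{Role: llm.RoleUser, Text: "What's the weather in Boston?"},
		},
		Toolbox: []llm.Function{weather},
	}

	// Print the request shape; both backends build their provider-specific
	// payloads from exactly this structure.
	out, _ := json.MarshalIndent(req, "", "  ")
	fmt.Println(string(out))
}
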
diff --git a/pkg/openai.go b/pkg/openai.go
new file mode 100644
index 0000000..2236238
--- /dev/null
+++ b/pkg/openai.go
@@ -0,0 +1,93 @@
+package llm
+
+import (
+	"context"
+	"fmt"
+	oai "github.com/sashabaranov/go-openai"
+)
+
+type openai struct {
+	key   string
+	model string
+}
+
+var _ LLM = openai{}
+
+func (o openai) requestToOpenAIRequest(request Request) oai.ChatCompletionRequest {
+	res := oai.ChatCompletionRequest{
+		Model: o.model,
+	}
+
+	for _, msg := range request.Messages {
+		res.Messages = append(res.Messages, oai.ChatCompletionMessage{
+			Content: msg.Text,
+			Role:    string(msg.Role),
+			Name:    msg.Name,
+		})
+	}
+
+	for _, tool := range request.Toolbox {
+		res.Tools = append(res.Tools, oai.Tool{
+			Type: "function",
+			Function: &oai.FunctionDefinition{
+				Name:        tool.Name,
+				Description: tool.Description,
+				Strict:      tool.Strict,
+				Parameters:  tool.Parameters,
+			},
+		})
+	}
+
+	return res
+}
+
+func (o openai) responseToLLMResponse(response oai.ChatCompletionResponse) Response {
+	res := Response{}
+
+	for _, choice := range response.Choices {
+		var tools []ToolCall
+		for _, call := range choice.Message.ToolCalls {
+			toolCall := ToolCall{
+				ID: call.ID,
+				FunctionCall: FunctionCall{
+					Name:      call.Function.Name,
+					Arguments: call.Function.Arguments,
+				},
+			}
+
+			tools = append(tools, toolCall)
+		}
+		res.Choices = append(res.Choices, ResponseChoice{
+			Content: choice.Message.Content,
+			Role:    Role(choice.Message.Role),
+			Name:    choice.Message.Name,
+			Refusal: choice.Message.Refusal,
+			Calls:   tools,
+		})
+	}
+
+	return res
+}
+
+func (o openai) ChatComplete(ctx context.Context, request Request) (Response, error) {
+	cl := oai.NewClient(o.key)
+
+	req := o.requestToOpenAIRequest(request)
+
+	resp, err := cl.CreateChatCompletion(ctx, req)
+
+	fmt.Println("resp:", fmt.Sprintf("%#v", resp))
+
+	if err != nil {
+		return Response{}, fmt.Errorf("unhandled openai error: %w", err)
+	}
+
+	return o.responseToLLMResponse(resp), nil
+}
+
+func (o openai) ModelVersion(modelVersion string) (ChatCompletion, error) {
+	return openai{
+		key:   o.key,
+		model: modelVersion,
+	}, nil
+}
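
Finally, an end-to-end sketch of the flow this patch enables: pick a provider via the factory functions, pin a model with ModelVersion, send a Request, then read content or tool calls back out of the Response. The model name and the OPENAI_API_KEY environment variable are assumptions for illustration, not defaults defined by the library.

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	llm "go-llm/pkg"
)

func main() {
	// Both OpenAI and Anthropic return an LLM whose ModelVersion yields a
	// ChatCompletion, so callers can treat the backends uniformly.
	backend := llm.OpenAI(os.Getenv("OPENAI_API_KEY"))
	// backend := llm.Anthropic(os.Getenv("ANTHROPIC_API_KEY")) // alternative

	chat, err := backend.ModelVersion("gpt-4o-mini") // illustrative model name
	if err != nil {
		log.Fatal(err)
	}

	resp, err := chat.ChatComplete(context.Background(), llm.Request{
		Messages: []llm.Message{
			{Role: llm.RoleUser, Text: "Say hello in five words or fewer."},
		},
	})
	if err != nil {
		log.Fatal(err)
	}

	for _, choice := range resp.Choices {
		if choice.Content != "" {
			fmt.Println("content:", choice.Content)
		}
		// With this patch's mapping, OpenAI tool calls share a choice with any
		// text, while Anthropic tool_use blocks arrive as separate choices.
		// Arguments is passed through from the provider untouched, so its
		// concrete type depends on the backend.
		for _, call := range choice.Calls {
			fmt.Printf("tool call %s: %s(%v)\n", call.ID, call.FunctionCall.Name, call.FunctionCall.Arguments)
		}
	}
}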