Update go-llm module version in go.mod and go.sum

2025-05-03 05:36:37 -04:00
parent 82580a5a7a
commit 24f248d900
5 changed files with 54 additions and 82 deletions


@@ -18,6 +18,8 @@ import (
 )
 type Agent struct {
 	agents.Agent
+	// Model is the chat completion model to use
+	Model gollm.ChatCompletion
@@ -42,6 +44,7 @@ type Response struct {
 // do what is necessary to answer the question.
 func (a Agent) Answer(ctx context.Context, questions []string) (Response, error) {
 	var res Response
+	a.Agent = agents.NewAgent(a.Model, gollm.NewToolBox()).WithMaxCalls(200)
 	if a.MaxCommands <= 0 {
 		a.MaxCommands = 10000
@@ -111,7 +114,7 @@ func (a Agent) Answer(ctx context.Context, questions []string) (Response, error)
 	fmt.Println(opwd)
 	slog.Info("pwd", "pwd", opwd, "epwd", epwd)
-	tools := map[string]*gollm.Function{
+	tools := map[string]gollm.Function{
 		"exit": gollm.NewFunction(
 			"exit",
 			"exit the container",
@@ -258,7 +261,7 @@ func (a Agent) Answer(ctx context.Context, questions []string) (Response, error)
 When you are done, please use "exit" to exit the container.
 Respond with any number of commands to answer the question, they will be executed in order.`
-	var toolbox []*gollm.Function
+	var toolbox []gollm.Function
 	// add unrestricted tools
 	toolbox = append(toolbox, tools["exit"], tools["write"], tools["read"])
@@ -291,7 +294,7 @@ Respond with any number of commands to answer the question, they will be execute
 		slog.Info("answered question and learned nothing")
 	}
-	res.Knowledge, err = agents.KnowledgeIntegrate(ctx, a.Model, res.Knowledge, r)
+	res.Knowledge, err = a.KnowledgeIntegrate(ctx, res.Knowledge, r)
 	if err != nil {
 		return res, fmt.Errorf("error integrating knowledge: %w", err)
 	}
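Taken together, the hunks above suggest two API changes in the updated go-llm module: gollm.Function is now handled as a value rather than a pointer, and knowledge integration moves from the package-level agents.KnowledgeIntegrate(ctx, a.Model, ...) to the agent method a.KnowledgeIntegrate(ctx, ...). The sketch below illustrates only the value-type change; the Function and NewFunction stubs are stand-ins defined here for illustration, since the real go-llm signatures are not visible in this diff.

package main

import "fmt"

// Stand-in for gollm.Function; in the updated module it is used as a value type.
type Function struct {
	Name        string
	Description string
}

// Stand-in for gollm.NewFunction (the real constructor takes more arguments).
func NewFunction(name, description string) Function {
	return Function{Name: name, Description: description}
}

func main() {
	// Before the update the map and slice held *Function; after it they hold
	// plain values, so callers copy Function structs instead of sharing pointers.
	tools := map[string]Function{
		"exit": NewFunction("exit", "exit the container"),
	}

	var toolbox []Function
	toolbox = append(toolbox, tools["exit"])

	fmt.Println(toolbox[0].Name) // prints "exit"
}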