Update LLM integration and add new agent tools and utilities
Refactored LLM handling to use the updated langchaingo models and tools packages, replacing the gollm dependency. Introduced agent utilities, tools, and counters to improve modularity. Added a parser for LLM model configuration and reworked the answering mechanism around tool-based interaction.
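The langchaingo wiring itself is not part of this hunk; only the new ask tool appears below. Purely for orientation, a minimal langchaingo completion (a sketch assuming the OpenAI backend and an OPENAI_API_KEY in the environment, not code from this commit) looks roughly like:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
)

func main() {
	// Build a langchaingo model; the OpenAI backend reads OPENAI_API_KEY
	// from the environment by default.
	llm, err := openai.New()
	if err != nil {
		log.Fatal(err)
	}

	// Single-prompt completion helper from the langchaingo llms package.
	answer, err := llms.GenerateFromSinglePrompt(context.Background(), llm, "What is an agent tool?")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(answer)
}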
pkg/agent/ask.go · new file · 46 lines
@@ -0,0 +1,46 @@
package agent

import (
	"encoding/json"
	"fmt"
)

// AskTool lets the model ask the agent a sub-question and receive a
// summarized answer, so a larger question can be split into parts.
var AskTool = FromFunction(
	func(ctx *Context, args struct {
		Question string `description:"the question to answer"`
	}) (FuncResponse, error) {
		var q Question

		q.Question = args.Question
		ctx = ctx.WithQuestion(q)

		// First pass: answer the sub-question.
		answers, err := ask(ctx, q)
		if err != nil {
			return FuncResponse{}, err
		}

		// Second pass: run the raw answers through SummarizeAnswers.
		tb := ToolBox{}
		tb.Register(SummarizeAnswers)

		b, err := json.Marshal(answers.Answers)
		if err != nil {
			return FuncResponse{}, fmt.Errorf("failed to marshal answers: %w", err)
		}

		q = Question{Question: string(b)}
		ctx = ctx.WithQuestion(q)
		answers, err = tb.Run(ctx, q)
		if err != nil {
			return FuncResponse{}, fmt.Errorf("failed to summarize answers: %w", err)
		}

		if len(answers.Answers) == 0 {
			return FuncResponse{}, fmt.Errorf("no response from model")
		}

		return FuncResponse{Result: answers.Answers[0].Answer}, nil
	}).
	WithName("ask").
	WithDescription("Ask the agent a question, this is useful for splitting a question into multiple parts")
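A hypothetical caller, sketched against the types used above (the Context constructor and the exact return type of ToolBox.Run are not shown in this hunk and are assumed), might register and run the tool like this:

package agent

import "fmt"

// exampleAskUsage is a usage sketch, not part of this commit. It assumes a
// *Context is available from elsewhere in pkg/agent and that ToolBox.Run
// returns the same answer-set type as ask().
func exampleAskUsage(ctx *Context) error {
	tb := ToolBox{}
	tb.Register(AskTool)

	q := Question{Question: "Compare approach A with approach B, then summarize."}
	answers, err := tb.Run(ctx.WithQuestion(q), q)
	if err != nil {
		return err
	}
	if len(answers.Answers) > 0 {
		fmt.Println(answers.Answers[0].Answer)
	}
	return nil
}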