Add core implementation for AI-powered question answering
Introduce multiple agents, tools, and utilities for processing user-provided questions, extracting relevant knowledge, and answering them using LLMs and external data. Key features include knowledge processing, question splitting, search term generation, and contextual knowledge handling.
pkg/agent/agent.go (new file, 153 lines)
@@ -0,0 +1,153 @@
package agent

import (
    "context"
    "fmt"
    "io"
    "log/slog"

    "github.com/davecgh/go-spew/spew"

    gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

type Agent struct {
    // ToolBox is the toolbox to use for the agent.
    ToolBox *gollm.ToolBox

    // Model is the model to use for the agent.
    Model gollm.ChatCompletion

    // OnNewFunction is a callback that, if non-nil, will be called when a new function is called by the LLM.
    // The "answer" and "no_answer" functions are not included in this callback.
    // Return an error to stop the function from being called.
    OnNewFunction func(ctx context.Context, funcName string, question string, parameter string) (any, error)

    // OnFunctionFinished is a callback that, if non-nil, will be called when a function has finished executing.
    // It receives the function name, question, and parameter (as in OnNewFunction), along with the function's
    // result, any error that occurred, and the value returned by the OnNewFunction call that preceded it.
    OnFunctionFinished func(ctx context.Context, funcName string, question string, parameter string, result string, err error, newFunctionResult any) error

    // req is the request the agent carries forward between Execute calls.
    req gollm.Request
}

// NewAgent creates an Agent seeded with the given request.
func NewAgent(req gollm.Request) *Agent {
    return &Agent{req: req}
}

// Response holds the generated answer text and the sources it references.
type Response struct {
    Text    string
    Sources []string
}

// deferClose closes cl if it is non-nil, discarding any error; intended for use in defer statements.
func deferClose(cl io.Closer) {
    if cl != nil {
        _ = cl.Close()
    }
}

// AddConversation appends a prior conversation entry to the embedded request.
func (a *Agent) AddConversation(in gollm.Input) {
    a.req.Conversation = append(a.req.Conversation, in)
}

// AddMessage appends a message to the embedded request.
func (a *Agent) AddMessage(msg gollm.Message) {
    slog.Info("adding message", "message", msg)
    a.req.Messages = append(a.req.Messages, msg)
}

// Execute will execute the current request with the given messages appended. The messages will _not_ be saved
// into the embedded request; instead, the embedded request is regenerated from the results of the ChatComplete
// call.
func (a *Agent) Execute(ctx context.Context, msgs ...gollm.Message) error {
    ctx, cancel := context.WithCancel(ctx)
    defer cancel()
    req := a.req

    slog.Info("executing", "request", req, "messages", msgs)
    for _, c := range req.Conversation {
        slog.Info("conversation", "message", c)
    }

    req.Messages = append(req.Messages, msgs...)
    for _, m := range req.Messages {
        slog.Info("messages", "message", m)
    }

    req.Toolbox = a.ToolBox

    fmt.Println("req:")
    spew.Dump(req)
    res, err := a.Model.ChatComplete(ctx, req)
    fmt.Println("res:")
    spew.Dump(res)
    if err != nil {
        return err
    }

    if len(res.Choices) == 0 {
        return nil
    }

    choice := res.Choices[0]

    // Fan out: run every tool call from the chosen response concurrently and collect the results on a channel.
    var callsOutput = make(chan gollm.ToolCallResponse, len(choice.Calls))
    fnCall := func(call gollm.ToolCall) gollm.ToolCallResponse {
        str, err := a.ToolBox.Execute(gollm.NewContext(ctx, a.req, &choice, &call), call)

        if err != nil {
            return gollm.ToolCallResponse{
                ID:    call.ID,
                Error: err,
            }
        }

        return gollm.ToolCallResponse{
            ID:     call.ID,
            Result: str,
        }
    }

    for _, call := range choice.Calls {
        go func(call gollm.ToolCall) {
            var arg any
            var err error

            if a.OnNewFunction != nil {
                arg, err = a.OnNewFunction(ctx, call.FunctionCall.Name, choice.Content, call.FunctionCall.Arguments)
                if err != nil {
                    callsOutput <- gollm.ToolCallResponse{
                        ID:    call.ID,
                        Error: err,
                    }
                    return
                }
            }

            callRes := fnCall(call)

            if a.OnFunctionFinished != nil {
                err = a.OnFunctionFinished(ctx, call.FunctionCall.Name, choice.Content, call.FunctionCall.Arguments, callRes.Result, callRes.Error, arg)
                if err != nil {
                    callsOutput <- gollm.ToolCallResponse{
                        ID:    call.ID,
                        Error: err,
                    }
                    return
                }
            }

            callsOutput <- callRes
        }(call)
    }

    // Fan in: wait for every tool call to report back before building the next request.
    var answers []gollm.ToolCallResponse
    for i := 0; i < len(choice.Calls); i++ {
        result := <-callsOutput
        answers = append(answers, result)
    }

    close(callsOutput)
    slog.Info("generating new request", "answers", answers, "choice", choice)
    a.req = gollm.NewContext(ctx, a.req, &choice, nil).ToNewRequest(answers...)
    return nil
}
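Below is a minimal, non-authoritative usage sketch of the Agent type added above. It assumes a gollm.ChatCompletion implementation, a populated *gollm.ToolBox, and a gollm.Message are constructed elsewhere (none of those constructors appear in this commit), and it uses a placeholder import path for the new package.

package main

import (
    "context"
    "log/slog"

    gollm "gitea.stevedudenhoeffer.com/steve/go-llm"

    // Placeholder import path for the package added in this commit; the
    // repository's real module path is not shown here.
    "example.invalid/answer/pkg/agent"
)

// runOnce wires up an Agent and runs a single Execute round with one user message.
func runOnce(ctx context.Context, model gollm.ChatCompletion, tools *gollm.ToolBox, userMsg gollm.Message) error {
    a := agent.NewAgent(gollm.Request{})
    a.Model = model
    a.ToolBox = tools

    // Optional hooks observed around every tool call the model makes.
    a.OnNewFunction = func(ctx context.Context, funcName, question, parameter string) (any, error) {
        slog.Info("tool call starting", "fn", funcName, "args", parameter)
        return nil, nil // returning an error here skips the call
    }
    a.OnFunctionFinished = func(ctx context.Context, funcName, question, parameter, result string, err error, fromOnNew any) error {
        slog.Info("tool call finished", "fn", funcName, "err", err)
        return nil
    }

    // userMsg is sent with this round only; afterwards the agent's embedded
    // request is rebuilt from the model's choice and the collected tool results.
    return a.Execute(ctx, userMsg)
}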