Add core implementation for AI-powered question answering

Introduce multiple agents, tools, and utilities for processing, extracting, and answering user-provided questions using LLMs and external data. Key features include knowledge processing, question splitting, search term generation, and contextual knowledge handling.
commit 693ac4e6a7
parent 20bcaefaa2
Date: 2025-03-21 11:10:48 -04:00

18 changed files with 1893 additions and 18 deletions

@@ -0,0 +1,66 @@
package agents

import (
	"context"
	"fmt"
	"strings"

	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// QuestionSplitter uses an LLM to break a user question into independently
// answerable sub-questions. ContextualInfo is optional background that is
// passed to the model as extra system messages.
type QuestionSplitter struct {
	Model          gollm.ChatCompletion
	ContextualInfo []string
}

// SplitQuestion asks the model to split the given question into sub-questions.
// If the model decides the question is fine as is, it is instructed to return
// the original question as the only element.
func (q QuestionSplitter) SplitQuestion(ctx context.Context, question string) ([]string, error) {
	var res []string

	req := gollm.Request{
		Toolbox: gollm.NewToolBox(
			gollm.NewFunction(
				"questions",
				"split the provided question by the user into sub-questions",
				func(ctx *gollm.Context, args struct {
					Questions []string `description:"The questions to evaluate"`
				}) (string, error) {
					// The tool call is the model's answer: capture the
					// sub-questions and return no content to the model.
					res = args.Questions
					return "", nil
				}),
		),
		Messages: []gollm.Message{
			{
				Role: gollm.RoleSystem,
				Text: `The user is going to ask you a question; if the question would be better answered split into multiple questions, please do so.
Respond using the "questions" function.
If the question is fine as is, respond with the original question passed to the "questions" function.`,
			},
		},
	}

	if len(q.ContextualInfo) > 0 {
		req.Messages = append(req.Messages, gollm.Message{
			Role: gollm.RoleSystem,
			Text: "Some contextual information you should be aware of: " + strings.Join(q.ContextualInfo, "\n"),
		})
	}

	req.Messages = append(req.Messages, gollm.Message{
		Role: gollm.RoleUser,
		Text: question,
	})

	resp, err := q.Model.ChatComplete(ctx, req)
	if err != nil {
		return nil, err
	}

	if len(resp.Choices) == 0 {
		return nil, fmt.Errorf("no choices found")
	}

	choice := resp.Choices[0]

	// Execute the "questions" tool call, which populates res via the closure
	// above. If the model never calls the function, res stays nil.
	_, err = req.Toolbox.ExecuteCallbacks(gollm.NewContext(ctx, req, &choice, nil), choice.Calls, nil, nil)
	if err != nil {
		return nil, err
	}

	return res, nil
}
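
For reference, a minimal sketch of how the new agent might be invoked. The gollm.OpenAI constructor, the model name, and the "yourmodule" import path below are hypothetical placeholders, not part of this commit; substitute whatever gollm.ChatCompletion implementation your build of go-llm actually provides.

package main

import (
	"context"
	"fmt"

	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"

	"yourmodule/agents" // hypothetical import path for the package above
)

func main() {
	// Hypothetical constructor: use whatever gollm.ChatCompletion
	// implementation go-llm exposes in your environment.
	model := gollm.OpenAI("gpt-4o")

	splitter := agents.QuestionSplitter{
		Model:          model,
		ContextualInfo: []string{"Today's date is 2025-03-21."},
	}

	questions, err := splitter.SplitQuestion(context.Background(),
		"Who founded SpaceX, and what was its first orbital rocket?")
	if err != nil {
		panic(err)
	}

	for _, q := range questions {
		fmt.Println(q)
	}
}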
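
The commit message also mentions search term generation, but that file is not shown in this diff. As a hedged illustration of the pattern the other agents in this commit presumably follow, a search-term agent could mirror QuestionSplitter's toolbox approach; the SearchTermsGenerator name, function name, and prompt below are assumptions, not the commit's actual code.

package agents

import (
	"context"
	"fmt"

	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// SearchTermsGenerator is a hypothetical sketch of the search-term-generation
// agent named in the commit message, built on the same toolbox pattern as
// QuestionSplitter.
type SearchTermsGenerator struct {
	Model gollm.ChatCompletion
}

func (s SearchTermsGenerator) GenerateTerms(ctx context.Context, question string) ([]string, error) {
	var res []string

	req := gollm.Request{
		Toolbox: gollm.NewToolBox(
			gollm.NewFunction(
				"search_terms",
				"generate search engine queries for the user's question",
				func(ctx *gollm.Context, args struct {
					Terms []string `description:"The search terms to use"`
				}) (string, error) {
					// Capture the generated terms from the tool call.
					res = args.Terms
					return "", nil
				}),
		),
		Messages: []gollm.Message{
			{
				Role: gollm.RoleSystem,
				Text: `Generate search engine queries that would help answer the user's question. Respond using the "search_terms" function.`,
			},
			{
				Role: gollm.RoleUser,
				Text: question,
			},
		},
	}

	resp, err := s.Model.ChatComplete(ctx, req)
	if err != nil {
		return nil, err
	}

	if len(resp.Choices) == 0 {
		return nil, fmt.Errorf("no choices found")
	}

	choice := resp.Choices[0]

	_, err = req.Toolbox.ExecuteCallbacks(gollm.NewContext(ctx, req, &choice, nil), choice.Calls, nil, nil)
	if err != nil {
		return nil, err
	}

	return res, nil
}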