answer/pkg/agents/question_splitter.go
Steve Dudenhoeffer 693ac4e6a7 Add core implementation for AI-powered question answering
Introduce multiple agents, tools, and utilities for processing, extracting, and answering user-provided questions using LLMs and external data. Key features include knowledge processing, question splitting, search term generation, and contextual knowledge handling.
2025-03-21 11:10:48 -04:00

67 lines
1.6 KiB
Go

package agents
import (
"context"
"fmt"
"strings"
gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)
// QuestionSplitter uses an LLM to decide whether a user's question should be
// decomposed into several smaller sub-questions before answering.
type QuestionSplitter struct {
	// Model is the chat-completion backend used to evaluate the question.
	Model gollm.ChatCompletion
	// ContextualInfo holds optional background snippets that are injected as a
	// system message so the model can split the question more accurately.
	ContextualInfo []string
}
// SplitQuestion asks the model to break question into sub-questions when that
// would improve answering, returning the resulting list. The model responds by
// calling the "questions" tool; if it declines to call the tool, the original
// question is returned as a single-element slice so callers always receive at
// least one question on success.
//
// Any contextual info configured on the splitter is passed to the model as an
// additional system message. Errors from the model call or from executing the
// tool callback are returned wrapped with context.
func (q QuestionSplitter) SplitQuestion(ctx context.Context, question string) ([]string, error) {
	// res is populated by the "questions" tool callback when the model invokes it.
	var res []string
	req := gollm.Request{
		Toolbox: gollm.NewToolBox(
			gollm.NewFunction(
				"questions",
				"split the provided question by the user into sub-questions",
				func(ctx *gollm.Context, args struct {
					Questions []string `description:"The questions to evaluate"`
				}) (string, error) {
					res = args.Questions
					return "", nil
				}),
		),
		Messages: []gollm.Message{
			{
				Role: gollm.RoleSystem,
				Text: `The user is going to ask you a question, if the question would be better answered split into multiple questions, please do so.
Respond using the "questions" function.
If the question is fine as is, respond with the original question passed to the "questions" function.`,
			},
		},
	}
	if len(q.ContextualInfo) > 0 {
		req.Messages = append(req.Messages, gollm.Message{
			Role: gollm.RoleSystem,
			Text: "Some contextual information you should be aware of: " + strings.Join(q.ContextualInfo, "\n"),
		})
	}
	req.Messages = append(req.Messages, gollm.Message{
		Role: gollm.RoleUser,
		Text: question,
	})
	resp, err := q.Model.ChatComplete(ctx, req)
	if err != nil {
		return nil, fmt.Errorf("chat completion: %w", err)
	}
	if len(resp.Choices) == 0 {
		return nil, fmt.Errorf("no choices found")
	}
	choice := resp.Choices[0]
	// Execute the model's tool calls; this is what fills res via the callback.
	// Unlike the original `_, _ =` discard, a callback failure is now surfaced
	// instead of silently yielding a nil, nil return.
	if _, err := req.Toolbox.ExecuteCallbacks(gollm.NewContext(ctx, req, &choice, nil), choice.Calls, nil, nil); err != nil {
		return nil, fmt.Errorf("executing tool callbacks: %w", err)
	}
	// The model may answer without calling the "questions" tool at all; fall
	// back to the original question so the caller never gets an empty result.
	if len(res) == 0 {
		return []string{question}, nil
	}
	return res, nil
}