Introduce multiple agents, tools, and utilities for extracting, processing, and answering user-provided questions using LLMs and external data. Key features include knowledge processing, question splitting, search-term generation, and contextual-knowledge handling.
102 lines
2.7 KiB
Go
package agents

import (
	"context"
	"fmt"
	"strings"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/shared"

	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

type RemainingQuestions struct {
	Model                 gollm.ChatCompletion
	ContextualInformation []string
}

// Process reviews the gathered knowledge and returns any questions that remain unanswered.
func (a RemainingQuestions) Process(ctx context.Context, knowledge shared.Knowledge) ([]string, error) {
	originalQuestions := strings.Join(knowledge.OriginalQuestions, "\n")
	infoGained := ""

	// group all the gained knowledge by source
	var m = map[string][]string{}
	for _, k := range knowledge.Knowledge {
		m[k.Source] = append(m[k.Source], k.Info)
	}

	// now order them in a list so they can be referenced by index
	type source struct {
		source string
		info   []string
	}

	var sources []source
	for k, v := range m {
		sources = append(sources, source{
			source: k,
			info:   v,
		})

		if len(infoGained) > 0 {
			infoGained += "\n"
		}

		infoGained += strings.Join(v, "\n")
	}

	systemPrompt := `I am trying to answer a question, and I gathered some knowledge in an attempt to do so. Here is what I am trying to answer:
` + originalQuestions + `

Here is the knowledge I have gathered from ` + fmt.Sprint(len(sources)) + ` sources:
` + infoGained

	systemPrompt += "\n\nUsing the information gathered, have all of the questions been answered? If not, what questions remain? Use the function 'remaining_questions' to answer this question with 0 or more remaining questions."

	var res []string
	req := gollm.Request{
		Messages: []gollm.Message{
			{
				Role: gollm.RoleSystem,
				Text: systemPrompt,
			},
		},
		Toolbox: gollm.NewToolBox(
			gollm.NewFunction(
				"remaining_questions",
				"Given the information learned above, the following questions remain unanswered",
				func(ctx *gollm.Context, args struct {
					RemainingQuestions []string `description:"The questions that remain unanswered, if any"`
				}) (string, error) {
					res = append(res, args.RemainingQuestions...)
					return "ok", nil
				})),
	}

	if len(a.ContextualInformation) > 0 {
		req.Messages = append(req.Messages, gollm.Message{
			Role: gollm.RoleSystem,
			Text: "Some contextual information you should be aware of: " + strings.Join(a.ContextualInformation, "\n"),
		})
	}

	resp, err := a.Model.ChatComplete(ctx, req)
	if err != nil {
		return nil, fmt.Errorf("failed to chat complete: %w", err)
	}

	if len(resp.Choices) == 0 {
		return nil, fmt.Errorf("no choices returned")
	}

	choice := resp.Choices[0]

	if len(choice.Calls) == 0 {
		return nil, fmt.Errorf("no calls returned")
	}

	_, err = req.Toolbox.ExecuteCallbacks(gollm.NewContext(ctx, req, &choice, nil), choice.Calls, nil, nil)
	return res, err
}
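For orientation, a minimal sketch of how this agent might be driven from calling code. It is not part of the file above: how the gollm.ChatCompletion model is obtained and the example questions are assumptions, and shared.Knowledge is shown only with its OriginalQuestions field, since the concrete type of its Knowledge entries (each carrying the Source and Info fields read by Process) is defined elsewhere in the repository.

package agents_test

import (
	"context"
	"fmt"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents"
	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/shared"

	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

func ExampleRemainingQuestions_Process() {
	// Assumption: the caller already holds a configured gollm.ChatCompletion
	// from whichever go-llm backend it uses; construction is out of scope here.
	var model gollm.ChatCompletion

	agent := agents.RemainingQuestions{
		Model:                 model,
		ContextualInformation: []string{"Today's date is 2025-01-01."},
	}

	// In practice knowledge is populated by earlier agents; each entry in
	// knowledge.Knowledge carries the Source and Info fields that Process reads.
	var knowledge shared.Knowledge
	knowledge.OriginalQuestions = []string{
		"When was Go 1.0 released?",
		"Who designed the language?",
	}

	remaining, err := agent.Process(context.Background(), knowledge)
	if err != nil {
		panic(err)
	}
	for _, q := range remaining {
		fmt.Println("still unanswered:", q)
	}
}

Because the agent reports its result through the 'remaining_questions' tool call rather than free-form text, the caller gets back a plain []string of open questions that can be fed into further search or answering rounds.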