answer/pkg/agents/shared/knowledgeworker.go

package shared

import (
	"context"
	"fmt"
	"strings"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// KnowledgeWorker uses an LLM and a toolbox of functions to extract knowledge
// from input text into an agents.Knowledge value.
type KnowledgeWorker struct {
	Model                 gollm.ChatCompletion
	ToolBox               *gollm.ToolBox
	ContextualInformation []string

	// OnNewFunction and OnFunctionFinished, if set, are invoked before and
	// after each tool call is executed.
	OnNewFunction      func(ctx context.Context, funcName string, args string) (any, error)
	OnFunctionFinished func(ctx context.Context, funcName string, args string, result any, err error, newFunctionResult any) error
}

// DefaultPrompt is the system prompt used by Answer when no systemPrompt is provided.
const DefaultPrompt = `Use the provided tools to answer the questions in your current knowledge.`

// Answer tries to answer the remaining questions in the knowledge object, while also giving the LLM a few
// extra state-update functions to manage Knowledge.CurrentObjectives and Knowledge.NotesToSelf.
// systemPrompt is the main prompt telling the LLM what to do.
// userInput is the input the LLM is trying to learn answers from.
// source is the source of the knowledge, for example a URL.
// Any tool call that returns a Knowledge object is absorbed by this function into the final Knowledge object.
// Any other return type is passed to the resultWorker function, if provided.
func (w KnowledgeWorker) Answer(context context.Context, knowledge *agents.Knowledge, systemPrompt string, userInput string, source string, history []gollm.Message, resultWorker func(res gollm.ToolCallResponse)) (agents.Knowledge, error) {
	var req gollm.Request

	// Start with the caller's system prompt, falling back to DefaultPrompt.
	if systemPrompt != "" {
		req.Messages = append(req.Messages, gollm.Message{
			Role: gollm.RoleSystem,
			Text: systemPrompt,
		})
	} else {
		req.Messages = append(req.Messages, gollm.Message{
			Role: gollm.RoleSystem,
			Text: DefaultPrompt,
		})
	}

	// Include the current knowledge state, if any, as a system message.
	k := knowledge.ToSystemMessage()
	if k.Text != "" {
		req.Messages = append(req.Messages, k)
	}

	if len(w.ContextualInformation) > 0 {
		req.Messages = append(req.Messages, gollm.Message{
			Role: gollm.RoleSystem,
			Text: "Contextual Information: " + strings.Join(w.ContextualInformation, ", "),
		})
	}

	if len(history) > 0 {
		req.Messages = append(req.Messages, history...)
	}

	// Tell the model about the state-update functions it may call.
	req.Messages = append(req.Messages, gollm.Message{
		Role: gollm.RoleSystem,
		Text: `Feel free to call "learn", "notes_to_self", or "new_objectives" to leave notes for yourself or set new objectives for the LLM to work on, all can be called multiple times.`,
	})

	lastMsg := "Please try to batch all of your calls, such as if there are things you are learning and notes you are setting, try to do a call for each in one response."
	if userInput != "" {
		lastMsg += "\nNext input you are trying to use function calls to answer your objectives from is: " + userInput
	}

	req.Messages = append(req.Messages, gollm.Message{
		Role: gollm.RoleUser,
		Text: lastMsg,
	})
	// Extend the caller's toolbox with three state-update functions; each one
	// returns an agents.Knowledge value that is absorbed into the result below.
	req.Toolbox = w.ToolBox.WithFunctions(
		gollm.NewFunction(
			"notes_to_self",
			"leave future executions of the LLM a note or two, can be called many times",
			func(ctx *gollm.Context, args struct {
				NotesToSelf []string `description:"Notes to leave for yourself for later."`
			}) (any, error) {
				return agents.Knowledge{
					NotesToSelf: args.NotesToSelf,
				}, nil
			}),
		gollm.NewFunction(
			"new_objectives",
			"Set new objectives for the LLM to work on, can be called many times. If no new objectives are set, the LLM will continue to work on the current objectives.",
			func(ctx *gollm.Context, args struct {
				Objectives []string `description:"The objectives to set for executions going forward."`
			}) (any, error) {
				return agents.Knowledge{
					CurrentObjectives: args.Objectives,
				}, nil
			}),
		gollm.NewFunction(
			"learn",
			`Use learn to pass some relevant information to the model. The model will use this information to answer the question. Use it to learn relevant information from the text. Keep these concise and relevant to the question. Can be called many times.`,
			func(ctx *gollm.Context, args struct {
				Info []string `description:"The information to learn from the input."`
			}) (any, error) {
				var k []agents.TidBit
				for _, i := range args.Info {
					k = append(k, agents.TidBit{Info: i, Source: source})
				}
				return agents.Knowledge{
					Knowledge: k,
				}, nil
			}),
	).WithRequireTool(true)
	for _, m := range req.Messages {
		fmt.Println("Role: ", m.Role, "Text: ", m.Text)
	}
	fmt.Println("Calling...")

	resp, err := w.Model.ChatComplete(context, req)
	if err != nil {
		return agents.Knowledge{}, fmt.Errorf("error calling model: %w", err)
	}

	if len(resp.Choices) == 0 {
		return agents.Knowledge{}, fmt.Errorf("no choices found")
	}

	choice := resp.Choices[0]
	if len(choice.Calls) == 0 {
		return agents.Knowledge{}, fmt.Errorf("no calls found")
	}

	var callNames []string
	for _, c := range choice.Calls {
		callNames = append(callNames, c.FunctionCall.Name)
	}
	// Run every tool call the model requested, wiring through the optional
	// OnNewFunction / OnFunctionFinished hooks.
	results, err := w.ToolBox.ExecuteCallbacks(gollm.NewContext(context, req, &choice, nil), choice.Calls, w.OnNewFunction, w.OnFunctionFinished)
	if err != nil {
		return agents.Knowledge{}, fmt.Errorf("error executing callbacks: %w", err)
	}

	// Merge every Knowledge result into a single return value; any other result
	// type is handed to the resultWorker callback, if one was provided.
	var res = agents.Knowledge{}
	for _, r := range results {
		switch v := r.Result.(type) {
		case agents.Knowledge:
			res = res.Absorb(v)
		default:
			if resultWorker != nil {
				resultWorker(r)
			}
		}
	}

	return res, nil
}
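
// Illustrative usage sketch (not part of the original file): the snippet below
// shows how a caller outside this package might invoke Answer. The variables
// ctx, model, toolbox, and pageText are hypothetical placeholders; how the
// gollm.ChatCompletion model and *gollm.ToolBox are constructed depends on the
// go-llm library and is not shown here.
//
//	worker := shared.KnowledgeWorker{
//		Model:                 model,   // a gollm.ChatCompletion implementation
//		ToolBox:               toolbox, // tools the model may call while reading
//		ContextualInformation: []string{"The user is researching local news."},
//	}
//
//	k := agents.Knowledge{
//		CurrentObjectives: []string{"Find out when the library opens on Sundays"},
//	}
//
//	// An empty systemPrompt falls back to DefaultPrompt; nil history and nil
//	// resultWorker are allowed.
//	learned, err := worker.Answer(ctx, &k, "", pageText, "https://example.com/library-hours", nil, nil)
//	if err != nil {
//		// handle the error
//	}
//	_ = learned // holds the TidBits, notes, and objectives returned by this call's tool calls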