answer/pkg/agents/search_terms.go

package agents

import (
	"context"
	"fmt"

	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// SearchTerms generates DuckDuckGo search terms for a question using the
// configured chat-completion model.
type SearchTerms struct {
	Model   gollm.ChatCompletion
	Context []string
}

// SearchTerms will create search terms for the given question.
// alreadySearched is a list of search terms that have already been used, and should not be used again.
func (q SearchTerms) SearchTerms(ctx context.Context, question string, alreadySearched []string) (string, error) {
	var res string

	// Rather than asking the model for free-form text, the request exposes a
	// single tool, search_terms, whose callback simply captures the terms the
	// model chooses into res.
	req := gollm.Request{
		Toolbox: gollm.NewToolBox(
			gollm.NewFunction(
				"search_terms",
				"search DuckDuckGo with these search terms for the given question",
				func(ctx *gollm.Context, args struct {
					SearchTerms string `description:"The search terms to use for the search"`
				}) (string, error) {
					res = args.SearchTerms
					return "", nil
				}),
		),
		Messages: []gollm.Message{
			{
				Role: gollm.RoleSystem,
				Text: `You are to generate search terms for a question using DuckDuckGo. The question will be provided by the user.`,
			},
		},
	}

	// Tell the model which search terms have already been tried so it does
	// not repeat them.
	if len(alreadySearched) > 0 {
		req.Messages = append(req.Messages, gollm.Message{
			Role: gollm.RoleSystem,
			Text: fmt.Sprintf("The following search terms have already been used: %v", alreadySearched),
		})
	}

	req.Messages = append(req.Messages, gollm.Message{
		Role: gollm.RoleUser,
		Text: fmt.Sprintf("The question is: %s", question),
	})

	resp, err := q.Model.ChatComplete(ctx, req)
	if err != nil {
		return "", err
	}

	if len(resp.Choices) == 0 {
		return "", fmt.Errorf("no choices returned from model")
	}

	choice := resp.Choices[0]

	// Execute the tool calls in the first choice; the search_terms callback
	// above stores its argument in res.
	_, _ = req.Toolbox.ExecuteCallbacks(gollm.NewContext(ctx, req, &choice, nil), choice.Calls, nil, nil)

	return res, nil
}
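
// Usage sketch (not part of the original file): any value satisfying
// gollm.ChatCompletion can drive this agent. The model and ctx values below
// are assumed to come from the caller's own provider setup, and the question
// string is purely illustrative.
//
//	agent := SearchTerms{Model: model}
//	terms, err := agent.SearchTerms(ctx, "when was the first transatlantic telegraph cable laid?", nil)
//	if err != nil {
//		// handle error
//	}
//	// terms now holds the DuckDuckGo query proposed by the model; on later
//	// attempts, pass the terms already tried as the third argument so the
//	// model avoids repeating them.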