Add core implementation for AI-powered question answering

Introduce multiple agents, tools, and utilities for processing, extracting, and answering user-provided questions using LLMs and external data. Key features include knowledge processing, question splitting, search term generation, and contextual knowledge handling.
2025-03-21 11:10:48 -04:00
parent 20bcaefaa2
commit 693ac4e6a7
18 changed files with 1893 additions and 18 deletions
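Taken together, the files below form a split → search → read → summarize pipeline. The following sketch (not part of the diff) shows how the pieces are intended to compose; it assumes model is any gollm.ChatCompletion implementation, and the pkg/agents/searcher import path is inferred from the package clauses in this commit:

package main

import (
	"context"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents"
	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/searcher"
	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/shared"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// answerQuestion is a hypothetical driver chaining the new agents together;
// it is illustrative only and not part of this commit.
func answerQuestion(ctx context.Context, model gollm.ChatCompletion, question string) (string, error) {
	// 1. split the question into sub-questions
	subQuestions, err := agents.QuestionSplitter{Model: model}.SplitQuestion(ctx, question)
	if err != nil {
		return "", err
	}

	knowledge := shared.Knowledge{
		OriginalQuestions:  subQuestions,
		RemainingQuestions: subQuestions,
	}

	for _, q := range subQuestions {
		// 2. generate search terms for each sub-question
		terms, err := agents.SearchTerms{Model: model}.SearchTerms(ctx, q, nil)
		if err != nil {
			return "", err
		}

		// 3. search and read results, accumulating knowledge
		k, err := searcher.Agent{Model: model, MaxReads: 3, MaxNextResults: 1}.Search(ctx, terms, q)
		if err != nil {
			return "", err
		}
		knowledge.Knowledge = append(knowledge.Knowledge, k.Knowledge...)
	}

	// 4. summarize the gathered knowledge into a cited answer
	return agents.KnowledgeProcessor{Model: model}.Process(ctx, knowledge)
}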

View File

@@ -0,0 +1,186 @@
package agents

import (
	"context"
	"fmt"
	"regexp"
	"strconv"
	"strings"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/shared"

	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

type KnowledgeProcessor struct {
	Model                 gollm.ChatCompletion
	ContextualInformation []string
}

// Process takes a knowledge object and processes it into a response string.
func (a KnowledgeProcessor) Process(ctx context.Context, knowledge shared.Knowledge) (string, error) {
	originalQuestions := strings.Join(knowledge.OriginalQuestions, "\n")
	infoGained := ""

	// group all the gained knowledge by source
	var m = map[string][]string{}
	for _, k := range knowledge.Knowledge {
		m[k.Source] = append(m[k.Source], k.Info)
	}

	// now order them in a list so they can be referenced by index
	type source struct {
		source string
		info   []string
	}
	var sources []source
	for k, v := range m {
		sources = append(sources, source{
			source: k,
			info:   v,
		})
		if len(infoGained) > 0 {
			infoGained += "\n"
		}
		infoGained += strings.Join(v, "\n")
	}

	systemPrompt := `I am trying to answer a question, and I gathered some knowledge in an attempt to do so. Here is what I am trying to answer:
` + originalQuestions + `
Here is the knowledge I have gathered from ` + fmt.Sprint(len(sources)) + ` sources:
` + infoGained

	if len(knowledge.RemainingQuestions) > 0 {
		systemPrompt += "\n\nI still have some questions that I could not find an answer to:\n" + strings.Join(knowledge.RemainingQuestions, "\n")
	}

	systemPrompt += "\n\nUsing the sources, write an answer to the original question. Note any information that wasn't able to be answered."

	req := gollm.Request{
		Messages: []gollm.Message{
			{
				Role: gollm.RoleSystem,
				Text: systemPrompt,
			},
		},
	}

	if len(a.ContextualInformation) > 0 {
		req.Messages = append(req.Messages, gollm.Message{
			Role: gollm.RoleSystem,
			Text: "Some contextual information you should be aware of: " + strings.Join(a.ContextualInformation, "\n"),
		})
	}

	resp, err := a.Model.ChatComplete(ctx, req)
	if err != nil {
		return "", fmt.Errorf("failed to chat complete: %w", err)
	}

	systemPrompt = `I am trying to source an analysis of information I have gathered.
To do this I will provide you with all of the sourced information I have gathered in the format of:
[Source]
- Information
- Information
- Information
Where Source will be a number from 1 to ` + fmt.Sprint(len(sources)) + ` and Information will be the information gathered from that source.
You should then read the information provided by the user and tag the information with citations from the sources provided. If a fact is provided by multiple sources, you should tag it with all of the sources that provide that information.
For instance, if the sourced data were:
[1]
- The diameter of the moon is 3,474.8 km
- The moon's age is 4.53 billion years
[2]
- The moon's age is 4.53 billion years
[3]
- The moon is on average 238,855 miles away from the Earth
And the user provided the following information:
The moon is 4.5 billion years old, 238,855 miles away from the Earth, and has a diameter of 3,474.8 km.
You would then tag the information with the sources like so:
The moon is 4.5 billion years old [1,2], 238,855 miles away from the Earth [3], and has a diameter of 3,474.8 km [1].`

	providedIntel := `Here is the information I have gathered:
`
	for i, s := range sources {
		providedIntel += "[" + fmt.Sprint(i+1) + "]\n"
		for _, info := range s.info {
			providedIntel += " - " + info + "\n"
		}
	}

	summarizedData := `Here is the text I need you to source with citations:
` + resp.Choices[0].Content

	req = gollm.Request{
		Messages: []gollm.Message{
			{
				Role: gollm.RoleSystem,
				Text: systemPrompt,
			},
			{
				Role: gollm.RoleSystem,
				Text: providedIntel,
			},
			{
				Role: gollm.RoleUser,
				Text: summarizedData,
			},
		},
	}

	resp, err = a.Model.ChatComplete(ctx, req)
	if err != nil {
		return "", fmt.Errorf("failed to chat complete: %w", err)
	}

	// now go through the response and find all citations of the form
	// [1] or [1, 2], then map the cited numbers back to their sources
	re := regexp.MustCompile(`\[([\d,\s]+)]`)

	// find all the citations
	citations := re.FindAllString(resp.Choices[0].Content, -1)

	// now we need to find the sources
	lookup := map[int][]string{}
	for _, c := range citations {
		c = strings.Trim(c, "[]")
		parts := strings.Split(c, ",")
		for _, v := range parts {
			v = strings.TrimSpace(v)
			// a malformed number parses as 0 and is rejected by the
			// bounds check below
			i, _ := strconv.Atoi(v)
			if i < 1 || i > len(sources) {
				continue
			}
			lookup[i] = append(lookup[i], sources[i-1].source)
		}
	}

	res := resp.Choices[0].Content
	if len(lookup) > 0 {
		res += "\n\nHere are the sources for the information provided:\n"
		for i := 1; i <= len(sources); i++ {
			if _, ok := lookup[i]; !ok {
				continue
			}
			res += "[" + fmt.Sprint(i) + "] <" + lookup[i][0] + ">\n"
		}
	}

	return res, nil
}
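A hypothetical call into the processor, with made-up knowledge and source URL, might look like this sketch:

package agents_test

import (
	"context"
	"fmt"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents"
	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/shared"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// demoKnowledgeProcessor assumes model is an already-constructed
// gollm.ChatCompletion; the data is illustrative.
func demoKnowledgeProcessor(ctx context.Context, model gollm.ChatCompletion) error {
	p := agents.KnowledgeProcessor{Model: model}
	answer, err := p.Process(ctx, shared.Knowledge{
		OriginalQuestions: []string{"How old is the moon?"},
		Knowledge: []shared.TidBit{
			{Info: "The moon's age is 4.53 billion years", Source: "https://example.com/moon"},
		},
	})
	if err != nil {
		return err
	}
	fmt.Println(answer) // answer text followed by a "[1] <https://example.com/moon>" source list
	return nil
}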

View File

@@ -0,0 +1,66 @@
package agents

import (
	"context"
	"fmt"
	"strings"

	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

type QuestionSplitter struct {
	Model          gollm.ChatCompletion
	ContextualInfo []string
}

// SplitQuestion asks the model to break the question into sub-questions; if
// the question stands on its own it is returned unchanged.
func (q QuestionSplitter) SplitQuestion(ctx context.Context, question string) ([]string, error) {
	var res []string

	req := gollm.Request{
		Toolbox: gollm.NewToolBox(
			gollm.NewFunction(
				"questions",
				"split the provided question by the user into sub-questions",
				func(ctx *gollm.Context, args struct {
					Questions []string `description:"The questions to evaluate"`
				}) (string, error) {
					res = args.Questions
					return "", nil
				}),
		),
		Messages: []gollm.Message{
			{
				Role: gollm.RoleSystem,
				Text: `The user is going to ask you a question, if the question would be better answered split into multiple questions, please do so.
Respond using the "questions" function.
If the question is fine as is, respond with the original question passed to the "questions" function.`,
			},
		},
	}

	if len(q.ContextualInfo) > 0 {
		req.Messages = append(req.Messages, gollm.Message{
			Role: gollm.RoleSystem,
			Text: "Some contextual information you should be aware of: " + strings.Join(q.ContextualInfo, "\n"),
		})
	}

	req.Messages = append(req.Messages, gollm.Message{
		Role: gollm.RoleUser,
		Text: question,
	})

	resp, err := q.Model.ChatComplete(ctx, req)
	if err != nil {
		return nil, err
	}
	if len(resp.Choices) == 0 {
		return nil, fmt.Errorf("no choices found")
	}

	choice := resp.Choices[0]
	_, _ = req.Toolbox.ExecuteCallbacks(gollm.NewContext(ctx, req, &choice, nil), choice.Calls, nil, nil)

	return res, nil
}
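Usage is a single call; a sketch assuming a model is already in scope:

package agents_test

import (
	"context"
	"fmt"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

func demoSplit(ctx context.Context, model gollm.ChatCompletion) error {
	q := agents.QuestionSplitter{Model: model}
	subs, err := q.SplitQuestion(ctx, "Who directed Alien, and what else have they directed?")
	if err != nil {
		return err
	}
	for _, s := range subs {
		fmt.Println(s) // e.g. "Who directed Alien?" and "What else has that director directed?"
	}
	return nil
}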

View File

@@ -0,0 +1,46 @@
package reader

import (
	"context"
	"fmt"
	"net/url"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/shared"
	"gitea.stevedudenhoeffer.com/steve/answer/pkg/cache"

	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

type Agent struct {
	// Model is the chat completion model to use
	Model gollm.ChatCompletion

	// OnNewFunction is called when a new function is created
	OnNewFunction func(ctx context.Context, funcName string, question string, parameter string) (any, error)

	// OnFunctionFinished is called when a function is finished
	OnFunctionFinished func(ctx context.Context, funcName string, question string, parameter string, result string, err error, newFunctionResult any) error

	Cache cache.Cache

	ContextualInformation []string
}

// Read will try to read the source and return the answer if possible.
func (a Agent) Read(ctx context.Context, question string, source *url.URL) (shared.Knowledge, error) {
	if a.Cache == nil {
		a.Cache = cache.Nop{}
	}

	ar, err := extractArticle(ctx, a.Cache, source)
	if err != nil {
		return shared.Knowledge{}, err
	}

	if ar.Body == "" {
		return shared.Knowledge{}, fmt.Errorf("could not extract body from page")
	}

	return doesTextAnswerQuestion(ctx, question, ar.Body, source.String(), a)
}
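A sketch of reading one page (URL and question are illustrative):

package reader_test

import (
	"context"
	"fmt"
	"net/url"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/reader"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

func demoRead(ctx context.Context, model gollm.ChatCompletion) error {
	u, err := url.Parse("https://en.wikipedia.org/wiki/Moon")
	if err != nil {
		return err
	}
	// Cache is optional; the agent falls back to cache.Nop{} when nil
	k, err := reader.Agent{Model: model}.Read(ctx, "How old is the moon?", u)
	if err != nil {
		return err
	}
	fmt.Println(len(k.Knowledge), "tidbits learned;", len(k.RemainingQuestions), "questions remain")
	return nil
}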

View File

@@ -0,0 +1,142 @@
package reader

import (
	"context"
	"fmt"
	"net/url"
	"strings"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/shared"
	"gitea.stevedudenhoeffer.com/steve/answer/pkg/cache"
	"gitea.stevedudenhoeffer.com/steve/answer/pkg/extractor"

	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

type article struct {
	URL   string
	Title string
	Body  string
}

func extractArticle(ctx context.Context, c cache.Cache, u *url.URL) (res article, err error) {
	// the extractors can panic; convert any panic into an error return
	defer func() {
		e := recover()
		if e != nil {
			if e, ok := e.(error); ok {
				err = fmt.Errorf("panic: %w", e)
			} else {
				err = fmt.Errorf("panic: %v", e)
			}
		}
	}()

	extractors := extractor.MultiExtractor(
		extractor.CacheExtractor{
			Cache:     c,
			Tag:       "goose",
			Extractor: extractor.GooseExtractor{},
		},
		extractor.CacheExtractor{
			Cache:     c,
			Tag:       "playwright",
			Extractor: extractor.PlaywrightExtractor{},
		},
	)

	a, err := extractors.Extract(ctx, u.String())
	if err != nil {
		return article{}, err
	}

	return article{
		URL:   a.URL,
		Title: a.Title,
		Body:  a.Body,
	}, nil
}

type Response struct {
	Knowledge []string
	Remaining string
}

type Learn struct {
	Info string `description:"The information to learn from the text."`
}

func doesTextAnswerQuestion(ctx context.Context, question string, text string, source string, a Agent) (shared.Knowledge, error) {
	var knowledge shared.Knowledge

	fnAnswer := gollm.NewFunction(
		"learn",
		`Use learn to pass some relevant information to the model. The model will use this information to answer the question. Use it to learn relevant information from the text. Keep these concise and relevant to the question.`,
		func(ctx *gollm.Context, args Learn) (string, error) {
			knowledge.Knowledge = append(knowledge.Knowledge, shared.TidBit{Info: args.Info, Source: source})
			return "", nil
		})

	fnNoAnswer := gollm.NewFunction(
		"finished",
		"Indicate that the text does not answer the question.",
		func(ctx *gollm.Context, args struct {
			Remaining string `description:"After all the knowledge has been learned, this is the parts of the question that are not answered, if any. Leave this blank if the text fully answers the question."`
		}) (string, error) {
			// a blank Remaining means the text fully answered the question,
			// so don't record an empty remaining question
			if args.Remaining != "" {
				knowledge.RemainingQuestions = []string{args.Remaining}
			}
			return "", nil
		})

	req := gollm.Request{
		Messages: []gollm.Message{
			{
				Role: gollm.RoleSystem,
				Text: `Evaluate the given text to see if you can answer any information from it relevant to the question that the user asks.
Use the "learn" function to pass relevant information to the model. You can use the "learn" function multiple times to pass multiple pieces of relevant information to the model.
If the text does not answer the question or you are done using "learn" to pass on knowledge then use the "finished" function and indicate the parts of the question that are not answered by anything learned.
You can call "learn" multiple times before calling "finished".`,
			},
			{
				Role: gollm.RoleSystem,
				Text: "The text to evaluate: " + text,
			},
		},
		Toolbox: gollm.NewToolBox(fnAnswer, fnNoAnswer),
	}

	if len(a.ContextualInformation) > 0 {
		req.Messages = append(req.Messages, gollm.Message{
			Role: gollm.RoleSystem,
			Text: "Some contextual information you should be aware of: " + strings.Join(a.ContextualInformation, "\n"),
		})
	}

	req.Messages = append(req.Messages, gollm.Message{
		Role: gollm.RoleUser,
		Text: "My question to learn from the text is: " + question,
	})

	resp, err := a.Model.ChatComplete(ctx, req)
	if err != nil {
		return knowledge, err
	}
	if len(resp.Choices) == 0 {
		return knowledge, nil
	}

	choice := resp.Choices[0]
	if len(choice.Calls) == 0 {
		return knowledge, nil
	}

	_, err = req.Toolbox.ExecuteCallbacks(gollm.NewContext(ctx, req, &choice, nil), choice.Calls, nil, nil)
	return knowledge, err
}

View File

@@ -0,0 +1,101 @@
package agents

import (
	"context"
	"fmt"
	"strings"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/shared"

	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

type RemainingQuestions struct {
	Model                 gollm.ChatCompletion
	ContextualInformation []string
}

// Process takes a knowledge object and returns the questions that remain
// unanswered by the knowledge gathered so far, if any.
func (a RemainingQuestions) Process(ctx context.Context, knowledge shared.Knowledge) ([]string, error) {
	originalQuestions := strings.Join(knowledge.OriginalQuestions, "\n")
	infoGained := ""

	// group all the gained knowledge by source
	var m = map[string][]string{}
	for _, k := range knowledge.Knowledge {
		m[k.Source] = append(m[k.Source], k.Info)
	}

	// now order them in a list so they can be referenced by index
	type source struct {
		source string
		info   []string
	}
	var sources []source
	for k, v := range m {
		sources = append(sources, source{
			source: k,
			info:   v,
		})
		if len(infoGained) > 0 {
			infoGained += "\n"
		}
		infoGained += strings.Join(v, "\n")
	}

	systemPrompt := `I am trying to answer a question, and I gathered some knowledge in an attempt to do so. Here is what I am trying to answer:
` + originalQuestions + `
Here is the knowledge I have gathered from ` + fmt.Sprint(len(sources)) + ` sources:
` + infoGained

	systemPrompt += "\n\nUsing the information gathered, have all of the questions been answered? If not, what questions remain? Use the function 'remaining_questions' to answer this question with 0 or more remaining questions."

	var res []string

	req := gollm.Request{
		Messages: []gollm.Message{
			{
				Role: gollm.RoleSystem,
				Text: systemPrompt,
			},
		},
		Toolbox: gollm.NewToolBox(
			gollm.NewFunction(
				"remaining_questions",
				"Given the information learned above, the following questions remain unanswered",
				func(ctx *gollm.Context, args struct {
					RemainingQuestions []string `description:"The questions that remain unanswered, if any"`
				}) (string, error) {
					res = append(res, args.RemainingQuestions...)
					return "ok", nil
				})),
	}

	if len(a.ContextualInformation) > 0 {
		req.Messages = append(req.Messages, gollm.Message{
			Role: gollm.RoleSystem,
			Text: "Some contextual information you should be aware of: " + strings.Join(a.ContextualInformation, "\n"),
		})
	}

	resp, err := a.Model.ChatComplete(ctx, req)
	if err != nil {
		return nil, fmt.Errorf("failed to chat complete: %w", err)
	}
	if len(resp.Choices) == 0 {
		return nil, fmt.Errorf("no choices returned")
	}

	choice := resp.Choices[0]
	if len(choice.Calls) == 0 {
		return nil, fmt.Errorf("no calls returned")
	}

	_, err = req.Toolbox.ExecuteCallbacks(gollm.NewContext(ctx, req, &choice, nil), choice.Calls, nil, nil)
	return res, err
}
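A sketch of asking which questions remain, with illustrative data:

package agents_test

import (
	"context"
	"fmt"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents"
	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/shared"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

func demoRemaining(ctx context.Context, model gollm.ChatCompletion) error {
	k := shared.Knowledge{
		OriginalQuestions: []string{"How old is the moon?", "How far away is the moon?"},
		Knowledge: []shared.TidBit{
			{Info: "The moon's age is 4.53 billion years", Source: "https://example.com/moon"},
		},
	}
	remaining, err := agents.RemainingQuestions{Model: model}.Process(ctx, k)
	if err != nil {
		return err
	}
	fmt.Println(remaining) // likely just the unanswered distance question
	return nil
}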

View File

@@ -0,0 +1,65 @@
package agents

import (
	"context"
	"fmt"

	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

type SearchTerms struct {
	Model   gollm.ChatCompletion
	Context []string
}

// SearchTerms will create search terms for the given question.
// alreadySearched is a list of search terms that have already been used, and
// should not be used again.
func (q SearchTerms) SearchTerms(ctx context.Context, question string, alreadySearched []string) (string, error) {
	var res string

	req := gollm.Request{
		Toolbox: gollm.NewToolBox(
			gollm.NewFunction(
				"search_terms",
				"search DuckDuckGo with these search terms for the given question",
				func(ctx *gollm.Context, args struct {
					SearchTerms string `description:"The search terms to use for the search"`
				}) (string, error) {
					res = args.SearchTerms
					return "", nil
				}),
		),
		Messages: []gollm.Message{
			{
				Role: gollm.RoleSystem,
				Text: `You are to generate search terms for a question using DuckDuckGo. The question will be provided by the user.`,
			},
		},
	}

	if len(alreadySearched) > 0 {
		req.Messages = append(req.Messages, gollm.Message{
			Role: gollm.RoleSystem,
			Text: fmt.Sprintf("The following search terms have already been used: %v", alreadySearched),
		})
	}

	req.Messages = append(req.Messages, gollm.Message{
		Role: gollm.RoleUser,
		Text: fmt.Sprintf("The question is: %s", question),
	})

	resp, err := q.Model.ChatComplete(ctx, req)
	if err != nil {
		return "", err
	}
	if len(resp.Choices) == 0 {
		return "", fmt.Errorf("no choices found")
	}

	choice := resp.Choices[0]
	_, _ = req.Toolbox.ExecuteCallbacks(gollm.NewContext(ctx, req, &choice, nil), choice.Calls, nil, nil)

	return res, nil
}
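A sketch showing the alreadySearched parameter in use across two rounds:

package agents_test

import (
	"context"
	"fmt"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

func demoSearchTerms(ctx context.Context, model gollm.ChatCompletion) error {
	st := agents.SearchTerms{Model: model}

	// first round: no prior searches
	terms, err := st.SearchTerms(ctx, "What is the tallest building in Europe?", nil)
	if err != nil {
		return err
	}

	// later rounds pass the terms already tried so the model picks new ones
	retry, err := st.SearchTerms(ctx, "What is the tallest building in Europe?", []string{terms})
	if err != nil {
		return err
	}
	fmt.Println(terms, "->", retry)
	return nil
}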

View File

@@ -0,0 +1,296 @@
package searcher

import (
	"context"
	"fmt"
	"log/slog"
	"net/url"
	"strings"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/reader"
	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/shared"

	"gitea.stevedudenhoeffer.com/steve/go-extractor"
	"gitea.stevedudenhoeffer.com/steve/go-extractor/sites/duckduckgo"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// kMaxRuns is the maximum number of calls into the LLM this agent will make.
const kMaxRuns = 30

type Result struct {
	// Answer is the answer to the question that was asked.
	Answer string

	// Sources is a list of sources that were used to find the answer.
	Sources []string

	// Remaining is the remaining part(s) of the question that was not answered.
	Remaining string
}

type Agent struct {
	// Model is the chat completion model to use
	Model gollm.ChatCompletion

	// OnGoingToNextPage is called when the agent is going to the next page
	OnGoingToNextPage func(ctx context.Context) error

	// OnReadingSearchResult is called when the agent is reading a search result.
	// searchResult is the search result that is being read.
	// The return value is any data that you want to pass to OnFinishedReadingSearchResult.
	OnReadingSearchResult func(ctx context.Context, searchResult duckduckgo.Result) (any, error)

	// OnFinishedReadingSearchResult is called when the agent is finished reading a search result.
	// newKnowledge is the knowledge that was gained from reading the search result.
	// err is any error that occurred while reading the search result.
	// onReadingResult is the result of the OnReadingSearchResult function from the same search result.
	OnFinishedReadingSearchResult func(ctx context.Context, searchResult duckduckgo.Result, newKnowledge []string, err error, onReadingResult any) error

	OnDone func(ctx context.Context, knowledge shared.Knowledge) error

	// MaxReads is the maximum number of pages that can be read by the agent. Unlimited if <= 0.
	MaxReads int

	// MaxNextResults is the maximum number of times that the next_results function can be called. Unlimited if <= 0.
	MaxNextResults int

	ContextualInformation []string
}

// Search will search duckduckgo for the given question, and then read the results to figure out the answer.
// searchQuery is the query that you want to search for, e.g. "what is the capital of France site:reddit.com"
// question is the question that you are trying to answer when reading the search results.
// If the context contains a "browser" key that is an extractor.Browser, it will use that browser to search, otherwise a
// new one will be created and used for the life of this search and then closed.
func (a Agent) Search(ctx context.Context, searchQuery string, question string) (shared.Knowledge, error) {
	var knowledge = shared.Knowledge{
		OriginalQuestions:  []string{question},
		RemainingQuestions: []string{question},
	}
	var done = false

	browser, ok := ctx.Value("browser").(extractor.Browser)
	if !ok {
		b, err := extractor.NewPlayWrightBrowser(extractor.PlayWrightBrowserOptions{})
		if err != nil {
			return knowledge, err
		}
		// close the browser we created, not the zero-value interface from
		// the failed type assertion above
		defer deferClose(b)
		ctx = context.WithValue(ctx, "browser", b)
		browser = b
	}

	cfg := duckduckgo.Config{
		SafeSearch: duckduckgo.SafeSearchOff,
		Region:     "us-en",
	}

	page, err := cfg.OpenSearch(ctx, browser, searchQuery)
	if err != nil {
		return knowledge, err
	}
	defer deferClose(page)

	var numberOfReads int
	var numberOfNextResults int
	var searchResults []duckduckgo.Result

	// filterResults will remove any search results that have an empty URL
	filterResults := func(in []duckduckgo.Result) []duckduckgo.Result {
		var res []duckduckgo.Result
		for _, r := range in {
			if r.URL == "" {
				continue
			}
			res = append(res, r)
		}
		return res
	}

	searchResults = filterResults(page.GetResults())

	fnNextResults := gollm.NewFunction(
		"next_results",
		"get the next page of search results",
		func(c *gollm.Context,
			arg struct {
				Ignored string `description:"This is ignored, only included for API requirements."`
			}) (string, error) {
			if numberOfNextResults >= a.MaxNextResults && a.MaxNextResults > 0 {
				return "Max next results reached", nil
			}
			numberOfNextResults++

			searchResults = append(searchResults, filterResults(page.GetResults())...)

			// clamp it to the 30 most recent results
			if len(searchResults) > 30 {
				// remove the first len(searchResults) - 30 elements
				searchResults = searchResults[len(searchResults)-30:]
			}
			return "Got more search results", nil
		},
	)

	fnReadSearchResult := gollm.NewFunction(
		"read",
		"read the numbered search result and learn what it says about the question",
		func(c *gollm.Context, arg struct {
			Num int `description:"The # of the search result to read."`
		}) (string, error) {
			if numberOfReads >= a.MaxReads && a.MaxReads > 0 {
				return "Max reads reached", nil
			}
			numberOfReads++

			r := reader.Agent{
				Model:                 a.Model,
				ContextualInformation: a.ContextualInformation,
			}

			// num is 1 based, we need 0 based
			num := arg.Num - 1

			// now ensure bounds are good
			if num < 0 || num >= len(searchResults) {
				return "", fmt.Errorf("search result %d is out of bounds", num)
			}

			sr := searchResults[num]

			// remove that search result from the list
			searchResults = append(searchResults[:num], searchResults[num+1:]...)

			u, err := url.Parse(sr.URL)
			if err != nil {
				return "", err
			}

			var onReadingResult any
			if a.OnReadingSearchResult != nil {
				onReadingResult, err = a.OnReadingSearchResult(ctx, sr)
				if err != nil {
					return "", err
				}
			}

			response, err := r.Read(c, question, u)
			if err != nil {
				return "", err
			}

			if a.OnFinishedReadingSearchResult != nil {
				var newKnowledge []string
				for _, k := range response.Knowledge {
					newKnowledge = append(newKnowledge, k.Info)
				}
				err = a.OnFinishedReadingSearchResult(ctx, sr, newKnowledge, err, onReadingResult)
				if err != nil {
					return "", err
				}
			}

			slog.Info("read finished", "url", u, "knowledge gained", response.Knowledge, "remaining", response.RemainingQuestions)

			knowledge.Knowledge = append(knowledge.Knowledge, response.Knowledge...)
			knowledge.RemainingQuestions = response.RemainingQuestions
			return "ok", nil
		})

	fnDone := gollm.NewFunction(
		"done",
		"finish reading search results",
		func(c *gollm.Context, arg struct {
			Ignored string `description:"This is ignored, only included for API requirements."`
		}) (string, error) {
			done = true
			return "ok", nil
		})

	for i := 0; i < kMaxRuns && !done; i++ {
		tools := gollm.NewToolBox(fnDone)
		if numberOfReads < a.MaxReads || a.MaxReads <= 0 {
			tools = tools.WithFunction(*fnReadSearchResult)
		}
		if numberOfNextResults < a.MaxNextResults || a.MaxNextResults <= 0 {
			tools = tools.WithFunction(*fnNextResults)
		}

		var req = gollm.Request{
			Toolbox: tools,
		}

		req.Messages = append(req.Messages, gollm.Message{
			Role: gollm.RoleSystem,
			Text: `You are searching DuckDuckGo for the answer to the question that will be posed by the user. The results will be provided in system messages in the format of: #. "https://url.here" - "Title of Page" - "Description here". For instance:
1. "https://example.com" - "Example Title" - "This is an example description."
2. "https://example2.com" - "Example Title 2" - "This is an example description 2."`,
		})

		req.Messages = append(req.Messages, gollm.Message{
			Role: gollm.RoleSystem,
			Text: fmt.Sprintf(`You can read a search result by using the function "read" with the # of the page to read,
it will attempt to read the page, and then an LLM will read the page and see if it answers the question. The return value will be if there was an answer or not. You only have %d reads left of your original %d. Try to only pick high quality search results to read.
If you need to see more results from DuckDuckGo you can run the function "next_results" to get the next page of results. You only have %d next_results left of your original %d.
You can also use the function "done" to give up on reading the search results and finish executing, indicating you either have nothing left to answer or do not think any of the sources left will answer.`, max(a.MaxReads-numberOfReads, 0), a.MaxReads, max(a.MaxNextResults-numberOfNextResults, 0), a.MaxNextResults),
		})

		if len(a.ContextualInformation) > 0 {
			req.Messages = append(req.Messages, gollm.Message{
				Role: gollm.RoleSystem,
				Text: "Some contextual information you should be aware of: " + strings.Join(a.ContextualInformation, "\n"),
			})
		}

		searches := ""
		for i, r := range searchResults {
			if i > 0 {
				searches += "\n"
			}
			searches += fmt.Sprintf("%d. %q - %q - %q", i+1, r.URL, r.Title, r.Description)
		}

		req.Messages = append(req.Messages, gollm.Message{
			Role: gollm.RoleSystem,
			Text: "Search results are:\n" + searches,
		})

		results, err := a.Model.ChatComplete(ctx, req)
		if err != nil {
			return knowledge, err
		}

		if len(results.Choices) == 0 {
			break
		}
		choice := results.Choices[0]

		_, err = tools.ExecuteCallbacks(gollm.NewContext(ctx, req, &choice, nil), choice.Calls, nil, nil)
		if err != nil {
			return knowledge, err
		}
	}

	if a.OnDone != nil {
		err := a.OnDone(ctx, knowledge)
		if err != nil {
			return knowledge, err
		}
	}

	return knowledge, nil
}
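A sketch of driving a bounded search, with the optional progress callback wired up (the pkg/agents/searcher import path is inferred from this commit's package clauses):

package searcher_test

import (
	"context"
	"fmt"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/searcher"
	"gitea.stevedudenhoeffer.com/steve/go-extractor/sites/duckduckgo"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

func demoSearch(ctx context.Context, model gollm.ChatCompletion) error {
	a := searcher.Agent{
		Model:          model,
		MaxReads:       5, // stop after reading 5 pages
		MaxNextResults: 2, // allow paging through results twice
		OnReadingSearchResult: func(ctx context.Context, sr duckduckgo.Result) (any, error) {
			fmt.Println("reading:", sr.URL)
			return nil, nil
		},
	}
	k, err := a.Search(ctx, "tallest building in Europe", "What is the tallest building in Europe?")
	if err != nil {
		return err
	}
	fmt.Println(k.ToMessage())
	return nil
}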

View File

@@ -0,0 +1,45 @@
package searcher

import (
	"fmt"
	"io"

	"gitea.stevedudenhoeffer.com/steve/go-extractor"
	"gitea.stevedudenhoeffer.com/steve/go-extractor/sites/duckduckgo"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// deferClose closes the closer if it is non-nil, ignoring any error; it is
// safe to use in defer statements where the value may be nil.
func deferClose(closer io.Closer) {
	if closer != nil {
		_ = closer.Close()
	}
}

type searchResult struct {
	Answer  string   `json:"answer"`
	Sources []string `json:"sources"`
}

func fnSearch(ctx *gollm.Context, args struct {
	Query    string `description:"The search query to perform on duckduckgo"`
	Question string `description:"The question(s) you are trying to answer when you read the search results."`
}) (string, error) {
	browser, ok := ctx.Value("browser").(extractor.Browser)
	if !ok {
		return "", fmt.Errorf("browser not found")
	}

	cfg := duckduckgo.Config{
		SafeSearch: duckduckgo.SafeSearchOff,
		Region:     "us-en",
	}

	page, err := cfg.OpenSearch(ctx, browser, args.Query)
	if err != nil {
		return "", fmt.Errorf("failed to search: %w", err)
	}
	defer deferClose(page)

	return "", nil
}

View File

@@ -0,0 +1,33 @@
package shared

import (
	"strings"
)

// TidBit is a small piece of information that the AI has learned.
type TidBit struct {
	Info   string
	Source string
}

type Knowledge struct {
	// OriginalQuestions are the questions that were asked first to the AI before any processing was done.
	OriginalQuestions []string

	// RemainingQuestions are the questions that are left to find answers for.
	RemainingQuestions []string

	// Knowledge are the tidbits of information that the AI has learned.
	Knowledge []TidBit
}

// ToMessage converts the knowledge to a message that can be sent to the LLM.
func (k Knowledge) ToMessage() string {
	var learned []string
	for _, t := range k.Knowledge {
		learned = append(learned, t.Info)
	}

	return "Original questions asked:\n" + strings.Join(k.OriginalQuestions, "\n") + "\n" +
		"Learned information:\n" + strings.Join(learned, "\n") + "\n" +
		"Remaining questions:\n" + strings.Join(k.RemainingQuestions, "\n")
}
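As a rough illustration, a Knowledge value holding one learned tidbit and one open question renders to:

Original questions asked:
How old is the moon?
How far away is the moon?
Learned information:
The moon's age is 4.53 billion years
Remaining questions:
How far away is the moon?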

View File

@@ -0,0 +1,42 @@
package shared

import (
	"context"
	"errors"
	"sync/atomic"

	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// ModelTracker wraps a gollm.ChatCompletion and counts the calls made
// through it, optionally enforcing a maximum.
type ModelTracker struct {
	parent  gollm.ChatCompletion
	maximum int64
	calls   int64
}

var _ gollm.ChatCompletion = &ModelTracker{}

// NewModelTracker creates a new model tracker that will limit the number of calls to the parent.
// Set to 0 to disable the limit.
func NewModelTracker(parent gollm.ChatCompletion, maximum int64) *ModelTracker {
	return &ModelTracker{parent: parent, maximum: maximum}
}

var ErrModelCapacity = errors.New("maximum model capacity reached")

func (m *ModelTracker) ChatComplete(ctx context.Context, req gollm.Request) (gollm.Response, error) {
	// AddInt64 returns the incremented count, so compare with > to allow
	// exactly maximum calls; >= would stop one call short of the limit
	if m.maximum > 0 && atomic.AddInt64(&m.calls, 1) > m.maximum {
		return gollm.Response{}, ErrModelCapacity
	}
	return m.parent.ChatComplete(ctx, req)
}

// ResetCalls resets the number of calls made to the parent.
func (m *ModelTracker) ResetCalls() {
	atomic.StoreInt64(&m.calls, 0)
}

// GetCalls returns the number of calls made to the parent so far.
func (m *ModelTracker) GetCalls() int64 {
	return atomic.LoadInt64(&m.calls)
}
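A sketch of wrapping a model with a 10-call budget:

package shared_test

import (
	"context"
	"errors"
	"fmt"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/shared"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

func demoTracker(ctx context.Context, model gollm.ChatCompletion, req gollm.Request) {
	// allow at most 10 LLM calls for this run
	tracked := shared.NewModelTracker(model, 10)

	// pass tracked anywhere a gollm.ChatCompletion is accepted
	_, err := tracked.ChatComplete(ctx, req)
	if errors.Is(err, shared.ErrModelCapacity) {
		fmt.Println("budget exhausted after", tracked.GetCalls(), "calls")
	}
}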

View File

@@ -0,0 +1,27 @@
package tools

import (
	"go.starlark.net/lib/math"
	"go.starlark.net/starlark"
	"go.starlark.net/syntax"

	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

var Calculator = gollm.NewFunction(
	"calculator",
	"A starlark calculator",
	func(ctx *gollm.Context, args struct {
		Expression string `description:"The expression to evaluate using starlark"`
	}) (string, error) {
		val, err := starlark.EvalOptions(&syntax.FileOptions{},
			&starlark.Thread{Name: "main"},
			"input",
			args.Expression,
			math.Module.Members)
		if err != nil {
			return "", err
		}
		return val.String(), nil
	})
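A sketch of exposing the calculator to a request (the pkg/tools import path is assumed; the expression in the user message is illustrative):

package tools_test

import (
	"gitea.stevedudenhoeffer.com/steve/answer/pkg/tools"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

func calculatorRequest() gollm.Request {
	return gollm.Request{
		Toolbox: gollm.NewToolBox(tools.Calculator),
		Messages: []gollm.Message{
			{Role: gollm.RoleUser, Text: "What is 2**10 / sqrt(16)?"},
		},
	}
}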

View File

@@ -0,0 +1,34 @@
package tools
import (
"github.com/Edw590/go-wolfram"
gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)
type WolframFunctions struct {
Imperial *gollm.Function
Metric *gollm.Function
}
func CreateWolframFunctions(appId string) WolframFunctions {
client := &wolfram.Client{AppID: appId}
return WolframFunctions{
Imperial: gollm.NewFunction(
"wolfram",
"Query the Wolfram Alpha API",
func(ctx *gollm.Context, args struct {
Question string `description:"The question to ask Wolfram|Alpha"`
}) (string, error) {
return client.GetShortAnswerQuery(args.Question, wolfram.Imperial, 10)
}),
Metric: gollm.NewFunction(
"wolfram",
"Query the Wolfram Alpha API",
func(ctx *gollm.Context, args struct {
Question string `description:"The question to ask Wolfram|Alpha"`
}) (string, error) {
return client.GetShortAnswerQuery(args.Question, wolfram.Metric, 10)
}),
}
}
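A sketch of wiring one of the Wolfram functions into a request; the environment variable name and the pkg/tools import path are assumptions, and only one unit system should be registered per toolbox since both functions share the name "wolfram":

package tools_test

import (
	"os"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/tools"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

func wolframRequest() gollm.Request {
	// the App ID comes from the Wolfram|Alpha developer portal
	fns := tools.CreateWolframFunctions(os.Getenv("WOLFRAM_APP_ID"))

	return gollm.Request{
		Toolbox: gollm.NewToolBox(fns.Metric),
		Messages: []gollm.Message{
			{Role: gollm.RoleUser, Text: "How many liters are in a barrel of oil?"},
		},
	}
}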