answer/pkg/agents/reader/agent.go

package reader

import (
	"context"
	"fmt"
	"net/url"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/shared"
	"gitea.stevedudenhoeffer.com/steve/answer/pkg/cache"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// Agent reads a single source URL and attempts to answer a question from its contents.
type Agent struct {
	// Model is the chat completion model to use.
	Model gollm.ChatCompletion

	// OnNewFunction is called when a new function is created.
	OnNewFunction func(ctx context.Context, funcName string, question string, parameter string) (any, error)

	// OnFunctionFinished is called when a function is finished.
	OnFunctionFinished func(ctx context.Context, funcName string, question string, parameter string, result string, err error, newFunctionResult any) error

	// Cache stores extracted pages so repeated reads of the same source can be reused.
	// If nil, Read falls back to a no-op cache.
	Cache cache.Cache

	// ContextualInformation is extra context made available when answering the question.
	ContextualInformation []string
}

// Read will try to read the source and return the answer if possible.
func (a Agent) Read(ctx context.Context, question string, source *url.URL) (shared.Knowledge, error) {
	if a.Cache == nil {
		a.Cache = cache.Nop{}
	}

	ar, err := extractArticle(ctx, a.Cache, source)
	if err != nil {
		return shared.Knowledge{}, err
	}

	if ar.Body == "" {
		return shared.Knowledge{}, fmt.Errorf("could not extract body from page")
	}

	return doesTextAnswerQuestion(ctx, question, ar.Body, source.String(), a)
}
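
// Example usage (a minimal sketch, not part of the package API): a caller that already
// holds a configured gollm.ChatCompletion model could wire it into an Agent and read a
// page as shown below. The helper name answerFromPage is hypothetical, and constructing
// the model itself depends on the go-llm library and is not shown here.
//
//	func answerFromPage(ctx context.Context, model gollm.ChatCompletion, question, page string) (shared.Knowledge, error) {
//		u, err := url.Parse(page)
//		if err != nil {
//			return shared.Knowledge{}, err
//		}
//
//		agent := reader.Agent{
//			Model: model,
//			// Cache is left nil here; Read falls back to cache.Nop{} internally.
//		}
//
//		return agent.Read(ctx, question, u)
//	}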