Add core implementation for AI-powered question answering

Introduce multiple agents, tools, and utilities for processing, extracting, and answering user-provided questions using LLMs and external data. Key features include knowledge processing, question splitting, search term generation, and contextual knowledge handling.
This commit is contained in:
2025-03-21 11:10:48 -04:00
parent 20bcaefaa2
commit 693ac4e6a7
18 changed files with 1893 additions and 18 deletions

View File

@@ -0,0 +1,34 @@
package tools
import (
"github.com/Edw590/go-wolfram"
gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)
// WolframFunctions holds a pair of go-llm functions that answer questions
// via the Wolfram|Alpha short-answers API, one per unit system. Callers
// register whichever variant matches the user's locale.
type WolframFunctions struct {
	// Imperial answers queries with results expressed in imperial units.
	Imperial *gollm.Function
	// Metric answers queries with results expressed in metric units.
	Metric *gollm.Function
}
// CreateWolframFunctions builds the imperial- and metric-unit variants of a
// go-llm function that forwards a natural-language question to the
// Wolfram|Alpha short-answers API and returns the answer text.
//
// appID is the Wolfram|Alpha application ID used to authenticate requests.
// Both returned functions share a single underlying API client.
func CreateWolframFunctions(appID string) WolframFunctions {
	client := &wolfram.Client{AppID: appID}

	// newUnitFunction builds one go-llm function bound to the given unit
	// system; the two variants differ only in that constant.
	newUnitFunction := func(units wolfram.Unit) *gollm.Function {
		return gollm.NewFunction(
			"wolfram",
			"Query the Wolfram Alpha API",
			func(ctx *gollm.Context, args struct {
				Question string `description:"The question to ask Wolfram|Alpha"`
			}) (string, error) {
				// 10 is the API-side timeout in seconds for the
				// short-answers endpoint.
				return client.GetShortAnswerQuery(args.Question, units, 10)
			})
	}

	return WolframFunctions{
		Imperial: newUnitFunction(wolfram.Imperial),
		Metric:   newUnitFunction(wolfram.Metric),
	}
}