Add core implementation for AI-powered question answering
Introduce multiple agents, tools, and utilities for processing, extracting, and answering user-provided questions using LLMs and external data. Key features include knowledge processing, question splitting, search term generation, and contextual knowledge handling.
pkg/agents/tools/wolfram.go · new file · 34 lines added
@@ -0,0 +1,34 @@
package tools

import (
	"github.com/Edw590/go-wolfram"

	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// WolframFunctions holds two go-llm function definitions backed by the
// Wolfram|Alpha short-answers API: Imperial answers in imperial units,
// Metric answers in metric units.
type WolframFunctions struct {
	Imperial *gollm.Function
	Metric   *gollm.Function
}

// CreateWolframFunctions creates a Wolfram|Alpha client for the given app ID
// and wraps it in the two unit-specific gollm functions.
func CreateWolframFunctions(appId string) WolframFunctions {
	client := &wolfram.Client{AppID: appId}
	return WolframFunctions{
		Imperial: gollm.NewFunction(
			"wolfram",
			"Query the Wolfram Alpha API",
			func(ctx *gollm.Context, args struct {
				Question string `description:"The question to ask Wolfram|Alpha"`
			}) (string, error) {
				return client.GetShortAnswerQuery(args.Question, wolfram.Imperial, 10)
			}),
		Metric: gollm.NewFunction(
			"wolfram",
			"Query the Wolfram Alpha API",
			func(ctx *gollm.Context, args struct {
				Question string `description:"The question to ask Wolfram|Alpha"`
			}) (string, error) {
				return client.GetShortAnswerQuery(args.Question, wolfram.Metric, 10)
			}),
	}
}
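A minimal usage sketch follows, showing how the returned functions might be consumed. The import path, the environment variable, and the step that attaches the function to a go-llm request are assumptions for illustration, not part of this commit; presumably an agent registers only one of the two functions (Imperial or Metric) per request, since both share the name "wolfram".

package main

import (
	"fmt"
	"os"

	// Assumed import path; replace with the repository's actual module path.
	"example.com/answer/pkg/agents/tools"
)

func main() {
	// Hypothetical: read the Wolfram|Alpha app ID from the environment.
	funcs := tools.CreateWolframFunctions(os.Getenv("WOLFRAM_APP_ID"))

	// Choose one unit system; both functions wrap the same short-answers
	// query with a timeout value of 10.
	fn := funcs.Metric

	// fn is a *gollm.Function; registering it with an LLM request is done
	// through the go-llm API and is not shown in this diff.
	fmt.Printf("wolfram tool ready: %T\n", fn)
}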