answer/cmd/agent/cmd.go
Steve Dudenhoeffer 693ac4e6a7 Add core implementation for AI-powered question answering
Introduce multiple agents, tools, and utilities for processing, extracting, and answering user-provided questions using LLMs and external data. Key features include knowledge processing, question splitting, search term generation, and contextual knowledge handling.
2025-03-21 11:10:48 -04:00

151 lines
3.6 KiB
Go

package main
import (
"context"
"fmt"
"log/slog"
"os"
"strings"
knowledge2 "gitea.stevedudenhoeffer.com/steve/answer/pkg/agents"
gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/shared"
"gitea.stevedudenhoeffer.com/steve/go-extractor/sites/duckduckgo"
"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/searcher"
"github.com/joho/godotenv"
"github.com/urfave/cli"
)
func getKey(key string, env string) string {
if key != "" {
return key
}
return os.Getenv(env)
}
// main wires up the CLI: it parses flags, selects an LLM provider/model,
// runs the web-search agent over the user's question, and prints a
// summarized answer built from the gathered knowledge.
func main() {
	ctx := context.Background()

	// Usage: go run cmd/answer.go question...
	// - flags:
	//   --model=[model string such as openai/gpt-4o, anthropic/claude..., google/gemini-1.5. Default: openai/gpt-4o]
	//   --search-provider=[search provider string such as google, duckduckgo. Default: google]
	//   --cache-provider=[cache provider string such as memory, redis, file. Default: memory]
	var app = cli.App{
		Name:        "answer",
		Usage:       "has an llm search the web for you to answer a question",
		Version:     "0.1",
		Description: "",
		Flags: []cli.Flag{
			&cli.StringFlag{
				Name:  "env-file",
				Value: ".env",
				Usage: "file to read environment variables from",
			},
			&cli.StringFlag{
				Name:  "model",
				Value: "openai/gpt-4o-mini",
				Usage: "model to use for answering the question, syntax: provider/model such as openai/gpt-4o",
			},
			&cli.StringFlag{
				Name:  "llm-key",
				Value: "",
				Usage: "key for the llm model (if empty, will use env var of PROVIDER_API_KEY, such as OPENAI_API_KEY)",
			},
		},
		Action: func(c *cli.Context) error {
			// With no question on the command line there is nothing to do;
			// show the help text instead.
			if c.NArg() == 0 {
				return cli.ShowAppHelp(c)
			}

			// Best-effort load of the env file; a missing file is not fatal
			// since keys may already be present in the environment.
			if c.String("env-file") != "" {
				_ = godotenv.Load(c.String("env-file"))
			}

			// Model is specified as "provider/model", e.g. "openai/gpt-4o".
			// Return an error instead of panicking so the cli framework can
			// report it cleanly.
			model := c.String("model")
			provider, version, ok := strings.Cut(model, "/")
			if !ok {
				return fmt.Errorf("invalid model %q, expected: provider/model (such as openai/gpt-4o)", model)
			}

			var llm gollm.LLM
			switch provider {
			case "openai":
				llm = gollm.OpenAI(getKey(c.String("llm-key"), "OPENAI_API_KEY"))
			case "anthropic":
				// Fix: the fallback env var was misspelled "ANTHROPI_API_KEY",
				// so the documented ANTHROPIC_API_KEY variable was never read.
				llm = gollm.Anthropic(getKey(c.String("llm-key"), "ANTHROPIC_API_KEY"))
			case "google":
				llm = gollm.Google(getKey(c.String("llm-key"), "GOOGLE_API_KEY"))
			default:
				return fmt.Errorf("unknown model provider %q", provider)
			}

			m, err := llm.ModelVersion(version)
			if err != nil {
				return fmt.Errorf("resolving model version %q: %w", version, err)
			}

			// Everything after the flags is treated as the question text.
			question := strings.Join(c.Args(), " ")

			// The searcher agent drives the web search; the callbacks here
			// are purely informational logging hooks.
			search := searcher.Agent{
				Model: m,
				OnGoingToNextPage: func(ctx context.Context) error {
					slog.Info("going to next page")
					return nil
				},
				OnReadingSearchResult: func(ctx context.Context, sr duckduckgo.Result) (any, error) {
					slog.Info("reading search result", "url", sr.URL, "title", sr.Title, "description", sr.Description)
					return nil, nil
				},
				OnFinishedReadingSearchResult: func(ctx context.Context, sr duckduckgo.Result, newKnowledge []string, err error, onReadingResult any) error {
					slog.Info("finished reading search result", "err", err, "newKnowledge", newKnowledge)
					return nil
				},
				OnDone: func(ctx context.Context, knowledge shared.Knowledge) error {
					slog.Info("done", "knowledge", knowledge)
					return nil
				},
				MaxReads:       20,
				MaxNextResults: 10,
			}

			knowledge, err := search.Search(ctx, question, question)
			if err != nil {
				return fmt.Errorf("searching: %w", err)
			}
			slog.Info("knowledge", "knowledge", knowledge)

			// Condense the raw knowledge into the final printed answer.
			// Fix: the error from ProcessKnowledge was previously ignored.
			processor := knowledge2.KnowledgeProcessor{Model: m}
			sum, err := processor.ProcessKnowledge(ctx, knowledge)
			if err != nil {
				return fmt.Errorf("processing knowledge: %w", err)
			}
			fmt.Println(sum)
			return nil
		},
	}

	if err := app.Run(os.Args); err != nil {
		// Fix: slog takes alternating key/value pairs; the original passed
		// err as a bare positional argument, yielding a malformed record.
		// Also exit nonzero so callers/scripts can detect failure.
		slog.Error("error running app", "err", err)
		os.Exit(1)
	}
}