Removed redundant fields and callbacks in the search agent while introducing concurrent processing for reading search results. Updated logic to enhance readability and modularity, including capped reads and streamlined interaction with search results. Adjusted dependencies and related usage to align with the refactored design.
137 lines
3.0 KiB
Go
package main
|
|
|
|
import (
|
|
"context"
|
|
"fmt"
|
|
"log/slog"
|
|
"os"
|
|
"strings"
|
|
|
|
knowledge2 "gitea.stevedudenhoeffer.com/steve/answer/pkg/agents"
|
|
|
|
gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
|
|
|
|
"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/shared"
|
|
|
|
"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents/searcher"
|
|
|
|
"github.com/joho/godotenv"
|
|
"github.com/urfave/cli"
|
|
)
|
|
|
|
// getKey returns key when it is non-empty; otherwise it falls back to
// the value of the environment variable named env (which may itself be
// empty if unset).
func getKey(key string, env string) string {
	if key == "" {
		return os.Getenv(env)
	}
	return key
}
|
|
|
|
func main() {
|
|
ctx := context.Background()
|
|
// Usage: go run cmd/answer.go question...
|
|
// - flags:
|
|
// --model=[model string such as openai/gpt-4o, anthropic/claude..., google/gemini-1.5. Default: openai/gpt-4o]
|
|
// --search-provider=[search provider string such as google, duckduckgo. Default: google]
|
|
// --cache-provider=[cache provider string such as memory, redis, file. Default: memory]
|
|
|
|
var app = cli.App{
|
|
Name: "answer",
|
|
Usage: "has an llm search the web for you to answer a question",
|
|
Version: "0.1",
|
|
Description: "",
|
|
|
|
Flags: []cli.Flag{
|
|
&cli.StringFlag{
|
|
Name: "env-file",
|
|
Value: ".env",
|
|
Usage: "file to read environment variables from",
|
|
},
|
|
|
|
&cli.StringFlag{
|
|
Name: "model",
|
|
Value: "openai/gpt-4o-mini",
|
|
Usage: "model to use for answering the question, syntax: provider/model such as openai/gpt-4o",
|
|
},
|
|
|
|
&cli.StringFlag{
|
|
Name: "llm-key",
|
|
Value: "",
|
|
Usage: "key for the llm model (if empty, will use env var of PROVIDER_API_KEY, such as OPENAI_API_KEY)",
|
|
},
|
|
},
|
|
|
|
Action: func(c *cli.Context) error {
|
|
// if there is no question to answer, print usage
|
|
if c.NArg() == 0 {
|
|
return cli.ShowAppHelp(c)
|
|
}
|
|
|
|
if c.String("env-file") != "" {
|
|
_ = godotenv.Load(c.String("env-file"))
|
|
}
|
|
|
|
var llm gollm.LLM
|
|
|
|
model := c.String("model")
|
|
|
|
a := strings.Split(model, "/")
|
|
|
|
if len(a) != 2 {
|
|
panic("invalid model, expected: provider/model (such as openai/gpt-4o)")
|
|
}
|
|
|
|
switch a[0] {
|
|
case "openai":
|
|
llm = gollm.OpenAI(getKey(c.String("llm-key"), "OPENAI_API_KEY"))
|
|
|
|
case "anthropic":
|
|
llm = gollm.Anthropic(getKey(c.String("llm-key"), "ANTHROPI_API_KEY"))
|
|
|
|
case "google":
|
|
llm = gollm.Google(getKey(c.String("llm-key"), "GOOGLE_API_KEY"))
|
|
|
|
default:
|
|
panic("unknown model provider")
|
|
}
|
|
|
|
m, err := llm.ModelVersion(a[1])
|
|
|
|
if err != nil {
|
|
panic(err)
|
|
}
|
|
question := strings.Join(c.Args(), " ")
|
|
|
|
search := searcher.Agent{
|
|
Model: m,
|
|
|
|
OnDone: func(ctx context.Context, knowledge shared.Knowledge) error {
|
|
slog.Info("done", "knowledge", knowledge)
|
|
return nil
|
|
},
|
|
MaxReads: 20,
|
|
}
|
|
|
|
processor := knowledge2.KnowledgeProcessor{Model: m}
|
|
knowledge, err := search.Search(ctx, question, question)
|
|
|
|
if err != nil {
|
|
panic(err)
|
|
}
|
|
|
|
slog.Info("knowledge", "knowledge", knowledge)
|
|
|
|
sum, err := processor.Process(ctx, knowledge)
|
|
|
|
fmt.Println(sum)
|
|
return nil
|
|
},
|
|
}
|
|
|
|
err := app.Run(os.Args)
|
|
|
|
if err != nil {
|
|
slog.Error("Error: ", err)
|
|
}
|
|
}
|