Introduce multiple agents, tools, and utilities for processing, extracting, and answering user-provided questions using LLMs and external data. Key features include knowledge processing, question splitting, search term generation, and contextual knowledge handling.
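For orientation, a typical run follows the usage comment in the source below; the flag values shown are simply the defaults declared in the flag definitions, and the question text is an arbitrary example:

    go run cmd/answer.go --model=openai/gpt-4o --search-provider=duckduckgo --cache-provider=file "who wrote The Mythical Man-Month?"

The provider API key is taken from --llm-key if set, otherwise from an environment variable such as OPENAI_API_KEY, which can also be supplied via the .env file loaded through --env-file.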
package main

import (
	"context"
	"fmt"
	"log/slog"
	"os"
	"strings"
	"time"

	"github.com/joho/godotenv"
	"github.com/urfave/cli"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/answer"
	"gitea.stevedudenhoeffer.com/steve/answer/pkg/cache"
	"gitea.stevedudenhoeffer.com/steve/answer/pkg/search"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)

// getKey returns key if it is non-empty, otherwise the value of the
// environment variable named by env.
func getKey(key string, env string) string {
	if key != "" {
		return key
	}

	return os.Getenv(env)
}

func main() {
	ctx := context.Background()
	// Usage: go run cmd/answer.go question...
	// - flags:
	// --model=[model string such as openai/gpt-4o, anthropic/claude..., google/gemini-1.5. Default: openai/gpt-4o]
	// --search-provider=[search provider string such as google, duckduckgo. Default: duckduckgo]
	// --cache-provider=[cache provider string such as memory, redis, file. Default: file]

	var app = cli.App{
		Name:        "answer",
		Usage:       "has an llm search the web for you to answer a question",
		Version:     "0.1",
		Description: "",

		Flags: []cli.Flag{
			&cli.StringFlag{
				Name:  "env-file",
				Value: ".env",
				Usage: "file to read environment variables from",
			},

			&cli.StringFlag{
				Name:  "model",
				Value: "openai/gpt-4o",
				Usage: "model to use for answering the question, syntax: provider/model such as openai/gpt-4o",
			},

			&cli.StringFlag{
				Name:  "llm-key",
				Value: "",
				Usage: "key for the llm model (if empty, will use env var of PROVIDER_API_KEY, such as OPENAI_API_KEY)",
			},

			&cli.StringFlag{
				Name:  "search-provider",
				Value: "duckduckgo",
				Usage: "search provider to use for searching the web",
			},

			&cli.StringFlag{
				Name:  "cache-provider",
				Value: "file",
				Usage: "cache provider to use for caching search results",
			},
		},

		Action: func(c *cli.Context) error {
			// if there is no question to answer, print usage
			if c.NArg() == 0 {
				return cli.ShowAppHelp(c)
			}

			if c.String("env-file") != "" {
				_ = godotenv.Load(c.String("env-file"))
			}

			var question answer.Question

			question.Question = strings.Join(c.Args(), " ")

			switch c.String("cache-provider") {
			case "memory":
				panic("not implemented")

			case "redis":
				panic("not implemented")

			case "file":
				question.Cache = &cache.Directory{
					BaseFolder: "cache",
					MaxLife:    1 * 24 * time.Hour,
				}

			default:
				panic("unknown cache provider")
			}

			if question.Cache == nil {
				panic("cache is nil")
			}
			// wrap the cache in a hasher
			question.Cache = cache.ShaWrapper{
				Cache: question.Cache,
			}

			switch c.String("search-provider") {
			case "google":
				question.Search = search.Google{Cache: question.Cache}

			case "duckduckgo":
				var err error
				question.Search, err = search.NewDuckDuckGo(question.Cache)
				if err != nil {
					panic("failed to create duckduckgo search: " + err.Error())
				}

			default:
				panic("unknown search provider")
			}

			var llm gollm.LLM

			model := c.String("model")

			a := strings.Split(model, "/")

			if len(a) != 2 {
				panic("invalid model, expected: provider/model (such as openai/gpt-4o)")
			}

			switch a[0] {
			case "openai":
				llm = gollm.OpenAI(getKey(c.String("llm-key"), "OPENAI_API_KEY"))

			case "anthropic":
				llm = gollm.Anthropic(getKey(c.String("llm-key"), "ANTHROPIC_API_KEY"))

			case "google":
				llm = gollm.Google(getKey(c.String("llm-key"), "GOOGLE_API_KEY"))

			default:
				panic("unknown model provider")
			}

			m, err := llm.ModelVersion(a[1])

			if err != nil {
				panic(err)
			}

			question.Model = m

			answers, err := answer.Answer(ctx, question)

			if err != nil {
				panic(err)
			}

			fmt.Printf("Question: %s\nAnswer: %q\n", question.Question, answers)

			return nil
		},
	}

	err := app.Run(os.Args)

	if err != nil {
		slog.Error("error running app", "error", err)
	}
}