answer/cmd/answer.go
Steve Dudenhoeffer 7a43e3a5c8 Fix unmarshalling issues and adjust logging for debugging
Modify `FunctionCall` struct to handle arguments as strings. Add debugging logs to facilitate error tracing and improve JSON unmarshalling in various functions.
2024-11-11 00:23:00 -05:00
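The FunctionCall change described above lives in the go-llm dependency rather than in this file. A minimal sketch of what keeping the arguments as a raw string might look like, assuming OpenAI-style tool calls where `arguments` arrives as a JSON-encoded string (the type and helper below are illustrative, not the actual go-llm API):

	package llmsketch

	import (
		"encoding/json"
		"log/slog"
	)

	// FunctionCall keeps Arguments as the raw JSON string the provider returns,
	// instead of unmarshalling it into a nested structure up front.
	type FunctionCall struct {
		Name      string `json:"name"`
		Arguments string `json:"arguments"`
	}

	// Decode unmarshals the raw argument string into dst, logging a debug line
	// on failure so malformed tool-call payloads are easy to trace.
	func (fc FunctionCall) Decode(dst any) error {
		if err := json.Unmarshal([]byte(fc.Arguments), dst); err != nil {
			slog.Debug("unmarshal function call arguments", "name", fc.Name, "error", err)
			return err
		}
		return nil
	}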


package main

import (
	"answer/pkg/answer"
	"answer/pkg/cache"
	"answer/pkg/search"
	"context"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
	"github.com/joho/godotenv"
	"github.com/urfave/cli"
	"log/slog"
	"os"
	"strings"
	"time"
)
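
// getKey returns key if it is non-empty; otherwise it falls back to the value
// of the environment variable named env.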
func getKey(key string, env string) string {
	if key != "" {
		return key
	}

	return os.Getenv(env)
}
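
// main wires up the CLI: it parses the flags, selects the LLM, search, and
// cache providers, and logs the answers it gets back.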
func main() {
	ctx := context.Background()

	// Usage: go run cmd/answer.go question...
	// - flags:
	//   --model=[model string such as openai/gpt-4o, anthropic/claude..., google/gemini-1.5. Default: openai/gpt-4o]
	//   --search-provider=[search provider string such as google, duckduckgo. Default: google]
	//   --cache-provider=[cache provider string such as memory, redis, file. Default: memory]
	var app = cli.App{
		Name:        "answer",
		Usage:       "has an llm search the web for you to answer a question",
		Version:     "0.1",
		Description: "",
		Flags: []cli.Flag{
			&cli.StringFlag{
				Name:  "env-file",
				Value: ".env",
				Usage: "file to read environment variables from",
			},
			&cli.StringFlag{
				Name:  "model",
				Value: "openai/gpt-4o",
				Usage: "model to use for answering the question, syntax: provider/model such as openai/gpt-4o",
			},
			&cli.StringFlag{
				Name:  "llm-key",
				Value: "",
				Usage: "key for the llm model (if empty, will use env var of PROVIDER_API_KEY, such as OPENAI_API_KEY)",
			},
			&cli.StringFlag{
				Name:  "search-provider",
				Value: "google",
				Usage: "search provider to use for searching the web",
			},
			&cli.StringFlag{
				Name:  "cache-provider",
				Value: "memory",
				Usage: "cache provider to use for caching search results",
			},
		},
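		// Action runs after flag parsing: it resolves the providers, asks the
		// model, and logs each answer.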
		Action: func(c *cli.Context) error {
			// if there is no question to answer, print usage
			if c.NArg() == 0 {
				return cli.ShowAppHelp(c)
			}

			if c.String("env-file") != "" {
				_ = godotenv.Load(c.String("env-file"))
			}
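
			// the question is everything left on the command line after the flags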
			var question answer.Question
			question.Question = strings.Join(c.Args(), " ")
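
			// pick the cache backend; only the file-backed cache is implemented so far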
switch c.String("cache-provider") {
case "memory":
panic("not implemented")
case "redis":
panic("not implemented")
case "file":
question.Cache = &cache.Directory{
BaseFolder: "cache",
MaxLife: 1 * 24 * time.Hour,
}
default:
panic("unknown cache provider")
}
if question.Cache == nil {
panic("cache is nil")
}
// wrap the cache in a hasher
question.Cache = cache.ShaWrapper{
Cache: question.Cache,
}
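
			// pick the search backend used to gather pages for the answer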
switch c.String("search-provider") {
case "google":
question.Search = search.Google{Cache: question.Cache}
default:
panic("unknown search provider")
}
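
			// the model flag has the form provider/model: the provider selects the
			// client and the second part selects the model version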
			var llm gollm.LLM

			model := c.String("model")
			a := strings.Split(model, "/")
			if len(a) != 2 {
				panic("invalid model, expected: provider/model (such as openai/gpt-4o)")
			}

			switch a[0] {
			case "openai":
				llm = gollm.OpenAI(getKey(c.String("llm-key"), "OPENAI_API_KEY"))
			case "anthropic":
				llm = gollm.Anthropic(getKey(c.String("llm-key"), "ANTHROPIC_API_KEY"))
			case "google":
				llm = gollm.Google(getKey(c.String("llm-key"), "GOOGLE_API_KEY"))
			default:
				panic("unknown model provider")
			}
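
			// resolve the concrete model version from the provider client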
			m, err := llm.ModelVersion(a[1])
			if err != nil {
				panic(err)
			}
			question.Model = m
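
			// ask the model to answer the question using the configured search and cache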
			answers, err := answer.Answer(ctx, question)
			if err != nil {
				panic(err)
			}

			for i, a := range answers {
				slog.Info("answer", "index", i, "answer", a)
			}

			return nil
		},
	}

	err := app.Run(os.Args)
	if err != nil {
		slog.Error("error running app", "error", err)
	}
}