package main

import (
	"context"
	"fmt"
	"log/slog"
	"os"
	"strings"

	"gitea.stevedudenhoeffer.com/steve/answer/pkg/agents"
	gollm "gitea.stevedudenhoeffer.com/steve/go-llm"

	"github.com/joho/godotenv"
	"github.com/urfave/cli"
)

// getKey returns key if it is non-empty, otherwise it falls back to the value
// of the environment variable named by env.
func getKey(key string, env string) string {
	if key != "" {
		return key
	}

	return os.Getenv(env)
}

func main() {
	// Usage: go run cmd/answer.go [flags] question...
	// - flags:
	//   --model=[model string such as openai/gpt-4o, anthropic/claude..., google/gemini-1.5.  Default: openai/gpt-4o-mini]
	//   --llm-key=[API key for the llm provider.  Default: the provider's env var, such as OPENAI_API_KEY]
	//   --env-file=[file to read environment variables from.  Default: .env]

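	// Example invocation (illustrative question; assumes an OpenAI key is available
	// via OPENAI_API_KEY in the environment or the .env file):
	//   go run cmd/answer.go --model=openai/gpt-4o-mini "how many moons does jupiter have?"
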
	var app = cli.App{
		Name:        "answer",
		Usage:       "answers a question using an llm",
		Version:     "0.1",
		Description: "",

		Flags: []cli.Flag{
			&cli.StringFlag{
				Name:  "env-file",
				Value: ".env",
				Usage: "file to read environment variables from",
			},

			&cli.StringFlag{
				Name:  "model",
				Value: "openai/gpt-4o-mini",
				Usage: "model to use for answering the question, syntax: provider/model such as openai/gpt-4o",
			},

			&cli.StringFlag{
				Name:  "llm-key",
				Value: "",
				Usage: "key for the llm model (if empty, will use env var of PROVIDER_API_KEY, such as OPENAI_API_KEY)",
			},
		},

		Action: func(ctx *cli.Context) error {
			// if there is no question to answer, print usage
			if ctx.NArg() == 0 {
				return cli.ShowAppHelp(ctx)
			}

			if ctx.String("env-file") != "" {
				_ = godotenv.Load(ctx.String("env-file"))
			}

			var llm gollm.LLM

			model := ctx.String("model")

			a := strings.Split(model, "/")

			if len(a) != 2 {
				return fmt.Errorf("invalid model %q, expected: provider/model (such as openai/gpt-4o)", model)
			}

			switch a[0] {
			case "openai":
				llm = gollm.OpenAI(getKey(ctx.String("llm-key"), "OPENAI_API_KEY"))

			case "anthropic":
				llm = gollm.Anthropic(getKey(ctx.String("llm-key"), "ANTHROPIC_API_KEY"))

			case "google":
				llm = gollm.Google(getKey(ctx.String("llm-key"), "GOOGLE_API_KEY"))

			default:
				return fmt.Errorf("unknown model provider: %q", a[0])
			}

			m, err := llm.ModelVersion(a[1])
			if err != nil {
				return fmt.Errorf("failed to resolve model %q: %w", a[1], err)
			}
			question := strings.Join(ctx.Args(), " ")

			ag := agents.NewAgent(m, nil)

			// ask the agent to break the question down into steps
			steps, err := ag.SplitQuestion(context.Background(), question)
			if err != nil {
				return fmt.Errorf("failed to split question: %w", err)
			}

			fmt.Println("Input question:", question)
			fmt.Println("Steps:")
			for i, s := range steps {
				fmt.Printf("  - Step %d: %v\n", i+1, s)
			}

			return nil
		},
	}

	err := app.Run(os.Args)

	if err != nil {
		slog.Error("error running app", "error", err)
		os.Exit(1)
	}
}