answer/pkg/agents/shared/modeltracker.go
Steve Dudenhoeffer 693ac4e6a7 Add core implementation for AI-powered question answering
Introduce multiple agents, tools, and utilities for processing, extracting, and answering user-provided questions using LLMs and external data. Key features include knowledge processing, question splitting, search term generation, and contextual knowledge handling.
2025-03-21 11:10:48 -04:00

43 lines
1.0 KiB
Go

package shared
import (
"context"
"errors"
"sync/atomic"
gollm "gitea.stevedudenhoeffer.com/steve/go-llm"
)
// ModelTracker wraps a gollm.ChatCompletion and counts how many
// chat-completion calls are made through it, optionally enforcing a cap.
type ModelTracker struct {
parent gollm.ChatCompletion // underlying model that actually serves requests
maximum int64 // call cap; 0 (or negative) disables the limit
calls int64 // number of calls attempted so far; accessed atomically
}
// Compile-time check that *ModelTracker satisfies gollm.ChatCompletion.
var _ gollm.ChatCompletion = (*ModelTracker)(nil)
// NewModelTracker creates a new model tracker that will limit the number of calls to the parent.
// Set to 0 to disable the limit.
func NewModelTracker(parent gollm.ChatCompletion, maximum int64) *ModelTracker {
t := &ModelTracker{
parent: parent,
maximum: maximum,
}
return t
}
// ErrModelCapacity is returned by ChatComplete once the configured maximum
// number of calls has been exhausted. Compare with errors.Is.
var ErrModelCapacity = errors.New("maximum model capacity reached")
// ChatComplete forwards the request to the parent model, enforcing the call
// cap when one is configured. It returns ErrModelCapacity once more than
// `maximum` calls have been attempted; a maximum of 0 disables the limit.
//
// Note: rejected and failed attempts still count toward the total (the
// counter tracks attempts, not successes).
func (m *ModelTracker) ChatComplete(ctx context.Context, req gollm.Request) (gollm.Response, error) {
// AddInt64 returns the post-increment count, so the Nth call sees N.
// Using ">" (not ">=") lets exactly `maximum` calls through; the original
// ">=" was off by one and rejected the final permitted call (a maximum of
// 1 allowed zero calls).
if m.maximum > 0 && atomic.AddInt64(&m.calls, 1) > m.maximum {
return gollm.Response{}, ErrModelCapacity
}
return m.parent.ChatComplete(ctx, req)
}
// ResetCalls zeroes the call counter, allowing the tracker to be reused
// against the same cap.
func (m *ModelTracker) ResetCalls() {
var zero int64
atomic.StoreInt64(&m.calls, zero)
}
// GetCalls reports the number of calls made through this tracker so far.
func (m *ModelTracker) GetCalls() int64 {
n := atomic.LoadInt64(&m.calls)
return n
}