Files
gooseek/backend/internal/llm/client.go
home 06fe57c765 feat: Go backend, enhanced search, new widgets, Docker deploy
Major changes:
- Add Go backend (backend/) with microservices architecture
- Enhanced master-agents-svc: reranker, content-classifier, stealth-crawler,
  proxy-manager, media-search, fastClassifier, language detection
- New web-svc widgets: KnowledgeCard, ProductCard, ProfileCard, VideoCard,
  UnifiedCard, CardGallery, InlineImageGallery, SourcesPanel, RelatedQuestions
- Improved discover-svc with discover-db integration
- Docker deployment improvements (Caddyfile, vendor.sh, BUILD.md)
- Library-svc: project_id schema migration
- Remove deprecated finance-svc and travel-svc
- Localization improvements across services

Made-with: Cursor
2026-02-27 04:15:32 +03:00

146 lines
3.3 KiB
Go

package llm
import (
	"context"
	"fmt"
	"io"
	"strings"
)
// Role identifies the author of a chat message.
type Role string

// Message roles recognized by the chat schema.
const (
	RoleSystem    Role = "system"
	RoleUser      Role = "user"
	RoleAssistant Role = "assistant"
	RoleTool      Role = "tool" // carries the result of a tool call back to the model
)
// Message is a single entry in a chat conversation sent to or received
// from an LLM provider.
type Message struct {
	Role    Role   `json:"role"`
	Content string `json:"content"`
	// ToolCalls holds calls the assistant asked to execute.
	ToolCalls []ToolCall `json:"tool_calls,omitempty"`
	// ToolCallID links a RoleTool message back to the call it answers —
	// TODO(review): confirm against the provider implementations.
	ToolCallID string `json:"tool_call_id,omitempty"`
	Name       string `json:"name,omitempty"`
	// Images carries optional multimodal image attachments.
	Images []ImageContent `json:"images,omitempty"`
}
// ImageContent is an image attached to a Message, supplied either by
// URL or as inline data.
type ImageContent struct {
	Type string `json:"type"`
	URL  string `json:"url,omitempty"`  // remote image location
	Data string `json:"data,omitempty"` // inline image payload
	// NOTE(review): tag is camelCase ("isBase64") while sibling fields use
	// snake_case — confirm the wire format before normalizing.
	IsBase64 bool `json:"isBase64,omitempty"`
}
// ToolCall is a single tool invocation requested by the assistant.
type ToolCall struct {
	ID        string                 `json:"id"`
	Name      string                 `json:"name"`
	Arguments map[string]interface{} `json:"arguments"` // decoded tool arguments, schema defined by the Tool
}
// Tool describes a function the model may call during generation.
type Tool struct {
	Name        string      `json:"name"`
	Description string      `json:"description"`
	Schema      interface{} `json:"schema"` // parameter schema — presumably JSON Schema; verify against providers
}
// StreamOptions holds provider-agnostic sampling parameters for a
// generation request. Zero values are omitted from the encoded payload.
type StreamOptions struct {
	MaxTokens   int      `json:"max_tokens,omitempty"`
	Temperature float64  `json:"temperature,omitempty"`
	TopP        float64  `json:"top_p,omitempty"`
	StopWords   []string `json:"stop,omitempty"`
}
// StreamChunk is one incremental piece of a streamed model response:
// text, partial tool calls, and/or a terminal finish reason.
type StreamChunk struct {
	ContentChunk  string     `json:"content_chunk,omitempty"`
	ToolCallChunk []ToolCall `json:"tool_call_chunk,omitempty"`
	FinishReason  string     `json:"finish_reason,omitempty"` // non-empty on the final chunk — TODO confirm
}
// StreamRequest is the provider-agnostic payload for a single chat
// completion call, used by both StreamText and GenerateText.
type StreamRequest struct {
	Messages []Message `json:"messages"`
	Tools    []Tool    `json:"tools,omitempty"`
	// omitempty removed: encoding/json never treats a non-pointer struct as
	// empty, so the old tag was a silent no-op; the encoding is unchanged.
	Options StreamOptions `json:"options"`
}
// Client is the common interface implemented by every LLM provider
// backend created through NewClient.
//
// NOTE(review): the Get-prefixed getters break Go naming convention
// (ProviderID() would be idiomatic), but renaming would break callers.
type Client interface {
	// StreamText starts a streaming completion and returns a channel of
	// incremental chunks.
	StreamText(ctx context.Context, req StreamRequest) (<-chan StreamChunk, error)
	// GenerateText runs a completion to the end and returns the full text.
	GenerateText(ctx context.Context, req StreamRequest) (string, error)
	GetProviderID() string
	GetModelKey() string
}
// ProviderConfig selects and configures a concrete provider backend for
// NewClient.
type ProviderConfig struct {
	ProviderID string `json:"providerId"` // e.g. "openai", "anthropic", "gemini", "timeweb"
	ModelKey   string `json:"key"`
	APIKey     string `json:"apiKey,omitempty"`
	BaseURL    string `json:"baseUrl,omitempty"`
	// AgentAccessID is used by the timeweb provider — see NewClient.
	AgentAccessID string `json:"agentAccessId,omitempty"`
}
// NewClient constructs the provider-specific Client for cfg. Supported
// provider IDs are "openai", "anthropic", "gemini"/"google", and
// "timeweb"; any other value yields an error.
func NewClient(cfg ProviderConfig) (Client, error) {
	switch cfg.ProviderID {
	case "openai":
		return NewOpenAIClient(cfg)
	case "anthropic":
		return NewAnthropicClient(cfg)
	case "gemini", "google":
		return NewGeminiClient(cfg)
	case "timeweb":
		// Timeweb uses its own config shape rather than ProviderConfig.
		twCfg := TimewebConfig{
			BaseURL:       cfg.BaseURL,
			AgentAccessID: cfg.AgentAccessID,
			APIKey:        cfg.APIKey,
			ModelKey:      cfg.ModelKey,
			ProxySource:   "gooseek",
		}
		return NewTimewebClient(twCfg)
	}
	return nil, fmt.Errorf("unknown provider: %s", cfg.ProviderID)
}
// baseClient carries the provider/model identity shared by concrete
// client implementations; embedding it satisfies the identity half of
// the Client interface.
type baseClient struct {
	providerID string
	modelKey   string
}

// GetProviderID returns the provider identifier (e.g. "openai").
func (c *baseClient) GetProviderID() string {
	return c.providerID
}

// GetModelKey returns the provider-specific model key.
func (c *baseClient) GetModelKey() string {
	return c.modelKey
}
// readAllChunks drains ch to completion and concatenates every content
// chunk into one string. It blocks until the channel is closed.
func readAllChunks(ch <-chan StreamChunk) string {
	// strings.Builder avoids the quadratic cost of += concatenation in a loop.
	var b strings.Builder
	for chunk := range ch {
		b.WriteString(chunk.ContentChunk)
	}
	return b.String()
}
// streamReader adapts a StreamChunk channel to io.Reader, buffering any
// chunk bytes that did not fit into the caller's slice.
type streamReader struct {
	ch     <-chan StreamChunk
	buffer []byte // unread remainder of the most recent chunk
}

// Read implements io.Reader over the chunk channel. It returns io.EOF
// once the channel is closed and all buffered bytes have been consumed.
func (r *streamReader) Read(p []byte) (n int, err error) {
	// Serve leftover bytes from a previous oversized chunk first.
	if len(r.buffer) > 0 {
		n = copy(p, r.buffer)
		r.buffer = r.buffer[n:]
		return n, nil
	}
	// Loop past empty chunks: io.Reader implementations are discouraged
	// from returning (0, nil), which callers may treat as a busy no-op.
	for {
		chunk, ok := <-r.ch
		if !ok {
			return 0, io.EOF
		}
		data := []byte(chunk.ContentChunk)
		if len(data) == 0 {
			continue
		}
		n = copy(p, data)
		if n < len(data) {
			// Stash what didn't fit for the next Read call.
			r.buffer = data[n:]
		}
		return n, nil
	}
}