Files
gooseek/backend/cmd/agent-svc/main.go
home 06fe57c765 feat: Go backend, enhanced search, new widgets, Docker deploy
Major changes:
- Add Go backend (backend/) with microservices architecture
- Enhanced master-agents-svc: reranker, content-classifier, stealth-crawler,
  proxy-manager, media-search, fastClassifier, language detection
- New web-svc widgets: KnowledgeCard, ProductCard, ProfileCard, VideoCard,
  UnifiedCard, CardGallery, InlineImageGallery, SourcesPanel, RelatedQuestions
- Improved discover-svc with discover-db integration
- Docker deployment improvements (Caddyfile, vendor.sh, BUILD.md)
- Library-svc: project_id schema migration
- Remove deprecated finance-svc and travel-svc
- Localization improvements across services

Made-with: Cursor
2026-02-27 04:15:32 +03:00

210 lines
5.3 KiB
Go

package main
import (
"bufio"
"context"
"fmt"
"log"
"os"
"time"
"github.com/gofiber/fiber/v2"
"github.com/gofiber/fiber/v2/middleware/cors"
"github.com/gofiber/fiber/v2/middleware/logger"
"github.com/gooseek/backend/internal/agent"
"github.com/gooseek/backend/internal/llm"
"github.com/gooseek/backend/internal/search"
"github.com/gooseek/backend/internal/session"
"github.com/gooseek/backend/pkg/config"
"github.com/gooseek/backend/pkg/ndjson"
)
// SearchRequest is the JSON payload accepted by POST /api/v1/agents/search.
type SearchRequest struct {
	// Message carries the user's turn; Content is required (validated in main).
	Message struct {
		MessageID string `json:"messageId"`
		ChatID    string `json:"chatId"`
		Content   string `json:"content"`
	} `json:"message"`
	// OptimizationMode selects the orchestrator mode: "speed" or "quality";
	// any other value falls back to the balanced default (see main's switch).
	OptimizationMode string   `json:"optimizationMode"`
	Sources          []string `json:"sources"`
	// History is a list of [role, content] pairs; role "ai"/"assistant"
	// maps to the assistant role, anything else to the user role.
	History [][]string `json:"history"`
	Files   []string   `json:"files"`
	// ChatModel picks the LLM provider/model; defaults are applied in main
	// when ProviderID is empty.
	ChatModel          ChatModel `json:"chatModel"`
	SystemInstructions string    `json:"systemInstructions"`
	Locale             string    `json:"locale"`
	AnswerMode         string    `json:"answerMode"`
	// ResponsePrefs optionally tunes the answer's format, length, and tone;
	// nil means no preference.
	ResponsePrefs *struct {
		Format string `json:"format"`
		Length string `json:"length"`
		Tone   string `json:"tone"`
	} `json:"responsePrefs"`
	LearningMode bool `json:"learningMode"`
}
// ChatModel identifies the LLM provider and model key requested by the client.
type ChatModel struct {
	ProviderID string `json:"providerId"`
	Key        string `json:"key"`
}
// main wires up the agent-svc HTTP server: it loads configuration, builds
// the SearXNG search client, and exposes a health check plus the streaming
// NDJSON search endpoint.
func main() {
	cfg, err := config.Load()
	if err != nil {
		log.Fatal("Failed to load config:", err)
	}
	searchClient := search.NewSearXNGClient(cfg)
	app := fiber.New(fiber.Config{
		StreamRequestBody: true,
		BodyLimit:         10 * 1024 * 1024, // 10 MiB max request body
		ReadTimeout:       time.Minute,
		// Long write/idle timeouts: responses are streamed while the
		// orchestrator runs (itself capped at 5 minutes below).
		WriteTimeout: 5 * time.Minute,
		IdleTimeout:  2 * time.Minute,
	})
	app.Use(logger.New())
	app.Use(cors.New())
	app.Get("/health", func(c *fiber.Ctx) error {
		return c.JSON(fiber.Map{"status": "ok"})
	})
	app.Post("/api/v1/agents/search", func(c *fiber.Ctx) error {
		var req SearchRequest
		if err := c.BodyParser(&req); err != nil {
			return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"})
		}
		if req.Message.Content == "" {
			return c.Status(400).JSON(fiber.Map{"error": "Message content required"})
		}
		// Default the provider/model when the client did not choose one;
		// Timeweb is preferred whenever its API key is configured.
		providerID := req.ChatModel.ProviderID
		modelKey := req.ChatModel.Key
		if providerID == "" && cfg.TimewebAPIKey != "" {
			providerID = "timeweb"
			modelKey = "gpt-4o"
		} else if providerID == "" {
			providerID = "openai"
			modelKey = "gpt-4o-mini"
		}
		llmClient, err := llm.NewClient(llm.ProviderConfig{
			ProviderID:    providerID,
			ModelKey:      modelKey,
			APIKey:        getAPIKey(cfg, providerID),
			BaseURL:       cfg.TimewebAPIBaseURL,
			AgentAccessID: cfg.TimewebAgentAccessID,
		})
		if err != nil {
			return c.Status(500).JSON(fiber.Map{"error": "Failed to create LLM client: " + err.Error()})
		}
		// Convert the wire history ([role, content] pairs) into LLM
		// messages; malformed pairs with fewer than two elements are
		// silently skipped.
		chatHistory := make([]llm.Message, 0, len(req.History))
		for _, h := range req.History {
			if len(h) >= 2 {
				role := llm.RoleUser
				if h[0] == "ai" || h[0] == "assistant" {
					role = llm.RoleAssistant
				}
				chatHistory = append(chatHistory, llm.Message{
					Role:    role,
					Content: h[1],
				})
			}
		}
		// Unrecognized optimization modes fall back to balanced.
		mode := agent.ModeBalanced
		switch req.OptimizationMode {
		case "speed":
			mode = agent.ModeSpeed
		case "quality":
			mode = agent.ModeQuality
		}
		var responsePrefs *agent.ResponsePrefs
		if req.ResponsePrefs != nil {
			responsePrefs = &agent.ResponsePrefs{
				Format: req.ResponsePrefs.Format,
				Length: req.ResponsePrefs.Length,
				Tone:   req.ResponsePrefs.Tone,
			}
		}
		input := agent.OrchestratorInput{
			ChatHistory: chatHistory,
			FollowUp:    req.Message.Content,
			Config: agent.OrchestratorConfig{
				LLM:                llmClient,
				SearchClient:       searchClient,
				Mode:               mode,
				Sources:            req.Sources,
				FileIDs:            req.Files,
				SystemInstructions: req.SystemInstructions,
				Locale:             req.Locale,
				AnswerMode:         req.AnswerMode,
				ResponsePrefs:      responsePrefs,
				LearningMode:       req.LearningMode,
				DiscoverSvcURL:     cfg.DiscoverSvcURL,
				Crawl4AIURL:        cfg.Crawl4AIURL,
			},
		}
		sess := session.NewSession()
		// Stream orchestrator events to the client as NDJSON chunks.
		c.Set("Content-Type", "application/x-ndjson")
		c.Set("Cache-Control", "no-cache")
		c.Set("Transfer-Encoding", "chunked")
		c.Context().SetBodyStreamWriter(func(w *bufio.Writer) {
			// Hard 5-minute cap on a single orchestration run.
			// NOTE(review): built from context.Background(), so client
			// disconnects do not cancel the run — presumably deliberate
			// because the fiber request context is released before the
			// fasthttp stream writer executes; confirm.
			ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
			defer cancel()
			writer := ndjson.NewWriter(w)
			// Forward each data event to the client as it arrives;
			// flushing per event keeps the stream incremental.
			unsubscribe := sess.Subscribe(func(eventType session.EventType, data interface{}) {
				if eventType == session.EventData {
					if dataMap, ok := data.(map[string]interface{}); ok {
						writer.Write(dataMap)
						w.Flush()
					}
				}
			})
			defer unsubscribe()
			err := agent.RunOrchestrator(ctx, sess, input)
			if err != nil {
				// Surface the failure as a terminal NDJSON error record.
				ndjson.WriteError(writer, err)
			}
		})
		return nil
	})
	port := cfg.AgentSvcPort
	log.Printf("agent-svc listening on :%d", port)
	log.Fatal(app.Listen(fmt.Sprintf(":%d", port)))
}
// getAPIKey resolves the configured API key for the given provider ID.
// Unknown providers (and known ones with no key set) yield the empty string.
func getAPIKey(cfg *config.Config, providerID string) string {
	switch providerID {
	case "openai":
		return cfg.OpenAIAPIKey
	case "anthropic":
		return cfg.AnthropicAPIKey
	case "gemini", "google":
		return cfg.GeminiAPIKey
	case "timeweb":
		return cfg.TimewebAPIKey
	}
	return ""
}
// init guarantees a PORT value is present in the environment before
// config.Load reads it; an explicitly set PORT always wins.
func init() {
	const fallbackPort = "3018"
	if current := os.Getenv("PORT"); current == "" {
		os.Setenv("PORT", fallbackPort)
	}
}