feat: Go backend, enhanced search, new widgets, Docker deploy

Major changes:
- Add Go backend (backend/) with microservices architecture
- Enhanced master-agents-svc: reranker, content-classifier, stealth-crawler,
  proxy-manager, media-search, fastClassifier, language detection
- New web-svc widgets: KnowledgeCard, ProductCard, ProfileCard, VideoCard,
  UnifiedCard, CardGallery, InlineImageGallery, SourcesPanel, RelatedQuestions
- Improved discover-svc with discover-db integration
- Docker deployment improvements (Caddyfile, vendor.sh, BUILD.md)
- Library-svc: project_id schema migration
- Remove deprecated finance-svc and travel-svc
- Localization improvements across services

Made-with: Cursor
This commit is contained in:
home
2026-02-27 04:15:32 +03:00
parent 328d968f3f
commit 06fe57c765
285 changed files with 53132 additions and 1871 deletions

View File

@@ -0,0 +1,574 @@
package computer
import (
"context"
"encoding/json"
"fmt"
"strings"
"sync"
"time"
"github.com/gooseek/backend/internal/llm"
"github.com/google/uuid"
"golang.org/x/sync/errgroup"
)
// Executor runs SubTasks against LLM clients selected by its Router,
// optionally executing produced code inside a sandbox.
type Executor struct {
	router     *Router         // selects an llm.Client/model spec per task and budget
	sandbox    *SandboxManager // optional; used by deploy tasks (see SetSandbox)
	maxWorkers int             // concurrency cap for ExecuteGroup
}
// NewExecutor builds an Executor backed by router. A non-positive
// maxWorkers is replaced with a default of 5 concurrent workers.
func NewExecutor(router *Router, maxWorkers int) *Executor {
	workers := maxWorkers
	if workers <= 0 {
		workers = 5
	}
	return &Executor{
		router:     router,
		maxWorkers: workers,
	}
}
// SetSandbox attaches the sandbox manager used by deploy tasks; when left
// unset, deploy tasks fall back to generic LLM execution (see executeDeploy).
func (e *Executor) SetSandbox(sandbox *SandboxManager) {
	e.sandbox = sandbox
}
// ExecuteGroup runs tasks concurrently, bounded by e.maxWorkers, splitting
// budget evenly across them. A per-task failure is recorded in that task's
// ExecutionResult slot (Error field) instead of aborting the whole group,
// so the returned slice always has one entry per input task.
func (e *Executor) ExecuteGroup(ctx context.Context, tasks []SubTask, budget float64) ([]ExecutionResult, error) {
	// Guard the empty group: avoids a 0-divisor below (which would yield an
	// +Inf per-task budget) and skips the errgroup machinery entirely.
	if len(tasks) == 0 {
		return []ExecutionResult{}, nil
	}
	results := make([]ExecutionResult, len(tasks))
	var mu sync.Mutex
	perTaskBudget := budget / float64(len(tasks))
	g, gctx := errgroup.WithContext(ctx)
	g.SetLimit(e.maxWorkers)
	for i, task := range tasks {
		i, task := i, task // pre-Go 1.22 loop-variable capture
		g.Go(func() error {
			result, err := e.ExecuteTask(gctx, &task, perTaskBudget)
			mu.Lock()
			defer mu.Unlock()
			if err != nil {
				results[i] = ExecutionResult{
					TaskID:    task.ID,
					SubTaskID: task.ID,
					Error:     err,
				}
			} else {
				results[i] = *result
			}
			return nil
		})
	}
	// Workers always return nil (errors are stored in results), so Wait
	// only blocks until all goroutines finish.
	if err := g.Wait(); err != nil {
		return results, err
	}
	return results, nil
}
// ExecuteTask routes a single task to a model within budget, dispatches it
// to the type-specific handler, and stamps duration and estimated cost on
// the result. The task's ModelID and StartedAt fields are set as side effects.
func (e *Executor) ExecuteTask(ctx context.Context, task *SubTask, budget float64) (*ExecutionResult, error) {
	started := time.Now()
	client, spec, err := e.router.Route(task, budget)
	if err != nil {
		return nil, fmt.Errorf("routing failed: %w", err)
	}
	task.ModelID = spec.ID
	now := time.Now()
	task.StartedAt = &now

	// Select the handler for this task type; unknown types run generically.
	handler := e.executeGeneric
	switch task.Type {
	case TaskResearch:
		handler = e.executeResearch
	case TaskCode:
		handler = e.executeCode
	case TaskAnalysis:
		handler = e.executeAnalysis
	case TaskDesign:
		handler = e.executeDesign
	case TaskDeploy:
		handler = e.executeDeploy
	case TaskReport:
		handler = e.executeReport
	case TaskCommunicate:
		handler = e.executeCommunicate
	case TaskTransform:
		handler = e.executeTransform
	case TaskValidate:
		handler = e.executeValidate
	}

	result, err := handler(ctx, client, task)
	if err != nil {
		return nil, err
	}
	result.Duration = time.Since(started)
	result.Cost = e.router.EstimateCost(task, 1000, 500)
	return result, nil
}
// executeResearch asks the model to carry out a research task and returns
// the parsed JSON findings as the result output.
func (e *Executor) executeResearch(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) {
	prompt := fmt.Sprintf(`You are a research assistant. Complete this research task:
Task: %s
Additional context: %v
Provide a comprehensive research result with:
1. Key findings
2. Sources/references
3. Summary
Respond in JSON:
{
"findings": ["finding 1", "finding 2"],
"sources": ["source 1", "source 2"],
"summary": "...",
"data": {}
}`, task.Description, task.Input)
	req := llm.StreamRequest{
		Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}},
		Options:  llm.StreamOptions{MaxTokens: 4096},
	}
	reply, err := client.GenerateText(ctx, req)
	if err != nil {
		return nil, err
	}
	return &ExecutionResult{
		TaskID:    task.ID,
		SubTaskID: task.ID,
		Output:    parseJSONOutput(reply),
	}, nil
}
// executeCode prompts the model for production-ready code, parses the JSON
// reply, and packages the returned source text as a code artifact. The
// filename defaults to "main.py" when the reply omits one.
func (e *Executor) executeCode(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) {
	inputContext := ""
	if task.Input != nil {
		// Marshal error ignored: best-effort context; an empty payload still
		// yields a usable prompt.
		inputJSON, _ := json.Marshal(task.Input)
		inputContext = fmt.Sprintf("\n\nContext from previous tasks:\n%s", string(inputJSON))
	}
	prompt := fmt.Sprintf(`You are an expert programmer. Complete this coding task:
Task: %s%s
Requirements:
1. Write clean, production-ready code
2. Include error handling
3. Add necessary imports
4. Follow best practices
Respond in JSON:
{
"language": "python",
"code": "...",
"filename": "main.py",
"dependencies": ["package1", "package2"],
"explanation": "..."
}`, task.Description, inputContext)
	req := llm.StreamRequest{
		Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}},
		Options:  llm.StreamOptions{MaxTokens: 8192},
	}
	reply, err := client.GenerateText(ctx, req)
	if err != nil {
		return nil, err
	}
	parsed := parseJSONOutput(reply)

	// Only produce an artifact when the model actually returned code.
	var artifacts []Artifact
	if code, ok := parsed["code"].(string); ok {
		name := "main.py"
		if fn, ok := parsed["filename"].(string); ok {
			name = fn
		}
		artifacts = []Artifact{{
			ID:        uuid.New().String(),
			TaskID:    task.ID,
			Type:      ArtifactTypeCode,
			Name:      name,
			Content:   []byte(code),
			Size:      int64(len(code)),
			CreatedAt: time.Now(),
		}}
	}
	return &ExecutionResult{
		TaskID:    task.ID,
		SubTaskID: task.ID,
		Output:    parsed,
		Artifacts: artifacts,
	}, nil
}
// executeAnalysis asks the model to analyze the task's input data and
// returns the parsed JSON insights as the result output.
func (e *Executor) executeAnalysis(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) {
	// Marshal error ignored: best-effort serialization of prior-task context.
	inputJSON, _ := json.Marshal(task.Input)
	prompt := fmt.Sprintf(`You are a data analyst. Analyze this data/information:
Task: %s
Input data:
%s
Provide:
1. Key insights
2. Patterns observed
3. Recommendations
4. Visualizations needed (describe)
Respond in JSON:
{
"insights": ["insight 1", "insight 2"],
"patterns": ["pattern 1"],
"recommendations": ["rec 1"],
"visualizations": ["chart type 1"],
"summary": "..."
}`, task.Description, string(inputJSON))
	req := llm.StreamRequest{
		Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}},
		Options:  llm.StreamOptions{MaxTokens: 4096},
	}
	reply, err := client.GenerateText(ctx, req)
	if err != nil {
		return nil, err
	}
	return &ExecutionResult{
		TaskID:    task.ID,
		SubTaskID: task.ID,
		Output:    parseJSONOutput(reply),
	}, nil
}
// executeDesign asks the model for an architecture/design proposal and
// returns the parsed JSON design as the result output.
func (e *Executor) executeDesign(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) {
	// Marshal error ignored: best-effort serialization of prior-task context.
	inputJSON, _ := json.Marshal(task.Input)
	prompt := fmt.Sprintf(`You are a software architect. Design a solution:
Task: %s
Context:
%s
Provide:
1. Architecture overview
2. Components and their responsibilities
3. Data flow
4. Technology recommendations
5. Implementation plan
Respond in JSON:
{
"architecture": "...",
"components": [{"name": "...", "responsibility": "..."}],
"dataFlow": "...",
"technologies": ["tech1", "tech2"],
"implementationSteps": ["step1", "step2"],
"diagram": "mermaid diagram code"
}`, task.Description, string(inputJSON))
	req := llm.StreamRequest{
		Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}},
		Options:  llm.StreamOptions{MaxTokens: 4096},
	}
	reply, err := client.GenerateText(ctx, req)
	if err != nil {
		return nil, err
	}
	return &ExecutionResult{
		TaskID:    task.ID,
		SubTaskID: task.ID,
		Output:    parseJSONOutput(reply),
	}, nil
}
// executeDeploy runs code from the task input inside an ephemeral sandbox
// and returns its stdout/stderr/exit code plus any produced files as
// artifacts. When no sandbox is configured or no code is supplied, it
// falls back to generic LLM execution.
func (e *Executor) executeDeploy(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) {
	if e.sandbox == nil {
		return e.executeGeneric(ctx, client, task)
	}
	var code string
	if task.Input != nil {
		if c, ok := task.Input["code"].(string); ok {
			code = c
		}
	}
	if code == "" {
		return e.executeGeneric(ctx, client, task)
	}
	// Honor the language an upstream code task produced (executeCode emits a
	// "language" field in its JSON output); default to python for
	// backward compatibility with inputs that omit it.
	language := "python"
	if l, ok := task.Input["language"].(string); ok && l != "" {
		language = l
	}
	sandbox, err := e.sandbox.Create(ctx, task.ID)
	if err != nil {
		return nil, fmt.Errorf("failed to create sandbox: %w", err)
	}
	defer e.sandbox.Destroy(ctx, sandbox)
	result, err := e.sandbox.Execute(ctx, sandbox, code, language)
	if err != nil {
		return nil, fmt.Errorf("sandbox execution failed: %w", err)
	}
	output := map[string]interface{}{
		"stdout":   result.Stdout,
		"stderr":   result.Stderr,
		"exitCode": result.ExitCode,
		"duration": result.Duration.String(),
	}
	var artifacts []Artifact
	for name, content := range result.Files {
		artifacts = append(artifacts, Artifact{
			ID:        uuid.New().String(),
			TaskID:    task.ID,
			Type:      ArtifactTypeFile,
			Name:      name,
			Content:   content,
			Size:      int64(len(content)),
			CreatedAt: time.Now(),
		})
	}
	return &ExecutionResult{
		TaskID:    task.ID,
		SubTaskID: task.ID,
		Output:    output,
		Artifacts: artifacts,
	}, nil
}
// executeReport has the model write a markdown report from the task's
// context and returns it both in the output map (with a word count) and
// as a "report.md" artifact.
func (e *Executor) executeReport(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) {
	// Marshal error ignored: best-effort serialization of prior-task context.
	inputJSON, _ := json.Marshal(task.Input)
	prompt := fmt.Sprintf(`You are a report writer. Generate a comprehensive report:
Task: %s
Data/Context:
%s
Create a well-structured report with:
1. Executive Summary
2. Key Findings
3. Detailed Analysis
4. Conclusions
5. Recommendations
Use markdown formatting.`, task.Description, string(inputJSON))
	req := llm.StreamRequest{
		Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}},
		Options:  llm.StreamOptions{MaxTokens: 8192},
	}
	reply, err := client.GenerateText(ctx, req)
	if err != nil {
		return nil, err
	}
	report := Artifact{
		ID:        uuid.New().String(),
		TaskID:    task.ID,
		Type:      ArtifactTypeReport,
		Name:      "report.md",
		Content:   []byte(reply),
		MimeType:  "text/markdown",
		Size:      int64(len(reply)),
		CreatedAt: time.Now(),
	}
	return &ExecutionResult{
		TaskID:    task.ID,
		SubTaskID: task.ID,
		Output: map[string]interface{}{
			"report":    reply,
			"format":    "markdown",
			"wordCount": len(strings.Fields(reply)),
		},
		Artifacts: []Artifact{report},
	}, nil
}
// executeCommunicate drafts a message/notification via the model and
// returns the parsed JSON with a "status": "prepared" marker (the message
// is composed here, not sent).
func (e *Executor) executeCommunicate(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) {
	// Marshal error ignored: best-effort serialization of prior-task context.
	inputJSON, _ := json.Marshal(task.Input)
	prompt := fmt.Sprintf(`Generate a message/notification:
Task: %s
Context:
%s
Create an appropriate message. Respond in JSON:
{
"subject": "...",
"body": "...",
"format": "text|html",
"priority": "low|normal|high"
}`, task.Description, string(inputJSON))
	req := llm.StreamRequest{
		Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}},
		Options:  llm.StreamOptions{MaxTokens: 2048},
	}
	reply, err := client.GenerateText(ctx, req)
	if err != nil {
		return nil, err
	}
	parsed := parseJSONOutput(reply)
	parsed["status"] = "prepared"
	return &ExecutionResult{
		TaskID:    task.ID,
		SubTaskID: task.ID,
		Output:    parsed,
	}, nil
}
// executeTransform asks the model to transform the task's input data and
// returns the parsed JSON transformation result.
func (e *Executor) executeTransform(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) {
	// Marshal error ignored: best-effort serialization of prior-task context.
	inputJSON, _ := json.Marshal(task.Input)
	prompt := fmt.Sprintf(`Transform data as requested:
Task: %s
Input data:
%s
Perform the transformation and return the result in JSON:
{
"transformed": ...,
"format": "...",
"changes": ["change 1", "change 2"]
}`, task.Description, string(inputJSON))
	req := llm.StreamRequest{
		Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}},
		Options:  llm.StreamOptions{MaxTokens: 4096},
	}
	reply, err := client.GenerateText(ctx, req)
	if err != nil {
		return nil, err
	}
	return &ExecutionResult{
		TaskID:    task.ID,
		SubTaskID: task.ID,
		Output:    parseJSONOutput(reply),
	}, nil
}
// executeValidate asks the model to check the task's input for correctness,
// completeness, consistency, and quality, returning the parsed JSON verdict.
func (e *Executor) executeValidate(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) {
	// Marshal error ignored: best-effort serialization of prior-task context.
	inputJSON, _ := json.Marshal(task.Input)
	prompt := fmt.Sprintf(`Validate the following:
Task: %s
Data to validate:
%s
Check for:
1. Correctness
2. Completeness
3. Consistency
4. Quality
Respond in JSON:
{
"valid": true|false,
"score": 0-100,
"issues": ["issue 1", "issue 2"],
"suggestions": ["suggestion 1"],
"summary": "..."
}`, task.Description, string(inputJSON))
	req := llm.StreamRequest{
		Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}},
		Options:  llm.StreamOptions{MaxTokens: 2048},
	}
	reply, err := client.GenerateText(ctx, req)
	if err != nil {
		return nil, err
	}
	return &ExecutionResult{
		TaskID:    task.ID,
		SubTaskID: task.ID,
		Output:    parseJSONOutput(reply),
	}, nil
}
// executeGeneric is the fallback handler for task types without a dedicated
// executor. If the model's reply parses to an empty JSON object, the raw
// reply is returned under a "result" key instead.
func (e *Executor) executeGeneric(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) {
	// Marshal error ignored: best-effort serialization of prior-task context.
	inputJSON, _ := json.Marshal(task.Input)
	prompt := fmt.Sprintf(`Complete this task:
Task type: %s
Description: %s
Context:
%s
Provide a comprehensive result in JSON format.`, task.Type, task.Description, string(inputJSON))
	req := llm.StreamRequest{
		Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}},
		Options:  llm.StreamOptions{MaxTokens: 4096},
	}
	reply, err := client.GenerateText(ctx, req)
	if err != nil {
		return nil, err
	}
	parsed := parseJSONOutput(reply)
	if len(parsed) == 0 {
		parsed = map[string]interface{}{"result": reply}
	}
	return &ExecutionResult{
		TaskID:    task.ID,
		SubTaskID: task.ID,
		Output:    parsed,
	}, nil
}
// parseJSONOutput extracts the substring between the first "{" and the last
// "}" of response and unmarshals it into a map. If no such span exists or
// the span is not valid JSON, the whole response is returned under a "raw"
// key so callers never lose the model's text.
func parseJSONOutput(response string) map[string]interface{} {
	open := strings.Index(response, "{")
	end := strings.LastIndex(response, "}")
	if open < 0 || end <= open {
		return map[string]interface{}{"raw": response}
	}
	var parsed map[string]interface{}
	if err := json.Unmarshal([]byte(response[open:end+1]), &parsed); err != nil {
		return map[string]interface{}{"raw": response}
	}
	return parsed
}