feat: Go backend, enhanced search, new widgets, Docker deploy
Major changes: - Add Go backend (backend/) with microservices architecture - Enhanced master-agents-svc: reranker, content-classifier, stealth-crawler, proxy-manager, media-search, fastClassifier, language detection - New web-svc widgets: KnowledgeCard, ProductCard, ProfileCard, VideoCard, UnifiedCard, CardGallery, InlineImageGallery, SourcesPanel, RelatedQuestions - Improved discover-svc with discover-db integration - Docker deployment improvements (Caddyfile, vendor.sh, BUILD.md) - Library-svc: project_id schema migration - Remove deprecated finance-svc and travel-svc - Localization improvements across services Made-with: Cursor
This commit is contained in:
127
backend/internal/prompts/researcher.go
Normal file
127
backend/internal/prompts/researcher.go
Normal file
@@ -0,0 +1,127 @@
|
||||
package prompts
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// ResearcherConfig carries the inputs used to render the researcher
// system prompt; see GetResearcherPrompt for how each field is consumed.
type ResearcherConfig struct {
// AvailableActions is a pre-rendered description of the actions the
// agent may take; it is inserted verbatim under "## Available Actions".
AvailableActions string
// Mode selects research depth: "speed", "balanced", or "quality".
// Any other value omits the mode section from the prompt entirely.
Mode string
// Iteration is the zero-based index of the current research loop
// iteration; the prompt shows it to the model as Iteration+1.
Iteration int
// MaxIterations is the iteration budget reported in the prompt.
MaxIterations int
// Locale of the request. NOTE(review): not referenced anywhere in this
// file — presumably consumed by another prompt builder; confirm before
// relying on it here.
Locale string
// DetectedLanguage is the detected user language code; the value "ru"
// adds a Russian-language instruction section to the prompt.
DetectedLanguage string
// IsArticleSummary marks a "Summary: <url>" request and adds the
// multi-source article-summary task section.
IsArticleSummary bool
}
|
||||
|
||||
func GetResearcherPrompt(cfg ResearcherConfig) string {
|
||||
var sb strings.Builder
|
||||
|
||||
sb.WriteString("You are a research agent for GooSeek, an AI search engine.\n\n")
|
||||
|
||||
sb.WriteString("## Your Role\n\n")
|
||||
sb.WriteString("You gather information to answer user queries by:\n")
|
||||
sb.WriteString("1. Searching the web for relevant information\n")
|
||||
sb.WriteString("2. Scraping specific pages for detailed content\n")
|
||||
sb.WriteString("3. Deciding when you have enough information\n\n")
|
||||
|
||||
sb.WriteString("## Available Actions\n\n")
|
||||
sb.WriteString(cfg.AvailableActions)
|
||||
sb.WriteString("\n\n")
|
||||
|
||||
sb.WriteString("## Progress\n\n")
|
||||
sb.WriteString(fmt.Sprintf("Current iteration: %d / %d\n\n", cfg.Iteration+1, cfg.MaxIterations))
|
||||
|
||||
switch cfg.Mode {
|
||||
case "speed":
|
||||
sb.WriteString("## Speed Mode\n\n")
|
||||
sb.WriteString("- Perform ONE search and call done\n")
|
||||
sb.WriteString("- Do NOT scrape pages\n")
|
||||
sb.WriteString("- Use snippets from search results\n\n")
|
||||
case "balanced":
|
||||
sb.WriteString("## Balanced Mode\n\n")
|
||||
sb.WriteString("- Perform 1-3 searches\n")
|
||||
sb.WriteString("- Scrape top 3-5 relevant pages\n")
|
||||
sb.WriteString("- Balance depth vs. speed\n\n")
|
||||
case "quality":
|
||||
sb.WriteString("## Quality Mode\n\n")
|
||||
sb.WriteString("- Perform multiple searches with different queries\n")
|
||||
sb.WriteString("- Scrape 10-15 relevant pages\n")
|
||||
sb.WriteString("- Verify information across sources\n")
|
||||
sb.WriteString("- Be thorough and comprehensive\n\n")
|
||||
}
|
||||
|
||||
if cfg.IsArticleSummary {
|
||||
sb.WriteString("## Article Summary Task (Perplexity Discover-style)\n\n")
|
||||
sb.WriteString("The user requested an article summary (Summary: <url>). This is a multi-source digest request.\n\n")
|
||||
sb.WriteString("**Your goals:**\n")
|
||||
sb.WriteString("1. The main article is already pre-scraped and will be in context\n")
|
||||
sb.WriteString("2. Search for 3-5 related sources that provide context\n")
|
||||
sb.WriteString("3. Look for: related news, background, analysis, reactions\n")
|
||||
sb.WriteString("4. Use news categories: `news`, `science` engines\n")
|
||||
sb.WriteString("5. Max 5 additional sources (article itself is [1])\n\n")
|
||||
sb.WriteString("**Search strategy:**\n")
|
||||
sb.WriteString("- Extract key entities/topics from article title\n")
|
||||
sb.WriteString("- Search for recent news on those topics\n")
|
||||
sb.WriteString("- Find expert opinions or analysis\n")
|
||||
sb.WriteString("- Look for official statements if relevant\n\n")
|
||||
}
|
||||
|
||||
if cfg.DetectedLanguage == "ru" {
|
||||
sb.WriteString("## Language\n\n")
|
||||
sb.WriteString("Пользователь пишет на русском. Формулируй поисковые запросы на русском языке.\n\n")
|
||||
}
|
||||
|
||||
sb.WriteString("## Instructions\n\n")
|
||||
sb.WriteString("1. Analyze the user's query and conversation history\n")
|
||||
sb.WriteString("2. Plan what information you need to gather\n")
|
||||
sb.WriteString("3. Execute actions to gather that information\n")
|
||||
sb.WriteString("4. Call 'done' when you have sufficient information\n\n")
|
||||
|
||||
sb.WriteString("## Important Rules\n\n")
|
||||
sb.WriteString("- Always start with __reasoning_preamble to explain your plan\n")
|
||||
sb.WriteString("- Formulate specific, targeted search queries\n")
|
||||
sb.WriteString("- Avoid redundant searches\n")
|
||||
sb.WriteString("- Call 'done' when information is sufficient\n")
|
||||
sb.WriteString("- Don't exceed the iteration limit\n\n")
|
||||
|
||||
sb.WriteString("Now analyze the conversation and execute the appropriate actions.")
|
||||
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
// GetAvailableActionsDescription returns the markdown catalogue of
// actions the researcher agent may take, one "### <action>" section per
// action, with no leading or trailing whitespace. The result is meant
// to be fed into ResearcherConfig.AvailableActions.
func GetAvailableActionsDescription() string {
	sections := []string{
		"### __reasoning_preamble\n" +
			"Use this first to explain your research plan.\n" +
			"Arguments:\n" +
			"- plan (string): Your reasoning about what to search for",
		"### web_search\n" +
			"Search the web for information.\n" +
			"Arguments:\n" +
			"- query (string): Search query\n" +
			"- engines (array, optional): Specific search engines to use",
		"### academic_search\n" +
			"Search academic/scientific sources.\n" +
			"Arguments:\n" +
			"- query (string): Academic search query",
		"### social_search\n" +
			"Search social media and forums.\n" +
			"Arguments:\n" +
			"- query (string): Social search query",
		"### scrape_url\n" +
			"Fetch and extract content from a specific URL.\n" +
			"Arguments:\n" +
			"- url (string): URL to scrape",
		"### done\n" +
			"Signal that research is complete.\n" +
			"Arguments:\n" +
			"- reason (string): Why research is sufficient",
	}
	// Sections are separated by exactly one blank line; TrimSpace keeps
	// the contract of the original raw-literal implementation.
	return strings.TrimSpace(strings.Join(sections, "\n\n"))
}
|
||||
Reference in New Issue
Block a user