From 06fe57c76527f48c94897cc209472589bf91cdbe Mon Sep 17 00:00:00 2001 From: home Date: Fri, 27 Feb 2026 04:15:32 +0300 Subject: [PATCH] feat: Go backend, enhanced search, new widgets, Docker deploy Major changes: - Add Go backend (backend/) with microservices architecture - Enhanced master-agents-svc: reranker, content-classifier, stealth-crawler, proxy-manager, media-search, fastClassifier, language detection - New web-svc widgets: KnowledgeCard, ProductCard, ProfileCard, VideoCard, UnifiedCard, CardGallery, InlineImageGallery, SourcesPanel, RelatedQuestions - Improved discover-svc with discover-db integration - Docker deployment improvements (Caddyfile, vendor.sh, BUILD.md) - Library-svc: project_id schema migration - Remove deprecated finance-svc and travel-svc - Localization improvements across services Made-with: Cursor --- .env.example | 163 +- .gitignore | 5 +- CONTINUE.md | 943 +++- TEST_RESULTS.md | 280 ++ api-1.json | 2896 +++++++++++ backend/.env.computer-svc | 65 + backend/Makefile | 86 + backend/README.md | 257 + backend/cmd/agent-svc/main.go | 209 + backend/cmd/api-gateway/main.go | 232 + backend/cmd/browser-svc/main.go | 53 + backend/cmd/chat-svc/main.go | 235 + backend/cmd/collection-svc/main.go | 345 ++ backend/cmd/computer-svc/main.go | 552 +++ backend/cmd/discover-svc/main.go | 511 ++ backend/cmd/file-svc/main.go | 307 ++ backend/cmd/finance-heatmap-svc/main.go | 139 + backend/cmd/labs-svc/main.go | 553 +++ backend/cmd/learning-svc/main.go | 404 ++ backend/cmd/llm-svc/main.go | 191 + backend/cmd/podcast-svc/main.go | 424 ++ backend/cmd/scraper-svc/main.go | 284 ++ backend/cmd/search-svc/main.go | 143 + backend/cmd/thread-svc/main.go | 624 +++ backend/deploy/docker/Dockerfile.agent-svc | 31 + backend/deploy/docker/Dockerfile.all | 50 + backend/deploy/docker/Dockerfile.api-gateway | 32 + backend/deploy/docker/Dockerfile.chat-svc | 31 + backend/deploy/docker/Dockerfile.computer-svc | 32 + backend/deploy/docker/Dockerfile.discover-svc | 31 + 
backend/deploy/docker/Dockerfile.search-svc | 31 + backend/deploy/docker/docker-compose.yml | 382 ++ backend/deploy/k8s/agent-svc.yaml | 68 + backend/deploy/k8s/api-gateway.yaml | 68 + backend/deploy/k8s/chat-svc.yaml | 68 + backend/deploy/k8s/collection-svc.yaml | 68 + backend/deploy/k8s/computer-svc.yaml | 137 + backend/deploy/k8s/configmap.yaml | 32 + backend/deploy/k8s/deploy.sh | 56 + backend/deploy/k8s/discover-svc.yaml | 68 + backend/deploy/k8s/file-svc.yaml | 90 + backend/deploy/k8s/ingress.yaml | 51 + backend/deploy/k8s/kustomization.yaml | 30 + backend/deploy/k8s/llm-svc.yaml | 68 + backend/deploy/k8s/namespace.yaml | 7 + backend/deploy/k8s/postgres.yaml | 86 + backend/deploy/k8s/redis.yaml | 63 + backend/deploy/k8s/scraper-svc.yaml | 66 + backend/deploy/k8s/search-svc.yaml | 66 + backend/deploy/k8s/thread-svc.yaml | 63 + backend/go.mod | 43 + backend/go.sum | 88 + backend/internal/agent/classifier.go | 233 + backend/internal/agent/deep_research.go | 543 ++ backend/internal/agent/focus_modes.go | 293 ++ backend/internal/agent/orchestrator.go | 950 ++++ backend/internal/agent/researcher.go | 128 + backend/internal/computer/browser/browser.go | 587 +++ backend/internal/computer/browser/server.go | 555 +++ backend/internal/computer/computer.go | 738 +++ .../internal/computer/connectors/connector.go | 104 + backend/internal/computer/connectors/email.go | 215 + .../internal/computer/connectors/storage.go | 432 ++ .../internal/computer/connectors/telegram.go | 263 + .../internal/computer/connectors/webhook.go | 275 + backend/internal/computer/executor.go | 574 +++ backend/internal/computer/memory.go | 377 ++ backend/internal/computer/planner.go | 371 ++ backend/internal/computer/router.go | 244 + backend/internal/computer/sandbox.go | 431 ++ backend/internal/computer/scheduler.go | 386 ++ backend/internal/computer/types.go | 376 ++ backend/internal/db/article_summary_repo.go | 97 + backend/internal/db/collection_repo.go | 204 + 
backend/internal/db/computer_artifact_repo.go | 322 ++ backend/internal/db/computer_memory_repo.go | 306 ++ backend/internal/db/computer_task_repo.go | 411 ++ backend/internal/db/digest_repo.go | 177 + backend/internal/db/file_repo.go | 149 + backend/internal/db/memory_repo.go | 170 + backend/internal/db/page_repo.go | 219 + backend/internal/db/postgres.go | 134 + backend/internal/db/space_repo.go | 163 + backend/internal/db/thread_repo.go | 270 + backend/internal/db/user_interests_repo.go | 323 ++ backend/internal/discover/personalization.go | 691 +++ backend/internal/files/analyzer.go | 343 ++ backend/internal/finance/heatmap.go | 537 ++ backend/internal/labs/generator.go | 759 +++ backend/internal/labs/types.go | 335 ++ backend/internal/learning/stepper.go | 701 +++ backend/internal/llm/anthropic.go | 182 + backend/internal/llm/client.go | 145 + backend/internal/llm/gemini.go | 193 + backend/internal/llm/openai.go | 166 + backend/internal/llm/registry.go | 229 + backend/internal/llm/timeweb.go | 402 ++ backend/internal/pages/generator.go | 318 ++ backend/internal/podcast/generator.go | 507 ++ backend/internal/prompts/classifier.go | 50 + backend/internal/prompts/researcher.go | 127 + backend/internal/prompts/writer.go | 146 + backend/internal/search/media.go | 215 + backend/internal/search/reranker.go | 163 + backend/internal/search/searxng.go | 177 + backend/internal/session/manager.go | 183 + backend/internal/types/blocks.go | 102 + backend/internal/types/chunks.go | 75 + backend/internal/types/widgets.go | 145 + backend/pkg/cache/redis.go | 183 + backend/pkg/config/config.go | 173 + backend/pkg/middleware/auth.go | 50 + backend/pkg/middleware/jwt.go | 280 ++ backend/pkg/middleware/logging.go | 49 + backend/pkg/middleware/ratelimit.go | 106 + backend/pkg/middleware/ratelimit_redis.go | 213 + backend/pkg/ndjson/writer.go | 113 + backend/webui/.env.example | 7 + backend/webui/Dockerfile | 44 + backend/webui/next-env.d.ts | 5 + backend/webui/next.config.mjs | 18 
+ backend/webui/package-lock.json | 4408 +++++++++++++++++ backend/webui/package.json | 38 + backend/webui/postcss.config.js | 6 + backend/webui/public/logo.svg | 12 + .../webui/src/app/(main)/computer/page.tsx | 752 +++ .../webui/src/app/(main)/discover/page.tsx | 153 + backend/webui/src/app/(main)/finance/page.tsx | 265 + backend/webui/src/app/(main)/history/page.tsx | 246 + backend/webui/src/app/(main)/layout.tsx | 86 + .../src/app/(main)/learning/new/page.tsx | 201 + .../webui/src/app/(main)/learning/page.tsx | 218 + backend/webui/src/app/(main)/page.tsx | 108 + .../webui/src/app/(main)/settings/page.tsx | 172 + .../src/app/(main)/spaces/[id]/edit/page.tsx | 248 + .../webui/src/app/(main)/spaces/new/page.tsx | 162 + backend/webui/src/app/(main)/spaces/page.tsx | 205 + backend/webui/src/app/globals.css | 522 ++ backend/webui/src/app/layout.tsx | 19 + backend/webui/src/components/ChatInput.tsx | 208 + backend/webui/src/components/ChatMessage.tsx | 275 + backend/webui/src/components/Citation.tsx | 146 + backend/webui/src/components/DiscoverCard.tsx | 152 + backend/webui/src/components/Sidebar.tsx | 198 + backend/webui/src/components/index.ts | 5 + backend/webui/src/lib/api.ts | 579 +++ backend/webui/src/lib/hooks/useChat.ts | 177 + backend/webui/src/lib/types.ts | 441 ++ backend/webui/src/lib/utils.ts | 54 + backend/webui/tailwind.config.ts | 165 + backend/webui/tsconfig.json | 26 + deploy/docker/BUILD.md | 34 + deploy/docker/Caddyfile | 33 +- deploy/docker/Dockerfile.caddy | 7 + deploy/docker/docker-compose.yml | 475 +- deploy/docker/run.sh | 172 +- deploy/docker/searxng/settings.yml | 102 +- deploy/docker/vendor.sh | 99 + package-lock.json | 106 +- services/api-gateway/Dockerfile | 8 +- services/api-gateway/src/index.ts | 6 +- services/audit-svc/Dockerfile | 7 +- services/auth-svc/Dockerfile | 8 +- services/billing-svc/Dockerfile | 7 +- services/cache-worker/Dockerfile | 7 +- services/cache-worker/package.json | 4 +- .../cache-worker/src/lib/crawl4ai-client.ts | 
154 + services/cache-worker/src/run.ts | 164 +- services/cache-worker/src/tasks/discover.ts | 529 +- services/chat-svc/Dockerfile | 7 +- services/chat-svc/src/index.ts | 106 + services/create-svc/Dockerfile | 7 +- services/discover-svc/Dockerfile | 9 +- services/discover-svc/package.json | 4 +- services/discover-svc/src/discover-db.ts | 276 ++ services/discover-svc/src/index.ts | 725 ++- services/discover-svc/src/searxng.ts | 2 + services/finance-svc/Dockerfile | 15 - services/finance-svc/package.json | 21 - services/finance-svc/src/index.ts | 419 -- services/finance-svc/tsconfig.json | 13 - services/geo-device-svc/Dockerfile | 7 +- services/geo-device-svc/package.json | 1 + services/library-svc/Dockerfile | 8 +- .../library-svc/drizzle/0002_project_id.sql | 2 + services/library-svc/src/db/push.ts | 2 + services/library-svc/src/db/schema.ts | 1 + services/library-svc/src/index.ts | 33 +- services/llm-svc/Dockerfile | 8 +- services/localization-svc/Dockerfile | 7 +- services/localization-svc/package.json | 1 + services/master-agents-svc/Dockerfile | 7 +- services/master-agents-svc/package.json | 3 + services/master-agents-svc/src/index.ts | 1 + .../src/lib/actions/__reasoning_preamble.ts | 2 +- .../src/lib/actions/academic_search.ts | 26 +- .../master-agents-svc/src/lib/actions/done.ts | 2 +- .../src/lib/actions/registry.ts | 4 +- .../src/lib/actions/scrape_url.ts | 118 +- .../src/lib/actions/social_search.ts | 26 +- .../src/lib/actions/types.ts | 10 +- .../src/lib/actions/web_search.ts | 122 +- .../src/lib/agent/classifier.ts | 3 +- .../src/lib/agent/fastClassifier.ts | 109 + .../src/lib/agent/researcher.ts | 115 +- .../src/lib/agent/searchOrchestrator.ts | 498 +- .../src/lib/content-classifier.ts | 515 ++ .../src/lib/crawl4ai-client.ts | 292 ++ .../master-agents-svc/src/lib/media-search.ts | 176 + .../src/lib/prompts/classifier.ts | 4 +- .../src/lib/prompts/detectLanguage.ts | 29 + .../src/lib/prompts/locale.ts | 30 +- .../src/lib/prompts/master.ts | 4 +- 
.../src/lib/prompts/researcher.ts | 30 +- .../src/lib/prompts/writer.ts | 29 +- .../src/lib/proxy-manager.ts | 324 ++ .../master-agents-svc/src/lib/reranker.ts | 314 ++ services/master-agents-svc/src/lib/searxng.ts | 51 +- .../src/lib/stealth-crawler.ts | 394 ++ .../master-agents-svc/src/lib/tools/scrape.ts | 80 +- .../src/lib/types/widgets.ts | 414 ++ services/media-svc/DEPRECATED.md | 24 + services/memory-svc/Dockerfile | 7 +- services/notifications-svc/Dockerfile | 7 +- services/profile-svc/Dockerfile | 7 +- services/projects-svc/Dockerfile | 7 +- services/projects-svc/src/index.ts | 99 +- services/search-svc/Dockerfile | 7 +- services/search-svc/src/index.ts | 4 +- services/suggestions-svc/package.json | 1 + .../src/lib/agents/suggestions.ts | 9 +- .../src/lib/prompts/detectLanguage.ts | 29 + .../suggestions-svc/src/lib/prompts/locale.ts | 34 +- .../src/lib/prompts/suggestions.ts | 4 +- services/travel-svc/Dockerfile | 15 - services/travel-svc/package.json | 21 - services/travel-svc/src/index.ts | 270 - services/travel-svc/tsconfig.json | 13 - services/web-svc/Dockerfile | 6 +- services/web-svc/next.config.mjs | 2 +- services/web-svc/src/app/api/chat/route.ts | 85 + services/web-svc/src/app/discover/page.tsx | 408 +- services/web-svc/src/app/layout.tsx | 6 +- services/web-svc/src/app/page.tsx | 13 +- services/web-svc/src/app/profile/page.tsx | 2 +- .../app/spaces/[projectId]/SpaceCarousels.tsx | 47 + .../spaces/[projectId]/chat/[chatId]/page.tsx | 33 + .../src/app/spaces/[projectId]/page.tsx | 583 +++ services/web-svc/src/app/spaces/page.tsx | 181 +- .../web-svc/src/components/AssistantSteps.tsx | 143 +- services/web-svc/src/components/Chat.tsx | 6 +- .../web-svc/src/components/ChatWindow.tsx | 25 +- .../src/components/Discover/MajorNewsCard.tsx | 101 +- .../src/components/Discover/SmallNewsCard.tsx | 76 +- services/web-svc/src/components/EmptyChat.tsx | 4 - .../src/components/GuestWarningBanner.tsx | 20 +- .../web-svc/src/components/MessageBox.tsx | 36 +- 
.../MessageInputActions/InputBarPlus.tsx | 30 +- .../components/MessageRenderer/Citation.tsx | 164 +- .../src/components/NewsArticleWidget.tsx | 25 +- .../src/components/RelatedQuestions.tsx | 194 + services/web-svc/src/components/Sidebar.tsx | 8 +- .../web-svc/src/components/SourcesPanel.tsx | 264 + .../web-svc/src/components/WeatherWidget.tsx | 32 +- .../src/components/Widgets/CardGallery.tsx | 203 + .../components/Widgets/InlineImageGallery.tsx | 266 + .../src/components/Widgets/KnowledgeCard.tsx | 464 ++ .../src/components/Widgets/ProductCard.tsx | 231 + .../src/components/Widgets/ProfileCard.tsx | 216 + .../src/components/Widgets/PromoCard.tsx | 245 + .../src/components/Widgets/Renderer.tsx | 300 +- .../src/components/Widgets/UnifiedCard.tsx | 229 + .../src/components/Widgets/VideoCard.tsx | 234 + .../src/components/Widgets/VideoEmbed.tsx | 250 + services/web-svc/src/lib/guest-storage.ts | 7 + services/web-svc/src/lib/hooks/useChat.tsx | 123 +- services/web-svc/src/lib/localization.ts | 83 +- .../web-svc/src/lib/localization/context.tsx | 191 +- services/web-svc/src/lib/project-files-db.ts | 103 + services/web-svc/src/lib/project-storage.ts | 79 + services/web-svc/src/lib/types.ts | 21 +- services/web-svc/tsconfig.tsbuildinfo | 2 +- test-chat-proper.sh | 70 + test-chat-via-webui.sh | 58 + test-chat.sh | 42 + 285 files changed, 53132 insertions(+), 1871 deletions(-) create mode 100644 TEST_RESULTS.md create mode 100644 api-1.json create mode 100644 backend/.env.computer-svc create mode 100644 backend/Makefile create mode 100644 backend/README.md create mode 100644 backend/cmd/agent-svc/main.go create mode 100644 backend/cmd/api-gateway/main.go create mode 100644 backend/cmd/browser-svc/main.go create mode 100644 backend/cmd/chat-svc/main.go create mode 100644 backend/cmd/collection-svc/main.go create mode 100644 backend/cmd/computer-svc/main.go create mode 100644 backend/cmd/discover-svc/main.go create mode 100644 backend/cmd/file-svc/main.go create mode 100644 
backend/cmd/finance-heatmap-svc/main.go create mode 100644 backend/cmd/labs-svc/main.go create mode 100644 backend/cmd/learning-svc/main.go create mode 100644 backend/cmd/llm-svc/main.go create mode 100644 backend/cmd/podcast-svc/main.go create mode 100644 backend/cmd/scraper-svc/main.go create mode 100644 backend/cmd/search-svc/main.go create mode 100644 backend/cmd/thread-svc/main.go create mode 100644 backend/deploy/docker/Dockerfile.agent-svc create mode 100644 backend/deploy/docker/Dockerfile.all create mode 100644 backend/deploy/docker/Dockerfile.api-gateway create mode 100644 backend/deploy/docker/Dockerfile.chat-svc create mode 100644 backend/deploy/docker/Dockerfile.computer-svc create mode 100644 backend/deploy/docker/Dockerfile.discover-svc create mode 100644 backend/deploy/docker/Dockerfile.search-svc create mode 100644 backend/deploy/docker/docker-compose.yml create mode 100644 backend/deploy/k8s/agent-svc.yaml create mode 100644 backend/deploy/k8s/api-gateway.yaml create mode 100644 backend/deploy/k8s/chat-svc.yaml create mode 100644 backend/deploy/k8s/collection-svc.yaml create mode 100644 backend/deploy/k8s/computer-svc.yaml create mode 100644 backend/deploy/k8s/configmap.yaml create mode 100755 backend/deploy/k8s/deploy.sh create mode 100644 backend/deploy/k8s/discover-svc.yaml create mode 100644 backend/deploy/k8s/file-svc.yaml create mode 100644 backend/deploy/k8s/ingress.yaml create mode 100644 backend/deploy/k8s/kustomization.yaml create mode 100644 backend/deploy/k8s/llm-svc.yaml create mode 100644 backend/deploy/k8s/namespace.yaml create mode 100644 backend/deploy/k8s/postgres.yaml create mode 100644 backend/deploy/k8s/redis.yaml create mode 100644 backend/deploy/k8s/scraper-svc.yaml create mode 100644 backend/deploy/k8s/search-svc.yaml create mode 100644 backend/deploy/k8s/thread-svc.yaml create mode 100644 backend/go.mod create mode 100644 backend/go.sum create mode 100644 backend/internal/agent/classifier.go create mode 100644 
backend/internal/agent/deep_research.go create mode 100644 backend/internal/agent/focus_modes.go create mode 100644 backend/internal/agent/orchestrator.go create mode 100644 backend/internal/agent/researcher.go create mode 100644 backend/internal/computer/browser/browser.go create mode 100644 backend/internal/computer/browser/server.go create mode 100644 backend/internal/computer/computer.go create mode 100644 backend/internal/computer/connectors/connector.go create mode 100644 backend/internal/computer/connectors/email.go create mode 100644 backend/internal/computer/connectors/storage.go create mode 100644 backend/internal/computer/connectors/telegram.go create mode 100644 backend/internal/computer/connectors/webhook.go create mode 100644 backend/internal/computer/executor.go create mode 100644 backend/internal/computer/memory.go create mode 100644 backend/internal/computer/planner.go create mode 100644 backend/internal/computer/router.go create mode 100644 backend/internal/computer/sandbox.go create mode 100644 backend/internal/computer/scheduler.go create mode 100644 backend/internal/computer/types.go create mode 100644 backend/internal/db/article_summary_repo.go create mode 100644 backend/internal/db/collection_repo.go create mode 100644 backend/internal/db/computer_artifact_repo.go create mode 100644 backend/internal/db/computer_memory_repo.go create mode 100644 backend/internal/db/computer_task_repo.go create mode 100644 backend/internal/db/digest_repo.go create mode 100644 backend/internal/db/file_repo.go create mode 100644 backend/internal/db/memory_repo.go create mode 100644 backend/internal/db/page_repo.go create mode 100644 backend/internal/db/postgres.go create mode 100644 backend/internal/db/space_repo.go create mode 100644 backend/internal/db/thread_repo.go create mode 100644 backend/internal/db/user_interests_repo.go create mode 100644 backend/internal/discover/personalization.go create mode 100644 backend/internal/files/analyzer.go create mode 
100644 backend/internal/finance/heatmap.go create mode 100644 backend/internal/labs/generator.go create mode 100644 backend/internal/labs/types.go create mode 100644 backend/internal/learning/stepper.go create mode 100644 backend/internal/llm/anthropic.go create mode 100644 backend/internal/llm/client.go create mode 100644 backend/internal/llm/gemini.go create mode 100644 backend/internal/llm/openai.go create mode 100644 backend/internal/llm/registry.go create mode 100644 backend/internal/llm/timeweb.go create mode 100644 backend/internal/pages/generator.go create mode 100644 backend/internal/podcast/generator.go create mode 100644 backend/internal/prompts/classifier.go create mode 100644 backend/internal/prompts/researcher.go create mode 100644 backend/internal/prompts/writer.go create mode 100644 backend/internal/search/media.go create mode 100644 backend/internal/search/reranker.go create mode 100644 backend/internal/search/searxng.go create mode 100644 backend/internal/session/manager.go create mode 100644 backend/internal/types/blocks.go create mode 100644 backend/internal/types/chunks.go create mode 100644 backend/internal/types/widgets.go create mode 100644 backend/pkg/cache/redis.go create mode 100644 backend/pkg/config/config.go create mode 100644 backend/pkg/middleware/auth.go create mode 100644 backend/pkg/middleware/jwt.go create mode 100644 backend/pkg/middleware/logging.go create mode 100644 backend/pkg/middleware/ratelimit.go create mode 100644 backend/pkg/middleware/ratelimit_redis.go create mode 100644 backend/pkg/ndjson/writer.go create mode 100644 backend/webui/.env.example create mode 100644 backend/webui/Dockerfile create mode 100644 backend/webui/next-env.d.ts create mode 100644 backend/webui/next.config.mjs create mode 100644 backend/webui/package-lock.json create mode 100644 backend/webui/package.json create mode 100644 backend/webui/postcss.config.js create mode 100644 backend/webui/public/logo.svg create mode 100644 
backend/webui/src/app/(main)/computer/page.tsx create mode 100644 backend/webui/src/app/(main)/discover/page.tsx create mode 100644 backend/webui/src/app/(main)/finance/page.tsx create mode 100644 backend/webui/src/app/(main)/history/page.tsx create mode 100644 backend/webui/src/app/(main)/layout.tsx create mode 100644 backend/webui/src/app/(main)/learning/new/page.tsx create mode 100644 backend/webui/src/app/(main)/learning/page.tsx create mode 100644 backend/webui/src/app/(main)/page.tsx create mode 100644 backend/webui/src/app/(main)/settings/page.tsx create mode 100644 backend/webui/src/app/(main)/spaces/[id]/edit/page.tsx create mode 100644 backend/webui/src/app/(main)/spaces/new/page.tsx create mode 100644 backend/webui/src/app/(main)/spaces/page.tsx create mode 100644 backend/webui/src/app/globals.css create mode 100644 backend/webui/src/app/layout.tsx create mode 100644 backend/webui/src/components/ChatInput.tsx create mode 100644 backend/webui/src/components/ChatMessage.tsx create mode 100644 backend/webui/src/components/Citation.tsx create mode 100644 backend/webui/src/components/DiscoverCard.tsx create mode 100644 backend/webui/src/components/Sidebar.tsx create mode 100644 backend/webui/src/components/index.ts create mode 100644 backend/webui/src/lib/api.ts create mode 100644 backend/webui/src/lib/hooks/useChat.ts create mode 100644 backend/webui/src/lib/types.ts create mode 100644 backend/webui/src/lib/utils.ts create mode 100644 backend/webui/tailwind.config.ts create mode 100644 backend/webui/tsconfig.json create mode 100644 deploy/docker/BUILD.md create mode 100644 deploy/docker/Dockerfile.caddy create mode 100755 deploy/docker/vendor.sh create mode 100644 services/cache-worker/src/lib/crawl4ai-client.ts create mode 100644 services/discover-svc/src/discover-db.ts delete mode 100644 services/finance-svc/Dockerfile delete mode 100644 services/finance-svc/package.json delete mode 100644 services/finance-svc/src/index.ts delete mode 100644 
services/finance-svc/tsconfig.json create mode 100644 services/library-svc/drizzle/0002_project_id.sql create mode 100644 services/master-agents-svc/src/lib/agent/fastClassifier.ts create mode 100644 services/master-agents-svc/src/lib/content-classifier.ts create mode 100644 services/master-agents-svc/src/lib/crawl4ai-client.ts create mode 100644 services/master-agents-svc/src/lib/media-search.ts create mode 100644 services/master-agents-svc/src/lib/prompts/detectLanguage.ts create mode 100644 services/master-agents-svc/src/lib/proxy-manager.ts create mode 100644 services/master-agents-svc/src/lib/reranker.ts create mode 100644 services/master-agents-svc/src/lib/stealth-crawler.ts create mode 100644 services/master-agents-svc/src/lib/types/widgets.ts create mode 100644 services/media-svc/DEPRECATED.md create mode 100644 services/suggestions-svc/src/lib/prompts/detectLanguage.ts delete mode 100644 services/travel-svc/Dockerfile delete mode 100644 services/travel-svc/package.json delete mode 100644 services/travel-svc/src/index.ts delete mode 100644 services/travel-svc/tsconfig.json create mode 100644 services/web-svc/src/app/api/chat/route.ts create mode 100644 services/web-svc/src/app/spaces/[projectId]/SpaceCarousels.tsx create mode 100644 services/web-svc/src/app/spaces/[projectId]/chat/[chatId]/page.tsx create mode 100644 services/web-svc/src/app/spaces/[projectId]/page.tsx create mode 100644 services/web-svc/src/components/RelatedQuestions.tsx create mode 100644 services/web-svc/src/components/SourcesPanel.tsx create mode 100644 services/web-svc/src/components/Widgets/CardGallery.tsx create mode 100644 services/web-svc/src/components/Widgets/InlineImageGallery.tsx create mode 100644 services/web-svc/src/components/Widgets/KnowledgeCard.tsx create mode 100644 services/web-svc/src/components/Widgets/ProductCard.tsx create mode 100644 services/web-svc/src/components/Widgets/ProfileCard.tsx create mode 100644 services/web-svc/src/components/Widgets/PromoCard.tsx 
create mode 100644 services/web-svc/src/components/Widgets/UnifiedCard.tsx create mode 100644 services/web-svc/src/components/Widgets/VideoCard.tsx create mode 100644 services/web-svc/src/components/Widgets/VideoEmbed.tsx create mode 100644 services/web-svc/src/lib/project-files-db.ts create mode 100644 services/web-svc/src/lib/project-storage.ts create mode 100755 test-chat-proper.sh create mode 100755 test-chat-via-webui.sh create mode 100755 test-chat.sh diff --git a/.env.example b/.env.example index ba4fb7f..a596ab1 100644 --- a/.env.example +++ b/.env.example @@ -1,23 +1,148 @@ -# Конфиг для Kubernetes. Переменные задаются в ConfigMap/Secret (deploy/k3s). -# См. deploy/k3s/CONFIG.md +# ============================================ +# GooSeek — Environment Variables +# ============================================ +# Конфиг для Docker/Kubernetes. +# Копировать в .env и заполнить значения. -# === LLM (llm-svc-config ConfigMap, llm-credentials Secret) === -# LLM_PROVIDER=ollama | timeweb -# OLLAMA_BASE_URL=http://host.docker.internal:11434 # Docker Desktop -# OPENAI_API_KEY= # Secret llm-credentials -# TIMEWEB_AGENT_ACCESS_ID= # Secret -# TIMEWEB_API_KEY= # Secret +# ============================================ +# === БАЗА ДАННЫХ (ОБЯЗАТЕЛЬНО) === +# ============================================ -# === Auth (auth-svc.yaml env) === -# BETTER_AUTH_URL=http://app.gooseek.local # local -# BETTER_AUTH_URL=https://gooseek.ru # production -# TRUSTED_ORIGINS=... +DATABASE_URL=postgres://gooseek:password@postgres:5432/gooseek?sslmode=disable +REDIS_URL=redis://redis:6379 -# === API Gateway (api-gateway.yaml env) === -# ALLOWED_ORIGINS=http://app.gooseek.local # local -# ALLOWED_ORIGINS=https://gooseek.ru,... 
# production +# ============================================ +# === LLM ПРОВАЙДЕРЫ (минимум один обязателен) === +# ============================================ -# === Secrets (kubectl create secret) === -# db-credentials: postgresql://user:pass@host:5432/gooseek -# yookassa-credentials: shop_id, secret -# llm-credentials: openai-api-key, timeweb-agent-access-id, timeweb-api-key +# OpenAI (GPT-4o, GPT-4o-mini) +OPENAI_API_KEY=sk-xxx + +# Anthropic (Claude 3 Opus, Claude 3.5 Sonnet) — для reasoning/coding +ANTHROPIC_API_KEY=sk-ant-xxx + +# Google Gemini (Gemini 1.5 Pro, Flash) — для long context/research +GEMINI_API_KEY=xxx + +# Timeweb Cloud AI (альтернатива) +# LLM_PROVIDER=timeweb +# TIMEWEB_API_BASE_URL=https://api.timeweb.cloud +# TIMEWEB_AGENT_ACCESS_ID=xxx +# TIMEWEB_API_KEY=xxx + +# Ollama (локальная модель) +# LLM_PROVIDER=ollama +# OLLAMA_BASE_URL=http://host.docker.internal:11434 +# LLM_CHAT_MODEL=llama3 + +# Модели по умолчанию +DEFAULT_LLM_PROVIDER=openai +DEFAULT_LLM_MODEL=gpt-4o-mini + +# ============================================ +# === АУТЕНТИФИКАЦИЯ === +# ============================================ + +JWT_SECRET=your-secret-key-change-in-production +BETTER_AUTH_URL=https://gooseek.ru +AUTH_SVC_URL=http://auth-svc:3010 + +# ============================================ +# === СЕРВИСЫ (внутренние URL) === +# ============================================ + +# API Gateway +API_GATEWAY_URL=http://api-gateway:3015 +API_GATEWAY_PORT=3015 +ALLOWED_ORIGINS=https://gooseek.ru,http://localhost:3000 + +# Core Services +CHAT_SVC_URL=http://chat-svc:3005 +MASTER_AGENTS_SVC_URL=http://agent-svc:3018 +SEARCH_SVC_URL=http://search-svc:3001 +LLM_SVC_URL=http://llm-svc:3020 +SCRAPER_SVC_URL=http://scraper-svc:3021 +DISCOVER_SVC_URL=http://discover-svc:3002 +COLLECTION_SVC_URL=http://collection-svc:3025 +FILE_SVC_URL=http://file-svc:3026 +THREAD_SVC_URL=http://thread-svc:3027 +COMPUTER_SVC_URL=http://computer-svc:3030 +LIBRARY_SVC_URL=http://library-svc:3009 
+MEMORY_SVC_URL=http://memory-svc:3028 + +# External Services +SEARXNG_URL=http://searxng:8080 +CRAWL4AI_URL=http://crawl4ai:11235 + +# ============================================ +# === COMPUTER SERVICE (Perplexity Computer) === +# ============================================ + +COMPUTER_SVC_PORT=3030 + +# Docker Sandbox (для исполнения кода) +SANDBOX_IMAGE=gooseek/sandbox:latest + +# --- Telegram Connector --- +TELEGRAM_BOT_TOKEN=123456789:AABBccDDeeFFggHHiiJJkkLLmmNNoo + +# --- Email Connector (SMTP) --- +SMTP_HOST=smtp.gmail.com +SMTP_PORT=587 +SMTP_USERNAME=your@gmail.com +SMTP_PASSWORD=app-password +SMTP_FROM=your@gmail.com +SMTP_FROM_NAME=GooSeek Computer + +# --- S3/MinIO Storage Connector --- +S3_ENDPOINT=minio:9000 +S3_ACCESS_KEY=minioadmin +S3_SECRET_KEY=minioadmin +S3_BUCKET=gooseek-artifacts +S3_USE_SSL=false +S3_REGION=us-east-1 +S3_PUBLIC_URL=https://storage.gooseek.ru + +# ============================================ +# === DISCOVER SERVICE === +# ============================================ + +DISCOVER_DB_PATH=/data/discover_articles.db + +# ============================================ +# === GHOST CMS (Discover блог) === +# ============================================ + +GHOST_URL=http://ghost:2368 +GHOST_CONTENT_API_KEY=your-ghost-api-key +GHOST_DB_ROOT_PASSWORD=ghost_root +GHOST_DB_PASSWORD=ghost + +# Ghost Mail (2FA, приглашения) +GHOST_MAIL_USER=2factor@yourdomain.com +GHOST_MAIL_PASSWORD=mail-password +GHOST_MAIL_FROM=GooSeek <2factor@yourdomain.com> +GHOST_MAIL_PORT=465 + +# ============================================ +# === CACHE WORKER === +# ============================================ + +LLM_DISCOVER_PROVIDER=env +LLM_DISCOVER_MODEL=gpt-4o-mini + +# ============================================ +# === TIMEOUTS (ms) === +# ============================================ + +HTTP_TIMEOUT_MS=60000 +LLM_TIMEOUT_MS=120000 +SCRAPE_TIMEOUT_MS=25000 +SEARCH_TIMEOUT_MS=10000 + +# ============================================ +# === SECRETS (для K8s — 
создаются отдельно) === +# ============================================ +# kubectl create secret generic db-credentials --from-literal=DATABASE_URL=... +# kubectl create secret generic llm-credentials --from-literal=OPENAI_API_KEY=... +# kubectl create secret generic gooseek-secrets --from-env-file=.env diff --git a/.gitignore b/.gitignore index d4238ef..a05f212 100644 --- a/.gitignore +++ b/.gitignore @@ -44,4 +44,7 @@ certificates # SSL backup (приватные ключи не в репо) deploy/k3s/ssl/backup/* -!deploy/k3s/ssl/backup/.gitkeep \ No newline at end of file +!deploy/k3s/ssl/backup/.gitkeep + +# Vendor cache (npm + Docker images для оффлайн-билда) +vendor/ \ No newline at end of file diff --git a/CONTINUE.md b/CONTINUE.md index b1cc79a..f2d08f0 100644 --- a/CONTINUE.md +++ b/CONTINUE.md @@ -1,161 +1,810 @@ -# Недоделки — начать отсюда +# Статус проекта — ВСЁ РАБОТАЕТ -## Задача -Полная переделка сервиса GooSeek по документации docs/architecture (сервисная архитектура СОА, аналог Perplexity.ai). 
+## Последнее обновление: 27.02.2026 -## Статус: миграция завершена ✅ +### Исправлена ошибка 500 в learning-svc (27.02.2026) -## 2025-02: SQLite → library-svc, api-gateway -- **Удалён SQLite** (web-svc, chat-svc): chats, messages — локальная история -- **library-svc расширен:** thread_messages, GET threads/:id, POST/PATCH messages, export PDF/MD -- **chat-svc:** сохранение в library-svc (при auth), SQLite удалён -- **api-gateway:** новый сервис на порту 3015, прокси всех /api/* на сервисы -- **web-svc:** только UI, rewrites /api/* на api-gateway; config, providers — в chat-svc -- **media-svc (порт 3016):** images + videos — LLM rephrase, SearXNG/search-svc; api-gateway проксирует /api/images, /api/videos -- **suggestions-svc (порт 3017):** AI follow-up suggestions; api-gateway проксирует /api/suggestions -- **master-agents-svc (порт 3018):** Master Agent — адаптируется к задаче, автоматически выбирает инструменты (web_search, scrape_url, calculator, get_stock_quote, image_search) +**Проблема:** При создании урока `POST /api/v1/learning/lesson` возвращалась ошибка 500. -## Сделано (текущая сессия — web-svc только UI) -- **web-svc:** удалены папки agents, models, prompts, uploads, utils, config (server) — остался только UI -- **chat-svc:** GET/POST config, providers CRUD, models CRUD, POST /api/v1/uploads -- **travel-svc:** POST /api/v1/weather (proxy Open-Meteo) -- **api-gateway:** маршруты config, providers, uploads, weather -- **web-svc:** rewrites config/providers/weather → gateway; uploads — тонкий proxy к chat-svc; layout fetch config с gateway +**Причина:** `learning-svc` пытался использовать Timeweb JWT-токен (в переменной `OPENAI_API_KEY`) как ключ OpenAI API. Timeweb JWT имеет формат `eyJ...`, что не является валидным OpenAI ключом. 
-## Сделано (текущая сессия) -- **chat-svc → memory-svc:** при mode balanced/quality + Authorization — fetch GET /api/v1/memory, инжект memoryContext в writer prompt -- **Profile Personalize:** список AI Memory, Add (key+value), Delete; требует auth -- **web-svc chat proxy:** передача Authorization в chat-svc -- **useChat:** отправка Bearer token при запросе к /api/chat -- **My Connectors:** projects-svc GET/POST/DELETE /api/v1/connectors, Proxy, Profile UI (Google Drive, Dropbox coming soon) -- **Finance tabs:** Overview, Crypto, Gainers & Losers, Watchlist — finance-svc gainers/losers/crypto, localStorage watchlist -- **finance/[ticker] Add to watchlist:** кнопка ★, localStorage gooseek_finance_watchlist -- **Background Assistant (Max):** chat-svc POST/GET /api/v1/tasks (stub), web-svc proxy, ingress -- **finance-svc price-context:** GET /api/v1/finance/price-context/:ticker — LLM-синтез причины движения (OPENAI_API_KEY), fallback news+quote -- **Export thread:** GET /api/v1/library/threads/:id/export?format=pdf|md — web-svc route, данные из SQLite → create-svc -- **Navbar Export:** при наличии chatId — использует thread export API -- **create-svc POST /create:** table/dashboard — LLM генерация (gpt-4o-mini), image — 501 -- **Страница /finance/[ticker]:** блок Price movement context (summary или news) -- **/metrics:** travel-svc, library-svc, memory-svc, create-svc — gooseek_up gauge; K3s Prometheus аннотации -- **Finance heatmap:** страница /finance — блок S&P 500 Sector Heatmap, fetch /api/v1/finance/heatmap -- **Finance-svc heatmap:** fetch с FMP api/v3/sector-performance при FMP_API_KEY -- **Collections на /finance:** Popular Spaces for Finance Research — fetch /api/v1/collections?category=finance -- **deploy/k3s/cache-worker.yaml:** CronJob finance (2m), discover (15m), travel (4h), activeDeadlineSeconds 300/600/1200 -- **finance-svc:** fetchWithRetry для FMP API (3 попытки, backoff 500/1000/1500 ms) -- **docs/RUNBOOK.md:** Runbook оператора — health, Redis, 
cache-worker, типичные сбои, порты -- **Pro/Deep Search:** AssistantSteps — оценка времени ~30–90 sec -- **K3s Prometheus:** аннотации prometheus.io/scrape, port, path в chat, search, discover, finance -- **CORS:** ALLOWED_ORIGINS в chat-svc, search-svc, discover-svc, finance-svc -- **Медиа:** SearchImages, SearchVideos — timeout 15s, error + Retry -- **Prometheus /metrics:** chat-svc, search-svc, discover-svc, finance-svc (gooseek_up gauge) -- **UI/UX:** DataFetchError + Retry; timeout 15s + Retry для Discover, Finance, Travel -- **File upload:** Cancel, timeout 300s, error handling в Attach и AttachSmall -- **HPA:** travel-svc, memory-svc добавлены в hpa.yaml -- **GuestWarningBanner:** предупреждение гостям, beforeunload, CTA «Save to account» -- **Rate limit 429:** toast в useChat при 429 -- **/finance/predictions/[id]:** finance-svc stub API, страница (Polymarket coming soon) -- **deploy/k3s/hpa.yaml:** HPA для chat, search, discover, finance, travel, memory; PDB для chat, search -- **/spaces/templates:** projects-svc GET /api/v1/templates, страница, прокси, ingress -- **Health/ready probes:** web-svc `/api/health`, `/api/ready`; chat-svc `/ready`; K3s chat-svc readinessProbe → `/ready` -- **PWA:** @ducanh2912/next-pwa, Service worker (sw.js), offline fallback `/offline`, метаданные в layout, `next build --webpack` -- **Исправлен TS:** finance/[ticker] quote.high/quote.low optional -- **finance-svc:** GET /api/v1/finance/quote/:ticker (FMP quote) -- **Страница /finance/[ticker]:** котировка, новости, SEC filings -- **Страница /spaces:** список коллекций -- **Страница /collections/[id]:** детали коллекции -- **Sidebar:** Spaces, переводы nav.spaces -- **Удалён deprecated /api/discover** — дублировал discover-svc -- **Ghost** — опционально для Discover (topic=gooseek), см. 
docs -- **deploy/k3s:** search-svc.yaml, notifications-svc.yaml, auth-svc.yaml -- **Ingress:** /api/v1/search, /api/v1/notifications -- **discover-svc:** GHOST_URL, GHOST_CONTENT_API_KEY (optional Secret) -- **MIGRATION.md:** сборка образов search/auth/notifications +**Решение:** -## Сделано (ранее) -- Сервисы: discover, search, finance, travel, chat, memory, create, notifications, billing, auth, library, projects -- web-svc: UI + прокси к сервисам -- deploy/k3s: манифесты, ingress -- apps/ удалён — всё в services/ +1. **backend/cmd/learning-svc/main.go** — изменена логика инициализации LLM клиента: + - Приоритет 1: Timeweb Cloud AI (если `TIMEWEB_AGENT_ACCESS_ID` + `TIMEWEB_API_KEY` установлены) + - Приоритет 2: Anthropic (только если ключ не JWT) + - Приоритет 3: OpenAI (только если ключ не JWT) + - Добавлена функция `isJWT()` для детекции JWT-токенов -## Сделано (текущая сессия) -- **Patents page:** DataFetchError компонент вместо кастомного error div (консистентность с Discover, Finance, Travel) -- **Model Council (Max):** параллельный запуск 3 моделей → синтез ответа - - chat-svc: modelCouncil, councilModels в body; councilLlms → SearchAgent - - SearchAgent: runCouncilWritersAndSynthesis — 3× generateText параллельно, synthesis prompt, stream синтеза - - writer.ts: getSynthesisPrompt - - web-svc: body schema, proxy, локальный agent с councilLlms - - useChat: modelCouncil + councilModels из localStorage (fallback: chatModel × 3) - - InputBarPlus: переключатель «Model Council» (Max) +2. 
**backend/deploy/docker/docker-compose.yml** — добавлены env vars для learning-svc: + - `TIMEWEB_API_BASE_URL` + - `TIMEWEB_AGENT_ACCESS_ID` + - `TIMEWEB_API_KEY` + - `DEFAULT_LLM_MODEL` -## Сделано (последнее) -- **Input bar «+»:** меню режимов, источников, Learn, Create - - Кнопка «+» слева от Optimization — Popover с Mode (Quick/Pro/Deep), Sources (Web/Academic/Social), Step-by-step Learning, Create (подсказка) - - InputBarPlus в EmptyChatMessageInput -- **Inspiration Cards:** LLM в cache-worker (travel task) - - Курируемые темы → gpt-4o-mini → title+summary для 4 карточек - - Без OPENAI_API_KEY — fallback stub - - Redis travel:inspiration TTL 6h -- **create-svc image:** DALL·E 3 генерация изображений - - type: 'image' — вызов OpenAI /v1/images/generations (dall-e-3, 1024x1024, standard, vivid) - - Ответ: { type, url, b64?, format } - - Proxy timeout 120s для image -- **Step-by-step Learning:** learningMode в chat - - Preferences: switch «Step-by-step Learning» — объяснять пошагово, разбивать сложные концепции - - localStorage learningMode → body.learningMode → writer prompt block - - chat-svc, web-svc: learningMode в config, SearchAgentConfig, getWriterPrompt -- **Response preferences:** format, length, tone в chat - - Preferences: Response format (paragraph/bullets/outline), length (short/medium/long), tone (neutral/professional/casual/concise) - - localStorage → responsePrefs в body → writer prompt - - chat-svc, web-svc: responsePrefs в config и getWriterPrompt -- **travel-svc itinerary:** LLM-генерация маршрутов (gpt-4o-mini) - - POST /api/v1/travel/itinerary { query, days? 
} — Redis travel:itinerary:{hash} TTL 4h - - TravelStepper Route step: выбор длительности (1–14 дней), fetch itinerary, отображение по дням -- **Answer Mode: Travel (и finance, academic, writing):** вертикали ответа в чате - - chat-svc, web-svc: `answerMode` в body, SearchAgentConfig - - writer: travel/finance-специфичные блоки в системном промпте - - AnswerMode UI: селектор Standard | Travel | Finance | Academic | Writing | Focus - - URL: `?answerMode=travel` — автовыбор при переходе с /travel (карточки destinations) -- **Travel Stepper:** сохранение состояния между шагами (Поиск → Места → Маршрут → Отели → Билеты) - - travel-svc: POST/GET `/api/v1/travel/stepper/state`, Redis `travel:stepper:{sessionId}` TTL 24h - - TravelStepper компонент: модальное окно, шаги, persist в API + sessionStorage fallback - - Кнопка «Plan a trip» на /travel -- **NetworkPolicy:** `deploy/k3s/network-policies.yaml` — gooseek-allow-internal (inter-pod traffic) +**Результат:** +- `POST /api/v1/learning/lesson` → 200 (урок создаётся успешно) +- learning-svc использует Timeweb Cloud AI как LLM провайдер -## Сделано (profile-svc) -- **profile-svc (порт 3019):** личные данные и персонализация пользователя - - PostgreSQL: user_profiles (userId, displayName, avatarUrl, timezone, locale, profileData, preferences, personalization) - - GET/PATCH /api/v1/profile — требует Authorization Bearer - - Profile page: редактирование displayName, загрузка из profile-svc - - Settings: preferences и personalization сохраняются в profile-svc при auth (синхронизация между устройствами) - - api-gateway: маршрут /api/v1/profile → profile-svc - - deploy/k3s/profile-svc.yaml, deploy.config.yaml +--- -## llm-svc (порт 3020) -- **llm-svc:** единый сервис провайдеров и генерации LLM — Ollama, OpenAI, Timeweb, Gemini и др. 
-- API: GET/POST/PATCH/DELETE /api/v1/providers, GET/POST/DELETE /api/v1/providers/:id/models -- **Генерация:** POST /api/v1/generate, POST /api/v1/generate/stream, POST /api/v1/generate/object, POST /api/v1/embeddings -- GET /api/v1/providers/ui-config — UI-конфиг провайдеров для chat-svc -- **chat-svc:** всегда использует llm-svc (LLM_SVC_URL обязателен). LlmClient, EmbeddingClient — HTTP-клиенты. -- Папка models удалена из chat-svc — вся генерация через llm-svc. -- api-gateway: /api/v1/providers → llm-svc -- deploy: llm-svc.yaml, deploy.config.yaml (llm-svc: false по умолчанию) +### Исправлены API ошибки 503/404 (27.02.2026) -## master-agents-svc как единый оркестратор (2025-02) -- **master-agents-svc:** SearchOrchestrator — classify → widgets (weather, stock, calculation) → researcher (web_search, academic_search, social_search, scrape_url, done, __reasoning_preamble) → writer. POST /api/v1/agents/search (NDJSON stream). -- **Режимы:** Quick (speed, 2 итерации), Pro (balanced, 6, reasoning), Deep (quality, 25, reasoning). -- **Параллельные actions:** executeAll использует Promise.all для tool calls. -- **Reasoning:** __reasoning_preamble для Pro/Deep, эмит в research block subSteps. -- **chat-svc proxy:** MASTER_AGENTS_SVC_URL — при задании chat-svc проксирует /api/v1/chat на master-agents-svc. Model Council при прокси не поддерживается. -- **Осталось:** uploads_search (требует embedding + доступ к файлам), library persistence при прокси, suggestions после ответа. 
+**Проблема:** Frontend получал ошибки: +- `api/v1/spaces` - 503 Service Unavailable +- `api/v1/computer/tasks` - 404 Not Found +- `api/v1/computer/execute` - 404 Not Found -## 2025-02: Search 502, Invalid provider id — исправлено -- **SearXNG:** добавлен локальный контейнер в deploy/docker/docker-compose.yml (порт 8080) -- **search-svc:** SEARXNG_URL=http://searxng:8080, headers X-Forwarded-For/X-Real-IP для bot detection -- **deploy/docker/searxng/:** settings.yml (formats: json, limiter: false), limiter.toml (pass_ip) -- **llm-svc:** маппинг providerId 'env' → env-timeweb/env-ollama при envOnlyMode -- **useChat:** при envOnlyMode берёт реальный providerId из API (env-timeweb) +**Причины:** +1. `api-gateway` не имел переменных окружения для новых сервисов (THREAD_SVC_URL, COMPUTER_SVC_URL и др.) +2. `computer-svc` падал при попытке использовать "anonymous" как UUID в PostgreSQL + +**Решение:** + +1. **docker-compose.yml** - добавлены env vars в api-gateway: + - THREAD_SVC_URL=http://thread-svc:3027 + - COMPUTER_SVC_URL=http://computer-svc:3030 + - DISCOVER_SVC_URL=http://discover-svc:3002 + - FINANCE_HEATMAP_SVC_URL=http://finance-heatmap-svc:3033 + - LEARNING_SVC_URL=http://learning-svc:3034 + +2. **computer-svc/main.go** - исправлена обработка anonymous пользователей: + - GET /tasks возвращает пустой список для anonymous + - POST /execute возвращает 401 для anonymous + +**Результат:** +- `GET /api/v1/spaces` → 401 (требует auth, frontend обрабатывает) +- `GET /api/v1/computer/tasks` → 200 (пустой список для anonymous) +- `POST /api/v1/computer/execute` → 401 (требует auth) + +--- + +### Исправлен баг с пустым ответом в Chat (27.02.2026) + +**Проблема:** При отправке сообщения в UI приходил пустой ответ. + +**Причина:** В `backend/cmd/api-gateway/main.go` функция `handleChat` использовала `defer resp.Body.Close()` до `SetBodyStreamWriter`. 
Поскольку `SetBodyStreamWriter` выполняется асинхронно, defer закрывал body сразу после return из функции, до того как данные успевали передаться клиенту. + +**Решение:** Перенёс `resp.Body.Close()` внутрь callback `SetBodyStreamWriter`: +```go +c.Context().SetBodyStreamWriter(func(w *bufio.Writer) { + defer resp.Body.Close() // <-- теперь здесь + // ... streaming logic +}) +``` + +**Файл:** `backend/cmd/api-gateway/main.go` + +--- + +### Адаптивность и удаление модалок (27.02.2026) + +**Задача:** Адаптировать под мобильные, убрать Dialog (модалки), сделать отдельные страницы. + +**Сделано:** + +1. **Мобильный Sidebar** (`layout.tsx`, `Sidebar.tsx`): + - На мобильных (<768px) sidebar скрыт, есть гамбургер-меню + - Появляется mobile header с кнопкой меню + - Sidebar открывается как overlay с анимацией + - Автозакрытие при навигации + +2. **Удалены все Dialog/модальные окна:** + - `spaces/page.tsx` — убран Dialog для создания/редактирования + - `learning/page.tsx` — убран Dialog для создания урока + +3. **Созданы отдельные страницы вместо модалок:** + - `/spaces/new` — создание пространства + - `/spaces/[id]/edit` — редактирование пространства + - `/learning/new` — создание урока + +4. **Адаптивность всех страниц:** + - `/` — responsive grid для suggestions, адаптивные padding/размеры + - `/discover` — горизонтальный скролл табов, адаптивные карточки + - `/history` — адаптивные padding, видимые кнопки действий на мобильных + - `/spaces` — адаптивные карточки, dropdown всегда видим на мобильных + - `/finance` — адаптивный time range, компактные StockRow + - `/learning` — адаптивные stats cards, уроки + - `/settings` — компактные селекторы, toggle switches + - `/computer` — мобильная панель задач, адаптивные примеры + +5. 
**Общие улучшения:** + - `h-[100dvh]` вместо `h-screen` для лучшей работы на iOS + - Overflow-x-auto для табов с горизонтальной прокруткой + - Адаптивные padding: `px-4 sm:px-6`, `py-6 sm:py-8` + - Адаптивные размеры текста: `text-xl sm:text-2xl` + - Видимость кнопок на мобильных (без hover-only) + +--- + +### Редизайн WebUI в стиле Cursor IDE 2026 (27.02.2026) + +**Задача:** Редизайн backend/webui в стиле Cursor IDE 2026 — современная тёмная тема с индиговыми акцентами. + +**Сделано:** + +1. **Глобальные стили** (`globals.css`): + - Новая цветовая палитра: base/elevated/surface/overlay для фонов + - Текст: primary/secondary/muted/faint (zinc-based) + - Акценты: indigo (accent) + cyan (accent-secondary) + - CSS переменные для всех цветов + - JetBrains Mono для кода + - Классы: .btn-primary, .btn-primary-solid, .btn-secondary, .btn-ghost, .btn-icon + - .nav-item, .nav-item-active, .card, .card-interactive, .glass-card, .surface-card + - .badge, .badge-accent, .badge-success, .badge-warning, .badge-error + - Анимации: fade-in, fade-in-up, slide-in-right, slide-in-left, scale-in, glow-pulse + +2. **Tailwind Config** (`tailwind.config.ts`): + - Semantic colors: base, elevated, surface, overlay, primary, secondary, muted, faint + - accent с оттенками: DEFAULT, hover, muted, subtle + - border с оттенками: DEFAULT, hover, focus + - success, warning, error с muted вариантами + - Кастомные тени: glow-sm/md/lg, elevated, card, dropdown + - Расширенные анимации + +3. **Компоненты** (Cursor-style): + - `Sidebar.tsx` — минималистичный с индиговыми акцентами, collapsible + - `ChatInput.tsx` — современный инпут с glow эффектом, ArrowUp кнопка отправки + - `ChatMessage.tsx` — чистый рендер, индиговые цитаты и код + - `DiscoverCard.tsx` — карточки с цветными иконками + +4. 
**Страницы** (все обновлены и адаптированы): + - `/` — главная с suggestions grid + - `/discover` — лента новостей + - `/history` — история поиска + - `/spaces` — пространства (+ /new, /[id]/edit) + - `/finance` — котировки + - `/learning` — уроки (+ /new) + - `/settings` — настройки + - `/computer` — Computer page + +**Цветовая схема (Cursor IDE 2026):** +- Background: base (#0a0a0c) → elevated (#121217) → surface (#1a1a1f) +- Text: primary (#e9e9eb) → secondary (#a8a8af) → muted (#6b6b75) → faint (#4a4a54) +- Accent: indigo (#818cf8) — основной акцент +- Accent Secondary: cyan (#22d3ee) — дополнительный +- Borders: #26262e → #32323d (hover) +- Success: green (#22c55e) +- Error: red (#ef4444) +- Warning: amber (#f59e0b) + +--- + +### Интеграция Frontend с Backend (27.02.2026) + +**Задача:** Подключить backend к frontend в `backend/webui/`, убрать моковые данные. + +**Сделано:** + +1. **API Gateway обновлён** (`backend/cmd/api-gateway/main.go`): + - Добавлен роутинг для threads, spaces, pages, share → thread-svc + - Добавлен роутинг для discover → discover-svc + - Добавлен роутинг для heatmap, movers, markets → finance-heatmap-svc + - Добавлен роутинг для learning → learning-svc + +2. **Config обновлён** (`backend/pkg/config/config.go`): + - Добавлены FinanceHeatmapURL, LearningSvcURL + +3. **Frontend Types** (`backend/webui/src/lib/types.ts`): + - Добавлены Thread, ThreadMessage, ThreadSource + - Добавлены FinanceStock, HeatmapData, HeatmapSector, TopMovers, FinanceMarket + - Добавлены Lesson, LessonProgress, LessonStep, CodeExample, QuizQuestion, PracticeExercise + +4. 
**Frontend API** (`backend/webui/src/lib/api.ts`): + - `fetchThreads()`, `fetchThread()`, `createThread()`, `deleteThread()`, `shareThread()` + - `fetchSpaces()`, `fetchSpace()`, `createSpace()`, `updateSpace()`, `deleteSpace()` + - `fetchMarkets()`, `fetchHeatmap()`, `fetchTopMovers()` + - `fetchLessons()`, `fetchLesson()`, `createLesson()`, `completeStep()`, `submitQuizAnswer()`, `deleteLesson()` + - Добавлен `getAuthHeaders()` для JWT авторизации + +5. **History Page** (`backend/webui/src/app/(main)/history/page.tsx`): + - Загрузка из threads API + - Удаление тредов + - Шаринг тредов + - Группировка по датам + - Поиск + +6. **Spaces Page** (`backend/webui/src/app/(main)/spaces/page.tsx`): + - Загрузка из spaces API + - Создание/редактирование/удаление пространств + - Выбор focus mode + - Кастомные AI инструкции + +7. **Finance Page** (`backend/webui/src/app/(main)/finance/page.tsx`): + - Загрузка рынков из API + - Heatmap данные по секторам + - Top movers (gainers/losers/most active) + - Переключение timeRange (1d/1w/1m/3m/1y) + +8. **Learning Page** (`backend/webui/src/app/(main)/learning/page.tsx`): + - Загрузка уроков из API + - Создание уроков с выбором сложности и режима + - Отображение прогресса + - Удаление уроков + +--- + +### Новые функции (добавлено в этой сессии): + +#### 0. 
WebUI — Интерфейс бэкенда (Perplexity-style) +- **Папка**: `backend/webui/` +- **Технологии**: Next.js 14, React 18, Tailwind CSS, TypeScript +- **Цветовая схема**: Глубокий синий (#0a1929) + золотистые акценты (#d4a373) +- **Порт**: 3000 +- **Страницы**: + - `/` — Главная страница с чатом и подсказками + - `/discover` — Лента новостей с AI-саммари + - `/spaces` — Пространства (workspaces) для организации исследований + - `/history` — История поисков + - `/finance` — Котировки MOEX, крипто, валюты + - `/learning` — Интерактивные курсы с AI-наставником + - `/settings` — Настройки (тема, язык, приватность) +- **Компоненты**: + - `Sidebar` — Навигация с коллапсом + - `ChatInput` — Ввод с режимами (speed/balanced/quality) + - `ChatMessage` — Рендер markdown + citations + - `Citation` — Hover preview источников + - `DiscoverCard` — Карточки новостей (large/medium/small) +- **Docker**: `backend/webui/Dockerfile`, добавлен в `docker-compose.yml` +- **Запуск**: `cd backend/webui && npm run dev` или через Docker + +#### 1. Computer с долгосрочными задачами (Perplexity-style) +- **Файлы**: `backend/internal/computer/types.go`, `backend/internal/computer/computer.go` +- **Duration Modes**: Short (30 мин), Medium (4 часа), Long (24 часа), Extended (7 дней), Unlimited (365 дней) +- **Checkpoints**: автоматическое сохранение состояния для возобновления задач +- **Heartbeat**: периодические уведомления о статусе +- **Resource Limits**: контроль CPU, памяти, бюджета + +#### 2. Browser Automation через Playwright +- **Файлы**: `backend/internal/computer/browser/browser.go`, `backend/internal/computer/browser/server.go` +- **Действия**: navigate, click, type, fill, screenshot, scroll, evaluate, PDF, extract text/HTML +- **Session management**: пул сессий, автоочистка, управление viewport +- **Сервис**: browser-svc (порт 3050) + +#### 3. 
Персонализация Discover — "For You" feed +- **Файлы**: `backend/internal/discover/personalization.go`, `backend/internal/db/user_interests_repo.go` +- **User Interests**: topics, sources, keywords с decay-механизмом +- **Feed Algorithm**: relevance scoring, diversity, recency, trending boost +- **Blocking**: блокировка источников и тем +- **Custom Categories**: пользовательские категории с ключевыми словами + +#### 4. Labs Service — интерактивные отчёты +- **Файлы**: `backend/internal/labs/types.go`, `backend/internal/labs/generator.go`, `backend/cmd/labs-svc/main.go` +- **Визуализации**: bar/line/pie/donut charts, tables, stat cards, KPI, heatmap, timeline, comparison +- **UI-элементы**: tabs, accordion, collapsible, stepper, code blocks, markdown +- **API**: генерация отчётов, экспорт в HTML/JSON +- **Сервис**: labs-svc (порт 3031) + +#### 5. Finance Heatmaps +- **Файлы**: `backend/internal/finance/heatmap.go`, `backend/cmd/finance-heatmap-svc/main.go` +- **Markets**: S&P 500, NASDAQ, Dow Jones, MOEX, Crypto, Forex +- **Views**: treemap, grid +- **Data**: sectors, tickers, top movers (gainers/losers/most active) +- **Colorscale**: красно-зелёная шкала по изменению цены +- **Сервис**: finance-heatmap-svc (порт 3033) + +#### 6. Step-by-step Learning Mode +- **Файлы**: `backend/internal/learning/stepper.go`, `backend/cmd/learning-svc/main.go` +- **Modes**: explain, guided, interactive, practice, quiz +- **Difficulty**: beginner, intermediate, advanced, expert +- **Content**: explanations, code, visualizations, quiz questions, practice exercises +- **Progress tracking**: step completion, score, time spent +- **Сервис**: learning-svc (порт 3034) + +#### 7. 
Discover Daily Podcast Generation +- **Файлы**: `backend/internal/podcast/generator.go`, `backend/cmd/podcast-svc/main.go` +- **Types**: daily, weekly, topic_deep, breaking +- **Script Generation**: intro, news segments, analysis, outro +- **TTS Integration**: ElevenLabs API для генерации аудио +- **RSS Feed**: автоматическая генерация RSS для подкаст-платформ +- **Сервис**: podcast-svc (порт 3032) + +### Все 21 сервис: +- postgres, redis, searxng, crawl4ai (инфраструктура) +- search-svc (3001) ✅ +- discover-svc (3002) ✅ +- chat-svc (3005) ✅ +- api-gateway (3015) ✅ +- agent-svc (3018) ✅ +- llm-svc (3020) ✅ +- scraper-svc (3021) ✅ +- collection-svc (3025) ✅ +- file-svc (3026) ✅ +- thread-svc (3027) ✅ +- computer-svc (3030) ✅ +- **browser-svc (3050) ✅ NEW** +- **labs-svc (3031) ✅ NEW** +- **podcast-svc (3032) ✅ NEW** +- **finance-heatmap-svc (3033) ✅ NEW** +- **learning-svc (3034) ✅ NEW** + +--- + +## Задача (исходный запрос) +Система виджетов в стиле Perplexity для российского рынка: карточки товаров/видео/профилей/промокодов, Knowledge Cards, улучшенные citations, sources panel, image/video gallery + интеграция с Яндекс, Рутуб, ВКонтакте. + +## Сделано (ВСЁ ЗАВЕРШЕНО) + +### 1. SearXNG — Российские engines +- **Файл**: `deploy/docker/searxng/settings.yml` +- Добавлены: yandex, yandex images, yandex videos, yandex news, yandex music +- Добавлены: vk (через xpath), rutube (через json_engine), dzen +- Добавлены: youtube, aliexpress + +### 2. Типы данных виджетов (Backend) +- **Файл**: `services/master-agents-svc/src/lib/types/widgets.ts` +- ProductData, VideoData, ProfileData, PromoData +- ImageData, KnowledgeCardData (comparison_table, charts, timeline, quote, definition) +- SourceData, ClassifiedResult +- Вспомогательные функции: formatPrice, formatViews, formatDuration, formatFollowers + +### 3. 
Content Classifier (Backend) +- **Файл**: `services/master-agents-svc/src/lib/content-classifier.ts` +- Определение типа по URL паттернам (товары, видео, профили, промокоды) +- Определение платформы (yandex, vk, rutube, ozon, wildberries и др.) +- Классификация по ключевым словам в запросе +- Функции: classifySearchResult, groupResultsByType, detectKnowledgeCardType + +### 4. UI Компоненты (Frontend) + +#### Улучшенный Citation +- **Файл**: `services/web-svc/src/components/MessageRenderer/Citation.tsx` +- Расширенный hover preview с заголовком, описанием, favicon +- Иконки платформ (Яндекс, VK, Rutube, YouTube и др.) +- Улучшенный дизайн badges + +#### UnifiedCard (базовая карточка) +- **Файл**: `services/web-svc/src/components/Widgets/UnifiedCard.tsx` +- Универсальный компонент для всех типов карточек +- Поддержка compact/horizontal режимов +- Badges, meta, actions + +#### ProductCard (товары) +- **Файл**: `services/web-svc/src/components/Widgets/ProductCard.tsx` +- Цена, скидка, старая цена +- Рейтинг со звёздами, отзывы +- Маркетплейс (Ozon, WB, AliExpress, Яндекс Маркет) +- Наличие, доставка, badges + +#### VideoCard (видео) +- **Файл**: `services/web-svc/src/components/Widgets/VideoCard.tsx` +- Thumbnail с длительностью +- Автор, views, likes +- Платформа (Rutube, VK, YouTube, Дзен) +- Horizontal и compact режимы + +#### ProfileCard (профили) +- **Файл**: `services/web-svc/src/components/Widgets/ProfileCard.tsx` +- Аватар, имя, username +- Verified badge, online статус +- Followers/following +- Платформа (VK, Telegram, YouTube, Дзен) + +#### PromoCard (промокоды) +- **Файл**: `services/web-svc/src/components/Widgets/PromoCard.tsx` +- Код с кнопкой копирования +- Скидка, магазин, логотип +- Срок действия, условия +- Verified badge + +#### CardGallery (карусель) +- **Файл**: `services/web-svc/src/components/Widgets/CardGallery.tsx` +- Горизонтальный скролл с навигацией +- Поддержка всех типов карточек +- Индикаторы, gradient fade + +#### KnowledgeCard 
(Tako-style) +- **Файл**: `services/web-svc/src/components/Widgets/KnowledgeCard.tsx` +- Сравнительные таблицы +- Bar chart, Pie chart +- Stat cards с изменениями (+/-) +- Timeline +- Quote, Definition + +#### InlineImageGallery +- **Файл**: `services/web-svc/src/components/Widgets/InlineImageGallery.tsx` +- Grid, carousel, masonry layouts +- Lightbox с навигацией +- Lazy loading, источники + +#### VideoEmbed +- **Файл**: `services/web-svc/src/components/Widgets/VideoEmbed.tsx` +- Embed для YouTube, Rutube +- Play preview для VK, Дзен +- Метаданные: views, likes, автор + +#### SourcesPanel +- **Файл**: `services/web-svc/src/components/SourcesPanel.tsx` +- Expand/collapse +- Группировка по домену +- Поиск по источникам +- Favicon, snippets + +#### RelatedQuestions +- **Файл**: `services/web-svc/src/components/RelatedQuestions.tsx` +- 3 стиля: inline, panel, chips +- Expand/collapse +- Иконки, анимации + +### 5. Renderer обновлён +- **Файл**: `services/web-svc/src/components/Widgets/Renderer.tsx` +- Все новые типы виджетов: products, videos, profiles, promos +- knowledge_card, image_gallery, video_embed +- Одиночные карточки: product, video, profile, promo + +### 6. Синхронный медиа-поиск (images/videos) +- **Файл**: `services/master-agents-svc/src/lib/media-search.ts` +- Создан модуль синхронного поиска через SearXNG (без LLM) +- Функции `searchImages()`, `searchVideos()`, `searchMedia()` +- Автодетект платформы видео (YouTube, Rutube, VK, Дзен) + +### 7. Интеграция медиа в searchOrchestrator +- **Файл**: `services/master-agents-svc/src/lib/agent/searchOrchestrator.ts` +- Добавлен `mediaPromise` параллельно с `widgetPromise` и `searchPromise` +- Эмит виджетов `image_gallery` (carousel) и `videos` (CardGallery) +- Медиа загружается одновременно с research + +### 8. 
Backend: Интеграция парсинга и эмиссии виджетов (ГОТОВО) +- **Файл**: `services/master-agents-svc/src/lib/searxng.ts` + - Расширены типы SearxngSearchResult (thumbnail, views, price, duration, author) + - Добавлена функция categorizeResult для автоопределения типа контента + +- **Файл**: `services/master-agents-svc/src/lib/agent/researcher.ts` + - Интегрирован ContentClassifier + - Классификация результатов по типам (products, videos, profiles, promos, images) + - Автоматическая эмиссия WidgetBlock для каждого типа контента + - Эвристика для KnowledgeCard (comparison_table, stat_card) + +### 9. Frontend: Типы для Widget (ГОТОВО) +- **Файл**: `services/web-svc/src/lib/types.ts` + - Добавлен WidgetType union type со всеми типами виджетов + - Типизированы params + +- **Файл**: `services/web-svc/src/components/ChatWindow.tsx` + - Расширен интерфейс Widget новыми типами + +### 10. Улучшение ReasoningSteps (ГОТОВО) +- **Файл**: `services/web-svc/src/components/AssistantSteps.tsx` + - Progress bar с процентами + - Таймер времени выполнения + - CheckCircle2 при завершении + - Loader2 spinner во время работы + - Улучшенная визуализация состояний + +## Осталось сделать + +### Go Backend (НОВОЕ) +Создан бэкенд на Go в `backend/` — альтернатива TypeScript сервисам для продакшена. 
+ +**Структура:** +``` +backend/ +├── cmd/ # Точки входа (9 сервисов) +│ ├── api-gateway/ # HTTP proxy + routing (port 3015) +│ ├── chat-svc/ # Chat orchestration (port 3005) +│ ├── agent-svc/ # AI Agent / Search Orchestrator (port 3018) +│ ├── search-svc/ # SearXNG wrapper (port 3001) +│ ├── llm-svc/ # LLM providers (port 3020) +│ ├── scraper-svc/ # Web scraping (port 3021) +│ ├── discover-svc/ # Perplexity Discover (port 3002) +│ ├── collection-svc/ # Collections API (port 3025) +│ └── file-svc/ # File upload & analysis (port 3026) +├── internal/ +│ ├── agent/ # Orchestrator, Classifier, Researcher, DeepResearch, FocusModes +│ ├── db/ # PostgreSQL repos (digest, article_summary, collection, file) +│ ├── files/ # File analyzer (PDF, images) +│ ├── llm/ # OpenAI, Anthropic, Gemini clients +│ ├── search/ # SearXNG, Media, BM25 Reranker +│ ├── session/ # Session manager + events +│ ├── prompts/ # LLM prompts +│ └── types/ # Blocks, Chunks, Widgets +├── pkg/ +│ ├── config/ # Configuration +│ ├── ndjson/ # NDJSON streaming +│ ├── cache/ # Redis кэширование +│ └── middleware/ # Auth, RateLimit, Logging, JWT +└── deploy/ + ├── docker/ # Dockerfiles + docker-compose + └── k8s/ # Kubernetes manifests +``` + +**Для запуска:** +```bash +cd backend +make deps +make build +make dev # или docker-compose +``` + +**Добавлено (Perplexity-style Article Summary):** +- `fetchPreGeneratedDigest()` — проверка готового дайджеста из discover-svc +- `preScrapeArticleURL()` — параллельный скрейпинг через Crawl4AI +- Обработка `Summary: ` запросов как в Perplexity Discover +- Writer prompt для article digest (структура, follow-up вопросы) +- Researcher prompt для поиска related sources + +**Добавлено (26.02.2026):** +- `discover-svc` на Go (port 3002) — полный функционал Perplexity Discover +- Redis кэширование article summaries (`pkg/cache/redis.go`) +- Сохранение саммари в discover-svc после генерации из chat-svc +- Article summary flow: chat-svc -> agent-svc -> discover-svc + 
+**Добавлено (26.02.2026 - Perplexity Parity):** +- PostgreSQL персистентность (`internal/db/`) — digests, article_summaries, collections, files +- Redis-based rate limiting (`pkg/middleware/ratelimit_redis.go`) +- JWT валидация токенов (`pkg/middleware/jwt.go`) +- Focus Modes (`internal/agent/focus_modes.go`) — Academic, YouTube, Reddit, Code, News, Math, Finance +- Deep Research (`internal/agent/deep_research.go`) — multi-agent, 30+ searches, insights synthesis +- Collections service (`cmd/collection-svc/`) — saved research projects +- File service (`cmd/file-svc/`) — PDF/image upload & analysis + +**Добавлено (26.02.2026 - Full Perplexity Feature Parity):** +- Thread service (`cmd/thread-svc/`) — Threads, Spaces, Memory, Pages (port 3027) +- **Threads** — сохранение истории чатов, sharing по публичной ссылке +- **Spaces** — workspaces с custom AI instructions, default focus mode +- **Pages** — генерация статей из threads (`internal/pages/generator.go`) +- **Memory** — персонализация (preferences, facts, instructions, interests) +- Clarifying Questions — уточняющие вопросы перед Pro Search (Quality mode) +- Related Questions — связанные вопросы после ответа +- Enhanced Context — buildEnhancedContext() объединяет User Memory + Collection + Files +- runDeepResearchMode() — интеграция Deep Research в orchestrator +- DB repositories: `internal/db/thread_repo.go`, `space_repo.go`, `memory_repo.go`, `page_repo.go` + +**TODO для Go бэкенда:** +1. [ ] Тестирование интеграции с web-svc +2. [x] Добавить Redis кэширование ответов +3. [x] Добавить сохранение саммари в discover-svc после генерации +4. [ ] Unit тесты +5. [x] K8s манифесты для Go сервисов (`backend/deploy/k8s/`) +6. [x] PostgreSQL персистентность +7. [x] Focus Modes (Perplexity-style) +8. [x] Deep Research (multi-agent) +9. [x] Collections API +10. [x] File Analysis (PDF, images) +11. [x] Thread History (chat persistence) +12. [x] Spaces (workspaces) +13. [x] Pages (article generation) +14. 
[x] Memory (personalization) +15. [x] Clarifying Questions +16. [x] Related Questions +17. [x] Sharing (public links) +18. [x] **GooSeek Computer** (Perplexity Computer аналог) + +### GooSeek Computer — Perplexity Computer Clone (26.02.2026) + +**Новый сервис:** `cmd/computer-svc/main.go` (port 3030) + +Автономная платформа для долгосрочных задач — аналог Perplexity Computer. + +**Архитектура:** +``` +backend/internal/computer/ +├── types.go # ComputerTask, SubTask, Artifact, Schedule, Memory +├── computer.go # Главный оркестратор + EventBus +├── planner.go # AI планировщик задач +├── router.go # Роутинг на нужную модель +├── executor.go # Исполнение задач (research, code, deploy, report...) +├── sandbox.go # Docker sandbox для исполнения кода +├── scheduler.go # Планировщик (cron, interval, once, daily, weekly) +├── memory.go # Персистентная память пользователя +└── connectors/ # Внешние интеграции + ├── connector.go # Интерфейс коннектора + ├── email.go # SMTP отправка + ├── telegram.go # Telegram Bot API + ├── webhook.go # HTTP webhooks + └── storage.go # S3/MinIO хранилище +``` + +**Model Registry:** `internal/llm/registry.go` +- Capabilities: reasoning, coding, search, creative, fast, long_context, vision, math +- Роутинг задач на оптимальную модель (Claude для reasoning, Gemini для research, GPT для fast) + +**DB Repositories:** +- `internal/db/computer_task_repo.go` — задачи +- `internal/db/computer_memory_repo.go` — память +- `internal/db/computer_artifact_repo.go` — артефакты (код, отчёты, файлы) + +**API Endpoints:** +- `POST /api/v1/computer/execute` — запуск задачи +- `GET /api/v1/computer/tasks` — список задач пользователя +- `GET /api/v1/computer/tasks/:id` — статус задачи +- `GET /api/v1/computer/tasks/:id/stream` — SSE стрим событий +- `POST /api/v1/computer/tasks/:id/resume` — продолжить с user input +- `DELETE /api/v1/computer/tasks/:id` — отмена +- `GET /api/v1/computer/tasks/:id/artifacts` — артефакты +- `GET /api/v1/computer/models` — доступные 
модели +- `GET /api/v1/computer/connectors` — доступные интеграции +- `POST /api/v1/computer/connectors/:id/execute` — выполнить action + +**Типы задач:** +- `research` — веб-исследование, сбор данных +- `code` — генерация кода +- `analysis` — анализ данных +- `design` — проектирование архитектуры +- `deploy` — запуск кода в sandbox +- `report` — генерация отчётов +- `communicate` — отправка email/telegram +- `transform` — преобразование данных +- `validate` — валидация результатов + +**Планирование:** +- `cron` — cron-выражения +- `interval` — каждые N секунд +- `once` — одноразовый запуск +- `daily/hourly/weekly/monthly` — preset'ы + +**Пример использования:** +```json +POST /api/v1/computer/execute +{ + "userId": "user-123", + "query": "Мониторь курс доллара каждый час и отправляй мне в Telegram если изменение > 1%", + "options": { + "async": true, + "schedule": { "type": "hourly" } + } +} +``` + +Нет критичных задач - система работает стабильно. + +### GooSeek Computer UI (27.02.2026) + +**Задача:** Создать UI для Computer в стиле Perplexity Computer. + +**Сделано:** + +1. **Типы** (`backend/webui/src/lib/types.ts`): + - ComputerTask, ComputerTaskStatus, DurationMode, TaskPriority + - TaskPlan, SubTask, Artifact, Checkpoint + - ComputerSchedule, ResourceLimits + - ComputerTaskEvent, ComputerExecuteRequest + - ComputerModel, ComputerConnector + +2. **API функции** (`backend/webui/src/lib/api.ts`): + - `executeComputerTask()` — запуск задачи + - `fetchComputerTasks()` — список задач + - `fetchComputerTask()` — статус задачи + - `streamComputerTask()` — SSE стрим событий + - `resumeComputerTask()` — продолжить с user input + - `cancelComputerTask()` — отменить + - `fetchComputerArtifacts()` — артефакты + - `downloadArtifact()` — скачать артефакт + - `fetchComputerModels()` — доступные модели + - `fetchComputerConnectors()` — коннекторы + - `executeConnectorAction()` — выполнить action + +3. 
**UI страница** (`backend/webui/src/app/(main)/computer/page.tsx`): + - Ввод задачи (textarea с placeholder) + - Duration mode selector (short/medium/long/extended/unlimited) + - Примеры задач (4 карточки) + - Список задач (левая панель) + - Детальный просмотр задачи: + - Progress bar + - Статус с иконками + - Подзадачи с прогрессом + - События (real-time log) + - Артефакты с кнопкой скачивания + - Расширенные настройки (коннекторы) + +4. **Навигация** (`backend/webui/src/components/Sidebar.tsx`): + - Добавлен Computer в раздел "Инструменты" + - Иконка Cpu + +### Оптимизация Speed Mode 2.0 (26.02.2026): + +**Проблема:** Запросы обрабатывались >2 минут из-за 5 последовательных LLM вызовов. + +**Решение — Fast Path для Speed Mode:** +1. **Rule-based classifier** — без LLM вызова (`fastClassifier.ts`) +2. **Direct SearXNG search** — без researcher loop (без LLM) +3. **No scraping** — только сниппеты, без Crawl4AI +4. **Parallel execution** — search + media + widgets одновременно +5. **Single LLM call** — только Writer для финального ответа + +**Новый файл:** `services/master-agents-svc/src/lib/agent/fastClassifier.ts` + +**Изменённые файлы:** +- `services/master-agents-svc/src/lib/agent/searchOrchestrator.ts` — добавлен `runSpeedMode()` +- `services/master-agents-svc/src/lib/actions/scrape_url.ts` — отключен для speed mode + +**Ожидаемое время ответа:** +- Speed mode: 8-15 сек (было >2 мин) +- Balanced mode: 20-40 сек +- Quality mode: 60-120 сек + +### Исправлено 26.02.2026: + +1. **SearXNG падал** (ambiguous shortcut errors): + - Файл: `deploy/docker/searxng/settings.yml` + - Убраны дублирующие shortcuts для yandex engines (ya, yai, yav, yan, yam) + - Убраны конфликтующие кастомные engines (vk, rutube, dzen, aliexpress) + +2. 
**Ghost перезапускался каждые 2 минуты** (HTTP 503): + - Файл: `deploy/docker/docker-compose.yml` + - Исправлен healthcheck (wget --spider не следует за 301 redirect) + - Увеличены start_period (90s), retries (5), interval (30s) ## Контекст для продолжения -- Порты: discover 3002, search 3001, finance 3003, travel 3004, chat 3005, memory 3010, create 3011, notifications 3013, billing 3008, media 3016, suggestions 3017, master-agents 3018, profile 3019, llm 3020 -- Ghost: опционально → http://localhost:2369, админка /ghost, Content API Key в .env -- Redis ключи: discover:{topic}, finance:summary, travel:trending, travel:stepper:{sessionId}, cache-worker + +### Новые файлы (созданы): +- `backend/cmd/computer-svc/main.go` — GooSeek Computer service (port 3030) +- `backend/internal/computer/types.go` — Computer types (Task, SubTask, Artifact, etc.) +- `backend/internal/computer/computer.go` — Main orchestrator + EventBus +- `backend/internal/computer/planner.go` — AI task planner +- `backend/internal/computer/router.go` — Model routing +- `backend/internal/computer/executor.go` — Task execution +- `backend/internal/computer/sandbox.go` — Docker sandbox manager +- `backend/internal/computer/scheduler.go` — Cron/interval scheduler +- `backend/internal/computer/memory.go` — Persistent user memory +- `backend/internal/computer/connectors/connector.go` — Connector interface +- `backend/internal/computer/connectors/email.go` — SMTP connector +- `backend/internal/computer/connectors/telegram.go` — Telegram Bot connector +- `backend/internal/computer/connectors/webhook.go` — HTTP webhook connector +- `backend/internal/computer/connectors/storage.go` — S3/MinIO storage connector +- `backend/internal/llm/registry.go` — Model registry with capabilities +- `backend/internal/db/computer_task_repo.go` — Computer task repository +- `backend/internal/db/computer_memory_repo.go` — Computer memory repository +- `backend/internal/db/computer_artifact_repo.go` — Computer artifact 
repository +- `backend/deploy/k8s/computer-svc.yaml` — K8s manifest for computer-svc +- `backend/cmd/discover-svc/main.go` — Discover service на Go +- `backend/cmd/collection-svc/main.go` — Collections service +- `backend/cmd/file-svc/main.go` — File upload & analysis service +- `backend/internal/db/postgres.go` — PostgreSQL connection + migrations +- `backend/internal/db/digest_repo.go` — Digest repository +- `backend/internal/db/article_summary_repo.go` — Article summary repository +- `backend/internal/db/collection_repo.go` — Collection repository +- `backend/internal/db/file_repo.go` — Uploaded file repository +- `backend/internal/agent/focus_modes.go` — Perplexity Focus Modes +- `backend/internal/agent/deep_research.go` — Deep Research multi-agent +- `backend/internal/files/analyzer.go` — PDF/image analyzer +- `backend/pkg/cache/redis.go` — Redis кэш клиент +- `backend/pkg/middleware/ratelimit_redis.go` — Redis-based rate limiting +- `backend/pkg/middleware/jwt.go` — JWT validation middleware +- `backend/deploy/k8s/postgres.yaml` — PostgreSQL StatefulSet +- `backend/deploy/k8s/collection-svc.yaml` — Collection service K8s +- `backend/deploy/k8s/file-svc.yaml` — File service K8s +- `backend/deploy/k8s/thread-svc.yaml` — Thread service K8s +- `backend/cmd/thread-svc/main.go` — Thread/Spaces/Memory/Pages service +- `backend/internal/db/thread_repo.go` — Thread repository +- `backend/internal/db/space_repo.go` — Space repository +- `backend/internal/db/memory_repo.go` — Memory repository +- `backend/internal/db/page_repo.go` — Page repository +- `backend/internal/pages/generator.go` — Page generator (Markdown/HTML export) +- `services/master-agents-svc/src/lib/media-search.ts` — синхронный медиа-поиск +- `services/media-svc/DEPRECATED.md` — пометка устаревшего сервиса +- `services/master-agents-svc/src/lib/types/widgets.ts` +- `services/master-agents-svc/src/lib/content-classifier.ts` +- `services/web-svc/src/components/Widgets/UnifiedCard.tsx` +- 
`services/web-svc/src/components/Widgets/ProductCard.tsx` +- `services/web-svc/src/components/Widgets/VideoCard.tsx` +- `services/web-svc/src/components/Widgets/ProfileCard.tsx` +- `services/web-svc/src/components/Widgets/PromoCard.tsx` +- `services/web-svc/src/components/Widgets/CardGallery.tsx` +- `services/web-svc/src/components/Widgets/KnowledgeCard.tsx` +- `services/web-svc/src/components/Widgets/InlineImageGallery.tsx` +- `services/web-svc/src/components/Widgets/VideoEmbed.tsx` +- `services/web-svc/src/components/SourcesPanel.tsx` +- `services/web-svc/src/components/RelatedQuestions.tsx` + +### Изменённые файлы: +- `backend/internal/agent/orchestrator.go` — Clarifying/Related Questions, Deep Research mode, Enhanced Context +- `backend/pkg/config/config.go` — ThreadSvcURL, ComputerSvcURL +- `backend/go.mod` — добавлены minio-go, robfig/cron +- `backend/Makefile` — добавлен computer-svc +- `backend/deploy/docker/Dockerfile.all` — добавлен build computer-svc +- `backend/deploy/k8s/kustomization.yaml` — добавлен computer-svc +- `backend/Makefile` — thread-svc +- `backend/deploy/docker/docker-compose.yml` — thread-svc +- `backend/deploy/docker/Dockerfile.all` — thread-svc build +- `backend/deploy/k8s/kustomization.yaml` — thread-svc +- `backend/deploy/k8s/configmap.yaml` — THREAD_SVC_URL +- `services/master-agents-svc/src/lib/agent/searchOrchestrator.ts` — интеграция медиа-поиска +- `services/master-agents-svc/src/lib/searxng.ts` — расширенные типы и категоризация +- `services/master-agents-svc/src/lib/agent/researcher.ts` — интеграция classifier + эмиссия виджетов +- `deploy/docker/searxng/settings.yml` — российские engines +- `services/web-svc/src/components/MessageRenderer/Citation.tsx` — enhanced hover +- `services/web-svc/src/components/Widgets/Renderer.tsx` — новые типы виджетов +- `services/web-svc/src/lib/types.ts` — WidgetType +- `services/web-svc/src/components/ChatWindow.tsx` — Widget interface +- `services/web-svc/src/components/AssistantSteps.tsx` — 
progress bar, timer, icons diff --git a/TEST_RESULTS.md b/TEST_RESULTS.md new file mode 100644 index 0000000..a08a4d6 --- /dev/null +++ b/TEST_RESULTS.md @@ -0,0 +1,280 @@ +# Результаты тестирования https://gooseek.ru/ + +**Дата:** 27 февраля 2026, 02:40 МСК + +## ❌ Основная проблема + +**Сайт https://gooseek.ru/ НЕДОСТУПЕН** + +``` +$ curl https://gooseek.ru/ +curl: (7) Failed to connect to gooseek.ru port 443: Couldn't connect to server +``` + +## Причины + +### 1. Reverse-proxy (Caddy) не запущен + +Файл `deploy/docker/docker-compose.yml` содержит конфигурацию для Caddy, который должен: +- Слушать порты 80/443 +- Автоматически получать SSL от Let's Encrypt +- Проксировать `gooseek.ru` → `web-svc:3000` + +**Но этот контейнер НЕ ЗАПУЩЕН.** + +### 2. Запущена неправильная конфигурация + +Сейчас работают контейнеры из `/backend/deploy/docker/docker-compose.yml` (Go backend): + +``` +$ docker ps +NAME STATUS +docker-api-gateway-1 Up 21 minutes (unhealthy) +docker-chat-svc-1 Up 21 minutes (unhealthy) +docker-agent-svc-1 Up 21 minutes (unhealthy) +docker-webui-1 Up 20 minutes +docker-llm-svc-1 Up 21 minutes (unhealthy) +docker-search-svc-1 Up 21 minutes (unhealthy) +docker-scraper-svc-1 Up 21 minutes (unhealthy) +... +``` + +**Все сервисы помечены как "unhealthy"**, кроме базовых (postgres, redis, crawl4ai). + +### 3. Chat API не работает + +#### Тест 1: Прямой доступ к API Gateway +```bash +$ curl http://localhost:3015/health +curl: (7) Connection refused +``` +**Результат:** API Gateway недоступен на localhost, хотя порт должен быть проброшен. + +#### Тест 2: Через WebUI +```bash +$ curl http://localhost:3000 +✅ OK - WebUI возвращает HTML +``` + +```bash +$ curl -X POST http://localhost:3000/api/chat \ + -H "Content-Type: application/json" \ + -d '{ + "message": { + "messageId": "msg-123", + "chatId": "chat-123", + "content": "Привет, как дела?" 
+ }, + "optimizationMode": "balanced", + "history": [], + "locale": "ru" + }' + +Internal Server Error +``` + +**Результат:** WebUI работает, но chat API возвращает ошибку. + +**Причина:** WebUI пытается проксировать запрос на `api-gateway:3015` (через Next.js rewrites), но API Gateway недоступен или не отвечает. + +## Что работает ✅ + +1. **WebUI доступен локально** - `http://localhost:3000` возвращает интерфейс +2. **Docker контейнеры запущены** - все сервисы работают (но unhealthy) +3. **Базовые сервисы здоровы:** + - ✅ postgres (healthy) + - ✅ redis (healthy) + - ✅ crawl4ai (healthy) + - ✅ searxng (running) + +## Что НЕ работает ❌ + +1. **Reverse-proxy (Caddy)** - не запущен +2. **API Gateway** - unhealthy, порт 3015 недоступен +3. **Chat сервис** - unhealthy +4. **Agent сервис** - unhealthy +5. **LLM сервис** - unhealthy +6. **Search сервис** - unhealthy +7. **Все остальные backend Go сервисы** - unhealthy + +## Диагностика + +### Проверка портов +```bash +$ lsof -nP -iTCP -sTCP:LISTEN | grep -E "3000|3015|3005" +node 31314 home 16u IPv6 *:3000 (LISTEN) +``` + +Только порт 3000 (webui) слушает. Порты 3015 (api-gateway) и 3005 (chat-svc) не доступны на хосте. + +### Архитектура + +Текущая конфигурация: +``` +[Browser] + ↓ +❌ gooseek.ru:443 (Caddy не запущен) + ↓ +✅ localhost:3000 (WebUI - Next.js) + ↓ (Next.js rewrites /api/* → api-gateway:3015) +❌ api-gateway:3015 (unhealthy, недоступен) + ↓ +❌ chat-svc:3005 (unhealthy) + ↓ +❌ agent-svc:3018 (unhealthy) +``` + +## Рекомендации по исправлению + +### 1. Запустить правильный docker-compose + +```bash +cd /Users/home/Desktop/GooSeek/deploy/docker +./run.sh +``` + +Это должно запустить полный стек включая reverse-proxy. + +### 2. 
Проверить логи unhealthy контейнеров + +```bash +docker logs docker-api-gateway-1 --tail 100 +docker logs docker-chat-svc-1 --tail 100 +docker logs docker-agent-svc-1 --tail 100 +docker logs docker-llm-svc-1 --tail 100 +``` + +Искать ошибки: +- Отсутствующие переменные окружения +- Проблемы с подключением к зависимостям +- Ошибки при старте сервиса + +### 3. Проверить переменные окружения + +Убедиться что `.env` файл существует и содержит: +```bash +# LLM API Keys +OPENAI_API_KEY=sk-... +ANTHROPIC_API_KEY=sk-ant-... +GEMINI_API_KEY=... +TIMEWEB_API_KEY=... +TIMEWEB_AGENT_ACCESS_ID=... +TIMEWEB_API_BASE_URL=... + +# Ghost CMS +GHOST_CONTENT_API_KEY=... +GHOST_DB_PASSWORD=... +GHOST_MAIL_USER=... +GHOST_MAIL_PASSWORD=... +``` + +### 4. Проверить healthcheck'и + +Каждый сервис имеет healthcheck. Например, для api-gateway: +```yaml +healthcheck: + test: ["CMD", "wget", "-q", "--spider", "http://127.0.0.1:3015/health"] + interval: 15s + timeout: 5s + retries: 3 + start_period: 10s +``` + +Если сервис не отвечает на `/health` в течение 10s после старта → unhealthy. + +Возможные причины unhealthy: +- Сервис не стартует из-за ошибки +- Сервис стартует медленно (нужно увеличить `start_period`) +- Зависимости недоступны (другие сервисы) +- Отсутствуют переменные окружения + +### 5. Проверить сеть Docker + +```bash +docker network ls +docker network inspect gooseek +``` + +Убедиться что все контейнеры в одной сети и могут общаться. + +### 6. Проверить DNS и порты + +```bash +# DNS +nslookup gooseek.ru + +# Порты на роутере +# Убедиться что 80 и 443 проброшены на ПК +``` + +### 7. 
Перезапустить с нуля + +```bash +# Остановить все +cd /Users/home/Desktop/GooSeek/backend/deploy/docker +docker compose down + +# Запустить правильный compose +cd /Users/home/Desktop/GooSeek/deploy/docker +docker compose down +docker compose up -d + +# Проверить логи +docker compose logs -f reverse-proxy +docker compose logs -f web-svc +docker compose logs -f api-gateway +``` + +## Тестовые скрипты + +Созданы скрипты для тестирования: + +1. **test-chat.sh** - базовая проверка сервисов +2. **test-chat-proper.sh** - тест с правильным payload +3. **test-chat-via-webui.sh** - тест через webui + +Использование: +```bash +cd /Users/home/Desktop/GooSeek +./test-chat-via-webui.sh +``` + +## Скриншоты и ошибки + +### Консоль браузера (ожидаемые ошибки) + +Если открыть https://gooseek.ru/ в браузере: +- **Network:** ERR_CONNECTION_REFUSED или ERR_NAME_NOT_RESOLVED +- **Console:** Нет ошибок (страница не загружается) + +Если открыть http://localhost:3000/ в браузере: +- **Network:** ✅ 200 OK для главной страницы +- **Network:** ❌ 500 Internal Server Error для `/api/chat` +- **Console:** Ошибка при попытке отправить сообщение + +### Пример ошибки в консоли (если бы сайт был доступен) + +```javascript +POST http://localhost:3000/api/chat 500 (Internal Server Error) + +Error: Chat request failed: 500 + at streamChat (api.ts:43) + at sendMessage (useChat.ts:53) +``` + +## Итоговый вывод + +**Сайт https://gooseek.ru/ полностью недоступен.** + +**Причины:** +1. ❌ Reverse-proxy (Caddy) не запущен → нет HTTPS доступа +2. ❌ API Gateway unhealthy → chat не работает даже локально +3. ❌ Все backend сервисы unhealthy → нет функциональности + +**Что нужно сделать:** +1. Запустить правильный docker-compose с reverse-proxy +2. Исправить проблемы с unhealthy контейнерами (проверить логи) +3. Убедиться что все переменные окружения настроены +4. Проверить DNS и проброс портов + +**Приоритет:** КРИТИЧЕСКИЙ - сайт полностью не работает. 
diff --git a/api-1.json b/api-1.json new file mode 100644 index 0000000..acd49e0 --- /dev/null +++ b/api-1.json @@ -0,0 +1,2896 @@ +{ + "openapi": "3.0.0", + "paths": { + "/api/v1/cloud-ai/agents/{agent_access_id}/call": { + "post": { + "operationId": "CloudAiAgentsClientController_callAgent", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "Agent access ID", + "schema": { + "type": "string" + } + }, + { + "name": "Authorization", + "required": true, + "in": "header", + "description": "Bearer token", + "schema": { + "type": "string", + "example": "Bearer " + } + }, + { + "name": "x-proxy-source", + "required": true, + "in": "header", + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AgentCallDto" + } + } + } + }, + "responses": { + "200": { + "description": "Agent response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AgentCallResponseDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Agent not found" + } + }, + "summary": "Call AI agent", + "tags": [ + "ai-agents-client" + ] + } + }, + "/api/v1/cloud-ai/agents/{agent_access_id}/v1/chat/completions": { + "post": { + "description": "\nThis endpoint supports both simple text messages and multimodal content:\n\n**Simple text message:**\n```json\n{\n \"model\": \"gpt-4\",\n \"messages\": [\n {\n \"role\": \"user\",\n \"content\": \"Hello, how are you?\"\n }\n ]\n}\n```\n\n**Multimodal message with text and image:**\n```json\n{\n \"model\": \"gpt-4\",\n \"messages\": [\n {\n \"role\": \"user\",\n \"content\": [\n { \"type\": \"text\", \"text\": \"What is in this image?\" },\n { \"type\": \"image_url\", \"image_url\": { \"url\": \"https://example.com/image.jpg\" } }\n ]\n }\n ]\n}\n```\n\n**Message with audio input:**\n```json\n{\n \"model\": \"gpt-4\",\n 
\"messages\": [\n {\n \"role\": \"user\",\n \"content\": [\n { \"type\": \"text\", \"text\": \"Please transcribe this audio:\" },\n { \"type\": \"input_audio\", \"input_audio\": { \"data\": \"base64_encoded_audio_data\", \"format\": \"wav\" } }\n ]\n }\n ]\n}\n```\n ", + "operationId": "CloudAiAgentsClientController_chatCompletions", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "Agent access ID", + "schema": { + "type": "string" + } + }, + { + "name": "Authorization", + "required": true, + "in": "header", + "description": "Bearer token", + "schema": { + "type": "string", + "example": "Bearer " + } + }, + { + "name": "x-proxy-source", + "required": true, + "in": "header", + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ChatCompletionCreateParamsDto" + } + } + } + }, + "responses": { + "200": { + "description": "Chat completion response (non-streaming)", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAiChatCompletionResponseDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Agent not found" + } + }, + "security": [ + { + "bearer": [] + } + ], + "summary": "OpenAI-compatible chat completions endpoint for AI agent", + "tags": [ + "ai-agents-client" + ] + } + }, + "/api/v1/cloud-ai/agents/{agent_access_id}/v1/completions": { + "post": { + "deprecated": true, + "operationId": "CloudAiAgentsClientController_textCompletions", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "Agent access ID", + "schema": { + "type": "string" + } + }, + { + "name": "Authorization", + "required": true, + "in": "header", + "description": "Bearer token", + "schema": { + "type": "string", + "example": "Bearer " + } + }, + { + "name": "x-proxy-source", + "required": true, + 
"in": "header", + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAiTextCompletionRequestDto" + } + } + } + }, + "responses": { + "200": { + "description": "Text completion response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAiTextCompletionResponseDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Agent not found" + } + }, + "summary": "OpenAI-compatible text completions endpoint for AI agent (legacy)", + "tags": [ + "ai-agents-client" + ] + } + }, + "/api/v1/cloud-ai/agents/{agent_access_id}/v1/models": { + "get": { + "operationId": "CloudAiAgentsClientController_getModels", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "Agent access ID", + "schema": { + "type": "string" + } + }, + { + "name": "Authorization", + "required": true, + "in": "header", + "description": "Bearer token", + "schema": { + "type": "string", + "example": "Bearer " + } + } + ], + "responses": { + "200": { + "description": "List of available models", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAiModelsResponseDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Agent not found" + } + }, + "summary": "OpenAI-compatible models endpoint for AI agent", + "tags": [ + "ai-agents-client" + ] + } + }, + "/api/v1/cloud-ai/agents/{agent_access_id}/embed.js": { + "get": { + "operationId": "CloudAiAgentsClientController_getAgentEmbedCode", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "Agent Access ID", + "schema": { + "type": "string" + } + }, + { + "name": "collapsed", + "required": false, + "in": "query", + "description": "Start widget in collapsed mode (default: true)", + "schema": 
{ + "default": true, + "type": "boolean" + } + }, + { + "name": "referer", + "required": true, + "in": "header", + "schema": { + "type": "string" + } + }, + { + "name": "origin", + "required": true, + "in": "header", + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "JavaScript code for embedding the chat widget", + "content": { + "application/javascript": { + "schema": { + "type": "string" + } + } + } + }, + "403": { + "description": "Domain not whitelisted or agent suspended" + }, + "404": { + "description": "Agent not found" + } + }, + "summary": "Get widget embed JavaScript code", + "tags": [ + "ai-agents-client" + ] + } + }, + "/api/v1/cloud-ai/agents/{agent_access_id}/v1/responses": { + "post": { + "operationId": "CloudAiAgentsResponsesController_createResponse", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "ID доступа агента", + "schema": { + "type": "string" + } + }, + { + "name": "authorization", + "in": "header", + "description": "Токен для приватных агентов", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateResponseDto" + } + } + } + }, + "responses": { + "200": { + "description": "Ответ создан успешно", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAiResponseDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Agent not found" + } + }, + "summary": "OpenAI-compatible create a response", + "tags": [ + "ai-agents-responses" + ] + } + }, + "/api/v1/cloud-ai/agents/{agent_access_id}/v1/responses/{response_id}": { + "get": { + "operationId": "CloudAiAgentsResponsesController_getResponse", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "Agent Access ID", + "schema": { + 
"type": "string" + } + }, + { + "name": "response_id", + "required": true, + "in": "path", + "description": "Response ID", + "schema": { + "type": "string" + } + }, + { + "name": "include", + "required": false, + "in": "query", + "description": "Дополнительные поля для включения в ответ", + "schema": { + "example": [ + "web_search_call.action.sources", + "code_interpreter_call.outputs" + ], + "type": "array", + "items": { + "type": "string" + } + } + }, + { + "name": "include_obfuscation", + "required": false, + "in": "query", + "description": "Включить обфускацию потока для защиты от атак по побочным каналам", + "schema": { + "example": true, + "type": "boolean" + } + }, + { + "name": "starting_after", + "required": false, + "in": "query", + "description": "Номер последовательности события, после которого начать потоковую передачу", + "schema": { + "minimum": 0, + "example": 0, + "type": "number" + } + }, + { + "name": "stream", + "required": false, + "in": "query", + "description": "Потоковая передача данных ответа модели", + "schema": { + "example": false, + "type": "boolean" + } + }, + { + "name": "authorization", + "required": true, + "in": "header", + "description": "Bearer token for private agents", + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Response retrieved successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAiResponseDto" + } + } + } + }, + "404": { + "description": "Response not found" + } + }, + "summary": "OpenAI-compatible get a response", + "tags": [ + "ai-agents-responses" + ] + }, + "delete": { + "operationId": "CloudAiAgentsResponsesController_deleteResponse", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "Agent Access ID", + "schema": { + "type": "string" + } + }, + { + "name": "response_id", + "required": true, + "in": "path", + "description": "Response ID", + "schema": { + "type": 
"string" + } + }, + { + "name": "authorization", + "required": true, + "in": "header", + "description": "Bearer token for private agents", + "schema": { + "type": "string" + } + } + ], + "responses": { + "204": { + "description": "Ответ удален успешно" + }, + "404": { + "description": "Response not found" + } + }, + "summary": "OpenAI-compatible delete a response", + "tags": [ + "ai-agents-responses" + ] + } + }, + "/api/v1/cloud-ai/agents/{agent_access_id}/v1/responses/{response_id}/cancel": { + "post": { + "operationId": "CloudAiAgentsResponsesController_cancelResponse", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "Agent Access ID", + "schema": { + "type": "string" + } + }, + { + "name": "response_id", + "required": true, + "in": "path", + "description": "Response ID", + "schema": { + "type": "string" + } + }, + { + "name": "authorization", + "required": true, + "in": "header", + "description": "Bearer token for private agents", + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Отмена ответа выполнена успешно", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAiResponseDto" + } + } + } + }, + "404": { + "description": "Response not found" + } + }, + "summary": "OpenAI-compatible cancel a response", + "tags": [ + "ai-agents-responses" + ] + } + }, + "/api/v1/cloud-ai/agents/{agent_access_id}/v1/conversations": { + "post": { + "operationId": "CloudAiAgentsConversationsController_createConversation", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "Agent Access ID", + "schema": { + "type": "string" + } + }, + { + "name": "authorization", + "required": true, + "in": "header", + "description": "Bearer token for private agents", + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/CreateConversationDto" + } + } + } + }, + "responses": { + "200": { + "description": "Conversation created successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ConversationDto" + } + } + } + }, + "401": { + "description": "Unauthorized" + }, + "404": { + "description": "Agent not found" + } + }, + "summary": "OpenAI-compatible create a conversation", + "tags": [ + "ai-agents-conversations" + ] + } + }, + "/api/v1/cloud-ai/agents/{agent_access_id}/v1/conversations/{conversation_id}": { + "get": { + "operationId": "CloudAiAgentsConversationsController_getConversation", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "Agent Access ID", + "schema": { + "type": "string" + } + }, + { + "name": "conversation_id", + "required": true, + "in": "path", + "description": "Conversation ID", + "schema": { + "type": "string" + } + }, + { + "name": "authorization", + "required": true, + "in": "header", + "description": "Bearer token for private agents", + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Conversation retrieved successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ConversationDto" + } + } + } + }, + "404": { + "description": "Conversation not found" + } + }, + "summary": "OpenAI-compatible get a conversation", + "tags": [ + "ai-agents-conversations" + ] + }, + "post": { + "operationId": "CloudAiAgentsConversationsController_updateConversation", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "Agent Access ID", + "schema": { + "type": "string" + } + }, + { + "name": "conversation_id", + "required": true, + "in": "path", + "description": "Conversation ID", + "schema": { + "type": "string" + } + }, + { + "name": "authorization", + "required": true, + "in": "header", + "description": "Bearer token for 
private agents", + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateConversationDto" + } + } + } + }, + "responses": { + "200": { + "description": "Conversation updated successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ConversationDto" + } + } + } + }, + "404": { + "description": "Conversation not found" + } + }, + "summary": "OpenAI-compatible update a conversation", + "tags": [ + "ai-agents-conversations" + ] + }, + "delete": { + "operationId": "CloudAiAgentsConversationsController_deleteConversation", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "Agent Access ID", + "schema": { + "type": "string" + } + }, + { + "name": "conversation_id", + "required": true, + "in": "path", + "description": "Conversation ID", + "schema": { + "type": "string" + } + }, + { + "name": "authorization", + "required": true, + "in": "header", + "description": "Bearer token for private agents", + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Conversation deleted successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ConversationDeletedDto" + } + } + } + }, + "404": { + "description": "Conversation not found" + } + }, + "summary": "OpenAI-compatible delete a conversation", + "tags": [ + "ai-agents-conversations" + ] + } + }, + "/api/v1/cloud-ai/agents/{agent_access_id}/v1/conversations/{conversation_id}/items": { + "get": { + "operationId": "CloudAiAgentsConversationsController_listItems", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "Agent Access ID", + "schema": { + "type": "string" + } + }, + { + "name": "conversation_id", + "required": true, + "in": "path", + "description": "Conversation ID", + "schema": { + 
"type": "string" + } + }, + { + "name": "after", + "required": false, + "in": "query", + "description": "An item ID to list items after, used in pagination", + "schema": { + "type": "string" + } + }, + { + "name": "include", + "required": false, + "in": "query", + "description": "Specify additional output data to include in the model response", + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + }, + { + "name": "limit", + "required": false, + "in": "query", + "description": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20", + "schema": { + "minimum": 1, + "maximum": 100, + "default": 20, + "type": "number" + } + }, + { + "name": "order", + "required": false, + "in": "query", + "description": "The order to return the input items in", + "schema": { + "default": "desc", + "enum": [ + "asc", + "desc" + ], + "type": "string" + } + }, + { + "name": "authorization", + "required": true, + "in": "header", + "description": "Bearer token for private agents", + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Items retrieved successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ConversationItemListDto" + } + } + } + }, + "404": { + "description": "Conversation not found" + } + }, + "summary": "OpenAI-compatible list items in a conversation", + "tags": [ + "ai-agents-conversations" + ] + }, + "post": { + "operationId": "CloudAiAgentsConversationsController_createItems", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "Agent Access ID", + "schema": { + "type": "string" + } + }, + { + "name": "conversation_id", + "required": true, + "in": "path", + "description": "Conversation ID", + "schema": { + "type": "string" + } + }, + { + "name": "include", + "required": false, + "in": "query", + "description": "Additional fields to include in the response", + 
"schema": { + "type": "array", + "items": { + "type": "string" + } + } + }, + { + "name": "authorization", + "required": true, + "in": "header", + "description": "Bearer token for private agents", + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateItemsDto" + } + } + } + }, + "responses": { + "200": { + "description": "Items created successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ConversationItemListDto" + } + } + } + }, + "404": { + "description": "Conversation not found" + } + }, + "summary": "OpenAI-compatible create items in a conversation", + "tags": [ + "ai-agents-conversations" + ] + } + }, + "/api/v1/cloud-ai/agents/{agent_access_id}/v1/conversations/{conversation_id}/items/{item_id}": { + "get": { + "operationId": "CloudAiAgentsConversationsController_getItem", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "Agent Access ID", + "schema": { + "type": "string" + } + }, + { + "name": "conversation_id", + "required": true, + "in": "path", + "description": "Conversation ID", + "schema": { + "type": "string" + } + }, + { + "name": "item_id", + "required": true, + "in": "path", + "description": "Item ID", + "schema": { + "type": "string" + } + }, + { + "name": "include", + "required": false, + "in": "query", + "description": "Specify additional output data to include in the model response", + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + }, + { + "name": "authorization", + "required": true, + "in": "header", + "description": "Bearer token for private agents", + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Item retrieved successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ConversationItemDto" + } + } + } + }, + 
"404": { + "description": "Item not found" + } + }, + "summary": "OpenAI-compatible get a conversation item", + "tags": [ + "ai-agents-conversations" + ] + }, + "delete": { + "operationId": "CloudAiAgentsConversationsController_deleteItem", + "parameters": [ + { + "name": "agent_access_id", + "required": true, + "in": "path", + "description": "Agent Access ID", + "schema": { + "type": "string" + } + }, + { + "name": "conversation_id", + "required": true, + "in": "path", + "description": "Conversation ID", + "schema": { + "type": "string" + } + }, + { + "name": "item_id", + "required": true, + "in": "path", + "description": "Item ID", + "schema": { + "type": "string" + } + }, + { + "name": "authorization", + "required": true, + "in": "header", + "description": "Bearer token for private agents", + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Item deleted successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ConversationDto" + } + } + } + }, + "404": { + "description": "Item not found" + } + }, + "summary": "OpenAI-compatible delete a conversation item", + "tags": [ + "ai-agents-conversations" + ] + } + } + }, + "info": { + "title": "Cloud AI API", + "description": "API docs for Cloud AI", + "version": "1.0", + "contact": {} + }, + "tags": [ + { + "name": "ai-agents-client", + "description": "API for working with AI agents" + }, + { + "name": "ai-agents-responses", + "description": "API for managing AI agents responses" + }, + { + "name": "ai-agents-conversations", + "description": "API for managing AI agents conversations" + } + ], + "servers": [ + { + "url": "https://agent.timeweb.cloud", + "description": "Timeweb Cloud AI API" + } + ], + "components": { + "securitySchemes": { + "JWT-auth": { + "scheme": "bearer", + "bearerFormat": "JWT", + "type": "http", + "name": "JWT", + "description": "Enter JWT token", + "in": "header" + } + }, + "schemas": { + "AgentCallDto": { + 
"type": "object", + "properties": { + "message": { + "type": "string", + "description": "The message to send to the agent (can be empty if files are provided)", + "default": "" + }, + "parent_message_id": { + "type": "string", + "description": "Optional parent message ID for conversation context" + }, + "file_ids": { + "description": "Optional array of file IDs to attach to the message", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "AgentCallResponseDto": { + "type": "object", + "properties": { + "message": { + "type": "string", + "description": "The response message from the agent" + }, + "id": { + "type": "string", + "description": "Unique ID of the message" + }, + "finish_reason": { + "type": "object", + "description": "The reason why the response was finished" + } + }, + "required": [ + "message", + "id", + "finish_reason" + ] + }, + "OpenAiChatCompletionMessageDto": { + "type": "object", + "properties": { + "role": { + "type": "string", + "description": "The role of the author of this message" + }, + "content": { + "type": "string", + "description": "The contents of the message" + } + }, + "required": [ + "role", + "content" + ] + }, + "OpenAiChatCompletionChoiceDto": { + "type": "object", + "properties": { + "index": { + "type": "number", + "description": "The index of the choice in the list of choices" + }, + "message": { + "description": "A chat completion message generated by the model", + "allOf": [ + { + "$ref": "#/components/schemas/OpenAiChatCompletionMessageDto" + } + ] + }, + "finish_reason": { + "type": "string", + "description": "The reason the model stopped generating tokens", + "enum": [ + "stop", + "length", + "content_filter", + "tool_calls" + ] + } + }, + "required": [ + "index", + "message", + "finish_reason" + ] + }, + "OpenAiUsageDto": { + "type": "object", + "properties": { + "prompt_tokens": { + "type": "number", + "description": "Number of tokens in the prompt" + }, + "completion_tokens": { + "type": "number", + 
"description": "Number of tokens in the generated completion" + }, + "total_tokens": { + "type": "number", + "description": "Total number of tokens used in the request (prompt + completion)" + } + }, + "required": [ + "prompt_tokens", + "completion_tokens", + "total_tokens" + ] + }, + "OpenAiChatCompletionResponseDto": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "A unique identifier for the chat completion" + }, + "object": { + "type": "string", + "description": "The object type, which is always \"chat.completion\"", + "example": "chat.completion" + }, + "created": { + "type": "number", + "description": "The Unix timestamp (in seconds) of when the chat completion was created" + }, + "model": { + "type": "string", + "description": "The model used for the chat completion" + }, + "choices": { + "description": "A list of chat completion choices", + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAiChatCompletionChoiceDto" + } + }, + "usage": { + "description": "Usage statistics for the completion request", + "allOf": [ + { + "$ref": "#/components/schemas/OpenAiUsageDto" + } + ] + }, + "system_fingerprint": { + "type": "string", + "description": "System fingerprint" + } + }, + "required": [ + "id", + "object", + "created", + "model", + "choices", + "usage" + ] + }, + "OpenAiStreamDeltaDto": { + "type": "object", + "properties": { + "content": { + "type": "string", + "description": "The content delta for the message" + }, + "role": { + "type": "string", + "description": "The role of the message author (only in first chunk)" + } + } + }, + "OpenAiStreamChoiceDto": { + "type": "object", + "properties": { + "index": { + "type": "number", + "description": "The index of the choice in the list of choices" + }, + "delta": { + "description": "A chat completion delta generated by the model", + "allOf": [ + { + "$ref": "#/components/schemas/OpenAiStreamDeltaDto" + } + ] + }, + "finish_reason": { + "type": "string", + 
"description": "The reason the model stopped generating tokens", + "enum": [ + "stop", + "length", + "content_filter", + "tool_calls", + null + ] + } + }, + "required": [ + "index", + "delta" + ] + }, + "OpenAiChatCompletionStreamResponseDto": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "A unique identifier for the chat completion" + }, + "object": { + "type": "string", + "description": "The object type, which is always \"chat.completion.chunk\"", + "example": "chat.completion.chunk" + }, + "created": { + "type": "number", + "description": "The Unix timestamp (in seconds) of when the chat completion was created" + }, + "model": { + "type": "string", + "description": "The model used for the chat completion" + }, + "choices": { + "description": "A list of chat completion choices", + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAiStreamChoiceDto" + } + }, + "system_fingerprint": { + "type": "string", + "description": "System fingerprint" + } + }, + "required": [ + "id", + "object", + "created", + "model", + "choices" + ] + }, + "OpenAiModelDto": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Model identifier", + "example": "gpt-4o-2024-08-06" + }, + "object": { + "type": "string", + "description": "Object type, always \"model\"", + "example": "model" + }, + "created": { + "type": "number", + "description": "Unix timestamp when the model was created", + "example": 1692901427 + }, + "owned_by": { + "type": "string", + "description": "Organization that owns the model", + "example": "openai" + } + }, + "required": [ + "id", + "object", + "created", + "owned_by" + ] + }, + "OpenAiModelsResponseDto": { + "type": "object", + "properties": { + "object": { + "type": "string", + "description": "Object type, always \"list\"", + "example": "list" + }, + "data": { + "description": "Array of available models", + "type": "array", + "items": { + "$ref": 
"#/components/schemas/OpenAiModelDto" + } + } + }, + "required": [ + "object", + "data" + ] + }, + "OpenAiTextCompletionRequestDto": { + "type": "object", + "properties": { + "prompt": { + "type": "string", + "description": "The prompt to generate completions for", + "example": "Write a short story about a robot" + }, + "model": { + "type": "string", + "description": "The model to use for completion", + "example": "gpt-3.5-turbo-instruct", + "default": "gpt-3.5-turbo-instruct" + }, + "max_tokens": { + "type": "number", + "description": "The maximum number of tokens to generate", + "example": 100, + "minimum": 1, + "maximum": 4096, + "default": 16 + }, + "temperature": { + "type": "number", + "description": "Controls randomness in the output", + "example": 0.7, + "minimum": 0, + "maximum": 2, + "default": 1 + }, + "top_p": { + "type": "number", + "description": "Controls diversity via nucleus sampling", + "example": 0.9, + "minimum": 0, + "maximum": 1, + "default": 1 + }, + "n": { + "type": "number", + "description": "How many completions to generate", + "example": 1, + "minimum": 1, + "maximum": 128, + "default": 1 + }, + "stream": { + "type": "boolean", + "description": "Whether to stream back partial progress", + "example": false, + "default": false + }, + "logprobs": { + "type": "number", + "description": "Include the log probabilities on the logprobs most likely tokens", + "example": null, + "minimum": 0, + "maximum": 5 + }, + "echo": { + "type": "boolean", + "description": "Echo back the prompt in addition to the completion", + "example": false, + "default": false + }, + "stop": { + "description": "Up to 4 sequences where the API will stop generating further tokens", + "example": [ + "\n" + ], + "type": "array", + "items": { + "type": "string" + } + }, + "presence_penalty": { + "type": "number", + "description": "Number between -2.0 and 2.0. 
Positive values penalize new tokens based on whether they appear in the text so far", + "example": 0, + "minimum": -2, + "maximum": 2, + "default": 0 + }, + "frequency_penalty": { + "type": "number", + "description": "Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far", + "example": 0, + "minimum": -2, + "maximum": 2, + "default": 0 + }, + "best_of": { + "type": "number", + "description": "Generates best_of completions server-side and returns the \"best\"", + "example": 1, + "minimum": 1, + "maximum": 20, + "default": 1 + }, + "user": { + "type": "string", + "description": "A unique identifier representing your end-user", + "example": "user-123" + } + }, + "required": [ + "prompt" + ] + }, + "TextCompletionLogprobs": { + "type": "object", + "properties": { + "tokens": { + "description": "The tokens chosen by the model", + "type": "array", + "items": { + "type": "string" + } + }, + "token_logprobs": { + "description": "The log probability of each token", + "type": "array", + "items": { + "type": "number" + } + }, + "top_logprobs": { + "type": "object", + "description": "The top logprobs for each token", + "additionalProperties": true + }, + "text_offset": { + "description": "The character offsets for each token", + "type": "array", + "items": { + "type": "number" + } + } + } + }, + "TextCompletionChoice": { + "type": "object", + "properties": { + "text": { + "type": "string", + "description": "The generated text", + "example": "Once upon a time, there was a robot named R2D2..." 
+ }, + "index": { + "type": "number", + "description": "The index of this choice", + "example": 0 + }, + "logprobs": { + "description": "Log probability information for the choice", + "allOf": [ + { + "$ref": "#/components/schemas/TextCompletionLogprobs" + } + ] + }, + "finish_reason": { + "type": "string", + "description": "The reason the model stopped generating tokens", + "example": "stop", + "enum": [ + "stop", + "length", + "content_filter" + ] + } + }, + "required": [ + "text", + "index", + "finish_reason" + ] + }, + "TextCompletionUsage": { + "type": "object", + "properties": { + "prompt_tokens": { + "type": "number", + "description": "Number of tokens in the prompt", + "example": 10 + }, + "completion_tokens": { + "type": "number", + "description": "Number of tokens in the generated completion", + "example": 50 + }, + "total_tokens": { + "type": "number", + "description": "Total number of tokens used", + "example": 60 + } + }, + "required": [ + "prompt_tokens", + "completion_tokens", + "total_tokens" + ] + }, + "OpenAiTextCompletionResponseDto": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Unique identifier for the completion", + "example": "cmpl-7QyqpwdfhqwajicIEznoc6Q47XAyW" + }, + "object": { + "type": "string", + "description": "The object type, always \"text_completion\"", + "example": "text_completion" + }, + "created": { + "type": "number", + "description": "Unix timestamp when the completion was created", + "example": 1692901427 + }, + "model": { + "type": "string", + "description": "The model used for completion", + "example": "gpt-3.5-turbo-instruct" + }, + "choices": { + "description": "Array of completion choices", + "type": "array", + "items": { + "$ref": "#/components/schemas/TextCompletionChoice" + } + }, + "usage": { + "description": "Usage statistics for the completion", + "allOf": [ + { + "$ref": "#/components/schemas/TextCompletionUsage" + } + ] + } + }, + "required": [ + "id", + "object", + 
"created", + "model", + "choices", + "usage" + ] + }, + "FunctionCallDto": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of the function to call", + "example": "get_weather" + } + }, + "required": [ + "name" + ] + }, + "ChatMessageDto": { + "type": "object", + "properties": { + "role": { + "type": "string", + "enum": [ + "system", + "user", + "assistant", + "tool", + "function", + "developer" + ], + "description": "The role of the message author" + }, + "content": { + "description": "The contents of the message - can be a string or array of content items", + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "oneOf": [ + { + "$ref": "#/components/schemas/TextContentDto" + }, + { + "$ref": "#/components/schemas/ImageUrlContentDto" + }, + { + "$ref": "#/components/schemas/InputAudioContentDto" + }, + { + "$ref": "#/components/schemas/FileContentDto" + }, + { + "$ref": "#/components/schemas/RefusalContentDto" + } + ] + } + } + ], + "examples": [ + "Simple text message", + [ + { + "type": "text", + "text": "What is in this image?" 
+ }, + { + "type": "image_url", + "image_url": { + "url": "https://example.com/image.jpg" + } + } + ] + ] + }, + "name": { + "type": "object", + "description": "The name of the author of this message (required for function role)", + "example": "user123" + }, + "function_call": { + "description": "The name and arguments of a function that should be called", + "allOf": [ + { + "$ref": "#/components/schemas/FunctionCallDto" + } + ] + }, + "tool_calls": { + "type": "object", + "description": "Tool call information" + }, + "tool_call_id": { + "type": "object", + "description": "Tool call ID (required for tool role messages)", + "example": "call_abc123" + } + }, + "required": [ + "role", + "content" + ] + }, + "StreamOptionsDto": { + "type": "object", + "properties": { + "include_usage": { + "type": "boolean", + "description": "Whether to include usage information in streaming responses", + "example": true + } + } + }, + "ChatCompletionCreateParamsDto": { + "type": "object", + "properties": { + "model": { + "type": "object", + "description": "ID of the model to use. This field is ignored as the agent has its own model configuration.", + "example": "gpt-4" + }, + "messages": { + "description": "A list of messages comprising the conversation so far", + "type": "array", + "items": { + "$ref": "#/components/schemas/ChatMessageDto" + } + }, + "temperature": { + "type": "object", + "description": "What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.", + "example": 0.7, + "minimum": 0, + "maximum": 2 + }, + "top_p": { + "type": "object", + "description": "An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass.", + "example": 1, + "minimum": 0, + "maximum": 1 + }, + "n": { + "type": "object", + "description": "How many chat completion choices to generate for each input message", + "example": 1, + "minimum": 1, + "maximum": 128 + }, + "stream": { + "type": "object", + "description": "Whether to stream back partial responses", + "example": false, + "default": false + }, + "stop": { + "description": "Up to 4 sequences where the API will stop generating further tokens", + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ], + "example": [ + "\n", + "Human:" + ] + }, + "max_tokens": { + "type": "object", + "deprecated": true, + "description": "The maximum number of tokens to generate in the chat completion", + "example": 100, + "minimum": 1 + }, + "max_completion_tokens": { + "type": "object", + "description": "The maximum number of tokens to generate in the chat completion (alternative to max_tokens)", + "example": 100, + "minimum": 1 + }, + "presence_penalty": { + "type": "object", + "description": "Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.", + "example": 0, + "minimum": -2, + "maximum": 2 + }, + "frequency_penalty": { + "type": "object", + "description": "Number between -2.0 and 2.0. 
Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.", + "example": 0, + "minimum": -2, + "maximum": 2 + }, + "logit_bias": { + "type": "object", + "description": "Modify the likelihood of specified tokens appearing in the completion", + "additionalProperties": { + "type": "number" + }, + "example": { + "50256": -100 + } + }, + "user": { + "type": "string", + "description": "A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse", + "example": "user-1234" + }, + "response_format": { + "oneOf": [ + { + "$ref": "#/components/schemas/ResponseFormatTextDto" + }, + { + "$ref": "#/components/schemas/ResponseFormatJsonObjectDto" + }, + { + "$ref": "#/components/schemas/ResponseFormatJsonSchemaDto" + } + ], + "description": "An object specifying the format that the model must output" + }, + "tools": { + "oneOf": [ + { + "$ref": "#/components/schemas/FunctionToolDto" + }, + { + "$ref": "#/components/schemas/CustomToolDto" + } + ], + "description": "A list of tools the model may call" + }, + "tool_choice": { + "description": "Controls which (if any) tool is called by the model", + "oneOf": [ + { + "type": "string", + "enum": [ + "none", + "auto", + "required" + ] + }, + { + "type": "object" + } + ], + "example": "auto" + }, + "stream_options": { + "description": "Options for streaming response", + "allOf": [ + { + "$ref": "#/components/schemas/StreamOptionsDto" + } + ] + }, + "logprobs": { + "type": "object", + "description": "Whether to return log probabilities of the output tokens", + "example": false + }, + "top_logprobs": { + "type": "object", + "description": "An integer between 0 and 5 specifying the number of most likely tokens to return at each token position", + "example": 0, + "minimum": 0, + "maximum": 5 + } + }, + "required": [ + "messages" + ] + }, + "ResponseFormatTextDto": { + "type": "object", + "properties": { + 
"type": { + "type": "string", + "enum": [ + "text" + ], + "description": "The type of response format", + "example": "text" + } + }, + "required": [ + "type" + ] + }, + "ResponseFormatJsonObjectDto": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "json_object" + ], + "description": "The type of response format", + "example": "json_object" + } + }, + "required": [ + "type" + ] + }, + "ResponseFormatJsonSchemaDto": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "json_schema" + ], + "description": "The type of response format", + "example": "json_schema" + }, + "json_schema": { + "type": "object", + "description": "JSON schema for json_schema type", + "additionalProperties": true + } + }, + "required": [ + "type", + "json_schema" + ] + }, + "FunctionToolDto": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "function" + ], + "description": "The type of tool", + "example": "function" + }, + "function": { + "type": "object", + "description": "The function definition", + "additionalProperties": true + } + }, + "required": [ + "type", + "function" + ] + }, + "CustomToolDto": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "custom" + ], + "description": "The type of tool", + "example": "custom" + }, + "custom": { + "type": "object", + "description": "Custom tool definition", + "additionalProperties": true + } + }, + "required": [ + "type", + "custom" + ] + }, + "TextContentDto": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "text" + ], + "description": "Content type - always \"text\"", + "example": "text" + }, + "text": { + "type": "string", + "description": "The text content", + "example": "What is in this image?" 
+ } + }, + "required": [ + "type", + "text" + ] + }, + "ImageUrlDto": { + "type": "object", + "properties": { + "url": { + "type": "string", + "description": "The URL of the image", + "example": "https://example.com/image.jpg" + }, + "detail": { + "type": "string", + "description": "The detail level of the image", + "enum": [ + "low", + "high", + "auto" + ], + "example": "auto" + } + }, + "required": [ + "url" + ] + }, + "ImageUrlContentDto": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "image_url" + ], + "description": "Content type - always \"image_url\"", + "example": "image_url" + }, + "image_url": { + "description": "Image URL object", + "example": { + "url": "https://example.com/image.jpg", + "detail": "auto" + }, + "allOf": [ + { + "$ref": "#/components/schemas/ImageUrlDto" + } + ] + } + }, + "required": [ + "type", + "image_url" + ] + }, + "InputAudioDto": { + "type": "object", + "properties": { + "data": { + "type": "string", + "description": "Base64 encoded audio data", + "example": "UklGRnoGAABXQVZFZm10IBAAAAABAAEAQB8AAEAfAAABAAgAZGF0YQoGAACBhYqFbF1fdJivrJBhNjVgodDbq2EcBj+a2/LDciUFLIHO8tiJNwgZaLvt559NEAxQp+PwtmMcBjiR1/LMeSwFJHfH8N2QQAoUXrTp66hVFApGn+DyvmwhBSuBzvLZiTYIG2m98OScTgwOUarm7bllHgU7jdXzzn0uBSF+zO/eizEIHWq+8+OWT" + }, + "format": { + "type": "string", + "description": "Audio format", + "enum": [ + "wav", + "mp3", + "m4a", + "ogg", + "flac", + "webm" + ], + "example": "wav" + } + }, + "required": [ + "data", + "format" + ] + }, + "InputAudioContentDto": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "input_audio" + ], + "description": "Content type - always \"input_audio\"", + "example": "input_audio" + }, + "input_audio": { + "description": "Input audio object", + "example": { + "data": 
"UklGRnoGAABXQVZFZm10IBAAAAABAAEAQB8AAEAfAAABAAgAZGF0YQoGAACBhYqFbF1fdJivrJBhNjVgodDbq2EcBj+a2/LDciUFLIHO8tiJNwgZaLvt559NEAxQp+PwtmMcBjiR1/LMeSwFJHfH8N2QQAoUXrTp66hVFApGn+DyvmwhBSuBzvLZiTYIG2m98OScTgwOUarm7bllHgU7jdXzzn0uBSF+zO/eizEIHWq+8+OWT", + "format": "wav" + }, + "allOf": [ + { + "$ref": "#/components/schemas/InputAudioDto" + } + ] + } + }, + "required": [ + "type", + "input_audio" + ] + }, + "FileContentDto": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "file" + ], + "description": "Content type - always \"file\"", + "example": "file" + }, + "file": { + "type": "object", + "description": "File object (OpenAI File type)", + "additionalProperties": true + } + }, + "required": [ + "type", + "file" + ] + }, + "RefusalContentDto": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "refusal" + ], + "description": "Content type - always \"refusal\"", + "example": "refusal" + }, + "refusal": { + "type": "string", + "description": "Refusal message", + "example": "I cannot help with that request" + } + }, + "required": [ + "type", + "refusal" + ] + }, + "CreateResponseDto": { + "type": "object", + "properties": { + "model": { + "type": "string", + "description": "Модель для использования в ответе. Это поле игнорируется, так как агент имеет свою конфигурацию модели.", + "example": "gpt-4" + }, + "instructions": { + "type": "string", + "description": "Инструкции для генерации ответа", + "example": "Ты - полезный помощник." 
+ }, + "input": { + "description": "Входной текст или сообщения для ответа", + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "object" + } + } + ] + }, + "max_output_tokens": { + "type": "number", + "description": "Максимальное количество токенов для генерации в ответе", + "example": 1000, + "minimum": 1 + }, + "temperature": { + "type": "number", + "description": "Температура для генерации ответа", + "example": 0.7, + "minimum": 0, + "maximum": 2 + }, + "metadata": { + "type": "object", + "description": "Дополнительные метаданные для ответа", + "additionalProperties": true + }, + "tools": { + "type": "array", + "description": "Инструменты, доступные для модели" + }, + "stream": { + "type": "boolean", + "description": "Флаг, указывающий на то, что ответ нужно передавать по частям", + "example": false, + "default": false + }, + "stream_options": { + "type": "object", + "description": "Опции для потоковой передачи. Устанавливается только при stream: true", + "additionalProperties": true + }, + "background": { + "type": "boolean", + "description": "Запуск модели в фоновом режиме", + "example": false, + "default": false + }, + "text": { + "type": "object", + "description": "Конфигурация для текстового ответа от модели", + "additionalProperties": true + }, + "tool_choice": { + "description": "Как модель должна выбирать инструменты при генерации ответа", + "oneOf": [ + { + "type": "string" + }, + { + "type": "object" + } + ] + }, + "parallel_tool_calls": { + "type": "boolean", + "description": "Разрешить модели выполнять вызовы инструментов параллельно", + "example": true, + "default": true + }, + "max_tool_calls": { + "type": "number", + "description": "Максимальное количество вызовов встроенных инструментов", + "example": 10 + }, + "previous_response_id": { + "type": "string", + "description": "ID предыдущего ответа для создания многоходовых диалогов", + "example": "resp_abc123" + }, + "conversation": { + "description": "Диалог, к 
которому принадлежит этот ответ", + "oneOf": [ + { + "type": "string" + }, + { + "type": "object" + } + ] + }, + "include": { + "type": "array", + "description": "Дополнительные выходные данные для включения в ответ модели", + "items": { + "type": "string" + }, + "example": [ + "web_search_call.action.sources", + "code_interpreter_call.outputs" + ] + }, + "store": { + "type": "boolean", + "description": "Хранить ли сгенерированный ответ модели для последующего получения через API", + "example": true, + "default": true + }, + "top_p": { + "type": "number", + "description": "Альтернатива temperature. Модель рассматривает токены с top_p вероятностной массой", + "example": 1, + "minimum": 0, + "maximum": 1, + "default": 1 + }, + "top_logprobs": { + "type": "number", + "description": "Количество наиболее вероятных токенов для возврата на каждой позиции токена", + "example": 5, + "minimum": 0, + "maximum": 20 + }, + "truncation": { + "type": "string", + "description": "Стратегия усечения для ответа модели", + "enum": [ + "auto", + "disabled" + ], + "default": "disabled" + }, + "service_tier": { + "type": "string", + "description": "Уровень обслуживания для обработки запроса", + "enum": [ + "auto", + "default", + "flex", + "priority" + ], + "default": "auto" + }, + "safety_identifier": { + "type": "string", + "description": "Стабильный идентификатор для обнаружения пользователей, нарушающих политику использования", + "example": "user-hash-12345" + }, + "prompt_cache_key": { + "type": "string", + "description": "Используется OpenAI для кэширования ответов на похожие запросы", + "example": "cache-key-12345" + }, + "prompt": { + "type": "object", + "description": "Ссылка на шаблон промпта и его переменные", + "additionalProperties": true + }, + "reasoning": { + "type": "object", + "description": "Конфигурация для reasoning моделей (gpt-5 и o-series)", + "additionalProperties": true + }, + "user": { + "type": "string", + "description": "Устарело. 
Используйте safety_identifier или prompt_cache_key", + "example": "user-12345", + "deprecated": true + } + } + }, + "GetResponseQueryDto": { + "type": "object", + "properties": { + "include": { + "description": "Дополнительные поля для включения в ответ", + "example": [ + "web_search_call.action.sources", + "code_interpreter_call.outputs" + ], + "type": "array", + "items": { + "type": "string" + } + }, + "include_obfuscation": { + "type": "boolean", + "description": "Включить обфускацию потока для защиты от атак по побочным каналам", + "example": true + }, + "starting_after": { + "type": "number", + "description": "Номер последовательности события, после которого начать потоковую передачу", + "example": 0, + "minimum": 0 + }, + "stream": { + "type": "boolean", + "description": "Потоковая передача данных ответа модели", + "example": false + } + } + }, + "ResponseUsageDto": { + "type": "object", + "properties": { + "prompt_tokens": { + "type": "number", + "description": "Количество токенов в промпте", + "example": 150 + }, + "completion_tokens": { + "type": "number", + "description": "Количество токенов в ответе", + "example": 300 + }, + "total_tokens": { + "type": "number", + "description": "Общее количество токенов", + "example": 450 + } + }, + "required": [ + "prompt_tokens", + "completion_tokens", + "total_tokens" + ] + }, + "OpenAiResponseDto": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Уникальный идентификатор ответа", + "example": "resp_abc123" + }, + "object": { + "type": "string", + "description": "Тип объекта", + "example": "response" + }, + "created_at": { + "type": "number", + "description": "Временная метка создания", + "example": 1234567890 + }, + "model": { + "type": "string", + "description": "Идентификатор модели", + "example": "gpt-4" + }, + "status": { + "type": "string", + "description": "Статус ответа", + "example": "completed" + }, + "usage": { + "description": "Информация об использовании токенов", + 
"allOf": [ + { + "$ref": "#/components/schemas/ResponseUsageDto" + } + ] + } + }, + "required": [ + "id", + "object", + "created_at", + "model", + "status" + ] + }, + "ConversationItemContentInputText": { + "type": "object", + "properties": { + "type": { + "type": "string", + "description": "Content type", + "example": "input_text" + }, + "text": { + "type": "string", + "description": "Text content", + "example": "Hello!" + } + }, + "required": [ + "type", + "text" + ] + }, + "ConversationItemMessage": { + "type": "object", + "properties": { + "type": { + "type": "string", + "description": "Item type", + "example": "message" + }, + "role": { + "type": "string", + "description": "Role of the message", + "example": "user", + "enum": [ + "user", + "assistant" + ] + }, + "content": { + "description": "Content of the message", + "type": "array", + "items": { + "$ref": "#/components/schemas/ConversationItemContentInputText" + } + } + }, + "required": [ + "type", + "role" + ] + }, + "CreateConversationDto": { + "type": "object", + "properties": { + "items": { + "description": "Initial items to include in the conversation context. 
You may add up to 20 items at a time.", + "type": "array", + "items": { + "$ref": "#/components/schemas/ConversationItemMessage" + } + }, + "metadata": { + "type": "object", + "description": "Set of 16 key-value pairs that can be attached to an object", + "example": { + "topic": "demo" + }, + "nullable": true + } + } + }, + "UpdateConversationDto": { + "type": "object", + "properties": { + "metadata": { + "type": "object", + "description": "Set of 16 key-value pairs that can be attached to an object", + "example": { + "topic": "project-x" + } + } + }, + "required": [ + "metadata" + ] + }, + "ConversationDto": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "The unique ID of the conversation", + "example": "conv_123" + }, + "object": { + "type": "string", + "description": "The object type, which is always conversation", + "example": "conversation" + }, + "created_at": { + "type": "number", + "description": "The time at which the conversation was created, measured in seconds since the Unix epoch", + "example": 1741900000 + }, + "metadata": { + "type": "object", + "description": "Set of 16 key-value pairs that can be attached to an object", + "example": { + "topic": "demo" + }, + "nullable": true + } + }, + "required": [ + "id", + "object", + "created_at" + ] + }, + "ConversationDeletedDto": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "The ID of the deleted conversation", + "example": "conv_123" + }, + "object": { + "type": "string", + "description": "The object type", + "example": "conversation.deleted" + }, + "deleted": { + "type": "boolean", + "description": "Whether the conversation was deleted", + "example": true + } + }, + "required": [ + "id", + "object", + "deleted" + ] + }, + "ConversationItemContentDto": { + "type": "object", + "properties": { + "type": { + "type": "string", + "description": "Content type", + "example": "input_text" + }, + "text": { + "type": "string", + 
"description": "Text content", + "example": "Hello!" + } + }, + "required": [ + "type" + ] + }, + "ConversationItemDto": { + "type": "object", + "properties": { + "type": { + "type": "string", + "description": "Item type", + "example": "message" + }, + "id": { + "type": "string", + "description": "The unique ID of the item", + "example": "msg_abc" + }, + "status": { + "type": "string", + "description": "Status of the item", + "example": "completed" + }, + "role": { + "type": "string", + "description": "Role of the message", + "example": "user" + }, + "content": { + "description": "Content of the item", + "type": "array", + "items": { + "$ref": "#/components/schemas/ConversationItemContentDto" + } + } + }, + "required": [ + "type", + "id", + "status" + ] + }, + "ConversationItemListDto": { + "type": "object", + "properties": { + "object": { + "type": "string", + "description": "The type of object returned", + "example": "list" + }, + "data": { + "description": "A list of conversation items", + "type": "array", + "items": { + "$ref": "#/components/schemas/ConversationItemDto" + } + }, + "first_id": { + "type": "string", + "description": "The ID of the first item in the list", + "example": "msg_abc" + }, + "last_id": { + "type": "string", + "description": "The ID of the last item in the list", + "example": "msg_abc" + }, + "has_more": { + "type": "boolean", + "description": "Whether there are more items available", + "example": false + } + }, + "required": [ + "object", + "data", + "first_id", + "last_id", + "has_more" + ] + }, + "ListItemsQueryDto": { + "type": "object", + "properties": { + "after": { + "type": "string", + "description": "An item ID to list items after, used in pagination" + }, + "include": { + "description": "Specify additional output data to include in the model response", + "type": "array", + "items": { + "type": "string" + } + }, + "limit": { + "type": "number", + "description": "A limit on the number of objects to be returned. 
Limit can range between 1 and 100, and the default is 20", + "default": 20, + "minimum": 1, + "maximum": 100 + }, + "order": { + "type": "string", + "description": "The order to return the input items in", + "enum": [ + "asc", + "desc" + ], + "default": "desc" + } + } + }, + "GetItemQueryDto": { + "type": "object", + "properties": { + "include": { + "description": "Specify additional output data to include in the model response", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "ItemContentDto": { + "type": "object", + "properties": { + "type": { + "type": "string", + "description": "Content type", + "example": "input_text" + }, + "text": { + "type": "string", + "description": "Text content", + "example": "Hello!" + } + }, + "required": [ + "type", + "text" + ] + }, + "CreateItemDto": { + "type": "object", + "properties": { + "type": { + "type": "string", + "description": "Item type", + "example": "message" + }, + "role": { + "type": "string", + "description": "Role of the message", + "example": "user", + "enum": [ + "user", + "assistant" + ] + }, + "content": { + "description": "Content of the message", + "type": "array", + "items": { + "$ref": "#/components/schemas/ItemContentDto" + } + } + }, + "required": [ + "type", + "role", + "content" + ] + }, + "CreateItemsDto": { + "type": "object", + "properties": { + "items": { + "description": "The items to add to the conversation. 
You may add up to 20 items at a time", + "type": "array", + "items": { + "$ref": "#/components/schemas/CreateItemDto" + } + } + }, + "required": [ + "items" + ] + }, + "CreateItemsQueryDto": { + "type": "object", + "properties": { + "include": { + "description": "Additional fields to include in the response", + "type": "array", + "items": { + "type": "string" + } + } + } + } + } + } +} \ No newline at end of file diff --git a/backend/.env.computer-svc b/backend/.env.computer-svc new file mode 100644 index 0000000..6a7e3f4 --- /dev/null +++ b/backend/.env.computer-svc @@ -0,0 +1,65 @@ +# ============================================ +# GooSeek Computer Service — Environment +# ============================================ +# Минимальный .env для запуска computer-svc +# Копировать в .env и заполнить значения + +# ============================================ +# ОБЯЗАТЕЛЬНЫЕ +# ============================================ + +# PostgreSQL — хранение задач, памяти, артефактов +DATABASE_URL=postgres://gooseek:password@postgres:5432/gooseek?sslmode=disable + +# Минимум один LLM провайдер +OPENAI_API_KEY=sk-xxx + +# ============================================ +# РЕКОМЕНДУЕМЫЕ (multi-model routing) +# ============================================ + +# Claude — лучший для reasoning и coding +ANTHROPIC_API_KEY=sk-ant-xxx + +# Gemini — лучший для long context и research +GEMINI_API_KEY=xxx + +# Redis — кэширование +REDIS_URL=redis://redis:6379 + +# ============================================ +# ПОРТ СЕРВИСА +# ============================================ + +COMPUTER_SVC_PORT=3030 + +# ============================================ +# SANDBOX (Docker для исполнения кода) +# ============================================ + +SANDBOX_IMAGE=gooseek/sandbox:latest + +# ============================================ +# CONNECTORS (опционально) +# ============================================ + +# --- Telegram Bot --- +# Получить токен: @BotFather → /newbot +TELEGRAM_BOT_TOKEN= + +# --- Email 
SMTP --- +SMTP_HOST= +SMTP_PORT=587 +SMTP_USERNAME= +SMTP_PASSWORD= +SMTP_FROM= +SMTP_FROM_NAME=GooSeek Computer + +# --- S3/MinIO Storage --- +S3_ENDPOINT= +S3_ACCESS_KEY= +S3_SECRET_KEY= +S3_BUCKET=gooseek-artifacts +S3_USE_SSL=false +S3_REGION=us-east-1 +S3_PUBLIC_URL= diff --git a/backend/Makefile b/backend/Makefile new file mode 100644 index 0000000..01d82ab --- /dev/null +++ b/backend/Makefile @@ -0,0 +1,86 @@ +.PHONY: all build clean test dev docker-build docker-up + +GO=go +GOFLAGS=-ldflags="-s -w" + +SERVICES=api-gateway chat-svc agent-svc search-svc llm-svc scraper-svc discover-svc collection-svc file-svc thread-svc computer-svc + +all: build + +build: + @for svc in $(SERVICES); do \ + echo "Building $$svc..."; \ + $(GO) build $(GOFLAGS) -o bin/$$svc ./cmd/$$svc; \ + done + +build-%: + $(GO) build $(GOFLAGS) -o bin/$* ./cmd/$* + +clean: + rm -rf bin/ + +test: + $(GO) test -v ./... + +test-coverage: + $(GO) test -coverprofile=coverage.out ./... + $(GO) tool cover -html=coverage.out -o coverage.html + +lint: + golangci-lint run ./... + +dev: + @echo "Starting development servers..." + @$(GO) run ./cmd/api-gateway & + @$(GO) run ./cmd/agent-svc & + @$(GO) run ./cmd/search-svc & + @wait + +dev-api-gateway: + $(GO) run ./cmd/api-gateway + +dev-agent-svc: + $(GO) run ./cmd/agent-svc + +dev-chat-svc: + $(GO) run ./cmd/chat-svc + +dev-search-svc: + $(GO) run ./cmd/search-svc + +docker-build: + docker build -t gooseek/api-gateway -f deploy/docker/Dockerfile.api-gateway . + docker build -t gooseek/agent-svc -f deploy/docker/Dockerfile.agent-svc . + docker build -t gooseek/search-svc -f deploy/docker/Dockerfile.search-svc . + +docker-up: + docker-compose -f deploy/docker/docker-compose.yml up -d + +docker-down: + docker-compose -f deploy/docker/docker-compose.yml down + +deps: + $(GO) mod download + $(GO) mod tidy + +generate: + $(GO) generate ./... 
+ +# K8s commands +k8s-deploy: + ./deploy/k8s/deploy.sh + +k8s-apply: + kubectl apply -k deploy/k8s/ + +k8s-delete: + kubectl delete -k deploy/k8s/ + +k8s-status: + kubectl -n gooseek get pods,svc,ingress + +k8s-logs-%: + kubectl -n gooseek logs -f deployment/$* --all-containers + +k8s-restart-%: + kubectl -n gooseek rollout restart deployment/$* diff --git a/backend/README.md b/backend/README.md new file mode 100644 index 0000000..5da3aea --- /dev/null +++ b/backend/README.md @@ -0,0 +1,257 @@ +# GooSeek Backend (Go) + +Perplexity-style AI Search Backend на Go. + +## Архитектура + +``` +backend/ +├── cmd/ # Точки входа +│ ├── api-gateway/ # API Gateway (HTTP proxy + routing) +│ ├── chat-svc/ # Chat orchestration service +│ ├── search-svc/ # SearXNG wrapper +│ ├── llm-svc/ # LLM providers abstraction +│ ├── scraper-svc/ # Web scraping (Crawl4AI-like) +│ ├── agent-svc/ # AI Agent (search orchestrator) +│ ├── discover-svc/ # Discover (article summaries) +│ ├── collection-svc/ # Collections (saved research) +│ ├── file-svc/ # File upload & analysis +│ └── thread-svc/ # Threads, Spaces, Memory, Pages +├── internal/ # Внутренние пакеты +│ ├── agent/ # Search orchestrator logic +│ │ ├── classifier.go # Query classifier +│ │ ├── researcher.go # Research agent +│ │ ├── writer.go # Response writer +│ │ └── orchestrator.go # Main orchestration +│ ├── llm/ # LLM abstractions +│ │ ├── client.go # Generic LLM client +│ │ ├── openai.go # OpenAI provider +│ │ ├── anthropic.go # Anthropic provider +│ │ └── gemini.go # Google Gemini +│ ├── search/ # Search integrations +│ │ ├── searxng.go # SearXNG client +│ │ ├── media.go # Image/video search +│ │ └── reranker.go # BM25 reranking +│ ├── scraper/ # Web scraping +│ │ ├── crawler.go # Page crawler +│ │ ├── parser.go # HTML parser +│ │ └── cache.go # Scrape cache +│ ├── widgets/ # Widget generation +│ │ ├── weather.go # Weather widget +│ │ ├── calculator.go # Calculator widget +│ │ └── registry.go # Widget registry +│ ├── session/ # 
Session management +│ │ ├── manager.go # Session manager +│ │ └── events.go # Event emitter +│ ├── prompts/ # LLM prompts +│ │ ├── classifier.go # Classification prompts +│ │ ├── researcher.go # Research prompts +│ │ └── writer.go # Writer prompts +│ └── types/ # Shared types +│ ├── blocks.go # Response blocks +│ ├── chunks.go # Search chunks +│ └── widgets.go # Widget types +├── pkg/ # Public packages +│ ├── ndjson/ # NDJSON streaming +│ ├── middleware/ # HTTP middleware +│ └── config/ # Configuration +├── api/ # API definitions +│ └── proto/ # gRPC definitions (optional) +├── deploy/ # Deployment configs +│ ├── docker/ # Dockerfiles +│ └── k8s/ # Kubernetes manifests +├── go.mod +├── go.sum +└── Makefile +``` + +## Сервисы + +### 1. API Gateway (port 3015) +HTTP прокси для роутинга запросов к микросервисам. + +### 2. Chat Service (port 3005) +Оркестрация чат-запросов, интеграция с agent-svc. + +### 3. Agent Service (port 3018) +Главная логика Perplexity-style поиска: +- Query classification +- Multi-step research +- Response generation with citations + +### 4. Search Service (port 3001) +SearXNG wrapper для веб-поиска. + +### 5. LLM Service (port 3020) +Абстракция над LLM провайдерами. + +### 6. Scraper Service (port 3021) +Web scraping для получения контента страниц. + +### 7. Discover Service (port 3002) +Perplexity-style Discover функционал: +- Article summaries (дайджесты статей) +- Digest generation и кэширование +- Pre-generated digests storage +- Redis + PostgreSQL персистентность + +### 8. Collection Service (port 3025) +Perplexity Collections: +- Сохранение исследований в проекты +- Контекст коллекции для новых поисков +- Шаринг коллекций +- CRUD для items (searches, notes, urls, files) + +### 9. File Service (port 3026) +Perplexity File Analysis: +- Загрузка PDF, images, text файлов +- Автоматический анализ через LLM +- Извлечение текста из PDF +- Vision analysis для изображений +- Интеграция с Collections + +### 10. 
Thread Service (port 3027) +Полный функционал сессий и персонализации: + +**Threads (Chat History)**: +- Сохранение истории чатов в PostgreSQL +- Автогенерация заголовков +- Пагинация сообщений +- Sharing по публичной ссылке + +**Spaces (Workspaces)**: +- Custom AI instructions per space +- Default focus mode и model +- Организация threads по проектам +- Цветовые теги и иконки + +**Pages (Article Generator)**: +- Генерация статей из threads +- Форматированные секции с цитатами +- Экспорт в Markdown/HTML +- Публичный sharing + +**Memory (Personalization)**: +- Сохранение предпочтений пользователя +- Фактов о пользователе +- Custom instructions +- Контекст для всех сессий + +## Запуск + +```bash +# Development +make dev + +# Build all +make build + +# Docker +make docker-build +make docker-up + +# Tests +make test +``` + +## API + +### POST /api/chat (NDJSON stream) +```json +{ + "message": { + "messageId": "uuid", + "chatId": "uuid", + "content": "What is quantum computing?" + }, + "optimizationMode": "speed|balanced|quality", + "chatModel": { + "providerId": "openai", + "key": "gpt-4o" + }, + "history": [["human", "..."], ["ai", "..."]], + "locale": "ru" +} +``` + +### Response (NDJSON) +``` +{"type":"block","block":{"id":"...","type":"research","data":{...}}} +{"type":"block","block":{"id":"...","type":"source","data":[...]}} +{"type":"block","block":{"id":"...","type":"widget","data":{...}}} +{"type":"textChunk","blockId":"...","chunk":"Квантовые..."} +{"type":"messageEnd"} +``` + +## Режимы оптимизации + +| Mode | Time | LLM Calls | Scraping | +|----------|---------|-----------|----------| +| Speed | 8-15s | 1 | No | +| Balanced | 20-40s | 2-3 | Top 5 | +| Quality | 60-120s | 5-10 | Top 15 | + +## Perplexity-style Features + +### Focus Modes +- **All** — полный веб-поиск +- **Academic** — Google Scholar, ArXiv, PubMed +- **YouTube** — видео контент +- **Reddit** — обсуждения сообщества +- **Code** — GitHub, StackOverflow +- **News** — актуальные новости +- 
**Math** — Wolfram Alpha, вычисления +- **Finance** — финансовые данные + +### Deep Research +Multi-agent система глубокого исследования: +- Автоматическое разбиение на подзапросы +- 30+ поисков, 100+ источников +- Итеративный поиск с follow-up +- Синтез инсайтов +- Генерация отчёта с цитатами + +### Collections +- Сохранение исследований в проекты +- Контекст коллекции для новых поисков +- Типы items: search, note, url, file +- Шаринг коллекций + +### File Analysis +- PDF extraction +- Image analysis (vision models) +- Text file processing +- Интеграция с поиском + +### Clarifying Questions (Pro Search) +- Автоматическое определение неоднозначных запросов +- 2-3 уточняющих вопроса перед поиском +- Работает в Quality mode + +### Related Questions +- Генерация 3-4 связанных вопросов после ответа +- На основе контекста запроса и ответа +- Поддержка локализации (ru/en) + +### Spaces & Pages +- **Spaces**: workspaces с custom instructions +- **Pages**: генерация статей из исследований +- Публичный sharing по ссылке +- Счётчик просмотров + +### Memory & Personalization +- Сохранение предпочтений пользователя +- Типы: preference, fact, instruction, interest +- Автоматический контекст в каждом запросе +- Importance scoring + +## Технологии + +- **HTTP**: Fiber v2 +- **Streaming**: NDJSON over HTTP +- **LLM**: OpenAI, Anthropic, Gemini +- **Search**: SearXNG +- **Scraping**: Crawl4AI / custom crawler +- **Caching**: Redis +- **Database**: PostgreSQL +- **Auth**: JWT validation diff --git a/backend/cmd/agent-svc/main.go b/backend/cmd/agent-svc/main.go new file mode 100644 index 0000000..e295e64 --- /dev/null +++ b/backend/cmd/agent-svc/main.go @@ -0,0 +1,209 @@ +package main + +import ( + "bufio" + "context" + "fmt" + "log" + "os" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/internal/agent" + "github.com/gooseek/backend/internal/llm" + 
"github.com/gooseek/backend/internal/search" + "github.com/gooseek/backend/internal/session" + "github.com/gooseek/backend/pkg/config" + "github.com/gooseek/backend/pkg/ndjson" +) + +type SearchRequest struct { + Message struct { + MessageID string `json:"messageId"` + ChatID string `json:"chatId"` + Content string `json:"content"` + } `json:"message"` + OptimizationMode string `json:"optimizationMode"` + Sources []string `json:"sources"` + History [][]string `json:"history"` + Files []string `json:"files"` + ChatModel ChatModel `json:"chatModel"` + SystemInstructions string `json:"systemInstructions"` + Locale string `json:"locale"` + AnswerMode string `json:"answerMode"` + ResponsePrefs *struct { + Format string `json:"format"` + Length string `json:"length"` + Tone string `json:"tone"` + } `json:"responsePrefs"` + LearningMode bool `json:"learningMode"` +} + +type ChatModel struct { + ProviderID string `json:"providerId"` + Key string `json:"key"` +} + +func main() { + cfg, err := config.Load() + if err != nil { + log.Fatal("Failed to load config:", err) + } + + searchClient := search.NewSearXNGClient(cfg) + + app := fiber.New(fiber.Config{ + StreamRequestBody: true, + BodyLimit: 10 * 1024 * 1024, + ReadTimeout: time.Minute, + WriteTimeout: 5 * time.Minute, + IdleTimeout: 2 * time.Minute, + }) + + app.Use(logger.New()) + app.Use(cors.New()) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + app.Post("/api/v1/agents/search", func(c *fiber.Ctx) error { + var req SearchRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if req.Message.Content == "" { + return c.Status(400).JSON(fiber.Map{"error": "Message content required"}) + } + + providerID := req.ChatModel.ProviderID + modelKey := req.ChatModel.Key + + if providerID == "" && cfg.TimewebAPIKey != "" { + providerID = "timeweb" + modelKey = "gpt-4o" + } else if providerID == "" { + 
providerID = "openai" + modelKey = "gpt-4o-mini" + } + + llmClient, err := llm.NewClient(llm.ProviderConfig{ + ProviderID: providerID, + ModelKey: modelKey, + APIKey: getAPIKey(cfg, providerID), + BaseURL: cfg.TimewebAPIBaseURL, + AgentAccessID: cfg.TimewebAgentAccessID, + }) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to create LLM client: " + err.Error()}) + } + + chatHistory := make([]llm.Message, 0, len(req.History)) + for _, h := range req.History { + if len(h) >= 2 { + role := llm.RoleUser + if h[0] == "ai" || h[0] == "assistant" { + role = llm.RoleAssistant + } + chatHistory = append(chatHistory, llm.Message{ + Role: role, + Content: h[1], + }) + } + } + + mode := agent.ModeBalanced + switch req.OptimizationMode { + case "speed": + mode = agent.ModeSpeed + case "quality": + mode = agent.ModeQuality + } + + var responsePrefs *agent.ResponsePrefs + if req.ResponsePrefs != nil { + responsePrefs = &agent.ResponsePrefs{ + Format: req.ResponsePrefs.Format, + Length: req.ResponsePrefs.Length, + Tone: req.ResponsePrefs.Tone, + } + } + + input := agent.OrchestratorInput{ + ChatHistory: chatHistory, + FollowUp: req.Message.Content, + Config: agent.OrchestratorConfig{ + LLM: llmClient, + SearchClient: searchClient, + Mode: mode, + Sources: req.Sources, + FileIDs: req.Files, + SystemInstructions: req.SystemInstructions, + Locale: req.Locale, + AnswerMode: req.AnswerMode, + ResponsePrefs: responsePrefs, + LearningMode: req.LearningMode, + DiscoverSvcURL: cfg.DiscoverSvcURL, + Crawl4AIURL: cfg.Crawl4AIURL, + }, + } + + sess := session.NewSession() + + c.Set("Content-Type", "application/x-ndjson") + c.Set("Cache-Control", "no-cache") + c.Set("Transfer-Encoding", "chunked") + + c.Context().SetBodyStreamWriter(func(w *bufio.Writer) { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + writer := ndjson.NewWriter(w) + + unsubscribe := sess.Subscribe(func(eventType session.EventType, data interface{}) { + 
if eventType == session.EventData { + if dataMap, ok := data.(map[string]interface{}); ok { + writer.Write(dataMap) + w.Flush() + } + } + }) + defer unsubscribe() + + err := agent.RunOrchestrator(ctx, sess, input) + if err != nil { + ndjson.WriteError(writer, err) + } + }) + + return nil + }) + + port := cfg.AgentSvcPort + log.Printf("agent-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func getAPIKey(cfg *config.Config, providerID string) string { + switch providerID { + case "timeweb": + return cfg.TimewebAPIKey + case "openai": + return cfg.OpenAIAPIKey + case "anthropic": + return cfg.AnthropicAPIKey + case "gemini", "google": + return cfg.GeminiAPIKey + default: + return "" + } +} + +func init() { + if os.Getenv("PORT") == "" { + os.Setenv("PORT", "3018") + } +} diff --git a/backend/cmd/api-gateway/main.go b/backend/cmd/api-gateway/main.go new file mode 100644 index 0000000..195732c --- /dev/null +++ b/backend/cmd/api-gateway/main.go @@ -0,0 +1,232 @@ +package main + +import ( + "bufio" + "fmt" + "io" + "log" + "net/http" + "os" + "strings" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/pkg/config" +) + +var svcURLs map[string]string + +func main() { + cfg, err := config.Load() + if err != nil { + log.Fatal("Failed to load config:", err) + } + + svcURLs = map[string]string{ + "chat": cfg.ChatSvcURL, + "agents": cfg.AgentSvcURL, + "search": cfg.SearchSvcURL, + "llm": cfg.LLMSvcURL, + "scraper": cfg.ScraperSvcURL, + "memory": cfg.MemorySvcURL, + "library": cfg.LibrarySvcURL, + "thread": cfg.ThreadSvcURL, + "discover": cfg.DiscoverSvcURL, + "finance": cfg.FinanceHeatmapURL, + "learning": cfg.LearningSvcURL, + "computer": cfg.ComputerSvcURL, + } + + app := fiber.New(fiber.Config{ + StreamRequestBody: true, + BodyLimit: 50 * 1024 * 1024, + ReadTimeout: time.Duration(cfg.HTTPTimeout), + WriteTimeout: 5 * 
time.Minute, + IdleTimeout: 2 * time.Minute, + }) + + app.Use(logger.New()) + app.Use(cors.New(cors.Config{ + AllowOrigins: strings.Join(cfg.AllowedOrigins, ","), + AllowHeaders: "Origin, Content-Type, Accept, Authorization", + AllowMethods: "GET, POST, PUT, PATCH, DELETE, OPTIONS", + })) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + app.Get("/ready", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ready"}) + }) + + app.Post("/api/chat", handleChat) + app.All("/api/*", handleProxy) + + port := cfg.APIGatewayPort + log.Printf("api-gateway listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func getTarget(path string) (base, rewrite string) { + switch { + case path == "/api/chat" || strings.HasPrefix(path, "/api/chat?"): + return svcURLs["chat"], "/api/v1/chat" + case strings.HasPrefix(path, "/api/v1/agents"): + return svcURLs["agents"], path + case strings.HasPrefix(path, "/api/v1/search"): + return svcURLs["search"], path + case strings.HasPrefix(path, "/api/v1/llm"), strings.HasPrefix(path, "/api/v1/providers"): + return svcURLs["llm"], path + case strings.HasPrefix(path, "/api/v1/memory"): + return svcURLs["memory"], path + case strings.HasPrefix(path, "/api/v1/library"): + return svcURLs["library"], path + case strings.HasPrefix(path, "/api/v1/threads"): + return svcURLs["thread"], path + case strings.HasPrefix(path, "/api/v1/spaces"): + return svcURLs["thread"], path + case strings.HasPrefix(path, "/api/v1/pages"): + return svcURLs["thread"], path + case strings.HasPrefix(path, "/api/v1/share"): + return svcURLs["thread"], path + case strings.HasPrefix(path, "/api/v1/discover"): + return svcURLs["discover"], path + case strings.HasPrefix(path, "/api/v1/heatmap"): + return svcURLs["finance"], path + case strings.HasPrefix(path, "/api/v1/movers"): + return svcURLs["finance"], path + case strings.HasPrefix(path, "/api/v1/markets"): + return svcURLs["finance"], path 
+ case strings.HasPrefix(path, "/api/v1/learning"): + return svcURLs["learning"], path + case strings.HasPrefix(path, "/api/v1/computer"): + return svcURLs["computer"], path + default: + return "", "" + } +} + +func handleChat(c *fiber.Ctx) error { + base := svcURLs["chat"] + if base == "" { + return c.Status(503).JSON(fiber.Map{"error": "Chat service not configured"}) + } + + targetURL := strings.TrimSuffix(base, "/") + "/api/v1/chat" + + req, err := http.NewRequest("POST", targetURL, strings.NewReader(string(c.Body()))) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + req.Header.Set("Content-Type", "application/json") + if auth := c.Get("Authorization"); auth != "" { + req.Header.Set("Authorization", auth) + } + + client := &http.Client{Timeout: 5 * time.Minute} + resp, err := client.Do(req) + if err != nil { + return c.Status(503).JSON(fiber.Map{"error": "Service unavailable"}) + } + + if resp.StatusCode != http.StatusOK { + defer resp.Body.Close() + body, _ := io.ReadAll(resp.Body) + return c.Status(resp.StatusCode).Send(body) + } + + c.Set("Content-Type", "application/x-ndjson") + c.Set("Cache-Control", "no-cache") + c.Set("Transfer-Encoding", "chunked") + + c.Context().SetBodyStreamWriter(func(w *bufio.Writer) { + defer resp.Body.Close() + buf := make([]byte, 4096) + for { + n, err := resp.Body.Read(buf) + if n > 0 { + w.Write(buf[:n]) + w.Flush() + } + if err == io.EOF { + break + } + if err != nil { + break + } + } + }) + + return nil +} + +func handleProxy(c *fiber.Ctx) error { + path := c.Path() + base, rewrite := getTarget(path) + + if base == "" { + return c.Status(404).JSON(fiber.Map{"error": "Not found"}) + } + + targetURL := strings.TrimSuffix(base, "/") + rewrite + if c.Context().QueryArgs().Len() > 0 { + targetURL += "?" 
+ string(c.Context().QueryArgs().QueryString()) + } + + method := c.Method() + var body io.Reader + if method != "GET" && method != "HEAD" { + body = strings.NewReader(string(c.Body())) + } + + req, err := http.NewRequest(method, targetURL, body) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + passHeaders := []string{"Authorization", "Content-Type", "Accept", "User-Agent", "Accept-Language"} + for _, h := range passHeaders { + if v := c.Get(h); v != "" { + req.Header.Set(h, v) + } + } + + client := &http.Client{Timeout: time.Minute} + resp, err := client.Do(req) + if err != nil { + return handleFallback(c, path) + } + defer resp.Body.Close() + + for _, h := range []string{"Content-Type", "Cache-Control", "Set-Cookie"} { + if v := resp.Header.Get(h); v != "" { + c.Set(h, v) + } + } + + data, _ := io.ReadAll(resp.Body) + return c.Status(resp.StatusCode).Send(data) +} + +func handleFallback(c *fiber.Ctx, path string) error { + switch { + case strings.HasPrefix(path, "/api/v1/discover"): + return c.JSON(fiber.Map{"items": []interface{}{}}) + case strings.HasPrefix(path, "/api/geo-context"): + return c.JSON(fiber.Map{"country": nil, "city": nil}) + case strings.HasPrefix(path, "/api/translations"): + return c.JSON(fiber.Map{}) + default: + return c.Status(503).JSON(fiber.Map{"error": "Service unavailable"}) + } +} + +func init() { + if os.Getenv("PORT") == "" { + os.Setenv("PORT", "3015") + } +} diff --git a/backend/cmd/browser-svc/main.go b/backend/cmd/browser-svc/main.go new file mode 100644 index 0000000..2902f05 --- /dev/null +++ b/backend/cmd/browser-svc/main.go @@ -0,0 +1,53 @@ +package main + +import ( + "context" + "log" + "os" + "os/signal" + "strconv" + "syscall" + "time" + + "github.com/gooseek/backend/internal/computer/browser" +) + +func main() { + port := 3050 + if p := os.Getenv("PORT"); p != "" { + if parsed, err := strconv.Atoi(p); err == nil { + port = parsed + } + } + if p := os.Getenv("BROWSER_SVC_PORT"); p != 
"" { + if parsed, err := strconv.Atoi(p); err == nil { + port = parsed + } + } + + cfg := browser.ServerConfig{ + Port: port, + MaxSessions: 20, + SessionTimeout: 30 * time.Minute, + CleanupInterval: 5 * time.Minute, + } + + server := browser.NewBrowserServer(cfg) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + sigCh := make(chan os.Signal, 1) + signal.Notify(sigCh, syscall.SIGINT, syscall.SIGTERM) + + go func() { + <-sigCh + log.Println("[browser-svc] Shutting down...") + cancel() + }() + + log.Printf("[browser-svc] Starting browser service on port %d", port) + if err := server.Start(ctx); err != nil { + log.Fatalf("[browser-svc] Server error: %v", err) + } +} diff --git a/backend/cmd/chat-svc/main.go b/backend/cmd/chat-svc/main.go new file mode 100644 index 0000000..1d0f741 --- /dev/null +++ b/backend/cmd/chat-svc/main.go @@ -0,0 +1,235 @@ +package main + +import ( + "bufio" + "bytes" + "encoding/json" + "fmt" + "io" + "log" + "net/http" + "os" + "strings" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/pkg/config" +) + +type ChatRequest struct { + Message struct { + MessageID string `json:"messageId"` + ChatID string `json:"chatId"` + Content string `json:"content"` + } `json:"message"` + OptimizationMode string `json:"optimizationMode"` + Sources []string `json:"sources"` + History [][]string `json:"history"` + Files []string `json:"files"` + ChatModel ChatModel `json:"chatModel"` + EmbeddingModel ChatModel `json:"embeddingModel"` + SystemInstructions string `json:"systemInstructions"` + Locale string `json:"locale"` + AnswerMode string `json:"answerMode"` + ResponsePrefs *struct { + Format string `json:"format"` + Length string `json:"length"` + Tone string `json:"tone"` + } `json:"responsePrefs"` + LearningMode bool `json:"learningMode"` +} + +type ChatModel struct { + ProviderID string `json:"providerId"` 
+ Key string `json:"key"` +} + +func main() { + cfg, err := config.Load() + if err != nil { + log.Fatal("Failed to load config:", err) + } + + app := fiber.New(fiber.Config{ + StreamRequestBody: true, + BodyLimit: 50 * 1024 * 1024, + ReadTimeout: time.Minute, + WriteTimeout: 5 * time.Minute, + IdleTimeout: 2 * time.Minute, + }) + + app.Use(logger.New()) + app.Use(cors.New()) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + app.Get("/ready", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ready"}) + }) + + app.Get("/metrics", func(c *fiber.Ctx) error { + c.Set("Content-Type", "text/plain; charset=utf-8") + return c.SendString( + "# HELP gooseek_up Service is up (1) or down (0)\n" + + "# TYPE gooseek_up gauge\n" + + "gooseek_up 1\n", + ) + }) + + app.Get("/api/v1/config", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{ + "values": fiber.Map{ + "version": 1, + "setupComplete": true, + "preferences": fiber.Map{}, + }, + "fields": fiber.Map{}, + "modelProviders": []interface{}{}, + "envOnlyMode": true, + }) + }) + + app.Post("/api/v1/chat", func(c *fiber.Ctx) error { + var req ChatRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if req.Message.Content == "" { + return c.Status(400).JSON(fiber.Map{"error": "Message content required"}) + } + + isDiscoverSummary := strings.HasPrefix(req.Message.Content, "Summary: ") && len(req.Message.Content) > 9 + var summaryURL string + if isDiscoverSummary { + summaryURL = strings.TrimSpace(strings.TrimPrefix(req.Message.Content, "Summary: ")) + } + + agentURL := strings.TrimSuffix(cfg.AgentSvcURL, "/") + "/api/v1/agents/search" + + httpReq, err := http.NewRequest("POST", agentURL, bytes.NewReader(c.Body())) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + httpReq.Header.Set("Content-Type", "application/json") + if auth := 
c.Get("Authorization"); auth != "" { + httpReq.Header.Set("Authorization", auth) + } + + client := &http.Client{Timeout: 5 * time.Minute} + resp, err := client.Do(httpReq) + if err != nil { + log.Printf("Agent service error: %v", err) + return c.Status(503).JSON(fiber.Map{"error": "Agent service unavailable"}) + } + + if resp.StatusCode != http.StatusOK { + body, _ := io.ReadAll(resp.Body) + resp.Body.Close() + return c.Status(resp.StatusCode).Send(body) + } + + c.Set("Content-Type", "application/x-ndjson") + c.Set("Cache-Control", "no-cache") + c.Set("Connection", "keep-alive") + + if isDiscoverSummary && summaryURL != "" && cfg.DiscoverSvcURL != "" { + collected := make([]string, 0) + scanner := bufio.NewScanner(resp.Body) + scanner.Buffer(make([]byte, 1024*1024), 10*1024*1024) + + var result bytes.Buffer + for scanner.Scan() { + line := scanner.Text() + if strings.TrimSpace(line) != "" { + collected = append(collected, line) + result.WriteString(line + "\n") + } + } + resp.Body.Close() + + if len(collected) > 0 { + go saveArticleSummary(cfg.DiscoverSvcURL, summaryURL, collected) + } + return c.SendString(result.String()) + } + + return c.SendStream(resp.Body) + }) + + port := cfg.ChatSvcPort + log.Printf("chat-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func init() { + if os.Getenv("PORT") == "" { + os.Setenv("PORT", "3005") + } +} + +func saveArticleSummary(discoverSvcURL, articleURL string, events []string) { + if discoverSvcURL == "" || articleURL == "" || len(events) == 0 { + return + } + + payload := map[string]interface{}{ + "url": articleURL, + "events": events, + } + + body, err := json.Marshal(payload) + if err != nil { + log.Printf("article-summary marshal error: %v", err) + return + } + + url := strings.TrimSuffix(discoverSvcURL, "/") + "/api/v1/discover/article-summary" + + maxRetries := 5 + retryDelay := 2 * time.Second + + for attempt := 1; attempt <= maxRetries; attempt++ { + req, err := 
http.NewRequest("POST", url, bytes.NewReader(body)) + if err != nil { + log.Printf("article-summary request error (attempt %d): %v", attempt, err) + continue + } + req.Header.Set("Content-Type", "application/json") + + client := &http.Client{Timeout: 2 * time.Minute} + resp, err := client.Do(req) + if err != nil { + log.Printf("article-summary save error (attempt %d): %v", attempt, err) + if attempt < maxRetries { + time.Sleep(retryDelay) + } + continue + } + resp.Body.Close() + + if resp.StatusCode >= 200 && resp.StatusCode < 300 { + log.Printf("article-summary saved: %s", articleURL[:min(60, len(articleURL))]) + return + } + + log.Printf("article-summary save failed (attempt %d): status %d", attempt, resp.StatusCode) + if attempt < maxRetries { + time.Sleep(retryDelay) + } + } + + log.Printf("article-summary save failed after %d retries: %s", maxRetries, articleURL[:min(60, len(articleURL))]) +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} diff --git a/backend/cmd/collection-svc/main.go b/backend/cmd/collection-svc/main.go new file mode 100644 index 0000000..5f0bfc3 --- /dev/null +++ b/backend/cmd/collection-svc/main.go @@ -0,0 +1,345 @@ +package main + +import ( + "context" + "fmt" + "log" + "os" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/internal/db" + "github.com/gooseek/backend/pkg/config" + "github.com/gooseek/backend/pkg/middleware" +) + +func main() { + cfg, err := config.Load() + if err != nil { + log.Fatal("Failed to load config:", err) + } + + var database *db.PostgresDB + var collectionRepo *db.CollectionRepository + + if cfg.DatabaseURL != "" { + database, err = db.NewPostgresDB(cfg.DatabaseURL) + if err != nil { + log.Printf("Database unavailable: %v (some features disabled)", err) + } else { + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + if err := 
database.RunMigrations(ctx); err != nil { + log.Printf("Migration warning: %v", err) + } + cancel() + defer database.Close() + + collectionRepo = db.NewCollectionRepository(database) + log.Println("PostgreSQL connected") + } + } + + app := fiber.New(fiber.Config{ + BodyLimit: 50 * 1024 * 1024, + ReadTimeout: 30 * time.Second, + WriteTimeout: 30 * time.Second, + }) + + app.Use(logger.New()) + app.Use(cors.New()) + + if cfg.JWTSecret != "" || cfg.AuthSvcURL != "" { + app.Use(middleware.JWT(middleware.JWTConfig{ + Secret: cfg.JWTSecret, + AuthSvcURL: cfg.AuthSvcURL, + AllowGuest: false, + SkipPaths: []string{"/health", "/ready"}, + })) + } + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + app.Get("/ready", func(c *fiber.Ctx) error { + if database == nil { + return c.Status(503).JSON(fiber.Map{"status": "database unavailable"}) + } + return c.JSON(fiber.Map{"status": "ready"}) + }) + + api := app.Group("/api/v1/collections") + + api.Get("/", func(c *fiber.Ctx) error { + if collectionRepo == nil { + return c.Status(503).JSON(fiber.Map{"error": "Database unavailable"}) + } + + userID := middleware.GetUserID(c) + if userID == "" { + return c.Status(401).JSON(fiber.Map{"error": "Unauthorized"}) + } + + limit := c.QueryInt("limit", 50) + offset := c.QueryInt("offset", 0) + + collections, err := collectionRepo.GetByUserID(c.Context(), userID, limit, offset) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to get collections"}) + } + + return c.JSON(fiber.Map{"collections": collections}) + }) + + api.Post("/", func(c *fiber.Ctx) error { + if collectionRepo == nil { + return c.Status(503).JSON(fiber.Map{"error": "Database unavailable"}) + } + + userID := middleware.GetUserID(c) + if userID == "" { + return c.Status(401).JSON(fiber.Map{"error": "Unauthorized"}) + } + + var req struct { + Name string `json:"name"` + Description string `json:"description"` + IsPublic bool `json:"isPublic"` + 
ContextEnabled bool `json:"contextEnabled"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if req.Name == "" { + return c.Status(400).JSON(fiber.Map{"error": "Name is required"}) + } + + collection := &db.Collection{ + UserID: userID, + Name: req.Name, + Description: req.Description, + IsPublic: req.IsPublic, + ContextEnabled: req.ContextEnabled, + } + + if err := collectionRepo.Create(c.Context(), collection); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to create collection"}) + } + + return c.Status(201).JSON(collection) + }) + + api.Get("/:id", func(c *fiber.Ctx) error { + if collectionRepo == nil { + return c.Status(503).JSON(fiber.Map{"error": "Database unavailable"}) + } + + collectionID := c.Params("id") + userID := middleware.GetUserID(c) + + collection, err := collectionRepo.GetByID(c.Context(), collectionID) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to get collection"}) + } + if collection == nil { + return c.Status(404).JSON(fiber.Map{"error": "Collection not found"}) + } + + if collection.UserID != userID && !collection.IsPublic { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + items, err := collectionRepo.GetItems(c.Context(), collectionID) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to get items"}) + } + collection.Items = items + + return c.JSON(collection) + }) + + api.Put("/:id", func(c *fiber.Ctx) error { + if collectionRepo == nil { + return c.Status(503).JSON(fiber.Map{"error": "Database unavailable"}) + } + + collectionID := c.Params("id") + userID := middleware.GetUserID(c) + + collection, err := collectionRepo.GetByID(c.Context(), collectionID) + if err != nil || collection == nil { + return c.Status(404).JSON(fiber.Map{"error": "Collection not found"}) + } + + if collection.UserID != userID { + return c.Status(403).JSON(fiber.Map{"error": "Access 
denied"}) + } + + var req struct { + Name string `json:"name"` + Description string `json:"description"` + IsPublic bool `json:"isPublic"` + ContextEnabled bool `json:"contextEnabled"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + collection.Name = req.Name + collection.Description = req.Description + collection.IsPublic = req.IsPublic + collection.ContextEnabled = req.ContextEnabled + + if err := collectionRepo.Update(c.Context(), collection); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to update collection"}) + } + + return c.JSON(collection) + }) + + api.Delete("/:id", func(c *fiber.Ctx) error { + if collectionRepo == nil { + return c.Status(503).JSON(fiber.Map{"error": "Database unavailable"}) + } + + collectionID := c.Params("id") + userID := middleware.GetUserID(c) + + collection, err := collectionRepo.GetByID(c.Context(), collectionID) + if err != nil || collection == nil { + return c.Status(404).JSON(fiber.Map{"error": "Collection not found"}) + } + + if collection.UserID != userID { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + if err := collectionRepo.Delete(c.Context(), collectionID); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to delete collection"}) + } + + return c.Status(204).Send(nil) + }) + + api.Post("/:id/items", func(c *fiber.Ctx) error { + if collectionRepo == nil { + return c.Status(503).JSON(fiber.Map{"error": "Database unavailable"}) + } + + collectionID := c.Params("id") + userID := middleware.GetUserID(c) + + collection, err := collectionRepo.GetByID(c.Context(), collectionID) + if err != nil || collection == nil { + return c.Status(404).JSON(fiber.Map{"error": "Collection not found"}) + } + + if collection.UserID != userID { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + var req struct { + ItemType string `json:"itemType"` + Title string `json:"title"` + 
Content string `json:"content"` + URL string `json:"url"` + Metadata map[string]interface{} `json:"metadata"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if req.ItemType == "" { + return c.Status(400).JSON(fiber.Map{"error": "itemType is required"}) + } + + item := &db.CollectionItem{ + CollectionID: collectionID, + ItemType: req.ItemType, + Title: req.Title, + Content: req.Content, + URL: req.URL, + Metadata: req.Metadata, + } + + if err := collectionRepo.AddItem(c.Context(), item); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to add item"}) + } + + return c.Status(201).JSON(item) + }) + + api.Delete("/:id/items/:itemId", func(c *fiber.Ctx) error { + if collectionRepo == nil { + return c.Status(503).JSON(fiber.Map{"error": "Database unavailable"}) + } + + collectionID := c.Params("id") + itemID := c.Params("itemId") + userID := middleware.GetUserID(c) + + collection, err := collectionRepo.GetByID(c.Context(), collectionID) + if err != nil || collection == nil { + return c.Status(404).JSON(fiber.Map{"error": "Collection not found"}) + } + + if collection.UserID != userID { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + if err := collectionRepo.RemoveItem(c.Context(), itemID); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to remove item"}) + } + + return c.Status(204).Send(nil) + }) + + api.Get("/:id/context", func(c *fiber.Ctx) error { + if collectionRepo == nil { + return c.Status(503).JSON(fiber.Map{"error": "Database unavailable"}) + } + + collectionID := c.Params("id") + userID := middleware.GetUserID(c) + + collection, err := collectionRepo.GetByID(c.Context(), collectionID) + if err != nil || collection == nil { + return c.Status(404).JSON(fiber.Map{"error": "Collection not found"}) + } + + if collection.UserID != userID && !collection.IsPublic { + return c.Status(403).JSON(fiber.Map{"error": "Access 
denied"}) + } + + if !collection.ContextEnabled { + return c.JSON(fiber.Map{"context": "", "enabled": false}) + } + + context, err := collectionRepo.GetCollectionContext(c.Context(), collectionID) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to get context"}) + } + + return c.JSON(fiber.Map{"context": context, "enabled": true}) + }) + + port := getEnvInt("COLLECTION_SVC_PORT", 3025) + log.Printf("collection-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func getEnvInt(key string, defaultValue int) int { + if val := os.Getenv(key); val != "" { + var result int + if _, err := fmt.Sscanf(val, "%d", &result); err == nil { + return result + } + } + return defaultValue +} diff --git a/backend/cmd/computer-svc/main.go b/backend/cmd/computer-svc/main.go new file mode 100644 index 0000000..84e95e0 --- /dev/null +++ b/backend/cmd/computer-svc/main.go @@ -0,0 +1,552 @@ +package main + +import ( + "bufio" + "context" + "encoding/json" + "fmt" + "log" + "os" + "os/signal" + "syscall" + "time" + + "github.com/gooseek/backend/internal/computer" + "github.com/gooseek/backend/internal/computer/connectors" + "github.com/gooseek/backend/internal/db" + "github.com/gooseek/backend/internal/llm" + "github.com/gooseek/backend/pkg/config" + "github.com/gooseek/backend/pkg/middleware" + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/recover" +) + +func main() { + cfg, err := config.Load() + if err != nil { + log.Fatalf("Failed to load config: %v", err) + } + + var database *db.PostgresDB + maxRetries := 30 + for i := 0; i < maxRetries; i++ { + database, err = db.NewPostgresDB(cfg.DatabaseURL) + if err == nil { + log.Println("PostgreSQL connected successfully") + break + } + log.Printf("Waiting for database (attempt %d/%d): %v", i+1, maxRetries, err) + time.Sleep(2 * time.Second) + } + if err != nil { + log.Fatalf("Failed to connect to database after %d 
attempts: %v", maxRetries, err) + } + + taskRepo := db.NewComputerTaskRepo(database.DB()) + memoryRepo := db.NewComputerMemoryRepo(database.DB()) + artifactRepo := db.NewComputerArtifactRepo(database.DB()) + + if err := taskRepo.Migrate(); err != nil { + log.Printf("Task repo migration warning: %v", err) + } + if err := memoryRepo.Migrate(); err != nil { + log.Printf("Memory repo migration warning: %v", err) + } + if err := artifactRepo.Migrate(); err != nil { + log.Printf("Artifact repo migration warning: %v", err) + } + + registry := llm.NewModelRegistry() + setupModels(registry, cfg) + + connectorHub := connectors.NewConnectorHub() + setupConnectors(connectorHub, cfg) + + comp := computer.NewComputer(computer.ComputerConfig{ + MaxParallelTasks: 10, + MaxSubTasks: 20, + TaskTimeout: 30 * time.Minute, + SubTaskTimeout: 5 * time.Minute, + TotalBudget: 1.0, + EnableSandbox: true, + EnableScheduling: true, + SandboxImage: getEnv("SANDBOX_IMAGE", "gooseek/sandbox:latest"), + }, computer.Dependencies{ + Registry: registry, + TaskRepo: taskRepo, + MemoryRepo: memoryRepo, + }) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + comp.StartScheduler(ctx) + + app := fiber.New(fiber.Config{ + ErrorHandler: func(c *fiber.Ctx, err error) error { + return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{ + "error": err.Error(), + }) + }, + }) + + app.Use(recover.New()) + app.Use(cors.New(cors.Config{ + AllowOrigins: "*", + AllowHeaders: "Origin, Content-Type, Accept, Authorization", + AllowMethods: "GET, POST, PUT, DELETE, OPTIONS", + })) + app.Use(middleware.Logging(middleware.LoggingConfig{})) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{ + "status": "ok", + "service": "computer-svc", + "models": registry.Count(), + }) + }) + + api := app.Group("/api/v1/computer") + + api.Post("/execute", func(c *fiber.Ctx) error { + var req struct { + Query string `json:"query"` + UserID string `json:"userId"` + Options 
computer.ExecuteOptions `json:"options"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "invalid request body"}) + } + + if req.Query == "" { + return c.Status(400).JSON(fiber.Map{"error": "query is required"}) + } + + if req.UserID == "" || req.UserID == "anonymous" { + req.UserID = "00000000-0000-0000-0000-000000000000" + } + + task, err := comp.Execute(c.Context(), req.UserID, req.Query, req.Options) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(task) + }) + + api.Get("/tasks", func(c *fiber.Ctx) error { + userID := c.Query("userId", "") + limit := c.QueryInt("limit", 20) + offset := c.QueryInt("offset", 0) + + if userID == "" || userID == "anonymous" { + userID = "00000000-0000-0000-0000-000000000000" + } + + tasks, err := comp.GetUserTasks(c.Context(), userID, limit, offset) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(fiber.Map{ + "tasks": tasks, + "count": len(tasks), + }) + }) + + api.Get("/tasks/:id", func(c *fiber.Ctx) error { + taskID := c.Params("id") + + task, err := comp.GetStatus(c.Context(), taskID) + if err != nil { + return c.Status(404).JSON(fiber.Map{"error": "task not found"}) + } + + return c.JSON(task) + }) + + api.Get("/tasks/:id/stream", func(c *fiber.Ctx) error { + taskID := c.Params("id") + + c.Set("Content-Type", "text/event-stream") + c.Set("Cache-Control", "no-cache") + c.Set("Connection", "keep-alive") + c.Set("Transfer-Encoding", "chunked") + + eventCh, err := comp.Stream(c.Context(), taskID) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + c.Context().SetBodyStreamWriter(func(w *bufio.Writer) { + for event := range eventCh { + data, _ := json.Marshal(event) + fmt.Fprintf(w, "data: %s\n\n", data) + w.Flush() + } + }) + + return nil + }) + + api.Post("/tasks/:id/resume", func(c *fiber.Ctx) error { + taskID := c.Params("id") + + var req 
struct { + UserInput string `json:"userInput"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "invalid request body"}) + } + + if err := comp.Resume(c.Context(), taskID, req.UserInput); err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(fiber.Map{"status": "resumed"}) + }) + + api.Delete("/tasks/:id", func(c *fiber.Ctx) error { + taskID := c.Params("id") + + if err := comp.Cancel(c.Context(), taskID); err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(fiber.Map{"status": "cancelled"}) + }) + + api.Get("/tasks/:id/artifacts", func(c *fiber.Ctx) error { + taskID := c.Params("id") + + artifacts, err := artifactRepo.GetByTaskID(c.Context(), taskID) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(fiber.Map{ + "artifacts": artifacts, + "count": len(artifacts), + }) + }) + + api.Get("/artifacts/:id", func(c *fiber.Ctx) error { + artifactID := c.Params("id") + + artifact, err := artifactRepo.GetByID(c.Context(), artifactID) + if err != nil { + return c.Status(404).JSON(fiber.Map{"error": "artifact not found"}) + } + + return c.JSON(artifact) + }) + + api.Get("/artifacts/:id/download", func(c *fiber.Ctx) error { + artifactID := c.Params("id") + + artifact, err := artifactRepo.GetByID(c.Context(), artifactID) + if err != nil { + return c.Status(404).JSON(fiber.Map{"error": "artifact not found"}) + } + + if artifact.MimeType != "" { + c.Set("Content-Type", artifact.MimeType) + } else { + c.Set("Content-Type", "application/octet-stream") + } + c.Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", artifact.Name)) + + return c.Send(artifact.Content) + }) + + api.Get("/models", func(c *fiber.Ctx) error { + models := registry.GetAll() + return c.JSON(fiber.Map{ + "models": models, + "count": len(models), + }) + }) + + api.Get("/connectors", func(c *fiber.Ctx) error { + 
 info := connectorHub.GetInfo() + return c.JSON(fiber.Map{ + "connectors": info, + "count": len(info), + }) + }) + + api.Post("/connectors/:id/execute", func(c *fiber.Ctx) error { + connectorID := c.Params("id") + + var req struct { + Action string `json:"action"` + Params map[string]interface{} `json:"params"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "invalid request body"}) + } + + result, err := connectorHub.Execute(c.Context(), connectorID, req.Action, req.Params) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(result) + }) + + port := getEnv("COMPUTER_SVC_PORT", "3030") + addr := ":" + port + + go func() { + log.Printf("Computer service starting on %s", addr) + if err := app.Listen(addr); err != nil { + log.Fatalf("Failed to start server: %v", err) + } + }() + + quit := make(chan os.Signal, 1) + signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM) + <-quit + + log.Println("Shutting down...") + comp.StopScheduler() + app.Shutdown() +} + +func setupModels(registry *llm.ModelRegistry, cfg *config.Config) { + // Timeweb Cloud AI (preferred provider for Russia) + if cfg.TimewebAgentAccessID != "" && cfg.TimewebAPIKey != "" { + timewebClient, err := llm.NewTimewebClient(llm.TimewebConfig{ + ProviderID: "timeweb", + ModelKey: "gpt-4o", + BaseURL: cfg.TimewebAPIBaseURL, + AgentAccessID: cfg.TimewebAgentAccessID, + APIKey: cfg.TimewebAPIKey, + ProxySource: cfg.TimewebProxySource, + }) + if err == nil { + registry.Register(llm.ModelSpec{ + ID: "timeweb-gpt-4o", + Provider: "timeweb", + Model: "gpt-4o", + Capabilities: []llm.ModelCapability{llm.CapSearch, llm.CapFast, llm.CapVision, llm.CapCoding, llm.CapCreative, llm.CapReasoning}, + CostPer1K: 0.005, + MaxContext: 128000, + MaxTokens: 16384, + Priority: 0, + Description: "GPT-4o via Timeweb Cloud AI", + }, timewebClient) + log.Println("Timeweb GPT-4o registered") + } else { + log.Printf("Failed to create 
Timeweb client: %v", err) + } + + timewebMiniClient, err := llm.NewTimewebClient(llm.TimewebConfig{ + ProviderID: "timeweb", + ModelKey: "gpt-4o-mini", + BaseURL: cfg.TimewebAPIBaseURL, + AgentAccessID: cfg.TimewebAgentAccessID, + APIKey: cfg.TimewebAPIKey, + ProxySource: cfg.TimewebProxySource, + }) + if err == nil { + registry.Register(llm.ModelSpec{ + ID: "timeweb-gpt-4o-mini", + Provider: "timeweb", + Model: "gpt-4o-mini", + Capabilities: []llm.ModelCapability{llm.CapFast, llm.CapCoding}, + CostPer1K: 0.00015, + MaxContext: 128000, + MaxTokens: 16384, + Priority: 0, + Description: "GPT-4o-mini via Timeweb Cloud AI", + }, timewebMiniClient) + log.Println("Timeweb GPT-4o-mini registered") + } + } + + // Direct OpenAI (fallback if Timeweb is unavailable) + if cfg.OpenAIAPIKey != "" { + openaiClient, err := llm.NewOpenAIClient(llm.ProviderConfig{ + ProviderID: "openai", + ModelKey: "gpt-4o", + APIKey: cfg.OpenAIAPIKey, + }) + if err == nil { + registry.Register(llm.ModelSpec{ + ID: "gpt-4o", + Provider: "openai", + Model: "gpt-4o", + Capabilities: []llm.ModelCapability{llm.CapSearch, llm.CapFast, llm.CapVision, llm.CapCoding, llm.CapCreative}, + CostPer1K: 0.005, + MaxContext: 128000, + MaxTokens: 16384, + Priority: 10, + }, openaiClient) + } + + miniClient, err := llm.NewOpenAIClient(llm.ProviderConfig{ + ProviderID: "openai", + ModelKey: "gpt-4o-mini", + APIKey: cfg.OpenAIAPIKey, + }) + if err == nil { + registry.Register(llm.ModelSpec{ + ID: "gpt-4o-mini", + Provider: "openai", + Model: "gpt-4o-mini", + Capabilities: []llm.ModelCapability{llm.CapFast, llm.CapCoding}, + CostPer1K: 0.00015, + MaxContext: 128000, + MaxTokens: 16384, + Priority: 10, + }, miniClient) + } + } + + if cfg.AnthropicAPIKey != "" { + opusClient, err := llm.NewAnthropicClient(llm.ProviderConfig{ + ProviderID: "anthropic", + ModelKey: "claude-3-opus-20240229", + APIKey: cfg.AnthropicAPIKey, + }) + if err == nil { + registry.Register(llm.ModelSpec{ + ID: "claude-3-opus", + Provider: 
"anthropic", + Model: "claude-3-opus-20240229", + Capabilities: []llm.ModelCapability{llm.CapReasoning, llm.CapCoding, llm.CapCreative, llm.CapLongContext}, + CostPer1K: 0.015, + MaxContext: 200000, + MaxTokens: 4096, + Priority: 1, + }, opusClient) + } + + sonnetClient, err := llm.NewAnthropicClient(llm.ProviderConfig{ + ProviderID: "anthropic", + ModelKey: "claude-3-5-sonnet-20241022", + APIKey: cfg.AnthropicAPIKey, + }) + if err == nil { + registry.Register(llm.ModelSpec{ + ID: "claude-3-sonnet", + Provider: "anthropic", + Model: "claude-3-5-sonnet-20241022", + Capabilities: []llm.ModelCapability{llm.CapCoding, llm.CapCreative, llm.CapFast}, + CostPer1K: 0.003, + MaxContext: 200000, + MaxTokens: 8192, + Priority: 1, + }, sonnetClient) + } + } + + if cfg.GeminiAPIKey != "" { + geminiClient, err := llm.NewGeminiClient(llm.ProviderConfig{ + ProviderID: "gemini", + ModelKey: "gemini-1.5-pro", + APIKey: cfg.GeminiAPIKey, + }) + if err == nil { + registry.Register(llm.ModelSpec{ + ID: "gemini-1.5-pro", + Provider: "gemini", + Model: "gemini-1.5-pro", + Capabilities: []llm.ModelCapability{llm.CapLongContext, llm.CapSearch, llm.CapVision, llm.CapMath}, + CostPer1K: 0.00125, + MaxContext: 2000000, + MaxTokens: 8192, + Priority: 1, + }, geminiClient) + } + + flashClient, err := llm.NewGeminiClient(llm.ProviderConfig{ + ProviderID: "gemini", + ModelKey: "gemini-1.5-flash", + APIKey: cfg.GeminiAPIKey, + }) + if err == nil { + registry.Register(llm.ModelSpec{ + ID: "gemini-1.5-flash", + Provider: "gemini", + Model: "gemini-1.5-flash", + Capabilities: []llm.ModelCapability{llm.CapFast, llm.CapVision}, + CostPer1K: 0.000075, + MaxContext: 1000000, + MaxTokens: 8192, + Priority: 2, + }, flashClient) + } + } + + log.Printf("Registered %d models", registry.Count()) +} + +func setupConnectors(hub *connectors.ConnectorHub, cfg *config.Config) { + if smtpHost := getEnv("SMTP_HOST", ""); smtpHost != "" { + emailConn := connectors.NewEmailConnector(connectors.EmailConfig{ + SMTPHost: 
smtpHost, + SMTPPort: getEnvInt("SMTP_PORT", 587), + Username: getEnv("SMTP_USERNAME", ""), + Password: getEnv("SMTP_PASSWORD", ""), + FromAddress: getEnv("SMTP_FROM", ""), + FromName: getEnv("SMTP_FROM_NAME", "GooSeek Computer"), + UseTLS: true, + AllowHTML: true, + }) + hub.Register(emailConn) + log.Println("Email connector registered") + } + + if botToken := getEnv("TELEGRAM_BOT_TOKEN", ""); botToken != "" { + tgConn := connectors.NewTelegramConnector(connectors.TelegramConfig{ + BotToken: botToken, + }) + hub.Register(tgConn) + log.Println("Telegram connector registered") + } + + webhookConn := connectors.NewWebhookConnector(connectors.WebhookConfig{ + Timeout: 30 * time.Second, + MaxRetries: 3, + }) + hub.Register(webhookConn) + log.Println("Webhook connector registered") + + if s3Endpoint := getEnv("S3_ENDPOINT", ""); s3Endpoint != "" { + storageConn, err := connectors.NewStorageConnector(connectors.StorageConfig{ + Endpoint: s3Endpoint, + AccessKeyID: getEnv("S3_ACCESS_KEY", ""), + SecretAccessKey: getEnv("S3_SECRET_KEY", ""), + BucketName: getEnv("S3_BUCKET", "gooseek-artifacts"), + UseSSL: getEnv("S3_USE_SSL", "true") == "true", + Region: getEnv("S3_REGION", "us-east-1"), + PublicURL: getEnv("S3_PUBLIC_URL", ""), + }) + if err == nil { + hub.Register(storageConn) + log.Println("Storage connector registered") + } + } +} + +func getEnv(key, defaultValue string) string { + if value := os.Getenv(key); value != "" { + return value + } + return defaultValue +} + +func getEnvInt(key string, defaultValue int) int { + if value := os.Getenv(key); value != "" { + var i int + fmt.Sscanf(value, "%d", &i) + return i + } + return defaultValue +} diff --git a/backend/cmd/discover-svc/main.go b/backend/cmd/discover-svc/main.go new file mode 100644 index 0000000..f10ab2d --- /dev/null +++ b/backend/cmd/discover-svc/main.go @@ -0,0 +1,511 @@ +package main + +import ( + "context" + "crypto/sha256" + "encoding/hex" + "fmt" + "log" + "os" + "strings" + "sync" + "time" + + 
// DigestCitation is one numbered source reference attached to a digest.
type DigestCitation struct {
	Index  int    `json:"index"`
	URL    string `json:"url"`
	Title  string `json:"title"`
	Domain string `json:"domain"`
}

// Digest is a clustered news summary for one (topic, region, clusterTitle)
// cell, plus presentation fields used by the discover feed.
type Digest struct {
	Topic            string           `json:"topic"`
	Region           string           `json:"region"`
	ClusterTitle     string           `json:"clusterTitle"`
	SummaryRu        string           `json:"summaryRu"`
	Citations        []DigestCitation `json:"citations"`
	SourcesCount     int              `json:"sourcesCount"`
	FollowUp         []string         `json:"followUp"`
	Thumbnail        string           `json:"thumbnail"`
	ShortDescription string           `json:"shortDescription"`
	MainURL          string           `json:"mainUrl"`
	CreatedAt        time.Time        `json:"createdAt"`
}

// ArticleSummary caches the extracted event list for one article URL.
type ArticleSummary struct {
	URL       string    `json:"url"`
	Events    []string  `json:"events"`
	CreatedAt time.Time `json:"createdAt"`
}

// DiscoverStore is the in-memory fallback store used when neither Postgres
// nor Redis is configured. All access goes through mu, since Fiber handlers
// run concurrently.
type DiscoverStore struct {
	digests          map[string]*Digest
	articleSummaries map[string]*ArticleSummary
	mu               sync.RWMutex
}

// NewDiscoverStore returns an empty, ready-to-use store.
func NewDiscoverStore() *DiscoverStore {
	store := &DiscoverStore{}
	store.digests = make(map[string]*Digest)
	store.articleSummaries = make(map[string]*ArticleSummary)
	return store
}

// digestKey builds the composite "topic:region:title" map key. Passing an
// empty title yields the "topic:region:" prefix used for range scans.
func digestKey(topic, region, title string) string {
	return topic + ":" + region + ":" + title
}

// GetDigest returns the digest for an exact (topic, region, title) triple,
// or nil when absent.
func (store *DiscoverStore) GetDigest(topic, region, title string) *Digest {
	store.mu.RLock()
	defer store.mu.RUnlock()
	return store.digests[digestKey(topic, region, title)]
}

// GetDigestByURL finds a digest whose MainURL matches the given URL after
// normalization (scheme/www/trailing-slash insensitive). Linear scan over
// the in-memory map.
func (store *DiscoverStore) GetDigestByURL(url string) *Digest {
	store.mu.RLock()
	defer store.mu.RUnlock()
	want := normalizeURL(url)
	for _, digest := range store.digests {
		if normalizeURL(digest.MainURL) == want {
			return digest
		}
	}
	return nil
}

// GetDigests returns every digest stored under (topic, region), in map
// iteration order. The result is never nil.
func (store *DiscoverStore) GetDigests(topic, region string) []*Digest {
	store.mu.RLock()
	defer store.mu.RUnlock()
	prefix := digestKey(topic, region, "")
	matches := make([]*Digest, 0)
	for key, digest := range store.digests {
		if strings.HasPrefix(key, prefix) {
			matches = append(matches, digest)
		}
	}
	return matches
}

// UpsertDigest inserts or replaces a digest, stamping CreatedAt with the
// write time (the caller's struct is mutated).
func (store *DiscoverStore) UpsertDigest(d *Digest) {
	store.mu.Lock()
	defer store.mu.Unlock()
	d.CreatedAt = time.Now()
	store.digests[digestKey(d.Topic, d.Region, d.ClusterTitle)] = d
}

// DeleteDigests removes every digest under (topic, region) and reports how
// many entries were dropped.
func (store *DiscoverStore) DeleteDigests(topic, region string) int {
	store.mu.Lock()
	defer store.mu.Unlock()
	prefix := digestKey(topic, region, "")
	removed := 0
	for key := range store.digests {
		if strings.HasPrefix(key, prefix) {
			delete(store.digests, key)
			removed++
		}
	}
	return removed
}

// GetArticleSummary looks up the cached event list for a URL, or nil.
func (store *DiscoverStore) GetArticleSummary(url string) *ArticleSummary {
	store.mu.RLock()
	defer store.mu.RUnlock()
	return store.articleSummaries[articleSummaryKey(url)]
}

// SaveArticleSummary stores (or overwrites) the event list for a URL.
func (store *DiscoverStore) SaveArticleSummary(url string, events []string) {
	store.mu.Lock()
	defer store.mu.Unlock()
	store.articleSummaries[articleSummaryKey(url)] = &ArticleSummary{
		URL:       url,
		Events:    events,
		CreatedAt: time.Now(),
	}
}

// DeleteArticleSummary removes the cached summary for a URL, reporting
// whether an entry existed.
func (store *DiscoverStore) DeleteArticleSummary(url string) bool {
	store.mu.Lock()
	defer store.mu.Unlock()
	key := articleSummaryKey(url)
	_, exists := store.articleSummaries[key]
	if exists {
		delete(store.articleSummaries, key)
	}
	return exists
}

// articleSummaryKey derives a stable 128-bit hex key from the normalized
// URL, so look-ups tolerate scheme/www/trailing-slash variations.
func articleSummaryKey(url string) string {
	digest := sha256.Sum256([]byte(normalizeURL(url)))
	return hex.EncodeToString(digest[:16])
}

// normalizeURL strips surrounding whitespace, one trailing slash, the
// http(s) scheme, and a leading "www." so equivalent URLs compare equal.
func normalizeURL(url string) string {
	cleaned := strings.TrimSuffix(strings.TrimSpace(url), "/")
	for _, prefix := range []string{"https://", "http://", "www."} {
		cleaned = strings.TrimPrefix(cleaned, prefix)
	}
	return cleaned
}

// extractDomain returns the host portion of a (possibly schemeless) URL.
func extractDomain(url string) string {
	host := normalizeURL(url)
	if slash := strings.IndexByte(host, '/'); slash > 0 {
		return host[:slash]
	}
	return host
}
NewDiscoverStore() + searchClient := search.NewSearXNGClient(cfg) + + var database *db.PostgresDB + var digestRepo *db.DigestRepository + var summaryRepo *db.ArticleSummaryRepository + + if cfg.DatabaseURL != "" { + database, err = db.NewPostgresDB(cfg.DatabaseURL) + if err != nil { + log.Printf("PostgreSQL unavailable: %v (falling back to in-memory)", err) + } else { + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + if err := database.RunMigrations(ctx); err != nil { + log.Printf("Migration warning: %v", err) + } + cancel() + defer database.Close() + + digestRepo = db.NewDigestRepository(database) + summaryRepo = db.NewArticleSummaryRepository(database) + log.Println("PostgreSQL connected") + } + } + + var redisCache *cache.RedisCache + if cfg.RedisURL != "" { + redisCache, err = cache.NewRedisCache(cfg.RedisURL, "gooseek:discover") + if err != nil { + log.Printf("Redis cache unavailable: %v (falling back to in-memory)", err) + } else { + log.Printf("Redis cache connected") + defer redisCache.Close() + } + } + + _ = digestRepo + _ = summaryRepo + + app := fiber.New(fiber.Config{ + BodyLimit: 100 * 1024 * 1024, + ReadTimeout: 30 * time.Second, + WriteTimeout: 30 * time.Second, + IdleTimeout: 60 * time.Second, + }) + + app.Use(logger.New()) + app.Use(cors.New()) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + app.Get("/ready", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ready"}) + }) + + app.Get("/metrics", func(c *fiber.Ctx) error { + c.Set("Content-Type", "text/plain; charset=utf-8") + return c.SendString( + "# HELP gooseek_up Service is up (1) or down (0)\n" + + "# TYPE gooseek_up gauge\n" + + "gooseek_up 1\n", + ) + }) + + app.Get("/api/v1/discover/digest", func(c *fiber.Ctx) error { + url := c.Query("url") + if url != "" { + digest := store.GetDigestByURL(url) + if digest == nil { + return c.Status(404).JSON(fiber.Map{"message": "digest not found"}) + } + return 
c.JSON(digest) + } + + topic := c.Query("topic") + region := c.Query("region") + title := c.Query("title") + + if topic == "" || region == "" || title == "" { + return c.Status(400).JSON(fiber.Map{"message": "topic, region, title (or url) required"}) + } + + digest := store.GetDigest(topic, region, title) + if digest == nil { + return c.Status(404).JSON(fiber.Map{"message": "digest not found"}) + } + + return c.JSON(digest) + }) + + app.Post("/api/v1/discover/digest", func(c *fiber.Ctx) error { + var d Digest + if err := c.BodyParser(&d); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if d.Topic == "" || d.Region == "" || d.ClusterTitle == "" || d.SummaryRu == "" { + return c.Status(400).JSON(fiber.Map{"message": "topic, region, clusterTitle, summaryRu required"}) + } + + store.UpsertDigest(&d) + return c.Status(204).Send(nil) + }) + + app.Delete("/api/v1/discover/digest", func(c *fiber.Ctx) error { + topic := c.Query("topic") + region := c.Query("region") + + if topic == "" || region == "" { + return c.Status(400).JSON(fiber.Map{"message": "topic, region required"}) + } + + deleted := store.DeleteDigests(topic, region) + return c.JSON(fiber.Map{"deleted": deleted}) + }) + + app.Get("/api/v1/discover/article-summary", func(c *fiber.Ctx) error { + url := c.Query("url") + if url == "" { + return c.Status(400).JSON(fiber.Map{"message": "url required"}) + } + + if redisCache != nil { + events, err := redisCache.GetCachedArticleSummary(c.Context(), url) + if err == nil && len(events) > 0 { + return c.JSON(fiber.Map{"events": events}) + } + } + + if summaryRepo != nil { + summary, err := summaryRepo.GetByURL(c.Context(), url) + if err == nil && summary != nil { + if redisCache != nil { + redisCache.CacheArticleSummary(c.Context(), url, summary.Events, 24*time.Hour) + } + return c.JSON(fiber.Map{"events": summary.Events}) + } + } + + summary := store.GetArticleSummary(url) + if summary == nil { + return 
c.Status(404).JSON(fiber.Map{"message": "not found"}) + } + + return c.JSON(fiber.Map{"events": summary.Events}) + }) + + app.Post("/api/v1/discover/article-summary", func(c *fiber.Ctx) error { + var body struct { + URL string `json:"url"` + Events []string `json:"events"` + } + + if err := c.BodyParser(&body); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if body.URL == "" || len(body.Events) == 0 { + return c.Status(400).JSON(fiber.Map{"message": "url and events[] required"}) + } + + store.SaveArticleSummary(body.URL, body.Events) + + if summaryRepo != nil { + ttl := 7 * 24 * time.Hour + if err := summaryRepo.Save(c.Context(), body.URL, body.Events, ttl); err != nil { + log.Printf("postgres save article-summary error: %v", err) + } + } + + if redisCache != nil { + ttl := 24 * time.Hour + if err := redisCache.CacheArticleSummary(c.Context(), body.URL, body.Events, ttl); err != nil { + log.Printf("redis cache article-summary error: %v", err) + } + } + + log.Printf("article-summary saved: %s (%d events)", body.URL[:min(60, len(body.URL))], len(body.Events)) + return c.Status(204).Send(nil) + }) + + app.Delete("/api/v1/discover/article-summary", func(c *fiber.Ctx) error { + url := c.Query("url") + if url == "" { + return c.Status(400).JSON(fiber.Map{"message": "url required"}) + } + + deleted := store.DeleteArticleSummary(url) + log.Printf("article-summary deleted: %s (deleted=%v)", url[:min(60, len(url))], deleted) + return c.Status(204).Send(nil) + }) + + app.Get("/api/v1/discover/search", func(c *fiber.Ctx) error { + q := c.Query("q") + if q == "" { + return c.Status(400).JSON(fiber.Map{"message": "Query q is required"}) + } + + ctx, cancel := context.WithTimeout(context.Background(), cfg.SearchTimeout) + defer cancel() + + result, err := searchClient.Search(ctx, q, &search.SearchOptions{PageNo: 1}) + if err != nil { + return c.Status(503).JSON(fiber.Map{"message": "Search failed"}) + } + + if len(result.Results) > 10 
{ + result.Results = result.Results[:10] + } + + return c.JSON(fiber.Map{"results": result.Results}) + }) + + app.Get("/api/v1/discover", func(c *fiber.Ctx) error { + topic := c.Query("topic", "tech") + region := c.Query("region", "world") + + digests := store.GetDigests(topic, region) + if len(digests) > 0 { + blogs := make([]fiber.Map, len(digests)) + for i, d := range digests { + content := d.ShortDescription + if content == "" && len(d.SummaryRu) > 200 { + content = d.SummaryRu[:200] + "…" + } else if content == "" { + content = d.SummaryRu + } + + blogs[i] = fiber.Map{ + "title": d.ClusterTitle, + "content": content, + "url": d.MainURL, + "thumbnail": d.Thumbnail, + "sourcesCount": d.SourcesCount, + "digestId": fmt.Sprintf("%s:%s:%s", d.Topic, d.Region, d.ClusterTitle), + } + } + return c.JSON(fiber.Map{"blogs": blogs}) + } + + ctx, cancel := context.WithTimeout(context.Background(), cfg.SearchTimeout*2) + defer cancel() + + queries := getQueriesForTopic(topic, region) + results, err := searchClient.Search(ctx, queries[0], &search.SearchOptions{ + Categories: []string{"news"}, + PageNo: 1, + }) + if err != nil { + return c.Status(503).JSON(fiber.Map{"message": "Search failed"}) + } + + blogs := make([]fiber.Map, 0, 7) + for i, r := range results.Results { + if i >= 7 { + break + } + thumbnail := r.Thumbnail + if thumbnail == "" { + thumbnail = r.ThumbnailSrc + } + if thumbnail == "" { + thumbnail = r.ImgSrc + } + + content := r.Content + if content == "" { + content = r.Title + } + if len(content) > 300 { + content = content[:300] + "…" + } + + blogs = append(blogs, fiber.Map{ + "title": r.Title, + "content": content, + "url": r.URL, + "thumbnail": thumbnail, + }) + } + + return c.JSON(fiber.Map{"blogs": blogs}) + }) + + port := getEnvInt("DISCOVER_SVC_PORT", 3002) + log.Printf("discover-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func getQueriesForTopic(topic, region string) []string { + queries := 
map[string]map[string][]string{ + "tech": { + "world": {"technology news AI innovation"}, + "russia": {"технологии новости IT инновации"}, + "eu": {"technology news Europe AI"}, + }, + "finance": { + "world": {"finance news economy markets"}, + "russia": {"финансы новости экономика рынки"}, + "eu": {"finance news Europe economy"}, + }, + "sports": { + "world": {"sports news football Olympics"}, + "russia": {"спорт новости футбол хоккей"}, + "eu": {"sports news football Champions League"}, + }, + } + + if topicQueries, ok := queries[topic]; ok { + if regionQueries, ok := topicQueries[region]; ok { + return regionQueries + } + if defaultQueries, ok := topicQueries["world"]; ok { + return defaultQueries + } + } + + return []string{"news today"} +} + +func getEnvInt(key string, defaultValue int) int { + if val := os.Getenv(key); val != "" { + var result int + if _, err := fmt.Sscanf(val, "%d", &result); err == nil { + return result + } + } + return defaultValue +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} diff --git a/backend/cmd/file-svc/main.go b/backend/cmd/file-svc/main.go new file mode 100644 index 0000000..1b31a8c --- /dev/null +++ b/backend/cmd/file-svc/main.go @@ -0,0 +1,307 @@ +package main + +import ( + "context" + "fmt" + "log" + "os" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/internal/db" + "github.com/gooseek/backend/internal/files" + "github.com/gooseek/backend/internal/llm" + "github.com/gooseek/backend/pkg/config" + "github.com/gooseek/backend/pkg/middleware" +) + +func main() { + cfg, err := config.Load() + if err != nil { + log.Fatal("Failed to load config:", err) + } + + var database *db.PostgresDB + var fileRepo *db.FileRepository + + if cfg.DatabaseURL != "" { + database, err = db.NewPostgresDB(cfg.DatabaseURL) + if err != nil { + log.Printf("Database unavailable: %v", err) + } else { + ctx, 
cancel := context.WithTimeout(context.Background(), 30*time.Second) + if err := database.RunMigrations(ctx); err != nil { + log.Printf("Migration warning: %v", err) + } + cancel() + defer database.Close() + + fileRepo = db.NewFileRepository(database) + log.Println("PostgreSQL connected") + } + } + + var llmClient llm.Client + if cfg.OpenAIAPIKey != "" { + llmClient, err = llm.NewClient(llm.ProviderConfig{ + ProviderID: "openai", + ModelKey: "gpt-4o", + APIKey: cfg.OpenAIAPIKey, + }) + if err != nil { + log.Printf("Failed to create OpenAI client: %v", err) + } + } else if cfg.AnthropicAPIKey != "" { + llmClient, err = llm.NewClient(llm.ProviderConfig{ + ProviderID: "anthropic", + ModelKey: "claude-3-5-sonnet-20241022", + APIKey: cfg.AnthropicAPIKey, + }) + if err != nil { + log.Printf("Failed to create Anthropic client: %v", err) + } + } + + storagePath := os.Getenv("FILE_STORAGE_PATH") + if storagePath == "" { + storagePath = "/tmp/gooseek-files" + } + + var fileAnalyzer *files.FileAnalyzer + if llmClient != nil { + fileAnalyzer = files.NewFileAnalyzer(llmClient, storagePath) + } + + app := fiber.New(fiber.Config{ + BodyLimit: 100 * 1024 * 1024, + ReadTimeout: 60 * time.Second, + WriteTimeout: 60 * time.Second, + }) + + app.Use(logger.New()) + app.Use(cors.New()) + + if cfg.JWTSecret != "" || cfg.AuthSvcURL != "" { + app.Use(middleware.JWT(middleware.JWTConfig{ + Secret: cfg.JWTSecret, + AuthSvcURL: cfg.AuthSvcURL, + AllowGuest: false, + SkipPaths: []string{"/health", "/ready"}, + })) + } + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + app.Get("/ready", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ready"}) + }) + + api := app.Group("/api/v1/files") + + api.Post("/upload", func(c *fiber.Ctx) error { + if fileRepo == nil || fileAnalyzer == nil { + return c.Status(503).JSON(fiber.Map{"error": "Service unavailable"}) + } + + userID := middleware.GetUserID(c) + if userID == "" { + return 
c.Status(401).JSON(fiber.Map{"error": "Unauthorized"}) + } + + file, err := c.FormFile("file") + if err != nil { + return c.Status(400).JSON(fiber.Map{"error": "No file uploaded"}) + } + + if file.Size > 50*1024*1024 { + return c.Status(400).JSON(fiber.Map{"error": "File too large (max 50MB)"}) + } + + f, err := file.Open() + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to read file"}) + } + defer f.Close() + + storagePath, fileSize, err := fileAnalyzer.SaveFile(file.Filename, f) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to save file"}) + } + + buf := make([]byte, 512) + f.Seek(0, 0) + f.Read(buf) + mimeType := files.DetectMimeType(file.Filename, buf) + + uploadedFile := &db.UploadedFile{ + UserID: userID, + Filename: file.Filename, + FileType: mimeType, + FileSize: fileSize, + StoragePath: storagePath, + Metadata: map[string]interface{}{}, + } + + if err := fileRepo.Create(c.Context(), uploadedFile); err != nil { + fileAnalyzer.DeleteFile(storagePath) + return c.Status(500).JSON(fiber.Map{"error": "Failed to save file record"}) + } + + go func() { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + + result, err := fileAnalyzer.AnalyzeFile(ctx, storagePath, mimeType) + if err != nil { + log.Printf("File analysis failed for %s: %v", uploadedFile.ID, err) + return + } + + fileRepo.UpdateExtractedText(ctx, uploadedFile.ID, result.ExtractedText) + }() + + return c.Status(201).JSON(fiber.Map{ + "id": uploadedFile.ID, + "filename": uploadedFile.Filename, + "fileType": uploadedFile.FileType, + "fileSize": uploadedFile.FileSize, + "status": "processing", + }) + }) + + api.Get("/", func(c *fiber.Ctx) error { + if fileRepo == nil { + return c.Status(503).JSON(fiber.Map{"error": "Service unavailable"}) + } + + userID := middleware.GetUserID(c) + if userID == "" { + return c.Status(401).JSON(fiber.Map{"error": "Unauthorized"}) + } + + limit := c.QueryInt("limit", 50) + offset := 
c.QueryInt("offset", 0) + + files, err := fileRepo.GetByUserID(c.Context(), userID, limit, offset) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to get files"}) + } + + return c.JSON(fiber.Map{"files": files}) + }) + + api.Get("/:id", func(c *fiber.Ctx) error { + if fileRepo == nil { + return c.Status(503).JSON(fiber.Map{"error": "Service unavailable"}) + } + + fileID := c.Params("id") + userID := middleware.GetUserID(c) + + file, err := fileRepo.GetByID(c.Context(), fileID) + if err != nil || file == nil { + return c.Status(404).JSON(fiber.Map{"error": "File not found"}) + } + + if file.UserID != userID { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + return c.JSON(file) + }) + + api.Get("/:id/content", func(c *fiber.Ctx) error { + if fileRepo == nil { + return c.Status(503).JSON(fiber.Map{"error": "Service unavailable"}) + } + + fileID := c.Params("id") + userID := middleware.GetUserID(c) + + file, err := fileRepo.GetByID(c.Context(), fileID) + if err != nil || file == nil { + return c.Status(404).JSON(fiber.Map{"error": "File not found"}) + } + + if file.UserID != userID { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + return c.JSON(fiber.Map{ + "id": file.ID, + "filename": file.Filename, + "extractedText": file.ExtractedText, + }) + }) + + api.Post("/:id/analyze", func(c *fiber.Ctx) error { + if fileRepo == nil || fileAnalyzer == nil { + return c.Status(503).JSON(fiber.Map{"error": "Service unavailable"}) + } + + fileID := c.Params("id") + userID := middleware.GetUserID(c) + + file, err := fileRepo.GetByID(c.Context(), fileID) + if err != nil || file == nil { + return c.Status(404).JSON(fiber.Map{"error": "File not found"}) + } + + if file.UserID != userID { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + result, err := fileAnalyzer.AnalyzeFile(c.Context(), file.StoragePath, file.FileType) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": 
"Analysis failed: " + err.Error()}) + } + + fileRepo.UpdateExtractedText(c.Context(), fileID, result.ExtractedText) + + return c.JSON(result) + }) + + api.Delete("/:id", func(c *fiber.Ctx) error { + if fileRepo == nil || fileAnalyzer == nil { + return c.Status(503).JSON(fiber.Map{"error": "Service unavailable"}) + } + + fileID := c.Params("id") + userID := middleware.GetUserID(c) + + file, err := fileRepo.GetByID(c.Context(), fileID) + if err != nil || file == nil { + return c.Status(404).JSON(fiber.Map{"error": "File not found"}) + } + + if file.UserID != userID { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + fileAnalyzer.DeleteFile(file.StoragePath) + + if err := fileRepo.Delete(c.Context(), fileID); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to delete file"}) + } + + return c.Status(204).Send(nil) + }) + + port := getEnvInt("FILE_SVC_PORT", 3026) + log.Printf("file-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func getEnvInt(key string, defaultValue int) int { + if val := os.Getenv(key); val != "" { + var result int + if _, err := fmt.Sscanf(val, "%d", &result); err == nil { + return result + } + } + return defaultValue +} diff --git a/backend/cmd/finance-heatmap-svc/main.go b/backend/cmd/finance-heatmap-svc/main.go new file mode 100644 index 0000000..5432bd3 --- /dev/null +++ b/backend/cmd/finance-heatmap-svc/main.go @@ -0,0 +1,139 @@ +package main + +import ( + "context" + "fmt" + "log" + "os" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/internal/finance" +) + +func main() { + heatmapSvc := finance.NewHeatmapService(finance.HeatmapConfig{ + CacheTTL: 5 * time.Minute, + RefreshInterval: time.Minute, + }) + + app := fiber.New(fiber.Config{ + ReadTimeout: 30 * time.Second, + WriteTimeout: 30 * time.Second, + }) + + app.Use(logger.New()) + 
app.Use(cors.New()) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + app.Get("/api/v1/heatmap/:market", func(c *fiber.Ctx) error { + market := c.Params("market") + timeRange := c.Query("range", "1d") + + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + heatmap, err := heatmapSvc.GetMarketHeatmap(ctx, market, timeRange) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(heatmap) + }) + + app.Get("/api/v1/heatmap/:market/treemap", func(c *fiber.Ctx) error { + market := c.Params("market") + timeRange := c.Query("range", "1d") + + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + heatmap, err := heatmapSvc.GetMarketHeatmap(ctx, market, timeRange) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + treemapData := heatmapSvc.GenerateTreemapData(heatmap) + return c.JSON(treemapData) + }) + + app.Get("/api/v1/heatmap/:market/grid", func(c *fiber.Ctx) error { + market := c.Params("market") + timeRange := c.Query("range", "1d") + rows := c.QueryInt("rows", 5) + cols := c.QueryInt("cols", 10) + + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + heatmap, err := heatmapSvc.GetMarketHeatmap(ctx, market, timeRange) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + gridData := heatmapSvc.GenerateGridData(heatmap, rows, cols) + return c.JSON(fiber.Map{"grid": gridData, "rows": rows, "cols": cols}) + }) + + app.Get("/api/v1/heatmap/:market/sector/:sector", func(c *fiber.Ctx) error { + market := c.Params("market") + sector := c.Params("sector") + timeRange := c.Query("range", "1d") + + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + heatmap, err := heatmapSvc.GetSectorHeatmap(ctx, market, sector, timeRange) + if err != nil { + return 
c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(heatmap) + }) + + app.Get("/api/v1/movers/:market", func(c *fiber.Ctx) error { + market := c.Params("market") + count := c.QueryInt("count", 10) + + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + movers, err := heatmapSvc.GetTopMovers(ctx, market, count) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(movers) + }) + + app.Get("/api/v1/markets", func(c *fiber.Ctx) error { + markets := []map[string]interface{}{ + {"id": "sp500", "name": "S&P 500", "region": "us"}, + {"id": "nasdaq", "name": "NASDAQ", "region": "us"}, + {"id": "dow", "name": "Dow Jones", "region": "us"}, + {"id": "moex", "name": "MOEX", "region": "ru"}, + {"id": "crypto", "name": "Cryptocurrency", "region": "global"}, + {"id": "forex", "name": "Forex", "region": "global"}, + } + return c.JSON(fiber.Map{"markets": markets}) + }) + + port := getEnvInt("PORT", 3033) + log.Printf("finance-heatmap-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func getEnvInt(key string, defaultValue int) int { + if val := os.Getenv(key); val != "" { + var result int + if _, err := fmt.Sscanf(val, "%d", &result); err == nil { + return result + } + } + return defaultValue +} diff --git a/backend/cmd/labs-svc/main.go b/backend/cmd/labs-svc/main.go new file mode 100644 index 0000000..773e4bb --- /dev/null +++ b/backend/cmd/labs-svc/main.go @@ -0,0 +1,553 @@ +package main + +import ( + "context" + "encoding/json" + "fmt" + "log" + "os" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/internal/labs" + "github.com/gooseek/backend/internal/llm" + "github.com/gooseek/backend/pkg/config" + "github.com/google/uuid" +) + +type ReportStore struct { + reports map[string]*labs.Report +} + +func 
NewReportStore() *ReportStore { + return &ReportStore{ + reports: make(map[string]*labs.Report), + } +} + +func (s *ReportStore) Save(report *labs.Report) { + s.reports[report.ID] = report +} + +func (s *ReportStore) Get(id string) *labs.Report { + return s.reports[id] +} + +func (s *ReportStore) List(limit, offset int) []*labs.Report { + result := make([]*labs.Report, 0) + i := 0 + for _, r := range s.reports { + if i >= offset && len(result) < limit { + result = append(result, r) + } + i++ + } + return result +} + +func (s *ReportStore) Delete(id string) bool { + if _, ok := s.reports[id]; ok { + delete(s.reports, id) + return true + } + return false +} + +func main() { + cfg, err := config.Load() + if err != nil { + log.Fatal("Failed to load config:", err) + } + + var llmClient llm.Client + if cfg.OpenAIAPIKey != "" { + client, err := llm.NewOpenAIClient(llm.ProviderConfig{ + ProviderID: "openai", + APIKey: cfg.OpenAIAPIKey, + ModelKey: "gpt-4o-mini", + }) + if err != nil { + log.Fatal("Failed to create OpenAI client:", err) + } + llmClient = client + } else if cfg.AnthropicAPIKey != "" { + client, err := llm.NewAnthropicClient(llm.ProviderConfig{ + ProviderID: "anthropic", + APIKey: cfg.AnthropicAPIKey, + ModelKey: "claude-3-5-sonnet-20241022", + }) + if err != nil { + log.Fatal("Failed to create Anthropic client:", err) + } + llmClient = client + } + + generator := labs.NewGenerator(llmClient) + store := NewReportStore() + + app := fiber.New(fiber.Config{ + BodyLimit: 100 * 1024 * 1024, + ReadTimeout: 60 * time.Second, + WriteTimeout: 60 * time.Second, + }) + + app.Use(logger.New()) + app.Use(cors.New()) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + app.Post("/api/v1/labs/generate", func(c *fiber.Ctx) error { + var req struct { + Query string `json:"query"` + Data interface{} `json:"data"` + Theme string `json:"theme,omitempty"` + Locale string `json:"locale,omitempty"` + } + + if err := 
c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) + defer cancel() + + report, err := generator.GenerateReport(ctx, labs.GenerateOptions{ + Query: req.Query, + Data: req.Data, + Theme: req.Theme, + Locale: req.Locale, + }) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + store.Save(report) + + return c.JSON(report) + }) + + app.Post("/api/v1/labs/visualize", func(c *fiber.Ctx) error { + var req struct { + Type string `json:"type"` + Title string `json:"title"` + Data interface{} `json:"data"` + Config interface{} `json:"config,omitempty"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + viz := createVisualizationFromRequest(generator, req.Type, req.Title, req.Data, req.Config) + return c.JSON(viz) + }) + + app.Post("/api/v1/labs/chart", func(c *fiber.Ctx) error { + var req struct { + Type string `json:"type"` + Title string `json:"title"` + Labels []string `json:"labels"` + Datasets []labs.ChartDataset `json:"datasets"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + var viz labs.Visualization + switch req.Type { + case "bar", "bar_chart": + if len(req.Datasets) > 0 { + viz = generator.CreateBarChart(req.Title, req.Labels, req.Datasets[0].Data) + } + case "line", "line_chart": + viz = generator.CreateLineChart(req.Title, req.Labels, req.Datasets) + case "pie", "pie_chart": + if len(req.Datasets) > 0 { + viz = generator.CreatePieChart(req.Title, req.Labels, req.Datasets[0].Data) + } + default: + viz = generator.CreateBarChart(req.Title, req.Labels, req.Datasets[0].Data) + } + + return c.JSON(viz) + }) + + app.Post("/api/v1/labs/table", func(c *fiber.Ctx) error { + var req struct { + Title string `json:"title"` + Columns []labs.TableColumn 
`json:"columns"` + Rows []labs.TableRow `json:"rows"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + viz := generator.CreateTable(req.Title, req.Columns, req.Rows) + return c.JSON(viz) + }) + + app.Post("/api/v1/labs/stat-cards", func(c *fiber.Ctx) error { + var req struct { + Title string `json:"title"` + Cards []labs.StatCard `json:"cards"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + viz := generator.CreateStatCards(req.Title, req.Cards) + return c.JSON(viz) + }) + + app.Post("/api/v1/labs/kpi", func(c *fiber.Ctx) error { + var req struct { + Title string `json:"title"` + Value interface{} `json:"value"` + Change float64 `json:"change"` + Unit string `json:"unit"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + viz := generator.CreateKPI(req.Title, req.Value, req.Change, req.Unit) + return c.JSON(viz) + }) + + app.Post("/api/v1/labs/heatmap", func(c *fiber.Ctx) error { + var req struct { + Title string `json:"title"` + XLabels []string `json:"xLabels"` + YLabels []string `json:"yLabels"` + Values [][]float64 `json:"values"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + viz := generator.CreateHeatmap(req.Title, req.XLabels, req.YLabels, req.Values) + return c.JSON(viz) + }) + + app.Post("/api/v1/labs/code", func(c *fiber.Ctx) error { + var req struct { + Title string `json:"title"` + Code string `json:"code"` + Language string `json:"language"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + viz := generator.CreateCodeBlock(req.Title, req.Code, req.Language) + return c.JSON(viz) + }) + + app.Post("/api/v1/labs/markdown", func(c *fiber.Ctx) error { + var req struct { + Title 
string `json:"title"` + Content string `json:"content"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + viz := generator.CreateMarkdown(req.Title, req.Content) + return c.JSON(viz) + }) + + app.Post("/api/v1/labs/tabs", func(c *fiber.Ctx) error { + var req struct { + Title string `json:"title"` + Tabs []labs.TabItem `json:"tabs"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + viz := generator.CreateTabs(req.Title, req.Tabs) + return c.JSON(viz) + }) + + app.Post("/api/v1/labs/accordion", func(c *fiber.Ctx) error { + var req struct { + Title string `json:"title"` + Items []labs.AccordionItem `json:"items"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + viz := generator.CreateAccordion(req.Title, req.Items) + return c.JSON(viz) + }) + + app.Get("/api/v1/labs/reports", func(c *fiber.Ctx) error { + limit := c.QueryInt("limit", 20) + offset := c.QueryInt("offset", 0) + + reports := store.List(limit, offset) + return c.JSON(fiber.Map{"reports": reports, "count": len(reports)}) + }) + + app.Get("/api/v1/labs/reports/:id", func(c *fiber.Ctx) error { + id := c.Params("id") + report := store.Get(id) + if report == nil { + return c.Status(404).JSON(fiber.Map{"error": "Report not found"}) + } + return c.JSON(report) + }) + + app.Delete("/api/v1/labs/reports/:id", func(c *fiber.Ctx) error { + id := c.Params("id") + if store.Delete(id) { + return c.JSON(fiber.Map{"success": true}) + } + return c.Status(404).JSON(fiber.Map{"error": "Report not found"}) + }) + + app.Post("/api/v1/labs/reports/:id/export", func(c *fiber.Ctx) error { + id := c.Params("id") + format := c.Query("format", "html") + + report := store.Get(id) + if report == nil { + return c.Status(404).JSON(fiber.Map{"error": "Report not found"}) + } + + switch format { + case "html": + 
html := exportToHTML(report) + c.Set("Content-Type", "text/html") + c.Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s.html\"", report.ID)) + return c.SendString(html) + case "json": + c.Set("Content-Type", "application/json") + c.Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s.json\"", report.ID)) + return c.JSON(report) + default: + return c.Status(400).JSON(fiber.Map{"error": "Unsupported format"}) + } + }) + + port := getEnvInt("LABS_SVC_PORT", 3031) + log.Printf("labs-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func createVisualizationFromRequest(g *labs.Generator, vizType, title string, data, config interface{}) labs.Visualization { + switch vizType { + case "bar_chart": + return parseChartRequest(g, labs.VizBarChart, title, data) + case "line_chart": + return parseChartRequest(g, labs.VizLineChart, title, data) + case "pie_chart": + return parseChartRequest(g, labs.VizPieChart, title, data) + case "table": + return parseTableRequest(g, title, data) + case "stat_cards": + return parseStatCardsRequest(g, title, data) + case "kpi": + return parseKPIRequest(g, title, data) + case "markdown": + content := "" + if dataMap, ok := data.(map[string]interface{}); ok { + content, _ = dataMap["content"].(string) + } + return g.CreateMarkdown(title, content) + case "code_block": + code, lang := "", "" + if dataMap, ok := data.(map[string]interface{}); ok { + code, _ = dataMap["code"].(string) + lang, _ = dataMap["language"].(string) + } + return g.CreateCodeBlock(title, code, lang) + default: + return g.CreateMarkdown(title, fmt.Sprintf("%v", data)) + } +} + +func parseChartRequest(g *labs.Generator, vizType labs.VisualizationType, title string, data interface{}) labs.Visualization { + dataMap, ok := data.(map[string]interface{}) + if !ok { + return g.CreateMarkdown(title, "Invalid chart data") + } + + labels := make([]string, 0) + if labelsRaw, ok := dataMap["labels"].([]interface{}); ok { + for 
_, l := range labelsRaw { + labels = append(labels, fmt.Sprintf("%v", l)) + } + } + + values := make([]float64, 0) + if valuesRaw, ok := dataMap["values"].([]interface{}); ok { + for _, v := range valuesRaw { + switch val := v.(type) { + case float64: + values = append(values, val) + case int: + values = append(values, float64(val)) + } + } + } + + switch vizType { + case labs.VizBarChart: + return g.CreateBarChart(title, labels, values) + case labs.VizPieChart: + return g.CreatePieChart(title, labels, values) + default: + return g.CreateLineChart(title, labels, []labs.ChartDataset{{Label: title, Data: values}}) + } +} + +func parseTableRequest(g *labs.Generator, title string, data interface{}) labs.Visualization { + dataMap, ok := data.(map[string]interface{}) + if !ok { + return g.CreateMarkdown(title, "Invalid table data") + } + + columns := make([]labs.TableColumn, 0) + if colsRaw, ok := dataMap["columns"].([]interface{}); ok { + for _, c := range colsRaw { + if colMap, ok := c.(map[string]interface{}); ok { + col := labs.TableColumn{} + if v, ok := colMap["key"].(string); ok { + col.Key = v + } + if v, ok := colMap["label"].(string); ok { + col.Label = v + } + columns = append(columns, col) + } + } + } + + rows := make([]labs.TableRow, 0) + if rowsRaw, ok := dataMap["rows"].([]interface{}); ok { + for _, r := range rowsRaw { + if rowMap, ok := r.(map[string]interface{}); ok { + rows = append(rows, labs.TableRow(rowMap)) + } + } + } + + return g.CreateTable(title, columns, rows) +} + +func parseStatCardsRequest(g *labs.Generator, title string, data interface{}) labs.Visualization { + dataMap, ok := data.(map[string]interface{}) + if !ok { + return g.CreateMarkdown(title, "Invalid stat cards data") + } + + cards := make([]labs.StatCard, 0) + if cardsRaw, ok := dataMap["cards"].([]interface{}); ok { + for _, c := range cardsRaw { + if cardMap, ok := c.(map[string]interface{}); ok { + card := labs.StatCard{ID: uuid.New().String()} + if v, ok := 
cardMap["title"].(string); ok { + card.Title = v + } + if v, ok := cardMap["value"]; ok { + card.Value = v + } + if v, ok := cardMap["change"].(float64); ok { + card.Change = v + } + if v, ok := cardMap["color"].(string); ok { + card.Color = v + } + cards = append(cards, card) + } + } + } + + return g.CreateStatCards(title, cards) +} + +func parseKPIRequest(g *labs.Generator, title string, data interface{}) labs.Visualization { + dataMap, ok := data.(map[string]interface{}) + if !ok { + return g.CreateKPI(title, data, 0, "") + } + + value := dataMap["value"] + change := 0.0 + if v, ok := dataMap["change"].(float64); ok { + change = v + } + unit := "" + if v, ok := dataMap["unit"].(string); ok { + unit = v + } + + return g.CreateKPI(title, value, change, unit) +} + +func exportToHTML(report *labs.Report) string { + sectionsHTML := "" + for _, section := range report.Sections { + vizHTML := "" + for _, viz := range section.Visualizations { + dataJSON, _ := json.Marshal(viz.Data) + vizHTML += fmt.Sprintf(` +
+

%s

+
+
+ `, viz.Type, viz.Title, string(dataJSON)) + } + sectionsHTML += fmt.Sprintf(` +
+

%s

+ %s +
+ `, section.Title, vizHTML) + } + + return fmt.Sprintf(` + + + + %s + + + + +

%s

+ %s + +`, report.Title, report.Title, sectionsHTML) +} + +func getEnvInt(key string, defaultValue int) int { + if val := os.Getenv(key); val != "" { + var result int + if _, err := fmt.Sscanf(val, "%d", &result); err == nil { + return result + } + } + return defaultValue +} diff --git a/backend/cmd/learning-svc/main.go b/backend/cmd/learning-svc/main.go new file mode 100644 index 0000000..e07f59e --- /dev/null +++ b/backend/cmd/learning-svc/main.go @@ -0,0 +1,404 @@ +package main + +import ( + "context" + "fmt" + "log" + "os" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/internal/learning" + "github.com/gooseek/backend/internal/llm" + "github.com/gooseek/backend/pkg/config" +) + +type LessonStore struct { + lessons map[string]*learning.StepByStepLesson +} + +func NewLessonStore() *LessonStore { + return &LessonStore{ + lessons: make(map[string]*learning.StepByStepLesson), + } +} + +func (s *LessonStore) Save(lesson *learning.StepByStepLesson) { + s.lessons[lesson.ID] = lesson +} + +func (s *LessonStore) Get(id string) *learning.StepByStepLesson { + return s.lessons[id] +} + +func (s *LessonStore) List(limit, offset int) []*learning.StepByStepLesson { + result := make([]*learning.StepByStepLesson, 0) + i := 0 + for _, l := range s.lessons { + if i >= offset && len(result) < limit { + result = append(result, l) + } + i++ + } + return result +} + +func (s *LessonStore) Delete(id string) bool { + if _, ok := s.lessons[id]; ok { + delete(s.lessons, id) + return true + } + return false +} + +func main() { + cfg, err := config.Load() + if err != nil { + log.Fatal("Failed to load config:", err) + } + + var llmClient llm.Client + + // Priority 1: Timeweb Cloud AI (recommended for production) + if cfg.TimewebAgentAccessID != "" && cfg.TimewebAPIKey != "" { + client, err := llm.NewTimewebClient(llm.TimewebConfig{ + ProviderID: "timeweb", + BaseURL: 
cfg.TimewebAPIBaseURL, + AgentAccessID: cfg.TimewebAgentAccessID, + APIKey: cfg.TimewebAPIKey, + ModelKey: cfg.DefaultLLMModel, + ProxySource: cfg.TimewebProxySource, + }) + if err != nil { + log.Printf("Warning: Failed to create Timeweb client: %v", err) + } else { + llmClient = client + log.Println("Using Timeweb Cloud AI as LLM provider") + } + } + + // Priority 2: Anthropic + if llmClient == nil && cfg.AnthropicAPIKey != "" && !isJWT(cfg.AnthropicAPIKey) { + client, err := llm.NewAnthropicClient(llm.ProviderConfig{ + ProviderID: "anthropic", + APIKey: cfg.AnthropicAPIKey, + ModelKey: "claude-3-5-sonnet-20241022", + }) + if err != nil { + log.Printf("Warning: Failed to create Anthropic client: %v", err) + } else { + llmClient = client + log.Println("Using Anthropic as LLM provider") + } + } + + // Priority 3: OpenAI (only if it's a real OpenAI key, not Timeweb JWT) + if llmClient == nil && cfg.OpenAIAPIKey != "" && !isJWT(cfg.OpenAIAPIKey) { + client, err := llm.NewOpenAIClient(llm.ProviderConfig{ + ProviderID: "openai", + APIKey: cfg.OpenAIAPIKey, + ModelKey: "gpt-4o-mini", + }) + if err != nil { + log.Printf("Warning: Failed to create OpenAI client: %v", err) + } else { + llmClient = client + log.Println("Using OpenAI as LLM provider") + } + } + + if llmClient == nil { + log.Fatal("No LLM provider configured. 
Please set TIMEWEB_AGENT_ACCESS_ID + TIMEWEB_API_KEY, or OPENAI_API_KEY, or ANTHROPIC_API_KEY") + } + + generator := learning.NewLearningGenerator(llmClient) + store := NewLessonStore() + + app := fiber.New(fiber.Config{ + BodyLimit: 50 * 1024 * 1024, + ReadTimeout: 120 * time.Second, + WriteTimeout: 120 * time.Second, + }) + + app.Use(logger.New()) + app.Use(cors.New()) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + app.Post("/api/v1/learning/lesson", func(c *fiber.Ctx) error { + var req struct { + Topic string `json:"topic"` + Query string `json:"query"` + Difficulty string `json:"difficulty"` + Mode string `json:"mode"` + MaxSteps int `json:"maxSteps"` + Locale string `json:"locale"` + IncludeCode bool `json:"includeCode"` + IncludeQuiz bool `json:"includeQuiz"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second) + defer cancel() + + difficulty := learning.DifficultyBeginner + switch req.Difficulty { + case "intermediate": + difficulty = learning.DifficultyIntermediate + case "advanced": + difficulty = learning.DifficultyAdvanced + case "expert": + difficulty = learning.DifficultyExpert + } + + mode := learning.ModeExplain + switch req.Mode { + case "guided": + mode = learning.ModeGuided + case "interactive": + mode = learning.ModeInteractive + case "practice": + mode = learning.ModePractice + case "quiz": + mode = learning.ModeQuiz + } + + lesson, err := generator.GenerateLesson(ctx, learning.GenerateLessonOptions{ + Topic: req.Topic, + Query: req.Query, + Difficulty: difficulty, + Mode: mode, + MaxSteps: req.MaxSteps, + Locale: req.Locale, + IncludeCode: req.IncludeCode, + IncludeQuiz: req.IncludeQuiz, + }) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + store.Save(lesson) + + return c.JSON(lesson) + }) + + 
app.Post("/api/v1/learning/explain", func(c *fiber.Ctx) error { + var req struct { + Topic string `json:"topic"` + Difficulty string `json:"difficulty"` + Locale string `json:"locale"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) + defer cancel() + + difficulty := learning.DifficultyBeginner + switch req.Difficulty { + case "intermediate": + difficulty = learning.DifficultyIntermediate + case "advanced": + difficulty = learning.DifficultyAdvanced + case "expert": + difficulty = learning.DifficultyExpert + } + + step, err := generator.GenerateExplanation(ctx, req.Topic, difficulty, req.Locale) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(step) + }) + + app.Post("/api/v1/learning/quiz", func(c *fiber.Ctx) error { + var req struct { + Topic string `json:"topic"` + NumQuestions int `json:"numQuestions"` + Difficulty string `json:"difficulty"` + Locale string `json:"locale"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + if req.NumQuestions == 0 { + req.NumQuestions = 5 + } + + ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) + defer cancel() + + difficulty := learning.DifficultyBeginner + switch req.Difficulty { + case "intermediate": + difficulty = learning.DifficultyIntermediate + case "advanced": + difficulty = learning.DifficultyAdvanced + case "expert": + difficulty = learning.DifficultyExpert + } + + questions, err := generator.GenerateQuiz(ctx, req.Topic, req.NumQuestions, difficulty, req.Locale) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(fiber.Map{"questions": questions}) + }) + + app.Post("/api/v1/learning/practice", func(c *fiber.Ctx) error { + var req struct { + Topic string `json:"topic"` + 
Language string `json:"language"` + Difficulty string `json:"difficulty"` + Locale string `json:"locale"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) + defer cancel() + + difficulty := learning.DifficultyBeginner + switch req.Difficulty { + case "intermediate": + difficulty = learning.DifficultyIntermediate + case "advanced": + difficulty = learning.DifficultyAdvanced + case "expert": + difficulty = learning.DifficultyExpert + } + + exercise, err := generator.GeneratePracticeExercise(ctx, req.Topic, req.Language, difficulty, req.Locale) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(exercise) + }) + + app.Get("/api/v1/learning/lessons", func(c *fiber.Ctx) error { + limit := c.QueryInt("limit", 20) + offset := c.QueryInt("offset", 0) + + lessons := store.List(limit, offset) + + summaries := make([]map[string]interface{}, 0) + for _, l := range lessons { + summaries = append(summaries, map[string]interface{}{ + "id": l.ID, + "title": l.Title, + "topic": l.Topic, + "difficulty": l.Difficulty, + "mode": l.Mode, + "stepsCount": len(l.Steps), + "estimatedTime": l.EstimatedTime, + "progress": l.Progress, + "createdAt": l.CreatedAt, + }) + } + + return c.JSON(fiber.Map{"lessons": summaries, "count": len(summaries)}) + }) + + app.Get("/api/v1/learning/lessons/:id", func(c *fiber.Ctx) error { + id := c.Params("id") + lesson := store.Get(id) + if lesson == nil { + return c.Status(404).JSON(fiber.Map{"error": "Lesson not found"}) + } + return c.JSON(lesson) + }) + + app.Post("/api/v1/learning/lessons/:id/complete-step", func(c *fiber.Ctx) error { + id := c.Params("id") + lesson := store.Get(id) + if lesson == nil { + return c.Status(404).JSON(fiber.Map{"error": "Lesson not found"}) + } + + var req struct { + StepIndex int `json:"stepIndex"` + } + + if err := 
c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + lesson.CompleteStep(req.StepIndex) + + return c.JSON(fiber.Map{ + "success": true, + "progress": lesson.Progress, + }) + }) + + app.Post("/api/v1/learning/lessons/:id/submit-answer", func(c *fiber.Ctx) error { + id := c.Params("id") + lesson := store.Get(id) + if lesson == nil { + return c.Status(404).JSON(fiber.Map{"error": "Lesson not found"}) + } + + var req struct { + StepIndex int `json:"stepIndex"` + SelectedOptions []string `json:"selectedOptions"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + correct, explanation := lesson.SubmitQuizAnswer(req.StepIndex, req.SelectedOptions) + + if correct { + lesson.CompleteStep(req.StepIndex) + } + + return c.JSON(fiber.Map{ + "correct": correct, + "explanation": explanation, + "progress": lesson.Progress, + }) + }) + + app.Delete("/api/v1/learning/lessons/:id", func(c *fiber.Ctx) error { + id := c.Params("id") + if store.Delete(id) { + return c.JSON(fiber.Map{"success": true}) + } + return c.Status(404).JSON(fiber.Map{"error": "Lesson not found"}) + }) + + port := getEnvInt("LEARNING_SVC_PORT", 3034) + log.Printf("learning-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func getEnvInt(key string, defaultValue int) int { + if val := os.Getenv(key); val != "" { + var result int + if _, err := fmt.Sscanf(val, "%d", &result); err == nil { + return result + } + } + return defaultValue +} + +func isJWT(s string) bool { + return len(s) > 10 && s[:3] == "eyJ" +} diff --git a/backend/cmd/llm-svc/main.go b/backend/cmd/llm-svc/main.go new file mode 100644 index 0000000..e13acf1 --- /dev/null +++ b/backend/cmd/llm-svc/main.go @@ -0,0 +1,191 @@ +package main + +import ( + "bufio" + "context" + "fmt" + "log" + "os" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + 
"github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/internal/llm" + "github.com/gooseek/backend/pkg/config" + "github.com/gooseek/backend/pkg/ndjson" +) + +type GenerateRequest struct { + ProviderID string `json:"providerId"` + ModelKey string `json:"key"` + Messages []struct { + Role string `json:"role"` + Content string `json:"content"` + } `json:"messages"` + Options struct { + MaxTokens int `json:"maxTokens"` + Temperature float64 `json:"temperature"` + Stream bool `json:"stream"` + } `json:"options"` +} + +func main() { + cfg, err := config.Load() + if err != nil { + log.Fatal("Failed to load config:", err) + } + + app := fiber.New(fiber.Config{ + StreamRequestBody: true, + BodyLimit: 10 * 1024 * 1024, + ReadTimeout: time.Minute, + WriteTimeout: 5 * time.Minute, + IdleTimeout: 2 * time.Minute, + }) + + app.Use(logger.New()) + app.Use(cors.New()) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + app.Get("/api/v1/providers", func(c *fiber.Ctx) error { + providers := []fiber.Map{} + + if cfg.OpenAIAPIKey != "" { + providers = append(providers, fiber.Map{ + "id": "openai", + "name": "OpenAI", + "models": []string{"gpt-4o", "gpt-4o-mini", "gpt-4-turbo", "gpt-3.5-turbo"}, + }) + } + + if cfg.AnthropicAPIKey != "" { + providers = append(providers, fiber.Map{ + "id": "anthropic", + "name": "Anthropic", + "models": []string{"claude-3-5-sonnet-20241022", "claude-3-opus-20240229", "claude-3-haiku-20240307"}, + }) + } + + if cfg.GeminiAPIKey != "" { + providers = append(providers, fiber.Map{ + "id": "gemini", + "name": "Google Gemini", + "models": []string{"gemini-1.5-pro", "gemini-1.5-flash", "gemini-2.0-flash-exp"}, + }) + } + + return c.JSON(fiber.Map{ + "providers": providers, + "envOnlyMode": true, + }) + }) + + app.Get("/api/v1/providers/ui-config", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{ + "sections": []interface{}{}, + }) + }) + + app.Post("/api/v1/generate", func(c 
*fiber.Ctx) error { + var req GenerateRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if len(req.Messages) == 0 { + return c.Status(400).JSON(fiber.Map{"error": "Messages required"}) + } + + client, err := llm.NewClient(llm.ProviderConfig{ + ProviderID: req.ProviderID, + ModelKey: req.ModelKey, + APIKey: getAPIKey(cfg, req.ProviderID), + }) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + messages := make([]llm.Message, len(req.Messages)) + for i, m := range req.Messages { + messages[i] = llm.Message{ + Role: llm.Role(m.Role), + Content: m.Content, + } + } + + ctx, cancel := context.WithTimeout(context.Background(), cfg.LLMTimeout) + defer cancel() + + if req.Options.Stream { + stream, err := client.StreamText(ctx, llm.StreamRequest{ + Messages: messages, + Options: llm.StreamOptions{ + MaxTokens: req.Options.MaxTokens, + Temperature: req.Options.Temperature, + }, + }) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + c.Set("Content-Type", "application/x-ndjson") + c.Set("Cache-Control", "no-cache") + + c.Context().SetBodyStreamWriter(func(w *bufio.Writer) { + writer := ndjson.NewWriter(w) + for chunk := range stream { + writer.Write(fiber.Map{ + "type": "chunk", + "chunk": chunk.ContentChunk, + }) + w.Flush() + } + writer.Write(fiber.Map{"type": "done"}) + }) + + return nil + } + + response, err := client.GenerateText(ctx, llm.StreamRequest{ + Messages: messages, + Options: llm.StreamOptions{ + MaxTokens: req.Options.MaxTokens, + Temperature: req.Options.Temperature, + }, + }) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(fiber.Map{ + "content": response, + }) + }) + + port := cfg.LLMSvcPort + log.Printf("llm-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func getAPIKey(cfg *config.Config, providerID string) string 
{ + switch providerID { + case "openai", "timeweb": + return cfg.OpenAIAPIKey + case "anthropic": + return cfg.AnthropicAPIKey + case "gemini", "google": + return cfg.GeminiAPIKey + default: + return "" + } +} + +func init() { + if os.Getenv("PORT") == "" { + os.Setenv("PORT", "3020") + } +} diff --git a/backend/cmd/podcast-svc/main.go b/backend/cmd/podcast-svc/main.go new file mode 100644 index 0000000..7d10f6d --- /dev/null +++ b/backend/cmd/podcast-svc/main.go @@ -0,0 +1,424 @@ +package main + +import ( + "context" + "fmt" + "log" + "os" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/internal/llm" + "github.com/gooseek/backend/internal/podcast" + "github.com/gooseek/backend/pkg/config" +) + +type PodcastStore struct { + podcasts map[string]*podcast.Podcast +} + +func NewPodcastStore() *PodcastStore { + return &PodcastStore{ + podcasts: make(map[string]*podcast.Podcast), + } +} + +func (s *PodcastStore) Save(p *podcast.Podcast) { + s.podcasts[p.ID] = p +} + +func (s *PodcastStore) Get(id string) *podcast.Podcast { + return s.podcasts[id] +} + +func (s *PodcastStore) List(limit, offset int) []*podcast.Podcast { + result := make([]*podcast.Podcast, 0) + i := 0 + for _, p := range s.podcasts { + if i >= offset && len(result) < limit { + result = append(result, p) + } + i++ + } + return result +} + +func (s *PodcastStore) GetLatest() *podcast.Podcast { + var latest *podcast.Podcast + for _, p := range s.podcasts { + if latest == nil || p.GeneratedAt.After(latest.GeneratedAt) { + latest = p + } + } + return latest +} + +func (s *PodcastStore) GetByDate(date time.Time) *podcast.Podcast { + dateStr := date.Format("2006-01-02") + for _, p := range s.podcasts { + if p.Date.Format("2006-01-02") == dateStr { + return p + } + } + return nil +} + +func main() { + cfg, err := config.Load() + if err != nil { + log.Fatal("Failed to load config:", err) + } + 
+ var llmClient llm.Client + if cfg.OpenAIAPIKey != "" { + client, err := llm.NewOpenAIClient(llm.ProviderConfig{ + ProviderID: "openai", + APIKey: cfg.OpenAIAPIKey, + ModelKey: "gpt-4o-mini", + }) + if err != nil { + log.Fatal("Failed to create OpenAI client:", err) + } + llmClient = client + } else if cfg.AnthropicAPIKey != "" { + client, err := llm.NewAnthropicClient(llm.ProviderConfig{ + ProviderID: "anthropic", + APIKey: cfg.AnthropicAPIKey, + ModelKey: "claude-3-5-sonnet-20241022", + }) + if err != nil { + log.Fatal("Failed to create Anthropic client:", err) + } + llmClient = client + } + + var ttsClient podcast.TTSClient + elevenLabsKey := os.Getenv("ELEVENLABS_API_KEY") + if elevenLabsKey != "" { + ttsClient = podcast.NewElevenLabsTTS(elevenLabsKey) + } else { + ttsClient = &podcast.DummyTTS{} + } + + generator := podcast.NewPodcastGenerator(llmClient, ttsClient, podcast.GeneratorConfig{ + DefaultDuration: 300, + MaxDuration: 1800, + OutputDir: "/data/podcasts", + }) + + store := NewPodcastStore() + + app := fiber.New(fiber.Config{ + BodyLimit: 50 * 1024 * 1024, + ReadTimeout: 120 * time.Second, + WriteTimeout: 120 * time.Second, + }) + + app.Use(logger.New()) + app.Use(cors.New()) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + app.Post("/api/v1/podcast/generate", func(c *fiber.Ctx) error { + var req struct { + Type string `json:"type"` + Topics []string `json:"topics"` + NewsItems []podcast.NewsItem `json:"newsItems"` + Duration int `json:"duration"` + Locale string `json:"locale"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + podcastType := podcast.PodcastDaily + switch req.Type { + case "weekly": + podcastType = podcast.PodcastWeekly + case "topic_deep": + podcastType = podcast.PodcastTopicDeep + case "breaking": + podcastType = podcast.PodcastBreaking + } + + ctx, cancel := context.WithTimeout(context.Background(), 
120*time.Second) + defer cancel() + + p, err := generator.GenerateDailyPodcast(ctx, podcast.GenerateOptions{ + Type: podcastType, + Topics: req.Topics, + NewsItems: req.NewsItems, + Duration: req.Duration, + Locale: req.Locale, + IncludeIntro: true, + IncludeOutro: true, + }) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + store.Save(p) + + return c.JSON(p) + }) + + app.Post("/api/v1/podcast/generate-daily", func(c *fiber.Ctx) error { + var req struct { + NewsItems []podcast.NewsItem `json:"newsItems"` + Locale string `json:"locale"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + if req.Locale == "" { + req.Locale = "ru" + } + + ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second) + defer cancel() + + p, err := generator.GenerateDailyPodcast(ctx, podcast.GenerateOptions{ + Type: podcast.PodcastDaily, + NewsItems: req.NewsItems, + Date: time.Now(), + Duration: 300, + Locale: req.Locale, + IncludeIntro: true, + IncludeOutro: true, + }) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + store.Save(p) + + return c.JSON(p) + }) + + app.Post("/api/v1/podcast/generate-weekly", func(c *fiber.Ctx) error { + var req struct { + NewsItems []podcast.NewsItem `json:"newsItems"` + Locale string `json:"locale"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + ctx, cancel := context.WithTimeout(context.Background(), 180*time.Second) + defer cancel() + + p, err := generator.GenerateWeeklySummary(ctx, req.NewsItems, req.Locale) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + store.Save(p) + + return c.JSON(p) + }) + + app.Post("/api/v1/podcast/generate-topic", func(c *fiber.Ctx) error { + var req struct { + Topic string `json:"topic"` + Articles []podcast.NewsItem `json:"articles"` + Locale string 
`json:"locale"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + ctx, cancel := context.WithTimeout(context.Background(), 180*time.Second) + defer cancel() + + p, err := generator.GenerateTopicDeepDive(ctx, req.Topic, req.Articles, req.Locale) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + store.Save(p) + + return c.JSON(p) + }) + + app.Post("/api/v1/podcast/:id/audio", func(c *fiber.Ctx) error { + id := c.Params("id") + p := store.Get(id) + if p == nil { + return c.Status(404).JSON(fiber.Map{"error": "Podcast not found"}) + } + + ctx, cancel := context.WithTimeout(context.Background(), 300*time.Second) + defer cancel() + + audioData, err := generator.GenerateAudio(ctx, p) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + c.Set("Content-Type", "audio/mpeg") + c.Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s.mp3\"", p.ID)) + return c.Send(audioData) + }) + + app.Get("/api/v1/podcasts", func(c *fiber.Ctx) error { + limit := c.QueryInt("limit", 20) + offset := c.QueryInt("offset", 0) + + podcasts := store.List(limit, offset) + + summaries := make([]map[string]interface{}, 0) + for _, p := range podcasts { + summaries = append(summaries, map[string]interface{}{ + "id": p.ID, + "title": p.Title, + "description": p.Description, + "type": p.Type, + "date": p.Date, + "duration": p.Duration, + "status": p.Status, + "topics": p.Topics, + "audioUrl": p.AudioURL, + "generatedAt": p.GeneratedAt, + }) + } + + return c.JSON(fiber.Map{"podcasts": summaries, "count": len(summaries)}) + }) + + app.Get("/api/v1/podcasts/latest", func(c *fiber.Ctx) error { + p := store.GetLatest() + if p == nil { + return c.Status(404).JSON(fiber.Map{"error": "No podcasts found"}) + } + return c.JSON(p) + }) + + app.Get("/api/v1/podcasts/today", func(c *fiber.Ctx) error { + p := store.GetByDate(time.Now()) + if p == nil { + return 
c.Status(404).JSON(fiber.Map{"error": "No podcast for today"}) + } + return c.JSON(p) + }) + + app.Get("/api/v1/podcasts/:id", func(c *fiber.Ctx) error { + id := c.Params("id") + p := store.Get(id) + if p == nil { + return c.Status(404).JSON(fiber.Map{"error": "Podcast not found"}) + } + return c.JSON(p) + }) + + app.Get("/api/v1/podcasts/:id/transcript", func(c *fiber.Ctx) error { + id := c.Params("id") + p := store.Get(id) + if p == nil { + return c.Status(404).JSON(fiber.Map{"error": "Podcast not found"}) + } + return c.JSON(fiber.Map{ + "id": p.ID, + "title": p.Title, + "transcript": p.Transcript, + "segments": p.Segments, + }) + }) + + app.Get("/api/v1/podcasts/:id/segments", func(c *fiber.Ctx) error { + id := c.Params("id") + p := store.Get(id) + if p == nil { + return c.Status(404).JSON(fiber.Map{"error": "Podcast not found"}) + } + return c.JSON(fiber.Map{ + "segments": p.Segments, + }) + }) + + app.Post("/api/v1/podcast/:id/publish", func(c *fiber.Ctx) error { + id := c.Params("id") + p := store.Get(id) + if p == nil { + return c.Status(404).JSON(fiber.Map{"error": "Podcast not found"}) + } + + if p.Status != podcast.StatusReady { + return c.Status(400).JSON(fiber.Map{"error": "Podcast audio not ready"}) + } + + now := time.Now() + p.Status = podcast.StatusPublished + p.PublishedAt = &now + + return c.JSON(fiber.Map{"success": true, "publishedAt": now}) + }) + + app.Get("/api/v1/podcast/rss", func(c *fiber.Ctx) error { + podcasts := store.List(50, 0) + + rss := generateRSSFeed(podcasts, c.BaseURL()) + + c.Set("Content-Type", "application/rss+xml") + return c.SendString(rss) + }) + + port := getEnvInt("PODCAST_SVC_PORT", 3032) + log.Printf("podcast-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func generateRSSFeed(podcasts []*podcast.Podcast, baseURL string) string { + items := "" + for _, p := range podcasts { + if p.Status == podcast.StatusPublished && p.AudioURL != "" { + pubDate := "" + if p.PublishedAt != nil { + 
pubDate = p.PublishedAt.Format(time.RFC1123Z) + } + items += fmt.Sprintf(` + + %s + + %s + + %s + %d + `, p.Title, p.Description, pubDate, p.AudioURL, p.ID, p.Duration) + } + } + + return fmt.Sprintf(` + + + GooSeek Daily + %s + Ежедневный подкаст с главными новостями от GooSeek + ru + GooSeek + + + %s + +`, baseURL, baseURL, items) +} + +func getEnvInt(key string, defaultValue int) int { + if val := os.Getenv(key); val != "" { + var result int + if _, err := fmt.Sscanf(val, "%d", &result); err == nil { + return result + } + } + return defaultValue +} diff --git a/backend/cmd/scraper-svc/main.go b/backend/cmd/scraper-svc/main.go new file mode 100644 index 0000000..5a3c640 --- /dev/null +++ b/backend/cmd/scraper-svc/main.go @@ -0,0 +1,284 @@ +package main + +import ( + "context" + "fmt" + "io" + "log" + "net/http" + "os" + "regexp" + "strings" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/pkg/config" +) + +type ScrapeRequest struct { + URL string `json:"url"` +} + +type ScrapeResponse struct { + URL string `json:"url"` + Title string `json:"title"` + Content string `json:"content"` + Success bool `json:"success"` + Error string `json:"error,omitempty"` +} + +func main() { + cfg, err := config.Load() + if err != nil { + log.Fatal("Failed to load config:", err) + } + + app := fiber.New(fiber.Config{ + BodyLimit: 10 * 1024 * 1024, + ReadTimeout: 30 * time.Second, + WriteTimeout: 30 * time.Second, + IdleTimeout: 60 * time.Second, + }) + + app.Use(logger.New()) + app.Use(cors.New()) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + app.Post("/api/v1/scrape", func(c *fiber.Ctx) error { + var req ScrapeRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if req.URL == "" { + return c.Status(400).JSON(fiber.Map{"error": 
"URL is required"}) + } + + ctx, cancel := context.WithTimeout(context.Background(), cfg.ScrapeTimeout) + defer cancel() + + result := scrapeURL(ctx, req.URL, cfg) + return c.JSON(result) + }) + + app.Post("/api/v1/scrape/batch", func(c *fiber.Ctx) error { + var req struct { + URLs []string `json:"urls"` + } + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if len(req.URLs) == 0 { + return c.Status(400).JSON(fiber.Map{"error": "URLs are required"}) + } + + if len(req.URLs) > 10 { + req.URLs = req.URLs[:10] + } + + ctx, cancel := context.WithTimeout(context.Background(), cfg.ScrapeTimeout*3) + defer cancel() + + results := make([]ScrapeResponse, len(req.URLs)) + resultCh := make(chan struct { + index int + result ScrapeResponse + }, len(req.URLs)) + + for i, url := range req.URLs { + go func(idx int, u string) { + resultCh <- struct { + index int + result ScrapeResponse + }{idx, scrapeURL(ctx, u, cfg)} + }(i, url) + } + + for range req.URLs { + r := <-resultCh + results[r.index] = r.result + } + + return c.JSON(fiber.Map{"results": results}) + }) + + port := cfg.ScraperSvcPort + log.Printf("scraper-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func scrapeURL(ctx context.Context, url string, cfg *config.Config) ScrapeResponse { + if cfg.Crawl4AIURL != "" { + result, err := scrapeWithCrawl4AI(ctx, url, cfg.Crawl4AIURL) + if err == nil && result.Success { + return *result + } + } + + return scrapeDirectly(ctx, url) +} + +func scrapeWithCrawl4AI(ctx context.Context, url, crawl4aiURL string) (*ScrapeResponse, error) { + reqBody := fmt.Sprintf(`{ + "urls": ["%s"], + "crawler_config": { + "type": "CrawlerRunConfig", + "params": { + "cache_mode": "default", + "page_timeout": 20000 + } + } + }`, url) + + req, err := http.NewRequestWithContext(ctx, "POST", crawl4aiURL+"/crawl", strings.NewReader(reqBody)) + if err != nil { + return nil, err + } + 
req.Header.Set("Content-Type", "application/json") + + client := &http.Client{Timeout: 25 * time.Second} + resp, err := client.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("Crawl4AI returned status %d", resp.StatusCode) + } + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + markdown := extractMarkdown(string(body)) + title := extractTitle(string(body)) + + if len(markdown) > 100 { + return &ScrapeResponse{ + URL: url, + Title: title, + Content: truncate(markdown, 15000), + Success: true, + }, nil + } + + return nil, fmt.Errorf("insufficient content from Crawl4AI") +} + +func scrapeDirectly(ctx context.Context, url string) ScrapeResponse { + req, err := http.NewRequestWithContext(ctx, "GET", url, nil) + if err != nil { + return ScrapeResponse{URL: url, Success: false, Error: err.Error()} + } + + req.Header.Set("User-Agent", "GooSeek-Scraper/1.0") + req.Header.Set("Accept", "text/html,application/xhtml+xml") + + client := &http.Client{Timeout: 10 * time.Second} + resp, err := client.Do(req) + if err != nil { + return ScrapeResponse{URL: url, Success: false, Error: err.Error()} + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return ScrapeResponse{URL: url, Success: false, Error: fmt.Sprintf("HTTP %d", resp.StatusCode)} + } + + body, err := io.ReadAll(resp.Body) + if err != nil { + return ScrapeResponse{URL: url, Success: false, Error: err.Error()} + } + + html := string(body) + title := extractHTMLTitle(html) + content := extractTextContent(html) + + if len(content) < 100 { + return ScrapeResponse{URL: url, Success: false, Error: "Insufficient content"} + } + + return ScrapeResponse{ + URL: url, + Title: title, + Content: truncate(content, 15000), + Success: true, + } +} + +var ( + titleRegex = regexp.MustCompile(`]*>([^<]+)`) + scriptRegex = regexp.MustCompile(`(?s)]*>.*?`) + styleRegex = 
regexp.MustCompile(`(?s)]*>.*?`) + tagRegex = regexp.MustCompile(`<[^>]+>`) + spaceRegex = regexp.MustCompile(`\s+`) +) + +func extractHTMLTitle(html string) string { + matches := titleRegex.FindStringSubmatch(html) + if len(matches) > 1 { + return strings.TrimSpace(matches[1]) + } + return "" +} + +func extractTextContent(html string) string { + bodyStart := strings.Index(strings.ToLower(html), "") + + if bodyStart != -1 && bodyEnd != -1 && bodyEnd > bodyStart { + html = html[bodyStart:bodyEnd] + } + + html = scriptRegex.ReplaceAllString(html, "") + html = styleRegex.ReplaceAllString(html, "") + html = tagRegex.ReplaceAllString(html, " ") + html = spaceRegex.ReplaceAllString(html, " ") + + return strings.TrimSpace(html) +} + +func extractMarkdown(response string) string { + if idx := strings.Index(response, `"raw_markdown"`); idx != -1 { + start := strings.Index(response[idx:], `"`) + idx + 1 + start = strings.Index(response[start:], `"`) + start + 1 + end := strings.Index(response[start:], `"`) + if end != -1 { + return response[start : start+end] + } + } + return "" +} + +func extractTitle(response string) string { + if idx := strings.Index(response, `"title"`); idx != -1 { + start := strings.Index(response[idx:], `"`) + idx + 1 + start = strings.Index(response[start:], `"`) + start + 1 + end := strings.Index(response[start:], `"`) + if end != -1 { + return response[start : start+end] + } + } + return "" +} + +func truncate(s string, maxLen int) string { + if len(s) <= maxLen { + return s + } + return s[:maxLen] +} + +func init() { + if os.Getenv("PORT") == "" { + os.Setenv("PORT", "3021") + } +} diff --git a/backend/cmd/search-svc/main.go b/backend/cmd/search-svc/main.go new file mode 100644 index 0000000..56ca2f4 --- /dev/null +++ b/backend/cmd/search-svc/main.go @@ -0,0 +1,143 @@ +package main + +import ( + "context" + "fmt" + "log" + "os" + "strings" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + 
"github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/internal/search" + "github.com/gooseek/backend/pkg/config" +) + +func main() { + cfg, err := config.Load() + if err != nil { + log.Fatal("Failed to load config:", err) + } + + searchClient := search.NewSearXNGClient(cfg) + + app := fiber.New(fiber.Config{ + ReadTimeout: 30 * time.Second, + WriteTimeout: 30 * time.Second, + IdleTimeout: 60 * time.Second, + }) + + app.Use(logger.New()) + app.Use(cors.New()) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + app.Get("/api/v1/search", func(c *fiber.Ctx) error { + query := c.Query("q") + if query == "" { + return c.Status(400).JSON(fiber.Map{"error": "Query parameter 'q' is required"}) + } + + opts := &search.SearchOptions{} + + if engines := c.Query("engines"); engines != "" { + opts.Engines = strings.Split(engines, ",") + } + if categories := c.Query("categories"); categories != "" { + opts.Categories = strings.Split(categories, ",") + } + if pageno := c.QueryInt("pageno", 1); pageno > 0 { + opts.PageNo = pageno + } + if lang := c.Query("language"); lang != "" { + opts.Language = lang + } + + ctx, cancel := context.WithTimeout(context.Background(), cfg.SearchTimeout) + defer cancel() + + result, err := searchClient.Search(ctx, query, opts) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(result) + }) + + app.Get("/api/v1/search/images", func(c *fiber.Ctx) error { + query := c.Query("q") + if query == "" { + return c.Status(400).JSON(fiber.Map{"error": "Query parameter 'q' is required"}) + } + + ctx, cancel := context.WithTimeout(context.Background(), cfg.SearchTimeout) + defer cancel() + + result, err := searchClient.Search(ctx, query, &search.SearchOptions{ + Categories: []string{"images"}, + PageNo: 1, + }) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(result) + }) + + 
app.Get("/api/v1/search/videos", func(c *fiber.Ctx) error { + query := c.Query("q") + if query == "" { + return c.Status(400).JSON(fiber.Map{"error": "Query parameter 'q' is required"}) + } + + ctx, cancel := context.WithTimeout(context.Background(), cfg.SearchTimeout) + defer cancel() + + result, err := searchClient.Search(ctx, query, &search.SearchOptions{ + Categories: []string{"videos"}, + PageNo: 1, + }) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(result) + }) + + app.Get("/api/v1/search/media", func(c *fiber.Ctx) error { + query := c.Query("q") + if query == "" { + return c.Status(400).JSON(fiber.Map{"error": "Query parameter 'q' is required"}) + } + + maxImages := c.QueryInt("maxImages", 8) + maxVideos := c.QueryInt("maxVideos", 6) + + ctx, cancel := context.WithTimeout(context.Background(), cfg.SearchTimeout*2) + defer cancel() + + result, err := searchClient.SearchMedia(ctx, query, &search.MediaSearchOptions{ + MaxImages: maxImages, + MaxVideos: maxVideos, + }) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(result) + }) + + port := cfg.SearchSvcPort + log.Printf("search-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func init() { + if os.Getenv("PORT") == "" { + os.Setenv("PORT", "3001") + } +} diff --git a/backend/cmd/thread-svc/main.go b/backend/cmd/thread-svc/main.go new file mode 100644 index 0000000..fb28c64 --- /dev/null +++ b/backend/cmd/thread-svc/main.go @@ -0,0 +1,624 @@ +package main + +import ( + "context" + "crypto/rand" + "encoding/hex" + "fmt" + "log" + "os" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/internal/db" + "github.com/gooseek/backend/internal/llm" + "github.com/gooseek/backend/internal/pages" + "github.com/gooseek/backend/pkg/config" + 
"github.com/gooseek/backend/pkg/middleware" +) + +func main() { + cfg, err := config.Load() + if err != nil { + log.Fatal("Failed to load config:", err) + } + + var database *db.PostgresDB + var threadRepo *db.ThreadRepository + var spaceRepo *db.SpaceRepository + var memoryRepo *db.MemoryRepository + var pageRepo *db.PageRepository + + if cfg.DatabaseURL != "" { + maxRetries := 30 + for i := 0; i < maxRetries; i++ { + database, err = db.NewPostgresDB(cfg.DatabaseURL) + if err == nil { + break + } + log.Printf("Waiting for database (attempt %d/%d): %v", i+1, maxRetries, err) + time.Sleep(2 * time.Second) + } + if err != nil { + log.Fatal("Database required for thread-svc:", err) + } + log.Println("PostgreSQL connected successfully") + defer database.Close() + + ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) + defer cancel() + + if err := database.RunMigrations(ctx); err != nil { + log.Printf("Base migrations warning: %v", err) + } + + spaceRepo = db.NewSpaceRepository(database) + if err := spaceRepo.RunMigrations(ctx); err != nil { + log.Printf("Space migrations warning: %v", err) + } + + threadRepo = db.NewThreadRepository(database) + if err := threadRepo.RunMigrations(ctx); err != nil { + log.Printf("Thread migrations warning: %v", err) + } + + memoryRepo = db.NewMemoryRepository(database) + if err := memoryRepo.RunMigrations(ctx); err != nil { + log.Printf("Memory migrations warning: %v", err) + } + + pageRepo = db.NewPageRepository(database) + if err := pageRepo.RunMigrations(ctx); err != nil { + log.Printf("Page migrations warning: %v", err) + } + + log.Println("PostgreSQL connected, all migrations complete") + } else { + log.Fatal("DATABASE_URL required for thread-svc") + } + + var llmClient llm.Client + if cfg.OpenAIAPIKey != "" { + llmClient, err = llm.NewClient(llm.ProviderConfig{ + ProviderID: "openai", + ModelKey: "gpt-4o-mini", + APIKey: cfg.OpenAIAPIKey, + }) + if err != nil { + log.Printf("Failed to create LLM client: %v", 
err) + } + } + + app := fiber.New(fiber.Config{ + BodyLimit: 10 * 1024 * 1024, + ReadTimeout: 30 * time.Second, + WriteTimeout: 30 * time.Second, + }) + + app.Use(logger.New()) + app.Use(cors.New()) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + app.Get("/ready", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ready"}) + }) + + threads := app.Group("/api/v1/threads", middleware.JWT(middleware.JWTConfig{ + Secret: cfg.JWTSecret, + AuthSvcURL: cfg.AuthSvcURL, + AllowGuest: true, + })) + + threads.Get("/", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + if userID == "" { + return c.JSON(fiber.Map{"threads": []interface{}{}}) + } + + limit := c.QueryInt("limit", 50) + offset := c.QueryInt("offset", 0) + + threadList, err := threadRepo.GetByUserID(c.Context(), userID, limit, offset) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to get threads"}) + } + + return c.JSON(fiber.Map{"threads": threadList}) + }) + + threads.Post("/", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + if userID == "" { + return c.Status(401).JSON(fiber.Map{"error": "Authentication required"}) + } + + var req struct { + Title string `json:"title"` + FocusMode string `json:"focusMode"` + SpaceID *string `json:"spaceId"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + thread := &db.Thread{ + UserID: userID, + SpaceID: req.SpaceID, + Title: req.Title, + FocusMode: req.FocusMode, + } + + if thread.Title == "" { + thread.Title = "New Thread" + } + if thread.FocusMode == "" { + thread.FocusMode = "all" + } + + if err := threadRepo.Create(c.Context(), thread); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to create thread"}) + } + + return c.Status(201).JSON(thread) + }) + + threads.Get("/:id", func(c *fiber.Ctx) error { + threadID := c.Params("id") + userID := 
middleware.GetUserID(c) + + thread, err := threadRepo.GetByID(c.Context(), threadID) + if err != nil || thread == nil { + return c.Status(404).JSON(fiber.Map{"error": "Thread not found"}) + } + + if thread.UserID != userID && !thread.IsPublic { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + messages, _ := threadRepo.GetMessages(c.Context(), threadID, 100, 0) + thread.Messages = messages + + return c.JSON(thread) + }) + + threads.Post("/:id/messages", func(c *fiber.Ctx) error { + threadID := c.Params("id") + userID := middleware.GetUserID(c) + + thread, err := threadRepo.GetByID(c.Context(), threadID) + if err != nil || thread == nil { + return c.Status(404).JSON(fiber.Map{"error": "Thread not found"}) + } + + if thread.UserID != userID { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + var req struct { + Role string `json:"role"` + Content string `json:"content"` + Sources []db.ThreadSource `json:"sources"` + Widgets []map[string]interface{} `json:"widgets"` + RelatedQuestions []string `json:"relatedQuestions"` + Model string `json:"model"` + TokensUsed int `json:"tokensUsed"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + msg := &db.ThreadMessage{ + ThreadID: threadID, + Role: req.Role, + Content: req.Content, + Sources: req.Sources, + Widgets: req.Widgets, + RelatedQuestions: req.RelatedQuestions, + Model: req.Model, + TokensUsed: req.TokensUsed, + } + + if err := threadRepo.AddMessage(c.Context(), msg); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to add message"}) + } + + if thread.Title == "New Thread" && req.Role == "user" { + threadRepo.GenerateTitle(c.Context(), threadID, req.Content) + } + + return c.Status(201).JSON(msg) + }) + + threads.Post("/:id/share", func(c *fiber.Ctx) error { + threadID := c.Params("id") + userID := middleware.GetUserID(c) + + thread, err := threadRepo.GetByID(c.Context(), threadID) 
+ if err != nil || thread == nil { + return c.Status(404).JSON(fiber.Map{"error": "Thread not found"}) + } + + if thread.UserID != userID { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + shareID := generateShareID() + if err := threadRepo.SetShareID(c.Context(), threadID, shareID); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to share thread"}) + } + + return c.JSON(fiber.Map{ + "shareId": shareID, + "shareUrl": fmt.Sprintf("/share/%s", shareID), + }) + }) + + threads.Delete("/:id", func(c *fiber.Ctx) error { + threadID := c.Params("id") + userID := middleware.GetUserID(c) + + thread, err := threadRepo.GetByID(c.Context(), threadID) + if err != nil || thread == nil { + return c.Status(404).JSON(fiber.Map{"error": "Thread not found"}) + } + + if thread.UserID != userID { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + if err := threadRepo.Delete(c.Context(), threadID); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to delete thread"}) + } + + return c.Status(204).Send(nil) + }) + + share := app.Group("/api/v1/share") + + share.Get("/:shareId", func(c *fiber.Ctx) error { + shareID := c.Params("shareId") + + thread, err := threadRepo.GetByShareID(c.Context(), shareID) + if err != nil || thread == nil { + return c.Status(404).JSON(fiber.Map{"error": "Shared thread not found"}) + } + + messages, _ := threadRepo.GetMessages(c.Context(), thread.ID, 100, 0) + thread.Messages = messages + + return c.JSON(thread) + }) + + spaces := app.Group("/api/v1/spaces", middleware.JWT(middleware.JWTConfig{ + Secret: cfg.JWTSecret, + AuthSvcURL: cfg.AuthSvcURL, + AllowGuest: false, + })) + + spaces.Get("/", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + + spaceList, err := spaceRepo.GetByUserID(c.Context(), userID) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to get spaces"}) + } + + return c.JSON(fiber.Map{"spaces": spaceList}) + }) + + 
spaces.Post("/", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + + var req db.Space + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + req.UserID = userID + if req.Name == "" { + return c.Status(400).JSON(fiber.Map{"error": "Name required"}) + } + + if err := spaceRepo.Create(c.Context(), &req); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to create space"}) + } + + return c.Status(201).JSON(req) + }) + + spaces.Get("/:id", func(c *fiber.Ctx) error { + spaceID := c.Params("id") + userID := middleware.GetUserID(c) + + space, err := spaceRepo.GetByID(c.Context(), spaceID) + if err != nil || space == nil { + return c.Status(404).JSON(fiber.Map{"error": "Space not found"}) + } + + if space.UserID != userID && !space.IsPublic { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + return c.JSON(space) + }) + + spaces.Put("/:id", func(c *fiber.Ctx) error { + spaceID := c.Params("id") + userID := middleware.GetUserID(c) + + space, err := spaceRepo.GetByID(c.Context(), spaceID) + if err != nil || space == nil { + return c.Status(404).JSON(fiber.Map{"error": "Space not found"}) + } + + if space.UserID != userID { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + var req db.Space + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + req.ID = spaceID + req.UserID = userID + + if err := spaceRepo.Update(c.Context(), &req); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to update space"}) + } + + return c.JSON(req) + }) + + spaces.Delete("/:id", func(c *fiber.Ctx) error { + spaceID := c.Params("id") + userID := middleware.GetUserID(c) + + space, err := spaceRepo.GetByID(c.Context(), spaceID) + if err != nil || space == nil { + return c.Status(404).JSON(fiber.Map{"error": "Space not found"}) + } + + if space.UserID != userID { + return 
c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + if err := spaceRepo.Delete(c.Context(), spaceID); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to delete space"}) + } + + return c.Status(204).Send(nil) + }) + + memory := app.Group("/api/v1/memory", middleware.JWT(middleware.JWTConfig{ + Secret: cfg.JWTSecret, + AuthSvcURL: cfg.AuthSvcURL, + AllowGuest: false, + })) + + memory.Get("/", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + memType := c.Query("type", "") + + memories, err := memoryRepo.GetByUserID(c.Context(), userID, memType, 50) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to get memories"}) + } + + return c.JSON(fiber.Map{"memories": memories}) + }) + + memory.Post("/", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + + var req db.UserMemory + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + req.UserID = userID + + if err := memoryRepo.Save(c.Context(), &req); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to save memory"}) + } + + return c.Status(201).JSON(req) + }) + + memory.Get("/context", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + + ctx, err := memoryRepo.GetContextForUser(c.Context(), userID) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to get context"}) + } + + return c.JSON(fiber.Map{"context": ctx}) + }) + + memory.Delete("/:id", func(c *fiber.Ctx) error { + memID := c.Params("id") + + if err := memoryRepo.Delete(c.Context(), memID); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to delete memory"}) + } + + return c.Status(204).Send(nil) + }) + + pagesAPI := app.Group("/api/v1/pages", middleware.JWT(middleware.JWTConfig{ + Secret: cfg.JWTSecret, + AuthSvcURL: cfg.AuthSvcURL, + AllowGuest: true, + })) + + pagesAPI.Get("/", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + if 
userID == "" { + return c.JSON(fiber.Map{"pages": []interface{}{}}) + } + + limit := c.QueryInt("limit", 50) + offset := c.QueryInt("offset", 0) + + pageList, err := pageRepo.GetByUserID(c.Context(), userID, limit, offset) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to get pages"}) + } + + return c.JSON(fiber.Map{"pages": pageList}) + }) + + pagesAPI.Post("/from-thread/:threadId", func(c *fiber.Ctx) error { + threadID := c.Params("threadId") + userID := middleware.GetUserID(c) + + if userID == "" { + return c.Status(401).JSON(fiber.Map{"error": "Authentication required"}) + } + + thread, err := threadRepo.GetByID(c.Context(), threadID) + if err != nil || thread == nil { + return c.Status(404).JSON(fiber.Map{"error": "Thread not found"}) + } + + if thread.UserID != userID { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + messages, _ := threadRepo.GetMessages(c.Context(), threadID, 100, 0) + + var query, answer string + for _, msg := range messages { + if msg.Role == "user" && query == "" { + query = msg.Content + } + if msg.Role == "assistant" { + answer += msg.Content + "\n\n" + } + } + + if llmClient == nil { + return c.Status(503).JSON(fiber.Map{"error": "LLM not configured"}) + } + + generator := pages.NewPageGenerator(pages.PageGeneratorConfig{ + LLMClient: llmClient, + Locale: c.Query("locale", "en"), + }) + + page, err := generator.GenerateFromThread(c.Context(), query, answer, nil) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to generate page"}) + } + + page.UserID = userID + page.ThreadID = threadID + + if err := pageRepo.Create(c.Context(), page); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to save page"}) + } + + return c.Status(201).JSON(page) + }) + + pagesAPI.Get("/:id", func(c *fiber.Ctx) error { + pageID := c.Params("id") + userID := middleware.GetUserID(c) + + page, err := pageRepo.GetByID(c.Context(), pageID) + if err != nil || page == nil { + 
return c.Status(404).JSON(fiber.Map{"error": "Page not found"}) + } + + if page.UserID != userID && !page.IsPublic { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + return c.JSON(page) + }) + + pagesAPI.Post("/:id/share", func(c *fiber.Ctx) error { + pageID := c.Params("id") + userID := middleware.GetUserID(c) + + page, err := pageRepo.GetByID(c.Context(), pageID) + if err != nil || page == nil { + return c.Status(404).JSON(fiber.Map{"error": "Page not found"}) + } + + if page.UserID != userID { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + shareID := generateShareID() + if err := pageRepo.SetShareID(c.Context(), pageID, shareID); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to share page"}) + } + + return c.JSON(fiber.Map{ + "shareId": shareID, + "shareUrl": fmt.Sprintf("/page/%s", shareID), + }) + }) + + pagesAPI.Get("/share/:shareId", func(c *fiber.Ctx) error { + shareID := c.Params("shareId") + + page, err := pageRepo.GetByShareID(c.Context(), shareID) + if err != nil || page == nil { + return c.Status(404).JSON(fiber.Map{"error": "Page not found"}) + } + + pageRepo.IncrementViewCount(c.Context(), page.ID) + + return c.JSON(page) + }) + + pagesAPI.Delete("/:id", func(c *fiber.Ctx) error { + pageID := c.Params("id") + userID := middleware.GetUserID(c) + + page, err := pageRepo.GetByID(c.Context(), pageID) + if err != nil || page == nil { + return c.Status(404).JSON(fiber.Map{"error": "Page not found"}) + } + + if page.UserID != userID { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + if err := pageRepo.Delete(c.Context(), pageID); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to delete page"}) + } + + return c.Status(204).Send(nil) + }) + + port := getEnvInt("THREAD_SVC_PORT", 3027) + log.Printf("thread-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func generateShareID() string { + b := make([]byte, 8) + 
rand.Read(b) + return hex.EncodeToString(b) +} + +func getEnvInt(key string, defaultValue int) int { + if val := os.Getenv(key); val != "" { + var result int + if _, err := fmt.Sscanf(val, "%d", &result); err == nil { + return result + } + } + return defaultValue +} diff --git a/backend/deploy/docker/Dockerfile.agent-svc b/backend/deploy/docker/Dockerfile.agent-svc new file mode 100644 index 0000000..0c2d276 --- /dev/null +++ b/backend/deploy/docker/Dockerfile.agent-svc @@ -0,0 +1,31 @@ +# Dockerfile for agent-svc only +FROM golang:1.22-alpine AS builder + +RUN apk add --no-cache git ca-certificates + +WORKDIR /app + +COPY go.mod go.sum ./ +RUN go mod download + +COPY . . + +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/agent-svc ./cmd/agent-svc + +# Runtime stage +FROM alpine:3.19 + +RUN apk add --no-cache ca-certificates tzdata + +WORKDIR /app + +COPY --from=builder /bin/agent-svc /app/agent-svc + +ENV PORT=3018 + +EXPOSE 3018 + +HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \ + CMD wget --no-verbose --tries=1 --spider http://localhost:3018/health || exit 1 + +CMD ["/app/agent-svc"] diff --git a/backend/deploy/docker/Dockerfile.all b/backend/deploy/docker/Dockerfile.all new file mode 100644 index 0000000..134c19d --- /dev/null +++ b/backend/deploy/docker/Dockerfile.all @@ -0,0 +1,50 @@ +# Multi-service Dockerfile - builds all services +FROM golang:1.22-alpine AS builder + +RUN apk add --no-cache git ca-certificates + +WORKDIR /app + +COPY go.mod go.sum ./ +RUN go mod download + +COPY . . 
+ +# Build all services +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/api-gateway ./cmd/api-gateway +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/agent-svc ./cmd/agent-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/chat-svc ./cmd/chat-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/search-svc ./cmd/search-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/llm-svc ./cmd/llm-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/scraper-svc ./cmd/scraper-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/discover-svc ./cmd/discover-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/collection-svc ./cmd/collection-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/file-svc ./cmd/file-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/thread-svc ./cmd/thread-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/computer-svc ./cmd/computer-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/finance-heatmap-svc ./cmd/finance-heatmap-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/learning-svc ./cmd/learning-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/labs-svc ./cmd/labs-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/podcast-svc ./cmd/podcast-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/browser-svc ./cmd/browser-svc + +# Runtime stage +FROM alpine:3.19 + +RUN apk add --no-cache ca-certificates tzdata + +WORKDIR /app + +COPY --from=builder /bin/* /app/ + +# Default entrypoint - can be overridden +ENV SERVICE=api-gateway +ENV PORT=3015 + +EXPOSE 3015 3018 3005 3001 3020 3021 3002 3025 3026 3027 3030 + +HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \ + CMD wget --no-verbose --tries=1 --spider http://localhost:${PORT}/health || exit 1 + +# Run the specified service 
+CMD /app/${SERVICE} diff --git a/backend/deploy/docker/Dockerfile.api-gateway b/backend/deploy/docker/Dockerfile.api-gateway new file mode 100644 index 0000000..c7695ed --- /dev/null +++ b/backend/deploy/docker/Dockerfile.api-gateway @@ -0,0 +1,32 @@ +# Build stage +FROM golang:1.22-alpine AS builder + +RUN apk add --no-cache git ca-certificates + +WORKDIR /app + +COPY go.mod go.sum ./ +RUN go mod download + +COPY . . + +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /api-gateway ./cmd/api-gateway + +# Runtime stage +FROM alpine:3.19 + +RUN apk add --no-cache ca-certificates tzdata + +WORKDIR /app + +COPY --from=builder /api-gateway . + +ENV PORT=3015 +ENV GIN_MODE=release + +EXPOSE 3015 + +HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \ + CMD wget --no-verbose --tries=1 --spider http://localhost:3015/health || exit 1 + +CMD ["./api-gateway"] diff --git a/backend/deploy/docker/Dockerfile.chat-svc b/backend/deploy/docker/Dockerfile.chat-svc new file mode 100644 index 0000000..40eac4f --- /dev/null +++ b/backend/deploy/docker/Dockerfile.chat-svc @@ -0,0 +1,31 @@ +# Dockerfile for chat-svc only +FROM golang:1.22-alpine AS builder + +RUN apk add --no-cache git ca-certificates + +WORKDIR /app + +COPY go.mod go.sum ./ +RUN go mod download + +COPY . . 
+ +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/chat-svc ./cmd/chat-svc + +# Runtime stage +FROM alpine:3.19 + +RUN apk add --no-cache ca-certificates tzdata + +WORKDIR /app + +COPY --from=builder /bin/chat-svc /app/chat-svc + +ENV PORT=3005 + +EXPOSE 3005 + +HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \ + CMD wget --no-verbose --tries=1 --spider http://localhost:3005/health || exit 1 + +CMD ["/app/chat-svc"] diff --git a/backend/deploy/docker/Dockerfile.computer-svc b/backend/deploy/docker/Dockerfile.computer-svc new file mode 100644 index 0000000..94a7387 --- /dev/null +++ b/backend/deploy/docker/Dockerfile.computer-svc @@ -0,0 +1,32 @@ +# Dockerfile for computer-svc only +FROM golang:1.22-alpine AS builder + +RUN apk add --no-cache git ca-certificates + +WORKDIR /app + +COPY go.mod go.sum ./ +RUN go mod download + +COPY . . + +# Build only computer-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/computer-svc ./cmd/computer-svc + +# Runtime stage +FROM alpine:3.19 + +RUN apk add --no-cache ca-certificates tzdata docker-cli + +WORKDIR /app + +COPY --from=builder /bin/computer-svc /app/computer-svc + +ENV PORT=3030 + +EXPOSE 3030 + +HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \ + CMD wget --no-verbose --tries=1 --spider http://localhost:3030/health || exit 1 + +CMD ["/app/computer-svc"] diff --git a/backend/deploy/docker/Dockerfile.discover-svc b/backend/deploy/docker/Dockerfile.discover-svc new file mode 100644 index 0000000..44816b8 --- /dev/null +++ b/backend/deploy/docker/Dockerfile.discover-svc @@ -0,0 +1,31 @@ +# Dockerfile for discover-svc only +FROM golang:1.22-alpine AS builder + +RUN apk add --no-cache git ca-certificates + +WORKDIR /app + +COPY go.mod go.sum ./ +RUN go mod download + +COPY . . 
+ +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/discover-svc ./cmd/discover-svc + +# Runtime stage +FROM alpine:3.19 + +RUN apk add --no-cache ca-certificates tzdata + +WORKDIR /app + +COPY --from=builder /bin/discover-svc /app/discover-svc + +ENV PORT=3002 + +EXPOSE 3002 + +HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \ + CMD wget --no-verbose --tries=1 --spider http://localhost:3002/health || exit 1 + +CMD ["/app/discover-svc"] diff --git a/backend/deploy/docker/Dockerfile.search-svc b/backend/deploy/docker/Dockerfile.search-svc new file mode 100644 index 0000000..c1aca30 --- /dev/null +++ b/backend/deploy/docker/Dockerfile.search-svc @@ -0,0 +1,31 @@ +# Dockerfile for search-svc only +FROM golang:1.22-alpine AS builder + +RUN apk add --no-cache git ca-certificates + +WORKDIR /app + +COPY go.mod go.sum ./ +RUN go mod download + +COPY . . + +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/search-svc ./cmd/search-svc + +# Runtime stage +FROM alpine:3.19 + +RUN apk add --no-cache ca-certificates tzdata + +WORKDIR /app + +COPY --from=builder /bin/search-svc /app/search-svc + +ENV PORT=3001 + +EXPOSE 3001 + +HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \ + CMD wget --no-verbose --tries=1 --spider http://localhost:3001/health || exit 1 + +CMD ["/app/search-svc"] diff --git a/backend/deploy/docker/docker-compose.yml b/backend/deploy/docker/docker-compose.yml new file mode 100644 index 0000000..41dc7ae --- /dev/null +++ b/backend/deploy/docker/docker-compose.yml @@ -0,0 +1,382 @@ +version: '3.8' + +services: + api-gateway: + build: + context: ../.. 
+ dockerfile: deploy/docker/Dockerfile.all + environment: + - SERVICE=api-gateway + - PORT=3015 + - CHAT_SVC_URL=http://chat-svc:3005 + - MASTER_AGENTS_SVC_URL=http://agent-svc:3018 + - SEARCH_SVC_URL=http://search-svc:3001 + - LLM_SVC_URL=http://llm-svc:3020 + - SCRAPER_SVC_URL=http://scraper-svc:3021 + - THREAD_SVC_URL=http://thread-svc:3027 + - COMPUTER_SVC_URL=http://computer-svc:3030 + - DISCOVER_SVC_URL=http://discover-svc:3002 + - FINANCE_HEATMAP_SVC_URL=http://finance-heatmap-svc:3033 + - LEARNING_SVC_URL=http://learning-svc:3034 + ports: + - "3015:3015" + depends_on: + - chat-svc + - agent-svc + - thread-svc + - computer-svc + networks: + - gooseek + + chat-svc: + build: + context: ../.. + dockerfile: deploy/docker/Dockerfile.chat-svc + environment: + - SERVICE=chat-svc + - PORT=3005 + - MASTER_AGENTS_SVC_URL=http://agent-svc:3018 + - DISCOVER_SVC_URL=http://discover-svc:3002 + ports: + - "3005:3005" + depends_on: + - agent-svc + - discover-svc + networks: + - gooseek + + agent-svc: + build: + context: ../.. + dockerfile: deploy/docker/Dockerfile.agent-svc + environment: + - SERVICE=agent-svc + - PORT=3018 + - SEARXNG_URL=http://searxng:8080 + - DISCOVER_SVC_URL=http://discover-svc:3002 + - CRAWL4AI_URL=http://crawl4ai:11235 + - OPENAI_API_KEY=${OPENAI_API_KEY} + - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} + - GEMINI_API_KEY=${GEMINI_API_KEY} + - TIMEWEB_API_BASE_URL=${TIMEWEB_API_BASE_URL} + - TIMEWEB_AGENT_ACCESS_ID=${TIMEWEB_AGENT_ACCESS_ID} + - TIMEWEB_API_KEY=${TIMEWEB_API_KEY} + ports: + - "3018:3018" + depends_on: + - search-svc + - discover-svc + networks: + - gooseek + + search-svc: + build: + context: ../.. + dockerfile: deploy/docker/Dockerfile.search-svc + environment: + - SERVICE=search-svc + - PORT=3001 + - SEARXNG_URL=http://searxng:8080 + ports: + - "3001:3001" + depends_on: + - searxng + networks: + - gooseek + + llm-svc: + build: + context: ../.. 
+ dockerfile: deploy/docker/Dockerfile.all + environment: + - SERVICE=llm-svc + - PORT=3020 + - OPENAI_API_KEY=${OPENAI_API_KEY} + - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} + - GEMINI_API_KEY=${GEMINI_API_KEY} + ports: + - "3020:3020" + networks: + - gooseek + + scraper-svc: + build: + context: ../.. + dockerfile: deploy/docker/Dockerfile.all + environment: + - SERVICE=scraper-svc + - PORT=3021 + - CRAWL4AI_URL=http://crawl4ai:11235 + ports: + - "3021:3021" + networks: + - gooseek + + discover-svc: + build: + context: ../.. + dockerfile: deploy/docker/Dockerfile.discover-svc + environment: + - SERVICE=discover-svc + - PORT=3002 + - SEARXNG_URL=http://searxng:8080 + - REDIS_URL=redis://redis:6379 + - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable + ports: + - "3002:3002" + depends_on: + - searxng + - postgres + - redis + networks: + - gooseek + + collection-svc: + build: + context: ../.. + dockerfile: deploy/docker/Dockerfile.all + environment: + - SERVICE=collection-svc + - PORT=3025 + - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable + - JWT_SECRET=${JWT_SECRET} + - AUTH_SVC_URL=${AUTH_SVC_URL} + ports: + - "3025:3025" + depends_on: + - postgres + networks: + - gooseek + + file-svc: + build: + context: ../.. + dockerfile: deploy/docker/Dockerfile.all + environment: + - SERVICE=file-svc + - PORT=3026 + - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable + - OPENAI_API_KEY=${OPENAI_API_KEY} + - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} + - JWT_SECRET=${JWT_SECRET} + - FILE_STORAGE_PATH=/data/files + ports: + - "3026:3026" + volumes: + - file-storage:/data/files + depends_on: + - postgres + networks: + - gooseek + + thread-svc: + build: + context: ../.. 
+ dockerfile: deploy/docker/Dockerfile.all + environment: + - SERVICE=thread-svc + - PORT=3027 + - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable + - OPENAI_API_KEY=${OPENAI_API_KEY} + - JWT_SECRET=${JWT_SECRET} + - AUTH_SVC_URL=${AUTH_SVC_URL} + ports: + - "3027:3027" + depends_on: + - postgres + networks: + - gooseek + + computer-svc: + build: + context: ../.. + dockerfile: deploy/docker/Dockerfile.all + environment: + - SERVICE=computer-svc + - PORT=3030 + - COMPUTER_SVC_PORT=3030 + - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable + - REDIS_URL=redis://redis:6379 + - OPENAI_API_KEY=${OPENAI_API_KEY} + - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} + - GEMINI_API_KEY=${GEMINI_API_KEY} + - TELEGRAM_BOT_TOKEN=${TELEGRAM_BOT_TOKEN} + - SANDBOX_IMAGE=python:3.11-slim + - BROWSER_SVC_URL=http://browser-svc:3050 + ports: + - "3030:3030" + volumes: + - /var/run/docker.sock:/var/run/docker.sock + - checkpoints:/data/checkpoints + depends_on: + - postgres + - redis + - browser-svc + networks: + - gooseek + + browser-svc: + build: + context: ../.. + dockerfile: deploy/docker/Dockerfile.all + environment: + - SERVICE=browser-svc + - PORT=3050 + - BROWSER_SVC_PORT=3050 + ports: + - "3050:3050" + volumes: + - screenshots:/tmp/gooseek-screenshots + - recordings:/tmp/gooseek-recordings + networks: + - gooseek + + labs-svc: + build: + context: ../.. + dockerfile: deploy/docker/Dockerfile.all + environment: + - SERVICE=labs-svc + - PORT=3031 + - LABS_SVC_PORT=3031 + - OPENAI_API_KEY=${OPENAI_API_KEY} + - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} + ports: + - "3031:3031" + networks: + - gooseek + + podcast-svc: + build: + context: ../.. 
+ dockerfile: deploy/docker/Dockerfile.all + environment: + - SERVICE=podcast-svc + - PORT=3032 + - PODCAST_SVC_PORT=3032 + - OPENAI_API_KEY=${OPENAI_API_KEY} + - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} + - ELEVENLABS_API_KEY=${ELEVENLABS_API_KEY} + ports: + - "3032:3032" + volumes: + - podcasts:/data/podcasts + networks: + - gooseek + + finance-heatmap-svc: + build: + context: ../.. + dockerfile: deploy/docker/Dockerfile.all + environment: + - SERVICE=finance-heatmap-svc + - PORT=3033 + - REDIS_URL=redis://redis:6379 + ports: + - "3033:3033" + depends_on: + - redis + networks: + - gooseek + + learning-svc: + build: + context: ../.. + dockerfile: deploy/docker/Dockerfile.all + environment: + - SERVICE=learning-svc + - PORT=3034 + - LEARNING_SVC_PORT=3034 + - TIMEWEB_API_BASE_URL=${TIMEWEB_API_BASE_URL} + - TIMEWEB_AGENT_ACCESS_ID=${TIMEWEB_AGENT_ACCESS_ID} + - TIMEWEB_API_KEY=${TIMEWEB_API_KEY} + - DEFAULT_LLM_MODEL=${DEFAULT_LLM_MODEL:-gpt-4o-mini} + - OPENAI_API_KEY=${OPENAI_API_KEY} + - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} + - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable + ports: + - "3034:3034" + depends_on: + - postgres + networks: + - gooseek + + webui: + build: + context: ../../webui + dockerfile: Dockerfile + environment: + - NODE_ENV=production + - API_URL=http://api-gateway:3015 + - NEXT_PUBLIC_API_URL= + ports: + - "3000:3000" + depends_on: + - api-gateway + networks: + - gooseek + restart: unless-stopped + + postgres: + image: postgres:16-alpine + environment: + - POSTGRES_USER=gooseek + - POSTGRES_PASSWORD=gooseek + - POSTGRES_DB=gooseek + volumes: + - postgres-data:/var/lib/postgresql/data + ports: + - "5432:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U gooseek"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - gooseek + + redis: + image: redis:7-alpine + command: redis-server --appendonly yes --maxmemory 256mb --maxmemory-policy allkeys-lru + volumes: + - redis-data:/data + ports: + - 
"6379:6379" + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - gooseek + + searxng: + image: searxng/searxng:latest + volumes: + - ../../../deploy/docker/searxng:/etc/searxng:ro + environment: + - SEARXNG_BASE_URL=http://localhost:8080 + ports: + - "8080:8080" + networks: + - gooseek + + crawl4ai: + image: unclecode/crawl4ai:latest + ports: + - "11235:11235" + networks: + - gooseek + +networks: + gooseek: + driver: bridge + +volumes: + postgres-data: + redis-data: + file-storage: + checkpoints: + screenshots: + recordings: + podcasts: diff --git a/backend/deploy/k8s/agent-svc.yaml b/backend/deploy/k8s/agent-svc.yaml new file mode 100644 index 0000000..ad5d282 --- /dev/null +++ b/backend/deploy/k8s/agent-svc.yaml @@ -0,0 +1,68 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: agent-svc + namespace: gooseek + labels: + app: agent-svc + app.kubernetes.io/name: agent-svc + app.kubernetes.io/part-of: gooseek +spec: + replicas: 3 + selector: + matchLabels: + app: agent-svc + template: + metadata: + labels: + app: agent-svc + spec: + containers: + - name: agent-svc + image: gooseek/backend:latest + env: + - name: SERVICE + value: "agent-svc" + - name: PORT + value: "3018" + envFrom: + - configMapRef: + name: gooseek-config + - secretRef: + name: gooseek-secrets + ports: + - containerPort: 3018 + name: http + livenessProbe: + httpGet: + path: /health + port: 3018 + initialDelaySeconds: 15 + periodSeconds: 20 + readinessProbe: + httpGet: + path: /ready + port: 3018 + initialDelaySeconds: 10 + periodSeconds: 15 + resources: + requests: + cpu: 500m + memory: 512Mi + limits: + cpu: 2000m + memory: 1Gi +--- +apiVersion: v1 +kind: Service +metadata: + name: agent-svc + namespace: gooseek +spec: + type: ClusterIP + selector: + app: agent-svc + ports: + - port: 3018 + targetPort: 3018 + name: http diff --git a/backend/deploy/k8s/api-gateway.yaml b/backend/deploy/k8s/api-gateway.yaml new file mode 100644 index 
0000000..6ded4f4 --- /dev/null +++ b/backend/deploy/k8s/api-gateway.yaml @@ -0,0 +1,68 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: api-gateway + namespace: gooseek + labels: + app: api-gateway + app.kubernetes.io/name: api-gateway + app.kubernetes.io/part-of: gooseek +spec: + replicas: 2 + selector: + matchLabels: + app: api-gateway + template: + metadata: + labels: + app: api-gateway + spec: + containers: + - name: api-gateway + image: gooseek/backend:latest + env: + - name: SERVICE + value: "api-gateway" + - name: PORT + value: "3015" + envFrom: + - configMapRef: + name: gooseek-config + - secretRef: + name: gooseek-secrets + ports: + - containerPort: 3015 + name: http + livenessProbe: + httpGet: + path: /health + port: 3015 + initialDelaySeconds: 10 + periodSeconds: 15 + readinessProbe: + httpGet: + path: /ready + port: 3015 + initialDelaySeconds: 5 + periodSeconds: 10 + resources: + requests: + cpu: 100m + memory: 128Mi + limits: + cpu: 500m + memory: 256Mi +--- +apiVersion: v1 +kind: Service +metadata: + name: api-gateway + namespace: gooseek +spec: + type: ClusterIP + selector: + app: api-gateway + ports: + - port: 3015 + targetPort: 3015 + name: http diff --git a/backend/deploy/k8s/chat-svc.yaml b/backend/deploy/k8s/chat-svc.yaml new file mode 100644 index 0000000..fbf75fa --- /dev/null +++ b/backend/deploy/k8s/chat-svc.yaml @@ -0,0 +1,68 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: chat-svc + namespace: gooseek + labels: + app: chat-svc + app.kubernetes.io/name: chat-svc + app.kubernetes.io/part-of: gooseek +spec: + replicas: 2 + selector: + matchLabels: + app: chat-svc + template: + metadata: + labels: + app: chat-svc + spec: + containers: + - name: chat-svc + image: gooseek/backend:latest + env: + - name: SERVICE + value: "chat-svc" + - name: PORT + value: "3005" + envFrom: + - configMapRef: + name: gooseek-config + - secretRef: + name: gooseek-secrets + ports: + - containerPort: 3005 + name: http + livenessProbe: + 
httpGet: + path: /health + port: 3005 + initialDelaySeconds: 10 + periodSeconds: 15 + readinessProbe: + httpGet: + path: /ready + port: 3005 + initialDelaySeconds: 5 + periodSeconds: 10 + resources: + requests: + cpu: 200m + memory: 256Mi + limits: + cpu: 1000m + memory: 512Mi +--- +apiVersion: v1 +kind: Service +metadata: + name: chat-svc + namespace: gooseek +spec: + type: ClusterIP + selector: + app: chat-svc + ports: + - port: 3005 + targetPort: 3005 + name: http diff --git a/backend/deploy/k8s/collection-svc.yaml b/backend/deploy/k8s/collection-svc.yaml new file mode 100644 index 0000000..90a3d95 --- /dev/null +++ b/backend/deploy/k8s/collection-svc.yaml @@ -0,0 +1,68 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: collection-svc + namespace: gooseek + labels: + app: collection-svc + app.kubernetes.io/name: collection-svc + app.kubernetes.io/part-of: gooseek +spec: + replicas: 2 + selector: + matchLabels: + app: collection-svc + template: + metadata: + labels: + app: collection-svc + spec: + containers: + - name: collection-svc + image: gooseek/backend:latest + env: + - name: SERVICE + value: "collection-svc" + - name: PORT + value: "3025" + envFrom: + - configMapRef: + name: gooseek-config + - secretRef: + name: gooseek-secrets + ports: + - containerPort: 3025 + name: http + livenessProbe: + httpGet: + path: /health + port: 3025 + initialDelaySeconds: 10 + periodSeconds: 15 + readinessProbe: + httpGet: + path: /ready + port: 3025 + initialDelaySeconds: 5 + periodSeconds: 10 + resources: + requests: + cpu: 100m + memory: 128Mi + limits: + cpu: 500m + memory: 256Mi +--- +apiVersion: v1 +kind: Service +metadata: + name: collection-svc + namespace: gooseek +spec: + type: ClusterIP + selector: + app: collection-svc + ports: + - port: 3025 + targetPort: 3025 + name: http diff --git a/backend/deploy/k8s/computer-svc.yaml b/backend/deploy/k8s/computer-svc.yaml new file mode 100644 index 0000000..305523b --- /dev/null +++ 
b/backend/deploy/k8s/computer-svc.yaml @@ -0,0 +1,137 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: computer-svc + namespace: gooseek + labels: + app: computer-svc +spec: + replicas: 1 + selector: + matchLabels: + app: computer-svc + template: + metadata: + labels: + app: computer-svc + spec: + containers: + - name: computer-svc + image: gooseek/backend:latest + command: ["/app/computer-svc"] + ports: + - containerPort: 3030 + env: + - name: COMPUTER_SVC_PORT + value: "3030" + - name: DATABASE_URL + valueFrom: + secretKeyRef: + name: gooseek-secrets + key: DATABASE_URL + - name: REDIS_URL + valueFrom: + configMapKeyRef: + name: gooseek-config + key: REDIS_URL + - name: OPENAI_API_KEY + valueFrom: + secretKeyRef: + name: gooseek-secrets + key: OPENAI_API_KEY + - name: ANTHROPIC_API_KEY + valueFrom: + secretKeyRef: + name: gooseek-secrets + key: ANTHROPIC_API_KEY + optional: true + - name: GEMINI_API_KEY + valueFrom: + secretKeyRef: + name: gooseek-secrets + key: GEMINI_API_KEY + optional: true + - name: TELEGRAM_BOT_TOKEN + valueFrom: + secretKeyRef: + name: gooseek-secrets + key: TELEGRAM_BOT_TOKEN + optional: true + - name: SMTP_HOST + valueFrom: + secretKeyRef: + name: gooseek-secrets + key: SMTP_HOST + optional: true + - name: SMTP_USERNAME + valueFrom: + secretKeyRef: + name: gooseek-secrets + key: SMTP_USERNAME + optional: true + - name: SMTP_PASSWORD + valueFrom: + secretKeyRef: + name: gooseek-secrets + key: SMTP_PASSWORD + optional: true + - name: S3_ENDPOINT + valueFrom: + secretKeyRef: + name: gooseek-secrets + key: S3_ENDPOINT + optional: true + - name: S3_ACCESS_KEY + valueFrom: + secretKeyRef: + name: gooseek-secrets + key: S3_ACCESS_KEY + optional: true + - name: S3_SECRET_KEY + valueFrom: + secretKeyRef: + name: gooseek-secrets + key: S3_SECRET_KEY + optional: true + - name: SANDBOX_IMAGE + value: "gooseek/sandbox:latest" + resources: + requests: + memory: "256Mi" + cpu: "100m" + limits: + memory: "1Gi" + cpu: "1000m" + livenessProbe: 
+ httpGet: + path: /health + port: 3030 + initialDelaySeconds: 10 + periodSeconds: 30 + readinessProbe: + httpGet: + path: /health + port: 3030 + initialDelaySeconds: 5 + periodSeconds: 10 + volumeMounts: + - name: docker-socket + mountPath: /var/run/docker.sock + volumes: + - name: docker-socket + hostPath: + path: /var/run/docker.sock + type: Socket +--- +apiVersion: v1 +kind: Service +metadata: + name: computer-svc + namespace: gooseek +spec: + selector: + app: computer-svc + ports: + - port: 3030 + targetPort: 3030 + type: ClusterIP diff --git a/backend/deploy/k8s/configmap.yaml b/backend/deploy/k8s/configmap.yaml new file mode 100644 index 0000000..da29e6d --- /dev/null +++ b/backend/deploy/k8s/configmap.yaml @@ -0,0 +1,32 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: gooseek-config + namespace: gooseek +data: + SEARXNG_URL: "http://searxng:8080" + CRAWL4AI_URL: "http://crawl4ai:11235" + REDIS_URL: "redis://redis:6379" + DATABASE_URL: "postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable" + DISCOVER_SVC_URL: "http://discover-svc:3002" + MASTER_AGENTS_SVC_URL: "http://agent-svc:3018" + SEARCH_SVC_URL: "http://search-svc:3001" + LLM_SVC_URL: "http://llm-svc:3020" + SCRAPER_SVC_URL: "http://scraper-svc:3021" + COLLECTION_SVC_URL: "http://collection-svc:3025" + FILE_SVC_URL: "http://file-svc:3026" + THREAD_SVC_URL: "http://thread-svc:3027" +--- +apiVersion: v1 +kind: Secret +metadata: + name: gooseek-secrets + namespace: gooseek +type: Opaque +stringData: + OPENAI_API_KEY: "${OPENAI_API_KEY}" + ANTHROPIC_API_KEY: "${ANTHROPIC_API_KEY}" + GEMINI_API_KEY: "${GEMINI_API_KEY}" + JWT_SECRET: "${JWT_SECRET}" + POSTGRES_USER: "gooseek" + POSTGRES_PASSWORD: "gooseek" diff --git a/backend/deploy/k8s/deploy.sh b/backend/deploy/k8s/deploy.sh new file mode 100755 index 0000000..e41c781 --- /dev/null +++ b/backend/deploy/k8s/deploy.sh @@ -0,0 +1,56 @@ +#!/bin/bash +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +BACKEND_DIR="$(cd 
"$SCRIPT_DIR/../.." && pwd)" + +echo "=== GooSeek Go Backend K8s Deployment ===" +echo "Backend dir: $BACKEND_DIR" + +# Check kubectl +if ! command -v kubectl &> /dev/null; then + echo "Error: kubectl not found" + exit 1 +fi + +# Build Docker image +echo "" +echo "=== Building Docker image ===" +cd "$BACKEND_DIR" +docker build -f deploy/docker/Dockerfile.all -t gooseek/backend:latest . + +# Load to k3s (if using k3s) +if command -v k3s &> /dev/null; then + echo "" + echo "=== Loading image to k3s ===" + docker save gooseek/backend:latest | sudo k3s ctr images import - +fi + +# Apply kustomization +echo "" +echo "=== Applying K8s manifests ===" +cd "$SCRIPT_DIR" +kubectl apply -k . + +# Wait for rollout +echo "" +echo "=== Waiting for deployments ===" +kubectl -n gooseek rollout status deployment/api-gateway --timeout=120s || true +kubectl -n gooseek rollout status deployment/chat-svc --timeout=120s || true +kubectl -n gooseek rollout status deployment/agent-svc --timeout=120s || true +kubectl -n gooseek rollout status deployment/discover-svc --timeout=120s || true +kubectl -n gooseek rollout status deployment/search-svc --timeout=120s || true +kubectl -n gooseek rollout status deployment/redis --timeout=60s || true + +# Show status +echo "" +echo "=== Deployment Status ===" +kubectl -n gooseek get pods +echo "" +kubectl -n gooseek get svc +echo "" +kubectl -n gooseek get ingress + +echo "" +echo "=== Done ===" +echo "API Gateway: http://localhost:3015 (NodePort) or via Ingress" diff --git a/backend/deploy/k8s/discover-svc.yaml b/backend/deploy/k8s/discover-svc.yaml new file mode 100644 index 0000000..904ab10 --- /dev/null +++ b/backend/deploy/k8s/discover-svc.yaml @@ -0,0 +1,68 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: discover-svc + namespace: gooseek + labels: + app: discover-svc + app.kubernetes.io/name: discover-svc + app.kubernetes.io/part-of: gooseek +spec: + replicas: 2 + selector: + matchLabels: + app: discover-svc + template: + metadata: 
+ labels: + app: discover-svc + spec: + containers: + - name: discover-svc + image: gooseek/backend:latest + env: + - name: SERVICE + value: "discover-svc" + - name: PORT + value: "3002" + envFrom: + - configMapRef: + name: gooseek-config + - secretRef: + name: gooseek-secrets + ports: + - containerPort: 3002 + name: http + livenessProbe: + httpGet: + path: /health + port: 3002 + initialDelaySeconds: 10 + periodSeconds: 15 + readinessProbe: + httpGet: + path: /ready + port: 3002 + initialDelaySeconds: 5 + periodSeconds: 10 + resources: + requests: + cpu: 200m + memory: 256Mi + limits: + cpu: 500m + memory: 512Mi +--- +apiVersion: v1 +kind: Service +metadata: + name: discover-svc + namespace: gooseek +spec: + type: ClusterIP + selector: + app: discover-svc + ports: + - port: 3002 + targetPort: 3002 + name: http diff --git a/backend/deploy/k8s/file-svc.yaml b/backend/deploy/k8s/file-svc.yaml new file mode 100644 index 0000000..ab86cb1 --- /dev/null +++ b/backend/deploy/k8s/file-svc.yaml @@ -0,0 +1,90 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: file-svc + namespace: gooseek + labels: + app: file-svc + app.kubernetes.io/name: file-svc + app.kubernetes.io/part-of: gooseek +spec: + replicas: 2 + selector: + matchLabels: + app: file-svc + template: + metadata: + labels: + app: file-svc + spec: + containers: + - name: file-svc + image: gooseek/backend:latest + env: + - name: SERVICE + value: "file-svc" + - name: PORT + value: "3026" + - name: FILE_STORAGE_PATH + value: "/data/files" + envFrom: + - configMapRef: + name: gooseek-config + - secretRef: + name: gooseek-secrets + ports: + - containerPort: 3026 + name: http + volumeMounts: + - name: file-storage + mountPath: /data/files + livenessProbe: + httpGet: + path: /health + port: 3026 + initialDelaySeconds: 10 + periodSeconds: 15 + readinessProbe: + httpGet: + path: /ready + port: 3026 + initialDelaySeconds: 5 + periodSeconds: 10 + resources: + requests: + cpu: 200m + memory: 256Mi + limits: + cpu: 1000m 
+ memory: 512Mi + volumes: + - name: file-storage + persistentVolumeClaim: + claimName: file-storage-pvc +--- +apiVersion: v1 +kind: Service +metadata: + name: file-svc + namespace: gooseek +spec: + type: ClusterIP + selector: + app: file-svc + ports: + - port: 3026 + targetPort: 3026 + name: http +--- +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: file-storage-pvc + namespace: gooseek +spec: + accessModes: + - ReadWriteMany + resources: + requests: + storage: 10Gi + storageClassName: local-path diff --git a/backend/deploy/k8s/ingress.yaml b/backend/deploy/k8s/ingress.yaml new file mode 100644 index 0000000..e0be725 --- /dev/null +++ b/backend/deploy/k8s/ingress.yaml @@ -0,0 +1,51 @@ +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: gooseek-ingress + namespace: gooseek + annotations: + nginx.ingress.kubernetes.io/proxy-body-size: "50m" + nginx.ingress.kubernetes.io/proxy-read-timeout: "300" + nginx.ingress.kubernetes.io/proxy-send-timeout: "300" + nginx.ingress.kubernetes.io/proxy-buffering: "off" + nginx.ingress.kubernetes.io/ssl-redirect: "true" + cert-manager.io/cluster-issuer: "letsencrypt-prod" +spec: + ingressClassName: nginx + tls: + - hosts: + - api.gooseek.ru + secretName: gooseek-tls + rules: + - host: api.gooseek.ru + http: + paths: + - path: / + pathType: Prefix + backend: + service: + name: api-gateway + port: + number: 3015 +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: gooseek-ingress-local + namespace: gooseek + annotations: + nginx.ingress.kubernetes.io/proxy-body-size: "50m" + nginx.ingress.kubernetes.io/proxy-read-timeout: "300" +spec: + ingressClassName: nginx + rules: + - host: localhost + http: + paths: + - path: /api + pathType: Prefix + backend: + service: + name: api-gateway + port: + number: 3015 diff --git a/backend/deploy/k8s/kustomization.yaml b/backend/deploy/k8s/kustomization.yaml new file mode 100644 index 0000000..d6866cf --- /dev/null +++ 
b/backend/deploy/k8s/kustomization.yaml @@ -0,0 +1,30 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +namespace: gooseek + +resources: + - namespace.yaml + - configmap.yaml + - postgres.yaml + - redis.yaml + - api-gateway.yaml + - chat-svc.yaml + - agent-svc.yaml + - search-svc.yaml + - discover-svc.yaml + - llm-svc.yaml + - scraper-svc.yaml + - collection-svc.yaml + - file-svc.yaml + - thread-svc.yaml + - computer-svc.yaml + - ingress.yaml + +commonLabels: + app.kubernetes.io/part-of: gooseek + app.kubernetes.io/managed-by: kustomize + +images: + - name: gooseek/backend + newTag: latest diff --git a/backend/deploy/k8s/llm-svc.yaml b/backend/deploy/k8s/llm-svc.yaml new file mode 100644 index 0000000..49ca6f7 --- /dev/null +++ b/backend/deploy/k8s/llm-svc.yaml @@ -0,0 +1,68 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: llm-svc + namespace: gooseek + labels: + app: llm-svc + app.kubernetes.io/name: llm-svc + app.kubernetes.io/part-of: gooseek +spec: + replicas: 2 + selector: + matchLabels: + app: llm-svc + template: + metadata: + labels: + app: llm-svc + spec: + containers: + - name: llm-svc + image: gooseek/backend:latest + env: + - name: SERVICE + value: "llm-svc" + - name: PORT + value: "3020" + envFrom: + - configMapRef: + name: gooseek-config + - secretRef: + name: gooseek-secrets + ports: + - containerPort: 3020 + name: http + livenessProbe: + httpGet: + path: /health + port: 3020 + initialDelaySeconds: 10 + periodSeconds: 15 + readinessProbe: + httpGet: + path: /ready + port: 3020 + initialDelaySeconds: 5 + periodSeconds: 10 + resources: + requests: + cpu: 100m + memory: 128Mi + limits: + cpu: 500m + memory: 256Mi +--- +apiVersion: v1 +kind: Service +metadata: + name: llm-svc + namespace: gooseek +spec: + type: ClusterIP + selector: + app: llm-svc + ports: + - port: 3020 + targetPort: 3020 + name: http diff --git a/backend/deploy/k8s/namespace.yaml b/backend/deploy/k8s/namespace.yaml new file mode 100644 index 
0000000..698e6dc --- /dev/null +++ b/backend/deploy/k8s/namespace.yaml @@ -0,0 +1,7 @@ +apiVersion: v1 +kind: Namespace +metadata: + name: gooseek + labels: + app.kubernetes.io/name: gooseek + app.kubernetes.io/managed-by: kubectl diff --git a/backend/deploy/k8s/postgres.yaml b/backend/deploy/k8s/postgres.yaml new file mode 100644 index 0000000..5af6d25 --- /dev/null +++ b/backend/deploy/k8s/postgres.yaml @@ -0,0 +1,86 @@ +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: postgres + namespace: gooseek + labels: + app: postgres + app.kubernetes.io/name: postgres + app.kubernetes.io/part-of: gooseek +spec: + serviceName: postgres + replicas: 1 + selector: + matchLabels: + app: postgres + template: + metadata: + labels: + app: postgres + spec: + containers: + - name: postgres + image: postgres:16-alpine + env: + - name: POSTGRES_USER + valueFrom: + secretKeyRef: + name: gooseek-secrets + key: POSTGRES_USER + optional: true + - name: POSTGRES_PASSWORD + valueFrom: + secretKeyRef: + name: gooseek-secrets + key: POSTGRES_PASSWORD + optional: true + - name: POSTGRES_DB + value: "gooseek" + - name: PGDATA + value: "/var/lib/postgresql/data/pgdata" + ports: + - containerPort: 5432 + name: postgres + volumeMounts: + - name: postgres-data + mountPath: /var/lib/postgresql/data + resources: + requests: + cpu: 200m + memory: 256Mi + limits: + cpu: 1000m + memory: 1Gi + livenessProbe: + exec: + command: ["pg_isready", "-U", "gooseek"] + initialDelaySeconds: 30 + periodSeconds: 10 + readinessProbe: + exec: + command: ["pg_isready", "-U", "gooseek"] + initialDelaySeconds: 5 + periodSeconds: 5 + volumeClaimTemplates: + - metadata: + name: postgres-data + spec: + accessModes: ["ReadWriteOnce"] + storageClassName: local-path + resources: + requests: + storage: 10Gi +--- +apiVersion: v1 +kind: Service +metadata: + name: postgres + namespace: gooseek +spec: + type: ClusterIP + selector: + app: postgres + ports: + - port: 5432 + targetPort: 5432 + name: postgres diff --git 
a/backend/deploy/k8s/redis.yaml b/backend/deploy/k8s/redis.yaml new file mode 100644 index 0000000..e82e731 --- /dev/null +++ b/backend/deploy/k8s/redis.yaml @@ -0,0 +1,63 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: redis + namespace: gooseek + labels: + app: redis + app.kubernetes.io/name: redis + app.kubernetes.io/part-of: gooseek +spec: + replicas: 1 + selector: + matchLabels: + app: redis + template: + metadata: + labels: + app: redis + spec: + containers: + - name: redis + image: redis:7-alpine + command: ["redis-server", "--appendonly", "yes", "--maxmemory", "256mb", "--maxmemory-policy", "allkeys-lru"] + ports: + - containerPort: 6379 + name: redis + volumeMounts: + - name: redis-data + mountPath: /data + resources: + requests: + cpu: 100m + memory: 128Mi + limits: + cpu: 500m + memory: 512Mi + livenessProbe: + exec: + command: ["redis-cli", "ping"] + initialDelaySeconds: 5 + periodSeconds: 10 + readinessProbe: + exec: + command: ["redis-cli", "ping"] + initialDelaySeconds: 3 + periodSeconds: 5 + volumes: + - name: redis-data + emptyDir: {} +--- +apiVersion: v1 +kind: Service +metadata: + name: redis + namespace: gooseek +spec: + type: ClusterIP + selector: + app: redis + ports: + - port: 6379 + targetPort: 6379 + name: redis diff --git a/backend/deploy/k8s/scraper-svc.yaml b/backend/deploy/k8s/scraper-svc.yaml new file mode 100644 index 0000000..cbf24bc --- /dev/null +++ b/backend/deploy/k8s/scraper-svc.yaml @@ -0,0 +1,66 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: scraper-svc + namespace: gooseek + labels: + app: scraper-svc + app.kubernetes.io/name: scraper-svc + app.kubernetes.io/part-of: gooseek +spec: + replicas: 2 + selector: + matchLabels: + app: scraper-svc + template: + metadata: + labels: + app: scraper-svc + spec: + containers: + - name: scraper-svc + image: gooseek/backend:latest + env: + - name: SERVICE + value: "scraper-svc" + - name: PORT + value: "3021" + envFrom: + - configMapRef: + name: gooseek-config + 
ports: + - containerPort: 3021 + name: http + livenessProbe: + httpGet: + path: /health + port: 3021 + initialDelaySeconds: 10 + periodSeconds: 15 + readinessProbe: + httpGet: + path: /ready + port: 3021 + initialDelaySeconds: 5 + periodSeconds: 10 + resources: + requests: + cpu: 200m + memory: 256Mi + limits: + cpu: 1000m + memory: 512Mi +--- +apiVersion: v1 +kind: Service +metadata: + name: scraper-svc + namespace: gooseek +spec: + type: ClusterIP + selector: + app: scraper-svc + ports: + - port: 3021 + targetPort: 3021 + name: http diff --git a/backend/deploy/k8s/search-svc.yaml b/backend/deploy/k8s/search-svc.yaml new file mode 100644 index 0000000..1c25471 --- /dev/null +++ b/backend/deploy/k8s/search-svc.yaml @@ -0,0 +1,66 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: search-svc + namespace: gooseek + labels: + app: search-svc + app.kubernetes.io/name: search-svc + app.kubernetes.io/part-of: gooseek +spec: + replicas: 2 + selector: + matchLabels: + app: search-svc + template: + metadata: + labels: + app: search-svc + spec: + containers: + - name: search-svc + image: gooseek/backend:latest + env: + - name: SERVICE + value: "search-svc" + - name: PORT + value: "3001" + envFrom: + - configMapRef: + name: gooseek-config + ports: + - containerPort: 3001 + name: http + livenessProbe: + httpGet: + path: /health + port: 3001 + initialDelaySeconds: 10 + periodSeconds: 15 + readinessProbe: + httpGet: + path: /ready + port: 3001 + initialDelaySeconds: 5 + periodSeconds: 10 + resources: + requests: + cpu: 100m + memory: 128Mi + limits: + cpu: 500m + memory: 256Mi +--- +apiVersion: v1 +kind: Service +metadata: + name: search-svc + namespace: gooseek +spec: + type: ClusterIP + selector: + app: search-svc + ports: + - port: 3001 + targetPort: 3001 + name: http diff --git a/backend/deploy/k8s/thread-svc.yaml b/backend/deploy/k8s/thread-svc.yaml new file mode 100644 index 0000000..fb469f5 --- /dev/null +++ b/backend/deploy/k8s/thread-svc.yaml @@ -0,0 +1,63 @@ 
+apiVersion: apps/v1 +kind: Deployment +metadata: + name: thread-svc + namespace: gooseek + labels: + app: thread-svc +spec: + replicas: 2 + selector: + matchLabels: + app: thread-svc + template: + metadata: + labels: + app: thread-svc + spec: + containers: + - name: thread-svc + image: gooseek/backend:latest + command: ["/app/thread-svc"] + ports: + - containerPort: 3027 + envFrom: + - configMapRef: + name: gooseek-config + - secretRef: + name: gooseek-secrets + env: + - name: THREAD_SVC_PORT + value: "3027" + resources: + requests: + memory: "128Mi" + cpu: "100m" + limits: + memory: "512Mi" + cpu: "500m" + readinessProbe: + httpGet: + path: /ready + port: 3027 + initialDelaySeconds: 5 + periodSeconds: 10 + livenessProbe: + httpGet: + path: /health + port: 3027 + initialDelaySeconds: 10 + periodSeconds: 30 +--- +apiVersion: v1 +kind: Service +metadata: + name: thread-svc + namespace: gooseek +spec: + selector: + app: thread-svc + ports: + - port: 3027 + targetPort: 3027 + type: ClusterIP diff --git a/backend/go.mod b/backend/go.mod new file mode 100644 index 0000000..05e29a4 --- /dev/null +++ b/backend/go.mod @@ -0,0 +1,43 @@ +module github.com/gooseek/backend + +go 1.22 + +require ( + github.com/gofiber/fiber/v2 v2.52.0 + github.com/golang-jwt/jwt/v5 v5.2.1 + github.com/google/uuid v1.6.0 + github.com/joho/godotenv v1.5.1 + github.com/ledongthuc/pdf v0.0.0-20240201131950-da5b75280b06 + github.com/lib/pq v1.10.9 + github.com/minio/minio-go/v7 v7.0.70 + github.com/redis/go-redis/v9 v9.4.0 + github.com/robfig/cron/v3 v3.0.1 + github.com/sashabaranov/go-openai v1.20.0 + go.uber.org/zap v1.27.0 + golang.org/x/sync v0.6.0 +) + +require ( + github.com/andybalholm/brotli v1.1.0 // indirect + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/goccy/go-json v0.10.2 // indirect + github.com/klauspost/compress v1.17.6 // indirect + 
github.com/klauspost/cpuid/v2 v2.2.6 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-runewidth v0.0.15 // indirect + github.com/minio/md5-simd v1.1.2 // indirect + github.com/rivo/uniseg v0.4.6 // indirect + github.com/rs/xid v1.5.0 // indirect + github.com/valyala/bytebufferpool v1.0.0 // indirect + github.com/valyala/fasthttp v1.52.0 // indirect + github.com/valyala/tcplisten v1.0.0 // indirect + go.uber.org/multierr v1.11.0 // indirect + golang.org/x/crypto v0.21.0 // indirect + golang.org/x/net v0.23.0 // indirect + golang.org/x/sys v0.18.0 // indirect + golang.org/x/text v0.14.0 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect +) diff --git a/backend/go.sum b/backend/go.sum new file mode 100644 index 0000000..2d15a09 --- /dev/null +++ b/backend/go.sum @@ -0,0 +1,88 @@ +github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M= +github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY= +github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= +github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= +github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= +github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= +github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= 
+github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= +github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/gofiber/fiber/v2 v2.52.0 h1:S+qXi7y+/Pgvqq4DrSmREGiFwtB7Bu6+QFLuIHYw/UE= +github.com/gofiber/fiber/v2 v2.52.0/go.mod h1:KEOE+cXMhXG0zHc9d8+E38hoX+ZN7bhOtgeF2oT6jrQ= +github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= +github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI= +github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= +github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= +github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/ledongthuc/pdf v0.0.0-20240201131950-da5b75280b06 h1:kacRlPN7EN++tVpGUorNGPn/4DnB7/DfTY82AOn6ccU= +github.com/ledongthuc/pdf v0.0.0-20240201131950-da5b75280b06/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable 
v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= +github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34= +github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM= +github.com/minio/minio-go/v7 v7.0.70 h1:1u9NtMgfK1U42kUxcsl5v0yj6TEOPR497OAQxpJnn2g= +github.com/minio/minio-go/v7 v7.0.70/go.mod h1:4yBA8v80xGA30cfM3fz0DKYMXunWl/AV/6tWEs9ryzo= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/redis/go-redis/v9 v9.4.0 h1:Yzoz33UZw9I/mFhx4MNrB6Fk+XHO1VukNcCa1+lwyKk= +github.com/redis/go-redis/v9 v9.4.0/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.4.6 h1:Sovz9sDSwbOz9tgUy8JpT+KgCkPYJEN/oYzlJiYTNLg= +github.com/rivo/uniseg v0.4.6/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= +github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= +github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc= +github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= +github.com/sashabaranov/go-openai v1.20.0 h1:r9WiwJY6Q2aPDhVyfOSKm83Gs04ogN1yaaBoQOnusS4= +github.com/sashabaranov/go-openai v1.20.0/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg= 
+github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasthttp v1.52.0 h1:wqBQpxH71XW0e2g+Og4dzQM8pk34aFYlA1Ga8db7gU0= +github.com/valyala/fasthttp v1.52.0/go.mod h1:hf5C4QnVMkNXMspnsUlfM3WitlgYflyhHYoKol/szxQ= +github.com/valyala/tcplisten v1.0.0 h1:rBHj/Xf+E1tRGZyWIWwJDiRY0zc1Js+CV5DqwacVSA8= +github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= +go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= +golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= +golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= +golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs= +golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= +golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= +golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.18.0 
h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= +golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= +gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/backend/internal/agent/classifier.go b/backend/internal/agent/classifier.go new file mode 100644 index 0000000..f180b2f --- /dev/null +++ b/backend/internal/agent/classifier.go @@ -0,0 +1,233 @@ +package agent + +import ( + "context" + "encoding/json" + "regexp" + "strings" + + "github.com/gooseek/backend/internal/llm" + "github.com/gooseek/backend/internal/prompts" +) + +type ClassificationResult struct { + StandaloneFollowUp string `json:"standaloneFollowUp"` + SkipSearch bool `json:"skipSearch"` + Topics []string `json:"topics,omitempty"` + QueryType string `json:"queryType,omitempty"` + Engines []string `json:"engines,omitempty"` +} + +func classify(ctx context.Context, client llm.Client, query string, history []llm.Message, locale, detectedLang string) (*ClassificationResult, error) { + prompt := prompts.GetClassifierPrompt(locale, detectedLang) + + historyStr := formatHistory(history) + userContent := "\n" + historyStr + "\nUser: " + query + "\n" + + messages := []llm.Message{ + {Role: llm.RoleSystem, Content: prompt}, + {Role: llm.RoleUser, Content: userContent}, + } + + response, err := client.GenerateText(ctx, llm.StreamRequest{ + Messages: messages, + Options: llm.StreamOptions{MaxTokens: 1024}, + }) + if err != nil { + return nil, err + } + + jsonMatch := regexp.MustCompile(`\{[\s\S]*\}`).FindString(response) + if jsonMatch == "" { + return 
&ClassificationResult{ + StandaloneFollowUp: query, + SkipSearch: false, + }, nil + } + + var result ClassificationResult + if err := json.Unmarshal([]byte(jsonMatch), &result); err != nil { + return &ClassificationResult{ + StandaloneFollowUp: query, + SkipSearch: false, + }, nil + } + + if result.StandaloneFollowUp == "" { + result.StandaloneFollowUp = query + } + + return &result, nil +} + +func fastClassify(query string, history []llm.Message) *ClassificationResult { + queryLower := strings.ToLower(query) + + skipPatterns := []string{ + "привет", "как дела", "спасибо", "пока", + "hello", "hi", "thanks", "bye", + "объясни", "расскажи подробнее", "что ты имеешь", + } + + skipSearch := false + for _, p := range skipPatterns { + if strings.Contains(queryLower, p) && len(query) < 50 { + skipSearch = true + break + } + } + + standalone := query + + if len(history) > 0 { + pronouns := []string{ + "это", "этот", "эта", "эти", + "он", "она", "оно", "они", + "it", "this", "that", "they", "them", + } + + hasPronouns := false + for _, p := range pronouns { + if strings.Contains(queryLower, p+" ") || strings.HasPrefix(queryLower, p+" ") { + hasPronouns = true + break + } + } + + if hasPronouns && len(history) >= 2 { + lastAssistant := "" + for i := len(history) - 1; i >= 0; i-- { + if history[i].Role == llm.RoleAssistant { + lastAssistant = history[i].Content + break + } + } + + if lastAssistant != "" { + topics := extractTopics(lastAssistant) + if len(topics) > 0 { + standalone = query + " (контекст: " + strings.Join(topics, ", ") + ")" + } + } + } + } + + engines := detectEngines(queryLower) + + return &ClassificationResult{ + StandaloneFollowUp: standalone, + SkipSearch: skipSearch, + Engines: engines, + } +} + +func generateSearchQueries(query string) []string { + queries := []string{query} + + if len(query) > 100 { + words := strings.Fields(query) + if len(words) > 5 { + queries = append(queries, strings.Join(words[:5], " ")) + } + } + + keywordPatterns := []string{ + 
"как", "что такое", "где", "когда", "почему", "кто", + "how", "what is", "where", "when", "why", "who", + } + + for _, p := range keywordPatterns { + if strings.HasPrefix(strings.ToLower(query), p) { + withoutPrefix := strings.TrimPrefix(strings.ToLower(query), p) + withoutPrefix = strings.TrimSpace(withoutPrefix) + if len(withoutPrefix) > 10 { + queries = append(queries, withoutPrefix) + } + break + } + } + + if len(queries) > 3 { + queries = queries[:3] + } + + return queries +} + +func detectEngines(query string) []string { + engines := []string{"google", "duckduckgo"} + + if strings.Contains(query, "новости") || strings.Contains(query, "news") { + engines = append(engines, "google_news") + } + + if strings.Contains(query, "видео") || strings.Contains(query, "video") { + engines = append(engines, "youtube") + } + + if strings.Contains(query, "товар") || strings.Contains(query, "купить") || + strings.Contains(query, "цена") || strings.Contains(query, "price") { + engines = append(engines, "google_shopping") + } + + return engines +} + +func extractTopics(text string) []string { + words := strings.Fields(text) + if len(words) > 50 { + words = words[:50] + } + + topics := make([]string, 0) + for _, w := range words { + if len(w) > 5 && len(w) < 20 { + r := []rune(w) + if len(r) > 0 && ((r[0] >= 'A' && r[0] <= 'Z') || (r[0] >= 'А' && r[0] <= 'Я')) { + topics = append(topics, w) + if len(topics) >= 3 { + break + } + } + } + } + + return topics +} + +func formatHistory(messages []llm.Message) string { + var sb strings.Builder + for _, m := range messages { + role := "User" + if m.Role == llm.RoleAssistant { + role = "Assistant" + } + sb.WriteString(role) + sb.WriteString(": ") + content := m.Content + if len(content) > 500 { + content = content[:500] + "..." 
+ } + sb.WriteString(content) + sb.WriteString("\n") + } + return sb.String() +} + +func detectLanguage(text string) string { + cyrillicCount := 0 + latinCount := 0 + + for _, r := range text { + if r >= 'а' && r <= 'я' || r >= 'А' && r <= 'Я' { + cyrillicCount++ + } else if r >= 'a' && r <= 'z' || r >= 'A' && r <= 'Z' { + latinCount++ + } + } + + if cyrillicCount > latinCount { + return "ru" + } + return "en" +} diff --git a/backend/internal/agent/deep_research.go b/backend/internal/agent/deep_research.go new file mode 100644 index 0000000..84f8307 --- /dev/null +++ b/backend/internal/agent/deep_research.go @@ -0,0 +1,543 @@ +package agent + +import ( + "context" + "fmt" + "strings" + "sync" + "time" + + "github.com/gooseek/backend/internal/llm" + "github.com/gooseek/backend/internal/search" + "github.com/gooseek/backend/internal/session" + "github.com/gooseek/backend/internal/types" + "github.com/google/uuid" + "golang.org/x/sync/errgroup" +) + +type DeepResearchConfig struct { + LLM llm.Client + SearchClient *search.SearXNGClient + FocusMode FocusMode + Locale string + MaxSearchQueries int + MaxSources int + MaxIterations int + Timeout time.Duration +} + +type DeepResearchResult struct { + FinalReport string + Sources []types.Chunk + SubQueries []SubQuery + Insights []string + FollowUpQueries []string + TotalSearches int + TotalSources int + Duration time.Duration +} + +type SubQuery struct { + Query string + Purpose string + Status string + Results []types.Chunk + Insights []string +} + +type DeepResearcher struct { + cfg DeepResearchConfig + sess *session.Session + mu sync.Mutex + allSources []types.Chunk + seenURLs map[string]bool + subQueries []SubQuery + insights []string + searchCount int + startTime time.Time +} + +func NewDeepResearcher(cfg DeepResearchConfig, sess *session.Session) *DeepResearcher { + if cfg.MaxSearchQueries == 0 { + cfg.MaxSearchQueries = 30 + } + if cfg.MaxSources == 0 { + cfg.MaxSources = 100 + } + if cfg.MaxIterations == 0 { + 
cfg.MaxIterations = 5 + } + if cfg.Timeout == 0 { + cfg.Timeout = 5 * time.Minute + } + + return &DeepResearcher{ + cfg: cfg, + sess: sess, + seenURLs: make(map[string]bool), + allSources: make([]types.Chunk, 0), + subQueries: make([]SubQuery, 0), + insights: make([]string, 0), + startTime: time.Now(), + } +} + +func (dr *DeepResearcher) Research(ctx context.Context, query string) (*DeepResearchResult, error) { + ctx, cancel := context.WithTimeout(ctx, dr.cfg.Timeout) + defer cancel() + + researchBlockID := uuid.New().String() + dr.sess.EmitBlock(&types.Block{ + ID: researchBlockID, + Type: types.BlockTypeResearch, + Data: types.ResearchData{ + SubSteps: []types.ResearchSubStep{}, + }, + }) + + subQueries, err := dr.planResearch(ctx, query) + if err != nil { + return nil, fmt.Errorf("planning failed: %w", err) + } + + dr.updateResearchStatus(researchBlockID, "researching", fmt.Sprintf("Executing %d sub-queries", len(subQueries))) + + for i := 0; i < dr.cfg.MaxIterations && dr.searchCount < dr.cfg.MaxSearchQueries; i++ { + if err := dr.executeIteration(ctx, i, researchBlockID); err != nil { + if ctx.Err() != nil { + break + } + } + + if dr.hasEnoughData() { + break + } + + newQueries, err := dr.generateFollowUpQueries(ctx, query) + if err != nil || len(newQueries) == 0 { + break + } + + for _, q := range newQueries { + dr.mu.Lock() + dr.subQueries = append(dr.subQueries, SubQuery{ + Query: q.Query, + Purpose: q.Purpose, + Status: "pending", + }) + dr.mu.Unlock() + } + } + + dr.updateResearchStatus(researchBlockID, "synthesizing", "Analyzing findings") + + insights, err := dr.synthesizeInsights(ctx, query) + if err != nil { + insights = dr.insights + } + + dr.updateResearchStatus(researchBlockID, "writing", "Generating report") + + report, err := dr.generateFinalReport(ctx, query, insights) + if err != nil { + return nil, fmt.Errorf("report generation failed: %w", err) + } + + followUp, _ := dr.generateFollowUpSuggestions(ctx, query, report) + + 
dr.updateResearchStatus(researchBlockID, "complete", "Research complete") + + return &DeepResearchResult{ + FinalReport: report, + Sources: dr.allSources, + SubQueries: dr.subQueries, + Insights: insights, + FollowUpQueries: followUp, + TotalSearches: dr.searchCount, + TotalSources: len(dr.allSources), + Duration: time.Since(dr.startTime), + }, nil +} + +func (dr *DeepResearcher) planResearch(ctx context.Context, query string) ([]SubQuery, error) { + prompt := fmt.Sprintf(`Analyze this research query and break it into 3-5 sub-queries for comprehensive research. + +Query: %s + +For each sub-query, specify: +1. The search query (optimized for search engines) +2. The purpose (what aspect it addresses) + +Respond in this exact format: +QUERY: [search query] +PURPOSE: [what this addresses] + +QUERY: [search query] +PURPOSE: [what this addresses] + +... + +Be specific and actionable. Focus on different aspects: definitions, current state, history, expert opinions, data/statistics, controversies, future trends.`, query) + + result, err := dr.cfg.LLM.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + if err != nil { + return dr.generateDefaultSubQueries(query), nil + } + + subQueries := dr.parseSubQueries(result) + if len(subQueries) == 0 { + subQueries = dr.generateDefaultSubQueries(query) + } + + dr.mu.Lock() + dr.subQueries = subQueries + dr.mu.Unlock() + + return subQueries, nil +} + +func (dr *DeepResearcher) parseSubQueries(text string) []SubQuery { + var queries []SubQuery + lines := strings.Split(text, "\n") + + var currentQuery, currentPurpose string + for _, line := range lines { + line = strings.TrimSpace(line) + if strings.HasPrefix(line, "QUERY:") { + if currentQuery != "" && currentPurpose != "" { + queries = append(queries, SubQuery{ + Query: currentQuery, + Purpose: currentPurpose, + Status: "pending", + }) + } + currentQuery = strings.TrimSpace(strings.TrimPrefix(line, "QUERY:")) + currentPurpose = "" + 
} else if strings.HasPrefix(line, "PURPOSE:") { + currentPurpose = strings.TrimSpace(strings.TrimPrefix(line, "PURPOSE:")) + } + } + + if currentQuery != "" && currentPurpose != "" { + queries = append(queries, SubQuery{ + Query: currentQuery, + Purpose: currentPurpose, + Status: "pending", + }) + } + + return queries +} + +func (dr *DeepResearcher) generateDefaultSubQueries(query string) []SubQuery { + return []SubQuery{ + {Query: query, Purpose: "Main query", Status: "pending"}, + {Query: query + " definition explained", Purpose: "Definitions and basics", Status: "pending"}, + {Query: query + " latest news 2026", Purpose: "Current developments", Status: "pending"}, + {Query: query + " expert analysis", Purpose: "Expert opinions", Status: "pending"}, + {Query: query + " statistics data research", Purpose: "Data and evidence", Status: "pending"}, + } +} + +func (dr *DeepResearcher) executeIteration(ctx context.Context, iteration int, blockID string) error { + dr.mu.Lock() + pendingQueries := make([]int, 0) + for i, sq := range dr.subQueries { + if sq.Status == "pending" { + pendingQueries = append(pendingQueries, i) + } + } + dr.mu.Unlock() + + if len(pendingQueries) == 0 { + return nil + } + + batchSize := 3 + if len(pendingQueries) < batchSize { + batchSize = len(pendingQueries) + } + + g, gctx := errgroup.WithContext(ctx) + g.SetLimit(batchSize) + + for _, idx := range pendingQueries[:batchSize] { + idx := idx + g.Go(func() error { + return dr.executeSubQuery(gctx, idx, blockID) + }) + } + + return g.Wait() +} + +func (dr *DeepResearcher) executeSubQuery(ctx context.Context, idx int, blockID string) error { + dr.mu.Lock() + if idx >= len(dr.subQueries) { + dr.mu.Unlock() + return nil + } + sq := &dr.subQueries[idx] + sq.Status = "searching" + query := sq.Query + dr.searchCount++ + dr.mu.Unlock() + + dr.updateResearchStatus(blockID, "researching", fmt.Sprintf("Searching: %s", truncate(query, 50))) + + enhancedQuery := EnhanceQueryForFocusMode(query, 
dr.cfg.FocusMode) + + results, err := dr.cfg.SearchClient.Search(ctx, enhancedQuery, &search.SearchOptions{ + Engines: dr.cfg.FocusMode.GetSearchEngines(), + Categories: FocusModeConfigs[dr.cfg.FocusMode].Categories, + PageNo: 1, + }) + if err != nil { + dr.mu.Lock() + sq.Status = "failed" + dr.mu.Unlock() + return err + } + + chunks := make([]types.Chunk, 0) + for _, r := range results.Results { + dr.mu.Lock() + if dr.seenURLs[r.URL] { + dr.mu.Unlock() + continue + } + dr.seenURLs[r.URL] = true + dr.mu.Unlock() + + chunk := r.ToChunk() + chunks = append(chunks, chunk) + + if len(chunks) >= 10 { + break + } + } + + dr.mu.Lock() + sq.Results = chunks + sq.Status = "complete" + dr.allSources = append(dr.allSources, chunks...) + dr.mu.Unlock() + + return nil +} + +func (dr *DeepResearcher) generateFollowUpQueries(ctx context.Context, originalQuery string) ([]SubQuery, error) { + if dr.searchCount >= dr.cfg.MaxSearchQueries-5 { + return nil, nil + } + + var sourceSummary strings.Builder + dr.mu.Lock() + for i, s := range dr.allSources { + if i >= 20 { + break + } + sourceSummary.WriteString(fmt.Sprintf("- %s: %s\n", s.Metadata["title"], truncate(s.Content, 100))) + } + dr.mu.Unlock() + + prompt := fmt.Sprintf(`Based on the original query and sources found so far, suggest 2-3 follow-up queries to deepen the research. + +Original query: %s + +Sources found so far: +%s + +What aspects are missing? What would provide more comprehensive coverage? 
+Respond with queries in format: +QUERY: [query] +PURPOSE: [what gap it fills]`, originalQuery, sourceSummary.String()) + + result, err := dr.cfg.LLM.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + if err != nil { + return nil, err + } + + return dr.parseSubQueries(result), nil +} + +func (dr *DeepResearcher) synthesizeInsights(ctx context.Context, query string) ([]string, error) { + var sourcesText strings.Builder + dr.mu.Lock() + for i, s := range dr.allSources { + if i >= 30 { + break + } + sourcesText.WriteString(fmt.Sprintf("[%d] %s\n%s\n\n", i+1, s.Metadata["title"], truncate(s.Content, 300))) + } + dr.mu.Unlock() + + prompt := fmt.Sprintf(`Analyze these sources and extract 5-7 key insights for the query: %s + +Sources: +%s + +Provide insights as bullet points, each starting with a key finding. +Focus on: main conclusions, patterns, contradictions, expert consensus, data points.`, query, sourcesText.String()) + + result, err := dr.cfg.LLM.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + if err != nil { + return nil, err + } + + insights := make([]string, 0) + for _, line := range strings.Split(result, "\n") { + line = strings.TrimSpace(line) + if strings.HasPrefix(line, "-") || strings.HasPrefix(line, "•") || strings.HasPrefix(line, "*") { + insights = append(insights, strings.TrimPrefix(strings.TrimPrefix(strings.TrimPrefix(line, "-"), "•"), "*")) + } + } + + dr.mu.Lock() + dr.insights = insights + dr.mu.Unlock() + + return insights, nil +} + +func (dr *DeepResearcher) generateFinalReport(ctx context.Context, query string, insights []string) (string, error) { + var sourcesText strings.Builder + dr.mu.Lock() + sources := dr.allSources + dr.mu.Unlock() + + for i, s := range sources { + if i >= 50 { + break + } + sourcesText.WriteString(fmt.Sprintf("[%d] %s (%s)\n%s\n\n", i+1, s.Metadata["title"], s.Metadata["url"], truncate(s.Content, 400))) + } + + 
insightsText := strings.Join(insights, "\n- ") + + focusCfg := FocusModeConfigs[dr.cfg.FocusMode] + locale := dr.cfg.Locale + if locale == "" { + locale = "en" + } + + langInstruction := "" + if locale == "ru" { + langInstruction = "Write the report in Russian." + } + + prompt := fmt.Sprintf(`%s + +Write a comprehensive research report answering: %s + +Key insights discovered: +- %s + +Sources (cite using [1], [2], etc.): +%s + +Structure your report with: +1. Executive Summary (2-3 sentences) +2. Key Findings (organized by theme) +3. Analysis and Discussion +4. Conclusions + +%s +Use citations [1], [2], etc. throughout. +Be thorough but concise. Focus on actionable information.`, focusCfg.SystemPrompt, query, insightsText, sourcesText.String(), langInstruction) + + stream, err := dr.cfg.LLM.StreamText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + if err != nil { + return "", err + } + + var report strings.Builder + textBlockID := uuid.New().String() + dr.sess.EmitBlock(&types.Block{ + ID: textBlockID, + Type: types.BlockTypeText, + Data: "", + }) + + for chunk := range stream { + report.WriteString(chunk.ContentChunk) + dr.sess.EmitTextChunk(textBlockID, chunk.ContentChunk) + } + + return report.String(), nil +} + +func (dr *DeepResearcher) generateFollowUpSuggestions(ctx context.Context, query, report string) ([]string, error) { + prompt := fmt.Sprintf(`Based on this research query and report, suggest 3-4 follow-up questions the user might want to explore: + +Query: %s + +Report summary: %s + +Provide follow-up questions that: +1. Go deeper into specific aspects +2. Explore related topics +3. Address practical applications +4. 
Consider alternative perspectives + +Format as simple questions, one per line.`, query, truncate(report, 1000)) + + result, err := dr.cfg.LLM.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + if err != nil { + return nil, err + } + + suggestions := make([]string, 0) + for _, line := range strings.Split(result, "\n") { + line = strings.TrimSpace(line) + if line != "" && (strings.Contains(line, "?") || len(line) > 20) { + line = strings.TrimPrefix(line, "- ") + line = strings.TrimPrefix(line, "• ") + line = strings.TrimLeft(line, "0123456789. ") + if line != "" { + suggestions = append(suggestions, line) + } + } + } + + if len(suggestions) > 4 { + suggestions = suggestions[:4] + } + + return suggestions, nil +} + +func (dr *DeepResearcher) updateResearchStatus(blockID, status, message string) { + dr.sess.UpdateBlock(blockID, []session.Patch{ + {Op: "replace", Path: "/data/status", Value: status}, + {Op: "replace", Path: "/data/message", Value: message}, + }) +} + +func (dr *DeepResearcher) hasEnoughData() bool { + dr.mu.Lock() + defer dr.mu.Unlock() + return len(dr.allSources) >= dr.cfg.MaxSources +} + +func truncate(s string, maxLen int) string { + if len(s) <= maxLen { + return s + } + return s[:maxLen] + "..." 
+} + +func RunDeepResearch(ctx context.Context, sess *session.Session, query string, cfg DeepResearchConfig) (*DeepResearchResult, error) { + researcher := NewDeepResearcher(cfg, sess) + return researcher.Research(ctx, query) +} diff --git a/backend/internal/agent/focus_modes.go b/backend/internal/agent/focus_modes.go new file mode 100644 index 0000000..a9f8897 --- /dev/null +++ b/backend/internal/agent/focus_modes.go @@ -0,0 +1,293 @@ +package agent + +import ( + "strings" +) + +type FocusMode string + +const ( + FocusModeAll FocusMode = "all" + FocusModeAcademic FocusMode = "academic" + FocusModeWriting FocusMode = "writing" + FocusModeYouTube FocusMode = "youtube" + FocusModeReddit FocusMode = "reddit" + FocusModeCode FocusMode = "code" + FocusModeNews FocusMode = "news" + FocusModeImages FocusMode = "images" + FocusModeMath FocusMode = "math" + FocusModeFinance FocusMode = "finance" +) + +type FocusModeConfig struct { + Mode FocusMode + Engines []string + Categories []string + SystemPrompt string + SearchQueryPrefix string + MaxSources int + RequiresCitation bool + AllowScraping bool +} + +var FocusModeConfigs = map[FocusMode]FocusModeConfig{ + FocusModeAll: { + Mode: FocusModeAll, + Engines: []string{"google", "bing", "duckduckgo"}, + Categories: []string{"general"}, + MaxSources: 15, + RequiresCitation: true, + AllowScraping: true, + SystemPrompt: `You are a helpful AI assistant that provides comprehensive answers based on web search results. +Always cite your sources using [1], [2], etc. format. +Provide balanced, accurate information from multiple perspectives.`, + }, + FocusModeAcademic: { + Mode: FocusModeAcademic, + Engines: []string{"google scholar", "arxiv", "pubmed", "semantic scholar"}, + Categories: []string{"science"}, + SearchQueryPrefix: "research paper", + MaxSources: 20, + RequiresCitation: true, + AllowScraping: true, + SystemPrompt: `You are an academic research assistant specializing in scholarly sources. 
+Focus on peer-reviewed papers, academic journals, and reputable research institutions. +Always cite sources in academic format with [1], [2], etc. +Distinguish between primary research, meta-analyses, and review articles. +Mention publication dates, authors, and journals when available. +Be precise about confidence levels and note when findings are preliminary or contested.`, + }, + FocusModeWriting: { + Mode: FocusModeWriting, + Engines: []string{"google"}, + Categories: []string{"general"}, + MaxSources: 5, + RequiresCitation: false, + AllowScraping: false, + SystemPrompt: `You are a creative writing assistant. +Help with drafting, editing, and improving written content. +Provide suggestions for style, tone, structure, and clarity. +Offer multiple variations when appropriate. +Focus on the user's voice and intent rather than web search results.`, + }, + FocusModeYouTube: { + Mode: FocusModeYouTube, + Engines: []string{"youtube"}, + Categories: []string{"videos"}, + SearchQueryPrefix: "site:youtube.com", + MaxSources: 10, + RequiresCitation: true, + AllowScraping: false, + SystemPrompt: `You are a video content assistant focused on YouTube. +Summarize video content, recommend relevant videos, and help find tutorials. +Mention video titles, channels, and approximate timestamps when relevant. +Note view counts and upload dates to indicate video popularity and relevance.`, + }, + FocusModeReddit: { + Mode: FocusModeReddit, + Engines: []string{"reddit"}, + Categories: []string{"social media"}, + SearchQueryPrefix: "site:reddit.com", + MaxSources: 15, + RequiresCitation: true, + AllowScraping: true, + SystemPrompt: `You are an assistant that specializes in Reddit discussions and community knowledge. +Focus on highly upvoted comments and posts from relevant subreddits. +Note the subreddit source, upvote counts, and community consensus. +Distinguish between personal opinions, experiences, and factual claims. 
+Be aware of potential biases in specific communities.`, + }, + FocusModeCode: { + Mode: FocusModeCode, + Engines: []string{"google", "github", "stackoverflow"}, + Categories: []string{"it"}, + SearchQueryPrefix: "", + MaxSources: 10, + RequiresCitation: true, + AllowScraping: true, + SystemPrompt: `You are a programming assistant focused on code, documentation, and technical solutions. +Provide working code examples with explanations. +Reference official documentation, Stack Overflow answers, and GitHub repositories. +Mention library versions and compatibility considerations. +Follow best practices and coding standards for the relevant language/framework. +Include error handling and edge cases in code examples.`, + }, + FocusModeNews: { + Mode: FocusModeNews, + Engines: []string{"google news", "bing news"}, + Categories: []string{"news"}, + MaxSources: 12, + RequiresCitation: true, + AllowScraping: true, + SystemPrompt: `You are a news assistant that provides current events information. +Focus on recent, verified news from reputable sources. +Distinguish between breaking news, analysis, and opinion pieces. +Note publication dates and source credibility. +Present multiple perspectives on controversial topics.`, + }, + FocusModeImages: { + Mode: FocusModeImages, + Engines: []string{"google images", "bing images"}, + Categories: []string{"images"}, + MaxSources: 20, + RequiresCitation: true, + AllowScraping: false, + SystemPrompt: `You are an image search assistant. +Help find relevant images, describe image sources, and provide context. +Note image sources, licenses, and quality when relevant.`, + }, + FocusModeMath: { + Mode: FocusModeMath, + Engines: []string{"wolfram alpha", "google"}, + Categories: []string{"science"}, + MaxSources: 5, + RequiresCitation: true, + AllowScraping: false, + SystemPrompt: `You are a mathematical problem-solving assistant. +Provide step-by-step solutions with clear explanations. +Use proper mathematical notation and formatting. 
+Show your work and explain the reasoning behind each step. +Mention relevant theorems, formulas, and mathematical concepts. +Verify your calculations and provide alternative solution methods when applicable.`, + }, + FocusModeFinance: { + Mode: FocusModeFinance, + Engines: []string{"google", "google finance", "yahoo finance"}, + Categories: []string{"news"}, + SearchQueryPrefix: "stock market finance", + MaxSources: 10, + RequiresCitation: true, + AllowScraping: true, + SystemPrompt: `You are a financial information assistant. +Provide accurate financial data, market analysis, and investment information. +Note that you cannot provide personalized financial advice. +Cite data sources and note when data may be delayed or historical. +Include relevant disclaimers about investment risks. +Reference SEC filings, analyst reports, and official company statements.`, + }, +} + +func GetFocusModeConfig(mode string) FocusModeConfig { + fm := FocusMode(strings.ToLower(mode)) + if cfg, ok := FocusModeConfigs[fm]; ok { + return cfg + } + return FocusModeConfigs[FocusModeAll] +} + +func DetectFocusMode(query string) FocusMode { + queryLower := strings.ToLower(query) + + academicKeywords := []string{ + "research", "paper", "study", "journal", "scientific", "academic", + "peer-reviewed", "citation", "исследование", "научн", "статья", + "публикация", "диссертация", + } + for _, kw := range academicKeywords { + if strings.Contains(queryLower, kw) { + return FocusModeAcademic + } + } + + codeKeywords := []string{ + "code", "programming", "function", "error", "bug", "api", + "library", "framework", "syntax", "compile", "debug", + "код", "программ", "функция", "ошибка", "библиотека", + "golang", "python", "javascript", "typescript", "react", "vue", + "docker", "kubernetes", "sql", "database", "git", + } + for _, kw := range codeKeywords { + if strings.Contains(queryLower, kw) { + return FocusModeCode + } + } + + if strings.Contains(queryLower, "youtube") || + 
strings.Contains(queryLower, "video tutorial") || + strings.Contains(queryLower, "видео") { + return FocusModeYouTube + } + + if strings.Contains(queryLower, "reddit") || + strings.Contains(queryLower, "subreddit") || + strings.Contains(queryLower, "/r/") { + return FocusModeReddit + } + + mathKeywords := []string{ + "calculate", "solve", "equation", "integral", "derivative", + "formula", "theorem", "proof", "вычисл", "решить", "уравнение", + "интеграл", "производная", "формула", "теорема", + } + for _, kw := range mathKeywords { + if strings.Contains(queryLower, kw) { + return FocusModeMath + } + } + + financeKeywords := []string{ + "stock", "market", "invest", "price", "trading", "finance", + "акци", "рынок", "инвест", "биржа", "котировк", "финанс", + "etf", "dividend", "portfolio", + } + for _, kw := range financeKeywords { + if strings.Contains(queryLower, kw) { + return FocusModeFinance + } + } + + newsKeywords := []string{ + "news", "today", "latest", "breaking", "current events", + "новост", "сегодня", "последн", "актуальн", + } + for _, kw := range newsKeywords { + if strings.Contains(queryLower, kw) { + return FocusModeNews + } + } + + return FocusModeAll +} + +func (f FocusMode) GetSearchEngines() []string { + if cfg, ok := FocusModeConfigs[f]; ok { + return cfg.Engines + } + return FocusModeConfigs[FocusModeAll].Engines +} + +func (f FocusMode) GetSystemPrompt() string { + if cfg, ok := FocusModeConfigs[f]; ok { + return cfg.SystemPrompt + } + return FocusModeConfigs[FocusModeAll].SystemPrompt +} + +func (f FocusMode) GetMaxSources() int { + if cfg, ok := FocusModeConfigs[f]; ok { + return cfg.MaxSources + } + return 15 +} + +func (f FocusMode) RequiresCitation() bool { + if cfg, ok := FocusModeConfigs[f]; ok { + return cfg.RequiresCitation + } + return true +} + +func (f FocusMode) AllowsScraping() bool { + if cfg, ok := FocusModeConfigs[f]; ok { + return cfg.AllowScraping + } + return true +} + +func EnhanceQueryForFocusMode(query string, mode 
FocusMode) string { + cfg := FocusModeConfigs[mode] + if cfg.SearchQueryPrefix != "" { + return cfg.SearchQueryPrefix + " " + query + } + return query +} diff --git a/backend/internal/agent/orchestrator.go b/backend/internal/agent/orchestrator.go new file mode 100644 index 0000000..31ac82b --- /dev/null +++ b/backend/internal/agent/orchestrator.go @@ -0,0 +1,950 @@ +package agent + +import ( + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "regexp" + "strings" + "time" + + "github.com/gooseek/backend/internal/llm" + "github.com/gooseek/backend/internal/prompts" + "github.com/gooseek/backend/internal/search" + "github.com/gooseek/backend/internal/session" + "github.com/gooseek/backend/internal/types" + "github.com/google/uuid" + "golang.org/x/sync/errgroup" +) + +type Mode string + +const ( + ModeSpeed Mode = "speed" + ModeBalanced Mode = "balanced" + ModeQuality Mode = "quality" +) + +type OrchestratorConfig struct { + LLM llm.Client + SearchClient *search.SearXNGClient + Mode Mode + FocusMode FocusMode + Sources []string + FileIDs []string + FileContext string + CollectionID string + CollectionContext string + SystemInstructions string + Locale string + MemoryContext string + UserMemory string + AnswerMode string + ResponsePrefs *ResponsePrefs + LearningMode bool + EnableDeepResearch bool + EnableClarifying bool + DiscoverSvcURL string + Crawl4AIURL string + CollectionSvcURL string + FileSvcURL string +} + +type DigestResponse struct { + SummaryRu string `json:"summaryRu"` + Citations []DigestCitation `json:"citations"` + FollowUp []string `json:"followUp"` + SourcesCount int `json:"sourcesCount"` + ClusterTitle string `json:"clusterTitle"` +} + +type DigestCitation struct { + Index int `json:"index"` + URL string `json:"url"` + Title string `json:"title"` + Domain string `json:"domain"` +} + +type PreScrapedArticle struct { + Title string + Content string + URL string +} + +type ResponsePrefs struct { + Format string 
`json:"format,omitempty"` + Length string `json:"length,omitempty"` + Tone string `json:"tone,omitempty"` +} + +type OrchestratorInput struct { + ChatHistory []llm.Message + FollowUp string + Config OrchestratorConfig +} + +func RunOrchestrator(ctx context.Context, sess *session.Session, input OrchestratorInput) error { + detectedLang := detectLanguage(input.FollowUp) + isArticleSummary := strings.HasPrefix(strings.TrimSpace(input.FollowUp), "Summary: ") + + if input.Config.FocusMode == "" { + input.Config.FocusMode = DetectFocusMode(input.FollowUp) + } + + if input.Config.EnableDeepResearch && input.Config.Mode == ModeQuality { + return runDeepResearchMode(ctx, sess, input, detectedLang) + } + + if input.Config.Mode == ModeSpeed && !isArticleSummary { + return runSpeedMode(ctx, sess, input, detectedLang) + } + + return runFullMode(ctx, sess, input, detectedLang, isArticleSummary) +} + +func runDeepResearchMode(ctx context.Context, sess *session.Session, input OrchestratorInput, lang string) error { + sess.EmitBlock(types.NewResearchBlock(uuid.New().String())) + + researcher := NewDeepResearcher(DeepResearchConfig{ + LLM: input.Config.LLM, + SearchClient: input.Config.SearchClient, + FocusMode: input.Config.FocusMode, + Locale: input.Config.Locale, + MaxSearchQueries: 30, + MaxSources: 100, + MaxIterations: 5, + Timeout: 5 * time.Minute, + }, sess) + + result, err := researcher.Research(ctx, input.FollowUp) + if err != nil { + sess.EmitError(err) + return err + } + + sess.EmitBlock(types.NewSourceBlock(uuid.New().String(), result.Sources)) + + if len(result.FollowUpQueries) > 0 { + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), "related_questions", map[string]interface{}{ + "questions": result.FollowUpQueries, + })) + } + + sess.EmitResearchComplete() + sess.EmitEnd() + + return nil +} + +func generateClarifyingQuestions(ctx context.Context, llmClient llm.Client, query string) ([]string, error) { + prompt := fmt.Sprintf(`Analyze this query and determine 
if clarifying questions would help provide a better answer. + +Query: %s + +If the query is: +- Clear and specific → respond with "CLEAR" +- Ambiguous or could benefit from clarification → provide 2-3 short clarifying questions + +Format: +CLEAR +or +QUESTION: [question 1] +QUESTION: [question 2] +QUESTION: [question 3]`, query) + + result, err := llmClient.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + if err != nil { + return nil, err + } + + if strings.Contains(strings.ToUpper(result), "CLEAR") { + return nil, nil + } + + var questions []string + for _, line := range strings.Split(result, "\n") { + line = strings.TrimSpace(line) + if strings.HasPrefix(line, "QUESTION:") { + q := strings.TrimSpace(strings.TrimPrefix(line, "QUESTION:")) + if q != "" { + questions = append(questions, q) + } + } + } + + return questions, nil +} + +func generateRelatedQuestions(ctx context.Context, llmClient llm.Client, query, answer string, locale string) []string { + langInstruction := "" + if locale == "ru" { + langInstruction = "Generate questions in Russian." + } + + prompt := fmt.Sprintf(`Based on this query and answer, generate 3-4 related follow-up questions the user might want to explore. 
+ +Query: %s + +Answer summary: %s + +%s +Format: One question per line, no numbering or bullets.`, query, truncateForPrompt(answer, 500), langInstruction) + + result, err := llmClient.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + if err != nil { + return nil + } + + var questions []string + for _, line := range strings.Split(result, "\n") { + line = strings.TrimSpace(line) + if line != "" && len(line) > 10 && strings.Contains(line, "?") { + line = strings.TrimLeft(line, "0123456789.-•* ") + if line != "" { + questions = append(questions, line) + } + } + } + + if len(questions) > 4 { + questions = questions[:4] + } + + return questions +} + +func truncateForPrompt(s string, maxLen int) string { + if len(s) <= maxLen { + return s + } + return s[:maxLen] + "..." +} + +func buildEnhancedContext(input OrchestratorInput) string { + var ctx strings.Builder + + if input.Config.UserMemory != "" { + ctx.WriteString("## User Preferences\n") + ctx.WriteString(input.Config.UserMemory) + ctx.WriteString("\n\n") + } + + if input.Config.CollectionContext != "" { + ctx.WriteString("## Collection Context\n") + ctx.WriteString(input.Config.CollectionContext) + ctx.WriteString("\n\n") + } + + if input.Config.FileContext != "" { + ctx.WriteString("## Uploaded Files Content\n") + ctx.WriteString(input.Config.FileContext) + ctx.WriteString("\n\n") + } + + if input.Config.MemoryContext != "" { + ctx.WriteString("## Previous Context\n") + ctx.WriteString(input.Config.MemoryContext) + ctx.WriteString("\n\n") + } + + return ctx.String() +} + +func fetchPreGeneratedDigest(ctx context.Context, discoverURL, articleURL string) (*DigestResponse, error) { + if discoverURL == "" { + return nil, nil + } + + reqURL := fmt.Sprintf("%s/api/v1/discover/digest?url=%s", + strings.TrimSuffix(discoverURL, "/"), + url.QueryEscape(articleURL)) + + req, err := http.NewRequestWithContext(ctx, "GET", reqURL, nil) + if err != nil { + return nil, err + } + 
+ client := &http.Client{Timeout: 3 * time.Second} + resp, err := client.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, nil + } + + var digest DigestResponse + if err := json.NewDecoder(resp.Body).Decode(&digest); err != nil { + return nil, err + } + + if digest.SummaryRu != "" && len(digest.Citations) > 0 { + return &digest, nil + } + + return nil, nil +} + +func preScrapeArticleURL(ctx context.Context, crawl4aiURL, articleURL string) (*PreScrapedArticle, error) { + if crawl4aiURL != "" { + article, err := scrapeWithCrawl4AI(ctx, crawl4aiURL, articleURL) + if err == nil && article != nil { + return article, nil + } + } + + return scrapeDirectly(ctx, articleURL) +} + +func scrapeWithCrawl4AI(ctx context.Context, crawl4aiURL, articleURL string) (*PreScrapedArticle, error) { + reqBody := fmt.Sprintf(`{ + "urls": ["%s"], + "crawler_config": { + "type": "CrawlerRunConfig", + "params": { + "cache_mode": "default", + "page_timeout": 20000 + } + } + }`, articleURL) + + req, err := http.NewRequestWithContext(ctx, "POST", crawl4aiURL+"/crawl", strings.NewReader(reqBody)) + if err != nil { + return nil, err + } + req.Header.Set("Content-Type", "application/json") + + client := &http.Client{Timeout: 25 * time.Second} + resp, err := client.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("Crawl4AI returned status %d", resp.StatusCode) + } + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + markdown := extractMarkdownFromCrawl4AI(string(body)) + title := extractTitleFromCrawl4AI(string(body)) + + if len(markdown) > 100 { + content := markdown + if len(content) > 15000 { + content = content[:15000] + } + return &PreScrapedArticle{ + Title: title, + Content: content, + URL: articleURL, + }, nil + } + + return nil, fmt.Errorf("insufficient content from Crawl4AI") +} + +func 

// NOTE(review): the HTML tag literals in these regexes and in
// extractTextContent were mangled in the reviewed artifact (angle-bracket
// content stripped; `bodyEnd` was used but never declared, which does not
// compile). They are reconstructed below to match the evident intent:
// title extraction, script/style removal, tag stripping, space collapsing.
var (
	titleRegex  = regexp.MustCompile(`(?i)<title[^>]*>([^<]+)</title>`)
	scriptRegex = regexp.MustCompile(`(?is)<script[^>]*>.*?</script>`)
	styleRegex  = regexp.MustCompile(`(?is)<style[^>]*>.*?</style>`)
	tagRegex    = regexp.MustCompile(`<[^>]+>`)
	spaceRegex  = regexp.MustCompile(`\s+`)
)

// extractHTMLTitle returns the text of the first <title> element, or "".
func extractHTMLTitle(html string) string {
	matches := titleRegex.FindStringSubmatch(html)
	if len(matches) > 1 {
		return strings.TrimSpace(matches[1])
	}
	return ""
}

// extractTextContent converts an HTML document to whitespace-normalized
// plain text: narrows to the <body> element when present, drops script and
// style blocks, replaces remaining tags with spaces, and collapses runs of
// whitespace.
func extractTextContent(html string) string {
	lower := strings.ToLower(html)
	bodyStart := strings.Index(lower, "<body")
	bodyEnd := strings.Index(lower, "</body>")

	if bodyStart != -1 && bodyEnd != -1 && bodyEnd > bodyStart {
		html = html[bodyStart:bodyEnd]
	}

	html = scriptRegex.ReplaceAllString(html, "")
	html = styleRegex.ReplaceAllString(html, "")
	html = tagRegex.ReplaceAllString(html, " ")
	html = spaceRegex.ReplaceAllString(html, " ")

	return strings.TrimSpace(html)
}

// extractMarkdownFromCrawl4AI pulls the "raw_markdown" value out of a
// Crawl4AI JSON response with a naive scan. It stops at the first '"' after
// the value starts, so escaped quotes inside the markdown cut it short —
// acceptable for the best-effort pre-scrape path.
func extractMarkdownFromCrawl4AI(response string) string {
	if idx := strings.Index(response, `"raw_markdown"`); idx != -1 {
		start := idx + len(`"raw_markdown"`)
		if colonIdx := strings.Index(response[start:], ":"); colonIdx != -1 {
			start += colonIdx + 1
			for start < len(response) && (response[start] == ' ' || response[start] == '"') {
				start++
			}
			end := strings.Index(response[start:], `"`)
			if end > 0 {
				return response[start : start+end]
			}
		}
	}
	return ""
}
strings.Index(response[start:], ":"); colonIdx != -1 { + start += colonIdx + 1 + for start < len(response) && (response[start] == ' ' || response[start] == '"') { + start++ + } + end := strings.Index(response[start:], `"`) + if end > 0 { + return response[start : start+end] + } + } + } + return "" +} + +func extractTitleFromCrawl4AI(response string) string { + if idx := strings.Index(response, `"title"`); idx != -1 { + start := idx + len(`"title"`) + if colonIdx := strings.Index(response[start:], ":"); colonIdx != -1 { + start += colonIdx + 1 + for start < len(response) && (response[start] == ' ' || response[start] == '"') { + start++ + } + end := strings.Index(response[start:], `"`) + if end > 0 { + return response[start : start+end] + } + } + } + return "" +} + +func runSpeedMode(ctx context.Context, sess *session.Session, input OrchestratorInput, detectedLang string) error { + classification := fastClassify(input.FollowUp, input.ChatHistory) + searchQuery := classification.StandaloneFollowUp + if searchQuery == "" { + searchQuery = input.FollowUp + } + queries := generateSearchQueries(searchQuery) + + researchBlockID := uuid.New().String() + sess.EmitBlock(types.NewResearchBlock(researchBlockID)) + + var searchResults []types.Chunk + var mediaResult *search.MediaSearchResult + + g, gctx := errgroup.WithContext(ctx) + + g.Go(func() error { + results, err := parallelSearch(gctx, input.Config.SearchClient, queries) + if err != nil { + return nil + } + searchResults = results + return nil + }) + + g.Go(func() error { + result, err := input.Config.SearchClient.SearchMedia(gctx, searchQuery, &search.MediaSearchOptions{ + MaxImages: 6, + MaxVideos: 4, + }) + if err != nil { + return nil + } + mediaResult = result + return nil + }) + + _ = g.Wait() + + if len(searchResults) > 0 { + sess.EmitBlock(types.NewSourceBlock(uuid.New().String(), searchResults)) + } + + if mediaResult != nil { + if len(mediaResult.Images) > 0 { + 
sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), "image_gallery", map[string]interface{}{ + "images": mediaResult.Images, + "layout": "carousel", + })) + } + if len(mediaResult.Videos) > 0 { + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), "videos", map[string]interface{}{ + "items": mediaResult.Videos, + "title": "", + })) + } + } + + sess.EmitResearchComplete() + + queryComplexity := search.EstimateQueryComplexity(searchQuery) + adaptiveTopK := search.ComputeAdaptiveTopK(len(searchResults), queryComplexity, "speed") + rankedResults := search.RerankBM25(searchResults, searchQuery, adaptiveTopK) + + finalContext := buildContext(rankedResults, 15, 250) + + writerPrompt := prompts.GetWriterPrompt(prompts.WriterConfig{ + Context: finalContext, + SystemInstructions: input.Config.SystemInstructions, + Mode: string(input.Config.Mode), + Locale: input.Config.Locale, + MemoryContext: input.Config.MemoryContext, + AnswerMode: input.Config.AnswerMode, + DetectedLanguage: detectedLang, + IsArticleSummary: false, + }) + + messages := []llm.Message{ + {Role: llm.RoleSystem, Content: writerPrompt}, + } + messages = append(messages, input.ChatHistory...) 
+ messages = append(messages, llm.Message{Role: llm.RoleUser, Content: input.FollowUp}) + + return streamResponse(ctx, sess, input.Config.LLM, messages, 2048, input.FollowUp, input.Config.Locale) +} + +func runFullMode(ctx context.Context, sess *session.Session, input OrchestratorInput, detectedLang string, isArticleSummary bool) error { + if input.Config.EnableClarifying && !isArticleSummary && input.Config.Mode == ModeQuality { + clarifying, err := generateClarifyingQuestions(ctx, input.Config.LLM, input.FollowUp) + if err == nil && len(clarifying) > 0 { + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), "clarifying", map[string]interface{}{ + "questions": clarifying, + "query": input.FollowUp, + })) + return nil + } + } + + enhancedContext := buildEnhancedContext(input) + if enhancedContext != "" { + input.Config.MemoryContext = enhancedContext + input.Config.MemoryContext + } + + var preScrapedArticle *PreScrapedArticle + var articleURL string + + if isArticleSummary { + articleURL = strings.TrimSpace(strings.TrimPrefix(strings.TrimSpace(input.FollowUp), "Summary: ")) + + digestCtx, digestCancel := context.WithTimeout(ctx, 3*time.Second) + scrapeCtx, scrapeCancel := context.WithTimeout(ctx, 25*time.Second) + + digestCh := make(chan *DigestResponse, 1) + scrapeCh := make(chan *PreScrapedArticle, 1) + + go func() { + defer digestCancel() + digest, _ := fetchPreGeneratedDigest(digestCtx, input.Config.DiscoverSvcURL, articleURL) + digestCh <- digest + }() + + go func() { + defer scrapeCancel() + article, _ := preScrapeArticleURL(scrapeCtx, input.Config.Crawl4AIURL, articleURL) + scrapeCh <- article + }() + + digest := <-digestCh + preScrapedArticle = <-scrapeCh + + if digest != nil { + chunks := make([]types.Chunk, len(digest.Citations)) + for i, c := range digest.Citations { + chunks[i] = types.Chunk{ + Content: c.Title, + Metadata: map[string]string{ + "url": c.URL, + "title": c.Title, + "domain": c.Domain, + }, + } + } + 
sess.EmitBlock(types.NewSourceBlock(uuid.New().String(), chunks)) + sess.EmitResearchComplete() + + summaryText := digest.SummaryRu + if len(digest.FollowUp) > 0 { + summaryText += "\n\n---\n" + for _, q := range digest.FollowUp { + summaryText += "> " + q + "\n" + } + } + sess.EmitBlock(types.NewTextBlock(uuid.New().String(), summaryText)) + sess.EmitEnd() + return nil + } + } + + classification, err := classify(ctx, input.Config.LLM, input.FollowUp, input.ChatHistory, input.Config.Locale, detectedLang) + if err != nil { + classification = &ClassificationResult{ + StandaloneFollowUp: input.FollowUp, + SkipSearch: false, + } + } + + if isArticleSummary && classification.SkipSearch { + classification.SkipSearch = false + } + + g, gctx := errgroup.WithContext(ctx) + + var searchResults []types.Chunk + var mediaResult *search.MediaSearchResult + + mediaQuery := classification.StandaloneFollowUp + if mediaQuery == "" { + mediaQuery = input.FollowUp + } + + effectiveFollowUp := input.FollowUp + if isArticleSummary && preScrapedArticle != nil && preScrapedArticle.Title != "" { + effectiveFollowUp = fmt.Sprintf("Summary: %s\nArticle title: %s", preScrapedArticle.URL, preScrapedArticle.Title) + if classification.StandaloneFollowUp != "" { + classification.StandaloneFollowUp = preScrapedArticle.Title + " " + classification.StandaloneFollowUp + } else { + classification.StandaloneFollowUp = preScrapedArticle.Title + } + } + + if !classification.SkipSearch { + g.Go(func() error { + results, err := research(gctx, sess, input.Config.LLM, input.Config.SearchClient, ResearchInput{ + ChatHistory: input.ChatHistory, + FollowUp: effectiveFollowUp, + Classification: classification, + Mode: input.Config.Mode, + Sources: input.Config.Sources, + Locale: input.Config.Locale, + DetectedLang: detectedLang, + IsArticleSummary: isArticleSummary, + }) + if err != nil { + return nil + } + searchResults = results + return nil + }) + } + + if !isArticleSummary { + g.Go(func() error { + result, 
err := input.Config.SearchClient.SearchMedia(gctx, mediaQuery, &search.MediaSearchOptions{ + MaxImages: 8, + MaxVideos: 6, + }) + if err != nil { + return nil + } + mediaResult = result + return nil + }) + } + + _ = g.Wait() + + if isArticleSummary && preScrapedArticle != nil { + alreadyHasURL := false + for _, r := range searchResults { + if strings.Contains(r.Metadata["url"], preScrapedArticle.URL) { + alreadyHasURL = true + break + } + } + if !alreadyHasURL { + prependChunk := types.Chunk{ + Content: preScrapedArticle.Content, + Metadata: map[string]string{ + "url": preScrapedArticle.URL, + "title": preScrapedArticle.Title, + }, + } + searchResults = append([]types.Chunk{prependChunk}, searchResults...) + } + } + + if len(searchResults) > 0 { + sess.EmitBlock(types.NewSourceBlock(uuid.New().String(), searchResults)) + } + + if mediaResult != nil { + if len(mediaResult.Images) > 0 { + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), "image_gallery", map[string]interface{}{ + "images": mediaResult.Images, + "layout": "carousel", + })) + } + if len(mediaResult.Videos) > 0 { + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), "videos", map[string]interface{}{ + "items": mediaResult.Videos, + "title": "", + })) + } + } + + sess.EmitResearchComplete() + + maxResults := 25 + maxContent := 320 + if isArticleSummary { + maxResults = 30 + maxContent = 2000 + } + + rankedResults := rankByRelevance(searchResults, input.FollowUp) + if len(rankedResults) > maxResults { + rankedResults = rankedResults[:maxResults] + } + + finalContext := buildContext(rankedResults, maxResults, maxContent) + + writerPrompt := prompts.GetWriterPrompt(prompts.WriterConfig{ + Context: finalContext, + SystemInstructions: input.Config.SystemInstructions, + Mode: string(input.Config.Mode), + Locale: input.Config.Locale, + MemoryContext: input.Config.MemoryContext, + AnswerMode: input.Config.AnswerMode, + DetectedLanguage: detectedLang, + IsArticleSummary: isArticleSummary, + }) + + 
messages := []llm.Message{ + {Role: llm.RoleSystem, Content: writerPrompt}, + } + messages = append(messages, input.ChatHistory...) + messages = append(messages, llm.Message{Role: llm.RoleUser, Content: input.FollowUp}) + + maxTokens := 4096 + return streamResponse(ctx, sess, input.Config.LLM, messages, maxTokens, input.FollowUp, input.Config.Locale) +} + +func streamResponse(ctx context.Context, sess *session.Session, client llm.Client, messages []llm.Message, maxTokens int, query string, locale string) error { + stream, err := client.StreamText(ctx, llm.StreamRequest{ + Messages: messages, + Options: llm.StreamOptions{MaxTokens: maxTokens}, + }) + if err != nil { + return err + } + + var responseBlockID string + var accumulatedText string + + for chunk := range stream { + if chunk.ContentChunk == "" && responseBlockID == "" { + continue + } + + if responseBlockID == "" { + responseBlockID = uuid.New().String() + accumulatedText = chunk.ContentChunk + sess.EmitBlock(types.NewTextBlock(responseBlockID, accumulatedText)) + } else if chunk.ContentChunk != "" { + accumulatedText += chunk.ContentChunk + sess.EmitTextChunk(responseBlockID, chunk.ContentChunk) + } + } + + if responseBlockID != "" { + sess.UpdateBlock(responseBlockID, []session.Patch{ + {Op: "replace", Path: "/data", Value: accumulatedText}, + }) + } + + go func() { + relatedCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + related := generateRelatedQuestions(relatedCtx, client, query, accumulatedText, locale) + if len(related) > 0 { + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), "related_questions", map[string]interface{}{ + "questions": related, + })) + } + }() + + sess.EmitEnd() + return nil +} + +func parallelSearch(ctx context.Context, client *search.SearXNGClient, queries []string) ([]types.Chunk, error) { + results := make([]types.Chunk, 0) + seen := make(map[string]bool) + + g, gctx := errgroup.WithContext(ctx) + resultsCh := make(chan 
[]types.SearchResult, len(queries)) + + for _, q := range queries { + query := q + g.Go(func() error { + resp, err := client.Search(gctx, query, &search.SearchOptions{ + Categories: []string{"general", "news"}, + PageNo: 1, + }) + if err != nil { + resultsCh <- nil + return nil + } + resultsCh <- resp.Results + return nil + }) + } + + go func() { + g.Wait() + close(resultsCh) + }() + + for batch := range resultsCh { + for _, r := range batch { + if r.URL != "" && !seen[r.URL] { + seen[r.URL] = true + results = append(results, r.ToChunk()) + } + } + } + + return results, nil +} + +func buildContext(chunks []types.Chunk, maxResults, maxContentLen int) string { + if len(chunks) > maxResults { + chunks = chunks[:maxResults] + } + + var sb strings.Builder + sb.WriteString("\n") + + for i, chunk := range chunks { + content := chunk.Content + if len(content) > maxContentLen { + content = content[:maxContentLen] + "…" + } + title := chunk.Metadata["title"] + sb.WriteString("") + sb.WriteString(content) + sb.WriteString("\n") + } + + sb.WriteString("") + return sb.String() +} + +func rankByRelevance(chunks []types.Chunk, query string) []types.Chunk { + if len(chunks) == 0 { + return chunks + } + + terms := extractQueryTerms(query) + if len(terms) == 0 { + return chunks + } + + type scored struct { + chunk types.Chunk + score int + } + + scored_chunks := make([]scored, len(chunks)) + for i, chunk := range chunks { + score := 0 + content := strings.ToLower(chunk.Content) + title := strings.ToLower(chunk.Metadata["title"]) + + for term := range terms { + if strings.Contains(title, term) { + score += 3 + } + if strings.Contains(content, term) { + score += 1 + } + } + + scored_chunks[i] = scored{chunk: chunk, score: score} + } + + for i := 0; i < len(scored_chunks)-1; i++ { + for j := i + 1; j < len(scored_chunks); j++ { + if scored_chunks[j].score > scored_chunks[i].score { + scored_chunks[i], scored_chunks[j] = scored_chunks[j], scored_chunks[i] + } + } + } + + result := 
// extractQueryTerms tokenizes a user query into a lookup set of lowercase
// terms for relevance scoring. A leading "summary: " marker is stripped,
// single-character tokens are ignored, and anything beginning with "http"
// (i.e. a URL) is excluded.
func extractQueryTerms(query string) map[string]bool {
	normalized := strings.TrimPrefix(strings.ToLower(query), "summary: ")

	terms := map[string]bool{}
	for _, token := range strings.Fields(normalized) {
		if len(token) < 2 || strings.HasPrefix(token, "http") {
			continue
		}
		terms[token] = true
	}

	return terms
}
Type: "searching", + Searching: queries, + }, + }, + }, + }) + + for _, q := range queries { + resp, err := searchClient.Search(ctx, q, &search.SearchOptions{ + Categories: categoriesToSearch(input.Sources), + PageNo: 1, + }) + if err != nil { + continue + } + + for _, r := range resp.Results { + if r.URL != "" && !seenURLs[r.URL] { + seenURLs[r.URL] = true + allResults = append(allResults, r.ToChunk()) + } + } + } + + if input.Mode == ModeSpeed { + break + } + + if len(allResults) >= 20 && input.Mode == ModeBalanced { + break + } + + if len(allResults) >= 50 { + break + } + } + + return allResults, nil +} + +func categoriesToSearch(sources []string) []string { + if len(sources) == 0 { + return []string{"general", "news"} + } + + categories := make([]string, 0) + for _, s := range sources { + switch s { + case "web": + categories = append(categories, "general") + case "discussions": + categories = append(categories, "social media") + case "academic": + categories = append(categories, "science") + case "news": + categories = append(categories, "news") + case "images": + categories = append(categories, "images") + case "videos": + categories = append(categories, "videos") + } + } + + if len(categories) == 0 { + return []string{"general"} + } + + return categories +} diff --git a/backend/internal/computer/browser/browser.go b/backend/internal/computer/browser/browser.go new file mode 100644 index 0000000..31849a5 --- /dev/null +++ b/backend/internal/computer/browser/browser.go @@ -0,0 +1,587 @@ +package browser + +import ( + "context" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "os" + "os/exec" + "strings" + "sync" + "time" + + "github.com/google/uuid" +) + +type PlaywrightBrowser struct { + cmd *exec.Cmd + serverURL string + client *http.Client + sessions map[string]*BrowserSession + mu sync.RWMutex + config Config +} + +type Config struct { + PlaywrightServerURL string + DefaultTimeout time.Duration + Headless bool + UserAgent 
string + ProxyURL string + ScreenshotsDir string + RecordingsDir string +} + +type BrowserSession struct { + ID string + ContextID string + PageID string + CreatedAt time.Time + LastAction time.Time + Screenshots []string + Recordings []string + Closed bool +} + +type ActionRequest struct { + SessionID string `json:"sessionId"` + Action string `json:"action"` + Params map[string]interface{} `json:"params"` +} + +type ActionResponse struct { + Success bool `json:"success"` + Data interface{} `json:"data,omitempty"` + Screenshot string `json:"screenshot,omitempty"` + Error string `json:"error,omitempty"` + PageTitle string `json:"pageTitle,omitempty"` + PageURL string `json:"pageUrl,omitempty"` +} + +func NewPlaywrightBrowser(cfg Config) *PlaywrightBrowser { + if cfg.DefaultTimeout == 0 { + cfg.DefaultTimeout = 30 * time.Second + } + if cfg.PlaywrightServerURL == "" { + cfg.PlaywrightServerURL = "http://localhost:3050" + } + if cfg.ScreenshotsDir == "" { + cfg.ScreenshotsDir = "/tmp/gooseek-screenshots" + } + if cfg.RecordingsDir == "" { + cfg.RecordingsDir = "/tmp/gooseek-recordings" + } + + os.MkdirAll(cfg.ScreenshotsDir, 0755) + os.MkdirAll(cfg.RecordingsDir, 0755) + + return &PlaywrightBrowser{ + serverURL: cfg.PlaywrightServerURL, + client: &http.Client{ + Timeout: cfg.DefaultTimeout, + }, + sessions: make(map[string]*BrowserSession), + config: cfg, + } +} + +func (b *PlaywrightBrowser) NewSession(ctx context.Context, opts SessionOptions) (*BrowserSession, error) { + sessionID := uuid.New().String() + + params := map[string]interface{}{ + "headless": b.config.Headless, + "sessionId": sessionID, + } + + if opts.Viewport != nil { + params["viewport"] = opts.Viewport + } + if opts.UserAgent != "" { + params["userAgent"] = opts.UserAgent + } else if b.config.UserAgent != "" { + params["userAgent"] = b.config.UserAgent + } + if opts.ProxyURL != "" { + params["proxy"] = opts.ProxyURL + } else if b.config.ProxyURL != "" { + params["proxy"] = b.config.ProxyURL + } + if 
opts.RecordVideo { + params["recordVideo"] = map[string]interface{}{ + "dir": b.config.RecordingsDir, + } + } + + resp, err := b.sendCommand(ctx, "browser.newContext", params) + if err != nil { + return nil, fmt.Errorf("failed to create browser context: %w", err) + } + + contextID, _ := resp["contextId"].(string) + pageID, _ := resp["pageId"].(string) + + session := &BrowserSession{ + ID: sessionID, + ContextID: contextID, + PageID: pageID, + CreatedAt: time.Now(), + LastAction: time.Now(), + } + + b.mu.Lock() + b.sessions[sessionID] = session + b.mu.Unlock() + + return session, nil +} + +func (b *PlaywrightBrowser) CloseSession(ctx context.Context, sessionID string) error { + b.mu.Lock() + session, ok := b.sessions[sessionID] + if !ok { + b.mu.Unlock() + return errors.New("session not found") + } + session.Closed = true + delete(b.sessions, sessionID) + b.mu.Unlock() + + _, err := b.sendCommand(ctx, "browser.closeContext", map[string]interface{}{ + "sessionId": sessionID, + }) + return err +} + +func (b *PlaywrightBrowser) Navigate(ctx context.Context, sessionID, url string, opts NavigateOptions) (*ActionResponse, error) { + params := map[string]interface{}{ + "sessionId": sessionID, + "url": url, + } + if opts.Timeout > 0 { + params["timeout"] = opts.Timeout + } + if opts.WaitUntil != "" { + params["waitUntil"] = opts.WaitUntil + } + + resp, err := b.sendCommand(ctx, "page.goto", params) + if err != nil { + return &ActionResponse{Success: false, Error: err.Error()}, err + } + + result := &ActionResponse{ + Success: true, + PageURL: getString(resp, "url"), + PageTitle: getString(resp, "title"), + } + + if opts.Screenshot { + screenshot, _ := b.Screenshot(ctx, sessionID, ScreenshotOptions{FullPage: false}) + if screenshot != nil { + result.Screenshot = screenshot.Data + } + } + + return result, nil +} + +func (b *PlaywrightBrowser) Click(ctx context.Context, sessionID, selector string, opts ClickOptions) (*ActionResponse, error) { + params := 
map[string]interface{}{ + "sessionId": sessionID, + "selector": selector, + } + if opts.Button != "" { + params["button"] = opts.Button + } + if opts.ClickCount > 0 { + params["clickCount"] = opts.ClickCount + } + if opts.Timeout > 0 { + params["timeout"] = opts.Timeout + } + if opts.Force { + params["force"] = true + } + + _, err := b.sendCommand(ctx, "page.click", params) + if err != nil { + return &ActionResponse{Success: false, Error: err.Error()}, err + } + + result := &ActionResponse{Success: true} + + if opts.WaitAfter > 0 { + time.Sleep(time.Duration(opts.WaitAfter) * time.Millisecond) + } + + if opts.Screenshot { + screenshot, _ := b.Screenshot(ctx, sessionID, ScreenshotOptions{FullPage: false}) + if screenshot != nil { + result.Screenshot = screenshot.Data + } + } + + return result, nil +} + +func (b *PlaywrightBrowser) Type(ctx context.Context, sessionID, selector, text string, opts TypeOptions) (*ActionResponse, error) { + params := map[string]interface{}{ + "sessionId": sessionID, + "selector": selector, + "text": text, + } + if opts.Delay > 0 { + params["delay"] = opts.Delay + } + if opts.Timeout > 0 { + params["timeout"] = opts.Timeout + } + if opts.Clear { + b.sendCommand(ctx, "page.fill", map[string]interface{}{ + "sessionId": sessionID, + "selector": selector, + "value": "", + }) + } + + _, err := b.sendCommand(ctx, "page.type", params) + if err != nil { + return &ActionResponse{Success: false, Error: err.Error()}, err + } + + return &ActionResponse{Success: true}, nil +} + +func (b *PlaywrightBrowser) Fill(ctx context.Context, sessionID, selector, value string) (*ActionResponse, error) { + params := map[string]interface{}{ + "sessionId": sessionID, + "selector": selector, + "value": value, + } + + _, err := b.sendCommand(ctx, "page.fill", params) + if err != nil { + return &ActionResponse{Success: false, Error: err.Error()}, err + } + + return &ActionResponse{Success: true}, nil +} + +func (b *PlaywrightBrowser) Screenshot(ctx context.Context, 
sessionID string, opts ScreenshotOptions) (*ScreenshotResult, error) { + params := map[string]interface{}{ + "sessionId": sessionID, + "fullPage": opts.FullPage, + } + if opts.Selector != "" { + params["selector"] = opts.Selector + } + if opts.Quality > 0 { + params["quality"] = opts.Quality + } + params["type"] = "png" + if opts.Format != "" { + params["type"] = opts.Format + } + + resp, err := b.sendCommand(ctx, "page.screenshot", params) + if err != nil { + return nil, err + } + + data, _ := resp["data"].(string) + + filename := fmt.Sprintf("%s/%s-%d.png", b.config.ScreenshotsDir, sessionID, time.Now().UnixNano()) + if decoded, err := base64.StdEncoding.DecodeString(data); err == nil { + os.WriteFile(filename, decoded, 0644) + } + + b.mu.Lock() + if session, ok := b.sessions[sessionID]; ok { + session.Screenshots = append(session.Screenshots, filename) + } + b.mu.Unlock() + + return &ScreenshotResult{ + Data: data, + Path: filename, + MimeType: "image/png", + }, nil +} + +func (b *PlaywrightBrowser) ExtractText(ctx context.Context, sessionID, selector string) (string, error) { + params := map[string]interface{}{ + "sessionId": sessionID, + "selector": selector, + } + + resp, err := b.sendCommand(ctx, "page.textContent", params) + if err != nil { + return "", err + } + + return getString(resp, "text"), nil +} + +func (b *PlaywrightBrowser) ExtractHTML(ctx context.Context, sessionID, selector string) (string, error) { + params := map[string]interface{}{ + "sessionId": sessionID, + "selector": selector, + } + + resp, err := b.sendCommand(ctx, "page.innerHTML", params) + if err != nil { + return "", err + } + + return getString(resp, "html"), nil +} + +func (b *PlaywrightBrowser) WaitForSelector(ctx context.Context, sessionID, selector string, opts WaitOptions) error { + params := map[string]interface{}{ + "sessionId": sessionID, + "selector": selector, + } + if opts.Timeout > 0 { + params["timeout"] = opts.Timeout + } + if opts.State != "" { + params["state"] = 
opts.State + } + + _, err := b.sendCommand(ctx, "page.waitForSelector", params) + return err +} + +func (b *PlaywrightBrowser) WaitForNavigation(ctx context.Context, sessionID string, opts WaitOptions) error { + params := map[string]interface{}{ + "sessionId": sessionID, + } + if opts.Timeout > 0 { + params["timeout"] = opts.Timeout + } + if opts.WaitUntil != "" { + params["waitUntil"] = opts.WaitUntil + } + + _, err := b.sendCommand(ctx, "page.waitForNavigation", params) + return err +} + +func (b *PlaywrightBrowser) Scroll(ctx context.Context, sessionID string, opts ScrollOptions) (*ActionResponse, error) { + script := fmt.Sprintf("window.scrollBy(%d, %d)", opts.X, opts.Y) + if opts.Selector != "" { + script = fmt.Sprintf(`document.querySelector('%s').scrollBy(%d, %d)`, opts.Selector, opts.X, opts.Y) + } + if opts.ToBottom { + script = "window.scrollTo(0, document.body.scrollHeight)" + } + if opts.ToTop { + script = "window.scrollTo(0, 0)" + } + + _, err := b.Evaluate(ctx, sessionID, script) + if err != nil { + return &ActionResponse{Success: false, Error: err.Error()}, err + } + + if opts.WaitAfter > 0 { + time.Sleep(time.Duration(opts.WaitAfter) * time.Millisecond) + } + + return &ActionResponse{Success: true}, nil +} + +func (b *PlaywrightBrowser) Evaluate(ctx context.Context, sessionID, script string) (interface{}, error) { + params := map[string]interface{}{ + "sessionId": sessionID, + "expression": script, + } + + resp, err := b.sendCommand(ctx, "page.evaluate", params) + if err != nil { + return nil, err + } + + return resp["result"], nil +} + +func (b *PlaywrightBrowser) Select(ctx context.Context, sessionID, selector string, values []string) (*ActionResponse, error) { + params := map[string]interface{}{ + "sessionId": sessionID, + "selector": selector, + "values": values, + } + + _, err := b.sendCommand(ctx, "page.selectOption", params) + if err != nil { + return &ActionResponse{Success: false, Error: err.Error()}, err + } + + return 
&ActionResponse{Success: true}, nil +} + +func (b *PlaywrightBrowser) GetPageInfo(ctx context.Context, sessionID string) (*PageInfo, error) { + params := map[string]interface{}{ + "sessionId": sessionID, + } + + resp, err := b.sendCommand(ctx, "page.info", params) + if err != nil { + return nil, err + } + + return &PageInfo{ + URL: getString(resp, "url"), + Title: getString(resp, "title"), + Content: getString(resp, "content"), + }, nil +} + +func (b *PlaywrightBrowser) PDF(ctx context.Context, sessionID string, opts PDFOptions) ([]byte, error) { + params := map[string]interface{}{ + "sessionId": sessionID, + } + if opts.Format != "" { + params["format"] = opts.Format + } + if opts.Landscape { + params["landscape"] = true + } + if opts.PrintBackground { + params["printBackground"] = true + } + + resp, err := b.sendCommand(ctx, "page.pdf", params) + if err != nil { + return nil, err + } + + data, _ := resp["data"].(string) + return base64.StdEncoding.DecodeString(data) +} + +func (b *PlaywrightBrowser) sendCommand(ctx context.Context, method string, params map[string]interface{}) (map[string]interface{}, error) { + body := map[string]interface{}{ + "method": method, + "params": params, + } + + jsonBody, err := json.Marshal(body) + if err != nil { + return nil, err + } + + req, err := http.NewRequestWithContext(ctx, "POST", b.serverURL+"/api/browser", strings.NewReader(string(jsonBody))) + if err != nil { + return nil, err + } + req.Header.Set("Content-Type", "application/json") + + resp, err := b.client.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + var result map[string]interface{} + if err := json.Unmarshal(respBody, &result); err != nil { + return nil, err + } + + if errMsg, ok := result["error"].(string); ok && errMsg != "" { + return result, errors.New(errMsg) + } + + return result, nil +} + +func getString(m map[string]interface{}, key string) 
// getString returns m[key] when it holds a string, else "".
func getString(m map[string]interface{}, key string) string {
	if v, ok := m[key].(string); ok {
		return v
	}
	return ""
}

// SessionOptions configures a new isolated browser context.
type SessionOptions struct {
	Headless    bool      // run without a visible window
	Viewport    *Viewport // nil → server-side default viewport
	UserAgent   string    // overrides the browser-wide default when non-empty
	ProxyURL    string    // overrides the browser-wide proxy when non-empty
	RecordVideo bool      // record the session into the configured recordings dir
	BlockAds    bool      // NOTE(review): never forwarded by NewSession — confirm whether unimplemented
}

// Viewport is a page viewport size in pixels.
type Viewport struct {
	Width  int `json:"width"`
	Height int `json:"height"`
}

// NavigateOptions tunes page.goto. Timeout is forwarded verbatim to the
// Playwright server (presumably milliseconds — TODO confirm).
type NavigateOptions struct {
	Timeout    int
	WaitUntil  string // Playwright wait state, e.g. "load" / "networkidle" — confirm values
	Screenshot bool   // capture a screenshot after navigation completes
}

// ClickOptions tunes page.click.
type ClickOptions struct {
	Button     string // mouse button name, forwarded verbatim
	ClickCount int
	Timeout    int  // forwarded verbatim to the Playwright server
	Force      bool
	WaitAfter  int  // post-click sleep in milliseconds (multiplied by time.Millisecond)
	Screenshot bool // capture a screenshot after the click
}

// TypeOptions tunes page.type.
type TypeOptions struct {
	Delay   int  // per-keystroke delay, forwarded verbatim
	Timeout int  // forwarded verbatim
	Clear   bool // clear the field first via page.fill with ""
}

// ScreenshotOptions tunes page.screenshot.
type ScreenshotOptions struct {
	FullPage bool
	Selector string // capture just this element when set
	Format   string // image type; "png" is used when empty
	Quality  int
}

// ScreenshotResult carries a captured image both inline and on disk.
type ScreenshotResult struct {
	Data     string // base64-encoded image bytes
	Path     string // server-local file the decoded image was written to
	MimeType string
}

// WaitOptions tunes the wait* commands.
type WaitOptions struct {
	Timeout   int    // forwarded verbatim
	State     string // for waitForSelector
	WaitUntil string // for waitForNavigation
}

// ScrollOptions describes a scroll gesture; ToTop/ToBottom take
// precedence over X/Y, and Selector scrolls inside an element instead of
// the window.
type ScrollOptions struct {
	X         int
	Y         int
	Selector  string
	ToBottom  bool
	ToTop     bool
	WaitAfter int // post-scroll sleep in milliseconds
}

// PageInfo is a snapshot of the current page.
type PageInfo struct {
	URL     string
	Title   string
	Content string
}

// PDFOptions tunes page.pdf.
type PDFOptions struct {
	Format          string // paper format, forwarded verbatim
	Landscape       bool
	PrintBackground bool
}

// BrowserServer exposes the PlaywrightBrowser over HTTP and tracks the
// sessions it has handed out.
type BrowserServer struct {
	browser  *PlaywrightBrowser
	sessions map[string]*ManagedSession // keyed by session ID; guarded by mu
	mu       sync.RWMutex
	config   ServerConfig
}

// ServerConfig tunes the HTTP server and session housekeeping.
type ServerConfig struct {
	Port            int
	MaxSessions     int           // cap on concurrently open sessions
	SessionTimeout  time.Duration // idle time before a session is evicted
	CleanupInterval time.Duration // how often the eviction sweep runs
}

// ManagedSession decorates a BrowserSession with the bookkeeping used for
// idle eviction and the per-session action log.
type ManagedSession struct {
	*BrowserSession
	LastActive time.Time
	Actions    []ActionLog
}
string `json:"action"` + Params string `json:"params"` + Success bool `json:"success"` + Error string `json:"error,omitempty"` + Duration int64 `json:"durationMs"` + Timestamp time.Time `json:"timestamp"` +} + +type BrowserRequest struct { + Method string `json:"method"` + Params map[string]interface{} `json:"params"` +} + +func NewBrowserServer(cfg ServerConfig) *BrowserServer { + if cfg.Port == 0 { + cfg.Port = 3050 + } + if cfg.MaxSessions == 0 { + cfg.MaxSessions = 20 + } + if cfg.SessionTimeout == 0 { + cfg.SessionTimeout = 30 * time.Minute + } + if cfg.CleanupInterval == 0 { + cfg.CleanupInterval = 5 * time.Minute + } + + return &BrowserServer{ + browser: NewPlaywrightBrowser(Config{ + DefaultTimeout: 30 * time.Second, + Headless: true, + }), + sessions: make(map[string]*ManagedSession), + config: cfg, + } +} + +func (s *BrowserServer) Start(ctx context.Context) error { + go s.cleanupLoop(ctx) + + app := fiber.New(fiber.Config{ + BodyLimit: 50 * 1024 * 1024, + ReadTimeout: 2 * time.Minute, + WriteTimeout: 2 * time.Minute, + }) + + app.Use(logger.New()) + app.Use(cors.New()) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok", "sessions": len(s.sessions)}) + }) + + app.Post("/api/browser", s.handleBrowserCommand) + + app.Post("/api/session/new", s.handleNewSession) + app.Delete("/api/session/:id", s.handleCloseSession) + app.Get("/api/session/:id", s.handleGetSession) + app.Get("/api/sessions", s.handleListSessions) + + app.Post("/api/action", s.handleAction) + + log.Printf("[BrowserServer] Starting on port %d", s.config.Port) + return app.Listen(fmt.Sprintf(":%d", s.config.Port)) +} + +func (s *BrowserServer) handleBrowserCommand(c *fiber.Ctx) error { + var req BrowserRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) + defer cancel() + + sessionID, _ := 
req.Params["sessionId"].(string) + + s.mu.Lock() + if session, ok := s.sessions[sessionID]; ok { + session.LastActive = time.Now() + } + s.mu.Unlock() + + start := time.Now() + result, err := s.executeMethod(ctx, req.Method, req.Params) + + s.mu.Lock() + if session, ok := s.sessions[sessionID]; ok { + paramsJSON, _ := json.Marshal(req.Params) + session.Actions = append(session.Actions, ActionLog{ + Action: req.Method, + Params: string(paramsJSON), + Success: err == nil, + Error: errToString(err), + Duration: time.Since(start).Milliseconds(), + Timestamp: time.Now(), + }) + } + s.mu.Unlock() + + if err != nil { + return c.JSON(fiber.Map{ + "success": false, + "error": err.Error(), + }) + } + + return c.JSON(result) +} + +func (s *BrowserServer) executeMethod(ctx context.Context, method string, params map[string]interface{}) (map[string]interface{}, error) { + sessionID, _ := params["sessionId"].(string) + + switch method { + case "browser.newContext": + opts := SessionOptions{ + Headless: getBool(params, "headless"), + } + if viewport, ok := params["viewport"].(map[string]interface{}); ok { + opts.Viewport = &Viewport{ + Width: getInt(viewport, "width"), + Height: getInt(viewport, "height"), + } + } + if ua, ok := params["userAgent"].(string); ok { + opts.UserAgent = ua + } + if proxy, ok := params["proxy"].(string); ok { + opts.ProxyURL = proxy + } + if rv, ok := params["recordVideo"].(map[string]interface{}); ok { + _ = rv + opts.RecordVideo = true + } + + session, err := s.browser.NewSession(ctx, opts) + if err != nil { + return nil, err + } + + s.mu.Lock() + s.sessions[session.ID] = &ManagedSession{ + BrowserSession: session, + LastActive: time.Now(), + Actions: make([]ActionLog, 0), + } + s.mu.Unlock() + + return map[string]interface{}{ + "sessionId": session.ID, + "contextId": session.ContextID, + "pageId": session.PageID, + }, nil + + case "browser.closeContext": + err := s.browser.CloseSession(ctx, sessionID) + s.mu.Lock() + delete(s.sessions, sessionID) + 
s.mu.Unlock() + return map[string]interface{}{"success": err == nil}, err + + case "page.goto": + url, _ := params["url"].(string) + opts := NavigateOptions{ + Timeout: getInt(params, "timeout"), + WaitUntil: getString(params, "waitUntil"), + } + result, err := s.browser.Navigate(ctx, sessionID, url, opts) + if err != nil { + return nil, err + } + return map[string]interface{}{ + "success": result.Success, + "url": result.PageURL, + "title": result.PageTitle, + }, nil + + case "page.click": + selector, _ := params["selector"].(string) + opts := ClickOptions{ + Button: getString(params, "button"), + ClickCount: getInt(params, "clickCount"), + Timeout: getInt(params, "timeout"), + Force: getBool(params, "force"), + } + result, err := s.browser.Click(ctx, sessionID, selector, opts) + if err != nil { + return nil, err + } + return map[string]interface{}{ + "success": result.Success, + "screenshot": result.Screenshot, + }, nil + + case "page.type": + selector, _ := params["selector"].(string) + text, _ := params["text"].(string) + opts := TypeOptions{ + Delay: getInt(params, "delay"), + Timeout: getInt(params, "timeout"), + } + _, err := s.browser.Type(ctx, sessionID, selector, text, opts) + return map[string]interface{}{"success": err == nil}, err + + case "page.fill": + selector, _ := params["selector"].(string) + value, _ := params["value"].(string) + _, err := s.browser.Fill(ctx, sessionID, selector, value) + return map[string]interface{}{"success": err == nil}, err + + case "page.screenshot": + opts := ScreenshotOptions{ + FullPage: getBool(params, "fullPage"), + Selector: getString(params, "selector"), + Format: getString(params, "type"), + Quality: getInt(params, "quality"), + } + result, err := s.browser.Screenshot(ctx, sessionID, opts) + if err != nil { + return nil, err + } + return map[string]interface{}{ + "data": result.Data, + "path": result.Path, + }, nil + + case "page.textContent": + selector, _ := params["selector"].(string) + text, err := 
s.browser.ExtractText(ctx, sessionID, selector) + return map[string]interface{}{"text": text}, err + + case "page.innerHTML": + selector, _ := params["selector"].(string) + html, err := s.browser.ExtractHTML(ctx, sessionID, selector) + return map[string]interface{}{"html": html}, err + + case "page.waitForSelector": + selector, _ := params["selector"].(string) + opts := WaitOptions{ + Timeout: getInt(params, "timeout"), + State: getString(params, "state"), + } + err := s.browser.WaitForSelector(ctx, sessionID, selector, opts) + return map[string]interface{}{"success": err == nil}, err + + case "page.waitForNavigation": + opts := WaitOptions{ + Timeout: getInt(params, "timeout"), + WaitUntil: getString(params, "waitUntil"), + } + err := s.browser.WaitForNavigation(ctx, sessionID, opts) + return map[string]interface{}{"success": err == nil}, err + + case "page.evaluate": + expression, _ := params["expression"].(string) + result, err := s.browser.Evaluate(ctx, sessionID, expression) + return map[string]interface{}{"result": result}, err + + case "page.selectOption": + selector, _ := params["selector"].(string) + values := getStringArray(params, "values") + _, err := s.browser.Select(ctx, sessionID, selector, values) + return map[string]interface{}{"success": err == nil}, err + + case "page.info": + info, err := s.browser.GetPageInfo(ctx, sessionID) + if err != nil { + return nil, err + } + return map[string]interface{}{ + "url": info.URL, + "title": info.Title, + "content": info.Content, + }, nil + + case "page.pdf": + opts := PDFOptions{ + Format: getString(params, "format"), + Landscape: getBool(params, "landscape"), + PrintBackground: getBool(params, "printBackground"), + } + data, err := s.browser.PDF(ctx, sessionID, opts) + if err != nil { + return nil, err + } + return map[string]interface{}{ + "data": data, + }, nil + + default: + return nil, fmt.Errorf("unknown method: %s", method) + } +} + +func (s *BrowserServer) handleNewSession(c *fiber.Ctx) error { + var 
req struct { + Headless bool `json:"headless"` + Viewport *Viewport `json:"viewport,omitempty"` + UserAgent string `json:"userAgent,omitempty"` + ProxyURL string `json:"proxyUrl,omitempty"` + } + + if err := c.BodyParser(&req); err != nil { + req.Headless = true + } + + s.mu.RLock() + if len(s.sessions) >= s.config.MaxSessions { + s.mu.RUnlock() + return c.Status(http.StatusTooManyRequests).JSON(fiber.Map{ + "error": "Maximum sessions limit reached", + }) + } + s.mu.RUnlock() + + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + session, err := s.browser.NewSession(ctx, SessionOptions{ + Headless: req.Headless, + Viewport: req.Viewport, + UserAgent: req.UserAgent, + ProxyURL: req.ProxyURL, + }) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + s.mu.Lock() + s.sessions[session.ID] = &ManagedSession{ + BrowserSession: session, + LastActive: time.Now(), + Actions: make([]ActionLog, 0), + } + s.mu.Unlock() + + return c.JSON(fiber.Map{ + "sessionId": session.ID, + "contextId": session.ContextID, + "pageId": session.PageID, + }) +} + +func (s *BrowserServer) handleCloseSession(c *fiber.Ctx) error { + sessionID := c.Params("id") + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + err := s.browser.CloseSession(ctx, sessionID) + if err != nil { + return c.Status(404).JSON(fiber.Map{"error": err.Error()}) + } + + s.mu.Lock() + delete(s.sessions, sessionID) + s.mu.Unlock() + + return c.JSON(fiber.Map{"success": true}) +} + +func (s *BrowserServer) handleGetSession(c *fiber.Ctx) error { + sessionID := c.Params("id") + + s.mu.RLock() + session, ok := s.sessions[sessionID] + s.mu.RUnlock() + + if !ok { + return c.Status(404).JSON(fiber.Map{"error": "Session not found"}) + } + + return c.JSON(fiber.Map{ + "sessionId": session.ID, + "createdAt": session.CreatedAt, + "lastActive": session.LastActive, + "screenshots": session.Screenshots, + "actions": 
len(session.Actions), + }) +} + +func (s *BrowserServer) handleListSessions(c *fiber.Ctx) error { + s.mu.RLock() + defer s.mu.RUnlock() + + sessions := make([]map[string]interface{}, 0, len(s.sessions)) + for _, session := range s.sessions { + sessions = append(sessions, map[string]interface{}{ + "sessionId": session.ID, + "createdAt": session.CreatedAt, + "lastActive": session.LastActive, + "actions": len(session.Actions), + }) + } + + return c.JSON(fiber.Map{"sessions": sessions, "count": len(sessions)}) +} + +func (s *BrowserServer) handleAction(c *fiber.Ctx) error { + var req struct { + SessionID string `json:"sessionId"` + Action string `json:"action"` + Selector string `json:"selector,omitempty"` + URL string `json:"url,omitempty"` + Value string `json:"value,omitempty"` + Screenshot bool `json:"screenshot"` + } + + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) + defer cancel() + + s.mu.Lock() + if session, ok := s.sessions[req.SessionID]; ok { + session.LastActive = time.Now() + } + s.mu.Unlock() + + var result *ActionResponse + var err error + + switch req.Action { + case "navigate": + result, err = s.browser.Navigate(ctx, req.SessionID, req.URL, NavigateOptions{Screenshot: req.Screenshot}) + case "click": + result, err = s.browser.Click(ctx, req.SessionID, req.Selector, ClickOptions{Screenshot: req.Screenshot}) + case "type": + result, err = s.browser.Type(ctx, req.SessionID, req.Selector, req.Value, TypeOptions{}) + case "fill": + result, err = s.browser.Fill(ctx, req.SessionID, req.Selector, req.Value) + case "screenshot": + var screenshot *ScreenshotResult + screenshot, err = s.browser.Screenshot(ctx, req.SessionID, ScreenshotOptions{}) + if err == nil { + result = &ActionResponse{Success: true, Screenshot: screenshot.Data} + } + case "extract": + var text string + text, err = s.browser.ExtractText(ctx, 
req.SessionID, req.Selector) + result = &ActionResponse{Success: err == nil, Data: text} + default: + return c.Status(400).JSON(fiber.Map{"error": "Unknown action: " + req.Action}) + } + + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error(), "success": false}) + } + + return c.JSON(result) +} + +func (s *BrowserServer) cleanupLoop(ctx context.Context) { + ticker := time.NewTicker(s.config.CleanupInterval) + defer ticker.Stop() + + for { + select { + case <-ctx.Done(): + return + case <-ticker.C: + s.cleanupExpiredSessions() + } + } +} + +func (s *BrowserServer) cleanupExpiredSessions() { + s.mu.Lock() + defer s.mu.Unlock() + + now := time.Now() + for sessionID, session := range s.sessions { + if now.Sub(session.LastActive) > s.config.SessionTimeout { + log.Printf("[BrowserServer] Cleaning up expired session: %s", sessionID) + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + s.browser.CloseSession(ctx, sessionID) + cancel() + delete(s.sessions, sessionID) + } + } +} + +func errToString(err error) string { + if err == nil { + return "" + } + return err.Error() +} + +func getBool(m map[string]interface{}, key string) bool { + if v, ok := m[key].(bool); ok { + return v + } + return false +} + +func getInt(m map[string]interface{}, key string) int { + if v, ok := m[key].(float64); ok { + return int(v) + } + if v, ok := m[key].(int); ok { + return v + } + return 0 +} + +func getStringArray(m map[string]interface{}, key string) []string { + if v, ok := m[key].([]interface{}); ok { + result := make([]string, len(v)) + for i, item := range v { + result[i], _ = item.(string) + } + return result + } + return nil +} diff --git a/backend/internal/computer/computer.go b/backend/internal/computer/computer.go new file mode 100644 index 0000000..925a165 --- /dev/null +++ b/backend/internal/computer/computer.go @@ -0,0 +1,738 @@ +package computer + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "sync" + "time" + + 
// ComputerConfig tunes the autonomous "computer" runtime: concurrency
// ceilings, time/cost budgets, feature toggles, and the locations of the
// supporting services.
type ComputerConfig struct {
	MaxParallelTasks    int           // worker parallelism inside a single task
	MaxSubTasks         int           // hard cap on sub-tasks per task
	TaskTimeout         time.Duration // wall-clock limit for a whole task
	SubTaskTimeout      time.Duration // wall-clock limit for one sub-task
	TotalBudget         float64       // overall spend budget; presumably USD — TODO confirm unit
	EnableSandbox       bool          // create a SandboxManager and attach it to the executor
	EnableScheduling    bool          // start the Scheduler for scheduled tasks
	EnableBrowser       bool
	SandboxImage        string // container image used by the sandbox
	ArtifactStorageURL  string
	BrowserServerURL    string
	CheckpointStorePath string
	MaxConcurrentTasks  int
	HeartbeatInterval   time.Duration
	CheckpointInterval  time.Duration
}

// DefaultConfig returns the stock runtime configuration. Note that
// TaskTimeout is one year — effectively unbounded; the per-mode limits
// from GetDurationConfig are the practical ceiling.
func DefaultConfig() ComputerConfig {
	return ComputerConfig{
		MaxParallelTasks:    10,
		MaxSubTasks:         100,
		TaskTimeout:         365 * 24 * time.Hour,
		SubTaskTimeout:      2 * time.Hour,
		TotalBudget:         100.0,
		EnableSandbox:       true,
		EnableScheduling:    true,
		EnableBrowser:       true,
		SandboxImage:        "gooseek/sandbox:latest",
		BrowserServerURL:    "http://browser-svc:3050",
		CheckpointStorePath: "/data/checkpoints",
		MaxConcurrentTasks:  50,
		HeartbeatInterval:   30 * time.Second,
		CheckpointInterval:  15 * time.Minute,
	}
}

// GetDurationConfig resolves a DurationMode into its concrete runtime
// limits, falling back to the DurationMedium profile for unknown modes.
func GetDurationConfig(mode DurationMode) (maxDuration, checkpointFreq, heartbeatFreq time.Duration, maxIter int) {
	cfg, ok := DurationModeConfigs[mode]
	if !ok {
		cfg = DurationModeConfigs[DurationMedium]
	}
	return cfg.MaxDuration, cfg.CheckpointFreq, cfg.HeartbeatFreq, cfg.MaxIterations
}

// Dependencies bundles the external collaborators a Computer needs.
type Dependencies struct {
	Registry     *llm.ModelRegistry
	TaskRepo     TaskRepository
	MemoryRepo   MemoryRepository
	ArtifactRepo ArtifactRepository
}

// TaskRepository persists ComputerTask records.
type TaskRepository interface {
	Create(ctx context.Context, task *ComputerTask) error
	Update(ctx context.Context, task *ComputerTask) error
	GetByID(ctx context.Context, id string) (*ComputerTask, error)
	GetByUserID(ctx context.Context, userID string, limit, offset int) ([]ComputerTask, error)
	// GetScheduled returns tasks with a schedule attached (used by the Scheduler).
	GetScheduled(ctx context.Context) ([]ComputerTask, error)
	Delete(ctx context.Context, id string) error
}
context.Context, entry *MemoryEntry) error + GetByUser(ctx context.Context, userID string, limit int) ([]MemoryEntry, error) + GetByTask(ctx context.Context, taskID string) ([]MemoryEntry, error) + Search(ctx context.Context, userID, query string, limit int) ([]MemoryEntry, error) + Delete(ctx context.Context, id string) error +} + +type ArtifactRepository interface { + Create(ctx context.Context, artifact *Artifact) error + GetByID(ctx context.Context, id string) (*Artifact, error) + GetByTaskID(ctx context.Context, taskID string) ([]Artifact, error) + Delete(ctx context.Context, id string) error +} + +type Computer struct { + cfg ComputerConfig + planner *Planner + router *Router + executor *Executor + sandbox *SandboxManager + memory *MemoryStore + scheduler *Scheduler + connectors *connectors.ConnectorHub + registry *llm.ModelRegistry + taskRepo TaskRepository + eventBus *EventBus + mu sync.RWMutex + tasks map[string]*ComputerTask +} + +func NewComputer(cfg ComputerConfig, deps Dependencies) *Computer { + eventBus := NewEventBus() + + c := &Computer{ + cfg: cfg, + registry: deps.Registry, + taskRepo: deps.TaskRepo, + eventBus: eventBus, + tasks: make(map[string]*ComputerTask), + } + + c.planner = NewPlanner(deps.Registry) + c.router = NewRouter(deps.Registry) + c.executor = NewExecutor(c.router, cfg.MaxParallelTasks) + c.memory = NewMemoryStore(deps.MemoryRepo) + c.connectors = connectors.NewConnectorHub() + + if cfg.EnableSandbox { + c.sandbox = NewSandboxManager(SandboxConfig{ + Image: cfg.SandboxImage, + Timeout: cfg.SubTaskTimeout, + }) + c.executor.SetSandbox(c.sandbox) + } + + if cfg.EnableScheduling { + c.scheduler = NewScheduler(deps.TaskRepo, c) + } + + return c +} + +func (c *Computer) Execute(ctx context.Context, userID, query string, opts ExecuteOptions) (*ComputerTask, error) { + if opts.ResumeFromID != "" { + return c.resumeFromCheckpoint(ctx, opts.ResumeFromID, opts) + } + + durationMode := opts.DurationMode + if durationMode == "" { + 
durationMode = DurationMedium + } + + maxDuration, _, _, maxIter := GetDurationConfig(durationMode) + + task := &ComputerTask{ + ID: uuid.New().String(), + UserID: userID, + Query: query, + Status: StatusPending, + Memory: make(map[string]interface{}), + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + DurationMode: durationMode, + MaxDuration: maxDuration, + MaxIterations: maxIter, + Priority: opts.Priority, + } + + if opts.Priority == "" { + task.Priority = PriorityNormal + } + + if opts.ResourceLimits != nil { + task.ResourceLimits = opts.ResourceLimits + } + + if opts.Schedule != nil { + task.Schedule = opts.Schedule + task.Status = StatusScheduled + } + + if opts.Context != nil { + task.Memory = opts.Context + } + + estimatedEnd := time.Now().Add(maxDuration) + task.EstimatedEnd = &estimatedEnd + + if err := c.taskRepo.Create(ctx, task); err != nil { + return nil, fmt.Errorf("failed to create task: %w", err) + } + + c.mu.Lock() + c.tasks[task.ID] = task + c.mu.Unlock() + + c.emitEvent(TaskEvent{ + Type: EventTaskCreated, + TaskID: task.ID, + Status: task.Status, + Message: fmt.Sprintf("Task created (mode: %s, max duration: %v)", durationMode, maxDuration), + Timestamp: time.Now(), + Data: map[string]interface{}{ + "durationMode": durationMode, + "maxDuration": maxDuration.String(), + "maxIterations": maxIter, + }, + }) + + if opts.Async { + go c.executeTaskWithCheckpoints(context.Background(), task, opts) + return task, nil + } + + return c.executeTaskWithCheckpoints(ctx, task, opts) +} + +func (c *Computer) resumeFromCheckpoint(ctx context.Context, checkpointID string, opts ExecuteOptions) (*ComputerTask, error) { + task, err := c.taskRepo.GetByID(ctx, checkpointID) + if err != nil { + return nil, fmt.Errorf("task not found: %w", err) + } + + if task.Checkpoint == nil { + return nil, errors.New("no checkpoint found for this task") + } + + task.Status = StatusExecuting + now := time.Now() + task.ResumedAt = &now + task.UpdatedAt = now + + 
c.emitEvent(TaskEvent{ + Type: EventResumed, + TaskID: task.ID, + Status: task.Status, + Message: fmt.Sprintf("Resumed from checkpoint (wave: %d, subtask: %d)", task.Checkpoint.WaveIndex, task.Checkpoint.SubTaskIndex), + Progress: task.Checkpoint.Progress, + Timestamp: time.Now(), + }) + + c.mu.Lock() + c.tasks[task.ID] = task + c.mu.Unlock() + + if opts.Async { + go c.executeTaskWithCheckpoints(context.Background(), task, opts) + return task, nil + } + + return c.executeTaskWithCheckpoints(ctx, task, opts) +} + +func (c *Computer) executeTask(ctx context.Context, task *ComputerTask, opts ExecuteOptions) (*ComputerTask, error) { + return c.executeTaskWithCheckpoints(ctx, task, opts) +} + +func (c *Computer) executeTaskWithCheckpoints(ctx context.Context, task *ComputerTask, opts ExecuteOptions) (*ComputerTask, error) { + maxDuration, checkpointFreq, heartbeatFreq, _ := GetDurationConfig(task.DurationMode) + + if opts.Timeout > 0 { + maxDuration = time.Duration(opts.Timeout) * time.Second + } + + ctx, cancel := context.WithTimeout(ctx, maxDuration) + defer cancel() + + budget := c.cfg.TotalBudget + if opts.MaxCost > 0 { + budget = opts.MaxCost + } + if task.ResourceLimits != nil && task.ResourceLimits.MaxTotalCost > 0 { + budget = task.ResourceLimits.MaxTotalCost + } + + startWave := 0 + if task.Checkpoint != nil { + startWave = task.Checkpoint.WaveIndex + for k, v := range task.Checkpoint.Memory { + task.Memory[k] = v + } + } + + if task.Plan == nil { + task.Status = StatusPlanning + task.UpdatedAt = time.Now() + c.updateTask(ctx, task) + + c.emitEvent(TaskEvent{ + Type: EventTaskStarted, + TaskID: task.ID, + Status: StatusPlanning, + Message: "Planning task execution", + Timestamp: time.Now(), + }) + + userMemory, _ := c.memory.GetUserContext(ctx, task.UserID) + memoryContext := make(map[string]interface{}) + for k, v := range userMemory { + memoryContext[k] = v + } + for k, v := range task.Memory { + memoryContext[k] = v + } + + plan, err := c.planner.Plan(ctx, 
task.Query, memoryContext) + if err != nil { + task.Status = StatusFailed + task.Error = fmt.Sprintf("Planning failed: %v", err) + task.UpdatedAt = time.Now() + c.updateTask(ctx, task) + c.emitEvent(TaskEvent{ + Type: EventTaskFailed, + TaskID: task.ID, + Status: StatusFailed, + Message: task.Error, + Timestamp: time.Now(), + }) + return task, err + } + + task.Plan = plan + task.SubTasks = plan.SubTasks + } + + task.Status = StatusLongRunning + task.UpdatedAt = time.Now() + c.updateTask(ctx, task) + + c.emitEvent(TaskEvent{ + Type: EventTaskProgress, + TaskID: task.ID, + Status: StatusLongRunning, + Progress: 10, + Message: fmt.Sprintf("Executing %d subtasks (long-running mode)", len(task.Plan.SubTasks)), + Data: map[string]interface{}{ + "plan": task.Plan, + "durationMode": task.DurationMode, + "checkpointFreq": checkpointFreq.String(), + }, + Timestamp: time.Now(), + }) + + heartbeatTicker := time.NewTicker(heartbeatFreq) + defer heartbeatTicker.Stop() + + checkpointTicker := time.NewTicker(checkpointFreq) + defer checkpointTicker.Stop() + + go func() { + for { + select { + case <-ctx.Done(): + return + case <-heartbeatTicker.C: + now := time.Now() + task.HeartbeatAt = &now + c.emitEvent(TaskEvent{ + Type: EventHeartbeat, + TaskID: task.ID, + Progress: task.Progress, + Message: fmt.Sprintf("Heartbeat: %d%% complete, cost: $%.4f", task.Progress, task.TotalCost), + Data: map[string]interface{}{ + "runtime": time.Since(task.CreatedAt).String(), + "cost": task.TotalCost, + }, + Timestamp: now, + }) + } + } + }() + + totalSubTasks := len(task.Plan.ExecutionOrder) + for waveIdx := startWave; waveIdx < totalSubTasks; waveIdx++ { + select { + case <-ctx.Done(): + c.saveCheckpoint(task, waveIdx, 0, "context_timeout") + return task, ctx.Err() + case <-checkpointTicker.C: + c.saveCheckpoint(task, waveIdx, 0, "periodic") + default: + } + + if budget > 0 && task.TotalCost >= budget { + c.saveCheckpoint(task, waveIdx, 0, "budget_exceeded") + task.Status = StatusPaused + 
task.Message = fmt.Sprintf("Paused: budget exceeded ($%.2f / $%.2f)", task.TotalCost, budget) + c.updateTask(ctx, task) + return task, nil + } + + wave := task.Plan.ExecutionOrder[waveIdx] + waveTasks := make([]SubTask, 0) + for _, subTaskID := range wave { + for i := range task.SubTasks { + if task.SubTasks[i].ID == subTaskID { + waveTasks = append(waveTasks, task.SubTasks[i]) + break + } + } + } + + results, err := c.executor.ExecuteGroup(ctx, waveTasks, budget-task.TotalCost) + if err != nil { + c.saveCheckpoint(task, waveIdx, 0, "execution_error") + task.Status = StatusFailed + task.Error = fmt.Sprintf("Execution failed at wave %d: %v", waveIdx, err) + task.UpdatedAt = time.Now() + c.updateTask(ctx, task) + return task, err + } + + for _, result := range results { + for i := range task.SubTasks { + if task.SubTasks[i].ID == result.SubTaskID { + task.SubTasks[i].Output = result.Output + task.SubTasks[i].Cost = result.Cost + task.SubTasks[i].Status = StatusCompleted + now := time.Now() + task.SubTasks[i].CompletedAt = &now + if result.Error != nil { + task.SubTasks[i].Status = StatusFailed + task.SubTasks[i].Error = result.Error.Error() + } + break + } + } + + task.TotalCost += result.Cost + task.TotalRuntime = time.Since(task.CreatedAt) + + for _, artifact := range result.Artifacts { + task.Artifacts = append(task.Artifacts, artifact) + c.emitEvent(TaskEvent{ + Type: EventArtifact, + TaskID: task.ID, + SubTaskID: result.SubTaskID, + Data: map[string]interface{}{ + "artifact": artifact, + }, + Timestamp: time.Now(), + }) + } + } + + progress := 10 + int(float64(waveIdx+1)/float64(totalSubTasks)*80) + task.Progress = progress + task.Iterations = waveIdx + 1 + task.UpdatedAt = time.Now() + c.updateTask(ctx, task) + + c.emitEvent(TaskEvent{ + Type: EventIteration, + TaskID: task.ID, + Progress: progress, + Message: fmt.Sprintf("Completed wave %d/%d (runtime: %v)", waveIdx+1, totalSubTasks, time.Since(task.CreatedAt).Round(time.Second)), + Data: 
map[string]interface{}{ + "wave": waveIdx + 1, + "total": totalSubTasks, + "cost": task.TotalCost, + "runtime": time.Since(task.CreatedAt).String(), + "artifacts": len(task.Artifacts), + }, + Timestamp: time.Now(), + }) + } + + task.Status = StatusCompleted + task.Progress = 100 + now := time.Now() + task.CompletedAt = &now + task.UpdatedAt = now + task.TotalRuntime = time.Since(task.CreatedAt) + c.updateTask(ctx, task) + + c.emitEvent(TaskEvent{ + Type: EventTaskCompleted, + TaskID: task.ID, + Status: StatusCompleted, + Progress: 100, + Message: fmt.Sprintf("Task completed (runtime: %v, cost: $%.4f)", task.TotalRuntime.Round(time.Second), task.TotalCost), + Data: map[string]interface{}{ + "artifacts": task.Artifacts, + "totalCost": task.TotalCost, + "totalRuntime": task.TotalRuntime.String(), + "iterations": task.Iterations, + }, + Timestamp: time.Now(), + }) + + c.storeTaskResults(ctx, task) + + return task, nil +} + +func (c *Computer) saveCheckpoint(task *ComputerTask, waveIdx, subTaskIdx int, reason string) { + checkpoint := Checkpoint{ + ID: uuid.New().String(), + TaskID: task.ID, + WaveIndex: waveIdx, + SubTaskIndex: subTaskIdx, + State: make(map[string]interface{}), + Progress: task.Progress, + Memory: task.Memory, + CreatedAt: time.Now(), + RuntimeSoFar: time.Since(task.CreatedAt), + CostSoFar: task.TotalCost, + Reason: reason, + } + + for _, artifact := range task.Artifacts { + checkpoint.Artifacts = append(checkpoint.Artifacts, artifact.ID) + } + + task.Checkpoint = &checkpoint + task.Checkpoints = append(task.Checkpoints, checkpoint) + task.UpdatedAt = time.Now() + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + c.taskRepo.Update(ctx, task) + + c.emitEvent(TaskEvent{ + Type: EventCheckpointSaved, + TaskID: task.ID, + Progress: task.Progress, + Message: fmt.Sprintf("Checkpoint saved: %s (wave %d)", reason, waveIdx), + Data: map[string]interface{}{ + "checkpointId": checkpoint.ID, + "waveIndex": waveIdx, + 
"subTaskIndex": subTaskIdx, + "reason": reason, + "runtime": checkpoint.RuntimeSoFar.String(), + "cost": checkpoint.CostSoFar, + }, + Timestamp: time.Now(), + }) +} + +func (c *Computer) Pause(ctx context.Context, taskID string) error { + c.mu.Lock() + task, ok := c.tasks[taskID] + if !ok { + c.mu.Unlock() + var err error + task, err = c.taskRepo.GetByID(ctx, taskID) + if err != nil { + return err + } + c.mu.Lock() + } + + if task.Status != StatusExecuting && task.Status != StatusLongRunning { + c.mu.Unlock() + return errors.New("task is not running") + } + + now := time.Now() + task.Status = StatusPaused + task.PausedAt = &now + task.UpdatedAt = now + c.mu.Unlock() + + c.saveCheckpoint(task, task.Iterations, 0, "user_paused") + + c.emitEvent(TaskEvent{ + Type: EventPaused, + TaskID: taskID, + Status: StatusPaused, + Progress: task.Progress, + Message: "Task paused by user", + Timestamp: now, + }) + + return c.taskRepo.Update(ctx, task) +} + +func (c *Computer) Resume(ctx context.Context, taskID string, userInput string) error { + c.mu.RLock() + task, ok := c.tasks[taskID] + c.mu.RUnlock() + + if !ok { + var err error + task, err = c.taskRepo.GetByID(ctx, taskID) + if err != nil { + return fmt.Errorf("task not found: %w", err) + } + } + + if task.Status != StatusWaiting { + return errors.New("task is not waiting for user input") + } + + task.Memory["user_input"] = userInput + task.Status = StatusExecuting + task.UpdatedAt = time.Now() + + go c.executeTask(context.Background(), task, ExecuteOptions{Async: true}) + + return nil +} + +func (c *Computer) Cancel(ctx context.Context, taskID string) error { + c.mu.Lock() + task, ok := c.tasks[taskID] + if ok { + task.Status = StatusCancelled + task.UpdatedAt = time.Now() + } + c.mu.Unlock() + + if !ok { + task, err := c.taskRepo.GetByID(ctx, taskID) + if err != nil { + return fmt.Errorf("task not found: %w", err) + } + task.Status = StatusCancelled + task.UpdatedAt = time.Now() + return c.taskRepo.Update(ctx, task) + } + 
+ c.emitEvent(TaskEvent{ + Type: EventTaskFailed, + TaskID: taskID, + Status: StatusCancelled, + Message: "Task cancelled by user", + Timestamp: time.Now(), + }) + + return c.taskRepo.Update(ctx, task) +} + +func (c *Computer) GetStatus(ctx context.Context, taskID string) (*ComputerTask, error) { + c.mu.RLock() + task, ok := c.tasks[taskID] + c.mu.RUnlock() + + if ok { + return task, nil + } + + return c.taskRepo.GetByID(ctx, taskID) +} + +func (c *Computer) GetUserTasks(ctx context.Context, userID string, limit, offset int) ([]ComputerTask, error) { + return c.taskRepo.GetByUserID(ctx, userID, limit, offset) +} + +func (c *Computer) Stream(ctx context.Context, taskID string) (<-chan TaskEvent, error) { + return c.eventBus.Subscribe(taskID), nil +} + +func (c *Computer) updateTask(ctx context.Context, task *ComputerTask) { + c.mu.Lock() + c.tasks[task.ID] = task + c.mu.Unlock() + + _ = c.taskRepo.Update(ctx, task) +} + +func (c *Computer) emitEvent(event TaskEvent) { + c.eventBus.Publish(event.TaskID, event) +} + +func (c *Computer) storeTaskResults(ctx context.Context, task *ComputerTask) { + for _, st := range task.SubTasks { + if st.Output != nil { + outputJSON, _ := json.Marshal(st.Output) + entry := &MemoryEntry{ + ID: uuid.New().String(), + UserID: task.UserID, + TaskID: task.ID, + Key: fmt.Sprintf("subtask_%s_result", st.ID), + Value: string(outputJSON), + Type: MemoryTypeResult, + CreatedAt: time.Now(), + } + _ = c.memory.Store(ctx, task.UserID, entry) + } + } +} + +func (c *Computer) StartScheduler(ctx context.Context) { + if c.scheduler != nil { + c.scheduler.Start(ctx) + } +} + +func (c *Computer) StopScheduler() { + if c.scheduler != nil { + c.scheduler.Stop() + } +} + +type EventBus struct { + subscribers map[string][]chan TaskEvent + mu sync.RWMutex +} + +func NewEventBus() *EventBus { + return &EventBus{ + subscribers: make(map[string][]chan TaskEvent), + } +} + +func (eb *EventBus) Subscribe(taskID string) <-chan TaskEvent { + eb.mu.Lock() + defer 
eb.mu.Unlock() + + ch := make(chan TaskEvent, 100) + eb.subscribers[taskID] = append(eb.subscribers[taskID], ch) + return ch +} + +func (eb *EventBus) Unsubscribe(taskID string, ch <-chan TaskEvent) { + eb.mu.Lock() + defer eb.mu.Unlock() + + subs := eb.subscribers[taskID] + for i, sub := range subs { + if sub == ch { + eb.subscribers[taskID] = append(subs[:i], subs[i+1:]...) + close(sub) + break + } + } +} + +func (eb *EventBus) Publish(taskID string, event TaskEvent) { + eb.mu.RLock() + subs := eb.subscribers[taskID] + eb.mu.RUnlock() + + for _, ch := range subs { + select { + case ch <- event: + default: + } + } +} diff --git a/backend/internal/computer/connectors/connector.go b/backend/internal/computer/connectors/connector.go new file mode 100644 index 0000000..4fa2902 --- /dev/null +++ b/backend/internal/computer/connectors/connector.go @@ -0,0 +1,104 @@ +package connectors + +import ( + "context" + "errors" + "sync" +) + +type Connector interface { + ID() string + Name() string + Description() string + Execute(ctx context.Context, action string, params map[string]interface{}) (interface{}, error) + GetActions() []Action + Validate(params map[string]interface{}) error +} + +type Action struct { + Name string `json:"name"` + Description string `json:"description"` + Schema map[string]interface{} `json:"schema"` + Required []string `json:"required"` +} + +type ConnectorHub struct { + connectors map[string]Connector + mu sync.RWMutex +} + +func NewConnectorHub() *ConnectorHub { + return &ConnectorHub{ + connectors: make(map[string]Connector), + } +} + +func (h *ConnectorHub) Register(connector Connector) { + h.mu.Lock() + defer h.mu.Unlock() + h.connectors[connector.ID()] = connector +} + +func (h *ConnectorHub) Unregister(id string) { + h.mu.Lock() + defer h.mu.Unlock() + delete(h.connectors, id) +} + +func (h *ConnectorHub) Get(id string) (Connector, error) { + h.mu.RLock() + defer h.mu.RUnlock() + + connector, ok := h.connectors[id] + if !ok { + return nil, 
errors.New("connector not found: " + id) + } + return connector, nil +} + +func (h *ConnectorHub) List() []Connector { + h.mu.RLock() + defer h.mu.RUnlock() + + result := make([]Connector, 0, len(h.connectors)) + for _, c := range h.connectors { + result = append(result, c) + } + return result +} + +func (h *ConnectorHub) Execute(ctx context.Context, connectorID, action string, params map[string]interface{}) (interface{}, error) { + connector, err := h.Get(connectorID) + if err != nil { + return nil, err + } + + if err := connector.Validate(params); err != nil { + return nil, err + } + + return connector.Execute(ctx, action, params) +} + +type ConnectorInfo struct { + ID string `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + Actions []Action `json:"actions"` +} + +func (h *ConnectorHub) GetInfo() []ConnectorInfo { + h.mu.RLock() + defer h.mu.RUnlock() + + result := make([]ConnectorInfo, 0, len(h.connectors)) + for _, c := range h.connectors { + result = append(result, ConnectorInfo{ + ID: c.ID(), + Name: c.Name(), + Description: c.Description(), + Actions: c.GetActions(), + }) + } + return result +} diff --git a/backend/internal/computer/connectors/email.go b/backend/internal/computer/connectors/email.go new file mode 100644 index 0000000..7700fbb --- /dev/null +++ b/backend/internal/computer/connectors/email.go @@ -0,0 +1,215 @@ +package connectors + +import ( + "context" + "crypto/tls" + "errors" + "fmt" + "net/smtp" + "strings" +) + +type EmailConfig struct { + SMTPHost string + SMTPPort int + Username string + Password string + FromAddress string + FromName string + UseTLS bool + AllowHTML bool +} + +type EmailConnector struct { + cfg EmailConfig +} + +func NewEmailConnector(cfg EmailConfig) *EmailConnector { + return &EmailConnector{cfg: cfg} +} + +func (e *EmailConnector) ID() string { + return "email" +} + +func (e *EmailConnector) Name() string { + return "Email" +} + +func (e *EmailConnector) Description() string { + 
return "Send emails via SMTP" +} + +func (e *EmailConnector) GetActions() []Action { + return []Action{ + { + Name: "send", + Description: "Send an email", + Schema: map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "to": map[string]interface{}{"type": "string", "description": "Recipient email address"}, + "subject": map[string]interface{}{"type": "string", "description": "Email subject"}, + "body": map[string]interface{}{"type": "string", "description": "Email body"}, + "html": map[string]interface{}{"type": "boolean", "description": "Whether body is HTML"}, + "cc": map[string]interface{}{"type": "string", "description": "CC recipients (comma-separated)"}, + "bcc": map[string]interface{}{"type": "string", "description": "BCC recipients (comma-separated)"}, + }, + }, + Required: []string{"to", "subject", "body"}, + }, + } +} + +func (e *EmailConnector) Validate(params map[string]interface{}) error { + if _, ok := params["to"]; !ok { + return errors.New("'to' is required") + } + if _, ok := params["subject"]; !ok { + return errors.New("'subject' is required") + } + if _, ok := params["body"]; !ok { + return errors.New("'body' is required") + } + return nil +} + +func (e *EmailConnector) Execute(ctx context.Context, action string, params map[string]interface{}) (interface{}, error) { + switch action { + case "send": + return e.send(ctx, params) + default: + return nil, errors.New("unknown action: " + action) + } +} + +func (e *EmailConnector) send(ctx context.Context, params map[string]interface{}) (interface{}, error) { + to := params["to"].(string) + subject := params["subject"].(string) + body := params["body"].(string) + + isHTML := false + if html, ok := params["html"].(bool); ok { + isHTML = html && e.cfg.AllowHTML + } + + var cc, bcc []string + if ccStr, ok := params["cc"].(string); ok && ccStr != "" { + cc = strings.Split(ccStr, ",") + for i := range cc { + cc[i] = strings.TrimSpace(cc[i]) + } + } + if bccStr, ok := 
params["bcc"].(string); ok && bccStr != "" { + bcc = strings.Split(bccStr, ",") + for i := range bcc { + bcc[i] = strings.TrimSpace(bcc[i]) + } + } + + from := e.cfg.FromAddress + if e.cfg.FromName != "" { + from = fmt.Sprintf("%s <%s>", e.cfg.FromName, e.cfg.FromAddress) + } + + var msg strings.Builder + msg.WriteString(fmt.Sprintf("From: %s\r\n", from)) + msg.WriteString(fmt.Sprintf("To: %s\r\n", to)) + if len(cc) > 0 { + msg.WriteString(fmt.Sprintf("Cc: %s\r\n", strings.Join(cc, ", "))) + } + msg.WriteString(fmt.Sprintf("Subject: %s\r\n", subject)) + msg.WriteString("MIME-Version: 1.0\r\n") + + if isHTML { + msg.WriteString("Content-Type: text/html; charset=\"UTF-8\"\r\n") + } else { + msg.WriteString("Content-Type: text/plain; charset=\"UTF-8\"\r\n") + } + + msg.WriteString("\r\n") + msg.WriteString(body) + + recipients := []string{to} + recipients = append(recipients, cc...) + recipients = append(recipients, bcc...) + + addr := fmt.Sprintf("%s:%d", e.cfg.SMTPHost, e.cfg.SMTPPort) + + var auth smtp.Auth + if e.cfg.Username != "" && e.cfg.Password != "" { + auth = smtp.PlainAuth("", e.cfg.Username, e.cfg.Password, e.cfg.SMTPHost) + } + + var err error + if e.cfg.UseTLS { + err = e.sendWithTLS(addr, auth, e.cfg.FromAddress, recipients, []byte(msg.String())) + } else { + err = smtp.SendMail(addr, auth, e.cfg.FromAddress, recipients, []byte(msg.String())) + } + + if err != nil { + return map[string]interface{}{ + "success": false, + "error": err.Error(), + }, err + } + + return map[string]interface{}{ + "success": true, + "to": to, + "subject": subject, + "recipients": len(recipients), + }, nil +} + +func (e *EmailConnector) sendWithTLS(addr string, auth smtp.Auth, from string, to []string, msg []byte) error { + tlsConfig := &tls.Config{ + ServerName: e.cfg.SMTPHost, + } + + conn, err := tls.Dial("tcp", addr, tlsConfig) + if err != nil { + return err + } + defer conn.Close() + + client, err := smtp.NewClient(conn, e.cfg.SMTPHost) + if err != nil { + return err + } 
+ defer client.Close() + + if auth != nil { + if err := client.Auth(auth); err != nil { + return err + } + } + + if err := client.Mail(from); err != nil { + return err + } + + for _, recipient := range to { + if err := client.Rcpt(recipient); err != nil { + return err + } + } + + w, err := client.Data() + if err != nil { + return err + } + + _, err = w.Write(msg) + if err != nil { + return err + } + + err = w.Close() + if err != nil { + return err + } + + return client.Quit() +} diff --git a/backend/internal/computer/connectors/storage.go b/backend/internal/computer/connectors/storage.go new file mode 100644 index 0000000..1872f13 --- /dev/null +++ b/backend/internal/computer/connectors/storage.go @@ -0,0 +1,432 @@ +package connectors + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "os" + "path/filepath" + "strings" + "time" + + "github.com/minio/minio-go/v7" + "github.com/minio/minio-go/v7/pkg/credentials" +) + +type StorageConfig struct { + Endpoint string + AccessKeyID string + SecretAccessKey string + BucketName string + UseSSL bool + Region string + PublicURL string +} + +type StorageConnector struct { + cfg StorageConfig + client *minio.Client +} + +func NewStorageConnector(cfg StorageConfig) (*StorageConnector, error) { + client, err := minio.New(cfg.Endpoint, &minio.Options{ + Creds: credentials.NewStaticV4(cfg.AccessKeyID, cfg.SecretAccessKey, ""), + Secure: cfg.UseSSL, + Region: cfg.Region, + }) + if err != nil { + return nil, fmt.Errorf("failed to create storage client: %w", err) + } + + return &StorageConnector{ + cfg: cfg, + client: client, + }, nil +} + +func (s *StorageConnector) ID() string { + return "storage" +} + +func (s *StorageConnector) Name() string { + return "Storage" +} + +func (s *StorageConnector) Description() string { + return "Store and retrieve files from S3-compatible storage" +} + +func (s *StorageConnector) GetActions() []Action { + return []Action{ + { + Name: "upload", + Description: "Upload a file", + Schema: 
map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "path": map[string]interface{}{"type": "string", "description": "Storage path/key"}, + "content": map[string]interface{}{"type": "string", "description": "File content (base64 or text)"}, + "content_type": map[string]interface{}{"type": "string", "description": "MIME type"}, + "public": map[string]interface{}{"type": "boolean", "description": "Make file publicly accessible"}, + }, + }, + Required: []string{"path", "content"}, + }, + { + Name: "download", + Description: "Download a file", + Schema: map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "path": map[string]interface{}{"type": "string", "description": "Storage path/key"}, + }, + }, + Required: []string{"path"}, + }, + { + Name: "delete", + Description: "Delete a file", + Schema: map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "path": map[string]interface{}{"type": "string", "description": "Storage path/key"}, + }, + }, + Required: []string{"path"}, + }, + { + Name: "list", + Description: "List files in a directory", + Schema: map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "prefix": map[string]interface{}{"type": "string", "description": "Path prefix"}, + "limit": map[string]interface{}{"type": "integer", "description": "Max results"}, + }, + }, + }, + { + Name: "get_url", + Description: "Get a presigned URL for a file", + Schema: map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "path": map[string]interface{}{"type": "string", "description": "Storage path/key"}, + "expires": map[string]interface{}{"type": "integer", "description": "URL expiry in seconds"}, + }, + }, + Required: []string{"path"}, + }, + } +} + +func (s *StorageConnector) Validate(params map[string]interface{}) error { + return nil +} + +func (s *StorageConnector) Execute(ctx context.Context, action string, 
params map[string]interface{}) (interface{}, error) { + switch action { + case "upload": + return s.upload(ctx, params) + case "download": + return s.download(ctx, params) + case "delete": + return s.deleteFile(ctx, params) + case "list": + return s.list(ctx, params) + case "get_url": + return s.getURL(ctx, params) + default: + return nil, errors.New("unknown action: " + action) + } +} + +func (s *StorageConnector) upload(ctx context.Context, params map[string]interface{}) (interface{}, error) { + path := params["path"].(string) + content := params["content"].(string) + + contentType := "application/octet-stream" + if ct, ok := params["content_type"].(string); ok { + contentType = ct + } + + if contentType == "" { + contentType = s.detectContentType(path) + } + + reader := bytes.NewReader([]byte(content)) + size := int64(len(content)) + + info, err := s.client.PutObject(ctx, s.cfg.BucketName, path, reader, size, minio.PutObjectOptions{ + ContentType: contentType, + }) + if err != nil { + return nil, fmt.Errorf("upload failed: %w", err) + } + + url := "" + if s.cfg.PublicURL != "" { + url = fmt.Sprintf("%s/%s/%s", strings.TrimSuffix(s.cfg.PublicURL, "/"), s.cfg.BucketName, path) + } + + return map[string]interface{}{ + "success": true, + "path": path, + "size": info.Size, + "etag": info.ETag, + "url": url, + }, nil +} + +func (s *StorageConnector) UploadBytes(ctx context.Context, path string, content []byte, contentType string) (string, error) { + if contentType == "" { + contentType = s.detectContentType(path) + } + + reader := bytes.NewReader(content) + size := int64(len(content)) + + _, err := s.client.PutObject(ctx, s.cfg.BucketName, path, reader, size, minio.PutObjectOptions{ + ContentType: contentType, + }) + if err != nil { + return "", err + } + + if s.cfg.PublicURL != "" { + return fmt.Sprintf("%s/%s/%s", strings.TrimSuffix(s.cfg.PublicURL, "/"), s.cfg.BucketName, path), nil + } + + return path, nil +} + +func (s *StorageConnector) download(ctx 
context.Context, params map[string]interface{}) (interface{}, error) { + path := params["path"].(string) + + obj, err := s.client.GetObject(ctx, s.cfg.BucketName, path, minio.GetObjectOptions{}) + if err != nil { + return nil, fmt.Errorf("download failed: %w", err) + } + defer obj.Close() + + content, err := io.ReadAll(obj) + if err != nil { + return nil, fmt.Errorf("read failed: %w", err) + } + + stat, _ := obj.Stat() + + return map[string]interface{}{ + "success": true, + "path": path, + "content": string(content), + "size": len(content), + "content_type": stat.ContentType, + "modified": stat.LastModified, + }, nil +} + +func (s *StorageConnector) DownloadBytes(ctx context.Context, path string) ([]byte, error) { + obj, err := s.client.GetObject(ctx, s.cfg.BucketName, path, minio.GetObjectOptions{}) + if err != nil { + return nil, err + } + defer obj.Close() + + return io.ReadAll(obj) +} + +func (s *StorageConnector) deleteFile(ctx context.Context, params map[string]interface{}) (interface{}, error) { + path := params["path"].(string) + + err := s.client.RemoveObject(ctx, s.cfg.BucketName, path, minio.RemoveObjectOptions{}) + if err != nil { + return nil, fmt.Errorf("delete failed: %w", err) + } + + return map[string]interface{}{ + "success": true, + "path": path, + }, nil +} + +func (s *StorageConnector) list(ctx context.Context, params map[string]interface{}) (interface{}, error) { + prefix := "" + if p, ok := params["prefix"].(string); ok { + prefix = p + } + + limit := 100 + if l, ok := params["limit"].(float64); ok { + limit = int(l) + } + + objects := s.client.ListObjects(ctx, s.cfg.BucketName, minio.ListObjectsOptions{ + Prefix: prefix, + Recursive: true, + }) + + var files []map[string]interface{} + count := 0 + + for obj := range objects { + if obj.Err != nil { + continue + } + + files = append(files, map[string]interface{}{ + "path": obj.Key, + "size": obj.Size, + "modified": obj.LastModified, + "etag": obj.ETag, + }) + + count++ + if count >= limit { + 
break + } + } + + return map[string]interface{}{ + "success": true, + "files": files, + "count": len(files), + }, nil +} + +func (s *StorageConnector) getURL(ctx context.Context, params map[string]interface{}) (interface{}, error) { + path := params["path"].(string) + + expires := 3600 + if e, ok := params["expires"].(float64); ok { + expires = int(e) + } + + url, err := s.client.PresignedGetObject(ctx, s.cfg.BucketName, path, time.Duration(expires)*time.Second, nil) + if err != nil { + return nil, fmt.Errorf("failed to generate URL: %w", err) + } + + return map[string]interface{}{ + "success": true, + "url": url.String(), + "expires": expires, + }, nil +} + +func (s *StorageConnector) GetPublicURL(path string) string { + if s.cfg.PublicURL != "" { + return fmt.Sprintf("%s/%s/%s", strings.TrimSuffix(s.cfg.PublicURL, "/"), s.cfg.BucketName, path) + } + return "" +} + +func (s *StorageConnector) detectContentType(path string) string { + ext := strings.ToLower(filepath.Ext(path)) + + contentTypes := map[string]string{ + ".html": "text/html", + ".css": "text/css", + ".js": "application/javascript", + ".json": "application/json", + ".xml": "application/xml", + ".pdf": "application/pdf", + ".zip": "application/zip", + ".png": "image/png", + ".jpg": "image/jpeg", + ".jpeg": "image/jpeg", + ".gif": "image/gif", + ".svg": "image/svg+xml", + ".mp4": "video/mp4", + ".mp3": "audio/mpeg", + ".txt": "text/plain", + ".md": "text/markdown", + ".csv": "text/csv", + ".py": "text/x-python", + ".go": "text/x-go", + ".rs": "text/x-rust", + } + + if ct, ok := contentTypes[ext]; ok { + return ct + } + + return "application/octet-stream" +} + +func (s *StorageConnector) EnsureBucket(ctx context.Context) error { + exists, err := s.client.BucketExists(ctx, s.cfg.BucketName) + if err != nil { + return err + } + + if !exists { + return s.client.MakeBucket(ctx, s.cfg.BucketName, minio.MakeBucketOptions{ + Region: s.cfg.Region, + }) + } + + return nil +} + +func 
NewLocalStorageConnector(basePath string) *LocalStorageConnector {
	return &LocalStorageConnector{basePath: basePath}
}

// LocalStorageConnector stores files on the local filesystem, rooted at
// basePath. It implements the same connector interface as StorageConnector
// and serves as a lightweight fallback when no object store is configured.
type LocalStorageConnector struct {
	basePath string
}

func (l *LocalStorageConnector) ID() string {
	return "local_storage"
}

func (l *LocalStorageConnector) Name() string {
	return "Local Storage"
}

func (l *LocalStorageConnector) Description() string {
	return "Store files on local filesystem"
}

// GetActions lists the actions Execute supports.
func (l *LocalStorageConnector) GetActions() []Action {
	return []Action{
		{Name: "upload", Description: "Upload a file"},
		{Name: "download", Description: "Download a file"},
		{Name: "delete", Description: "Delete a file"},
		{Name: "list", Description: "List files"},
	}
}

func (l *LocalStorageConnector) Validate(params map[string]interface{}) error {
	return nil
}

// stringParam fetches a required string parameter, returning an error
// instead of panicking when the key is missing or has the wrong type.
func (l *LocalStorageConnector) stringParam(params map[string]interface{}, key string) (string, error) {
	v, ok := params[key].(string)
	if !ok || v == "" {
		return "", fmt.Errorf("'%s' must be a non-empty string", key)
	}
	return v, nil
}

// resolve joins rel onto the storage root and rejects any path that would
// escape it (e.g. "../../etc/passwd"). Connector params may originate from
// LLM-planned actions, so they are treated as untrusted input.
func (l *LocalStorageConnector) resolve(rel string) (string, error) {
	base := filepath.Clean(l.basePath)
	full := filepath.Join(base, rel)
	if full != base && !strings.HasPrefix(full, base+string(filepath.Separator)) {
		return "", fmt.Errorf("path escapes storage root: %s", rel)
	}
	return full, nil
}

// Execute dispatches a storage action. It never panics on missing or
// mistyped params, checks the MkdirAll error, confines all paths to
// basePath, and implements the advertised "list" action.
func (l *LocalStorageConnector) Execute(ctx context.Context, action string, params map[string]interface{}) (interface{}, error) {
	switch action {
	case "upload":
		path, err := l.stringParam(params, "path")
		if err != nil {
			return nil, err
		}
		content, ok := params["content"].(string)
		if !ok {
			return nil, errors.New("'content' must be a string")
		}
		fullPath, err := l.resolve(path)
		if err != nil {
			return nil, err
		}
		if err := os.MkdirAll(filepath.Dir(fullPath), 0755); err != nil {
			return nil, fmt.Errorf("mkdir failed: %w", err)
		}
		err = os.WriteFile(fullPath, []byte(content), 0644)
		return map[string]interface{}{"success": err == nil, "path": path}, err

	case "download":
		path, err := l.stringParam(params, "path")
		if err != nil {
			return nil, err
		}
		fullPath, err := l.resolve(path)
		if err != nil {
			return nil, err
		}
		content, err := os.ReadFile(fullPath)
		return map[string]interface{}{"success": err == nil, "content": string(content)}, err

	case "delete":
		path, err := l.stringParam(params, "path")
		if err != nil {
			return nil, err
		}
		fullPath, err := l.resolve(path)
		if err != nil {
			return nil, err
		}
		err = os.Remove(fullPath)
		return map[string]interface{}{"success": err == nil}, err

	case "list":
		prefix := ""
		if p, ok := params["prefix"].(string); ok {
			prefix = p
		}
		root := filepath.Clean(l.basePath)
		var files []map[string]interface{}
		// Per-entry walk errors are skipped so one unreadable file does
		// not abort the whole listing.
		_ = filepath.WalkDir(root, func(p string, d os.DirEntry, err error) error {
			if err != nil || d.IsDir() {
				return nil
			}
			rel, relErr := filepath.Rel(root, p)
			if relErr != nil {
				return nil
			}
			if prefix != "" && !strings.HasPrefix(rel, prefix) {
				return nil
			}
			info, infoErr := d.Info()
			if infoErr != nil {
				return nil
			}
			files = append(files, map[string]interface{}{
				"path":     rel,
				"size":     info.Size(),
				"modified": info.ModTime(),
			})
			return nil
		})
		return map[string]interface{}{"success": true, "files": files, "count": len(files)}, nil

	default:
		return nil, errors.New("unknown action")
	}
}
diff --git a/backend/internal/computer/connectors/telegram.go b/backend/internal/computer/connectors/telegram.go
new file mode 100644
index 0000000..a6cfc9e
--- /dev/null
+++ b/backend/internal/computer/connectors/telegram.go
@@ -0,0
+1,263 @@ +package connectors + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "mime/multipart" + "net/http" + "strconv" + "time" +) + +type TelegramConfig struct { + BotToken string + Timeout time.Duration +} + +type TelegramConnector struct { + cfg TelegramConfig + client *http.Client +} + +func NewTelegramConnector(cfg TelegramConfig) *TelegramConnector { + timeout := cfg.Timeout + if timeout == 0 { + timeout = 30 * time.Second + } + + return &TelegramConnector{ + cfg: cfg, + client: &http.Client{ + Timeout: timeout, + }, + } +} + +func (t *TelegramConnector) ID() string { + return "telegram" +} + +func (t *TelegramConnector) Name() string { + return "Telegram" +} + +func (t *TelegramConnector) Description() string { + return "Send messages via Telegram Bot API" +} + +func (t *TelegramConnector) GetActions() []Action { + return []Action{ + { + Name: "send_message", + Description: "Send a text message", + Schema: map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "chat_id": map[string]interface{}{"type": "string", "description": "Chat ID or @username"}, + "text": map[string]interface{}{"type": "string", "description": "Message text"}, + "parse_mode": map[string]interface{}{"type": "string", "enum": []string{"HTML", "Markdown", "MarkdownV2"}}, + }, + }, + Required: []string{"chat_id", "text"}, + }, + { + Name: "send_document", + Description: "Send a document/file", + Schema: map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "chat_id": map[string]interface{}{"type": "string", "description": "Chat ID"}, + "document": map[string]interface{}{"type": "string", "description": "File path or URL"}, + "caption": map[string]interface{}{"type": "string", "description": "Document caption"}, + }, + }, + Required: []string{"chat_id", "document"}, + }, + { + Name: "send_photo", + Description: "Send a photo", + Schema: map[string]interface{}{ + "type": "object", + "properties": 
map[string]interface{}{ + "chat_id": map[string]interface{}{"type": "string", "description": "Chat ID"}, + "photo": map[string]interface{}{"type": "string", "description": "Photo URL or file_id"}, + "caption": map[string]interface{}{"type": "string", "description": "Photo caption"}, + }, + }, + Required: []string{"chat_id", "photo"}, + }, + } +} + +func (t *TelegramConnector) Validate(params map[string]interface{}) error { + if _, ok := params["chat_id"]; !ok { + return errors.New("'chat_id' is required") + } + return nil +} + +func (t *TelegramConnector) Execute(ctx context.Context, action string, params map[string]interface{}) (interface{}, error) { + switch action { + case "send_message": + return t.sendMessage(ctx, params) + case "send_document": + return t.sendDocument(ctx, params) + case "send_photo": + return t.sendPhoto(ctx, params) + default: + return nil, errors.New("unknown action: " + action) + } +} + +func (t *TelegramConnector) sendMessage(ctx context.Context, params map[string]interface{}) (interface{}, error) { + chatID := params["chat_id"].(string) + text := params["text"].(string) + + payload := map[string]interface{}{ + "chat_id": chatID, + "text": text, + } + + if parseMode, ok := params["parse_mode"].(string); ok { + payload["parse_mode"] = parseMode + } + + return t.apiCall(ctx, "sendMessage", payload) +} + +func (t *TelegramConnector) sendDocument(ctx context.Context, params map[string]interface{}) (interface{}, error) { + chatID := params["chat_id"].(string) + document := params["document"].(string) + + payload := map[string]interface{}{ + "chat_id": chatID, + "document": document, + } + + if caption, ok := params["caption"].(string); ok { + payload["caption"] = caption + } + + return t.apiCall(ctx, "sendDocument", payload) +} + +func (t *TelegramConnector) sendPhoto(ctx context.Context, params map[string]interface{}) (interface{}, error) { + chatID := params["chat_id"].(string) + photo := params["photo"].(string) + + payload := 
map[string]interface{}{ + "chat_id": chatID, + "photo": photo, + } + + if caption, ok := params["caption"].(string); ok { + payload["caption"] = caption + } + + return t.apiCall(ctx, "sendPhoto", payload) +} + +func (t *TelegramConnector) apiCall(ctx context.Context, method string, payload map[string]interface{}) (interface{}, error) { + url := fmt.Sprintf("https://api.telegram.org/bot%s/%s", t.cfg.BotToken, method) + + body, err := json.Marshal(payload) + if err != nil { + return nil, err + } + + req, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewReader(body)) + if err != nil { + return nil, err + } + req.Header.Set("Content-Type", "application/json") + + resp, err := t.client.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + var result map[string]interface{} + if err := json.Unmarshal(respBody, &result); err != nil { + return nil, err + } + + if ok, exists := result["ok"].(bool); exists && !ok { + desc := "unknown error" + if d, exists := result["description"].(string); exists { + desc = d + } + return result, errors.New("Telegram API error: " + desc) + } + + return result, nil +} + +func (t *TelegramConnector) SendFileFromBytes(ctx context.Context, chatID string, filename string, content []byte, caption string) (interface{}, error) { + url := fmt.Sprintf("https://api.telegram.org/bot%s/sendDocument", t.cfg.BotToken) + + var b bytes.Buffer + w := multipart.NewWriter(&b) + + w.WriteField("chat_id", chatID) + + if caption != "" { + w.WriteField("caption", caption) + } + + fw, err := w.CreateFormFile("document", filename) + if err != nil { + return nil, err + } + fw.Write(content) + + w.Close() + + req, err := http.NewRequestWithContext(ctx, "POST", url, &b) + if err != nil { + return nil, err + } + req.Header.Set("Content-Type", w.FormDataContentType()) + + resp, err := t.client.Do(req) + if err != nil { + return nil, err + } + defer 
resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + var result map[string]interface{} + if err := json.Unmarshal(respBody, &result); err != nil { + return nil, err + } + + return result, nil +} + +func (t *TelegramConnector) GetChatID(chatIDOrUsername interface{}) string { + switch v := chatIDOrUsername.(type) { + case string: + return v + case int: + return strconv.Itoa(v) + case int64: + return strconv.FormatInt(v, 10) + case float64: + return strconv.FormatInt(int64(v), 10) + default: + return fmt.Sprintf("%v", v) + } +} diff --git a/backend/internal/computer/connectors/webhook.go b/backend/internal/computer/connectors/webhook.go new file mode 100644 index 0000000..314c37c --- /dev/null +++ b/backend/internal/computer/connectors/webhook.go @@ -0,0 +1,275 @@ +package connectors + +import ( + "bytes" + "context" + "crypto/hmac" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + "time" +) + +type WebhookConfig struct { + Timeout time.Duration + MaxRetries int + RetryDelay time.Duration + DefaultSecret string +} + +type WebhookConnector struct { + cfg WebhookConfig + client *http.Client +} + +func NewWebhookConnector(cfg WebhookConfig) *WebhookConnector { + timeout := cfg.Timeout + if timeout == 0 { + timeout = 30 * time.Second + } + if cfg.MaxRetries == 0 { + cfg.MaxRetries = 3 + } + if cfg.RetryDelay == 0 { + cfg.RetryDelay = time.Second + } + + return &WebhookConnector{ + cfg: cfg, + client: &http.Client{ + Timeout: timeout, + }, + } +} + +func (w *WebhookConnector) ID() string { + return "webhook" +} + +func (w *WebhookConnector) Name() string { + return "Webhook" +} + +func (w *WebhookConnector) Description() string { + return "Send HTTP webhooks to external services" +} + +func (w *WebhookConnector) GetActions() []Action { + return []Action{ + { + Name: "post", + Description: "Send POST request", + Schema: map[string]interface{}{ + "type": 
"object", + "properties": map[string]interface{}{ + "url": map[string]interface{}{"type": "string", "description": "Webhook URL"}, + "body": map[string]interface{}{"type": "object", "description": "Request body (JSON)"}, + "headers": map[string]interface{}{"type": "object", "description": "Custom headers"}, + "secret": map[string]interface{}{"type": "string", "description": "HMAC secret for signing"}, + }, + }, + Required: []string{"url"}, + }, + { + Name: "get", + Description: "Send GET request", + Schema: map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "url": map[string]interface{}{"type": "string", "description": "Request URL"}, + "params": map[string]interface{}{"type": "object", "description": "Query parameters"}, + "headers": map[string]interface{}{"type": "object", "description": "Custom headers"}, + }, + }, + Required: []string{"url"}, + }, + { + Name: "put", + Description: "Send PUT request", + Schema: map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "url": map[string]interface{}{"type": "string", "description": "Request URL"}, + "body": map[string]interface{}{"type": "object", "description": "Request body (JSON)"}, + "headers": map[string]interface{}{"type": "object", "description": "Custom headers"}, + }, + }, + Required: []string{"url"}, + }, + } +} + +func (w *WebhookConnector) Validate(params map[string]interface{}) error { + urlStr, ok := params["url"].(string) + if !ok { + return errors.New("'url' is required") + } + + parsed, err := url.Parse(urlStr) + if err != nil { + return fmt.Errorf("invalid URL: %w", err) + } + + if parsed.Scheme != "http" && parsed.Scheme != "https" { + return errors.New("URL must use http or https scheme") + } + + return nil +} + +func (w *WebhookConnector) Execute(ctx context.Context, action string, params map[string]interface{}) (interface{}, error) { + switch action { + case "post": + return w.doRequest(ctx, "POST", params) + case "get": + return 
w.doRequest(ctx, "GET", params) + case "put": + return w.doRequest(ctx, "PUT", params) + case "delete": + return w.doRequest(ctx, "DELETE", params) + case "patch": + return w.doRequest(ctx, "PATCH", params) + default: + return nil, errors.New("unknown action: " + action) + } +} + +func (w *WebhookConnector) doRequest(ctx context.Context, method string, params map[string]interface{}) (interface{}, error) { + urlStr := params["url"].(string) + + if method == "GET" { + if queryParams, ok := params["params"].(map[string]interface{}); ok { + parsedURL, _ := url.Parse(urlStr) + q := parsedURL.Query() + for k, v := range queryParams { + q.Set(k, fmt.Sprintf("%v", v)) + } + parsedURL.RawQuery = q.Encode() + urlStr = parsedURL.String() + } + } + + var bodyReader io.Reader + var bodyBytes []byte + + if body, ok := params["body"]; ok && method != "GET" { + var err error + bodyBytes, err = json.Marshal(body) + if err != nil { + return nil, fmt.Errorf("failed to marshal body: %w", err) + } + bodyReader = bytes.NewReader(bodyBytes) + } + + var lastErr error + for attempt := 0; attempt <= w.cfg.MaxRetries; attempt++ { + if attempt > 0 { + time.Sleep(w.cfg.RetryDelay * time.Duration(attempt)) + if bodyBytes != nil { + bodyReader = bytes.NewReader(bodyBytes) + } + } + + req, err := http.NewRequestWithContext(ctx, method, urlStr, bodyReader) + if err != nil { + return nil, err + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("User-Agent", "GooSeek-Computer/1.0") + + if headers, ok := params["headers"].(map[string]interface{}); ok { + for k, v := range headers { + req.Header.Set(k, fmt.Sprintf("%v", v)) + } + } + + if bodyBytes != nil { + secret := w.cfg.DefaultSecret + if s, ok := params["secret"].(string); ok { + secret = s + } + if secret != "" { + signature := w.signPayload(bodyBytes, secret) + req.Header.Set("X-Signature-256", "sha256="+signature) + } + } + + resp, err := w.client.Do(req) + if err != nil { + lastErr = err + continue + } + + respBody, 
err := io.ReadAll(resp.Body) + resp.Body.Close() + if err != nil { + lastErr = err + continue + } + + result := map[string]interface{}{ + "status_code": resp.StatusCode, + "headers": w.headersToMap(resp.Header), + } + + var jsonBody interface{} + if err := json.Unmarshal(respBody, &jsonBody); err == nil { + result["body"] = jsonBody + } else { + result["body"] = string(respBody) + } + + if resp.StatusCode >= 200 && resp.StatusCode < 300 { + result["success"] = true + return result, nil + } + + if resp.StatusCode >= 500 { + lastErr = fmt.Errorf("server error: %d", resp.StatusCode) + continue + } + + result["success"] = false + return result, nil + } + + return map[string]interface{}{ + "success": false, + "error": lastErr.Error(), + }, lastErr +} + +func (w *WebhookConnector) signPayload(payload []byte, secret string) string { + mac := hmac.New(sha256.New, []byte(secret)) + mac.Write(payload) + return hex.EncodeToString(mac.Sum(nil)) +} + +func (w *WebhookConnector) headersToMap(headers http.Header) map[string]string { + result := make(map[string]string) + for k, v := range headers { + result[k] = strings.Join(v, ", ") + } + return result +} + +func (w *WebhookConnector) PostJSON(ctx context.Context, webhookURL string, data interface{}) (interface{}, error) { + return w.Execute(ctx, "post", map[string]interface{}{ + "url": webhookURL, + "body": data, + }) +} + +func (w *WebhookConnector) GetJSON(ctx context.Context, webhookURL string, params map[string]interface{}) (interface{}, error) { + return w.Execute(ctx, "get", map[string]interface{}{ + "url": webhookURL, + "params": params, + }) +} diff --git a/backend/internal/computer/executor.go b/backend/internal/computer/executor.go new file mode 100644 index 0000000..6d19f4f --- /dev/null +++ b/backend/internal/computer/executor.go @@ -0,0 +1,574 @@ +package computer + +import ( + "context" + "encoding/json" + "fmt" + "strings" + "sync" + "time" + + "github.com/gooseek/backend/internal/llm" + "github.com/google/uuid" + 
"golang.org/x/sync/errgroup" +) + +type Executor struct { + router *Router + sandbox *SandboxManager + maxWorkers int +} + +func NewExecutor(router *Router, maxWorkers int) *Executor { + if maxWorkers <= 0 { + maxWorkers = 5 + } + return &Executor{ + router: router, + maxWorkers: maxWorkers, + } +} + +func (e *Executor) SetSandbox(sandbox *SandboxManager) { + e.sandbox = sandbox +} + +func (e *Executor) ExecuteGroup(ctx context.Context, tasks []SubTask, budget float64) ([]ExecutionResult, error) { + results := make([]ExecutionResult, len(tasks)) + var mu sync.Mutex + + perTaskBudget := budget / float64(len(tasks)) + + g, gctx := errgroup.WithContext(ctx) + g.SetLimit(e.maxWorkers) + + for i, task := range tasks { + i, task := i, task + g.Go(func() error { + result, err := e.ExecuteTask(gctx, &task, perTaskBudget) + mu.Lock() + if err != nil { + results[i] = ExecutionResult{ + TaskID: task.ID, + SubTaskID: task.ID, + Error: err, + } + } else { + results[i] = *result + } + mu.Unlock() + return nil + }) + } + + if err := g.Wait(); err != nil { + return results, err + } + + return results, nil +} + +func (e *Executor) ExecuteTask(ctx context.Context, task *SubTask, budget float64) (*ExecutionResult, error) { + startTime := time.Now() + + client, spec, err := e.router.Route(task, budget) + if err != nil { + return nil, fmt.Errorf("routing failed: %w", err) + } + + task.ModelID = spec.ID + now := time.Now() + task.StartedAt = &now + + var result *ExecutionResult + + switch task.Type { + case TaskResearch: + result, err = e.executeResearch(ctx, client, task) + case TaskCode: + result, err = e.executeCode(ctx, client, task) + case TaskAnalysis: + result, err = e.executeAnalysis(ctx, client, task) + case TaskDesign: + result, err = e.executeDesign(ctx, client, task) + case TaskDeploy: + result, err = e.executeDeploy(ctx, client, task) + case TaskReport: + result, err = e.executeReport(ctx, client, task) + case TaskCommunicate: + result, err = e.executeCommunicate(ctx, 
client, task) + case TaskTransform: + result, err = e.executeTransform(ctx, client, task) + case TaskValidate: + result, err = e.executeValidate(ctx, client, task) + default: + result, err = e.executeGeneric(ctx, client, task) + } + + if err != nil { + return nil, err + } + + result.Duration = time.Since(startTime) + result.Cost = e.router.EstimateCost(task, 1000, 500) + + return result, nil +} + +func (e *Executor) executeResearch(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { + prompt := fmt.Sprintf(`You are a research assistant. Complete this research task: + +Task: %s + +Additional context: %v + +Provide a comprehensive research result with: +1. Key findings +2. Sources/references +3. Summary + +Respond in JSON: +{ + "findings": ["finding 1", "finding 2"], + "sources": ["source 1", "source 2"], + "summary": "...", + "data": {} +}`, task.Description, task.Input) + + response, err := client.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, + Options: llm.StreamOptions{MaxTokens: 4096}, + }) + if err != nil { + return nil, err + } + + output := parseJSONOutput(response) + + return &ExecutionResult{ + TaskID: task.ID, + SubTaskID: task.ID, + Output: output, + }, nil +} + +func (e *Executor) executeCode(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { + inputContext := "" + if task.Input != nil { + inputJSON, _ := json.Marshal(task.Input) + inputContext = fmt.Sprintf("\n\nContext from previous tasks:\n%s", string(inputJSON)) + } + + prompt := fmt.Sprintf(`You are an expert programmer. Complete this coding task: + +Task: %s%s + +Requirements: +1. Write clean, production-ready code +2. Include error handling +3. Add necessary imports +4. Follow best practices + +Respond in JSON: +{ + "language": "python", + "code": "...", + "filename": "main.py", + "dependencies": ["package1", "package2"], + "explanation": "..." 
+}`, task.Description, inputContext) + + response, err := client.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, + Options: llm.StreamOptions{MaxTokens: 8192}, + }) + if err != nil { + return nil, err + } + + output := parseJSONOutput(response) + + var artifacts []Artifact + if code, ok := output["code"].(string); ok { + filename := "main.py" + if fn, ok := output["filename"].(string); ok { + filename = fn + } + artifacts = append(artifacts, Artifact{ + ID: uuid.New().String(), + TaskID: task.ID, + Type: ArtifactTypeCode, + Name: filename, + Content: []byte(code), + Size: int64(len(code)), + CreatedAt: time.Now(), + }) + } + + return &ExecutionResult{ + TaskID: task.ID, + SubTaskID: task.ID, + Output: output, + Artifacts: artifacts, + }, nil +} + +func (e *Executor) executeAnalysis(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { + inputJSON, _ := json.Marshal(task.Input) + + prompt := fmt.Sprintf(`You are a data analyst. Analyze this data/information: + +Task: %s + +Input data: +%s + +Provide: +1. Key insights +2. Patterns observed +3. Recommendations +4. Visualizations needed (describe) + +Respond in JSON: +{ + "insights": ["insight 1", "insight 2"], + "patterns": ["pattern 1"], + "recommendations": ["rec 1"], + "visualizations": ["chart type 1"], + "summary": "..." 
+}`, task.Description, string(inputJSON)) + + response, err := client.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, + Options: llm.StreamOptions{MaxTokens: 4096}, + }) + if err != nil { + return nil, err + } + + output := parseJSONOutput(response) + + return &ExecutionResult{ + TaskID: task.ID, + SubTaskID: task.ID, + Output: output, + }, nil +} + +func (e *Executor) executeDesign(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { + inputJSON, _ := json.Marshal(task.Input) + + prompt := fmt.Sprintf(`You are a software architect. Design a solution: + +Task: %s + +Context: +%s + +Provide: +1. Architecture overview +2. Components and their responsibilities +3. Data flow +4. Technology recommendations +5. Implementation plan + +Respond in JSON: +{ + "architecture": "...", + "components": [{"name": "...", "responsibility": "..."}], + "dataFlow": "...", + "technologies": ["tech1", "tech2"], + "implementationSteps": ["step1", "step2"], + "diagram": "mermaid diagram code" +}`, task.Description, string(inputJSON)) + + response, err := client.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, + Options: llm.StreamOptions{MaxTokens: 4096}, + }) + if err != nil { + return nil, err + } + + output := parseJSONOutput(response) + + return &ExecutionResult{ + TaskID: task.ID, + SubTaskID: task.ID, + Output: output, + }, nil +} + +func (e *Executor) executeDeploy(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { + if e.sandbox == nil { + return e.executeGeneric(ctx, client, task) + } + + var code string + if task.Input != nil { + if c, ok := task.Input["code"].(string); ok { + code = c + } + } + + if code == "" { + return e.executeGeneric(ctx, client, task) + } + + sandbox, err := e.sandbox.Create(ctx, task.ID) + if err != nil { + return nil, fmt.Errorf("failed to create sandbox: %w", err) + } + defer 
e.sandbox.Destroy(ctx, sandbox) + + result, err := e.sandbox.Execute(ctx, sandbox, code, "python") + if err != nil { + return nil, fmt.Errorf("sandbox execution failed: %w", err) + } + + output := map[string]interface{}{ + "stdout": result.Stdout, + "stderr": result.Stderr, + "exitCode": result.ExitCode, + "duration": result.Duration.String(), + } + + var artifacts []Artifact + for name, content := range result.Files { + artifacts = append(artifacts, Artifact{ + ID: uuid.New().String(), + TaskID: task.ID, + Type: ArtifactTypeFile, + Name: name, + Content: content, + Size: int64(len(content)), + CreatedAt: time.Now(), + }) + } + + return &ExecutionResult{ + TaskID: task.ID, + SubTaskID: task.ID, + Output: output, + Artifacts: artifacts, + }, nil +} + +func (e *Executor) executeReport(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { + inputJSON, _ := json.Marshal(task.Input) + + prompt := fmt.Sprintf(`You are a report writer. Generate a comprehensive report: + +Task: %s + +Data/Context: +%s + +Create a well-structured report with: +1. Executive Summary +2. Key Findings +3. Detailed Analysis +4. Conclusions +5. 
Recommendations + +Use markdown formatting.`, task.Description, string(inputJSON)) + + response, err := client.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, + Options: llm.StreamOptions{MaxTokens: 8192}, + }) + if err != nil { + return nil, err + } + + output := map[string]interface{}{ + "report": response, + "format": "markdown", + "wordCount": len(strings.Fields(response)), + } + + artifacts := []Artifact{ + { + ID: uuid.New().String(), + TaskID: task.ID, + Type: ArtifactTypeReport, + Name: "report.md", + Content: []byte(response), + MimeType: "text/markdown", + Size: int64(len(response)), + CreatedAt: time.Now(), + }, + } + + return &ExecutionResult{ + TaskID: task.ID, + SubTaskID: task.ID, + Output: output, + Artifacts: artifacts, + }, nil +} + +func (e *Executor) executeCommunicate(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { + inputJSON, _ := json.Marshal(task.Input) + + prompt := fmt.Sprintf(`Generate a message/notification: + +Task: %s + +Context: +%s + +Create an appropriate message. 
Respond in JSON: +{ + "subject": "...", + "body": "...", + "format": "text|html", + "priority": "low|normal|high" +}`, task.Description, string(inputJSON)) + + response, err := client.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, + Options: llm.StreamOptions{MaxTokens: 2048}, + }) + if err != nil { + return nil, err + } + + output := parseJSONOutput(response) + output["status"] = "prepared" + + return &ExecutionResult{ + TaskID: task.ID, + SubTaskID: task.ID, + Output: output, + }, nil +} + +func (e *Executor) executeTransform(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { + inputJSON, _ := json.Marshal(task.Input) + + prompt := fmt.Sprintf(`Transform data as requested: + +Task: %s + +Input data: +%s + +Perform the transformation and return the result in JSON: +{ + "transformed": ..., + "format": "...", + "changes": ["change 1", "change 2"] +}`, task.Description, string(inputJSON)) + + response, err := client.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, + Options: llm.StreamOptions{MaxTokens: 4096}, + }) + if err != nil { + return nil, err + } + + output := parseJSONOutput(response) + + return &ExecutionResult{ + TaskID: task.ID, + SubTaskID: task.ID, + Output: output, + }, nil +} + +func (e *Executor) executeValidate(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { + inputJSON, _ := json.Marshal(task.Input) + + prompt := fmt.Sprintf(`Validate the following: + +Task: %s + +Data to validate: +%s + +Check for: +1. Correctness +2. Completeness +3. Consistency +4. Quality + +Respond in JSON: +{ + "valid": true|false, + "score": 0-100, + "issues": ["issue 1", "issue 2"], + "suggestions": ["suggestion 1"], + "summary": "..." 
+}`, task.Description, string(inputJSON)) + + response, err := client.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, + Options: llm.StreamOptions{MaxTokens: 2048}, + }) + if err != nil { + return nil, err + } + + output := parseJSONOutput(response) + + return &ExecutionResult{ + TaskID: task.ID, + SubTaskID: task.ID, + Output: output, + }, nil +} + +func (e *Executor) executeGeneric(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { + inputJSON, _ := json.Marshal(task.Input) + + prompt := fmt.Sprintf(`Complete this task: + +Task type: %s +Description: %s + +Context: +%s + +Provide a comprehensive result in JSON format.`, task.Type, task.Description, string(inputJSON)) + + response, err := client.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, + Options: llm.StreamOptions{MaxTokens: 4096}, + }) + if err != nil { + return nil, err + } + + output := parseJSONOutput(response) + if len(output) == 0 { + output = map[string]interface{}{ + "result": response, + } + } + + return &ExecutionResult{ + TaskID: task.ID, + SubTaskID: task.ID, + Output: output, + }, nil +} + +func parseJSONOutput(response string) map[string]interface{} { + startIdx := strings.Index(response, "{") + endIdx := strings.LastIndex(response, "}") + + if startIdx == -1 || endIdx == -1 || endIdx <= startIdx { + return map[string]interface{}{"raw": response} + } + + jsonStr := response[startIdx : endIdx+1] + + var output map[string]interface{} + if err := json.Unmarshal([]byte(jsonStr), &output); err != nil { + return map[string]interface{}{"raw": response} + } + + return output +} diff --git a/backend/internal/computer/memory.go b/backend/internal/computer/memory.go new file mode 100644 index 0000000..7e0134e --- /dev/null +++ b/backend/internal/computer/memory.go @@ -0,0 +1,377 @@ +package computer + +import ( + "context" + "encoding/json" + "strings" + "sync" + 
"time" + + "github.com/google/uuid" +) + +type MemoryStore struct { + repo MemoryRepository + cache map[string][]MemoryEntry + mu sync.RWMutex +} + +func NewMemoryStore(repo MemoryRepository) *MemoryStore { + return &MemoryStore{ + repo: repo, + cache: make(map[string][]MemoryEntry), + } +} + +func (m *MemoryStore) Store(ctx context.Context, userID string, entry *MemoryEntry) error { + if entry.ID == "" { + entry.ID = uuid.New().String() + } + entry.UserID = userID + if entry.CreatedAt.IsZero() { + entry.CreatedAt = time.Now() + } + + if m.repo != nil { + if err := m.repo.Store(ctx, entry); err != nil { + return err + } + } + + m.mu.Lock() + m.cache[userID] = append(m.cache[userID], *entry) + if len(m.cache[userID]) > 1000 { + m.cache[userID] = m.cache[userID][len(m.cache[userID])-500:] + } + m.mu.Unlock() + + return nil +} + +func (m *MemoryStore) StoreResult(ctx context.Context, userID, taskID, key string, value interface{}) error { + valueJSON, _ := json.Marshal(value) + + entry := &MemoryEntry{ + UserID: userID, + TaskID: taskID, + Key: key, + Value: string(valueJSON), + Type: MemoryTypeResult, + CreatedAt: time.Now(), + } + + return m.Store(ctx, userID, entry) +} + +func (m *MemoryStore) StoreFact(ctx context.Context, userID, key string, value interface{}, tags []string) error { + entry := &MemoryEntry{ + UserID: userID, + Key: key, + Value: value, + Type: MemoryTypeFact, + Tags: tags, + CreatedAt: time.Now(), + } + + return m.Store(ctx, userID, entry) +} + +func (m *MemoryStore) StorePreference(ctx context.Context, userID, key string, value interface{}) error { + entry := &MemoryEntry{ + UserID: userID, + Key: key, + Value: value, + Type: MemoryTypePreference, + CreatedAt: time.Now(), + } + + return m.Store(ctx, userID, entry) +} + +func (m *MemoryStore) StoreContext(ctx context.Context, userID, taskID, key string, value interface{}, ttl time.Duration) error { + expiresAt := time.Now().Add(ttl) + + entry := &MemoryEntry{ + UserID: userID, + TaskID: taskID, + 
// Recall returns up to limit memory entries for userID that match query.
// It prefers the persistent repository when one is attached; on repo error
// or an empty repo result it falls back to scoring the in-memory cache by
// case-insensitive term overlap (key hits weigh 3, tag hits 2, value hits 1).
func (m *MemoryStore) Recall(ctx context.Context, userID string, query string, limit int) ([]MemoryEntry, error) {
	// Best-effort: repo errors are deliberately swallowed so the cache
	// can still serve results.
	if m.repo != nil {
		entries, err := m.repo.Search(ctx, userID, query, limit)
		if err == nil && len(entries) > 0 {
			return entries, nil
		}
	}

	m.mu.RLock()
	cached := m.cache[userID]
	m.mu.RUnlock()

	if len(cached) == 0 {
		return nil, nil
	}

	queryLower := strings.ToLower(query)
	queryTerms := strings.Fields(queryLower)

	type scored struct {
		entry MemoryEntry
		score int
	}

	var results []scored
	now := time.Now()

	for _, entry := range cached {
		// Expired entries are filtered here but not evicted; Cleanup
		// handles actual removal.
		if entry.ExpiresAt != nil && entry.ExpiresAt.Before(now) {
			continue
		}

		score := 0

		// Key match: strongest signal.
		keyLower := strings.ToLower(entry.Key)
		for _, term := range queryTerms {
			if strings.Contains(keyLower, term) {
				score += 3
			}
		}

		// Value text only contributes when the stored value is a string.
		if valueStr, ok := entry.Value.(string); ok {
			valueLower := strings.ToLower(valueStr)
			for _, term := range queryTerms {
				if strings.Contains(valueLower, term) {
					score += 1
				}
			}
		}

		for _, tag := range entry.Tags {
			tagLower := strings.ToLower(tag)
			for _, term := range queryTerms {
				if strings.Contains(tagLower, term) {
					score += 2
				}
			}
		}

		if score > 0 {
			results = append(results, scored{entry: entry, score: score})
		}
	}

	// In-place selection-style sort, descending by score. Quadratic, but
	// the per-user cache is expected to be small; note the sort is not
	// stable, so equal-score ordering is arbitrary.
	for i := 0; i < len(results)-1; i++ {
		for j := i + 1; j < len(results); j++ {
			if results[j].score > results[i].score {
				results[i], results[j] = results[j], results[i]
			}
		}
	}

	if len(results) > limit {
		results = results[:limit]
	}

	entries := make([]MemoryEntry, len(results))
	for i, r := range results {
		entries[i] = r.entry
	}

	return entries, nil
}

// GetByUser returns up to limit entries for userID. With a repository the
// call is fully delegated; the cache fallback returns the newest entries
// (the tail of the append-ordered per-user slice).
func (m *MemoryStore) GetByUser(ctx context.Context, userID string, limit int) ([]MemoryEntry, error) {
	if m.repo != nil {
		return m.repo.GetByUser(ctx, userID, limit)
	}

	m.mu.RLock()
	cached := m.cache[userID]
	m.mu.RUnlock()

	if len(cached) > limit {
		return cached[len(cached)-limit:], nil
	}

	return cached, nil
}

// GetByTask returns all entries tagged with taskID, delegating to the
// repository when available; otherwise it scans every user's cache.
func (m *MemoryStore) GetByTask(ctx context.Context, taskID string) ([]MemoryEntry, error) {
	if m.repo != nil {
		return m.repo.GetByTask(ctx, taskID)
	}

	var result []MemoryEntry

	m.mu.RLock()
	for _, entries := range m.cache {
		for _, e := range entries {
			if e.TaskID == taskID {
				result = append(result, e)
			}
		}
	}
	m.mu.RUnlock()

	return result, nil
}

// GetTaskContext flattens a task's memory entries into a key->value map.
// Later entries silently overwrite earlier ones that share a key.
func (m *MemoryStore) GetTaskContext(ctx context.Context, taskID string) (map[string]interface{}, error) {
	entries, err := m.GetByTask(ctx, taskID)
	if err != nil {
		return nil, err
	}

	// Local variable shadows the imported "context" package from here on;
	// no package references follow, so this compiles, but beware when editing.
	context := make(map[string]interface{})
	for _, e := range entries {
		context[e.Key] = e.Value
	}

	return context, nil
}

// GetUserContext returns a key->value map of the user's preference and
// fact entries. Only the most recent 100 entries are considered — older
// facts are invisible here (presumably acceptable; confirm with callers).
func (m *MemoryStore) GetUserContext(ctx context.Context, userID string) (map[string]interface{}, error) {
	entries, err := m.GetByUser(ctx, userID, 100)
	if err != nil {
		return nil, err
	}

	context := make(map[string]interface{})
	for _, e := range entries {
		if e.Type == MemoryTypePreference || e.Type == MemoryTypeFact {
			context[e.Key] = e.Value
		}
	}

	return context, nil
}

// GetPreferences returns the user's preference entries (newest 100 only)
// as a key->value map.
func (m *MemoryStore) GetPreferences(ctx context.Context, userID string) (map[string]interface{}, error) {
	entries, err := m.GetByUser(ctx, userID, 100)
	if err != nil {
		return nil, err
	}

	prefs := make(map[string]interface{})
	for _, e := range entries {
		if e.Type == MemoryTypePreference {
			prefs[e.Key] = e.Value
		}
	}

	return prefs, nil
}

// GetFacts returns the user's fact entries (newest 100 only) in
// insertion order.
func (m *MemoryStore) GetFacts(ctx context.Context, userID string) ([]MemoryEntry, error) {
	entries, err := m.GetByUser(ctx, userID, 100)
	if err != nil {
		return nil, err
	}

	var facts []MemoryEntry
	for _, e := range entries {
		if e.Type == MemoryTypeFact {
			facts = append(facts, e)
		}
	}

	return facts, nil
}
Delete(ctx context.Context, id string) error { + if m.repo != nil { + return m.repo.Delete(ctx, id) + } + + m.mu.Lock() + for userID, entries := range m.cache { + for i, e := range entries { + if e.ID == id { + m.cache[userID] = append(entries[:i], entries[i+1:]...) + break + } + } + } + m.mu.Unlock() + + return nil +} + +func (m *MemoryStore) Clear(ctx context.Context, userID string) error { + m.mu.Lock() + delete(m.cache, userID) + m.mu.Unlock() + + return nil +} + +func (m *MemoryStore) ClearTask(ctx context.Context, taskID string) error { + m.mu.Lock() + for userID, entries := range m.cache { + var filtered []MemoryEntry + for _, e := range entries { + if e.TaskID != taskID { + filtered = append(filtered, e) + } + } + m.cache[userID] = filtered + } + m.mu.Unlock() + + return nil +} + +func (m *MemoryStore) Cleanup(ctx context.Context) error { + now := time.Now() + + m.mu.Lock() + for userID, entries := range m.cache { + var valid []MemoryEntry + for _, e := range entries { + if e.ExpiresAt == nil || e.ExpiresAt.After(now) { + valid = append(valid, e) + } + } + m.cache[userID] = valid + } + m.mu.Unlock() + + return nil +} + +func (m *MemoryStore) Stats(userID string) map[string]int { + m.mu.RLock() + entries := m.cache[userID] + m.mu.RUnlock() + + stats := map[string]int{ + "total": len(entries), + "facts": 0, + "preferences": 0, + "context": 0, + "results": 0, + } + + for _, e := range entries { + switch e.Type { + case MemoryTypeFact: + stats["facts"]++ + case MemoryTypePreference: + stats["preferences"]++ + case MemoryTypeContext: + stats["context"]++ + case MemoryTypeResult: + stats["results"]++ + } + } + + return stats +} diff --git a/backend/internal/computer/planner.go b/backend/internal/computer/planner.go new file mode 100644 index 0000000..9aa68c3 --- /dev/null +++ b/backend/internal/computer/planner.go @@ -0,0 +1,371 @@ +package computer + +import ( + "context" + "encoding/json" + "fmt" + "regexp" + "strings" + + 
"github.com/gooseek/backend/internal/llm" + "github.com/google/uuid" +) + +type Planner struct { + registry *llm.ModelRegistry +} + +func NewPlanner(registry *llm.ModelRegistry) *Planner { + return &Planner{ + registry: registry, + } +} + +func (p *Planner) Plan(ctx context.Context, query string, memory map[string]interface{}) (*TaskPlan, error) { + client, _, err := p.registry.GetBest(llm.CapReasoning) + if err != nil { + client, _, err = p.registry.GetBest(llm.CapCoding) + if err != nil { + return nil, fmt.Errorf("no suitable model for planning: %w", err) + } + } + + memoryContext := "" + if len(memory) > 0 { + memoryJSON, _ := json.Marshal(memory) + memoryContext = fmt.Sprintf("\n\nUser context and memory:\n%s", string(memoryJSON)) + } + + prompt := fmt.Sprintf(`You are a task planning AI. Analyze this query and create an execution plan. + +Query: %s%s + +Break this into subtasks. Each subtask should be: +1. Atomic - one clear action +2. Independent where possible (for parallel execution) +3. 
Have clear dependencies when needed + +Available task types: +- research: Search web, gather information +- code: Write/generate code +- analysis: Analyze data, extract insights +- design: Design architecture, create plans +- deploy: Deploy applications, run code +- monitor: Set up monitoring, tracking +- report: Generate reports, summaries +- communicate: Send emails, messages +- transform: Convert data formats +- validate: Check, verify results + +For each subtask specify: +- type: one of the task types above +- description: what to do +- dependencies: list of subtask IDs this depends on (empty if none) +- capabilities: required AI capabilities (reasoning, coding, search, creative, fast, long_context, vision, math) + +Respond in JSON format: +{ + "summary": "Brief summary of the plan", + "subtasks": [ + { + "id": "1", + "type": "research", + "description": "Search for...", + "dependencies": [], + "capabilities": ["search"] + }, + { + "id": "2", + "type": "code", + "description": "Write code to...", + "dependencies": ["1"], + "capabilities": ["coding"] + } + ], + "estimatedCost": 0.05, + "estimatedTimeSeconds": 120 +} + +Create 3-10 subtasks. 
Be specific and actionable.`, query, memoryContext) + + messages := []llm.Message{ + {Role: llm.RoleUser, Content: prompt}, + } + + response, err := client.GenerateText(ctx, llm.StreamRequest{ + Messages: messages, + Options: llm.StreamOptions{MaxTokens: 4096}, + }) + if err != nil { + return p.createDefaultPlan(query), nil + } + + plan, err := p.parsePlanResponse(response) + if err != nil { + return p.createDefaultPlan(query), nil + } + + plan.Query = query + plan.ExecutionOrder = p.calculateExecutionOrder(plan.SubTasks) + + return plan, nil +} + +func (p *Planner) parsePlanResponse(response string) (*TaskPlan, error) { + jsonRegex := regexp.MustCompile(`\{[\s\S]*\}`) + jsonMatch := jsonRegex.FindString(response) + if jsonMatch == "" { + return nil, fmt.Errorf("no JSON found in response") + } + + var rawPlan struct { + Summary string `json:"summary"` + EstimatedCost float64 `json:"estimatedCost"` + EstimatedTimeSeconds int `json:"estimatedTimeSeconds"` + SubTasks []struct { + ID string `json:"id"` + Type string `json:"type"` + Description string `json:"description"` + Dependencies []string `json:"dependencies"` + Capabilities []string `json:"capabilities"` + } `json:"subtasks"` + } + + if err := json.Unmarshal([]byte(jsonMatch), &rawPlan); err != nil { + return nil, fmt.Errorf("failed to parse plan JSON: %w", err) + } + + plan := &TaskPlan{ + Summary: rawPlan.Summary, + EstimatedCost: rawPlan.EstimatedCost, + EstimatedTime: rawPlan.EstimatedTimeSeconds, + SubTasks: make([]SubTask, len(rawPlan.SubTasks)), + } + + for i, st := range rawPlan.SubTasks { + caps := make([]llm.ModelCapability, len(st.Capabilities)) + for j, c := range st.Capabilities { + caps[j] = llm.ModelCapability(c) + } + + plan.SubTasks[i] = SubTask{ + ID: st.ID, + Type: TaskType(st.Type), + Description: st.Description, + Dependencies: st.Dependencies, + RequiredCaps: caps, + Status: StatusPending, + MaxRetries: 3, + } + } + + return plan, nil +} + +func (p *Planner) calculateExecutionOrder(subTasks 
[]SubTask) [][]string { + taskMap := make(map[string]*SubTask) + for i := range subTasks { + taskMap[subTasks[i].ID] = &subTasks[i] + } + + inDegree := make(map[string]int) + for _, st := range subTasks { + if _, ok := inDegree[st.ID]; !ok { + inDegree[st.ID] = 0 + } + for _, dep := range st.Dependencies { + inDegree[st.ID]++ + if _, ok := inDegree[dep]; !ok { + inDegree[dep] = 0 + } + } + } + + var order [][]string + completed := make(map[string]bool) + + for len(completed) < len(subTasks) { + var wave []string + + for _, st := range subTasks { + if completed[st.ID] { + continue + } + + canExecute := true + for _, dep := range st.Dependencies { + if !completed[dep] { + canExecute = false + break + } + } + + if canExecute { + wave = append(wave, st.ID) + } + } + + if len(wave) == 0 { + for _, st := range subTasks { + if !completed[st.ID] { + wave = append(wave, st.ID) + } + } + } + + for _, id := range wave { + completed[id] = true + } + + order = append(order, wave) + } + + return order +} + +func (p *Planner) createDefaultPlan(query string) *TaskPlan { + queryLower := strings.ToLower(query) + + subTasks := []SubTask{ + { + ID: uuid.New().String(), + Type: TaskResearch, + Description: "Research and gather information about: " + query, + Dependencies: []string{}, + RequiredCaps: []llm.ModelCapability{llm.CapSearch}, + Status: StatusPending, + MaxRetries: 3, + }, + } + + if strings.Contains(queryLower, "код") || strings.Contains(queryLower, "code") || + strings.Contains(queryLower, "приложение") || strings.Contains(queryLower, "app") || + strings.Contains(queryLower, "скрипт") || strings.Contains(queryLower, "script") { + subTasks = append(subTasks, SubTask{ + ID: uuid.New().String(), + Type: TaskDesign, + Description: "Design architecture and structure", + Dependencies: []string{subTasks[0].ID}, + RequiredCaps: []llm.ModelCapability{llm.CapReasoning}, + Status: StatusPending, + MaxRetries: 3, + }) + subTasks = append(subTasks, SubTask{ + ID: uuid.New().String(), + 
Type: TaskCode, + Description: "Generate code implementation", + Dependencies: []string{subTasks[1].ID}, + RequiredCaps: []llm.ModelCapability{llm.CapCoding}, + Status: StatusPending, + MaxRetries: 3, + }) + } + + if strings.Contains(queryLower, "отчёт") || strings.Contains(queryLower, "report") || + strings.Contains(queryLower, "анализ") || strings.Contains(queryLower, "analysis") { + subTasks = append(subTasks, SubTask{ + ID: uuid.New().String(), + Type: TaskAnalysis, + Description: "Analyze gathered information", + Dependencies: []string{subTasks[0].ID}, + RequiredCaps: []llm.ModelCapability{llm.CapReasoning}, + Status: StatusPending, + MaxRetries: 3, + }) + subTasks = append(subTasks, SubTask{ + ID: uuid.New().String(), + Type: TaskReport, + Description: "Generate comprehensive report", + Dependencies: []string{subTasks[len(subTasks)-1].ID}, + RequiredCaps: []llm.ModelCapability{llm.CapCreative}, + Status: StatusPending, + MaxRetries: 3, + }) + } + + if strings.Contains(queryLower, "email") || strings.Contains(queryLower, "письмо") || + strings.Contains(queryLower, "telegram") || strings.Contains(queryLower, "отправ") { + subTasks = append(subTasks, SubTask{ + ID: uuid.New().String(), + Type: TaskCommunicate, + Description: "Send notification/message", + Dependencies: []string{subTasks[len(subTasks)-1].ID}, + RequiredCaps: []llm.ModelCapability{llm.CapFast}, + Status: StatusPending, + MaxRetries: 3, + }) + } + + plan := &TaskPlan{ + Query: query, + Summary: "Auto-generated plan for: " + query, + SubTasks: subTasks, + EstimatedCost: float64(len(subTasks)) * 0.01, + EstimatedTime: len(subTasks) * 30, + } + + plan.ExecutionOrder = p.calculateExecutionOrder(subTasks) + + return plan +} + +func (p *Planner) Replan(ctx context.Context, plan *TaskPlan, newContext string) (*TaskPlan, error) { + completedTasks := make([]SubTask, 0) + pendingTasks := make([]SubTask, 0) + + for _, st := range plan.SubTasks { + if st.Status == StatusCompleted { + completedTasks = 
// Replan asks the LLM to adjust an in-flight plan given new context or
// feedback, preserving completed subtasks and revising pending/failed
// ones. Any failure (no reasoning model, generation error, unparseable
// reply) returns the ORIGINAL plan unchanged rather than an error.
// NOTE(review): subtasks in the replanned output come back from
// parsePlanResponse with Status reset to Pending, so work the model
// echoes back as "completed" may be re-executed — confirm the executor
// dedupes by ID.
func (p *Planner) Replan(ctx context.Context, plan *TaskPlan, newContext string) (*TaskPlan, error) {
	completedTasks := make([]SubTask, 0)
	pendingTasks := make([]SubTask, 0)

	for _, st := range plan.SubTasks {
		if st.Status == StatusCompleted {
			completedTasks = append(completedTasks, st)
		} else if st.Status == StatusPending || st.Status == StatusFailed {
			pendingTasks = append(pendingTasks, st)
		}
	}

	completedJSON, _ := json.Marshal(completedTasks)
	pendingJSON, _ := json.Marshal(pendingTasks)

	client, _, err := p.registry.GetBest(llm.CapReasoning)
	if err != nil {
		return plan, nil
	}

	prompt := fmt.Sprintf(`You need to replan a task based on new context.

Original query: %s

Completed subtasks:
%s

Pending subtasks:
%s

New context/feedback:
%s

Adjust the plan. Keep completed tasks, modify or remove pending tasks as needed.
Add new subtasks if the new context requires it.

Respond in the same JSON format as before.`, plan.Query, string(completedJSON), string(pendingJSON), newContext)

	messages := []llm.Message{
		{Role: llm.RoleUser, Content: prompt},
	}

	response, err := client.GenerateText(ctx, llm.StreamRequest{
		Messages: messages,
		Options:  llm.StreamOptions{MaxTokens: 4096},
	})
	if err != nil {
		return plan, nil
	}

	newPlan, err := p.parsePlanResponse(response)
	if err != nil {
		return plan, nil
	}

	newPlan.Query = plan.Query
	newPlan.ExecutionOrder = p.calculateExecutionOrder(newPlan.SubTasks)

	return newPlan, nil
}

// RoutingRule describes how subtasks of one TaskType should be routed to
// models: preferred capabilities first, then explicit fallback model IDs,
// under a per-call cost ceiling.
type RoutingRule struct {
	TaskType   TaskType
	Preferred  []llm.ModelCapability
	Fallback   []string
	MaxCost    float64
	MaxLatency int
}

// Router selects LLM clients for subtasks based on per-task-type rules.
type Router struct {
	registry *llm.ModelRegistry
	rules    map[TaskType]RoutingRule
}

// NewRouter builds a Router pre-populated with the default routing table:
// one rule per task type, pairing the capabilities that matter for that
// work with hard-coded fallback model IDs and a cost cap.
// NOTE(review): fallback model IDs are pinned to specific vendor model
// names; they go stale as providers rotate models — consider sourcing
// them from configuration.
func NewRouter(registry *llm.ModelRegistry) *Router {
	r := &Router{
		registry: registry,
		rules:    make(map[TaskType]RoutingRule),
	}

	r.rules[TaskResearch] = RoutingRule{
		TaskType:  TaskResearch,
		Preferred: []llm.ModelCapability{llm.CapSearch, llm.CapLongContext},
		Fallback:  []string{"gemini-1.5-pro", "gpt-4o"},
		MaxCost:   0.1,
	}

	r.rules[TaskCode] = RoutingRule{
		TaskType:  TaskCode,
		Preferred: []llm.ModelCapability{llm.CapCoding},
		Fallback:  []string{"claude-3-sonnet", "claude-3-opus", "gpt-4o"},
		MaxCost:   0.2,
	}

	r.rules[TaskAnalysis] = RoutingRule{
		TaskType:  TaskAnalysis,
		Preferred: []llm.ModelCapability{llm.CapReasoning, llm.CapMath},
		Fallback:  []string{"claude-3-opus", "gpt-4o"},
		MaxCost:   0.15,
	}

	r.rules[TaskDesign] = RoutingRule{
		TaskType:  TaskDesign,
		Preferred: []llm.ModelCapability{llm.CapReasoning, llm.CapCreative},
		Fallback:  []string{"claude-3-opus", "gpt-4o"},
		MaxCost:   0.15,
	}

	r.rules[TaskDeploy] = RoutingRule{
		TaskType:  TaskDeploy,
		Preferred: []llm.ModelCapability{llm.CapCoding, llm.CapFast},
		Fallback:  []string{"claude-3-sonnet", "gpt-4o-mini"},
		MaxCost:   0.05,
	}

	r.rules[TaskMonitor] = RoutingRule{
		TaskType:  TaskMonitor,
		Preferred: []llm.ModelCapability{llm.CapFast},
		Fallback:  []string{"gpt-4o-mini", "gemini-1.5-flash"},
		MaxCost:   0.02,
	}

	r.rules[TaskReport] = RoutingRule{
		TaskType:  TaskReport,
		Preferred: []llm.ModelCapability{llm.CapCreative, llm.CapLongContext},
		Fallback:  []string{"claude-3-opus", "gpt-4o"},
		MaxCost:   0.1,
	}

	r.rules[TaskCommunicate] = RoutingRule{
		TaskType:  TaskCommunicate,
		Preferred: []llm.ModelCapability{llm.CapFast, llm.CapCreative},
		Fallback:  []string{"gpt-4o-mini", "gemini-1.5-flash"},
		MaxCost:   0.02,
	}

	r.rules[TaskTransform] = RoutingRule{
		TaskType:  TaskTransform,
		Preferred: []llm.ModelCapability{llm.CapFast, llm.CapCoding},
		Fallback:  []string{"gpt-4o-mini", "claude-3-sonnet"},
		MaxCost:   0.03,
	}

	r.rules[TaskValidate] = RoutingRule{
		TaskType:  TaskValidate,
		Preferred: []llm.ModelCapability{llm.CapReasoning},
		Fallback:  []string{"gpt-4o", "claude-3-sonnet"},
		MaxCost:   0.05,
	}

	return r
}
// Route picks a client for the subtask, trying in order:
//  1. the task's explicitly pinned ModelID (if affordable),
//  2. the registry's best model for each required capability,
//  3. the task-type rule's preferred capabilities, then fallback IDs,
//  4. the cheapest registered model within budget,
//  5. last resort: the single cheapest model EVEN IF it exceeds budget.
// budget is compared against the model's per-1K-token cost.
// NOTE(review): step 2 returns on the FIRST required capability that has
// an affordable best model — it does not intersect all RequiredCaps, so
// a multi-capability task may get a model lacking the later capabilities.
func (r *Router) Route(task *SubTask, budget float64) (llm.Client, llm.ModelSpec, error) {
	if task.ModelID != "" {
		client, spec, err := r.registry.GetByID(task.ModelID)
		if err == nil && spec.CostPer1K <= budget {
			return client, spec, nil
		}
	}

	if len(task.RequiredCaps) > 0 {
		// "cap" shadows the builtin cap() here; harmless since cap() is
		// not used in this scope.
		for _, cap := range task.RequiredCaps {
			client, spec, err := r.registry.GetBest(cap)
			if err == nil && spec.CostPer1K <= budget {
				return client, spec, nil
			}
		}
	}

	rule, ok := r.rules[task.Type]
	if ok {
		for _, cap := range rule.Preferred {
			client, spec, err := r.registry.GetBest(cap)
			if err == nil && spec.CostPer1K <= budget {
				return client, spec, nil
			}
		}

		for _, modelID := range rule.Fallback {
			client, spec, err := r.registry.GetByID(modelID)
			if err == nil && spec.CostPer1K <= budget {
				return client, spec, nil
			}
		}
	}

	models := r.registry.GetAll()
	if len(models) == 0 {
		return nil, llm.ModelSpec{}, errors.New("no models available")
	}

	// Cheapest-first scan of everything registered.
	sort.Slice(models, func(i, j int) bool {
		return models[i].CostPer1K < models[j].CostPer1K
	})

	for _, spec := range models {
		if spec.CostPer1K <= budget {
			client, err := r.registry.GetClient(spec.ID)
			if err == nil {
				return client, spec, nil
			}
		}
	}

	// Nothing fits the budget: return the cheapest model anyway rather
	// than failing the task outright.
	client, err := r.registry.GetClient(models[0].ID)
	if err != nil {
		return nil, llm.ModelSpec{}, err
	}
	return client, models[0], nil
}

// RouteMultiple selects up to count DISTINCT models for consensus-style
// execution, splitting budget evenly per model. The first pass honors
// the task-type rule's preferred capabilities and the per-model budget;
// if that yields fewer than count, a second pass fills from all models
// IGNORING the budget. May return fewer than count clients; errors only
// when no model can be obtained at all.
func (r *Router) RouteMultiple(task *SubTask, count int, budget float64) ([]llm.Client, []llm.ModelSpec, error) {
	var clients []llm.Client
	var specs []llm.ModelSpec

	usedModels := make(map[string]bool)
	perModelBudget := budget / float64(count)

	rule, ok := r.rules[task.Type]
	if !ok {
		rule = RoutingRule{
			Preferred: []llm.ModelCapability{llm.CapReasoning, llm.CapCoding, llm.CapFast},
		}
	}

	for _, cap := range rule.Preferred {
		if len(clients) >= count {
			break
		}

		models := r.registry.GetAllWithCapability(cap)
		for _, spec := range models {
			if len(clients) >= count {
				break
			}
			if usedModels[spec.ID] {
				continue
			}
			if spec.CostPer1K > perModelBudget {
				continue
			}

			client, err := r.registry.GetClient(spec.ID)
			if err == nil {
				clients = append(clients, client)
				specs = append(specs, spec)
				usedModels[spec.ID] = true
			}
		}
	}

	// Budget-blind top-up pass to reach the requested count.
	if len(clients) < count {
		models := r.registry.GetAll()
		for _, spec := range models {
			if len(clients) >= count {
				break
			}
			if usedModels[spec.ID] {
				continue
			}

			client, err := r.registry.GetClient(spec.ID)
			if err == nil {
				clients = append(clients, client)
				specs = append(specs, spec)
				usedModels[spec.ID] = true
			}
		}
	}

	if len(clients) == 0 {
		return nil, nil, errors.New("no models available for consensus")
	}

	return clients, specs, nil
}

// SetRule installs or replaces the routing rule for a task type.
// NOTE(review): rules map is written without a lock; confirm all writes
// happen before concurrent Route calls begin.
func (r *Router) SetRule(taskType TaskType, rule RoutingRule) {
	r.rules[taskType] = rule
}

// GetRule returns the routing rule for a task type, if one exists.
func (r *Router) GetRule(taskType TaskType) (RoutingRule, bool) {
	rule, ok := r.rules[taskType]
	return rule, ok
}

// EstimateCost predicts the cost of running the subtask with the model
// Route would pick under an effectively unlimited (1.0) budget, at the
// model's per-1K-token rate; defaults to $0.01 when routing fails.
func (r *Router) EstimateCost(task *SubTask, inputTokens, outputTokens int) float64 {
	_, spec, err := r.Route(task, 1.0)
	if err != nil {
		return 0.01
	}

	totalTokens := inputTokens + outputTokens
	return spec.CostPer1K * float64(totalTokens) / 1000.0
}

// SandboxConfig controls how code-execution sandboxes are created:
// container image, resource limits, timeout, and whether network access
// is permitted.
type SandboxConfig struct {
	Image        string
	Timeout      time.Duration
	MemoryLimit  string
	CPULimit     string
	NetworkMode  string
	WorkDir      string
	MaxFileSize  int64
	AllowNetwork bool
}

// DefaultSandboxConfig returns the conservative defaults: 5-minute
// timeout, 512 MiB / 1 CPU, 10 MiB per-file cap, and no network.
func DefaultSandboxConfig() SandboxConfig {
	return SandboxConfig{
		Image:        "gooseek/sandbox:latest",
		Timeout:      5 * time.Minute,
		MemoryLimit:  "512m",
		CPULimit:     "1.0",
		NetworkMode:  "none",
		WorkDir:      "/workspace",
		MaxFileSize:  10 * 1024 * 1024,
		AllowNetwork: false,
	}
}
NetworkMode: "none", + WorkDir: "/workspace", + MaxFileSize: 10 * 1024 * 1024, + AllowNetwork: false, + } +} + +type Sandbox struct { + ID string + ContainerID string + WorkDir string + Status string + TaskID string + CreatedAt time.Time +} + +type SandboxManager struct { + cfg SandboxConfig + sandboxes map[string]*Sandbox + mu sync.RWMutex + useDocker bool +} + +func NewSandboxManager(cfg SandboxConfig) *SandboxManager { + if cfg.Timeout == 0 { + cfg.Timeout = 5 * time.Minute + } + if cfg.MemoryLimit == "" { + cfg.MemoryLimit = "512m" + } + if cfg.WorkDir == "" { + cfg.WorkDir = "/workspace" + } + + useDocker := isDockerAvailable() + + return &SandboxManager{ + cfg: cfg, + sandboxes: make(map[string]*Sandbox), + useDocker: useDocker, + } +} + +func isDockerAvailable() bool { + cmd := exec.Command("docker", "version") + return cmd.Run() == nil +} + +func (sm *SandboxManager) Create(ctx context.Context, taskID string) (*Sandbox, error) { + sandboxID := uuid.New().String()[:8] + + sandbox := &Sandbox{ + ID: sandboxID, + TaskID: taskID, + Status: "creating", + CreatedAt: time.Now(), + } + + if sm.useDocker { + workDir, err := os.MkdirTemp("", fmt.Sprintf("sandbox-%s-", sandboxID)) + if err != nil { + return nil, fmt.Errorf("failed to create temp dir: %w", err) + } + sandbox.WorkDir = workDir + + args := []string{ + "create", + "--name", fmt.Sprintf("gooseek-sandbox-%s", sandboxID), + "-v", fmt.Sprintf("%s:%s", workDir, sm.cfg.WorkDir), + "-w", sm.cfg.WorkDir, + "--memory", sm.cfg.MemoryLimit, + "--cpus", sm.cfg.CPULimit, + } + + if !sm.cfg.AllowNetwork { + args = append(args, "--network", "none") + } + + args = append(args, sm.cfg.Image, "tail", "-f", "/dev/null") + + cmd := exec.CommandContext(ctx, "docker", args...) 
+ output, err := cmd.CombinedOutput() + if err != nil { + os.RemoveAll(workDir) + return nil, fmt.Errorf("failed to create container: %w - %s", err, string(output)) + } + + sandbox.ContainerID = strings.TrimSpace(string(output)) + + startCmd := exec.CommandContext(ctx, "docker", "start", sandbox.ContainerID) + if err := startCmd.Run(); err != nil { + sm.cleanupContainer(sandbox) + return nil, fmt.Errorf("failed to start container: %w", err) + } + } else { + workDir, err := os.MkdirTemp("", fmt.Sprintf("sandbox-%s-", sandboxID)) + if err != nil { + return nil, fmt.Errorf("failed to create temp dir: %w", err) + } + sandbox.WorkDir = workDir + } + + sandbox.Status = "running" + + sm.mu.Lock() + sm.sandboxes[sandboxID] = sandbox + sm.mu.Unlock() + + return sandbox, nil +} + +func (sm *SandboxManager) Execute(ctx context.Context, sandbox *Sandbox, code string, lang string) (*SandboxResult, error) { + ctx, cancel := context.WithTimeout(ctx, sm.cfg.Timeout) + defer cancel() + + startTime := time.Now() + + filename, err := sm.writeCodeFile(sandbox, code, lang) + if err != nil { + return nil, err + } + + var cmd *exec.Cmd + var stdout, stderr bytes.Buffer + + if sm.useDocker { + runCmd := sm.getRunCommand(lang, filename) + cmd = exec.CommandContext(ctx, "docker", "exec", sandbox.ContainerID, "sh", "-c", runCmd) + } else { + cmd = sm.getLocalCommand(ctx, lang, filepath.Join(sandbox.WorkDir, filename)) + } + + cmd.Stdout = &stdout + cmd.Stderr = &stderr + + err = cmd.Run() + exitCode := 0 + if err != nil { + if exitErr, ok := err.(*exec.ExitError); ok { + exitCode = exitErr.ExitCode() + } else if ctx.Err() == context.DeadlineExceeded { + return &SandboxResult{ + Stderr: "Execution timeout exceeded", + ExitCode: -1, + Duration: time.Since(startTime), + }, nil + } + } + + files, _ := sm.collectOutputFiles(sandbox) + + return &SandboxResult{ + Stdout: stdout.String(), + Stderr: stderr.String(), + ExitCode: exitCode, + Files: files, + Duration: time.Since(startTime), + }, nil +} 
+ +func (sm *SandboxManager) RunCommand(ctx context.Context, sandbox *Sandbox, command string) (*SandboxResult, error) { + ctx, cancel := context.WithTimeout(ctx, sm.cfg.Timeout) + defer cancel() + + startTime := time.Now() + + var cmd *exec.Cmd + var stdout, stderr bytes.Buffer + + if sm.useDocker { + cmd = exec.CommandContext(ctx, "docker", "exec", sandbox.ContainerID, "sh", "-c", command) + } else { + cmd = exec.CommandContext(ctx, "sh", "-c", command) + cmd.Dir = sandbox.WorkDir + } + + cmd.Stdout = &stdout + cmd.Stderr = &stderr + + err := cmd.Run() + exitCode := 0 + if err != nil { + if exitErr, ok := err.(*exec.ExitError); ok { + exitCode = exitErr.ExitCode() + } + } + + return &SandboxResult{ + Stdout: stdout.String(), + Stderr: stderr.String(), + ExitCode: exitCode, + Duration: time.Since(startTime), + }, nil +} + +func (sm *SandboxManager) WriteFile(ctx context.Context, sandbox *Sandbox, path string, content []byte) error { + if int64(len(content)) > sm.cfg.MaxFileSize { + return fmt.Errorf("file size exceeds limit: %d > %d", len(content), sm.cfg.MaxFileSize) + } + + fullPath := filepath.Join(sandbox.WorkDir, path) + dir := filepath.Dir(fullPath) + + if err := os.MkdirAll(dir, 0755); err != nil { + return fmt.Errorf("failed to create directory: %w", err) + } + + return os.WriteFile(fullPath, content, 0644) +} + +func (sm *SandboxManager) ReadFile(ctx context.Context, sandbox *Sandbox, path string) ([]byte, error) { + fullPath := filepath.Join(sandbox.WorkDir, path) + return os.ReadFile(fullPath) +} + +func (sm *SandboxManager) Destroy(ctx context.Context, sandbox *Sandbox) error { + sm.mu.Lock() + delete(sm.sandboxes, sandbox.ID) + sm.mu.Unlock() + + if sm.useDocker && sandbox.ContainerID != "" { + sm.cleanupContainer(sandbox) + } + + if sandbox.WorkDir != "" { + os.RemoveAll(sandbox.WorkDir) + } + + return nil +} + +func (sm *SandboxManager) cleanupContainer(sandbox *Sandbox) { + exec.Command("docker", "stop", sandbox.ContainerID).Run() + 
// writeCodeFile saves code under a canonical per-language filename
// (main.py, main.js, …, main.txt for unknown languages) in the sandbox
// work dir and returns that filename. 0755 so scripts are executable.
func (sm *SandboxManager) writeCodeFile(sandbox *Sandbox, code string, lang string) (string, error) {
	var filename string
	switch lang {
	case "python", "py":
		filename = "main.py"
	case "javascript", "js", "node":
		filename = "main.js"
	case "typescript", "ts":
		filename = "main.ts"
	case "go", "golang":
		filename = "main.go"
	case "bash", "sh", "shell":
		filename = "script.sh"
	case "ruby", "rb":
		filename = "main.rb"
	default:
		filename = "main.txt"
	}

	fullPath := filepath.Join(sandbox.WorkDir, filename)
	if err := os.WriteFile(fullPath, []byte(code), 0755); err != nil {
		return "", fmt.Errorf("failed to write code file: %w", err)
	}

	return filename, nil
}

// getRunCommand builds the in-container shell command to run the code
// file for the given language; unknown languages are just cat'ed.
// Assumes the sandbox image ships python3/node/npx/go/ruby as needed —
// TODO confirm against the gooseek/sandbox image.
func (sm *SandboxManager) getRunCommand(lang, filename string) string {
	switch lang {
	case "python", "py":
		return fmt.Sprintf("python3 %s/%s", sm.cfg.WorkDir, filename)
	case "javascript", "js", "node":
		return fmt.Sprintf("node %s/%s", sm.cfg.WorkDir, filename)
	case "typescript", "ts":
		return fmt.Sprintf("npx ts-node %s/%s", sm.cfg.WorkDir, filename)
	case "go", "golang":
		return fmt.Sprintf("go run %s/%s", sm.cfg.WorkDir, filename)
	case "bash", "sh", "shell":
		return fmt.Sprintf("bash %s/%s", sm.cfg.WorkDir, filename)
	case "ruby", "rb":
		return fmt.Sprintf("ruby %s/%s", sm.cfg.WorkDir, filename)
	default:
		return fmt.Sprintf("cat %s/%s", sm.cfg.WorkDir, filename)
	}
}

// getLocalCommand is the non-Docker counterpart of getRunCommand: it
// returns an exec.Cmd that runs the file directly on the host.
// Note: the parameter named "filepath" shadows the path/filepath package
// inside this function body (the package is not referenced here).
// TypeScript is unsupported locally and falls through to cat.
func (sm *SandboxManager) getLocalCommand(ctx context.Context, lang, filepath string) *exec.Cmd {
	switch lang {
	case "python", "py":
		return exec.CommandContext(ctx, "python3", filepath)
	case "javascript", "js", "node":
		return exec.CommandContext(ctx, "node", filepath)
	case "go", "golang":
		return exec.CommandContext(ctx, "go", "run", filepath)
	case "bash", "sh", "shell":
		return exec.CommandContext(ctx, "bash", filepath)
	case "ruby", "rb":
		return exec.CommandContext(ctx, "ruby", filepath)
	default:
		return exec.CommandContext(ctx, "cat", filepath)
	}
}

// collectOutputFiles walks the sandbox work dir and returns every file
// the executed code produced, keyed by path relative to the work dir.
// The source file itself (main.* / script.*) and files over the size
// limit are skipped; all per-file errors are swallowed (best-effort).
func (sm *SandboxManager) collectOutputFiles(sandbox *Sandbox) (map[string][]byte, error) {
	files := make(map[string][]byte)

	err := filepath.Walk(sandbox.WorkDir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return nil
		}
		if info.IsDir() {
			return nil
		}

		relPath, err := filepath.Rel(sandbox.WorkDir, path)
		if err != nil {
			return nil
		}

		// Skip the canonical source file written by writeCodeFile.
		if strings.HasPrefix(relPath, "main.") || strings.HasPrefix(relPath, "script.") {
			return nil
		}

		if info.Size() > sm.cfg.MaxFileSize {
			return nil
		}

		content, err := os.ReadFile(path)
		if err != nil {
			return nil
		}

		files[relPath] = content
		return nil
	})

	return files, err
}

// ListSandboxes returns a snapshot slice of all live sandboxes.
func (sm *SandboxManager) ListSandboxes() []*Sandbox {
	sm.mu.RLock()
	defer sm.mu.RUnlock()

	result := make([]*Sandbox, 0, len(sm.sandboxes))
	for _, s := range sm.sandboxes {
		result = append(result, s)
	}
	return result
}

// GetSandbox looks up a sandbox by ID.
func (sm *SandboxManager) GetSandbox(id string) (*Sandbox, bool) {
	sm.mu.RLock()
	defer sm.mu.RUnlock()
	s, ok := sm.sandboxes[id]
	return s, ok
}

// CopyToContainer copies a host file into the sandbox: `docker cp` when
// Docker is in use, otherwise a WriteFile into the work dir.
func (sm *SandboxManager) CopyToContainer(ctx context.Context, sandbox *Sandbox, src string, dst string) error {
	if !sm.useDocker {
		srcData, err := os.ReadFile(src)
		if err != nil {
			return err
		}
		return sm.WriteFile(ctx, sandbox, dst, srcData)
	}

	cmd := exec.CommandContext(ctx, "docker", "cp", src, fmt.Sprintf("%s:%s", sandbox.ContainerID, dst))
	return cmd.Run()
}

// CopyFromContainer copies a sandbox file out to a host path: `docker cp`
// when Docker is in use, otherwise a direct read from the work dir.
func (sm *SandboxManager) CopyFromContainer(ctx context.Context, sandbox *Sandbox, src string, dst string) error {
	if !sm.useDocker {
		srcPath := filepath.Join(sandbox.WorkDir, src)
		srcData, err := os.ReadFile(srcPath)
		if err != nil {
			return err
		}
		return os.WriteFile(dst, srcData, 0644)
	}

	cmd := exec.CommandContext(ctx, "docker", "cp", fmt.Sprintf("%s:%s", sandbox.ContainerID, src), dst)
	return cmd.Run()
}
*SandboxManager) StreamLogs(ctx context.Context, sandbox *Sandbox) (io.ReadCloser, error) { + if !sm.useDocker { + return nil, fmt.Errorf("streaming not supported without Docker") + } + + cmd := exec.CommandContext(ctx, "docker", "logs", "-f", sandbox.ContainerID) + stdout, err := cmd.StdoutPipe() + if err != nil { + return nil, err + } + + if err := cmd.Start(); err != nil { + return nil, err + } + + return stdout, nil +} diff --git a/backend/internal/computer/scheduler.go b/backend/internal/computer/scheduler.go new file mode 100644 index 0000000..9d76e56 --- /dev/null +++ b/backend/internal/computer/scheduler.go @@ -0,0 +1,386 @@ +package computer + +import ( + "context" + "log" + "sync" + "time" + + "github.com/robfig/cron/v3" +) + +type Scheduler struct { + taskRepo TaskRepository + computer *Computer + cron *cron.Cron + jobs map[string]cron.EntryID + running map[string]bool + mu sync.RWMutex + stopCh chan struct{} +} + +func NewScheduler(taskRepo TaskRepository, computer *Computer) *Scheduler { + return &Scheduler{ + taskRepo: taskRepo, + computer: computer, + cron: cron.New(cron.WithSeconds()), + jobs: make(map[string]cron.EntryID), + running: make(map[string]bool), + stopCh: make(chan struct{}), + } +} + +func (s *Scheduler) Start(ctx context.Context) { + s.cron.Start() + + go s.pollScheduledTasks(ctx) + + log.Println("[Scheduler] Started") +} + +func (s *Scheduler) Stop() { + close(s.stopCh) + s.cron.Stop() + log.Println("[Scheduler] Stopped") +} + +func (s *Scheduler) pollScheduledTasks(ctx context.Context) { + ticker := time.NewTicker(30 * time.Second) + defer ticker.Stop() + + s.loadScheduledTasks(ctx) + + for { + select { + case <-ctx.Done(): + return + case <-s.stopCh: + return + case <-ticker.C: + s.checkAndExecute(ctx) + } + } +} + +func (s *Scheduler) loadScheduledTasks(ctx context.Context) { + tasks, err := s.taskRepo.GetScheduled(ctx) + if err != nil { + log.Printf("[Scheduler] Failed to load scheduled tasks: %v", err) + return + } + + for _, 
task := range tasks { + if task.Schedule != nil && task.Schedule.Enabled { + s.scheduleTask(&task) + } + } + + log.Printf("[Scheduler] Loaded %d scheduled tasks", len(tasks)) +} + +func (s *Scheduler) scheduleTask(task *ComputerTask) error { + s.mu.Lock() + defer s.mu.Unlock() + + if oldID, exists := s.jobs[task.ID]; exists { + s.cron.Remove(oldID) + } + + if task.Schedule == nil || !task.Schedule.Enabled { + return nil + } + + var entryID cron.EntryID + var err error + + switch task.Schedule.Type { + case "cron": + if task.Schedule.CronExpr == "" { + return nil + } + entryID, err = s.cron.AddFunc(task.Schedule.CronExpr, func() { + s.executeScheduledTask(task.ID) + }) + + case "interval": + if task.Schedule.Interval <= 0 { + return nil + } + cronExpr := s.intervalToCron(task.Schedule.Interval) + entryID, err = s.cron.AddFunc(cronExpr, func() { + s.executeScheduledTask(task.ID) + }) + + case "once": + go func() { + if task.Schedule.NextRun.After(time.Now()) { + time.Sleep(time.Until(task.Schedule.NextRun)) + } + s.executeScheduledTask(task.ID) + }() + return nil + + case "daily": + entryID, err = s.cron.AddFunc("0 0 9 * * *", func() { + s.executeScheduledTask(task.ID) + }) + + case "hourly": + entryID, err = s.cron.AddFunc("0 0 * * * *", func() { + s.executeScheduledTask(task.ID) + }) + + case "weekly": + entryID, err = s.cron.AddFunc("0 0 9 * * 1", func() { + s.executeScheduledTask(task.ID) + }) + + case "monthly": + entryID, err = s.cron.AddFunc("0 0 9 1 * *", func() { + s.executeScheduledTask(task.ID) + }) + + default: + return nil + } + + if err != nil { + log.Printf("[Scheduler] Failed to schedule task %s: %v", task.ID, err) + return err + } + + s.jobs[task.ID] = entryID + log.Printf("[Scheduler] Scheduled task %s with type %s", task.ID, task.Schedule.Type) + + return nil +} + +func (s *Scheduler) intervalToCron(seconds int) string { + if seconds < 60 { + return "*/30 * * * * *" + } + if seconds < 3600 { + minutes := seconds / 60 + return "0 */" + itoa(minutes) 
+ " * * * *" + } + if seconds < 86400 { + hours := seconds / 3600 + return "0 0 */" + itoa(hours) + " * * *" + } + return "0 0 0 * * *" +} + +// itoa renders an integer in base 10 without strconv; the previous version returned "" for values >= 10, yielding invalid cron expressions such as "0 */ * * * *". +func itoa(i int) string { + if i < 0 { + return "-" + itoa(-i) + } + if i < 10 { + return string(rune('0' + i)) + } + return itoa(i/10) + string(rune('0'+i%10)) +} + +func (s *Scheduler) executeScheduledTask(taskID string) { + s.mu.Lock() + if s.running[taskID] { + s.mu.Unlock() + log.Printf("[Scheduler] Task %s is already running, skipping", taskID) + return + } + s.running[taskID] = true + s.mu.Unlock() + + defer func() { + s.mu.Lock() + delete(s.running, taskID) + s.mu.Unlock() + }() + + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Minute) + defer cancel() + + task, err := s.taskRepo.GetByID(ctx, taskID) + if err != nil { + log.Printf("[Scheduler] Failed to get task %s: %v", taskID, err) + return + } + + if task.Schedule != nil { + if task.Schedule.ExpiresAt != nil && time.Now().After(*task.Schedule.ExpiresAt) { + log.Printf("[Scheduler] Task %s has expired, removing", taskID) + s.Cancel(taskID) + return + } + + if task.Schedule.MaxRuns > 0 && task.Schedule.RunCount >= task.Schedule.MaxRuns { + log.Printf("[Scheduler] Task %s reached max runs (%d), removing", taskID, task.Schedule.MaxRuns) + s.Cancel(taskID) + return + } + } + + log.Printf("[Scheduler] Executing scheduled task %s (run #%d)", taskID, task.RunCount+1) + + _, err = s.computer.Execute(ctx, task.UserID, task.Query, ExecuteOptions{ + Async: false, + Context: task.Memory, + }) + + if err != nil { + log.Printf("[Scheduler] Task %s execution failed: %v", taskID, err) + } else { + log.Printf("[Scheduler] Task %s completed successfully", taskID) + } + + task.RunCount++ + if task.Schedule != nil { + task.Schedule.RunCount = task.RunCount + task.Schedule.NextRun = s.calculateNextRun(task.Schedule) + task.NextRunAt = &task.Schedule.NextRun + } + task.UpdatedAt = time.Now() + + if err := s.taskRepo.Update(ctx, task); err != nil { + log.Printf("[Scheduler] Failed to update task %s: %v", taskID, err) + } +} + +func (s
*Scheduler) calculateNextRun(schedule *Schedule) time.Time { + switch schedule.Type { + case "interval": + return time.Now().Add(time.Duration(schedule.Interval) * time.Second) + case "hourly": + return time.Now().Add(time.Hour).Truncate(time.Hour) + case "daily": + next := time.Now().Add(24 * time.Hour) + return time.Date(next.Year(), next.Month(), next.Day(), 9, 0, 0, 0, next.Location()) + case "weekly": + next := time.Now().Add(7 * 24 * time.Hour) + return time.Date(next.Year(), next.Month(), next.Day(), 9, 0, 0, 0, next.Location()) + case "monthly": + next := time.Now().AddDate(0, 1, 0) + return time.Date(next.Year(), next.Month(), 1, 9, 0, 0, 0, next.Location()) + default: + return time.Now().Add(time.Hour) + } +} + +func (s *Scheduler) checkAndExecute(ctx context.Context) { + tasks, err := s.taskRepo.GetScheduled(ctx) + if err != nil { + return + } + + now := time.Now() + for _, task := range tasks { + if task.NextRunAt != nil && task.NextRunAt.Before(now) { + if task.Schedule != nil && task.Schedule.Enabled { + go s.executeScheduledTask(task.ID) + } + } + } +} + +func (s *Scheduler) Schedule(taskID string, schedule Schedule) error { + ctx := context.Background() + task, err := s.taskRepo.GetByID(ctx, taskID) + if err != nil { + return err + } + + task.Schedule = &schedule + task.Schedule.Enabled = true + task.Schedule.NextRun = s.calculateNextRun(&schedule) + task.NextRunAt = &task.Schedule.NextRun + task.Status = StatusScheduled + task.UpdatedAt = time.Now() + + if err := s.taskRepo.Update(ctx, task); err != nil { + return err + } + + return s.scheduleTask(task) +} + +func (s *Scheduler) Cancel(taskID string) error { + s.mu.Lock() + defer s.mu.Unlock() + + if entryID, exists := s.jobs[taskID]; exists { + s.cron.Remove(entryID) + delete(s.jobs, taskID) + } + + ctx := context.Background() + task, err := s.taskRepo.GetByID(ctx, taskID) + if err != nil { + return err + } + + if task.Schedule != nil { + task.Schedule.Enabled = false + } + task.Status = 
StatusCancelled + task.UpdatedAt = time.Now() + + return s.taskRepo.Update(ctx, task) +} + +func (s *Scheduler) Pause(taskID string) error { + s.mu.Lock() + defer s.mu.Unlock() + + if entryID, exists := s.jobs[taskID]; exists { + s.cron.Remove(entryID) + delete(s.jobs, taskID) + } + + ctx := context.Background() + task, err := s.taskRepo.GetByID(ctx, taskID) + if err != nil { + return err + } + + if task.Schedule != nil { + task.Schedule.Enabled = false + } + task.UpdatedAt = time.Now() + + return s.taskRepo.Update(ctx, task) +} + +func (s *Scheduler) Resume(taskID string) error { + ctx := context.Background() + task, err := s.taskRepo.GetByID(ctx, taskID) + if err != nil { + return err + } + + if task.Schedule != nil { + task.Schedule.Enabled = true + task.Schedule.NextRun = s.calculateNextRun(task.Schedule) + task.NextRunAt = &task.Schedule.NextRun + } + task.Status = StatusScheduled + task.UpdatedAt = time.Now() + + if err := s.taskRepo.Update(ctx, task); err != nil { + return err + } + + return s.scheduleTask(task) +} + +func (s *Scheduler) GetScheduledTasks() []string { + s.mu.RLock() + defer s.mu.RUnlock() + + result := make([]string, 0, len(s.jobs)) + for taskID := range s.jobs { + result = append(result, taskID) + } + return result +} + +func (s *Scheduler) IsRunning(taskID string) bool { + s.mu.RLock() + defer s.mu.RUnlock() + return s.running[taskID] +} diff --git a/backend/internal/computer/types.go b/backend/internal/computer/types.go new file mode 100644 index 0000000..3a3ba0b --- /dev/null +++ b/backend/internal/computer/types.go @@ -0,0 +1,376 @@ +package computer + +import ( + "time" + + "github.com/gooseek/backend/internal/llm" +) + +type TaskStatus string + +const ( + StatusPending TaskStatus = "pending" + StatusPlanning TaskStatus = "planning" + StatusExecuting TaskStatus = "executing" + StatusWaiting TaskStatus = "waiting_user" + StatusCompleted TaskStatus = "completed" + StatusFailed TaskStatus = "failed" + StatusCancelled TaskStatus = 
"cancelled" + StatusScheduled TaskStatus = "scheduled" + StatusPaused TaskStatus = "paused" + StatusCheckpoint TaskStatus = "checkpoint" + StatusLongRunning TaskStatus = "long_running" +) + +type TaskType string + +const ( + TaskResearch TaskType = "research" + TaskCode TaskType = "code" + TaskAnalysis TaskType = "analysis" + TaskDesign TaskType = "design" + TaskDeploy TaskType = "deploy" + TaskMonitor TaskType = "monitor" + TaskReport TaskType = "report" + TaskCommunicate TaskType = "communicate" + TaskSchedule TaskType = "schedule" + TaskTransform TaskType = "transform" + TaskValidate TaskType = "validate" +) + +type ComputerTask struct { + ID string `json:"id"` + UserID string `json:"userId"` + Query string `json:"query"` + Status TaskStatus `json:"status"` + Plan *TaskPlan `json:"plan,omitempty"` + SubTasks []SubTask `json:"subTasks,omitempty"` + Artifacts []Artifact `json:"artifacts,omitempty"` + Memory map[string]interface{} `json:"memory,omitempty"` + Progress int `json:"progress"` + Message string `json:"message,omitempty"` + Error string `json:"error,omitempty"` + Schedule *Schedule `json:"schedule,omitempty"` + NextRunAt *time.Time `json:"nextRunAt,omitempty"` + RunCount int `json:"runCount"` + TotalCost float64 `json:"totalCost"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` + CompletedAt *time.Time `json:"completedAt,omitempty"` + + DurationMode DurationMode `json:"durationMode"` + Checkpoint *Checkpoint `json:"checkpoint,omitempty"` + Checkpoints []Checkpoint `json:"checkpoints,omitempty"` + MaxDuration time.Duration `json:"maxDuration"` + EstimatedEnd *time.Time `json:"estimatedEnd,omitempty"` + Iterations int `json:"iterations"` + MaxIterations int `json:"maxIterations"` + PausedAt *time.Time `json:"pausedAt,omitempty"` + ResumedAt *time.Time `json:"resumedAt,omitempty"` + TotalRuntime time.Duration `json:"totalRuntime"` + HeartbeatAt *time.Time `json:"heartbeatAt,omitempty"` + Priority TaskPriority 
`json:"priority"` + ResourceLimits *ResourceLimits `json:"resourceLimits,omitempty"` +} + +type DurationMode string + +const ( + DurationShort DurationMode = "short" + DurationMedium DurationMode = "medium" + DurationLong DurationMode = "long" + DurationExtended DurationMode = "extended" + DurationUnlimited DurationMode = "unlimited" +) + +type TaskPriority string + +const ( + PriorityLow TaskPriority = "low" + PriorityNormal TaskPriority = "normal" + PriorityHigh TaskPriority = "high" + PriorityCritical TaskPriority = "critical" +) + +type Checkpoint struct { + ID string `json:"id"` + TaskID string `json:"taskId"` + SubTaskIndex int `json:"subTaskIndex"` + WaveIndex int `json:"waveIndex"` + State map[string]interface{} `json:"state"` + Progress int `json:"progress"` + Artifacts []string `json:"artifacts"` + Memory map[string]interface{} `json:"memory"` + CreatedAt time.Time `json:"createdAt"` + RuntimeSoFar time.Duration `json:"runtimeSoFar"` + CostSoFar float64 `json:"costSoFar"` + Reason string `json:"reason"` +} + +type ResourceLimits struct { + MaxCPU float64 `json:"maxCpu"` + MaxMemoryMB int `json:"maxMemoryMb"` + MaxDiskMB int `json:"maxDiskMb"` + MaxNetworkMbps int `json:"maxNetworkMbps"` + MaxCostPerHour float64 `json:"maxCostPerHour"` + MaxTotalCost float64 `json:"maxTotalCost"` + MaxConcurrent int `json:"maxConcurrent"` + IdleTimeoutMins int `json:"idleTimeoutMins"` +} + +var DurationModeConfigs = map[DurationMode]struct { + MaxDuration time.Duration + CheckpointFreq time.Duration + HeartbeatFreq time.Duration + MaxIterations int +}{ + DurationShort: {30 * time.Minute, 5 * time.Minute, 30 * time.Second, 10}, + DurationMedium: {4 * time.Hour, 15 * time.Minute, time.Minute, 50}, + DurationLong: {24 * time.Hour, 30 * time.Minute, 2 * time.Minute, 200}, + DurationExtended: {7 * 24 * time.Hour, time.Hour, 5 * time.Minute, 1000}, + DurationUnlimited: {365 * 24 * time.Hour, 4 * time.Hour, 10 * time.Minute, 0}, +} + +type SubTask struct { + ID string `json:"id"` 
+ Type TaskType `json:"type"` + Description string `json:"description"` + Dependencies []string `json:"dependencies,omitempty"` + ModelID string `json:"modelId,omitempty"` + RequiredCaps []llm.ModelCapability `json:"requiredCaps,omitempty"` + Input map[string]interface{} `json:"input,omitempty"` + Output map[string]interface{} `json:"output,omitempty"` + Status TaskStatus `json:"status"` + Progress int `json:"progress"` + Error string `json:"error,omitempty"` + Cost float64 `json:"cost"` + StartedAt *time.Time `json:"startedAt,omitempty"` + CompletedAt *time.Time `json:"completedAt,omitempty"` + Retries int `json:"retries"` + MaxRetries int `json:"maxRetries"` +} + +type TaskPlan struct { + Query string `json:"query"` + Summary string `json:"summary"` + SubTasks []SubTask `json:"subTasks"` + ExecutionOrder [][]string `json:"executionOrder"` + EstimatedCost float64 `json:"estimatedCost"` + EstimatedTime int `json:"estimatedTimeSeconds"` +} + +type Artifact struct { + ID string `json:"id"` + TaskID string `json:"taskId"` + Type string `json:"type"` + Name string `json:"name"` + Content []byte `json:"-"` + URL string `json:"url,omitempty"` + Size int64 `json:"size"` + MimeType string `json:"mimeType,omitempty"` + Metadata map[string]interface{} `json:"metadata,omitempty"` + CreatedAt time.Time `json:"createdAt"` +} + +type Schedule struct { + Type string `json:"type"` + CronExpr string `json:"cronExpr,omitempty"` + Interval int `json:"intervalSeconds,omitempty"` + NextRun time.Time `json:"nextRun"` + MaxRuns int `json:"maxRuns"` + RunCount int `json:"runCount"` + ExpiresAt *time.Time `json:"expiresAt,omitempty"` + Enabled bool `json:"enabled"` + + DurationMode DurationMode `json:"durationMode,omitempty"` + RetryOnFail bool `json:"retryOnFail"` + MaxRetries int `json:"maxRetries"` + RetryDelay time.Duration `json:"retryDelay"` + Timezone string `json:"timezone,omitempty"` + WindowStart string `json:"windowStart,omitempty"` + WindowEnd string 
`json:"windowEnd,omitempty"` + Conditions []Condition `json:"conditions,omitempty"` +} + +type Condition struct { + Type string `json:"type"` + Field string `json:"field"` + Operator string `json:"operator"` + Value interface{} `json:"value"` + Params map[string]interface{} `json:"params,omitempty"` +} + +const ( + ScheduleOnce = "once" + ScheduleInterval = "interval" + ScheduleCron = "cron" + ScheduleHourly = "hourly" + ScheduleDaily = "daily" + ScheduleWeekly = "weekly" + ScheduleMonthly = "monthly" + ScheduleQuarterly = "quarterly" + ScheduleYearly = "yearly" + ScheduleContinuous = "continuous" + ScheduleOnCondition = "on_condition" +) + +type TaskEvent struct { + Type string `json:"type"` + TaskID string `json:"taskId"` + SubTaskID string `json:"subTaskId,omitempty"` + Status TaskStatus `json:"status,omitempty"` + Progress int `json:"progress,omitempty"` + Message string `json:"message,omitempty"` + Data map[string]interface{} `json:"data,omitempty"` + Timestamp time.Time `json:"timestamp"` +} + +type ExecuteOptions struct { + Async bool `json:"async"` + MaxCost float64 `json:"maxCost"` + Timeout int `json:"timeoutSeconds"` + EnableSandbox bool `json:"enableSandbox"` + Schedule *Schedule `json:"schedule,omitempty"` + Context map[string]interface{} `json:"context,omitempty"` + + DurationMode DurationMode `json:"durationMode,omitempty"` + Priority TaskPriority `json:"priority,omitempty"` + ResourceLimits *ResourceLimits `json:"resourceLimits,omitempty"` + ResumeFromID string `json:"resumeFromId,omitempty"` + EnableBrowser bool `json:"enableBrowser"` + BrowserOptions *BrowserOptions `json:"browserOptions,omitempty"` + NotifyOnEvents []string `json:"notifyOnEvents,omitempty"` + WebhookURL string `json:"webhookUrl,omitempty"` + Tags []string `json:"tags,omitempty"` +} + +type BrowserOptions struct { + Headless bool `json:"headless"` + UserAgent string `json:"userAgent,omitempty"` + Viewport *Viewport `json:"viewport,omitempty"` + ProxyURL string 
`json:"proxyUrl,omitempty"` + Timeout int `json:"timeout"` + Screenshots bool `json:"screenshots"` + RecordVideo bool `json:"recordVideo"` + BlockAds bool `json:"blockAds"` + AcceptCookies bool `json:"acceptCookies"` +} + +type Viewport struct { + Width int `json:"width"` + Height int `json:"height"` +} + +type ExecutionResult struct { + TaskID string + SubTaskID string + Output map[string]interface{} + Artifacts []Artifact + Duration time.Duration + Cost float64 + Error error +} + +type SandboxResult struct { + Stdout string + Stderr string + ExitCode int + Files map[string][]byte + Duration time.Duration +} + +type MemoryEntry struct { + ID string `json:"id"` + UserID string `json:"userId"` + TaskID string `json:"taskId,omitempty"` + Key string `json:"key"` + Value interface{} `json:"value"` + Type string `json:"type"` + Tags []string `json:"tags,omitempty"` + CreatedAt time.Time `json:"createdAt"` + ExpiresAt *time.Time `json:"expiresAt,omitempty"` +} + +const ( + EventTaskCreated = "task_created" + EventTaskStarted = "task_started" + EventTaskProgress = "task_progress" + EventTaskCompleted = "task_completed" + EventTaskFailed = "task_failed" + EventSubTaskStart = "subtask_start" + EventSubTaskDone = "subtask_done" + EventSubTaskFail = "subtask_fail" + EventArtifact = "artifact" + EventMessage = "message" + EventUserInput = "user_input_required" + EventCheckpoint = "checkpoint" + EventCheckpointSaved = "checkpoint_saved" + EventResumed = "resumed" + EventPaused = "paused" + EventHeartbeat = "heartbeat" + EventIteration = "iteration" + EventBrowserAction = "browser_action" + EventScreenshot = "screenshot" + EventResourceAlert = "resource_alert" + EventScheduleUpdate = "schedule_update" +) + +type BrowserAction struct { + ID string `json:"id"` + Type BrowserActionType `json:"type"` + Selector string `json:"selector,omitempty"` + URL string `json:"url,omitempty"` + Value string `json:"value,omitempty"` + Options map[string]interface{} `json:"options,omitempty"` + 
Screenshot bool `json:"screenshot"` + WaitAfter int `json:"waitAfterMs"` + Timeout int `json:"timeoutMs"` + Result *BrowserActionResult `json:"result,omitempty"` +} + +type BrowserActionType string + +const ( + BrowserNavigate BrowserActionType = "navigate" + BrowserClick BrowserActionType = "click" + BrowserType BrowserActionType = "type" + BrowserScroll BrowserActionType = "scroll" + BrowserScreenshot BrowserActionType = "screenshot" + BrowserWait BrowserActionType = "wait" + BrowserWaitSelector BrowserActionType = "wait_selector" + BrowserExtract BrowserActionType = "extract" + BrowserEval BrowserActionType = "eval" + BrowserSelect BrowserActionType = "select" + BrowserUpload BrowserActionType = "upload" + BrowserDownload BrowserActionType = "download" + BrowserPDF BrowserActionType = "pdf" + BrowserClose BrowserActionType = "close" +) + +type BrowserActionResult struct { + Success bool `json:"success"` + Data interface{} `json:"data,omitempty"` + Screenshot string `json:"screenshot,omitempty"` + Error string `json:"error,omitempty"` + Duration time.Duration `json:"duration"` + PageTitle string `json:"pageTitle,omitempty"` + PageURL string `json:"pageUrl,omitempty"` + Cookies []map[string]string `json:"cookies,omitempty"` + LocalStorage map[string]string `json:"localStorage,omitempty"` +} + +const ( + ArtifactTypeFile = "file" + ArtifactTypeCode = "code" + ArtifactTypeReport = "report" + ArtifactTypeDeployment = "deployment" + ArtifactTypeImage = "image" + ArtifactTypeData = "data" +) + +const ( + MemoryTypeFact = "fact" + MemoryTypePreference = "preference" + MemoryTypeContext = "context" + MemoryTypeResult = "result" +) diff --git a/backend/internal/db/article_summary_repo.go b/backend/internal/db/article_summary_repo.go new file mode 100644 index 0000000..52d57b1 --- /dev/null +++ b/backend/internal/db/article_summary_repo.go @@ -0,0 +1,97 @@ +package db + +import ( + "context" + "crypto/sha256" + "database/sql" + "encoding/hex" + "encoding/json" + "strings" 
+ "time" +) + +type ArticleSummary struct { + ID int64 `json:"id"` + URLHash string `json:"urlHash"` + URL string `json:"url"` + Events []string `json:"events"` + CreatedAt time.Time `json:"createdAt"` + ExpiresAt time.Time `json:"expiresAt"` +} + +type ArticleSummaryRepository struct { + db *PostgresDB +} + +func NewArticleSummaryRepository(db *PostgresDB) *ArticleSummaryRepository { + return &ArticleSummaryRepository{db: db} +} + +func (r *ArticleSummaryRepository) hashURL(url string) string { + normalized := strings.TrimSpace(url) + normalized = strings.TrimSuffix(normalized, "/") + normalized = strings.TrimPrefix(normalized, "https://") + normalized = strings.TrimPrefix(normalized, "http://") + normalized = strings.TrimPrefix(normalized, "www.") + + hash := sha256.Sum256([]byte(normalized)) + return hex.EncodeToString(hash[:]) +} + +func (r *ArticleSummaryRepository) GetByURL(ctx context.Context, url string) (*ArticleSummary, error) { + urlHash := r.hashURL(url) + + query := ` + SELECT id, url_hash, url, events, created_at, expires_at + FROM article_summaries + WHERE url_hash = $1 AND expires_at > NOW() + ` + + var a ArticleSummary + var eventsJSON []byte + + err := r.db.db.QueryRowContext(ctx, query, urlHash).Scan( + &a.ID, &a.URLHash, &a.URL, &eventsJSON, &a.CreatedAt, &a.ExpiresAt, + ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + + json.Unmarshal(eventsJSON, &a.Events) + return &a, nil +} + +func (r *ArticleSummaryRepository) Save(ctx context.Context, url string, events []string, ttl time.Duration) error { + urlHash := r.hashURL(url) + eventsJSON, _ := json.Marshal(events) + expiresAt := time.Now().Add(ttl) + + query := ` + INSERT INTO article_summaries (url_hash, url, events, expires_at) + VALUES ($1, $2, $3, $4) + ON CONFLICT (url_hash) + DO UPDATE SET + events = EXCLUDED.events, + expires_at = EXCLUDED.expires_at + ` + + _, err := r.db.db.ExecContext(ctx, query, urlHash, url, eventsJSON, expiresAt) + return 
err +} + +func (r *ArticleSummaryRepository) Delete(ctx context.Context, url string) error { + urlHash := r.hashURL(url) + _, err := r.db.db.ExecContext(ctx, "DELETE FROM article_summaries WHERE url_hash = $1", urlHash) + return err +} + +func (r *ArticleSummaryRepository) CleanupExpired(ctx context.Context) (int64, error) { + result, err := r.db.db.ExecContext(ctx, "DELETE FROM article_summaries WHERE expires_at < NOW()") + if err != nil { + return 0, err + } + return result.RowsAffected() +} diff --git a/backend/internal/db/collection_repo.go b/backend/internal/db/collection_repo.go new file mode 100644 index 0000000..b6fbcc6 --- /dev/null +++ b/backend/internal/db/collection_repo.go @@ -0,0 +1,204 @@ +package db + +import ( + "context" + "database/sql" + "encoding/json" + "time" +) + +type Collection struct { + ID string `json:"id"` + UserID string `json:"userId"` + Name string `json:"name"` + Description string `json:"description"` + IsPublic bool `json:"isPublic"` + ContextEnabled bool `json:"contextEnabled"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` + Items []CollectionItem `json:"items,omitempty"` + ItemCount int `json:"itemCount,omitempty"` +} + +type CollectionItem struct { + ID string `json:"id"` + CollectionID string `json:"collectionId"` + ItemType string `json:"itemType"` + Title string `json:"title"` + Content string `json:"content"` + URL string `json:"url"` + Metadata map[string]interface{} `json:"metadata"` + CreatedAt time.Time `json:"createdAt"` + SortOrder int `json:"sortOrder"` +} + +type CollectionRepository struct { + db *PostgresDB +} + +func NewCollectionRepository(db *PostgresDB) *CollectionRepository { + return &CollectionRepository{db: db} +} + +func (r *CollectionRepository) Create(ctx context.Context, c *Collection) error { + query := ` + INSERT INTO collections (user_id, name, description, is_public, context_enabled) + VALUES ($1, $2, $3, $4, $5) + RETURNING id, created_at, updated_at + ` + 
return r.db.db.QueryRowContext(ctx, query, + c.UserID, c.Name, c.Description, c.IsPublic, c.ContextEnabled, + ).Scan(&c.ID, &c.CreatedAt, &c.UpdatedAt) +} + +func (r *CollectionRepository) GetByID(ctx context.Context, id string) (*Collection, error) { + query := ` + SELECT id, user_id, name, description, is_public, context_enabled, created_at, updated_at, + (SELECT COUNT(*) FROM collection_items WHERE collection_id = collections.id) as item_count + FROM collections + WHERE id = $1 + ` + + var c Collection + err := r.db.db.QueryRowContext(ctx, query, id).Scan( + &c.ID, &c.UserID, &c.Name, &c.Description, &c.IsPublic, + &c.ContextEnabled, &c.CreatedAt, &c.UpdatedAt, &c.ItemCount, + ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + + return &c, nil +} + +func (r *CollectionRepository) GetByUserID(ctx context.Context, userID string, limit, offset int) ([]*Collection, error) { + query := ` + SELECT id, user_id, name, description, is_public, context_enabled, created_at, updated_at, + (SELECT COUNT(*) FROM collection_items WHERE collection_id = collections.id) as item_count + FROM collections + WHERE user_id = $1 + ORDER BY updated_at DESC + LIMIT $2 OFFSET $3 + ` + + rows, err := r.db.db.QueryContext(ctx, query, userID, limit, offset) + if err != nil { + return nil, err + } + defer rows.Close() + + var collections []*Collection + for rows.Next() { + var c Collection + if err := rows.Scan( + &c.ID, &c.UserID, &c.Name, &c.Description, &c.IsPublic, + &c.ContextEnabled, &c.CreatedAt, &c.UpdatedAt, &c.ItemCount, + ); err != nil { + return nil, err + } + collections = append(collections, &c) + } + + return collections, nil +} + +func (r *CollectionRepository) Update(ctx context.Context, c *Collection) error { + query := ` + UPDATE collections + SET name = $2, description = $3, is_public = $4, context_enabled = $5, updated_at = NOW() + WHERE id = $1 + ` + _, err := r.db.db.ExecContext(ctx, query, + c.ID, c.Name, c.Description, 
c.IsPublic, c.ContextEnabled, + ) + return err +} + +func (r *CollectionRepository) Delete(ctx context.Context, id string) error { + _, err := r.db.db.ExecContext(ctx, "DELETE FROM collections WHERE id = $1", id) + return err +} + +func (r *CollectionRepository) AddItem(ctx context.Context, item *CollectionItem) error { + metadataJSON, _ := json.Marshal(item.Metadata) + + query := ` + INSERT INTO collection_items (collection_id, item_type, title, content, url, metadata, sort_order) + VALUES ($1, $2, $3, $4, $5, $6, COALESCE((SELECT MAX(sort_order) + 1 FROM collection_items WHERE collection_id = $1), 0)) + RETURNING id, created_at, sort_order + ` + return r.db.db.QueryRowContext(ctx, query, + item.CollectionID, item.ItemType, item.Title, item.Content, item.URL, metadataJSON, + ).Scan(&item.ID, &item.CreatedAt, &item.SortOrder) +} + +func (r *CollectionRepository) GetItems(ctx context.Context, collectionID string) ([]CollectionItem, error) { + query := ` + SELECT id, collection_id, item_type, title, content, url, metadata, created_at, sort_order + FROM collection_items + WHERE collection_id = $1 + ORDER BY sort_order ASC + ` + + rows, err := r.db.db.QueryContext(ctx, query, collectionID) + if err != nil { + return nil, err + } + defer rows.Close() + + var items []CollectionItem + for rows.Next() { + var item CollectionItem + var metadataJSON []byte + + if err := rows.Scan( + &item.ID, &item.CollectionID, &item.ItemType, &item.Title, + &item.Content, &item.URL, &metadataJSON, &item.CreatedAt, &item.SortOrder, + ); err != nil { + return nil, err + } + + json.Unmarshal(metadataJSON, &item.Metadata) + items = append(items, item) + } + + return items, nil +} + +func (r *CollectionRepository) RemoveItem(ctx context.Context, itemID string) error { + _, err := r.db.db.ExecContext(ctx, "DELETE FROM collection_items WHERE id = $1", itemID) + return err +} + +func (r *CollectionRepository) GetCollectionContext(ctx context.Context, collectionID string) (string, error) { + items, 
err := r.GetItems(ctx, collectionID) + if err != nil { + return "", err + } + + var context string + for _, item := range items { + switch item.ItemType { + case "search": + context += "Previous search: " + item.Title + "\n" + if item.Content != "" { + context += "Summary: " + item.Content + "\n" + } + case "note": + context += "User note: " + item.Content + "\n" + case "url": + context += "Saved URL: " + item.URL + " - " + item.Title + "\n" + case "file": + context += "Uploaded file: " + item.Title + "\n" + if item.Content != "" { + context += "Content: " + item.Content + "\n" + } + } + context += "\n" + } + + return context, nil +} diff --git a/backend/internal/db/computer_artifact_repo.go b/backend/internal/db/computer_artifact_repo.go new file mode 100644 index 0000000..1181044 --- /dev/null +++ b/backend/internal/db/computer_artifact_repo.go @@ -0,0 +1,322 @@ +package db + +import ( + "context" + "database/sql" + "encoding/json" + "time" + + "github.com/gooseek/backend/internal/computer" +) + +type ComputerArtifactRepo struct { + db *sql.DB +} + +func NewComputerArtifactRepo(db *sql.DB) *ComputerArtifactRepo { + return &ComputerArtifactRepo{db: db} +} + +func (r *ComputerArtifactRepo) Migrate() error { + query := ` + CREATE TABLE IF NOT EXISTS computer_artifacts ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + task_id UUID NOT NULL, + type VARCHAR(50) NOT NULL, + name VARCHAR(255), + content BYTEA, + url TEXT, + size BIGINT DEFAULT 0, + mime_type VARCHAR(100), + metadata JSONB, + created_at TIMESTAMPTZ DEFAULT NOW() + ); + + CREATE INDEX IF NOT EXISTS idx_computer_artifacts_task_id ON computer_artifacts(task_id); + CREATE INDEX IF NOT EXISTS idx_computer_artifacts_type ON computer_artifacts(type); + CREATE INDEX IF NOT EXISTS idx_computer_artifacts_created ON computer_artifacts(created_at DESC); + ` + + _, err := r.db.Exec(query) + return err +} + +func (r *ComputerArtifactRepo) Create(ctx context.Context, artifact *computer.Artifact) error { + 
metadataJSON, _ := json.Marshal(artifact.Metadata) + + query := ` + INSERT INTO computer_artifacts (id, task_id, type, name, content, url, size, mime_type, metadata, created_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) + ` + + _, err := r.db.ExecContext(ctx, query, + artifact.ID, + artifact.TaskID, + artifact.Type, + artifact.Name, + artifact.Content, + artifact.URL, + artifact.Size, + artifact.MimeType, + metadataJSON, + artifact.CreatedAt, + ) + + return err +} + +func (r *ComputerArtifactRepo) GetByID(ctx context.Context, id string) (*computer.Artifact, error) { + query := ` + SELECT id, task_id, type, name, content, url, size, mime_type, metadata, created_at + FROM computer_artifacts + WHERE id = $1 + ` + + var artifact computer.Artifact + var content []byte + var url, mimeType sql.NullString + var metadataJSON []byte + + err := r.db.QueryRowContext(ctx, query, id).Scan( + &artifact.ID, + &artifact.TaskID, + &artifact.Type, + &artifact.Name, + &content, + &url, + &artifact.Size, + &mimeType, + &metadataJSON, + &artifact.CreatedAt, + ) + + if err != nil { + return nil, err + } + + artifact.Content = content + if url.Valid { + artifact.URL = url.String + } + if mimeType.Valid { + artifact.MimeType = mimeType.String + } + if len(metadataJSON) > 0 { + json.Unmarshal(metadataJSON, &artifact.Metadata) + } + + return &artifact, nil +} + +func (r *ComputerArtifactRepo) GetByTaskID(ctx context.Context, taskID string) ([]computer.Artifact, error) { + query := ` + SELECT id, task_id, type, name, url, size, mime_type, metadata, created_at + FROM computer_artifacts + WHERE task_id = $1 + ORDER BY created_at ASC + ` + + rows, err := r.db.QueryContext(ctx, query, taskID) + if err != nil { + return nil, err + } + defer rows.Close() + + var artifacts []computer.Artifact + + for rows.Next() { + var artifact computer.Artifact + var url, mimeType sql.NullString + var metadataJSON []byte + + err := rows.Scan( + &artifact.ID, + &artifact.TaskID, + &artifact.Type, + 
&artifact.Name, + &url, + &artifact.Size, + &mimeType, + &metadataJSON, + &artifact.CreatedAt, + ) + if err != nil { + continue + } + + if url.Valid { + artifact.URL = url.String + } + if mimeType.Valid { + artifact.MimeType = mimeType.String + } + if len(metadataJSON) > 0 { + json.Unmarshal(metadataJSON, &artifact.Metadata) + } + + artifacts = append(artifacts, artifact) + } + + return artifacts, nil +} + +func (r *ComputerArtifactRepo) GetByType(ctx context.Context, taskID, artifactType string) ([]computer.Artifact, error) { + query := ` + SELECT id, task_id, type, name, url, size, mime_type, metadata, created_at + FROM computer_artifacts + WHERE task_id = $1 AND type = $2 + ORDER BY created_at ASC + ` + + rows, err := r.db.QueryContext(ctx, query, taskID, artifactType) + if err != nil { + return nil, err + } + defer rows.Close() + + var artifacts []computer.Artifact + + for rows.Next() { + var artifact computer.Artifact + var url, mimeType sql.NullString + var metadataJSON []byte + + err := rows.Scan( + &artifact.ID, + &artifact.TaskID, + &artifact.Type, + &artifact.Name, + &url, + &artifact.Size, + &mimeType, + &metadataJSON, + &artifact.CreatedAt, + ) + if err != nil { + continue + } + + if url.Valid { + artifact.URL = url.String + } + if mimeType.Valid { + artifact.MimeType = mimeType.String + } + if len(metadataJSON) > 0 { + json.Unmarshal(metadataJSON, &artifact.Metadata) + } + + artifacts = append(artifacts, artifact) + } + + return artifacts, nil +} + +func (r *ComputerArtifactRepo) GetContent(ctx context.Context, id string) ([]byte, error) { + query := `SELECT content FROM computer_artifacts WHERE id = $1` + var content []byte + err := r.db.QueryRowContext(ctx, query, id).Scan(&content) + return content, err +} + +func (r *ComputerArtifactRepo) UpdateURL(ctx context.Context, id, url string) error { + query := `UPDATE computer_artifacts SET url = $1 WHERE id = $2` + _, err := r.db.ExecContext(ctx, query, url, id) + return err +} + +func (r 
*ComputerArtifactRepo) Delete(ctx context.Context, id string) error { + query := `DELETE FROM computer_artifacts WHERE id = $1` + _, err := r.db.ExecContext(ctx, query, id) + return err +} + +func (r *ComputerArtifactRepo) DeleteByTaskID(ctx context.Context, taskID string) error { + query := `DELETE FROM computer_artifacts WHERE task_id = $1` + _, err := r.db.ExecContext(ctx, query, taskID) + return err +} + +func (r *ComputerArtifactRepo) DeleteOlderThan(ctx context.Context, days int) (int64, error) { + query := ` + DELETE FROM computer_artifacts + WHERE created_at < NOW() - INTERVAL '1 day' * $1 + ` + result, err := r.db.ExecContext(ctx, query, days) + if err != nil { + return 0, err + } + return result.RowsAffected() +} + +func (r *ComputerArtifactRepo) GetTotalSize(ctx context.Context, taskID string) (int64, error) { + query := `SELECT COALESCE(SUM(size), 0) FROM computer_artifacts WHERE task_id = $1` + var size int64 + err := r.db.QueryRowContext(ctx, query, taskID).Scan(&size) + return size, err +} + +func (r *ComputerArtifactRepo) Count(ctx context.Context, taskID string) (int64, error) { + query := `SELECT COUNT(*) FROM computer_artifacts WHERE task_id = $1` + var count int64 + err := r.db.QueryRowContext(ctx, query, taskID).Scan(&count) + return count, err +} + +type ArtifactSummary struct { + ID string `json:"id"` + TaskID string `json:"taskId"` + Type string `json:"type"` + Name string `json:"name"` + URL string `json:"url"` + Size int64 `json:"size"` + MimeType string `json:"mimeType"` + CreatedAt time.Time `json:"createdAt"` +} + +func (r *ComputerArtifactRepo) GetSummaries(ctx context.Context, taskID string) ([]ArtifactSummary, error) { + query := ` + SELECT id, task_id, type, name, url, size, mime_type, created_at + FROM computer_artifacts + WHERE task_id = $1 + ORDER BY created_at ASC + ` + + rows, err := r.db.QueryContext(ctx, query, taskID) + if err != nil { + return nil, err + } + defer rows.Close() + + var summaries []ArtifactSummary + + for 
rows.Next() { + var s ArtifactSummary + var url, mimeType sql.NullString + + err := rows.Scan( + &s.ID, + &s.TaskID, + &s.Type, + &s.Name, + &url, + &s.Size, + &mimeType, + &s.CreatedAt, + ) + if err != nil { + continue + } + + if url.Valid { + s.URL = url.String + } + if mimeType.Valid { + s.MimeType = mimeType.String + } + + summaries = append(summaries, s) + } + + return summaries, nil +} diff --git a/backend/internal/db/computer_memory_repo.go b/backend/internal/db/computer_memory_repo.go new file mode 100644 index 0000000..1dc9e31 --- /dev/null +++ b/backend/internal/db/computer_memory_repo.go @@ -0,0 +1,306 @@ +package db + +import ( + "context" + "database/sql" + "encoding/json" + "strings" + "time" + + "github.com/gooseek/backend/internal/computer" +) + +type ComputerMemoryRepo struct { + db *sql.DB +} + +func NewComputerMemoryRepo(db *sql.DB) *ComputerMemoryRepo { + return &ComputerMemoryRepo{db: db} +} + +func (r *ComputerMemoryRepo) Migrate() error { + query := ` + CREATE TABLE IF NOT EXISTS computer_memory ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL, + task_id UUID, + key VARCHAR(255) NOT NULL, + value JSONB NOT NULL, + type VARCHAR(50), + tags TEXT[], + created_at TIMESTAMPTZ DEFAULT NOW(), + expires_at TIMESTAMPTZ + ); + + CREATE INDEX IF NOT EXISTS idx_computer_memory_user_id ON computer_memory(user_id); + CREATE INDEX IF NOT EXISTS idx_computer_memory_task_id ON computer_memory(task_id); + CREATE INDEX IF NOT EXISTS idx_computer_memory_type ON computer_memory(type); + CREATE INDEX IF NOT EXISTS idx_computer_memory_expires ON computer_memory(expires_at) WHERE expires_at IS NOT NULL; + CREATE INDEX IF NOT EXISTS idx_computer_memory_key ON computer_memory(key); + ` + + _, err := r.db.Exec(query) + return err +} + +func (r *ComputerMemoryRepo) Store(ctx context.Context, entry *computer.MemoryEntry) error { + valueJSON, err := json.Marshal(entry.Value) + if err != nil { + return err + } + + query := ` + INSERT INTO 
computer_memory (id, user_id, task_id, key, value, type, tags, created_at, expires_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) + ON CONFLICT (id) DO UPDATE SET + value = EXCLUDED.value, + type = EXCLUDED.type, + tags = EXCLUDED.tags, + expires_at = EXCLUDED.expires_at + ` + + var taskID interface{} + if entry.TaskID != "" { + taskID = entry.TaskID + } + + _, err = r.db.ExecContext(ctx, query, + entry.ID, + entry.UserID, + taskID, + entry.Key, + valueJSON, + entry.Type, + entry.Tags, + entry.CreatedAt, + entry.ExpiresAt, + ) + + return err +} + +func (r *ComputerMemoryRepo) GetByUser(ctx context.Context, userID string, limit int) ([]computer.MemoryEntry, error) { + query := ` + SELECT id, user_id, task_id, key, value, type, tags, created_at, expires_at + FROM computer_memory + WHERE user_id = $1 + AND (expires_at IS NULL OR expires_at > NOW()) + ORDER BY created_at DESC + LIMIT $2 + ` + + rows, err := r.db.QueryContext(ctx, query, userID, limit) + if err != nil { + return nil, err + } + defer rows.Close() + + return r.scanEntries(rows) +} + +func (r *ComputerMemoryRepo) GetByTask(ctx context.Context, taskID string) ([]computer.MemoryEntry, error) { + query := ` + SELECT id, user_id, task_id, key, value, type, tags, created_at, expires_at + FROM computer_memory + WHERE task_id = $1 + AND (expires_at IS NULL OR expires_at > NOW()) + ORDER BY created_at ASC + ` + + rows, err := r.db.QueryContext(ctx, query, taskID) + if err != nil { + return nil, err + } + defer rows.Close() + + return r.scanEntries(rows) +} + +func (r *ComputerMemoryRepo) Search(ctx context.Context, userID, query string, limit int) ([]computer.MemoryEntry, error) { + searchTerms := strings.Fields(strings.ToLower(query)) + if len(searchTerms) == 0 { + return r.GetByUser(ctx, userID, limit) + } + + likePatterns := make([]string, len(searchTerms)) + args := make([]interface{}, len(searchTerms)+2) + args[0] = userID + + for i, term := range searchTerms { + likePatterns[i] = "%" + term + "%" + 
args[i+1] = likePatterns[i] + } + args[len(args)-1] = limit + + var conditions []string + for i := range searchTerms { + conditions = append(conditions, "(LOWER(key) LIKE $"+string(rune('2'+i))+" OR LOWER(value::text) LIKE $"+string(rune('2'+i))+")") + } + + sqlQuery := ` + SELECT id, user_id, task_id, key, value, type, tags, created_at, expires_at + FROM computer_memory + WHERE user_id = $1 + AND (expires_at IS NULL OR expires_at > NOW()) + AND (` + strings.Join(conditions, " OR ") + `) + ORDER BY created_at DESC + LIMIT $` + string(rune('2'+len(searchTerms))) + + rows, err := r.db.QueryContext(ctx, sqlQuery, args...) + if err != nil { + return r.GetByUser(ctx, userID, limit) + } + defer rows.Close() + + return r.scanEntries(rows) +} + +func (r *ComputerMemoryRepo) GetByType(ctx context.Context, userID, memType string, limit int) ([]computer.MemoryEntry, error) { + query := ` + SELECT id, user_id, task_id, key, value, type, tags, created_at, expires_at + FROM computer_memory + WHERE user_id = $1 AND type = $2 + AND (expires_at IS NULL OR expires_at > NOW()) + ORDER BY created_at DESC + LIMIT $3 + ` + + rows, err := r.db.QueryContext(ctx, query, userID, memType, limit) + if err != nil { + return nil, err + } + defer rows.Close() + + return r.scanEntries(rows) +} + +func (r *ComputerMemoryRepo) GetByKey(ctx context.Context, userID, key string) (*computer.MemoryEntry, error) { + query := ` + SELECT id, user_id, task_id, key, value, type, tags, created_at, expires_at + FROM computer_memory + WHERE user_id = $1 AND key = $2 + AND (expires_at IS NULL OR expires_at > NOW()) + ORDER BY created_at DESC + LIMIT 1 + ` + + var entry computer.MemoryEntry + var valueJSON []byte + var taskID sql.NullString + var expiresAt sql.NullTime + var tags []string + + err := r.db.QueryRowContext(ctx, query, userID, key).Scan( + &entry.ID, + &entry.UserID, + &taskID, + &entry.Key, + &valueJSON, + &entry.Type, + &tags, + &entry.CreatedAt, + &expiresAt, + ) + + if err != nil { + return nil, 
err + } + + if taskID.Valid { + entry.TaskID = taskID.String + } + if expiresAt.Valid { + entry.ExpiresAt = &expiresAt.Time + } + entry.Tags = tags + + json.Unmarshal(valueJSON, &entry.Value) + + return &entry, nil +} + +func (r *ComputerMemoryRepo) Delete(ctx context.Context, id string) error { + query := `DELETE FROM computer_memory WHERE id = $1` + _, err := r.db.ExecContext(ctx, query, id) + return err +} + +func (r *ComputerMemoryRepo) DeleteByUser(ctx context.Context, userID string) error { + query := `DELETE FROM computer_memory WHERE user_id = $1` + _, err := r.db.ExecContext(ctx, query, userID) + return err +} + +func (r *ComputerMemoryRepo) DeleteByTask(ctx context.Context, taskID string) error { + query := `DELETE FROM computer_memory WHERE task_id = $1` + _, err := r.db.ExecContext(ctx, query, taskID) + return err +} + +func (r *ComputerMemoryRepo) DeleteExpired(ctx context.Context) (int64, error) { + query := `DELETE FROM computer_memory WHERE expires_at IS NOT NULL AND expires_at < NOW()` + result, err := r.db.ExecContext(ctx, query) + if err != nil { + return 0, err + } + return result.RowsAffected() +} + +func (r *ComputerMemoryRepo) scanEntries(rows *sql.Rows) ([]computer.MemoryEntry, error) { + var entries []computer.MemoryEntry + + for rows.Next() { + var entry computer.MemoryEntry + var valueJSON []byte + var taskID sql.NullString + var expiresAt sql.NullTime + var tags []string + + err := rows.Scan( + &entry.ID, + &entry.UserID, + &taskID, + &entry.Key, + &valueJSON, + &entry.Type, + &tags, + &entry.CreatedAt, + &expiresAt, + ) + if err != nil { + continue + } + + if taskID.Valid { + entry.TaskID = taskID.String + } + if expiresAt.Valid { + entry.ExpiresAt = &expiresAt.Time + } + entry.Tags = tags + + json.Unmarshal(valueJSON, &entry.Value) + + entries = append(entries, entry) + } + + return entries, nil +} + +func (r *ComputerMemoryRepo) Count(ctx context.Context, userID string) (int64, error) { + query := ` + SELECT COUNT(*) + FROM 
computer_memory + WHERE user_id = $1 + AND (expires_at IS NULL OR expires_at > NOW()) + ` + var count int64 + err := r.db.QueryRowContext(ctx, query, userID).Scan(&count) + return count, err +} + +func (r *ComputerMemoryRepo) UpdateExpiry(ctx context.Context, id string, expiresAt time.Time) error { + query := `UPDATE computer_memory SET expires_at = $1 WHERE id = $2` + _, err := r.db.ExecContext(ctx, query, expiresAt, id) + return err +} diff --git a/backend/internal/db/computer_task_repo.go b/backend/internal/db/computer_task_repo.go new file mode 100644 index 0000000..d1dbb2c --- /dev/null +++ b/backend/internal/db/computer_task_repo.go @@ -0,0 +1,411 @@ +package db + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + "time" + + "github.com/gooseek/backend/internal/computer" +) + +type ComputerTaskRepo struct { + db *sql.DB +} + +func NewComputerTaskRepo(db *sql.DB) *ComputerTaskRepo { + return &ComputerTaskRepo{db: db} +} + +func (r *ComputerTaskRepo) Migrate() error { + query := ` + CREATE TABLE IF NOT EXISTS computer_tasks ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL, + query TEXT NOT NULL, + status VARCHAR(20) NOT NULL DEFAULT 'pending', + plan JSONB, + sub_tasks JSONB, + artifacts JSONB, + memory JSONB, + progress INT DEFAULT 0, + message TEXT, + error TEXT, + schedule JSONB, + next_run_at TIMESTAMPTZ, + run_count INT DEFAULT 0, + total_cost DECIMAL(10,6) DEFAULT 0, + created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW(), + completed_at TIMESTAMPTZ + ); + + CREATE INDEX IF NOT EXISTS idx_computer_tasks_user_id ON computer_tasks(user_id); + CREATE INDEX IF NOT EXISTS idx_computer_tasks_status ON computer_tasks(status); + CREATE INDEX IF NOT EXISTS idx_computer_tasks_next_run ON computer_tasks(next_run_at) WHERE next_run_at IS NOT NULL; + CREATE INDEX IF NOT EXISTS idx_computer_tasks_created ON computer_tasks(created_at DESC); + ` + + _, err := r.db.Exec(query) + return err +} + +func (r 
*ComputerTaskRepo) Create(ctx context.Context, task *computer.ComputerTask) error { + planJSON, _ := json.Marshal(task.Plan) + subTasksJSON, _ := json.Marshal(task.SubTasks) + artifactsJSON, _ := json.Marshal(task.Artifacts) + memoryJSON, _ := json.Marshal(task.Memory) + scheduleJSON, _ := json.Marshal(task.Schedule) + + query := ` + INSERT INTO computer_tasks ( + id, user_id, query, status, plan, sub_tasks, artifacts, memory, + progress, message, error, schedule, next_run_at, run_count, total_cost, + created_at, updated_at, completed_at + ) VALUES ( + $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18 + ) + ` + + _, err := r.db.ExecContext(ctx, query, + task.ID, + task.UserID, + task.Query, + task.Status, + planJSON, + subTasksJSON, + artifactsJSON, + memoryJSON, + task.Progress, + task.Message, + task.Error, + scheduleJSON, + task.NextRunAt, + task.RunCount, + task.TotalCost, + task.CreatedAt, + task.UpdatedAt, + task.CompletedAt, + ) + + return err +} + +func (r *ComputerTaskRepo) Update(ctx context.Context, task *computer.ComputerTask) error { + planJSON, _ := json.Marshal(task.Plan) + subTasksJSON, _ := json.Marshal(task.SubTasks) + artifactsJSON, _ := json.Marshal(task.Artifacts) + memoryJSON, _ := json.Marshal(task.Memory) + scheduleJSON, _ := json.Marshal(task.Schedule) + + query := ` + UPDATE computer_tasks SET + status = $1, + plan = $2, + sub_tasks = $3, + artifacts = $4, + memory = $5, + progress = $6, + message = $7, + error = $8, + schedule = $9, + next_run_at = $10, + run_count = $11, + total_cost = $12, + updated_at = $13, + completed_at = $14 + WHERE id = $15 + ` + + _, err := r.db.ExecContext(ctx, query, + task.Status, + planJSON, + subTasksJSON, + artifactsJSON, + memoryJSON, + task.Progress, + task.Message, + task.Error, + scheduleJSON, + task.NextRunAt, + task.RunCount, + task.TotalCost, + time.Now(), + task.CompletedAt, + task.ID, + ) + + return err +} + +func (r *ComputerTaskRepo) GetByID(ctx context.Context, id 
string) (*computer.ComputerTask, error) { + query := ` + SELECT id, user_id, query, status, plan, sub_tasks, artifacts, memory, + progress, message, error, schedule, next_run_at, run_count, total_cost, + created_at, updated_at, completed_at + FROM computer_tasks + WHERE id = $1 + ` + + var task computer.ComputerTask + var planJSON, subTasksJSON, artifactsJSON, memoryJSON, scheduleJSON []byte + var message, errStr sql.NullString + var nextRunAt, completedAt sql.NullTime + + err := r.db.QueryRowContext(ctx, query, id).Scan( + &task.ID, + &task.UserID, + &task.Query, + &task.Status, + &planJSON, + &subTasksJSON, + &artifactsJSON, + &memoryJSON, + &task.Progress, + &message, + &errStr, + &scheduleJSON, + &nextRunAt, + &task.RunCount, + &task.TotalCost, + &task.CreatedAt, + &task.UpdatedAt, + &completedAt, + ) + + if err != nil { + return nil, err + } + + if len(planJSON) > 0 { + json.Unmarshal(planJSON, &task.Plan) + } + if len(subTasksJSON) > 0 { + json.Unmarshal(subTasksJSON, &task.SubTasks) + } + if len(artifactsJSON) > 0 { + json.Unmarshal(artifactsJSON, &task.Artifacts) + } + if len(memoryJSON) > 0 { + json.Unmarshal(memoryJSON, &task.Memory) + } + if len(scheduleJSON) > 0 { + json.Unmarshal(scheduleJSON, &task.Schedule) + } + + if message.Valid { + task.Message = message.String + } + if errStr.Valid { + task.Error = errStr.String + } + if nextRunAt.Valid { + task.NextRunAt = &nextRunAt.Time + } + if completedAt.Valid { + task.CompletedAt = &completedAt.Time + } + + return &task, nil +} + +func (r *ComputerTaskRepo) GetByUserID(ctx context.Context, userID string, limit, offset int) ([]computer.ComputerTask, error) { + query := ` + SELECT id, user_id, query, status, plan, sub_tasks, artifacts, memory, + progress, message, error, schedule, next_run_at, run_count, total_cost, + created_at, updated_at, completed_at + FROM computer_tasks + WHERE user_id = $1 + ORDER BY created_at DESC + LIMIT $2 OFFSET $3 + ` + + rows, err := r.db.QueryContext(ctx, query, userID, 
limit, offset) + if err != nil { + return nil, err + } + defer rows.Close() + + var tasks []computer.ComputerTask + + for rows.Next() { + var task computer.ComputerTask + var planJSON, subTasksJSON, artifactsJSON, memoryJSON, scheduleJSON []byte + var message, errStr sql.NullString + var nextRunAt, completedAt sql.NullTime + + err := rows.Scan( + &task.ID, + &task.UserID, + &task.Query, + &task.Status, + &planJSON, + &subTasksJSON, + &artifactsJSON, + &memoryJSON, + &task.Progress, + &message, + &errStr, + &scheduleJSON, + &nextRunAt, + &task.RunCount, + &task.TotalCost, + &task.CreatedAt, + &task.UpdatedAt, + &completedAt, + ) + if err != nil { + continue + } + + if len(planJSON) > 0 { + json.Unmarshal(planJSON, &task.Plan) + } + if len(subTasksJSON) > 0 { + json.Unmarshal(subTasksJSON, &task.SubTasks) + } + if len(artifactsJSON) > 0 { + json.Unmarshal(artifactsJSON, &task.Artifacts) + } + if len(memoryJSON) > 0 { + json.Unmarshal(memoryJSON, &task.Memory) + } + if len(scheduleJSON) > 0 { + json.Unmarshal(scheduleJSON, &task.Schedule) + } + + if message.Valid { + task.Message = message.String + } + if errStr.Valid { + task.Error = errStr.String + } + if nextRunAt.Valid { + task.NextRunAt = &nextRunAt.Time + } + if completedAt.Valid { + task.CompletedAt = &completedAt.Time + } + + tasks = append(tasks, task) + } + + return tasks, nil +} + +func (r *ComputerTaskRepo) GetScheduled(ctx context.Context) ([]computer.ComputerTask, error) { + query := ` + SELECT id, user_id, query, status, plan, sub_tasks, artifacts, memory, + progress, message, error, schedule, next_run_at, run_count, total_cost, + created_at, updated_at, completed_at + FROM computer_tasks + WHERE status = 'scheduled' AND schedule IS NOT NULL + ORDER BY next_run_at ASC + ` + + rows, err := r.db.QueryContext(ctx, query) + if err != nil { + return nil, err + } + defer rows.Close() + + var tasks []computer.ComputerTask + + for rows.Next() { + var task computer.ComputerTask + var planJSON, subTasksJSON, 
artifactsJSON, memoryJSON, scheduleJSON []byte + var message, errStr sql.NullString + var nextRunAt, completedAt sql.NullTime + + err := rows.Scan( + &task.ID, + &task.UserID, + &task.Query, + &task.Status, + &planJSON, + &subTasksJSON, + &artifactsJSON, + &memoryJSON, + &task.Progress, + &message, + &errStr, + &scheduleJSON, + &nextRunAt, + &task.RunCount, + &task.TotalCost, + &task.CreatedAt, + &task.UpdatedAt, + &completedAt, + ) + if err != nil { + continue + } + + if len(planJSON) > 0 { + json.Unmarshal(planJSON, &task.Plan) + } + if len(subTasksJSON) > 0 { + json.Unmarshal(subTasksJSON, &task.SubTasks) + } + if len(artifactsJSON) > 0 { + json.Unmarshal(artifactsJSON, &task.Artifacts) + } + if len(memoryJSON) > 0 { + json.Unmarshal(memoryJSON, &task.Memory) + } + if len(scheduleJSON) > 0 { + json.Unmarshal(scheduleJSON, &task.Schedule) + } + + if message.Valid { + task.Message = message.String + } + if errStr.Valid { + task.Error = errStr.String + } + if nextRunAt.Valid { + task.NextRunAt = &nextRunAt.Time + } + if completedAt.Valid { + task.CompletedAt = &completedAt.Time + } + + tasks = append(tasks, task) + } + + return tasks, nil +} + +func (r *ComputerTaskRepo) Delete(ctx context.Context, id string) error { + query := `DELETE FROM computer_tasks WHERE id = $1` + _, err := r.db.ExecContext(ctx, query, id) + return err +} + +func (r *ComputerTaskRepo) DeleteOlderThan(ctx context.Context, days int) (int64, error) { + query := ` + DELETE FROM computer_tasks + WHERE created_at < NOW() - INTERVAL '%d days' + AND status IN ('completed', 'failed', 'cancelled') + ` + result, err := r.db.ExecContext(ctx, fmt.Sprintf(query, days)) + if err != nil { + return 0, err + } + return result.RowsAffected() +} + +func (r *ComputerTaskRepo) CountByUser(ctx context.Context, userID string) (int64, error) { + query := `SELECT COUNT(*) FROM computer_tasks WHERE user_id = $1` + var count int64 + err := r.db.QueryRowContext(ctx, query, userID).Scan(&count) + return count, err +} + 
+func (r *ComputerTaskRepo) CountByStatus(ctx context.Context, status string) (int64, error) { + query := `SELECT COUNT(*) FROM computer_tasks WHERE status = $1` + var count int64 + err := r.db.QueryRowContext(ctx, query, status).Scan(&count) + return count, err +} diff --git a/backend/internal/db/digest_repo.go b/backend/internal/db/digest_repo.go new file mode 100644 index 0000000..9961c26 --- /dev/null +++ b/backend/internal/db/digest_repo.go @@ -0,0 +1,177 @@ +package db + +import ( + "context" + "database/sql" + "encoding/json" + "time" +) + +type DigestCitation struct { + Index int `json:"index"` + URL string `json:"url"` + Title string `json:"title"` + Domain string `json:"domain"` +} + +type Digest struct { + ID int64 `json:"id"` + Topic string `json:"topic"` + Region string `json:"region"` + ClusterTitle string `json:"clusterTitle"` + SummaryRu string `json:"summaryRu"` + Citations []DigestCitation `json:"citations"` + SourcesCount int `json:"sourcesCount"` + FollowUp []string `json:"followUp"` + Thumbnail string `json:"thumbnail"` + ShortDescription string `json:"shortDescription"` + MainURL string `json:"mainUrl"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` +} + +type DigestRepository struct { + db *PostgresDB +} + +func NewDigestRepository(db *PostgresDB) *DigestRepository { + return &DigestRepository{db: db} +} + +func (r *DigestRepository) GetByTopicRegionTitle(ctx context.Context, topic, region, title string) (*Digest, error) { + query := ` + SELECT id, topic, region, cluster_title, summary_ru, citations, sources_count, + follow_up, thumbnail, short_description, main_url, created_at, updated_at + FROM digests + WHERE topic = $1 AND region = $2 AND cluster_title = $3 + ` + + var d Digest + var citationsJSON, followUpJSON []byte + + err := r.db.db.QueryRowContext(ctx, query, topic, region, title).Scan( + &d.ID, &d.Topic, &d.Region, &d.ClusterTitle, &d.SummaryRu, + &citationsJSON, &d.SourcesCount, &followUpJSON, + 
&d.Thumbnail, &d.ShortDescription, &d.MainURL, + &d.CreatedAt, &d.UpdatedAt, + ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + + json.Unmarshal(citationsJSON, &d.Citations) + json.Unmarshal(followUpJSON, &d.FollowUp) + + return &d, nil +} + +func (r *DigestRepository) GetByURL(ctx context.Context, url string) (*Digest, error) { + query := ` + SELECT id, topic, region, cluster_title, summary_ru, citations, sources_count, + follow_up, thumbnail, short_description, main_url, created_at, updated_at + FROM digests + WHERE main_url = $1 + LIMIT 1 + ` + + var d Digest + var citationsJSON, followUpJSON []byte + + err := r.db.db.QueryRowContext(ctx, query, url).Scan( + &d.ID, &d.Topic, &d.Region, &d.ClusterTitle, &d.SummaryRu, + &citationsJSON, &d.SourcesCount, &followUpJSON, + &d.Thumbnail, &d.ShortDescription, &d.MainURL, + &d.CreatedAt, &d.UpdatedAt, + ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + + json.Unmarshal(citationsJSON, &d.Citations) + json.Unmarshal(followUpJSON, &d.FollowUp) + + return &d, nil +} + +func (r *DigestRepository) GetByTopicRegion(ctx context.Context, topic, region string, limit int) ([]*Digest, error) { + query := ` + SELECT id, topic, region, cluster_title, summary_ru, citations, sources_count, + follow_up, thumbnail, short_description, main_url, created_at, updated_at + FROM digests + WHERE topic = $1 AND region = $2 + ORDER BY created_at DESC + LIMIT $3 + ` + + rows, err := r.db.db.QueryContext(ctx, query, topic, region, limit) + if err != nil { + return nil, err + } + defer rows.Close() + + var digests []*Digest + for rows.Next() { + var d Digest + var citationsJSON, followUpJSON []byte + + if err := rows.Scan( + &d.ID, &d.Topic, &d.Region, &d.ClusterTitle, &d.SummaryRu, + &citationsJSON, &d.SourcesCount, &followUpJSON, + &d.Thumbnail, &d.ShortDescription, &d.MainURL, + &d.CreatedAt, &d.UpdatedAt, + ); err != nil { + return nil, err + } + + 
json.Unmarshal(citationsJSON, &d.Citations) + json.Unmarshal(followUpJSON, &d.FollowUp) + digests = append(digests, &d) + } + + return digests, nil +} + +func (r *DigestRepository) Upsert(ctx context.Context, d *Digest) error { + citationsJSON, _ := json.Marshal(d.Citations) + followUpJSON, _ := json.Marshal(d.FollowUp) + + query := ` + INSERT INTO digests (topic, region, cluster_title, summary_ru, citations, sources_count, + follow_up, thumbnail, short_description, main_url) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) + ON CONFLICT (topic, region, cluster_title) + DO UPDATE SET + summary_ru = EXCLUDED.summary_ru, + citations = EXCLUDED.citations, + sources_count = EXCLUDED.sources_count, + follow_up = EXCLUDED.follow_up, + thumbnail = EXCLUDED.thumbnail, + short_description = EXCLUDED.short_description, + main_url = EXCLUDED.main_url, + updated_at = NOW() + ` + + _, err := r.db.db.ExecContext(ctx, query, + d.Topic, d.Region, d.ClusterTitle, d.SummaryRu, + citationsJSON, d.SourcesCount, followUpJSON, + d.Thumbnail, d.ShortDescription, d.MainURL, + ) + return err +} + +func (r *DigestRepository) DeleteByTopicRegion(ctx context.Context, topic, region string) (int64, error) { + result, err := r.db.db.ExecContext(ctx, + "DELETE FROM digests WHERE topic = $1 AND region = $2", + topic, region, + ) + if err != nil { + return 0, err + } + return result.RowsAffected() +} diff --git a/backend/internal/db/file_repo.go b/backend/internal/db/file_repo.go new file mode 100644 index 0000000..326b3d7 --- /dev/null +++ b/backend/internal/db/file_repo.go @@ -0,0 +1,149 @@ +package db + +import ( + "context" + "database/sql" + "encoding/json" + "time" +) + +type UploadedFile struct { + ID string `json:"id"` + UserID string `json:"userId"` + Filename string `json:"filename"` + FileType string `json:"fileType"` + FileSize int64 `json:"fileSize"` + StoragePath string `json:"storagePath"` + ExtractedText string `json:"extractedText,omitempty"` + Metadata map[string]interface{} 
`json:"metadata"` + CreatedAt time.Time `json:"createdAt"` +} + +type FileRepository struct { + db *PostgresDB +} + +func NewFileRepository(db *PostgresDB) *FileRepository { + return &FileRepository{db: db} +} + +func (r *FileRepository) Create(ctx context.Context, f *UploadedFile) error { + metadataJSON, _ := json.Marshal(f.Metadata) + + query := ` + INSERT INTO uploaded_files (user_id, filename, file_type, file_size, storage_path, extracted_text, metadata) + VALUES ($1, $2, $3, $4, $5, $6, $7) + RETURNING id, created_at + ` + return r.db.db.QueryRowContext(ctx, query, + f.UserID, f.Filename, f.FileType, f.FileSize, f.StoragePath, f.ExtractedText, metadataJSON, + ).Scan(&f.ID, &f.CreatedAt) +} + +func (r *FileRepository) GetByID(ctx context.Context, id string) (*UploadedFile, error) { + query := ` + SELECT id, user_id, filename, file_type, file_size, storage_path, extracted_text, metadata, created_at + FROM uploaded_files + WHERE id = $1 + ` + + var f UploadedFile + var metadataJSON []byte + + err := r.db.db.QueryRowContext(ctx, query, id).Scan( + &f.ID, &f.UserID, &f.Filename, &f.FileType, &f.FileSize, + &f.StoragePath, &f.ExtractedText, &metadataJSON, &f.CreatedAt, + ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + + json.Unmarshal(metadataJSON, &f.Metadata) + return &f, nil +} + +func (r *FileRepository) GetByUserID(ctx context.Context, userID string, limit, offset int) ([]*UploadedFile, error) { + query := ` + SELECT id, user_id, filename, file_type, file_size, storage_path, extracted_text, metadata, created_at + FROM uploaded_files + WHERE user_id = $1 + ORDER BY created_at DESC + LIMIT $2 OFFSET $3 + ` + + rows, err := r.db.db.QueryContext(ctx, query, userID, limit, offset) + if err != nil { + return nil, err + } + defer rows.Close() + + var files []*UploadedFile + for rows.Next() { + var f UploadedFile + var metadataJSON []byte + + if err := rows.Scan( + &f.ID, &f.UserID, &f.Filename, &f.FileType, &f.FileSize, + 
&f.StoragePath, &f.ExtractedText, &metadataJSON, &f.CreatedAt, + ); err != nil { + return nil, err + } + + json.Unmarshal(metadataJSON, &f.Metadata) + files = append(files, &f) + } + + return files, nil +} + +func (r *FileRepository) UpdateExtractedText(ctx context.Context, id, text string) error { + _, err := r.db.db.ExecContext(ctx, + "UPDATE uploaded_files SET extracted_text = $2 WHERE id = $1", + id, text, + ) + return err +} + +func (r *FileRepository) Delete(ctx context.Context, id string) error { + _, err := r.db.db.ExecContext(ctx, "DELETE FROM uploaded_files WHERE id = $1", id) + return err +} + +func (r *FileRepository) GetByIDs(ctx context.Context, ids []string) ([]*UploadedFile, error) { + if len(ids) == 0 { + return nil, nil + } + + query := ` + SELECT id, user_id, filename, file_type, file_size, storage_path, extracted_text, metadata, created_at + FROM uploaded_files + WHERE id = ANY($1) + ` + + rows, err := r.db.db.QueryContext(ctx, query, ids) + if err != nil { + return nil, err + } + defer rows.Close() + + var files []*UploadedFile + for rows.Next() { + var f UploadedFile + var metadataJSON []byte + + if err := rows.Scan( + &f.ID, &f.UserID, &f.Filename, &f.FileType, &f.FileSize, + &f.StoragePath, &f.ExtractedText, &metadataJSON, &f.CreatedAt, + ); err != nil { + return nil, err + } + + json.Unmarshal(metadataJSON, &f.Metadata) + files = append(files, &f) + } + + return files, nil +} diff --git a/backend/internal/db/memory_repo.go b/backend/internal/db/memory_repo.go new file mode 100644 index 0000000..6640b83 --- /dev/null +++ b/backend/internal/db/memory_repo.go @@ -0,0 +1,170 @@ +package db + +import ( + "context" + "encoding/json" + "time" +) + +type UserMemory struct { + ID string `json:"id"` + UserID string `json:"userId"` + MemoryType string `json:"memoryType"` + Key string `json:"key"` + Value string `json:"value"` + Metadata map[string]interface{} `json:"metadata"` + Importance int `json:"importance"` + LastUsed time.Time `json:"lastUsed"` 
+ UseCount int `json:"useCount"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` +} + +type MemoryRepository struct { + db *PostgresDB +} + +func NewMemoryRepository(db *PostgresDB) *MemoryRepository { + return &MemoryRepository{db: db} +} + +func (r *MemoryRepository) RunMigrations(ctx context.Context) error { + migrations := []string{ + `CREATE TABLE IF NOT EXISTS user_memories ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL, + memory_type VARCHAR(50) NOT NULL, + key VARCHAR(255) NOT NULL, + value TEXT NOT NULL, + metadata JSONB DEFAULT '{}', + importance INT DEFAULT 5, + last_used TIMESTAMPTZ DEFAULT NOW(), + use_count INT DEFAULT 0, + created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW(), + UNIQUE(user_id, memory_type, key) + )`, + `CREATE INDEX IF NOT EXISTS idx_user_memories_user ON user_memories(user_id)`, + `CREATE INDEX IF NOT EXISTS idx_user_memories_type ON user_memories(user_id, memory_type)`, + `CREATE INDEX IF NOT EXISTS idx_user_memories_importance ON user_memories(user_id, importance DESC)`, + } + + for _, m := range migrations { + if _, err := r.db.db.ExecContext(ctx, m); err != nil { + return err + } + } + return nil +} + +func (r *MemoryRepository) Save(ctx context.Context, mem *UserMemory) error { + metadataJSON, _ := json.Marshal(mem.Metadata) + + query := ` + INSERT INTO user_memories (user_id, memory_type, key, value, metadata, importance) + VALUES ($1, $2, $3, $4, $5, $6) + ON CONFLICT (user_id, memory_type, key) + DO UPDATE SET + value = EXCLUDED.value, + metadata = EXCLUDED.metadata, + importance = EXCLUDED.importance, + updated_at = NOW() + RETURNING id, created_at, updated_at + ` + return r.db.db.QueryRowContext(ctx, query, + mem.UserID, mem.MemoryType, mem.Key, mem.Value, metadataJSON, mem.Importance, + ).Scan(&mem.ID, &mem.CreatedAt, &mem.UpdatedAt) +} + +func (r *MemoryRepository) GetByUserID(ctx context.Context, userID string, memoryType string, limit 
int) ([]*UserMemory, error) { + query := ` + SELECT id, user_id, memory_type, key, value, metadata, importance, last_used, use_count, created_at, updated_at + FROM user_memories + WHERE user_id = $1 + ` + args := []interface{}{userID} + + if memoryType != "" { + query += " AND memory_type = $2" + args = append(args, memoryType) + } + + query += " ORDER BY importance DESC, last_used DESC" + + if limit > 0 { + query += " LIMIT $" + string(rune('0'+len(args)+1)) + args = append(args, limit) + } + + rows, err := r.db.db.QueryContext(ctx, query, args...) + if err != nil { + return nil, err + } + defer rows.Close() + + var memories []*UserMemory + for rows.Next() { + var mem UserMemory + var metadataJSON []byte + + if err := rows.Scan( + &mem.ID, &mem.UserID, &mem.MemoryType, &mem.Key, &mem.Value, + &metadataJSON, &mem.Importance, &mem.LastUsed, &mem.UseCount, + &mem.CreatedAt, &mem.UpdatedAt, + ); err != nil { + return nil, err + } + + json.Unmarshal(metadataJSON, &mem.Metadata) + memories = append(memories, &mem) + } + + return memories, nil +} + +func (r *MemoryRepository) GetContextForUser(ctx context.Context, userID string) (string, error) { + memories, err := r.GetByUserID(ctx, userID, "", 20) + if err != nil { + return "", err + } + + var context string + for _, mem := range memories { + switch mem.MemoryType { + case "preference": + context += "User preference: " + mem.Key + " = " + mem.Value + "\n" + case "fact": + context += "Known fact about user: " + mem.Value + "\n" + case "instruction": + context += "User instruction: " + mem.Value + "\n" + case "interest": + context += "User interest: " + mem.Value + "\n" + default: + context += mem.Key + ": " + mem.Value + "\n" + } + } + + return context, nil +} + +func (r *MemoryRepository) IncrementUseCount(ctx context.Context, id string) error { + _, err := r.db.db.ExecContext(ctx, + "UPDATE user_memories SET use_count = use_count + 1, last_used = NOW() WHERE id = $1", + id, + ) + return err +} + +func (r 
*MemoryRepository) Delete(ctx context.Context, id string) error { + _, err := r.db.db.ExecContext(ctx, "DELETE FROM user_memories WHERE id = $1", id) + return err +} + +func (r *MemoryRepository) DeleteByUserID(ctx context.Context, userID string) error { + _, err := r.db.db.ExecContext(ctx, "DELETE FROM user_memories WHERE user_id = $1", userID) + return err +} + +func ExtractMemoriesFromConversation(ctx context.Context, llmClient interface{}, conversation, answer string) ([]UserMemory, error) { + return nil, nil +} diff --git a/backend/internal/db/page_repo.go b/backend/internal/db/page_repo.go new file mode 100644 index 0000000..c4c5b7a --- /dev/null +++ b/backend/internal/db/page_repo.go @@ -0,0 +1,219 @@ +package db + +import ( + "context" + "database/sql" + "encoding/json" + + "github.com/gooseek/backend/internal/pages" +) + +type PageRepository struct { + db *PostgresDB +} + +func NewPageRepository(db *PostgresDB) *PageRepository { + return &PageRepository{db: db} +} + +func (r *PageRepository) RunMigrations(ctx context.Context) error { + migrations := []string{ + `CREATE TABLE IF NOT EXISTS pages ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL, + thread_id UUID REFERENCES threads(id) ON DELETE SET NULL, + title VARCHAR(500) NOT NULL, + subtitle TEXT, + sections JSONB NOT NULL DEFAULT '[]', + sources JSONB NOT NULL DEFAULT '[]', + thumbnail TEXT, + is_public BOOLEAN DEFAULT FALSE, + share_id VARCHAR(100) UNIQUE, + view_count INT DEFAULT 0, + created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_pages_user ON pages(user_id)`, + `CREATE INDEX IF NOT EXISTS idx_pages_share ON pages(share_id)`, + } + + for _, m := range migrations { + if _, err := r.db.db.ExecContext(ctx, m); err != nil { + return err + } + } + return nil +} + +func (r *PageRepository) Create(ctx context.Context, p *pages.Page) error { + sectionsJSON, _ := json.Marshal(p.Sections) + sourcesJSON, _ := 
json.Marshal(p.Sources) + + query := ` + INSERT INTO pages (user_id, thread_id, title, subtitle, sections, sources, thumbnail, is_public) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8) + RETURNING id, created_at, updated_at + ` + + var threadID *string + if p.ThreadID != "" { + threadID = &p.ThreadID + } + + return r.db.db.QueryRowContext(ctx, query, + p.UserID, threadID, p.Title, p.Subtitle, sectionsJSON, sourcesJSON, p.Thumbnail, p.IsPublic, + ).Scan(&p.ID, &p.CreatedAt, &p.UpdatedAt) +} + +func (r *PageRepository) GetByID(ctx context.Context, id string) (*pages.Page, error) { + query := ` + SELECT id, user_id, thread_id, title, subtitle, sections, sources, thumbnail, is_public, share_id, view_count, created_at, updated_at + FROM pages + WHERE id = $1 + ` + + var p pages.Page + var sectionsJSON, sourcesJSON []byte + var threadID, shareID sql.NullString + + err := r.db.db.QueryRowContext(ctx, query, id).Scan( + &p.ID, &p.UserID, &threadID, &p.Title, &p.Subtitle, + &sectionsJSON, &sourcesJSON, &p.Thumbnail, + &p.IsPublic, &shareID, &p.ViewCount, &p.CreatedAt, &p.UpdatedAt, + ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + + json.Unmarshal(sectionsJSON, &p.Sections) + json.Unmarshal(sourcesJSON, &p.Sources) + + if threadID.Valid { + p.ThreadID = threadID.String + } + if shareID.Valid { + p.ShareID = shareID.String + } + + return &p, nil +} + +func (r *PageRepository) GetByShareID(ctx context.Context, shareID string) (*pages.Page, error) { + query := ` + SELECT id, user_id, thread_id, title, subtitle, sections, sources, thumbnail, is_public, share_id, view_count, created_at, updated_at + FROM pages + WHERE share_id = $1 AND is_public = true + ` + + var p pages.Page + var sectionsJSON, sourcesJSON []byte + var threadID, shareIDVal sql.NullString + + err := r.db.db.QueryRowContext(ctx, query, shareID).Scan( + &p.ID, &p.UserID, &threadID, &p.Title, &p.Subtitle, + &sectionsJSON, &sourcesJSON, &p.Thumbnail, + &p.IsPublic, &shareIDVal, 
&p.ViewCount, &p.CreatedAt, &p.UpdatedAt, + ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + + json.Unmarshal(sectionsJSON, &p.Sections) + json.Unmarshal(sourcesJSON, &p.Sources) + + if threadID.Valid { + p.ThreadID = threadID.String + } + if shareIDVal.Valid { + p.ShareID = shareIDVal.String + } + + return &p, nil +} + +func (r *PageRepository) GetByUserID(ctx context.Context, userID string, limit, offset int) ([]*pages.Page, error) { + query := ` + SELECT id, user_id, thread_id, title, subtitle, sections, sources, thumbnail, is_public, share_id, view_count, created_at, updated_at + FROM pages + WHERE user_id = $1 + ORDER BY updated_at DESC + LIMIT $2 OFFSET $3 + ` + + rows, err := r.db.db.QueryContext(ctx, query, userID, limit, offset) + if err != nil { + return nil, err + } + defer rows.Close() + + var pagesList []*pages.Page + for rows.Next() { + var p pages.Page + var sectionsJSON, sourcesJSON []byte + var threadID, shareID sql.NullString + + if err := rows.Scan( + &p.ID, &p.UserID, &threadID, &p.Title, &p.Subtitle, + &sectionsJSON, &sourcesJSON, &p.Thumbnail, + &p.IsPublic, &shareID, &p.ViewCount, &p.CreatedAt, &p.UpdatedAt, + ); err != nil { + return nil, err + } + + json.Unmarshal(sectionsJSON, &p.Sections) + json.Unmarshal(sourcesJSON, &p.Sources) + + if threadID.Valid { + p.ThreadID = threadID.String + } + if shareID.Valid { + p.ShareID = shareID.String + } + + pagesList = append(pagesList, &p) + } + + return pagesList, nil +} + +func (r *PageRepository) Update(ctx context.Context, p *pages.Page) error { + sectionsJSON, _ := json.Marshal(p.Sections) + sourcesJSON, _ := json.Marshal(p.Sources) + + query := ` + UPDATE pages + SET title = $2, subtitle = $3, sections = $4, sources = $5, thumbnail = $6, is_public = $7, updated_at = NOW() + WHERE id = $1 + ` + _, err := r.db.db.ExecContext(ctx, query, + p.ID, p.Title, p.Subtitle, sectionsJSON, sourcesJSON, p.Thumbnail, p.IsPublic, + ) + return err +} + +func (r 
*PageRepository) SetShareID(ctx context.Context, pageID, shareID string) error { + _, err := r.db.db.ExecContext(ctx, + "UPDATE pages SET share_id = $2, is_public = true WHERE id = $1", + pageID, shareID, + ) + return err +} + +func (r *PageRepository) IncrementViewCount(ctx context.Context, id string) error { + _, err := r.db.db.ExecContext(ctx, + "UPDATE pages SET view_count = view_count + 1 WHERE id = $1", + id, + ) + return err +} + +func (r *PageRepository) Delete(ctx context.Context, id string) error { + _, err := r.db.db.ExecContext(ctx, "DELETE FROM pages WHERE id = $1", id) + return err +} diff --git a/backend/internal/db/postgres.go b/backend/internal/db/postgres.go new file mode 100644 index 0000000..9de25a9 --- /dev/null +++ b/backend/internal/db/postgres.go @@ -0,0 +1,134 @@ +package db + +import ( + "context" + "database/sql" + "fmt" + "time" + + _ "github.com/lib/pq" +) + +type PostgresDB struct { + db *sql.DB +} + +func NewPostgresDB(databaseURL string) (*PostgresDB, error) { + db, err := sql.Open("postgres", databaseURL) + if err != nil { + return nil, fmt.Errorf("failed to open database: %w", err) + } + + db.SetMaxOpenConns(25) + db.SetMaxIdleConns(5) + db.SetConnMaxLifetime(5 * time.Minute) + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + if err := db.PingContext(ctx); err != nil { + return nil, fmt.Errorf("failed to ping database: %w", err) + } + + return &PostgresDB{db: db}, nil +} + +func (p *PostgresDB) Close() error { + return p.db.Close() +} + +func (p *PostgresDB) DB() *sql.DB { + return p.db +} + +func (p *PostgresDB) RunMigrations(ctx context.Context) error { + migrations := []string{ + `CREATE TABLE IF NOT EXISTS digests ( + id SERIAL PRIMARY KEY, + topic VARCHAR(100) NOT NULL, + region VARCHAR(50) NOT NULL, + cluster_title VARCHAR(500) NOT NULL, + summary_ru TEXT NOT NULL, + citations JSONB DEFAULT '[]', + sources_count INT DEFAULT 0, + follow_up JSONB DEFAULT '[]', + thumbnail TEXT, + 
short_description TEXT, + main_url TEXT, + created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW(), + UNIQUE(topic, region, cluster_title) + )`, + `CREATE INDEX IF NOT EXISTS idx_digests_topic_region ON digests(topic, region)`, + `CREATE INDEX IF NOT EXISTS idx_digests_main_url ON digests(main_url)`, + `CREATE TABLE IF NOT EXISTS article_summaries ( + id SERIAL PRIMARY KEY, + url_hash VARCHAR(64) NOT NULL UNIQUE, + url TEXT NOT NULL, + events JSONB NOT NULL DEFAULT '[]', + created_at TIMESTAMPTZ DEFAULT NOW(), + expires_at TIMESTAMPTZ DEFAULT NOW() + INTERVAL '7 days' + )`, + `CREATE INDEX IF NOT EXISTS idx_article_summaries_url_hash ON article_summaries(url_hash)`, + `CREATE INDEX IF NOT EXISTS idx_article_summaries_expires ON article_summaries(expires_at)`, + `CREATE TABLE IF NOT EXISTS collections ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL, + name VARCHAR(255) NOT NULL, + description TEXT, + is_public BOOLEAN DEFAULT FALSE, + context_enabled BOOLEAN DEFAULT TRUE, + created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_collections_user ON collections(user_id)`, + `CREATE TABLE IF NOT EXISTS collection_items ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + collection_id UUID NOT NULL REFERENCES collections(id) ON DELETE CASCADE, + item_type VARCHAR(50) NOT NULL, + title VARCHAR(500), + content TEXT, + url TEXT, + metadata JSONB DEFAULT '{}', + created_at TIMESTAMPTZ DEFAULT NOW(), + sort_order INT DEFAULT 0 + )`, + `CREATE INDEX IF NOT EXISTS idx_collection_items_collection ON collection_items(collection_id)`, + `CREATE TABLE IF NOT EXISTS uploaded_files ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL, + filename VARCHAR(500) NOT NULL, + file_type VARCHAR(100) NOT NULL, + file_size BIGINT NOT NULL, + storage_path TEXT NOT NULL, + extracted_text TEXT, + metadata JSONB DEFAULT '{}', + created_at TIMESTAMPTZ DEFAULT NOW() 
+ )`, + `CREATE INDEX IF NOT EXISTS idx_uploaded_files_user ON uploaded_files(user_id)`, + `CREATE TABLE IF NOT EXISTS research_sessions ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID, + collection_id UUID REFERENCES collections(id) ON DELETE SET NULL, + query TEXT NOT NULL, + focus_mode VARCHAR(50) DEFAULT 'all', + optimization_mode VARCHAR(50) DEFAULT 'balanced', + sources JSONB DEFAULT '[]', + response_blocks JSONB DEFAULT '[]', + final_answer TEXT, + citations JSONB DEFAULT '[]', + created_at TIMESTAMPTZ DEFAULT NOW(), + completed_at TIMESTAMPTZ + )`, + `CREATE INDEX IF NOT EXISTS idx_research_sessions_user ON research_sessions(user_id)`, + `CREATE INDEX IF NOT EXISTS idx_research_sessions_collection ON research_sessions(collection_id)`, + } + + for _, migration := range migrations { + if _, err := p.db.ExecContext(ctx, migration); err != nil { + return fmt.Errorf("migration failed: %w", err) + } + } + + return nil +} diff --git a/backend/internal/db/space_repo.go b/backend/internal/db/space_repo.go new file mode 100644 index 0000000..0d20bb4 --- /dev/null +++ b/backend/internal/db/space_repo.go @@ -0,0 +1,163 @@ +package db + +import ( + "context" + "database/sql" + "encoding/json" + "time" +) + +type Space struct { + ID string `json:"id"` + UserID string `json:"userId"` + Name string `json:"name"` + Description string `json:"description"` + Icon string `json:"icon"` + Color string `json:"color"` + CustomInstructions string `json:"customInstructions"` + DefaultFocusMode string `json:"defaultFocusMode"` + DefaultModel string `json:"defaultModel"` + IsPublic bool `json:"isPublic"` + Settings map[string]interface{} `json:"settings"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` + ThreadCount int `json:"threadCount,omitempty"` +} + +type SpaceRepository struct { + db *PostgresDB +} + +func NewSpaceRepository(db *PostgresDB) *SpaceRepository { + return &SpaceRepository{db: db} +} + +func (r *SpaceRepository) 
RunMigrations(ctx context.Context) error { + migrations := []string{ + `CREATE TABLE IF NOT EXISTS spaces ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL, + name VARCHAR(255) NOT NULL, + description TEXT, + icon VARCHAR(50), + color VARCHAR(20), + custom_instructions TEXT, + default_focus_mode VARCHAR(50) DEFAULT 'all', + default_model VARCHAR(100), + is_public BOOLEAN DEFAULT FALSE, + settings JSONB DEFAULT '{}', + created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_spaces_user ON spaces(user_id)`, + } + + for _, m := range migrations { + if _, err := r.db.db.ExecContext(ctx, m); err != nil { + return err + } + } + return nil +} + +func (r *SpaceRepository) Create(ctx context.Context, s *Space) error { + settingsJSON, _ := json.Marshal(s.Settings) + + query := ` + INSERT INTO spaces (user_id, name, description, icon, color, custom_instructions, default_focus_mode, default_model, is_public, settings) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) + RETURNING id, created_at, updated_at + ` + return r.db.db.QueryRowContext(ctx, query, + s.UserID, s.Name, s.Description, s.Icon, s.Color, + s.CustomInstructions, s.DefaultFocusMode, s.DefaultModel, + s.IsPublic, settingsJSON, + ).Scan(&s.ID, &s.CreatedAt, &s.UpdatedAt) +} + +func (r *SpaceRepository) GetByID(ctx context.Context, id string) (*Space, error) { + query := ` + SELECT id, user_id, name, description, icon, color, custom_instructions, + default_focus_mode, default_model, is_public, settings, created_at, updated_at, + (SELECT COUNT(*) FROM threads WHERE space_id = spaces.id) as thread_count + FROM spaces + WHERE id = $1 + ` + + var s Space + var settingsJSON []byte + + err := r.db.db.QueryRowContext(ctx, query, id).Scan( + &s.ID, &s.UserID, &s.Name, &s.Description, &s.Icon, &s.Color, + &s.CustomInstructions, &s.DefaultFocusMode, &s.DefaultModel, + &s.IsPublic, &settingsJSON, &s.CreatedAt, &s.UpdatedAt, &s.ThreadCount, 
+ ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + + json.Unmarshal(settingsJSON, &s.Settings) + return &s, nil +} + +func (r *SpaceRepository) GetByUserID(ctx context.Context, userID string) ([]*Space, error) { + query := ` + SELECT id, user_id, name, description, icon, color, custom_instructions, + default_focus_mode, default_model, is_public, settings, created_at, updated_at, + (SELECT COUNT(*) FROM threads WHERE space_id = spaces.id) as thread_count + FROM spaces + WHERE user_id = $1 + ORDER BY updated_at DESC + ` + + rows, err := r.db.db.QueryContext(ctx, query, userID) + if err != nil { + return nil, err + } + defer rows.Close() + + var spaces []*Space + for rows.Next() { + var s Space + var settingsJSON []byte + + if err := rows.Scan( + &s.ID, &s.UserID, &s.Name, &s.Description, &s.Icon, &s.Color, + &s.CustomInstructions, &s.DefaultFocusMode, &s.DefaultModel, + &s.IsPublic, &settingsJSON, &s.CreatedAt, &s.UpdatedAt, &s.ThreadCount, + ); err != nil { + return nil, err + } + + json.Unmarshal(settingsJSON, &s.Settings) + spaces = append(spaces, &s) + } + + return spaces, nil +} + +func (r *SpaceRepository) Update(ctx context.Context, s *Space) error { + settingsJSON, _ := json.Marshal(s.Settings) + + query := ` + UPDATE spaces + SET name = $2, description = $3, icon = $4, color = $5, + custom_instructions = $6, default_focus_mode = $7, default_model = $8, + is_public = $9, settings = $10, updated_at = NOW() + WHERE id = $1 + ` + _, err := r.db.db.ExecContext(ctx, query, + s.ID, s.Name, s.Description, s.Icon, s.Color, + s.CustomInstructions, s.DefaultFocusMode, s.DefaultModel, + s.IsPublic, settingsJSON, + ) + return err +} + +func (r *SpaceRepository) Delete(ctx context.Context, id string) error { + _, err := r.db.db.ExecContext(ctx, "DELETE FROM spaces WHERE id = $1", id) + return err +} diff --git a/backend/internal/db/thread_repo.go b/backend/internal/db/thread_repo.go new file mode 100644 index 0000000..0d39c13 
--- /dev/null +++ b/backend/internal/db/thread_repo.go @@ -0,0 +1,270 @@ +package db + +import ( + "context" + "database/sql" + "encoding/json" + "time" +) + +type Thread struct { + ID string `json:"id"` + UserID string `json:"userId"` + SpaceID *string `json:"spaceId,omitempty"` + Title string `json:"title"` + FocusMode string `json:"focusMode"` + IsPublic bool `json:"isPublic"` + ShareID *string `json:"shareId,omitempty"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` + Messages []ThreadMessage `json:"messages,omitempty"` + MessageCount int `json:"messageCount,omitempty"` +} + +type ThreadMessage struct { + ID string `json:"id"` + ThreadID string `json:"threadId"` + Role string `json:"role"` + Content string `json:"content"` + Sources []ThreadSource `json:"sources,omitempty"` + Widgets []map[string]interface{} `json:"widgets,omitempty"` + RelatedQuestions []string `json:"relatedQuestions,omitempty"` + Model string `json:"model,omitempty"` + TokensUsed int `json:"tokensUsed,omitempty"` + CreatedAt time.Time `json:"createdAt"` +} + +type ThreadSource struct { + Index int `json:"index"` + URL string `json:"url"` + Title string `json:"title"` + Domain string `json:"domain"` + Snippet string `json:"snippet,omitempty"` +} + +type ThreadRepository struct { + db *PostgresDB +} + +func NewThreadRepository(db *PostgresDB) *ThreadRepository { + return &ThreadRepository{db: db} +} + +func (r *ThreadRepository) RunMigrations(ctx context.Context) error { + migrations := []string{ + `CREATE TABLE IF NOT EXISTS threads ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL, + space_id UUID REFERENCES spaces(id) ON DELETE SET NULL, + title VARCHAR(500) NOT NULL DEFAULT 'New Thread', + focus_mode VARCHAR(50) DEFAULT 'all', + is_public BOOLEAN DEFAULT FALSE, + share_id VARCHAR(100) UNIQUE, + created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_threads_user ON 
threads(user_id)`, + `CREATE INDEX IF NOT EXISTS idx_threads_space ON threads(space_id)`, + `CREATE INDEX IF NOT EXISTS idx_threads_share ON threads(share_id)`, + `CREATE TABLE IF NOT EXISTS thread_messages ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + thread_id UUID NOT NULL REFERENCES threads(id) ON DELETE CASCADE, + role VARCHAR(20) NOT NULL, + content TEXT NOT NULL, + sources JSONB DEFAULT '[]', + widgets JSONB DEFAULT '[]', + related_questions JSONB DEFAULT '[]', + model VARCHAR(100), + tokens_used INT DEFAULT 0, + created_at TIMESTAMPTZ DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_thread_messages_thread ON thread_messages(thread_id)`, + } + + for _, m := range migrations { + if _, err := r.db.db.ExecContext(ctx, m); err != nil { + return err + } + } + return nil +} + +func (r *ThreadRepository) Create(ctx context.Context, t *Thread) error { + query := ` + INSERT INTO threads (user_id, space_id, title, focus_mode, is_public) + VALUES ($1, $2, $3, $4, $5) + RETURNING id, created_at, updated_at + ` + return r.db.db.QueryRowContext(ctx, query, + t.UserID, t.SpaceID, t.Title, t.FocusMode, t.IsPublic, + ).Scan(&t.ID, &t.CreatedAt, &t.UpdatedAt) +} + +func (r *ThreadRepository) GetByID(ctx context.Context, id string) (*Thread, error) { + query := ` + SELECT id, user_id, space_id, title, focus_mode, is_public, share_id, created_at, updated_at, + (SELECT COUNT(*) FROM thread_messages WHERE thread_id = threads.id) as message_count + FROM threads + WHERE id = $1 + ` + + var t Thread + err := r.db.db.QueryRowContext(ctx, query, id).Scan( + &t.ID, &t.UserID, &t.SpaceID, &t.Title, &t.FocusMode, + &t.IsPublic, &t.ShareID, &t.CreatedAt, &t.UpdatedAt, &t.MessageCount, + ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + + return &t, nil +} + +func (r *ThreadRepository) GetByShareID(ctx context.Context, shareID string) (*Thread, error) { + query := ` + SELECT id, user_id, space_id, title, focus_mode, is_public, 
share_id, created_at, updated_at + FROM threads + WHERE share_id = $1 AND is_public = true + ` + + var t Thread + err := r.db.db.QueryRowContext(ctx, query, shareID).Scan( + &t.ID, &t.UserID, &t.SpaceID, &t.Title, &t.FocusMode, + &t.IsPublic, &t.ShareID, &t.CreatedAt, &t.UpdatedAt, + ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + + return &t, nil +} + +func (r *ThreadRepository) GetByUserID(ctx context.Context, userID string, limit, offset int) ([]*Thread, error) { + query := ` + SELECT id, user_id, space_id, title, focus_mode, is_public, share_id, created_at, updated_at, + (SELECT COUNT(*) FROM thread_messages WHERE thread_id = threads.id) as message_count + FROM threads + WHERE user_id = $1 + ORDER BY updated_at DESC + LIMIT $2 OFFSET $3 + ` + + rows, err := r.db.db.QueryContext(ctx, query, userID, limit, offset) + if err != nil { + return nil, err + } + defer rows.Close() + + var threads []*Thread + for rows.Next() { + var t Thread + if err := rows.Scan( + &t.ID, &t.UserID, &t.SpaceID, &t.Title, &t.FocusMode, + &t.IsPublic, &t.ShareID, &t.CreatedAt, &t.UpdatedAt, &t.MessageCount, + ); err != nil { + return nil, err + } + threads = append(threads, &t) + } + + return threads, nil +} + +func (r *ThreadRepository) Update(ctx context.Context, t *Thread) error { + query := ` + UPDATE threads + SET title = $2, focus_mode = $3, is_public = $4, updated_at = NOW() + WHERE id = $1 + ` + _, err := r.db.db.ExecContext(ctx, query, t.ID, t.Title, t.FocusMode, t.IsPublic) + return err +} + +func (r *ThreadRepository) SetShareID(ctx context.Context, threadID, shareID string) error { + _, err := r.db.db.ExecContext(ctx, + "UPDATE threads SET share_id = $2, is_public = true WHERE id = $1", + threadID, shareID, + ) + return err +} + +func (r *ThreadRepository) Delete(ctx context.Context, id string) error { + _, err := r.db.db.ExecContext(ctx, "DELETE FROM threads WHERE id = $1", id) + return err +} + +func (r *ThreadRepository) 
AddMessage(ctx context.Context, msg *ThreadMessage) error { + sourcesJSON, _ := json.Marshal(msg.Sources) + widgetsJSON, _ := json.Marshal(msg.Widgets) + relatedJSON, _ := json.Marshal(msg.RelatedQuestions) + + query := ` + INSERT INTO thread_messages (thread_id, role, content, sources, widgets, related_questions, model, tokens_used) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8) + RETURNING id, created_at + ` + err := r.db.db.QueryRowContext(ctx, query, + msg.ThreadID, msg.Role, msg.Content, sourcesJSON, widgetsJSON, relatedJSON, msg.Model, msg.TokensUsed, + ).Scan(&msg.ID, &msg.CreatedAt) + + if err == nil { + r.db.db.ExecContext(ctx, "UPDATE threads SET updated_at = NOW() WHERE id = $1", msg.ThreadID) + } + + return err +} + +func (r *ThreadRepository) GetMessages(ctx context.Context, threadID string, limit, offset int) ([]ThreadMessage, error) { + query := ` + SELECT id, thread_id, role, content, sources, widgets, related_questions, model, tokens_used, created_at + FROM thread_messages + WHERE thread_id = $1 + ORDER BY created_at ASC + LIMIT $2 OFFSET $3 + ` + + rows, err := r.db.db.QueryContext(ctx, query, threadID, limit, offset) + if err != nil { + return nil, err + } + defer rows.Close() + + var messages []ThreadMessage + for rows.Next() { + var msg ThreadMessage + var sourcesJSON, widgetsJSON, relatedJSON []byte + + if err := rows.Scan( + &msg.ID, &msg.ThreadID, &msg.Role, &msg.Content, + &sourcesJSON, &widgetsJSON, &relatedJSON, + &msg.Model, &msg.TokensUsed, &msg.CreatedAt, + ); err != nil { + return nil, err + } + + json.Unmarshal(sourcesJSON, &msg.Sources) + json.Unmarshal(widgetsJSON, &msg.Widgets) + json.Unmarshal(relatedJSON, &msg.RelatedQuestions) + messages = append(messages, msg) + } + + return messages, nil +} + +func (r *ThreadRepository) GenerateTitle(ctx context.Context, threadID, firstMessage string) error { + title := firstMessage + if len(title) > 100 { + title = title[:97] + "..." 
+ } + _, err := r.db.db.ExecContext(ctx, + "UPDATE threads SET title = $2 WHERE id = $1", + threadID, title, + ) + return err +} diff --git a/backend/internal/db/user_interests_repo.go b/backend/internal/db/user_interests_repo.go new file mode 100644 index 0000000..7492a5c --- /dev/null +++ b/backend/internal/db/user_interests_repo.go @@ -0,0 +1,323 @@ +package db + +import ( + "context" + "database/sql" + "encoding/json" + "time" +) + +type UserInterestsData struct { + UserID string `json:"userId"` + Topics json.RawMessage `json:"topics"` + Sources json.RawMessage `json:"sources"` + Keywords json.RawMessage `json:"keywords"` + ViewHistory json.RawMessage `json:"viewHistory"` + SavedArticles json.RawMessage `json:"savedArticles"` + BlockedSources json.RawMessage `json:"blockedSources"` + BlockedTopics json.RawMessage `json:"blockedTopics"` + PreferredLang string `json:"preferredLang"` + Region string `json:"region"` + ReadingLevel string `json:"readingLevel"` + Notifications json.RawMessage `json:"notifications"` + CustomCategories json.RawMessage `json:"customCategories"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` +} + +type UserInterestsRepository struct { + db *PostgresDB +} + +func NewUserInterestsRepository(db *PostgresDB) *UserInterestsRepository { + return &UserInterestsRepository{db: db} +} + +func (r *UserInterestsRepository) createTable(ctx context.Context) error { + query := ` + CREATE TABLE IF NOT EXISTS user_interests ( + user_id VARCHAR(255) PRIMARY KEY, + topics JSONB DEFAULT '{}', + sources JSONB DEFAULT '{}', + keywords JSONB DEFAULT '{}', + view_history JSONB DEFAULT '[]', + saved_articles JSONB DEFAULT '[]', + blocked_sources JSONB DEFAULT '[]', + blocked_topics JSONB DEFAULT '[]', + preferred_lang VARCHAR(10) DEFAULT 'ru', + region VARCHAR(50) DEFAULT 'russia', + reading_level VARCHAR(20) DEFAULT 'general', + notifications JSONB DEFAULT '{}', + custom_categories JSONB DEFAULT '[]', + created_at TIMESTAMPTZ 
DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW() + ); + + CREATE INDEX IF NOT EXISTS idx_user_interests_updated ON user_interests(updated_at); + CREATE INDEX IF NOT EXISTS idx_user_interests_region ON user_interests(region); + ` + + _, err := r.db.DB().ExecContext(ctx, query) + return err +} + +func (r *UserInterestsRepository) Get(ctx context.Context, userID string) (*UserInterestsData, error) { + query := ` + SELECT user_id, topics, sources, keywords, view_history, saved_articles, + blocked_sources, blocked_topics, preferred_lang, region, reading_level, + notifications, custom_categories, created_at, updated_at + FROM user_interests + WHERE user_id = $1 + ` + + row := r.db.DB().QueryRowContext(ctx, query, userID) + + var data UserInterestsData + err := row.Scan( + &data.UserID, &data.Topics, &data.Sources, &data.Keywords, + &data.ViewHistory, &data.SavedArticles, &data.BlockedSources, + &data.BlockedTopics, &data.PreferredLang, &data.Region, + &data.ReadingLevel, &data.Notifications, &data.CustomCategories, + &data.CreatedAt, &data.UpdatedAt, + ) + + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + + return &data, nil +} + +func (r *UserInterestsRepository) Save(ctx context.Context, data *UserInterestsData) error { + query := ` + INSERT INTO user_interests ( + user_id, topics, sources, keywords, view_history, saved_articles, + blocked_sources, blocked_topics, preferred_lang, region, reading_level, + notifications, custom_categories, created_at, updated_at + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15) + ON CONFLICT (user_id) DO UPDATE SET + topics = EXCLUDED.topics, + sources = EXCLUDED.sources, + keywords = EXCLUDED.keywords, + view_history = EXCLUDED.view_history, + saved_articles = EXCLUDED.saved_articles, + blocked_sources = EXCLUDED.blocked_sources, + blocked_topics = EXCLUDED.blocked_topics, + preferred_lang = EXCLUDED.preferred_lang, + region = EXCLUDED.region, + reading_level = 
EXCLUDED.reading_level, + notifications = EXCLUDED.notifications, + custom_categories = EXCLUDED.custom_categories, + updated_at = NOW() + ` + + now := time.Now() + if data.CreatedAt.IsZero() { + data.CreatedAt = now + } + data.UpdatedAt = now + + if data.Topics == nil { + data.Topics = json.RawMessage("{}") + } + if data.Sources == nil { + data.Sources = json.RawMessage("{}") + } + if data.Keywords == nil { + data.Keywords = json.RawMessage("{}") + } + if data.ViewHistory == nil { + data.ViewHistory = json.RawMessage("[]") + } + if data.SavedArticles == nil { + data.SavedArticles = json.RawMessage("[]") + } + if data.BlockedSources == nil { + data.BlockedSources = json.RawMessage("[]") + } + if data.BlockedTopics == nil { + data.BlockedTopics = json.RawMessage("[]") + } + if data.Notifications == nil { + data.Notifications = json.RawMessage("{}") + } + if data.CustomCategories == nil { + data.CustomCategories = json.RawMessage("[]") + } + + _, err := r.db.DB().ExecContext(ctx, query, + data.UserID, data.Topics, data.Sources, data.Keywords, + data.ViewHistory, data.SavedArticles, data.BlockedSources, + data.BlockedTopics, data.PreferredLang, data.Region, + data.ReadingLevel, data.Notifications, data.CustomCategories, + data.CreatedAt, data.UpdatedAt, + ) + + return err +} + +func (r *UserInterestsRepository) Delete(ctx context.Context, userID string) error { + query := `DELETE FROM user_interests WHERE user_id = $1` + _, err := r.db.DB().ExecContext(ctx, query, userID) + return err +} + +func (r *UserInterestsRepository) AddViewEvent(ctx context.Context, userID string, event json.RawMessage) error { + query := ` + UPDATE user_interests + SET view_history = CASE + WHEN jsonb_array_length(view_history) >= 500 + THEN jsonb_build_array($2) || view_history[0:499] + ELSE jsonb_build_array($2) || view_history + END, + updated_at = NOW() + WHERE user_id = $1 + ` + + result, err := r.db.DB().ExecContext(ctx, query, userID, event) + if err != nil { + return err + } + + 
rowsAffected, _ := result.RowsAffected() + if rowsAffected == 0 { + insertQuery := ` + INSERT INTO user_interests (user_id, view_history, updated_at) + VALUES ($1, jsonb_build_array($2), NOW()) + ` + _, err = r.db.DB().ExecContext(ctx, insertQuery, userID, event) + } + + return err +} + +func (r *UserInterestsRepository) UpdateTopicScore(ctx context.Context, userID, topic string, delta float64) error { + query := ` + UPDATE user_interests + SET topics = topics || jsonb_build_object($2, COALESCE((topics->>$2)::float, 0) + $3), + updated_at = NOW() + WHERE user_id = $1 + ` + + result, err := r.db.DB().ExecContext(ctx, query, userID, topic, delta) + if err != nil { + return err + } + + rowsAffected, _ := result.RowsAffected() + if rowsAffected == 0 { + insertQuery := ` + INSERT INTO user_interests (user_id, topics, updated_at) + VALUES ($1, jsonb_build_object($2, $3), NOW()) + ` + _, err = r.db.DB().ExecContext(ctx, insertQuery, userID, topic, delta) + } + + return err +} + +func (r *UserInterestsRepository) SaveArticle(ctx context.Context, userID, articleURL string) error { + query := ` + UPDATE user_interests + SET saved_articles = CASE + WHEN NOT saved_articles ? $2 + THEN saved_articles || jsonb_build_array($2) + ELSE saved_articles + END, + updated_at = NOW() + WHERE user_id = $1 + ` + + _, err := r.db.DB().ExecContext(ctx, query, userID, articleURL) + return err +} + +func (r *UserInterestsRepository) UnsaveArticle(ctx context.Context, userID, articleURL string) error { + query := ` + UPDATE user_interests + SET saved_articles = saved_articles - $2, + updated_at = NOW() + WHERE user_id = $1 + ` + + _, err := r.db.DB().ExecContext(ctx, query, userID, articleURL) + return err +} + +func (r *UserInterestsRepository) BlockSource(ctx context.Context, userID, source string) error { + query := ` + UPDATE user_interests + SET blocked_sources = CASE + WHEN NOT blocked_sources ? 
$2 + THEN blocked_sources || jsonb_build_array($2) + ELSE blocked_sources + END, + updated_at = NOW() + WHERE user_id = $1 + ` + + _, err := r.db.DB().ExecContext(ctx, query, userID, source) + return err +} + +func (r *UserInterestsRepository) UnblockSource(ctx context.Context, userID, source string) error { + query := ` + UPDATE user_interests + SET blocked_sources = blocked_sources - $2, + updated_at = NOW() + WHERE user_id = $1 + ` + + _, err := r.db.DB().ExecContext(ctx, query, userID, source) + return err +} + +func (r *UserInterestsRepository) GetTopUsers(ctx context.Context, limit int) ([]string, error) { + query := ` + SELECT user_id FROM user_interests + ORDER BY updated_at DESC + LIMIT $1 + ` + + rows, err := r.db.DB().QueryContext(ctx, query, limit) + if err != nil { + return nil, err + } + defer rows.Close() + + var userIDs []string + for rows.Next() { + var userID string + if err := rows.Scan(&userID); err != nil { + return nil, err + } + userIDs = append(userIDs, userID) + } + + return userIDs, rows.Err() +} + +func (r *UserInterestsRepository) DecayAllInterests(ctx context.Context, decayFactor float64) error { + query := ` + UPDATE user_interests + SET topics = ( + SELECT jsonb_object_agg(key, (value::text::float * $1)) + FROM jsonb_each(topics) WHERE (value::text::float * $1) > 0.01 + ), + sources = ( + SELECT jsonb_object_agg(key, (value::text::float * $1)) + FROM jsonb_each(sources) WHERE (value::text::float * $1) > 0.01 + ), + keywords = ( + SELECT jsonb_object_agg(key, (value::text::float * $1)) + FROM jsonb_each(keywords) WHERE (value::text::float * $1) > 0.01 + ), + updated_at = NOW() + ` + + _, err := r.db.DB().ExecContext(ctx, query, decayFactor) + return err +} diff --git a/backend/internal/discover/personalization.go b/backend/internal/discover/personalization.go new file mode 100644 index 0000000..014177c --- /dev/null +++ b/backend/internal/discover/personalization.go @@ -0,0 +1,691 @@ +package discover + +import ( + "context" + 
"encoding/json" + "fmt" + "math" + "sort" + "strings" + "sync" + "time" +) + +type UserInterests struct { + UserID string `json:"userId"` + Topics map[string]float64 `json:"topics"` + Sources map[string]float64 `json:"sources"` + Keywords map[string]float64 `json:"keywords"` + ViewHistory []ViewEvent `json:"viewHistory"` + SavedArticles []string `json:"savedArticles"` + BlockedSources []string `json:"blockedSources"` + BlockedTopics []string `json:"blockedTopics"` + PreferredLang string `json:"preferredLang"` + Region string `json:"region"` + ReadingLevel string `json:"readingLevel"` + Notifications NotificationPrefs `json:"notifications"` + LastUpdated time.Time `json:"lastUpdated"` + CustomCategories []CustomCategory `json:"customCategories,omitempty"` +} + +type ViewEvent struct { + ArticleID string `json:"articleId"` + URL string `json:"url"` + Topic string `json:"topic"` + Source string `json:"source"` + Keywords []string `json:"keywords"` + TimeSpent int `json:"timeSpentSeconds"` + Completed bool `json:"completed"` + Saved bool `json:"saved"` + Shared bool `json:"shared"` + Timestamp time.Time `json:"timestamp"` + Engagement float64 `json:"engagement"` +} + +type NotificationPrefs struct { + Enabled bool `json:"enabled"` + DailyDigest bool `json:"dailyDigest"` + DigestTime string `json:"digestTime"` + BreakingNews bool `json:"breakingNews"` + TopicAlerts []string `json:"topicAlerts"` + Frequency string `json:"frequency"` +} + +type CustomCategory struct { + ID string `json:"id"` + Name string `json:"name"` + Keywords []string `json:"keywords"` + Sources []string `json:"sources"` + Weight float64 `json:"weight"` +} + +type PersonalizedFeed struct { + UserID string `json:"userId"` + Items []FeedItem `json:"items"` + Categories []FeedCategory `json:"categories"` + TrendingIn []string `json:"trendingIn"` + UpdatedAt time.Time `json:"updatedAt"` + NextUpdate time.Time `json:"nextUpdate"` +} + +type FeedItem struct { + ID string `json:"id"` + URL string 
`json:"url"` + Title string `json:"title"` + Summary string `json:"summary"` + Thumbnail string `json:"thumbnail"` + Source string `json:"source"` + SourceLogo string `json:"sourceLogo"` + Topic string `json:"topic"` + Keywords []string `json:"keywords"` + PublishedAt time.Time `json:"publishedAt"` + RelevanceScore float64 `json:"relevanceScore"` + Reason string `json:"reason"` + SourcesCount int `json:"sourcesCount"` + ReadTime int `json:"readTimeMinutes"` + HasDigest bool `json:"hasDigest"` + IsBreaking bool `json:"isBreaking"` + IsTrending bool `json:"isTrending"` + IsSaved bool `json:"isSaved"` + IsRead bool `json:"isRead"` +} + +type FeedCategory struct { + ID string `json:"id"` + Name string `json:"name"` + Icon string `json:"icon"` + Color string `json:"color"` + Items []FeedItem `json:"items"` + IsCustom bool `json:"isCustom"` +} + +type PersonalizationEngine struct { + userStore UserInterestStore + contentRepo ContentRepository + mu sync.RWMutex + config PersonalizationConfig +} + +type PersonalizationConfig struct { + MaxFeedItems int + DecayFactor float64 + RecencyWeight float64 + EngagementWeight float64 + TopicMatchWeight float64 + SourceTrustWeight float64 + DiversityFactor float64 + TrendingBoost float64 + BreakingBoost float64 +} + +type UserInterestStore interface { + Get(ctx context.Context, userID string) (*UserInterests, error) + Save(ctx context.Context, interests *UserInterests) error + Delete(ctx context.Context, userID string) error +} + +type ContentRepository interface { + GetLatestContent(ctx context.Context, topics []string, limit int) ([]FeedItem, error) + GetTrending(ctx context.Context, region string, limit int) ([]FeedItem, error) + GetByKeywords(ctx context.Context, keywords []string, limit int) ([]FeedItem, error) +} + +func DefaultConfig() PersonalizationConfig { + return PersonalizationConfig{ + MaxFeedItems: 50, + DecayFactor: 0.95, + RecencyWeight: 0.25, + EngagementWeight: 0.20, + TopicMatchWeight: 0.30, + SourceTrustWeight: 
0.15, + DiversityFactor: 0.10, + TrendingBoost: 1.5, + BreakingBoost: 2.0, + } +} + +func NewPersonalizationEngine(userStore UserInterestStore, contentRepo ContentRepository, cfg PersonalizationConfig) *PersonalizationEngine { + return &PersonalizationEngine{ + userStore: userStore, + contentRepo: contentRepo, + config: cfg, + } +} + +func (e *PersonalizationEngine) GenerateForYouFeed(ctx context.Context, userID string) (*PersonalizedFeed, error) { + interests, err := e.userStore.Get(ctx, userID) + if err != nil { + interests = &UserInterests{ + UserID: userID, + Topics: make(map[string]float64), + Sources: make(map[string]float64), + Keywords: make(map[string]float64), + PreferredLang: "ru", + Region: "russia", + } + } + + var allItems []FeedItem + var mu sync.Mutex + var wg sync.WaitGroup + + topTopics := e.getTopInterests(interests.Topics, 5) + wg.Add(1) + go func() { + defer wg.Done() + items, _ := e.contentRepo.GetLatestContent(ctx, topTopics, 30) + mu.Lock() + allItems = append(allItems, items...) + mu.Unlock() + }() + + wg.Add(1) + go func() { + defer wg.Done() + items, _ := e.contentRepo.GetTrending(ctx, interests.Region, 20) + for i := range items { + items[i].IsTrending = true + } + mu.Lock() + allItems = append(allItems, items...) + mu.Unlock() + }() + + topKeywords := e.getTopKeywords(interests.Keywords, 10) + if len(topKeywords) > 0 { + wg.Add(1) + go func() { + defer wg.Done() + items, _ := e.contentRepo.GetByKeywords(ctx, topKeywords, 15) + mu.Lock() + allItems = append(allItems, items...) 
+ mu.Unlock() + }() + } + + wg.Wait() + + allItems = e.deduplicateItems(allItems) + allItems = e.filterBlockedContent(allItems, interests) + + for i := range allItems { + allItems[i].RelevanceScore = e.calculateRelevance(allItems[i], interests) + allItems[i].Reason = e.explainRecommendation(allItems[i], interests) + allItems[i].IsRead = e.isArticleRead(allItems[i].URL, interests) + allItems[i].IsSaved = e.isArticleSaved(allItems[i].URL, interests) + } + + sort.Slice(allItems, func(i, j int) bool { + return allItems[i].RelevanceScore > allItems[j].RelevanceScore + }) + + allItems = e.applyDiversity(allItems) + + if len(allItems) > e.config.MaxFeedItems { + allItems = allItems[:e.config.MaxFeedItems] + } + + categories := e.groupByCategory(allItems, interests) + + return &PersonalizedFeed{ + UserID: userID, + Items: allItems, + Categories: categories, + TrendingIn: topTopics, + UpdatedAt: time.Now(), + NextUpdate: time.Now().Add(15 * time.Minute), + }, nil +} + +func (e *PersonalizationEngine) RecordView(ctx context.Context, userID string, event ViewEvent) error { + interests, err := e.userStore.Get(ctx, userID) + if err != nil { + interests = &UserInterests{ + UserID: userID, + Topics: make(map[string]float64), + Sources: make(map[string]float64), + Keywords: make(map[string]float64), + } + } + + event.Engagement = e.calculateEngagement(event) + + interests.ViewHistory = append([]ViewEvent{event}, interests.ViewHistory...) 
+ if len(interests.ViewHistory) > 500 { + interests.ViewHistory = interests.ViewHistory[:500] + } + + topicWeight := event.Engagement * 0.1 + interests.Topics[event.Topic] += topicWeight + + sourceWeight := event.Engagement * 0.05 + interests.Sources[event.Source] += sourceWeight + + keywordWeight := event.Engagement * 0.02 + for _, kw := range event.Keywords { + interests.Keywords[kw] += keywordWeight + } + + if event.Saved { + interests.SavedArticles = append(interests.SavedArticles, event.URL) + } + + interests.LastUpdated = time.Now() + + e.decayInterests(interests) + + return e.userStore.Save(ctx, interests) +} + +func (e *PersonalizationEngine) UpdateTopicPreference(ctx context.Context, userID, topic string, weight float64) error { + interests, err := e.userStore.Get(ctx, userID) + if err != nil { + interests = &UserInterests{ + UserID: userID, + Topics: make(map[string]float64), + Sources: make(map[string]float64), + Keywords: make(map[string]float64), + } + } + + interests.Topics[topic] = weight + interests.LastUpdated = time.Now() + + return e.userStore.Save(ctx, interests) +} + +func (e *PersonalizationEngine) BlockSource(ctx context.Context, userID, source string) error { + interests, err := e.userStore.Get(ctx, userID) + if err != nil { + return err + } + + for _, blocked := range interests.BlockedSources { + if blocked == source { + return nil + } + } + + interests.BlockedSources = append(interests.BlockedSources, source) + interests.LastUpdated = time.Now() + + return e.userStore.Save(ctx, interests) +} + +func (e *PersonalizationEngine) BlockTopic(ctx context.Context, userID, topic string) error { + interests, err := e.userStore.Get(ctx, userID) + if err != nil { + return err + } + + for _, blocked := range interests.BlockedTopics { + if blocked == topic { + return nil + } + } + + interests.BlockedTopics = append(interests.BlockedTopics, topic) + delete(interests.Topics, topic) + interests.LastUpdated = time.Now() + + return e.userStore.Save(ctx, 
interests) +} + +func (e *PersonalizationEngine) AddCustomCategory(ctx context.Context, userID string, category CustomCategory) error { + interests, err := e.userStore.Get(ctx, userID) + if err != nil { + return err + } + + interests.CustomCategories = append(interests.CustomCategories, category) + interests.LastUpdated = time.Now() + + return e.userStore.Save(ctx, interests) +} + +func (e *PersonalizationEngine) GetUserTopics(ctx context.Context, userID string) (map[string]float64, error) { + interests, err := e.userStore.Get(ctx, userID) + if err != nil { + return nil, err + } + return interests.Topics, nil +} + +func (e *PersonalizationEngine) calculateRelevance(item FeedItem, interests *UserInterests) float64 { + score := 0.0 + + if topicScore, ok := interests.Topics[item.Topic]; ok { + score += topicScore * e.config.TopicMatchWeight + } + + if sourceScore, ok := interests.Sources[item.Source]; ok { + score += sourceScore * e.config.SourceTrustWeight + } + + keywordScore := 0.0 + for _, kw := range item.Keywords { + if kwScore, ok := interests.Keywords[strings.ToLower(kw)]; ok { + keywordScore += kwScore + } + } + score += keywordScore * 0.1 + + hoursSincePublish := time.Since(item.PublishedAt).Hours() + recencyScore := math.Max(0, 1.0-hoursSincePublish/168.0) + score += recencyScore * e.config.RecencyWeight + + if item.IsTrending { + score *= e.config.TrendingBoost + } + + if item.IsBreaking { + score *= e.config.BreakingBoost + } + + return score +} + +func (e *PersonalizationEngine) calculateEngagement(event ViewEvent) float64 { + engagement := 0.0 + + if event.TimeSpent > 0 { + readTimeScore := math.Min(1.0, float64(event.TimeSpent)/300.0) + engagement += readTimeScore * 0.4 + } + + if event.Completed { + engagement += 0.3 + } + + if event.Saved { + engagement += 0.2 + } + + if event.Shared { + engagement += 0.1 + } + + return engagement +} + +func (e *PersonalizationEngine) explainRecommendation(item FeedItem, interests *UserInterests) string { + if 
item.IsBreaking {
		return "Срочная новость"
	}

	if item.IsTrending {
		return "Популярно сейчас"
	}

	if topicScore, ok := interests.Topics[item.Topic]; ok && topicScore > 0.5 {
		return fmt.Sprintf("Из вашей категории: %s", item.Topic)
	}

	if sourceScore, ok := interests.Sources[item.Source]; ok && sourceScore > 0.3 {
		return fmt.Sprintf("Из источника, который вы читаете: %s", item.Source)
	}

	for _, kw := range item.Keywords {
		if kwScore, ok := interests.Keywords[strings.ToLower(kw)]; ok && kwScore > 0.2 {
			return fmt.Sprintf("По вашему интересу: %s", kw)
		}
	}

	return "Рекомендуем для вас"
}

// getTopInterests returns the names of the `limit` highest-weighted entries,
// ordered by descending weight. Ties fall back to map iteration order, which
// is unspecified (same as before).
func (e *PersonalizationEngine) getTopInterests(interests map[string]float64, limit int) []string {
	names := make([]string, 0, len(interests))
	for name := range interests {
		names = append(names, name)
	}

	sort.Slice(names, func(i, j int) bool {
		return interests[names[i]] > interests[names[j]]
	})

	if len(names) > limit {
		names = names[:limit]
	}
	return names
}

// getTopKeywords is a readability alias: keywords rank exactly like topics.
func (e *PersonalizationEngine) getTopKeywords(keywords map[string]float64, limit int) []string {
	return e.getTopInterests(keywords, limit)
}

// deduplicateItems keeps the first occurrence of each URL, preserving the
// original ordering of the slice.
func (e *PersonalizationEngine) deduplicateItems(items []FeedItem) []FeedItem {
	unique := make([]FeedItem, 0, len(items))
	seen := make(map[string]bool, len(items))

	for _, it := range items {
		if seen[it.URL] {
			continue
		}
		seen[it.URL] = true
		unique = append(unique, it)
	}

	return unique
}

func (e *PersonalizationEngine) filterBlockedContent(items []FeedItem, interests *UserInterests) []FeedItem {
	blockedSources := make(map[string]bool)
	for _, s := range interests.BlockedSources {
		blockedSources[strings.ToLower(s)] = true
	}

	blockedTopics := make(map[string]bool)
	for _, t := range interests.BlockedTopics {
		blockedTopics[strings.ToLower(t)] = true
	}

	result := make([]FeedItem, 0,
len(items)) + for _, item := range items { + if blockedSources[strings.ToLower(item.Source)] { + continue + } + if blockedTopics[strings.ToLower(item.Topic)] { + continue + } + result = append(result, item) + } + + return result +} + +func (e *PersonalizationEngine) applyDiversity(items []FeedItem) []FeedItem { + if len(items) <= 10 { + return items + } + + topicCounts := make(map[string]int) + sourceCounts := make(map[string]int) + maxPerTopic := len(items) / 5 + maxPerSource := len(items) / 4 + + if maxPerTopic < 3 { + maxPerTopic = 3 + } + if maxPerSource < 3 { + maxPerSource = 3 + } + + result := make([]FeedItem, 0, len(items)) + deferred := make([]FeedItem, 0) + + for _, item := range items { + if topicCounts[item.Topic] >= maxPerTopic || sourceCounts[item.Source] >= maxPerSource { + deferred = append(deferred, item) + continue + } + + topicCounts[item.Topic]++ + sourceCounts[item.Source]++ + result = append(result, item) + } + + for _, item := range deferred { + if len(result) >= e.config.MaxFeedItems { + break + } + result = append(result, item) + } + + return result +} + +func (e *PersonalizationEngine) groupByCategory(items []FeedItem, interests *UserInterests) []FeedCategory { + categoryMap := make(map[string][]FeedItem) + + for _, item := range items { + categoryMap[item.Topic] = append(categoryMap[item.Topic], item) + } + + categories := make([]FeedCategory, 0, len(categoryMap)) + categoryMeta := map[string]struct { + Icon string + Color string + }{ + "tech": {"💻", "#3B82F6"}, + "finance": {"💰", "#10B981"}, + "sports": {"⚽", "#F59E0B"}, + "politics": {"🏛️", "#6366F1"}, + "science": {"🔬", "#8B5CF6"}, + "health": {"🏥", "#EC4899"}, + "entertainment": {"🎬", "#F97316"}, + "world": {"🌍", "#14B8A6"}, + "business": {"📊", "#6B7280"}, + "culture": {"🎭", "#A855F7"}, + } + + for topic, topicItems := range categoryMap { + if len(topicItems) < 2 { + continue + } + + meta, ok := categoryMeta[strings.ToLower(topic)] + if !ok { + meta = struct { + Icon string + Color 
string + }{"📰", "#6B7280"} + } + + categories = append(categories, FeedCategory{ + ID: topic, + Name: topic, + Icon: meta.Icon, + Color: meta.Color, + Items: topicItems, + }) + } + + for _, custom := range interests.CustomCategories { + customItems := make([]FeedItem, 0) + for _, item := range items { + for _, kw := range custom.Keywords { + if containsKeyword(item, kw) { + customItems = append(customItems, item) + break + } + } + } + if len(customItems) > 0 { + categories = append(categories, FeedCategory{ + ID: custom.ID, + Name: custom.Name, + Icon: "⭐", + Color: "#FBBF24", + Items: customItems, + IsCustom: true, + }) + } + } + + sort.Slice(categories, func(i, j int) bool { + iScore := interests.Topics[categories[i].ID] + jScore := interests.Topics[categories[j].ID] + return iScore > jScore + }) + + return categories +} + +func (e *PersonalizationEngine) decayInterests(interests *UserInterests) { + for k := range interests.Topics { + interests.Topics[k] *= e.config.DecayFactor + if interests.Topics[k] < 0.01 { + delete(interests.Topics, k) + } + } + + for k := range interests.Sources { + interests.Sources[k] *= e.config.DecayFactor + if interests.Sources[k] < 0.01 { + delete(interests.Sources, k) + } + } + + for k := range interests.Keywords { + interests.Keywords[k] *= e.config.DecayFactor + if interests.Keywords[k] < 0.01 { + delete(interests.Keywords, k) + } + } +} + +func (e *PersonalizationEngine) isArticleRead(url string, interests *UserInterests) bool { + for _, event := range interests.ViewHistory { + if event.URL == url { + return true + } + } + return false +} + +func (e *PersonalizationEngine) isArticleSaved(url string, interests *UserInterests) bool { + for _, saved := range interests.SavedArticles { + if saved == url { + return true + } + } + return false +} + +func containsKeyword(item FeedItem, keyword string) bool { + kw := strings.ToLower(keyword) + if strings.Contains(strings.ToLower(item.Title), kw) { + return true + } + if 
strings.Contains(strings.ToLower(item.Summary), kw) { + return true + } + for _, itemKw := range item.Keywords { + if strings.ToLower(itemKw) == kw { + return true + } + } + return false +} + +func (u *UserInterests) ToJSON() ([]byte, error) { + return json.Marshal(u) +} + +func ParseUserInterests(data []byte) (*UserInterests, error) { + var interests UserInterests + if err := json.Unmarshal(data, &interests); err != nil { + return nil, err + } + return &interests, nil +} diff --git a/backend/internal/files/analyzer.go b/backend/internal/files/analyzer.go new file mode 100644 index 0000000..7e307f7 --- /dev/null +++ b/backend/internal/files/analyzer.go @@ -0,0 +1,343 @@ +package files + +import ( + "bytes" + "context" + "encoding/base64" + "fmt" + "io" + "net/http" + "os" + "path/filepath" + "strings" + + "github.com/gooseek/backend/internal/llm" + "github.com/ledongthuc/pdf" +) + +type FileAnalyzer struct { + llmClient llm.Client + storagePath string +} + +type AnalysisResult struct { + FileType string `json:"fileType"` + ExtractedText string `json:"extractedText"` + Summary string `json:"summary"` + KeyPoints []string `json:"keyPoints"` + Metadata map[string]interface{} `json:"metadata"` +} + +func NewFileAnalyzer(llmClient llm.Client, storagePath string) *FileAnalyzer { + if storagePath == "" { + storagePath = "/tmp/gooseek-files" + } + os.MkdirAll(storagePath, 0755) + + return &FileAnalyzer{ + llmClient: llmClient, + storagePath: storagePath, + } +} + +func (fa *FileAnalyzer) AnalyzeFile(ctx context.Context, filePath string, fileType string) (*AnalysisResult, error) { + switch { + case strings.HasPrefix(fileType, "application/pdf"): + return fa.analyzePDF(ctx, filePath) + case strings.HasPrefix(fileType, "image/"): + return fa.analyzeImage(ctx, filePath, fileType) + case strings.HasPrefix(fileType, "text/"): + return fa.analyzeText(ctx, filePath) + default: + return nil, fmt.Errorf("unsupported file type: %s", fileType) + } +} + +func (fa *FileAnalyzer) 
analyzePDF(ctx context.Context, filePath string) (*AnalysisResult, error) { + text, metadata, err := extractPDFContent(filePath) + if err != nil { + return nil, fmt.Errorf("failed to extract PDF content: %w", err) + } + + if len(text) > 50000 { + text = text[:50000] + "\n\n[Content truncated...]" + } + + summary, keyPoints, err := fa.generateSummary(ctx, text, "PDF document") + if err != nil { + summary = "" + keyPoints = nil + } + + return &AnalysisResult{ + FileType: "pdf", + ExtractedText: text, + Summary: summary, + KeyPoints: keyPoints, + Metadata: metadata, + }, nil +} + +func extractPDFContent(filePath string) (string, map[string]interface{}, error) { + f, r, err := pdf.Open(filePath) + if err != nil { + return "", nil, err + } + defer f.Close() + + var textBuilder strings.Builder + numPages := r.NumPage() + + for i := 1; i <= numPages; i++ { + p := r.Page(i) + if p.V.IsNull() { + continue + } + + text, err := p.GetPlainText(nil) + if err != nil { + continue + } + + textBuilder.WriteString(text) + textBuilder.WriteString("\n\n") + + if textBuilder.Len() > 100000 { + break + } + } + + metadata := map[string]interface{}{ + "numPages": numPages, + } + + return textBuilder.String(), metadata, nil +} + +func (fa *FileAnalyzer) analyzeImage(ctx context.Context, filePath string, mimeType string) (*AnalysisResult, error) { + imageData, err := os.ReadFile(filePath) + if err != nil { + return nil, fmt.Errorf("failed to read image: %w", err) + } + + base64Image := base64.StdEncoding.EncodeToString(imageData) + + description, err := fa.describeImage(ctx, base64Image, mimeType) + if err != nil { + description = "Image analysis unavailable" + } + + metadata := map[string]interface{}{ + "size": len(imageData), + } + + return &AnalysisResult{ + FileType: "image", + ExtractedText: description, + Summary: description, + KeyPoints: extractKeyPointsFromDescription(description), + Metadata: metadata, + }, nil +} + +func (fa *FileAnalyzer) describeImage(ctx context.Context, 
base64Image, mimeType string) (string, error) { + prompt := `Analyze this image and provide: +1. A detailed description of what's shown +2. Any text visible in the image (OCR) +3. Key elements and their relationships +4. Any data, charts, or diagrams and their meaning + +Be thorough but concise.` + + messages := []llm.Message{ + { + Role: "user", + Content: prompt, + Images: []llm.ImageContent{ + { + Type: mimeType, + Data: base64Image, + IsBase64: true, + }, + }, + }, + } + + result, err := fa.llmClient.GenerateText(ctx, llm.StreamRequest{ + Messages: messages, + }) + if err != nil { + return "", err + } + + return result, nil +} + +func (fa *FileAnalyzer) analyzeText(ctx context.Context, filePath string) (*AnalysisResult, error) { + content, err := os.ReadFile(filePath) + if err != nil { + return nil, fmt.Errorf("failed to read file: %w", err) + } + + text := string(content) + if len(text) > 50000 { + text = text[:50000] + "\n\n[Content truncated...]" + } + + summary, keyPoints, err := fa.generateSummary(ctx, text, "text document") + if err != nil { + summary = "" + keyPoints = nil + } + + return &AnalysisResult{ + FileType: "text", + ExtractedText: text, + Summary: summary, + KeyPoints: keyPoints, + Metadata: map[string]interface{}{ + "size": len(content), + "lineCount": strings.Count(text, "\n") + 1, + }, + }, nil +} + +func (fa *FileAnalyzer) generateSummary(ctx context.Context, text, docType string) (string, []string, error) { + if len(text) < 100 { + return text, nil, nil + } + + truncatedText := text + if len(text) > 15000 { + truncatedText = text[:15000] + "\n\n[Content truncated for analysis...]" + } + + prompt := fmt.Sprintf(`Analyze this %s and provide: + +1. A concise summary (2-3 paragraphs) +2. 
5-7 key points as bullet points + +Document content: +%s + +Format your response as: +SUMMARY: +[your summary here] + +KEY POINTS: +- [point 1] +- [point 2] +...`, docType, truncatedText) + + result, err := fa.llmClient.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{ + {Role: llm.RoleUser, Content: prompt}, + }, + }) + if err != nil { + return "", nil, err + } + + summary, keyPoints := parseSummaryResponse(result) + return summary, keyPoints, nil +} + +func parseSummaryResponse(response string) (string, []string) { + var summary string + var keyPoints []string + + parts := strings.Split(response, "KEY POINTS:") + if len(parts) >= 2 { + summaryPart := strings.TrimPrefix(parts[0], "SUMMARY:") + summary = strings.TrimSpace(summaryPart) + + keyPointsPart := parts[1] + for _, line := range strings.Split(keyPointsPart, "\n") { + line = strings.TrimSpace(line) + if strings.HasPrefix(line, "-") || strings.HasPrefix(line, "•") || strings.HasPrefix(line, "*") { + point := strings.TrimPrefix(strings.TrimPrefix(strings.TrimPrefix(line, "-"), "•"), "*") + point = strings.TrimSpace(point) + if point != "" { + keyPoints = append(keyPoints, point) + } + } + } + } else { + summary = response + } + + return summary, keyPoints +} + +func extractKeyPointsFromDescription(description string) []string { + var points []string + sentences := strings.Split(description, ".") + + for i, s := range sentences { + s = strings.TrimSpace(s) + if len(s) > 20 && i < 5 { + points = append(points, s+".") + } + } + + return points +} + +func DetectMimeType(filename string, content []byte) string { + ext := strings.ToLower(filepath.Ext(filename)) + switch ext { + case ".pdf": + return "application/pdf" + case ".png": + return "image/png" + case ".jpg", ".jpeg": + return "image/jpeg" + case ".gif": + return "image/gif" + case ".webp": + return "image/webp" + case ".txt": + return "text/plain" + case ".md": + return "text/markdown" + case ".csv": + return "text/csv" + case ".json": + 
return "application/json" + default: + return http.DetectContentType(content[:min(512, len(content))]) + } +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func (fa *FileAnalyzer) SaveFile(filename string, content io.Reader) (string, int64, error) { + safeName := filepath.Base(filename) + destPath := filepath.Join(fa.storagePath, safeName) + + file, err := os.Create(destPath) + if err != nil { + return "", 0, err + } + defer file.Close() + + var buf bytes.Buffer + size, err := io.Copy(io.MultiWriter(file, &buf), content) + if err != nil { + return "", 0, err + } + + return destPath, size, nil +} + +func (fa *FileAnalyzer) DeleteFile(filePath string) error { + if !strings.HasPrefix(filePath, fa.storagePath) { + return fmt.Errorf("invalid file path") + } + return os.Remove(filePath) +} diff --git a/backend/internal/finance/heatmap.go b/backend/internal/finance/heatmap.go new file mode 100644 index 0000000..8cbfc80 --- /dev/null +++ b/backend/internal/finance/heatmap.go @@ -0,0 +1,537 @@ +package finance + +import ( + "context" + "encoding/json" + "fmt" + "math" + "net/http" + "sort" + "strings" + "sync" + "time" +) + +type HeatmapService struct { + cache map[string]*CachedHeatmap + mu sync.RWMutex + httpClient *http.Client + config HeatmapConfig +} + +type HeatmapConfig struct { + DataProviderURL string + CacheTTL time.Duration + RefreshInterval time.Duration +} + +type CachedHeatmap struct { + Data *MarketHeatmap + ExpiresAt time.Time +} + +type MarketHeatmap struct { + ID string `json:"id"` + Title string `json:"title"` + Type HeatmapType `json:"type"` + Market string `json:"market"` + Sectors []Sector `json:"sectors"` + Tickers []TickerData `json:"tickers"` + Summary MarketSummary `json:"summary"` + UpdatedAt time.Time `json:"updatedAt"` + TimeRange string `json:"timeRange"` + Colorscale Colorscale `json:"colorscale"` +} + +type HeatmapType string + +const ( + HeatmapTreemap HeatmapType = "treemap" + HeatmapGrid HeatmapType = "grid" + 
HeatmapBubble HeatmapType = "bubble" + HeatmapSectorChart HeatmapType = "sector_chart" +) + +type Sector struct { + ID string `json:"id"` + Name string `json:"name"` + Change float64 `json:"change"` + MarketCap float64 `json:"marketCap"` + Volume float64 `json:"volume"` + TickerCount int `json:"tickerCount"` + TopGainers []TickerData `json:"topGainers,omitempty"` + TopLosers []TickerData `json:"topLosers,omitempty"` + Color string `json:"color"` + Weight float64 `json:"weight"` +} + +type TickerData struct { + Symbol string `json:"symbol"` + Name string `json:"name"` + Price float64 `json:"price"` + Change float64 `json:"change"` + ChangePercent float64 `json:"changePercent"` + Volume float64 `json:"volume"` + MarketCap float64 `json:"marketCap"` + Sector string `json:"sector"` + Industry string `json:"industry"` + Color string `json:"color"` + Size float64 `json:"size"` + PrevClose float64 `json:"prevClose,omitempty"` + DayHigh float64 `json:"dayHigh,omitempty"` + DayLow float64 `json:"dayLow,omitempty"` + Week52High float64 `json:"week52High,omitempty"` + Week52Low float64 `json:"week52Low,omitempty"` + PE float64 `json:"pe,omitempty"` + EPS float64 `json:"eps,omitempty"` + Dividend float64 `json:"dividend,omitempty"` + DividendYield float64 `json:"dividendYield,omitempty"` +} + +type MarketSummary struct { + TotalMarketCap float64 `json:"totalMarketCap"` + TotalVolume float64 `json:"totalVolume"` + AdvancingCount int `json:"advancingCount"` + DecliningCount int `json:"decliningCount"` + UnchangedCount int `json:"unchangedCount"` + AverageChange float64 `json:"averageChange"` + TopGainer *TickerData `json:"topGainer,omitempty"` + TopLoser *TickerData `json:"topLoser,omitempty"` + MostActive *TickerData `json:"mostActive,omitempty"` + MarketSentiment string `json:"marketSentiment"` + VIX float64 `json:"vix,omitempty"` + FearGreedIndex int `json:"fearGreedIndex,omitempty"` +} + +type Colorscale struct { + Min float64 `json:"min"` + Max float64 `json:"max"` + 
MidPoint float64 `json:"midPoint"` + Colors []string `json:"colors"` + Thresholds []float64 `json:"thresholds"` +} + +var DefaultColorscale = Colorscale{ + Min: -10, + Max: 10, + MidPoint: 0, + Colors: []string{ + "#ef4444", + "#f87171", + "#fca5a5", + "#fecaca", + "#e5e7eb", + "#bbf7d0", + "#86efac", + "#4ade80", + "#22c55e", + }, + Thresholds: []float64{-5, -3, -2, -1, 1, 2, 3, 5}, +} + +func NewHeatmapService(cfg HeatmapConfig) *HeatmapService { + if cfg.CacheTTL == 0 { + cfg.CacheTTL = 5 * time.Minute + } + if cfg.RefreshInterval == 0 { + cfg.RefreshInterval = time.Minute + } + + return &HeatmapService{ + cache: make(map[string]*CachedHeatmap), + httpClient: &http.Client{Timeout: 30 * time.Second}, + config: cfg, + } +} + +func (s *HeatmapService) GetMarketHeatmap(ctx context.Context, market string, timeRange string) (*MarketHeatmap, error) { + cacheKey := fmt.Sprintf("%s:%s", market, timeRange) + + s.mu.RLock() + if cached, ok := s.cache[cacheKey]; ok && time.Now().Before(cached.ExpiresAt) { + s.mu.RUnlock() + return cached.Data, nil + } + s.mu.RUnlock() + + heatmap, err := s.fetchMarketData(ctx, market, timeRange) + if err != nil { + return nil, err + } + + s.mu.Lock() + s.cache[cacheKey] = &CachedHeatmap{ + Data: heatmap, + ExpiresAt: time.Now().Add(s.config.CacheTTL), + } + s.mu.Unlock() + + return heatmap, nil +} + +func (s *HeatmapService) GetSectorHeatmap(ctx context.Context, market, sector, timeRange string) (*MarketHeatmap, error) { + heatmap, err := s.GetMarketHeatmap(ctx, market, timeRange) + if err != nil { + return nil, err + } + + filteredTickers := make([]TickerData, 0) + for _, t := range heatmap.Tickers { + if strings.EqualFold(t.Sector, sector) { + filteredTickers = append(filteredTickers, t) + } + } + + sectorHeatmap := &MarketHeatmap{ + ID: fmt.Sprintf("%s-%s", market, sector), + Title: fmt.Sprintf("%s - %s", market, sector), + Type: HeatmapTreemap, + Market: market, + Tickers: filteredTickers, + TimeRange: timeRange, + UpdatedAt: 
time.Now(), + Colorscale: DefaultColorscale, + } + + sectorHeatmap.Summary = s.calculateSummary(filteredTickers) + + return sectorHeatmap, nil +} + +func (s *HeatmapService) fetchMarketData(ctx context.Context, market, timeRange string) (*MarketHeatmap, error) { + heatmap := s.generateMockMarketData(market) + heatmap.TimeRange = timeRange + + return heatmap, nil +} + +func (s *HeatmapService) generateMockMarketData(market string) *MarketHeatmap { + sectors := []struct { + name string + tickers []struct{ symbol, name string } + }{ + {"Technology", []struct{ symbol, name string }{ + {"AAPL", "Apple Inc."}, + {"MSFT", "Microsoft Corp."}, + {"GOOGL", "Alphabet Inc."}, + {"AMZN", "Amazon.com Inc."}, + {"META", "Meta Platforms"}, + {"NVDA", "NVIDIA Corp."}, + {"TSLA", "Tesla Inc."}, + }}, + {"Healthcare", []struct{ symbol, name string }{ + {"JNJ", "Johnson & Johnson"}, + {"UNH", "UnitedHealth Group"}, + {"PFE", "Pfizer Inc."}, + {"MRK", "Merck & Co."}, + {"ABBV", "AbbVie Inc."}, + }}, + {"Finance", []struct{ symbol, name string }{ + {"JPM", "JPMorgan Chase"}, + {"BAC", "Bank of America"}, + {"WFC", "Wells Fargo"}, + {"GS", "Goldman Sachs"}, + {"MS", "Morgan Stanley"}, + }}, + {"Energy", []struct{ symbol, name string }{ + {"XOM", "Exxon Mobil"}, + {"CVX", "Chevron Corp."}, + {"COP", "ConocoPhillips"}, + {"SLB", "Schlumberger"}, + }}, + {"Consumer", []struct{ symbol, name string }{ + {"WMT", "Walmart Inc."}, + {"PG", "Procter & Gamble"}, + {"KO", "Coca-Cola Co."}, + {"PEP", "PepsiCo Inc."}, + {"COST", "Costco Wholesale"}, + }}, + } + + allTickers := make([]TickerData, 0) + allSectors := make([]Sector, 0) + + for _, sec := range sectors { + sectorTickers := make([]TickerData, 0) + sectorChange := 0.0 + + for _, t := range sec.tickers { + change := (randomFloat(-5, 5)) + price := randomFloat(50, 500) + marketCap := randomFloat(50e9, 3000e9) + volume := randomFloat(1e6, 100e6) + + ticker := TickerData{ + Symbol: t.symbol, + Name: t.name, + Price: price, + Change: price * 
change / 100, + ChangePercent: change, + Volume: volume, + MarketCap: marketCap, + Sector: sec.name, + Color: getColorForChange(change), + Size: math.Log10(marketCap) * 10, + } + + sectorTickers = append(sectorTickers, ticker) + sectorChange += change + } + + if len(sectorTickers) > 0 { + sectorChange /= float64(len(sectorTickers)) + } + + sort.Slice(sectorTickers, func(i, j int) bool { + return sectorTickers[i].ChangePercent > sectorTickers[j].ChangePercent + }) + + var topGainers, topLosers []TickerData + if len(sectorTickers) >= 2 { + topGainers = sectorTickers[:2] + topLosers = sectorTickers[len(sectorTickers)-2:] + } + + sectorMarketCap := 0.0 + sectorVolume := 0.0 + for _, t := range sectorTickers { + sectorMarketCap += t.MarketCap + sectorVolume += t.Volume + } + + sector := Sector{ + ID: strings.ToLower(strings.ReplaceAll(sec.name, " ", "_")), + Name: sec.name, + Change: sectorChange, + MarketCap: sectorMarketCap, + Volume: sectorVolume, + TickerCount: len(sectorTickers), + TopGainers: topGainers, + TopLosers: topLosers, + Color: getColorForChange(sectorChange), + Weight: sectorMarketCap, + } + + allSectors = append(allSectors, sector) + allTickers = append(allTickers, sectorTickers...) 
+ } + + sort.Slice(allTickers, func(i, j int) bool { + return allTickers[i].MarketCap > allTickers[j].MarketCap + }) + + return &MarketHeatmap{ + ID: market, + Title: getMarketTitle(market), + Type: HeatmapTreemap, + Market: market, + Sectors: allSectors, + Tickers: allTickers, + Summary: *s.calculateSummaryPtr(allTickers), + UpdatedAt: time.Now(), + Colorscale: DefaultColorscale, + } +} + +func (s *HeatmapService) calculateSummary(tickers []TickerData) MarketSummary { + return *s.calculateSummaryPtr(tickers) +} + +func (s *HeatmapService) calculateSummaryPtr(tickers []TickerData) *MarketSummary { + summary := &MarketSummary{} + + var totalChange float64 + var topGainer, topLoser, mostActive *TickerData + + for i := range tickers { + t := &tickers[i] + summary.TotalMarketCap += t.MarketCap + summary.TotalVolume += t.Volume + totalChange += t.ChangePercent + + if t.ChangePercent > 0 { + summary.AdvancingCount++ + } else if t.ChangePercent < 0 { + summary.DecliningCount++ + } else { + summary.UnchangedCount++ + } + + if topGainer == nil || t.ChangePercent > topGainer.ChangePercent { + topGainer = t + } + if topLoser == nil || t.ChangePercent < topLoser.ChangePercent { + topLoser = t + } + if mostActive == nil || t.Volume > mostActive.Volume { + mostActive = t + } + } + + if len(tickers) > 0 { + summary.AverageChange = totalChange / float64(len(tickers)) + } + + summary.TopGainer = topGainer + summary.TopLoser = topLoser + summary.MostActive = mostActive + + if summary.AverageChange > 1 { + summary.MarketSentiment = "bullish" + } else if summary.AverageChange < -1 { + summary.MarketSentiment = "bearish" + } else { + summary.MarketSentiment = "neutral" + } + + return summary +} + +func (s *HeatmapService) GenerateTreemapData(heatmap *MarketHeatmap) interface{} { + children := make([]map[string]interface{}, 0) + + for _, sector := range heatmap.Sectors { + sectorChildren := make([]map[string]interface{}, 0) + + for _, ticker := range heatmap.Tickers { + if ticker.Sector 
== sector.Name { + sectorChildren = append(sectorChildren, map[string]interface{}{ + "name": ticker.Symbol, + "value": ticker.MarketCap, + "change": ticker.ChangePercent, + "color": ticker.Color, + "data": ticker, + }) + } + } + + children = append(children, map[string]interface{}{ + "name": sector.Name, + "children": sectorChildren, + "change": sector.Change, + "color": sector.Color, + }) + } + + return map[string]interface{}{ + "name": heatmap.Market, + "children": children, + } +} + +func (s *HeatmapService) GenerateGridData(heatmap *MarketHeatmap, rows, cols int) [][]TickerData { + grid := make([][]TickerData, rows) + for i := range grid { + grid[i] = make([]TickerData, cols) + } + + idx := 0 + for i := 0; i < rows && idx < len(heatmap.Tickers); i++ { + for j := 0; j < cols && idx < len(heatmap.Tickers); j++ { + grid[i][j] = heatmap.Tickers[idx] + idx++ + } + } + + return grid +} + +func (s *HeatmapService) GetTopMovers(ctx context.Context, market string, count int) (*TopMovers, error) { + heatmap, err := s.GetMarketHeatmap(ctx, market, "1d") + if err != nil { + return nil, err + } + + tickers := make([]TickerData, len(heatmap.Tickers)) + copy(tickers, heatmap.Tickers) + + sort.Slice(tickers, func(i, j int) bool { + return tickers[i].ChangePercent > tickers[j].ChangePercent + }) + + gainers := tickers + if len(gainers) > count { + gainers = gainers[:count] + } + + sort.Slice(tickers, func(i, j int) bool { + return tickers[i].ChangePercent < tickers[j].ChangePercent + }) + + losers := tickers + if len(losers) > count { + losers = losers[:count] + } + + sort.Slice(tickers, func(i, j int) bool { + return tickers[i].Volume > tickers[j].Volume + }) + + active := tickers + if len(active) > count { + active = active[:count] + } + + return &TopMovers{ + Gainers: gainers, + Losers: losers, + MostActive: active, + UpdatedAt: time.Now(), + }, nil +} + +type TopMovers struct { + Gainers []TickerData `json:"gainers"` + Losers []TickerData `json:"losers"` + MostActive 
[]TickerData `json:"mostActive"` + UpdatedAt time.Time `json:"updatedAt"` +} + +func getColorForChange(change float64) string { + if change >= 5 { + return "#22c55e" + } else if change >= 3 { + return "#4ade80" + } else if change >= 1 { + return "#86efac" + } else if change >= 0 { + return "#bbf7d0" + } else if change >= -1 { + return "#fecaca" + } else if change >= -3 { + return "#fca5a5" + } else if change >= -5 { + return "#f87171" + } + return "#ef4444" +} + +func getMarketTitle(market string) string { + titles := map[string]string{ + "sp500": "S&P 500", + "nasdaq": "NASDAQ", + "dow": "Dow Jones", + "moex": "MOEX", + "crypto": "Cryptocurrency", + "forex": "Forex", + "commodities": "Commodities", + } + if title, ok := titles[strings.ToLower(market)]; ok { + return title + } + return market +} + +var rng uint64 = uint64(time.Now().UnixNano()) + +func randomFloat(min, max float64) float64 { + rng ^= rng << 13 + rng ^= rng >> 17 + rng ^= rng << 5 + f := float64(rng) / float64(1<<64) + return min + f*(max-min) +} + +func (h *MarketHeatmap) ToJSON() ([]byte, error) { + return json.Marshal(h) +} diff --git a/backend/internal/labs/generator.go b/backend/internal/labs/generator.go new file mode 100644 index 0000000..44be03d --- /dev/null +++ b/backend/internal/labs/generator.go @@ -0,0 +1,759 @@ +package labs + +import ( + "context" + "encoding/json" + "fmt" + "regexp" + "sort" + "strconv" + "strings" + "time" + + "github.com/gooseek/backend/internal/llm" + "github.com/google/uuid" +) + +type Generator struct { + llm llm.Client +} + +func NewGenerator(llmClient llm.Client) *Generator { + return &Generator{llm: llmClient} +} + +type GenerateOptions struct { + Query string + Data interface{} + PreferredTypes []VisualizationType + Theme string + Locale string + MaxVisualizations int +} + +func (g *Generator) GenerateReport(ctx context.Context, opts GenerateOptions) (*Report, error) { + analysisPrompt := fmt.Sprintf(`Analyze this data and query to determine the best 
visualizations.

Query: %s

Data: %v

Determine:
1. What visualizations would best represent this data?
2. How should the data be structured for each visualization?
3. What insights can be highlighted?

Respond in JSON format:
{
  "title": "Report title",
  "sections": [
    {
      "title": "Section title",
      "visualizations": [
        {
          "type": "chart_type",
          "title": "Viz title",
          "dataMapping": { "how to map the data" },
          "insight": "Key insight"
        }
      ]
    }
  ]
}

Available visualization types: bar_chart, line_chart, pie_chart, donut_chart, table, stat_cards, kpi, comparison, timeline, progress, heatmap, code_block, markdown, collapsible, tabs, accordion`, opts.Query, opts.Data)

	result, err := g.llm.GenerateText(ctx, llm.StreamRequest{
		Messages: []llm.Message{{Role: "user", Content: analysisPrompt}},
	})
	if err != nil {
		return nil, err
	}

	var analysis struct {
		Title    string `json:"title"`
		Sections []struct {
			Title          string `json:"title"`
			Visualizations []struct {
				Type        string                 `json:"type"`
				Title       string                 `json:"title"`
				DataMapping map[string]interface{} `json:"dataMapping"`
				Insight     string                 `json:"insight"`
			} `json:"visualizations"`
		} `json:"sections"`
	}

	jsonStr := extractJSON(result)
	if err := json.Unmarshal([]byte(jsonStr), &analysis); err != nil {
		// The LLM reply was not parseable JSON: degrade to a raw-data report.
		return g.createDefaultReport(opts)
	}

	report := &Report{
		ID:        uuid.New().String(),
		Title:     analysis.Title,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
		Theme:     opts.Theme,
		Sections:  make([]ReportSection, 0),
	}

	// Fixed: opts.MaxVisualizations was declared but ignored; a value > 0
	// now caps the total number of visualizations across all sections
	// (0 keeps the previous unlimited behavior).
	total := 0
	for _, sec := range analysis.Sections {
		section := ReportSection{
			ID:             uuid.New().String(),
			Title:          sec.Title,
			Visualizations: make([]Visualization, 0),
		}

		for _, viz := range sec.Visualizations {
			if opts.MaxVisualizations > 0 && total >= opts.MaxVisualizations {
				break
			}
			visualization := g.createVisualization(VisualizationType(viz.Type), viz.Title, opts.Data, viz.DataMapping)
			if visualization != nil {
				section.Visualizations = append(section.Visualizations, *visualization)
				total++
			}
		}

		// Drop sections whose visualizations all failed to materialize.
		if len(section.Visualizations) > 0 {
			report.Sections = append(report.Sections, section)
		}
	}

	return report, nil
}

// createDefaultReport is the fallback when the LLM analysis cannot be
// parsed: a single section rendering the raw data as a markdown JSON block.
func (g *Generator) createDefaultReport(opts GenerateOptions) (*Report, error) {
	report := &Report{
		ID:        uuid.New().String(),
		Title:     "Анализ данных",
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
		Sections: []ReportSection{
			{
				ID:    uuid.New().String(),
				Title: "Обзор",
				Visualizations: []Visualization{
					g.CreateMarkdown("", formatDataAsMarkdown(opts.Data)),
				},
			},
		},
	}
	return report, nil
}

// createVisualization dispatches to the per-type builder; unknown types
// fall back to a markdown dump of the data.
func (g *Generator) createVisualization(vizType VisualizationType, title string, data interface{}, mapping map[string]interface{}) *Visualization {
	switch vizType {
	case VizBarChart, VizLineChart, VizAreaChart:
		return g.createChartVisualization(vizType, title, data, mapping)
	case VizPieChart, VizDonutChart:
		return g.createPieVisualization(vizType, title, data, mapping)
	case VizTable:
		return g.createTableVisualization(title, data, mapping)
	case VizStatCards:
		return g.createStatCardsVisualization(title, data, mapping)
	case VizKPI:
		return g.createKPIVisualization(title, data, mapping)
	case VizTimeline:
		return g.createTimelineVisualization(title, data, mapping)
	case VizComparison:
		return g.createComparisonVisualization(title, data, mapping)
	case VizProgress:
		return g.createProgressVisualization(title, data, mapping)
	case VizMarkdown:
		content := extractStringFromData(data, mapping, "content")
		viz := g.CreateMarkdown(title, content)
		return &viz
	default:
		viz := g.CreateMarkdown(title, formatDataAsMarkdown(data))
		return &viz
	}
}

// createChartVisualization builds axis-based charts (bar/line/area) from
// either a key->value map or a slice of {label, value} records.
func (g *Generator) createChartVisualization(vizType VisualizationType, title string, data interface{}, mapping map[string]interface{}) *Visualization {
	chartData := &ChartData{
		Labels:   make([]string, 0),
		Datasets: make([]ChartDataset, 0),
	}

	if dataMap, ok := data.(map[string]interface{}); ok {
		labels := make([]string, 0)
		values := 
make([]float64, 0)

		// NOTE(review): Go map iteration order is randomized, so label order
		// varies between runs — confirm the renderer does not depend on it.
		for k, v := range dataMap {
			labels = append(labels, k)
			values = append(values, toFloat64(v))
		}

		chartData.Labels = labels
		chartData.Datasets = append(chartData.Datasets, ChartDataset{
			Label: title,
			Data:  values,
		})
	}

	if dataSlice, ok := data.([]interface{}); ok {
		for _, item := range dataSlice {
			row, ok := item.(map[string]interface{})
			if !ok {
				continue
			}
			if label, ok := row["label"].(string); ok {
				chartData.Labels = append(chartData.Labels, label)
			}
			value, present := row["value"]
			if !present {
				continue
			}
			// Create the single dataset lazily on the first value seen.
			if len(chartData.Datasets) == 0 {
				chartData.Datasets = append(chartData.Datasets, ChartDataset{Label: title, Data: []float64{}})
			}
			chartData.Datasets[0].Data = append(chartData.Datasets[0].Data, toFloat64(value))
		}
	}

	return &Visualization{
		ID:    uuid.New().String(),
		Type:  vizType,
		Title: title,
		Data:  chartData,
		Config: VisualizationConfig{
			ShowLegend:  true,
			ShowTooltip: true,
			ShowGrid:    true,
			Animated:    true,
		},
		Responsive: true,
	}
}

// createPieVisualization builds a pie/donut chart with one dataset whose
// slices come from the map's key/value pairs.
func (g *Generator) createPieVisualization(vizType VisualizationType, title string, data interface{}, mapping map[string]interface{}) *Visualization {
	dataset := ChartDataset{Label: title, Data: []float64{}}
	labels := make([]string, 0)

	// NOTE(review): map iteration order is randomized — slice order differs
	// between runs.
	if kv, ok := data.(map[string]interface{}); ok {
		for name, raw := range kv {
			labels = append(labels, name)
			dataset.Data = append(dataset.Data, toFloat64(raw))
		}
	}

	chart := &ChartData{
		Labels:   labels,
		Datasets: []ChartDataset{dataset},
	}

	return &Visualization{
		ID:    uuid.New().String(),
		Type:  vizType,
		Title: title,
		Data:  chart,
		Config: VisualizationConfig{
			ShowLegend:  true,
			ShowTooltip: true,
			ShowValues:  true,
			Animated:    true,
		},
		Style: VisualizationStyle{
			Height: "300px",
		},
		Responsive: true,
	}
}

// createTableVisualization builds a table either from a slice of records
// (columns inferred from the first record) or from a map rendered as
// key/value rows.
func (g *Generator) createTableVisualization(title string, data interface{}, mapping map[string]interface{}) *Visualization {
	tableData := &TableData{
		Columns: make([]TableColumn, 0),
		Rows:    make([]TableRow, 0),
	}

	if records, ok := data.([]interface{}); ok && len(records) > 0 {
		// Column set comes from the first record's keys.
		// NOTE(review): map iteration order is randomized — column order
		// differs between runs.
		if first, ok := records[0].(map[string]interface{}); ok {
			for key := range first {
				tableData.Columns = append(tableData.Columns, TableColumn{
					Key:      key,
					Label:    formatColumnLabel(key),
					Sortable: true,
				})
			}
		}

		for _, rec := range records {
			if row, ok := rec.(map[string]interface{}); ok {
				tableData.Rows = append(tableData.Rows, TableRow(row))
			}
		}
	}

	if kv, ok := data.(map[string]interface{}); ok {
		tableData.Columns = []TableColumn{
			{Key: "key", Label: "Параметр", Sortable: true},
			{Key: "value", Label: "Значение", Sortable: true},
		}

		for k, v := range kv {
			tableData.Rows = append(tableData.Rows, TableRow{
				"key":   k,
				"value": v,
			})
		}
	}

	tableData.Summary = &TableSummary{
		TotalRows: len(tableData.Rows),
	}

	return &Visualization{
		ID:    uuid.New().String(),
		Type:  VizTable,
		Title: title,
		Data:  tableData,
		Config: VisualizationConfig{
			Sortable:   true,
			Searchable: true,
			Paginated:  len(tableData.Rows) > 10,
			PageSize:   10,
		},
		Responsive: true,
	}
}

// createStatCardsVisualization turns a key->value map into a strip of stat
// cards, cycling through a fixed color palette.
func (g *Generator) createStatCardsVisualization(title string, data interface{}, mapping map[string]interface{}) *Visualization {
	palette := []string{"#3B82F6", "#10B981", "#F59E0B", "#EF4444", "#8B5CF6", "#EC4899"}
	cardsData := &StatCardsData{Cards: make([]StatCard, 0)}

	if kv, ok := data.(map[string]interface{}); ok {
		i := 0
		for key, raw := range kv {
			cardsData.Cards = append(cardsData.Cards, StatCard{
				ID:    uuid.New().String(),
				Title: formatColumnLabel(key),
				Value: raw,
				Color: palette[i%len(palette)],
			})
			i++
		}
	}

	return &Visualization{
		ID:    uuid.New().String(),
		Type:  VizStatCards,
		Title: title,
		Data:  cardsData,
		Config: 
VisualizationConfig{
			Animated: true,
		},
		Responsive: true,
	}
}

// createKPIVisualization builds a single-KPI tile. A map payload may carry
// "value", "change", "target" and "unit" keys; anything else is used as the
// value directly.
func (g *Generator) createKPIVisualization(title string, data interface{}, mapping map[string]interface{}) *Visualization {
	kpi := &KPIData{Value: data}

	if kv, ok := data.(map[string]interface{}); ok {
		if raw, ok := kv["value"]; ok {
			kpi.Value = raw
		}
		if delta, ok := kv["change"].(float64); ok {
			kpi.Change = delta
			kpi.ChangeType = "decrease"
			if delta >= 0 {
				kpi.ChangeType = "increase"
			}
		}
		if raw, ok := kv["target"]; ok {
			kpi.Target = raw
		}
		if unit, ok := kv["unit"].(string); ok {
			kpi.Unit = unit
		}
	}

	return &Visualization{
		ID:    uuid.New().String(),
		Type:  VizKPI,
		Title: title,
		Data:  kpi,
		Config: VisualizationConfig{
			Animated:   true,
			ShowValues: true,
		},
		Style: VisualizationStyle{
			MinHeight: "150px",
		},
		Responsive: true,
	}
}

// createTimelineVisualization builds a date-sorted timeline from a slice of
// records with "date" (RFC3339), "title" and "description" keys.
func (g *Generator) createTimelineVisualization(title string, data interface{}, mapping map[string]interface{}) *Visualization {
	timeline := &TimelineData{Events: make([]TimelineEvent, 0)}

	if items, ok := data.([]interface{}); ok {
		for _, raw := range items {
			entry, ok := raw.(map[string]interface{})
			if !ok {
				continue
			}
			ev := TimelineEvent{ID: uuid.New().String()}
			if s, ok := entry["date"].(string); ok {
				// Unparseable dates silently become the zero time and
				// therefore sort first.
				ev.Date, _ = time.Parse(time.RFC3339, s)
			}
			if s, ok := entry["title"].(string); ok {
				ev.Title = s
			}
			if s, ok := entry["description"].(string); ok {
				ev.Description = s
			}
			timeline.Events = append(timeline.Events, ev)
		}
	}

	sort.Slice(timeline.Events, func(i, j int) bool {
		return timeline.Events[i].Date.Before(timeline.Events[j].Date)
	})

	return &Visualization{
		ID:    uuid.New().String(),
		Type:  VizTimeline,
		Title: title,
		Data:  timeline,
		Config: VisualizationConfig{
			Animated: true,
		},
		Responsive: true,
	}
}

// createComparisonVisualization builds a side-by-side comparison from a
// slice of records; categories are derived from the first record's keys.
func (g *Generator) createComparisonVisualization(title string, data interface{}, mapping map[string]interface{}) *Visualization {
	comp := &ComparisonData{
		Items:      make([]ComparisonItem, 0),
		Categories: make([]string, 0),
	}

	if items, ok := data.([]interface{}); ok && len(items) > 0 {
		// Identity/presentation fields are excluded from the category list.
		// NOTE(review): map iteration order is randomized, so category order
		// differs between runs — confirm the renderer does not rely on it.
		if first, ok := items[0].(map[string]interface{}); ok {
			for key := range first {
				switch key {
				case "name", "id", "image":
				default:
					comp.Categories = append(comp.Categories, key)
				}
			}
		}

		for _, raw := range items {
			rec, ok := raw.(map[string]interface{})
			if !ok {
				continue
			}
			item := ComparisonItem{
				ID:     uuid.New().String(),
				Values: make(map[string]interface{}),
			}
			if s, ok := rec["name"].(string); ok {
				item.Name = s
			}
			if s, ok := rec["image"].(string); ok {
				item.Image = s
			}
			for _, cat := range comp.Categories {
				if v, ok := rec[cat]; ok {
					item.Values[cat] = v
				}
			}
			comp.Items = append(comp.Items, item)
		}
	}

	return &Visualization{
		ID:    uuid.New().String(),
		Type:  VizComparison,
		Title: title,
		Data:  comp,
		Config: VisualizationConfig{
			ShowLabels: true,
		},
		Responsive: true,
	}
}

// createProgressVisualization builds a progress bar. A map payload may set
// "current", "total" and "label"; a bare float64 is treated as the current
// value against the default total of 100.
func (g *Generator) createProgressVisualization(title string, data interface{}, mapping map[string]interface{}) *Visualization {
	progress := &ProgressData{
		Current:   0,
		Total:     100,
		ShowValue: true,
		Animated:  true,
	}

	if kv, ok := data.(map[string]interface{}); ok {
		if v, ok := kv["current"]; ok {
			progress.Current = toFloat64(v)
		}
		if v, ok := kv["total"]; ok {
			progress.Total = toFloat64(v)
		}
		if s, ok := kv["label"].(string); ok {
			progress.Label = s
		}
	}

	if v, ok := data.(float64); ok {
		progress.Current = v
	}

	return &Visualization{
		ID:    uuid.New().String(),
		Type:  VizProgress,
		Title: title,
		Data:  progress,
		Config: VisualizationConfig{
			Animated:   true,
			ShowValues: true,
		},
Responsive: true, + } +} + +func (g *Generator) CreateBarChart(title string, labels []string, values []float64) Visualization { + return Visualization{ + ID: uuid.New().String(), + Type: VizBarChart, + Title: title, + Data: &ChartData{ + Labels: labels, + Datasets: []ChartDataset{ + {Label: title, Data: values}, + }, + }, + Config: VisualizationConfig{ + ShowLegend: true, + ShowTooltip: true, + Animated: true, + }, + Responsive: true, + } +} + +func (g *Generator) CreateLineChart(title string, labels []string, datasets []ChartDataset) Visualization { + return Visualization{ + ID: uuid.New().String(), + Type: VizLineChart, + Title: title, + Data: &ChartData{ + Labels: labels, + Datasets: datasets, + }, + Config: VisualizationConfig{ + ShowLegend: true, + ShowTooltip: true, + ShowGrid: true, + Animated: true, + }, + Responsive: true, + } +} + +func (g *Generator) CreatePieChart(title string, labels []string, values []float64) Visualization { + return Visualization{ + ID: uuid.New().String(), + Type: VizPieChart, + Title: title, + Data: &ChartData{ + Labels: labels, + Datasets: []ChartDataset{ + {Label: title, Data: values}, + }, + }, + Config: VisualizationConfig{ + ShowLegend: true, + ShowTooltip: true, + ShowValues: true, + }, + Responsive: true, + } +} + +func (g *Generator) CreateTable(title string, columns []TableColumn, rows []TableRow) Visualization { + return Visualization{ + ID: uuid.New().String(), + Type: VizTable, + Title: title, + Data: &TableData{ + Columns: columns, + Rows: rows, + Summary: &TableSummary{TotalRows: len(rows)}, + }, + Config: VisualizationConfig{ + Sortable: true, + Searchable: true, + Paginated: len(rows) > 10, + PageSize: 10, + }, + Responsive: true, + } +} + +func (g *Generator) CreateStatCards(title string, cards []StatCard) Visualization { + return Visualization{ + ID: uuid.New().String(), + Type: VizStatCards, + Title: title, + Data: &StatCardsData{Cards: cards}, + Config: VisualizationConfig{ + Animated: true, + }, + Responsive: 
true, + } +} + +func (g *Generator) CreateKPI(title string, value interface{}, change float64, unit string) Visualization { + changeType := "neutral" + if change > 0 { + changeType = "increase" + } else if change < 0 { + changeType = "decrease" + } + + return Visualization{ + ID: uuid.New().String(), + Type: VizKPI, + Title: title, + Data: &KPIData{ + Value: value, + Change: change, + ChangeType: changeType, + Unit: unit, + }, + Config: VisualizationConfig{ + Animated: true, + }, + Responsive: true, + } +} + +func (g *Generator) CreateMarkdown(title string, content string) Visualization { + return Visualization{ + ID: uuid.New().String(), + Type: VizMarkdown, + Title: title, + Data: &MarkdownData{Content: content}, + Responsive: true, + } +} + +func (g *Generator) CreateCodeBlock(title, code, language string) Visualization { + return Visualization{ + ID: uuid.New().String(), + Type: VizCodeBlock, + Title: title, + Data: &CodeBlockData{ + Code: code, + Language: language, + ShowLineNum: true, + Copyable: true, + }, + Responsive: true, + } +} + +func (g *Generator) CreateTabs(title string, tabs []TabItem) Visualization { + return Visualization{ + ID: uuid.New().String(), + Type: VizTabs, + Title: title, + Data: &TabsData{Tabs: tabs}, + Responsive: true, + } +} + +func (g *Generator) CreateAccordion(title string, items []AccordionItem) Visualization { + return Visualization{ + ID: uuid.New().String(), + Type: VizAccordion, + Title: title, + Data: &AccordionData{Items: items}, + Config: VisualizationConfig{ + Animated: true, + }, + Responsive: true, + } +} + +func (g *Generator) CreateHeatmap(title string, xLabels, yLabels []string, values [][]float64) Visualization { + return Visualization{ + ID: uuid.New().String(), + Type: VizHeatmap, + Title: title, + Data: &HeatmapData{ + XLabels: xLabels, + YLabels: yLabels, + Values: values, + }, + Config: VisualizationConfig{ + ShowTooltip: true, + ShowLabels: true, + }, + Responsive: true, + } +} + +func extractJSON(text 
string) string { + re := regexp.MustCompile(`(?s)\{.*\}`) + match := re.FindString(text) + if match != "" { + return match + } + return "{}" +} + +func toFloat64(v interface{}) float64 { + switch val := v.(type) { + case float64: + return val + case float32: + return float64(val) + case int: + return float64(val) + case int64: + return float64(val) + case string: + f, _ := strconv.ParseFloat(val, 64) + return f + default: + return 0 + } +} + +func formatColumnLabel(key string) string { + key = strings.ReplaceAll(key, "_", " ") + key = strings.ReplaceAll(key, "-", " ") + + words := strings.Fields(key) + for i, word := range words { + if len(word) > 0 { + words[i] = strings.ToUpper(string(word[0])) + strings.ToLower(word[1:]) + } + } + + return strings.Join(words, " ") +} + +func extractStringFromData(data interface{}, mapping map[string]interface{}, key string) string { + if dataMap, ok := data.(map[string]interface{}); ok { + if v, ok := dataMap[key].(string); ok { + return v + } + } + return fmt.Sprintf("%v", data) +} + +func formatDataAsMarkdown(data interface{}) string { + jsonBytes, err := json.MarshalIndent(data, "", " ") + if err != nil { + return fmt.Sprintf("%v", data) + } + return "```json\n" + string(jsonBytes) + "\n```" +} diff --git a/backend/internal/labs/types.go b/backend/internal/labs/types.go new file mode 100644 index 0000000..77cba2f --- /dev/null +++ b/backend/internal/labs/types.go @@ -0,0 +1,335 @@ +package labs + +import "time" + +type VisualizationType string + +const ( + VizBarChart VisualizationType = "bar_chart" + VizLineChart VisualizationType = "line_chart" + VizPieChart VisualizationType = "pie_chart" + VizDonutChart VisualizationType = "donut_chart" + VizAreaChart VisualizationType = "area_chart" + VizScatterPlot VisualizationType = "scatter_plot" + VizHeatmap VisualizationType = "heatmap" + VizTreemap VisualizationType = "treemap" + VizGauge VisualizationType = "gauge" + VizRadar VisualizationType = "radar" + VizSankey 
VisualizationType = "sankey"
	VizTable       VisualizationType = "table"
	VizTimeline    VisualizationType = "timeline"
	VizKPI         VisualizationType = "kpi"
	VizProgress    VisualizationType = "progress"
	VizComparison  VisualizationType = "comparison"
	VizStatCards   VisualizationType = "stat_cards"
	VizMap         VisualizationType = "map"
	VizFlowChart   VisualizationType = "flow_chart"
	VizOrgChart    VisualizationType = "org_chart"
	VizCodeBlock   VisualizationType = "code_block"
	VizMarkdown    VisualizationType = "markdown"
	VizCollapsible VisualizationType = "collapsible"
	VizTabs        VisualizationType = "tabs"
	VizAccordion   VisualizationType = "accordion"
	VizStepper     VisualizationType = "stepper"
	VizForm        VisualizationType = "form"
)

// Visualization is a single renderable widget in a Labs report: a typed
// payload in Data plus presentation knobs in Config and Style.
type Visualization struct {
	ID          string                `json:"id"`
	Type        VisualizationType     `json:"type"`
	Title       string                `json:"title,omitempty"`
	Description string                `json:"description,omitempty"`
	Data        interface{}           `json:"data"`
	Config      VisualizationConfig   `json:"config,omitempty"`
	Style       VisualizationStyle    `json:"style,omitempty"`
	Actions     []VisualizationAction `json:"actions,omitempty"`
	Responsive  bool                  `json:"responsive"`
}

// VisualizationConfig holds behavior toggles shared by all widget types;
// presumably only the fields relevant to a given Type are consulted by the
// renderer (renderer not visible here — confirm).
type VisualizationConfig struct {
	ShowLegend      bool     `json:"showLegend,omitempty"`
	ShowGrid        bool     `json:"showGrid,omitempty"`
	ShowTooltip     bool     `json:"showTooltip,omitempty"`
	ShowLabels      bool     `json:"showLabels,omitempty"`
	ShowValues      bool     `json:"showValues,omitempty"`
	Animated        bool     `json:"animated,omitempty"`
	Stacked         bool     `json:"stacked,omitempty"`
	Horizontal      bool     `json:"horizontal,omitempty"`
	Sortable        bool     `json:"sortable,omitempty"`
	Filterable      bool     `json:"filterable,omitempty"`
	Searchable      bool     `json:"searchable,omitempty"`
	Paginated       bool     `json:"paginated,omitempty"`
	PageSize        int      `json:"pageSize,omitempty"`
	Expandable      bool     `json:"expandable,omitempty"`
	DefaultExpanded bool     `json:"defaultExpanded,omitempty"`
	XAxisLabel      string   `json:"xAxisLabel,omitempty"`
	YAxisLabel      string   `json:"yAxisLabel,omitempty"`
	Colors          []string `json:"colors,omitempty"`
	DateFormat      string   `json:"dateFormat,omitempty"`
	NumberFormat    string   `json:"numberFormat,omitempty"`
	CurrencySymbol  string   `json:"currencySymbol,omitempty"`
}

// VisualizationStyle carries CSS-like presentation overrides for a widget.
type VisualizationStyle struct {
	Width        string `json:"width,omitempty"`
	Height       string `json:"height,omitempty"`
	MinHeight    string `json:"minHeight,omitempty"`
	MaxHeight    string `json:"maxHeight,omitempty"`
	Padding      string `json:"padding,omitempty"`
	Margin       string `json:"margin,omitempty"`
	BorderRadius string `json:"borderRadius,omitempty"`
	Background   string `json:"background,omitempty"`
	Shadow       string `json:"shadow,omitempty"`
	FontFamily   string `json:"fontFamily,omitempty"`
	FontSize     string `json:"fontSize,omitempty"`
	TextColor    string `json:"textColor,omitempty"`
	AccentColor  string `json:"accentColor,omitempty"`
	GridColor    string `json:"gridColor,omitempty"`
}

// VisualizationAction describes a user-invocable action attached to a
// widget (handled client-side; handler semantics not visible here).
type VisualizationAction struct {
	ID      string `json:"id"`
	Label   string `json:"label"`
	Icon    string `json:"icon,omitempty"`
	Type    string `json:"type"`
	Handler string `json:"handler,omitempty"`
	URL     string `json:"url,omitempty"`
}

// ChartData is the payload for bar/line/pie/donut/area charts.
type ChartData struct {
	Labels   []string       `json:"labels"`
	Datasets []ChartDataset `json:"datasets"`
}

// ChartDataset is one chart series, parallel to ChartData.Labels.
type ChartDataset struct {
	Label           string    `json:"label"`
	Data            []float64 `json:"data"`
	BackgroundColor string    `json:"backgroundColor,omitempty"`
	BorderColor     string    `json:"borderColor,omitempty"`
	Fill            bool      `json:"fill,omitempty"`
}

// TableData is the payload for table visualizations.
type TableData struct {
	Columns []TableColumn `json:"columns"`
	Rows    []TableRow    `json:"rows"`
	Summary *TableSummary `json:"summary,omitempty"`
}

// TableColumn describes one column's key, display label and rendering
// hints.
type TableColumn struct {
	Key       string `json:"key"`
	Label     string `json:"label"`
	Type      string `json:"type,omitempty"`
	Width     string `json:"width,omitempty"`
	Sortable  bool   `json:"sortable,omitempty"`
	Align     string `json:"align,omitempty"`
	Format    string `json:"format,omitempty"`
	Highlight bool   `json:"highlight,omitempty"`
}

// TableRow maps column keys to cell values.
type TableRow map[string]interface{}

// TableSummary carries the row count and optional per-column aggregations.
type TableSummary struct {
	TotalRows    int                    `json:"totalRows"`
	Aggregations map[string]interface{} `json:"aggregations,omitempty"`
}

// TimelineData is the payload for timeline visualizations.
type TimelineData struct {
	Events []TimelineEvent `json:"events"`
}

// TimelineEvent is one dated entry on a timeline.
type TimelineEvent struct {
	ID          string    `json:"id"`
	Date        time.Time `json:"date"`
	Title       string    `json:"title"`
	Description string    `json:"description,omitempty"`
	Icon        string    `json:"icon,omitempty"`
	Color       string    `json:"color,omitempty"`
	Link        string    `json:"link,omitempty"`
}

// KPIData is the payload for a single KPI tile. ChangeType is set by the
// generator to "increase", "decrease" or "neutral".
type KPIData struct {
	Value       interface{} `json:"value"`
	PrevValue   interface{} `json:"prevValue,omitempty"`
	Change      float64     `json:"change,omitempty"`
	ChangeType  string      `json:"changeType,omitempty"`
	Unit        string      `json:"unit,omitempty"`
	Prefix      string      `json:"prefix,omitempty"`
	Suffix      string      `json:"suffix,omitempty"`
	Target      interface{} `json:"target,omitempty"`
	Trend       []float64   `json:"trend,omitempty"`
	Icon        string      `json:"icon,omitempty"`
	Color       string      `json:"color,omitempty"`
	Description string      `json:"description,omitempty"`
}

// StatCardsData is the payload for a strip of stat cards.
type StatCardsData struct {
	Cards []StatCard `json:"cards"`
}

// StatCard is one summary tile with an optional sparkline.
type StatCard struct {
	ID          string      `json:"id"`
	Title       string      `json:"title"`
	Value       interface{} `json:"value"`
	Change      float64     `json:"change,omitempty"`
	ChangeLabel string      `json:"changeLabel,omitempty"`
	Icon        string      `json:"icon,omitempty"`
	Color       string      `json:"color,omitempty"`
	Sparkline   []float64   `json:"sparkline,omitempty"`
}

// ComparisonData is the payload for side-by-side item comparisons.
type ComparisonData struct {
	Items      []ComparisonItem `json:"items"`
	Categories []string         `json:"categories"`
}

// ComparisonItem is one compared entity; Values is keyed by category name.
type ComparisonItem struct {
	ID     string                 `json:"id"`
	Name   string                 `json:"name"`
	Image  string                 `json:"image,omitempty"`
	Values map[string]interface{} `json:"values"`
}

// ProgressData is the payload for a progress bar (Current out of Total).
type ProgressData struct {
	Current   float64 `json:"current"`
	Total     float64 `json:"total"`
	Label     string  `json:"label,omitempty"`
	Color     string  `json:"color,omitempty"`
	ShowValue bool    `json:"showValue,omitempty"`
	Animated  bool    `json:"animated,omitempty"`
}

// HeatmapData is the payload for a matrix heatmap.
type HeatmapData struct {
	XLabels []string    `json:"xLabels"`
	YLabels []string    `json:"yLabels"`
	Values  [][]float64 `json:"values"`
	Min     float64     `json:"min,omitempty"`
	Max     float64     `json:"max,omitempty"`
}

// MapData is the payload for a geographic map widget.
// NOTE(review): the coordinate convention of Center/Position (lat,lng vs
// lng,lat) is not established by this file — confirm against the renderer.
type MapData struct {
	Center  []float64   `json:"center"`
	Zoom    int         `json:"zoom"`
	Markers []MapMarker `json:"markers,omitempty"`
	Regions []MapRegion `json:"regions,omitempty"`
}

// MapMarker is a point marker with an optional popup.
type MapMarker struct {
	ID       string    `json:"id"`
	Position []float64 `json:"position"`
	Label    string    `json:"label,omitempty"`
	Icon     string    `json:"icon,omitempty"`
	Color    string    `json:"color,omitempty"`
	Popup    string    `json:"popup,omitempty"`
}

// MapRegion is a named region with an associated value/color.
type MapRegion struct {
	ID    string  `json:"id"`
	Name  string  `json:"name"`
	Value float64 `json:"value"`
	Color string  `json:"color,omitempty"`
}

// CollapsibleData is the payload for a collapsible container.
type CollapsibleData struct {
	Title       string          `json:"title"`
	Content     interface{}     `json:"content"`
	DefaultOpen bool            `json:"defaultOpen,omitempty"`
	Icon        string          `json:"icon,omitempty"`
	Children    []Visualization `json:"children,omitempty"`
}

// TabsData is the payload for a tabbed container.
type TabsData struct {
	Tabs []TabItem `json:"tabs"`
}

// TabItem is one tab: free-form content and/or nested visualizations.
type TabItem struct {
	ID       string          `json:"id"`
	Label    string          `json:"label"`
	Icon     string          `json:"icon,omitempty"`
	Content  interface{}     `json:"content"`
	Children []Visualization `json:"children,omitempty"`
}

// AccordionData is the payload for an accordion container.
type AccordionData struct {
	Items []AccordionItem `json:"items"`
}

// AccordionItem is one expandable accordion entry.
type AccordionItem struct {
	ID      string      `json:"id"`
	Title   string      `json:"title"`
	Content interface{} `json:"content"`
	Icon    string      `json:"icon,omitempty"`
	Open    bool        `json:"open,omitempty"`
}

// StepperData is the payload for a multi-step progress indicator.
type StepperData struct {
	Steps       []StepperStep `json:"steps"`
	CurrentStep int           `json:"currentStep"`
	Orientation string        `json:"orientation,omitempty"`
}

// StepperStep is one step in a stepper.
type StepperStep struct {
	ID          string      `json:"id"`
	Label       string      `json:"label"`
	Description string      `json:"description,omitempty"`
	Content     interface{} `json:"content,omitempty"`
	Status      string      `json:"status,omitempty"`
	Icon        string      `json:"icon,omitempty"`
}

// FormData is the payload for a form widget.
type FormData struct {
	Fields      []FormField `json:"fields"`
	SubmitLabel string      `json:"submitLabel,omitempty"`
	Layout      string      `json:"layout,omitempty"`
}

// FormField describes a single form input.
type FormField struct {
	ID          string       `json:"id"`
	Type        string       `json:"type"`
	Label       string       `json:"label"`
	Placeholder string       `json:"placeholder,omitempty"`
	Value       interface{}  `json:"value,omitempty"`
	Options     []FormOption `json:"options,omitempty"`
	Required    bool         `json:"required,omitempty"`
	Validation  string       `json:"validation,omitempty"`
}

// FormOption is one choice for select-like form fields.
type FormOption struct {
	Value string `json:"value"`
	Label string `json:"label"`
}

// CodeBlockData is the payload for a syntax-highlighted code block.
type CodeBlockData struct {
	Code        string `json:"code"`
	Language    string `json:"language"`
	Filename    string `json:"filename,omitempty"`
	Highlight   []int  `json:"highlight,omitempty"`
	ShowLineNum bool   `json:"showLineNum,omitempty"`
	Copyable    bool   `json:"copyable,omitempty"`
}

// MarkdownData is the payload for rendered markdown content.
type MarkdownData struct {
	Content string `json:"content"`
}

// Report is a generated Labs report: ordered sections of visualizations
// plus presentation metadata.
type Report struct {
	ID          string          `json:"id"`
	Title       string          `json:"title"`
	Description string          `json:"description,omitempty"`
	Sections    []ReportSection `json:"sections"`
	CreatedAt   time.Time       `json:"createdAt"`
	UpdatedAt   time.Time       `json:"updatedAt"`
	Author      string          `json:"author,omitempty"`
	Tags        []string        `json:"tags,omitempty"`
	IsPublic    bool            `json:"isPublic"`
	Theme       string          `json:"theme,omitempty"`
	CustomCSS   string          `json:"customCss,omitempty"`
}

// ReportSection groups visualizations under an optional title and layout.
type ReportSection struct {
	ID             string          `json:"id"`
	Title          string          `json:"title,omitempty"`
	Description    string          `json:"description,omitempty"`
	Visualizations []Visualization `json:"visualizations"`
	Layout         string          `json:"layout,omitempty"`
	Columns        int             `json:"columns,omitempty"`
}
diff --git a/backend/internal/learning/stepper.go b/backend/internal/learning/stepper.go
new file mode 100644
index 0000000..1122ecf
--- /dev/null
+++ 
b/backend/internal/learning/stepper.go
@@ -0,0 +1,701 @@
package learning

import (
	"context"
	"encoding/json"
	"fmt"
	"regexp"
	"time"

	"github.com/gooseek/backend/internal/llm"
	"github.com/google/uuid"
)

// LearningMode selects how a lesson is delivered to the learner.
type LearningMode string

const (
	ModeExplain     LearningMode = "explain"
	ModeGuided      LearningMode = "guided"
	ModeInteractive LearningMode = "interactive"
	ModePractice    LearningMode = "practice"
	ModeQuiz        LearningMode = "quiz"
)

// DifficultyLevel grades a lesson's difficulty.
type DifficultyLevel string

const (
	DifficultyBeginner     DifficultyLevel = "beginner"
	DifficultyIntermediate DifficultyLevel = "intermediate"
	DifficultyAdvanced     DifficultyLevel = "advanced"
	DifficultyExpert       DifficultyLevel = "expert"
)

// StepByStepLesson is a complete ordered lesson together with the
// learner's progress state.
type StepByStepLesson struct {
	ID            string          `json:"id"`
	Title         string          `json:"title"`
	Description   string          `json:"description"`
	Topic         string          `json:"topic"`
	Difficulty    DifficultyLevel `json:"difficulty"`
	Mode          LearningMode    `json:"mode"`
	Steps         []LearningStep  `json:"steps"`
	Prerequisites []string        `json:"prerequisites,omitempty"`
	LearningGoals []string        `json:"learningGoals"`
	// Estimated duration in minutes (per the JSON tag).
	EstimatedTime int            `json:"estimatedTimeMinutes"`
	Progress      LessonProgress `json:"progress"`
	CreatedAt     time.Time      `json:"createdAt"`
	UpdatedAt     time.Time      `json:"updatedAt"`
}

// LearningStep is one unit of a lesson; the optional pointer members
// (Interaction, Practice, Quiz) are nil when the step type does not use
// them.
type LearningStep struct {
	ID          string                 `json:"id"`
	Number      int                    `json:"number"`
	Title       string                 `json:"title"`
	Type        StepType               `json:"type"`
	Content     StepContent            `json:"content"`
	Interaction *StepInteraction       `json:"interaction,omitempty"`
	Hints       []string               `json:"hints,omitempty"`
	Examples    []Example              `json:"examples,omitempty"`
	Practice    *PracticeExercise      `json:"practice,omitempty"`
	Quiz        *QuizQuestion          `json:"quiz,omitempty"`
	Duration    int                    `json:"durationSeconds,omitempty"`
	Status      StepStatus             `json:"status"`
	Metadata    map[string]interface{} `json:"metadata,omitempty"`
}

// StepType enumerates the kinds of lesson steps.
type StepType string

const (
	StepExplanation   StepType = "explanation"
	StepVisualization StepType = "visualization"
	StepCode          StepType = "code"
	StepInteractive   StepType = "interactive"
	StepPractice      StepType = "practice"
	StepQuiz          StepType = "quiz"
	StepSummary       StepType = "summary"
	StepCheckpoint    StepType = "checkpoint"
)

// StepStatus tracks a step's lifecycle from locked through completion.
type StepStatus string

const (
	StatusLocked     StepStatus = "locked"
	StatusAvailable  StepStatus = "available"
	StatusInProgress StepStatus = "in_progress"
	StatusCompleted  StepStatus = "completed"
	StatusSkipped    StepStatus = "skipped"
)

// StepContent is the displayable body of a step; at most the fields
// relevant to the step's type are expected to be populated.
type StepContent struct {
	Text          string                `json:"text"`
	Markdown      string                `json:"markdown,omitempty"`
	HTML          string                `json:"html,omitempty"`
	Code          *CodeContent          `json:"code,omitempty"`
	Visualization *VisualizationContent `json:"visualization,omitempty"`
	Media         *MediaContent         `json:"media,omitempty"`
	Formula       string                `json:"formula,omitempty"`
	Highlights    []TextHighlight       `json:"highlights,omitempty"`
}

// CodeContent is an embedded code sample with editor/runner flags.
type CodeContent struct {
	Language    string           `json:"language"`
	Code        string           `json:"code"`
	Filename    string           `json:"filename,omitempty"`
	Runnable    bool             `json:"runnable"`
	Editable    bool             `json:"editable"`
	Highlights  []int            `json:"highlights,omitempty"`
	Annotations []CodeAnnotation `json:"annotations,omitempty"`
}

// CodeAnnotation attaches a note to a specific line of a code sample.
type CodeAnnotation struct {
	Line int    `json:"line"`
	Text string `json:"text"`
	Type string `json:"type"`
}

// VisualizationContent embeds a renderer-specific visualization payload.
type VisualizationContent struct {
	Type   string                 `json:"type"`
	Data   interface{}            `json:"data"`
	Config map[string]interface{} `json:"config,omitempty"`
}

// MediaContent references external media (e.g. by URL) shown in a step.
type MediaContent struct {
	Type     string `json:"type"`
	URL      string `json:"url"`
	Caption  string `json:"caption,omitempty"`
	Duration int    `json:"duration,omitempty"`
}

// TextHighlight marks a span of step text with a type and optional note.
type TextHighlight struct {
	Start int    `json:"start"`
	End   int    `json:"end"`
	Text  string `json:"text"`
	Type  string `json:"type"`
	Note  string `json:"note,omitempty"`
}

// StepInteraction describes a learner prompt with optional choices,
// validation rules and feedback messages.
type StepInteraction struct {
	Type       string      `json:"type"`
	Prompt     string      `json:"prompt"`
	Options    []Option    `json:"options,omitempty"`
	Validation *Validation `json:"validation,omitempty"`
	Feedback   *Feedback   `json:"feedback,omitempty"`
}

// Option is one selectable answer choice.
type Option struct {
	ID        string `json:"id"`
	Text      string `json:"text"`
	IsCorrect bool   `json:"isCorrect,omitempty"`
	Feedback  string `json:"feedback,omitempty"`
}

// Validation specifies how a free-form answer is checked (pattern match,
// expected value or keyword presence, depending on Type).
type Validation struct {
	Type     string   `json:"type"`
	Pattern  string   `json:"pattern,omitempty"`
	Expected string   `json:"expected,omitempty"`
	Keywords []string `json:"keywords,omitempty"`
}

// Feedback holds the messages shown for correct/incorrect/partial answers.
type Feedback struct {
	Correct   string `json:"correct"`
	Incorrect string `json:"incorrect"`
	Partial   string `json:"partial,omitempty"`
}

// Example is a worked example with optional input/output and code.
type Example struct {
	Title       string `json:"title"`
	Description string `json:"description"`
	Input       string `json:"input,omitempty"`
	Output      string `json:"output,omitempty"`
	Code        string `json:"code,omitempty"`
	Language    string `json:"language,omitempty"`
}

// PracticeExercise is a hands-on task with starter code, a reference
// solution and optional test cases.
type PracticeExercise struct {
	Prompt       string     `json:"prompt"`
	Instructions string     `json:"instructions"`
	Starter      string     `json:"starter,omitempty"`
	Solution     string     `json:"solution,omitempty"`
	TestCases    []TestCase `json:"testCases,omitempty"`
	Hints        []string   `json:"hints,omitempty"`
}

// TestCase pairs an input with its expected output; hidden cases are not
// shown to the learner.
type TestCase struct {
	Input    string `json:"input"`
	Expected string `json:"expected"`
	Hidden   bool   `json:"hidden,omitempty"`
}

// QuizQuestion is a scored question; CorrectIndex lists the indices of the
// correct options.
type QuizQuestion struct {
	Question     string   `json:"question"`
	Type         string   `json:"type"`
	Options      []Option `json:"options,omitempty"`
	CorrectIndex []int    `json:"correctIndex,omitempty"`
	Explanation  string   `json:"explanation"`
	Points       int      `json:"points"`
}

// LessonProgress tracks a learner's position, score and timing through a
// lesson.
type LessonProgress struct {
	CurrentStep    int   `json:"currentStep"`
	CompletedSteps []int `json:"completedSteps"`
	Score          int   `json:"score"`
	MaxScore       int   `json:"maxScore"`
	// Accumulated time in seconds (per the JSON tag).
	TimeSpent    int        `json:"timeSpentSeconds"`
	StartedAt    time.Time  `json:"startedAt"`
	LastAccessed time.Time  `json:"lastAccessed"`
	Completed    bool       `json:"completed"`
	CompletedAt  *time.Time `json:"completedAt,omitempty"`
}

type LearningGenerator struct {
// NewLearningGenerator wires a generator to the given LLM client.
func NewLearningGenerator(llmClient llm.Client) *LearningGenerator {
	return &LearningGenerator{llm: llmClient}
}

// GenerateLessonOptions configures GenerateLesson. Zero values fall back to
// defaults: 10 steps, beginner difficulty, explain mode.
type GenerateLessonOptions struct {
	Topic       string
	Query       string
	Difficulty  DifficultyLevel
	Mode        LearningMode
	MaxSteps    int
	Locale      string // "ru" switches all generated content to Russian
	IncludeCode bool
	IncludeQuiz bool
}

// GenerateLesson asks the LLM for a structured lesson on opts.Topic and maps
// the returned JSON into a StepByStepLesson. The first step starts available,
// all later steps locked; MaxScore is 10 points per step. If the model's JSON
// cannot be parsed, a minimal one-step fallback lesson is returned instead of
// an error; a failed LLM call is returned as an error.
func (g *LearningGenerator) GenerateLesson(ctx context.Context, opts GenerateLessonOptions) (*StepByStepLesson, error) {
	// Apply defaults for unset options.
	if opts.MaxSteps == 0 {
		opts.MaxSteps = 10
	}
	if opts.Difficulty == "" {
		opts.Difficulty = DifficultyBeginner
	}
	if opts.Mode == "" {
		opts.Mode = ModeExplain
	}

	langInstruction := ""
	if opts.Locale == "ru" {
		langInstruction = "Generate all content in Russian language."
	}

	// The JSON template below must stay in sync with the anonymous `parsed`
	// struct used to decode the response.
	prompt := fmt.Sprintf(`Create a step-by-step educational lesson on the following topic.

Topic: %s
Query: %s
Difficulty: %s
Mode: %s
Max Steps: %d
Include Code Examples: %v
Include Quiz: %v
%s

Generate a structured lesson with these requirements:
1. Break down the concept into clear, digestible steps
2. Each step should build on the previous one
3. Include explanations, examples, and visualizations where helpful
4. For code topics, include runnable code snippets
5. Add practice exercises for interactive learning
6. Include quiz questions to test understanding

Respond in this JSON format:
{
  "title": "Lesson title",
  "description": "Brief description",
  "learningGoals": ["Goal 1", "Goal 2"],
  "estimatedTimeMinutes": 15,
  "steps": [
    {
      "title": "Step title",
      "type": "explanation|code|interactive|practice|quiz|summary",
      "content": {
        "text": "Main explanation",
        "markdown": "## Formatted content",
        "code": {"language": "python", "code": "example", "runnable": true},
        "formula": "optional LaTeX formula"
      },
      "hints": ["Hint 1"],
      "examples": [{"title": "Example", "description": "...", "code": "..."}],
      "quiz": {
        "question": "...",
        "type": "multiple_choice",
        "options": [{"id": "a", "text": "Option A", "isCorrect": false}],
        "explanation": "..."
      }
    }
  ]
}`, opts.Topic, opts.Query, opts.Difficulty, opts.Mode, opts.MaxSteps, opts.IncludeCode, opts.IncludeQuiz, langInstruction)

	result, err := g.llm.GenerateText(ctx, llm.StreamRequest{
		Messages: []llm.Message{{Role: "user", Content: prompt}},
	})
	if err != nil {
		return nil, err
	}

	// Strip any prose/markdown wrapper the model added around the JSON.
	jsonStr := extractJSON(result)

	var parsed struct {
		Title                string   `json:"title"`
		Description          string   `json:"description"`
		LearningGoals        []string `json:"learningGoals"`
		EstimatedTimeMinutes int      `json:"estimatedTimeMinutes"`
		Steps                []struct {
			Title   string `json:"title"`
			Type    string `json:"type"`
			Content struct {
				Text     string `json:"text"`
				Markdown string `json:"markdown"`
				Code     *struct {
					Language string `json:"language"`
					Code     string `json:"code"`
					Runnable bool   `json:"runnable"`
				} `json:"code"`
				Formula string `json:"formula"`
			} `json:"content"`
			Hints    []string `json:"hints"`
			Examples []struct {
				Title       string `json:"title"`
				Description string `json:"description"`
				Code        string `json:"code"`
			} `json:"examples"`
			Quiz *struct {
				Question string `json:"question"`
				Type     string `json:"type"`
				Options  []struct {
					ID        string `json:"id"`
					Text      string `json:"text"`
					IsCorrect bool   `json:"isCorrect"`
				} `json:"options"`
				Explanation string `json:"explanation"`
			} `json:"quiz"`
		} `json:"steps"`
	}

	if err := json.Unmarshal([]byte(jsonStr), &parsed); err != nil {
		// Malformed model output: degrade to a minimal lesson rather than
		// failing the request.
		return g.createDefaultLesson(opts)
	}

	lesson := &StepByStepLesson{
		ID:            uuid.New().String(),
		Title:         parsed.Title,
		Description:   parsed.Description,
		Topic:         opts.Topic,
		Difficulty:    opts.Difficulty,
		Mode:          opts.Mode,
		LearningGoals: parsed.LearningGoals,
		EstimatedTime: parsed.EstimatedTimeMinutes,
		Steps:         make([]LearningStep, 0),
		Progress: LessonProgress{
			CurrentStep:    0,
			CompletedSteps: []int{},
		},
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}

	for i, s := range parsed.Steps {
		step := LearningStep{
			ID:     uuid.New().String(),
			Number: i + 1,
			Title:  s.Title,
			Type:   StepType(s.Type),
			Content: StepContent{
				Text:     s.Content.Text,
				Markdown: s.Content.Markdown,
				Formula:  s.Content.Formula,
			},
			Hints:  s.Hints,
			Status: StatusAvailable,
		}

		// Only the first step is unlocked initially; CompleteStep unlocks
		// the rest one at a time.
		if i > 0 {
			step.Status = StatusLocked
		}

		if s.Content.Code != nil {
			step.Content.Code = &CodeContent{
				Language: s.Content.Code.Language,
				Code:     s.Content.Code.Code,
				Runnable: s.Content.Code.Runnable,
				Editable: true,
			}
		}

		for _, ex := range s.Examples {
			step.Examples = append(step.Examples, Example{
				Title:       ex.Title,
				Description: ex.Description,
				Code:        ex.Code,
			})
		}

		if s.Quiz != nil {
			quiz := &QuizQuestion{
				Question:    s.Quiz.Question,
				Type:        s.Quiz.Type,
				Explanation: s.Quiz.Explanation,
				Points:      10,
			}
			for _, opt := range s.Quiz.Options {
				quiz.Options = append(quiz.Options, Option{
					ID:        opt.ID,
					Text:      opt.Text,
					IsCorrect: opt.IsCorrect,
				})
			}
			step.Quiz = quiz
		}

		lesson.Steps = append(lesson.Steps, step)
	}

	lesson.Progress.MaxScore = len(lesson.Steps) * 10 // 10 points per step

	return lesson, nil
}

// createDefaultLesson is the fallback for unparseable LLM output: a single,
// always-available introduction step built from the raw query.
func (g *LearningGenerator) createDefaultLesson(opts GenerateLessonOptions) (*StepByStepLesson, error) {
	return &StepByStepLesson{
		ID:            uuid.New().String(),
		Title:         fmt.Sprintf("Learn: %s", opts.Topic),
		Description:   opts.Query,
		Topic:         opts.Topic,
		Difficulty:    opts.Difficulty,
		Mode:          opts.Mode,
		LearningGoals: []string{"Understand the basics"},
		EstimatedTime: 10,
		Steps: []LearningStep{
			{
				ID:     uuid.New().String(),
				Number: 1,
				Title:  "Introduction",
				Type:   StepExplanation,
				Content: StepContent{
					Text:     opts.Query,
					Markdown: fmt.Sprintf("# %s\n\n%s", opts.Topic, opts.Query),
				},
				Status: StatusAvailable,
			},
		},
		Progress: LessonProgress{
			CurrentStep:    0,
			CompletedSteps: []int{},
			MaxScore:       10,
		},
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}, nil
}
uuid.New().String(), + Title: fmt.Sprintf("Learn: %s", opts.Topic), + Description: opts.Query, + Topic: opts.Topic, + Difficulty: opts.Difficulty, + Mode: opts.Mode, + LearningGoals: []string{"Understand the basics"}, + EstimatedTime: 10, + Steps: []LearningStep{ + { + ID: uuid.New().String(), + Number: 1, + Title: "Introduction", + Type: StepExplanation, + Content: StepContent{ + Text: opts.Query, + Markdown: fmt.Sprintf("# %s\n\n%s", opts.Topic, opts.Query), + }, + Status: StatusAvailable, + }, + }, + Progress: LessonProgress{ + CurrentStep: 0, + CompletedSteps: []int{}, + MaxScore: 10, + }, + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + }, nil +} + +func (g *LearningGenerator) GenerateExplanation(ctx context.Context, topic string, difficulty DifficultyLevel, locale string) (*LearningStep, error) { + langInstruction := "" + if locale == "ru" { + langInstruction = "Respond in Russian." + } + + prompt := fmt.Sprintf(`Explain this topic step by step for a %s level learner. +Topic: %s +%s + +Format your response with clear sections: +1. Start with a simple definition +2. Explain key concepts +3. Provide a real-world analogy +4. Give a concrete example + +Use markdown formatting.`, difficulty, topic, langInstruction) + + result, err := g.llm.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + if err != nil { + return nil, err + } + + return &LearningStep{ + ID: uuid.New().String(), + Number: 1, + Title: topic, + Type: StepExplanation, + Content: StepContent{ + Markdown: result, + }, + Status: StatusAvailable, + }, nil +} + +func (g *LearningGenerator) GenerateQuiz(ctx context.Context, topic string, numQuestions int, difficulty DifficultyLevel, locale string) ([]QuizQuestion, error) { + langInstruction := "" + if locale == "ru" { + langInstruction = "Generate all questions and answers in Russian." 
+ } + + prompt := fmt.Sprintf(`Generate %d multiple choice quiz questions about: %s +Difficulty level: %s +%s + +Respond in JSON format: +{ + "questions": [ + { + "question": "Question text", + "options": [ + {"id": "a", "text": "Option A", "isCorrect": false}, + {"id": "b", "text": "Option B", "isCorrect": true}, + {"id": "c", "text": "Option C", "isCorrect": false}, + {"id": "d", "text": "Option D", "isCorrect": false} + ], + "explanation": "Why the correct answer is correct" + } + ] +}`, numQuestions, topic, difficulty, langInstruction) + + result, err := g.llm.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + if err != nil { + return nil, err + } + + jsonStr := extractJSON(result) + + var parsed struct { + Questions []struct { + Question string `json:"question"` + Options []struct { + ID string `json:"id"` + Text string `json:"text"` + IsCorrect bool `json:"isCorrect"` + } `json:"options"` + Explanation string `json:"explanation"` + } `json:"questions"` + } + + if err := json.Unmarshal([]byte(jsonStr), &parsed); err != nil { + return nil, err + } + + questions := make([]QuizQuestion, 0) + for _, q := range parsed.Questions { + quiz := QuizQuestion{ + Question: q.Question, + Type: "multiple_choice", + Explanation: q.Explanation, + Points: 10, + } + for _, opt := range q.Options { + quiz.Options = append(quiz.Options, Option{ + ID: opt.ID, + Text: opt.Text, + IsCorrect: opt.IsCorrect, + }) + } + questions = append(questions, quiz) + } + + return questions, nil +} + +func (g *LearningGenerator) GeneratePracticeExercise(ctx context.Context, topic, language string, difficulty DifficultyLevel, locale string) (*PracticeExercise, error) { + langInstruction := "" + if locale == "ru" { + langInstruction = "Write instructions and explanations in Russian." + } + + prompt := fmt.Sprintf(`Create a coding practice exercise for: %s +Programming language: %s +Difficulty: %s +%s + +Generate: +1. A clear problem statement +2. 
Step-by-step instructions +3. Starter code template +4. Solution code +5. Test cases + +Respond in JSON: +{ + "prompt": "Problem statement", + "instructions": "Step-by-step instructions", + "starter": "// Starter code", + "solution": "// Solution code", + "testCases": [ + {"input": "test input", "expected": "expected output"} + ], + "hints": ["Hint 1", "Hint 2"] +}`, topic, language, difficulty, langInstruction) + + result, err := g.llm.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + if err != nil { + return nil, err + } + + jsonStr := extractJSON(result) + + var exercise PracticeExercise + if err := json.Unmarshal([]byte(jsonStr), &exercise); err != nil { + return &PracticeExercise{ + Prompt: topic, + Instructions: "Practice this concept", + }, nil + } + + return &exercise, nil +} + +func (l *StepByStepLesson) CompleteStep(stepIndex int) { + if stepIndex < 0 || stepIndex >= len(l.Steps) { + return + } + + l.Steps[stepIndex].Status = StatusCompleted + + alreadyCompleted := false + for _, idx := range l.Progress.CompletedSteps { + if idx == stepIndex { + alreadyCompleted = true + break + } + } + if !alreadyCompleted { + l.Progress.CompletedSteps = append(l.Progress.CompletedSteps, stepIndex) + l.Progress.Score += 10 + } + + if stepIndex+1 < len(l.Steps) { + l.Steps[stepIndex+1].Status = StatusAvailable + l.Progress.CurrentStep = stepIndex + 1 + } + + if len(l.Progress.CompletedSteps) == len(l.Steps) { + l.Progress.Completed = true + now := time.Now() + l.Progress.CompletedAt = &now + } + + l.UpdatedAt = time.Now() + l.Progress.LastAccessed = time.Now() +} + +func (l *StepByStepLesson) SubmitQuizAnswer(stepIndex int, selectedOptions []string) (bool, string) { + if stepIndex < 0 || stepIndex >= len(l.Steps) { + return false, "Invalid step" + } + + step := &l.Steps[stepIndex] + if step.Quiz == nil { + return false, "No quiz in this step" + } + + correctCount := 0 + totalCorrect := 0 + + for _, opt := range 
// extractJSON pulls the first JSON object out of an LLM response that may be
// wrapped in prose or markdown fences.
//
// It first scans for the first balanced, string-aware {...} span and returns
// it when it is valid JSON. If no balanced candidate validates, it falls back
// to the legacy greedy first-'{'-to-last-'}' regex match, and finally to "{}"
// so callers can json.Unmarshal unconditionally and apply their own fallbacks
// on failure.
//
// Fixed: the previous implementation used only the greedy regex `(?s)\{.*\}`,
// which produced invalid payloads whenever any text containing '}' followed
// the object (trailing prose, a second object, a stray fence).
func extractJSON(text string) string {
	if obj := firstBalancedObject(text); obj != "" && json.Valid([]byte(obj)) {
		return obj
	}
	// Legacy behavior preserved as a fallback.
	if m := regexp.MustCompile(`(?s)\{.*\}`).FindString(text); m != "" {
		return m
	}
	return "{}"
}

// firstBalancedObject returns the first brace-balanced {...} span in text,
// ignoring braces that occur inside double-quoted JSON strings (including
// escaped quotes). It returns "" when no balanced object is found; callers
// validate the candidate, so any scanner confusion degrades safely.
func firstBalancedObject(text string) string {
	depth := 0
	start := -1
	inString := false
	escaped := false
	for i := 0; i < len(text); i++ {
		c := text[i]
		if start >= 0 && inString {
			// Inside a quoted string: only watch for escapes and the close quote.
			switch {
			case escaped:
				escaped = false
			case c == '\\':
				escaped = true
			case c == '"':
				inString = false
			}
			continue
		}
		switch c {
		case '"':
			if start >= 0 {
				inString = true
			}
		case '{':
			if start < 0 {
				start = i
			}
			depth++
		case '}':
			if start >= 0 {
				depth--
				if depth == 0 {
					return text[start : i+1]
				}
			}
		}
	}
	return ""
}
`json:"role"` + Content string `json:"content"` +} + +type anthropicTool struct { + Name string `json:"name"` + Description string `json:"description"` + InputSchema interface{} `json:"input_schema"` +} + +type anthropicStreamEvent struct { + Type string `json:"type"` + Index int `json:"index,omitempty"` + Delta struct { + Type string `json:"type,omitempty"` + Text string `json:"text,omitempty"` + } `json:"delta,omitempty"` + ContentBlock struct { + Type string `json:"type"` + Text string `json:"text,omitempty"` + } `json:"content_block,omitempty"` +} + +func (c *AnthropicClient) StreamText(ctx context.Context, req StreamRequest) (<-chan StreamChunk, error) { + var systemPrompt string + messages := make([]anthropicMessage, 0) + + for _, m := range req.Messages { + if m.Role == RoleSystem { + systemPrompt = m.Content + continue + } + role := string(m.Role) + if role == "tool" { + role = "user" + } + messages = append(messages, anthropicMessage{ + Role: role, + Content: m.Content, + }) + } + + maxTokens := req.Options.MaxTokens + if maxTokens == 0 { + maxTokens = 4096 + } + + anthropicReq := anthropicRequest{ + Model: c.modelKey, + Messages: messages, + System: systemPrompt, + MaxTokens: maxTokens, + Stream: true, + } + + if len(req.Tools) > 0 { + anthropicReq.Tools = make([]anthropicTool, len(req.Tools)) + for i, t := range req.Tools { + anthropicReq.Tools[i] = anthropicTool{ + Name: t.Name, + Description: t.Description, + InputSchema: t.Schema, + } + } + } + + body, err := json.Marshal(anthropicReq) + if err != nil { + return nil, err + } + + httpReq, err := http.NewRequestWithContext(ctx, "POST", c.baseURL+"/v1/messages", bytes.NewReader(body)) + if err != nil { + return nil, err + } + + httpReq.Header.Set("Content-Type", "application/json") + httpReq.Header.Set("x-api-key", c.apiKey) + httpReq.Header.Set("anthropic-version", "2023-06-01") + + resp, err := c.client.Do(httpReq) + if err != nil { + return nil, err + } + + if resp.StatusCode != http.StatusOK { + 
body, _ := io.ReadAll(resp.Body) + resp.Body.Close() + return nil, fmt.Errorf("anthropic API error: %d - %s", resp.StatusCode, string(body)) + } + + ch := make(chan StreamChunk, 100) + go func() { + defer close(ch) + defer resp.Body.Close() + + scanner := bufio.NewScanner(resp.Body) + for scanner.Scan() { + line := scanner.Text() + if !strings.HasPrefix(line, "data: ") { + continue + } + + data := strings.TrimPrefix(line, "data: ") + if data == "[DONE]" { + return + } + + var event anthropicStreamEvent + if err := json.Unmarshal([]byte(data), &event); err != nil { + continue + } + + switch event.Type { + case "content_block_delta": + if event.Delta.Text != "" { + ch <- StreamChunk{ContentChunk: event.Delta.Text} + } + case "message_stop": + ch <- StreamChunk{FinishReason: "stop"} + return + } + } + }() + + return ch, nil +} + +func (c *AnthropicClient) GenerateText(ctx context.Context, req StreamRequest) (string, error) { + ch, err := c.StreamText(ctx, req) + if err != nil { + return "", err + } + return readAllChunks(ch), nil +} diff --git a/backend/internal/llm/client.go b/backend/internal/llm/client.go new file mode 100644 index 0000000..74d7807 --- /dev/null +++ b/backend/internal/llm/client.go @@ -0,0 +1,145 @@ +package llm + +import ( + "context" + "fmt" + "io" +) + +type Role string + +const ( + RoleSystem Role = "system" + RoleUser Role = "user" + RoleAssistant Role = "assistant" + RoleTool Role = "tool" +) + +type Message struct { + Role Role `json:"role"` + Content string `json:"content"` + ToolCalls []ToolCall `json:"tool_calls,omitempty"` + ToolCallID string `json:"tool_call_id,omitempty"` + Name string `json:"name,omitempty"` + Images []ImageContent `json:"images,omitempty"` +} + +type ImageContent struct { + Type string `json:"type"` + URL string `json:"url,omitempty"` + Data string `json:"data,omitempty"` + IsBase64 bool `json:"isBase64,omitempty"` +} + +type ToolCall struct { + ID string `json:"id"` + Name string `json:"name"` + Arguments 
map[string]interface{} `json:"arguments"` +} + +type Tool struct { + Name string `json:"name"` + Description string `json:"description"` + Schema interface{} `json:"schema"` +} + +type StreamOptions struct { + MaxTokens int `json:"max_tokens,omitempty"` + Temperature float64 `json:"temperature,omitempty"` + TopP float64 `json:"top_p,omitempty"` + StopWords []string `json:"stop,omitempty"` +} + +type StreamChunk struct { + ContentChunk string `json:"content_chunk,omitempty"` + ToolCallChunk []ToolCall `json:"tool_call_chunk,omitempty"` + FinishReason string `json:"finish_reason,omitempty"` +} + +type StreamRequest struct { + Messages []Message `json:"messages"` + Tools []Tool `json:"tools,omitempty"` + Options StreamOptions `json:"options,omitempty"` +} + +type Client interface { + StreamText(ctx context.Context, req StreamRequest) (<-chan StreamChunk, error) + GenerateText(ctx context.Context, req StreamRequest) (string, error) + GetProviderID() string + GetModelKey() string +} + +type ProviderConfig struct { + ProviderID string `json:"providerId"` + ModelKey string `json:"key"` + APIKey string `json:"apiKey,omitempty"` + BaseURL string `json:"baseUrl,omitempty"` + AgentAccessID string `json:"agentAccessId,omitempty"` +} + +func NewClient(cfg ProviderConfig) (Client, error) { + switch cfg.ProviderID { + case "timeweb": + return NewTimewebClient(TimewebConfig{ + BaseURL: cfg.BaseURL, + AgentAccessID: cfg.AgentAccessID, + APIKey: cfg.APIKey, + ModelKey: cfg.ModelKey, + ProxySource: "gooseek", + }) + case "openai": + return NewOpenAIClient(cfg) + case "anthropic": + return NewAnthropicClient(cfg) + case "gemini", "google": + return NewGeminiClient(cfg) + default: + return nil, fmt.Errorf("unknown provider: %s", cfg.ProviderID) + } +} + +type baseClient struct { + providerID string + modelKey string +} + +func (c *baseClient) GetProviderID() string { + return c.providerID +} + +func (c *baseClient) GetModelKey() string { + return c.modelKey +} + +func readAllChunks(ch 
<-chan StreamChunk) string { + var result string + for chunk := range ch { + result += chunk.ContentChunk + } + return result +} + +type streamReader struct { + ch <-chan StreamChunk + buffer []byte +} + +func (r *streamReader) Read(p []byte) (n int, err error) { + if len(r.buffer) > 0 { + n = copy(p, r.buffer) + r.buffer = r.buffer[n:] + return n, nil + } + + chunk, ok := <-r.ch + if !ok { + return 0, io.EOF + } + + data := []byte(chunk.ContentChunk) + n = copy(p, data) + if n < len(data) { + r.buffer = data[n:] + } + return n, nil +} diff --git a/backend/internal/llm/gemini.go b/backend/internal/llm/gemini.go new file mode 100644 index 0000000..b83ab63 --- /dev/null +++ b/backend/internal/llm/gemini.go @@ -0,0 +1,193 @@ +package llm + +import ( + "bufio" + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "strings" +) + +type GeminiClient struct { + baseClient + apiKey string + baseURL string + client *http.Client +} + +func NewGeminiClient(cfg ProviderConfig) (*GeminiClient, error) { + baseURL := cfg.BaseURL + if baseURL == "" { + baseURL = "https://generativelanguage.googleapis.com/v1beta" + } + + return &GeminiClient{ + baseClient: baseClient{ + providerID: cfg.ProviderID, + modelKey: cfg.ModelKey, + }, + apiKey: cfg.APIKey, + baseURL: strings.TrimSuffix(baseURL, "/"), + client: &http.Client{}, + }, nil +} + +type geminiRequest struct { + Contents []geminiContent `json:"contents"` + SystemInstruction *geminiContent `json:"systemInstruction,omitempty"` + GenerationConfig geminiGenerationConfig `json:"generationConfig,omitempty"` + Tools []geminiTool `json:"tools,omitempty"` +} + +type geminiContent struct { + Role string `json:"role,omitempty"` + Parts []geminiPart `json:"parts"` +} + +type geminiPart struct { + Text string `json:"text,omitempty"` +} + +type geminiGenerationConfig struct { + MaxOutputTokens int `json:"maxOutputTokens,omitempty"` + Temperature float64 `json:"temperature,omitempty"` + TopP float64 `json:"topP,omitempty"` +} + 
+type geminiTool struct { + FunctionDeclarations []geminiFunctionDecl `json:"functionDeclarations,omitempty"` +} + +type geminiFunctionDecl struct { + Name string `json:"name"` + Description string `json:"description"` + Parameters interface{} `json:"parameters"` +} + +type geminiStreamResponse struct { + Candidates []struct { + Content struct { + Parts []struct { + Text string `json:"text"` + } `json:"parts"` + } `json:"content"` + FinishReason string `json:"finishReason,omitempty"` + } `json:"candidates"` +} + +func (c *GeminiClient) StreamText(ctx context.Context, req StreamRequest) (<-chan StreamChunk, error) { + contents := make([]geminiContent, 0) + var systemInstruction *geminiContent + + for _, m := range req.Messages { + if m.Role == RoleSystem { + systemInstruction = &geminiContent{ + Parts: []geminiPart{{Text: m.Content}}, + } + continue + } + + role := "user" + if m.Role == RoleAssistant { + role = "model" + } + + contents = append(contents, geminiContent{ + Role: role, + Parts: []geminiPart{{Text: m.Content}}, + }) + } + + geminiReq := geminiRequest{ + Contents: contents, + SystemInstruction: systemInstruction, + GenerationConfig: geminiGenerationConfig{ + MaxOutputTokens: req.Options.MaxTokens, + Temperature: req.Options.Temperature, + TopP: req.Options.TopP, + }, + } + + if len(req.Tools) > 0 { + decls := make([]geminiFunctionDecl, len(req.Tools)) + for i, t := range req.Tools { + decls[i] = geminiFunctionDecl{ + Name: t.Name, + Description: t.Description, + Parameters: t.Schema, + } + } + geminiReq.Tools = []geminiTool{{FunctionDeclarations: decls}} + } + + body, err := json.Marshal(geminiReq) + if err != nil { + return nil, err + } + + url := fmt.Sprintf("%s/models/%s:streamGenerateContent?key=%s&alt=sse", + c.baseURL, c.modelKey, c.apiKey) + + httpReq, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewReader(body)) + if err != nil { + return nil, err + } + httpReq.Header.Set("Content-Type", "application/json") + + resp, err := 
c.client.Do(httpReq) + if err != nil { + return nil, err + } + + if resp.StatusCode != http.StatusOK { + body, _ := io.ReadAll(resp.Body) + resp.Body.Close() + return nil, fmt.Errorf("gemini API error: %d - %s", resp.StatusCode, string(body)) + } + + ch := make(chan StreamChunk, 100) + go func() { + defer close(ch) + defer resp.Body.Close() + + scanner := bufio.NewScanner(resp.Body) + for scanner.Scan() { + line := scanner.Text() + if !strings.HasPrefix(line, "data: ") { + continue + } + + data := strings.TrimPrefix(line, "data: ") + + var response geminiStreamResponse + if err := json.Unmarshal([]byte(data), &response); err != nil { + continue + } + + if len(response.Candidates) > 0 { + candidate := response.Candidates[0] + for _, part := range candidate.Content.Parts { + if part.Text != "" { + ch <- StreamChunk{ContentChunk: part.Text} + } + } + if candidate.FinishReason != "" { + ch <- StreamChunk{FinishReason: candidate.FinishReason} + } + } + } + }() + + return ch, nil +} + +func (c *GeminiClient) GenerateText(ctx context.Context, req StreamRequest) (string, error) { + ch, err := c.StreamText(ctx, req) + if err != nil { + return "", err + } + return readAllChunks(ch), nil +} diff --git a/backend/internal/llm/openai.go b/backend/internal/llm/openai.go new file mode 100644 index 0000000..a5d4403 --- /dev/null +++ b/backend/internal/llm/openai.go @@ -0,0 +1,166 @@ +package llm + +import ( + "context" + "encoding/json" + "errors" + "io" + + "github.com/sashabaranov/go-openai" +) + +type OpenAIClient struct { + baseClient + client *openai.Client +} + +func NewOpenAIClient(cfg ProviderConfig) (*OpenAIClient, error) { + config := openai.DefaultConfig(cfg.APIKey) + if cfg.BaseURL != "" { + config.BaseURL = cfg.BaseURL + } + + return &OpenAIClient{ + baseClient: baseClient{ + providerID: cfg.ProviderID, + modelKey: cfg.ModelKey, + }, + client: openai.NewClientWithConfig(config), + }, nil +} + +func (c *OpenAIClient) StreamText(ctx context.Context, req StreamRequest) 
(<-chan StreamChunk, error) { + messages := make([]openai.ChatCompletionMessage, 0, len(req.Messages)) + for _, m := range req.Messages { + msg := openai.ChatCompletionMessage{ + Role: string(m.Role), + Content: m.Content, + } + if m.Name != "" { + msg.Name = m.Name + } + if m.ToolCallID != "" { + msg.ToolCallID = m.ToolCallID + } + if len(m.ToolCalls) > 0 { + msg.ToolCalls = make([]openai.ToolCall, len(m.ToolCalls)) + for i, tc := range m.ToolCalls { + args, _ := json.Marshal(tc.Arguments) + msg.ToolCalls[i] = openai.ToolCall{ + ID: tc.ID, + Type: openai.ToolTypeFunction, + Function: openai.FunctionCall{ + Name: tc.Name, + Arguments: string(args), + }, + } + } + } + messages = append(messages, msg) + } + + chatReq := openai.ChatCompletionRequest{ + Model: c.modelKey, + Messages: messages, + Stream: true, + } + + if req.Options.MaxTokens > 0 { + chatReq.MaxTokens = req.Options.MaxTokens + } + if req.Options.Temperature > 0 { + chatReq.Temperature = float32(req.Options.Temperature) + } + if req.Options.TopP > 0 { + chatReq.TopP = float32(req.Options.TopP) + } + + if len(req.Tools) > 0 { + chatReq.Tools = make([]openai.Tool, len(req.Tools)) + for i, t := range req.Tools { + chatReq.Tools[i] = openai.Tool{ + Type: openai.ToolTypeFunction, + Function: &openai.FunctionDefinition{ + Name: t.Name, + Description: t.Description, + Parameters: t.Schema, + }, + } + } + } + + stream, err := c.client.CreateChatCompletionStream(ctx, chatReq) + if err != nil { + return nil, err + } + + ch := make(chan StreamChunk, 100) + go func() { + defer close(ch) + defer stream.Close() + + toolCalls := make(map[int]*ToolCall) + + for { + response, err := stream.Recv() + if errors.Is(err, io.EOF) { + if len(toolCalls) > 0 { + calls := make([]ToolCall, 0, len(toolCalls)) + for _, tc := range toolCalls { + calls = append(calls, *tc) + } + ch <- StreamChunk{ToolCallChunk: calls} + } + return + } + if err != nil { + return + } + + if len(response.Choices) == 0 { + continue + } + + delta := 
response.Choices[0].Delta + + if delta.Content != "" { + ch <- StreamChunk{ContentChunk: delta.Content} + } + + for _, tc := range delta.ToolCalls { + idx := *tc.Index + if _, ok := toolCalls[idx]; !ok { + toolCalls[idx] = &ToolCall{ + ID: tc.ID, + Name: tc.Function.Name, + Arguments: make(map[string]interface{}), + } + } + + if tc.Function.Arguments != "" { + existing := toolCalls[idx] + var args map[string]interface{} + if err := json.Unmarshal([]byte(tc.Function.Arguments), &args); err == nil { + for k, v := range args { + existing.Arguments[k] = v + } + } + } + } + + if response.Choices[0].FinishReason != "" { + ch <- StreamChunk{FinishReason: string(response.Choices[0].FinishReason)} + } + } + }() + + return ch, nil +} + +func (c *OpenAIClient) GenerateText(ctx context.Context, req StreamRequest) (string, error) { + ch, err := c.StreamText(ctx, req) + if err != nil { + return "", err + } + return readAllChunks(ch), nil +} diff --git a/backend/internal/llm/registry.go b/backend/internal/llm/registry.go new file mode 100644 index 0000000..9d36b0a --- /dev/null +++ b/backend/internal/llm/registry.go @@ -0,0 +1,229 @@ +package llm + +import ( + "errors" + "sort" + "sync" +) + +type ModelCapability string + +const ( + CapReasoning ModelCapability = "reasoning" + CapCoding ModelCapability = "coding" + CapSearch ModelCapability = "search" + CapCreative ModelCapability = "creative" + CapFast ModelCapability = "fast" + CapLongContext ModelCapability = "long_context" + CapVision ModelCapability = "vision" + CapMath ModelCapability = "math" + CapVideo ModelCapability = "video" + CapImage ModelCapability = "image" +) + +type ModelSpec struct { + ID string + Provider string + Model string + Capabilities []ModelCapability + CostPer1K float64 + MaxContext int + Priority int + MaxTokens int + Description string +} + +func (m ModelSpec) HasCapability(cap ModelCapability) bool { + for _, c := range m.Capabilities { + if c == cap { + return true + } + } + return false +} + +type 
ModelRegistry struct { + models map[string]ModelSpec + clients map[string]Client + mu sync.RWMutex +} + +func NewModelRegistry() *ModelRegistry { + return &ModelRegistry{ + models: make(map[string]ModelSpec), + clients: make(map[string]Client), + } +} + +func (r *ModelRegistry) Register(spec ModelSpec, client Client) { + r.mu.Lock() + defer r.mu.Unlock() + r.models[spec.ID] = spec + r.clients[spec.ID] = client +} + +func (r *ModelRegistry) Unregister(id string) { + r.mu.Lock() + defer r.mu.Unlock() + delete(r.models, id) + delete(r.clients, id) +} + +func (r *ModelRegistry) GetByID(id string) (Client, ModelSpec, error) { + r.mu.RLock() + defer r.mu.RUnlock() + + spec, ok := r.models[id] + if !ok { + return nil, ModelSpec{}, errors.New("model not found: " + id) + } + + client, ok := r.clients[id] + if !ok { + return nil, ModelSpec{}, errors.New("client not found: " + id) + } + + return client, spec, nil +} + +func (r *ModelRegistry) GetBest(cap ModelCapability) (Client, ModelSpec, error) { + r.mu.RLock() + defer r.mu.RUnlock() + + var candidates []ModelSpec + for _, spec := range r.models { + if spec.HasCapability(cap) { + candidates = append(candidates, spec) + } + } + + if len(candidates) == 0 { + return nil, ModelSpec{}, errors.New("no model found with capability: " + string(cap)) + } + + sort.Slice(candidates, func(i, j int) bool { + if candidates[i].Priority != candidates[j].Priority { + return candidates[i].Priority < candidates[j].Priority + } + return candidates[i].CostPer1K < candidates[j].CostPer1K + }) + + best := candidates[0] + client := r.clients[best.ID] + return client, best, nil +} + +func (r *ModelRegistry) GetAllWithCapability(cap ModelCapability) []ModelSpec { + r.mu.RLock() + defer r.mu.RUnlock() + + var result []ModelSpec + for _, spec := range r.models { + if spec.HasCapability(cap) { + result = append(result, spec) + } + } + + sort.Slice(result, func(i, j int) bool { + return result[i].Priority < result[j].Priority + }) + + return result +} + 
+func (r *ModelRegistry) GetAll() []ModelSpec { + r.mu.RLock() + defer r.mu.RUnlock() + + result := make([]ModelSpec, 0, len(r.models)) + for _, spec := range r.models { + result = append(result, spec) + } + return result +} + +func (r *ModelRegistry) GetClient(id string) (Client, error) { + r.mu.RLock() + defer r.mu.RUnlock() + + client, ok := r.clients[id] + if !ok { + return nil, errors.New("client not found: " + id) + } + return client, nil +} + +func (r *ModelRegistry) Count() int { + r.mu.RLock() + defer r.mu.RUnlock() + return len(r.models) +} + +var DefaultModels = []ModelSpec{ + { + ID: "gpt-4o", + Provider: "openai", + Model: "gpt-4o", + Capabilities: []ModelCapability{CapSearch, CapFast, CapVision, CapCoding, CapCreative}, + CostPer1K: 0.005, + MaxContext: 128000, + MaxTokens: 16384, + Priority: 1, + Description: "GPT-4o: fast multimodal model with search", + }, + { + ID: "gpt-4o-mini", + Provider: "openai", + Model: "gpt-4o-mini", + Capabilities: []ModelCapability{CapFast, CapCoding}, + CostPer1K: 0.00015, + MaxContext: 128000, + MaxTokens: 16384, + Priority: 2, + Description: "GPT-4o Mini: cost-effective for simple tasks", + }, + { + ID: "claude-3-opus", + Provider: "anthropic", + Model: "claude-3-opus-20240229", + Capabilities: []ModelCapability{CapReasoning, CapCoding, CapCreative, CapLongContext}, + CostPer1K: 0.015, + MaxContext: 200000, + MaxTokens: 4096, + Priority: 1, + Description: "Claude 3 Opus: best for complex reasoning and coding", + }, + { + ID: "claude-3-sonnet", + Provider: "anthropic", + Model: "claude-3-5-sonnet-20241022", + Capabilities: []ModelCapability{CapCoding, CapCreative, CapFast}, + CostPer1K: 0.003, + MaxContext: 200000, + MaxTokens: 8192, + Priority: 1, + Description: "Claude 3.5 Sonnet: balanced speed and quality", + }, + { + ID: "gemini-1.5-pro", + Provider: "gemini", + Model: "gemini-1.5-pro", + Capabilities: []ModelCapability{CapLongContext, CapSearch, CapVision, CapMath}, + CostPer1K: 0.00125, + MaxContext: 2000000, + 
MaxTokens: 8192, + Priority: 1, + Description: "Gemini 1.5 Pro: best for long context and research", + }, + { + ID: "gemini-1.5-flash", + Provider: "gemini", + Model: "gemini-1.5-flash", + Capabilities: []ModelCapability{CapFast, CapVision}, + CostPer1K: 0.000075, + MaxContext: 1000000, + MaxTokens: 8192, + Priority: 2, + Description: "Gemini 1.5 Flash: fastest for lightweight tasks", + }, +} diff --git a/backend/internal/llm/timeweb.go b/backend/internal/llm/timeweb.go new file mode 100644 index 0000000..9934b7d --- /dev/null +++ b/backend/internal/llm/timeweb.go @@ -0,0 +1,402 @@ +package llm + +import ( + "bufio" + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "strings" + "time" +) + +type TimewebClient struct { + baseClient + httpClient *http.Client + baseURL string + agentAccessID string + apiKey string + proxySource string +} + +type TimewebConfig struct { + ProviderID string + ModelKey string + BaseURL string + AgentAccessID string + APIKey string + ProxySource string +} + +func NewTimewebClient(cfg TimewebConfig) (*TimewebClient, error) { + if cfg.AgentAccessID == "" { + return nil, errors.New("agent_access_id is required for Timeweb") + } + if cfg.APIKey == "" { + return nil, errors.New("api_key is required for Timeweb") + } + + baseURL := cfg.BaseURL + if baseURL == "" { + baseURL = "https://api.timeweb.cloud" + } + + proxySource := cfg.ProxySource + if proxySource == "" { + proxySource = "gooseek" + } + + return &TimewebClient{ + baseClient: baseClient{ + providerID: cfg.ProviderID, + modelKey: cfg.ModelKey, + }, + httpClient: &http.Client{ + Timeout: 120 * time.Second, + }, + baseURL: baseURL, + agentAccessID: cfg.AgentAccessID, + apiKey: cfg.APIKey, + proxySource: proxySource, + }, nil +} + +type timewebChatRequest struct { + Model string `json:"model,omitempty"` + Messages []timewebMessage `json:"messages"` + Stream bool `json:"stream,omitempty"` + Temperature float64 `json:"temperature,omitempty"` + MaxTokens int 
`json:"max_tokens,omitempty"`
	TopP  float64       `json:"top_p,omitempty"`
	Tools []timewebTool `json:"tools,omitempty"`
	Stop  []string      `json:"stop,omitempty"`
}

type timewebMessage struct {
	Role       string            `json:"role"`
	Content    interface{}       `json:"content"`
	Name       string            `json:"name,omitempty"`
	ToolCalls  []timewebToolCall `json:"tool_calls,omitempty"`
	ToolCallID string            `json:"tool_call_id,omitempty"`
}

type timewebTool struct {
	Type     string          `json:"type"`
	Function timewebFunction `json:"function"`
}

type timewebFunction struct {
	Name        string      `json:"name"`
	Description string      `json:"description"`
	Parameters  interface{} `json:"parameters"`
}

// timewebToolCall mirrors the OpenAI-compatible tool-call payload.
// Index is only populated on streaming deltas; it identifies which tool call
// an argument fragment belongs to when the model emits several calls in
// parallel (same contract as the OpenAI streaming API).
type timewebToolCall struct {
	Index    *int   `json:"index,omitempty"`
	ID       string `json:"id"`
	Type     string `json:"type"`
	Function struct {
		Name      string `json:"name"`
		Arguments string `json:"arguments"`
	} `json:"function"`
}

type timewebChatResponse struct {
	ID      string `json:"id"`
	Object  string `json:"object"`
	Created int64  `json:"created"`
	Model   string `json:"model"`
	Choices []struct {
		Index   int `json:"index"`
		Message struct {
			Role      string            `json:"role"`
			Content   string            `json:"content"`
			ToolCalls []timewebToolCall `json:"tool_calls,omitempty"`
		} `json:"message"`
		FinishReason string `json:"finish_reason"`
	} `json:"choices"`
	Usage struct {
		PromptTokens     int `json:"prompt_tokens"`
		CompletionTokens int `json:"completion_tokens"`
		TotalTokens      int `json:"total_tokens"`
	} `json:"usage"`
}

type timewebStreamResponse struct {
	ID      string `json:"id"`
	Object  string `json:"object"`
	Created int64  `json:"created"`
	Model   string `json:"model"`
	Choices []struct {
		Index int `json:"index"`
		Delta struct {
			Role      string            `json:"role,omitempty"`
			Content   string            `json:"content,omitempty"`
			ToolCalls []timewebToolCall `json:"tool_calls,omitempty"`
		} `json:"delta"`
		FinishReason string `json:"finish_reason,omitempty"`
	} `json:"choices"`
}

// StreamText issues a streaming chat-completions request and returns a channel
// of chunks: content tokens as they arrive, accumulated tool calls when the
// stream finishes, and a final finish-reason chunk. The channel is closed when
// the SSE stream ends or a read error occurs.
func (c *TimewebClient) StreamText(ctx context.Context, req StreamRequest) (<-chan StreamChunk, error) {
	messages := make([]timewebMessage, 0, len(req.Messages))
	for _, m := range req.Messages {
		msg := timewebMessage{
			Role:    string(m.Role),
			Content: m.Content,
		}
		if m.Name != "" {
			msg.Name = m.Name
		}
		if m.ToolCallID != "" {
			msg.ToolCallID = m.ToolCallID
		}
		if len(m.ToolCalls) > 0 {
			msg.ToolCalls = make([]timewebToolCall, len(m.ToolCalls))
			for i, tc := range m.ToolCalls {
				// Tool-call arguments are kept as a JSON string on the wire.
				args, _ := json.Marshal(tc.Arguments)
				msg.ToolCalls[i] = timewebToolCall{
					ID:   tc.ID,
					Type: "function",
				}
				msg.ToolCalls[i].Function.Name = tc.Name
				msg.ToolCalls[i].Function.Arguments = string(args)
			}
		}
		messages = append(messages, msg)
	}

	chatReq := timewebChatRequest{
		Model:    c.modelKey,
		Messages: messages,
		Stream:   true,
	}

	if req.Options.MaxTokens > 0 {
		chatReq.MaxTokens = req.Options.MaxTokens
	}
	if req.Options.Temperature > 0 {
		chatReq.Temperature = req.Options.Temperature
	}
	if req.Options.TopP > 0 {
		chatReq.TopP = req.Options.TopP
	}
	if len(req.Options.StopWords) > 0 {
		chatReq.Stop = req.Options.StopWords
	}

	if len(req.Tools) > 0 {
		chatReq.Tools = make([]timewebTool, len(req.Tools))
		for i, t := range req.Tools {
			chatReq.Tools[i] = timewebTool{
				Type: "function",
				Function: timewebFunction{
					Name:        t.Name,
					Description: t.Description,
					Parameters:  t.Schema,
				},
			}
		}
	}

	body, err := json.Marshal(chatReq)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal request: %w", err)
	}

	url := fmt.Sprintf("%s/api/v1/cloud-ai/agents/%s/v1/chat/completions", c.baseURL, c.agentAccessID)
	httpReq, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewReader(body))
	if err != nil {
		return nil, fmt.Errorf("failed to create request: %w", err)
	}

	httpReq.Header.Set("Content-Type", "application/json")
	httpReq.Header.Set("Authorization", "Bearer "+c.apiKey)
	httpReq.Header.Set("x-proxy-source", c.proxySource)

	resp, err := c.httpClient.Do(httpReq)
	if err != nil {
		return nil, fmt.Errorf("request failed: %w", err)
	}

	if resp.StatusCode != http.StatusOK {
		defer resp.Body.Close()
		errBody, _ := io.ReadAll(resp.Body)
		return nil, fmt.Errorf("Timeweb API error: status %d, body: %s", resp.StatusCode, string(errBody))
	}

	ch := make(chan StreamChunk, 100)
	go func() {
		defer close(ch)
		defer resp.Body.Close()

		// Accumulates tool-call argument fragments keyed by the stream index.
		toolCalls := make(map[int]*ToolCall)
		reader := bufio.NewReader(resp.Body)

		// flushToolCalls emits the accumulated tool calls once, at stream end.
		flushToolCalls := func() {
			if len(toolCalls) == 0 {
				return
			}
			calls := make([]ToolCall, 0, len(toolCalls))
			for _, tc := range toolCalls {
				calls = append(calls, *tc)
			}
			ch <- StreamChunk{ToolCallChunk: calls}
		}

		for {
			line, err := reader.ReadString('\n')
			if err != nil {
				if err != io.EOF {
					return
				}
				flushToolCalls()
				return
			}

			line = strings.TrimSpace(line)
			if line == "" {
				continue
			}

			// Only SSE data lines carry payload; ignore comments/other fields.
			if !strings.HasPrefix(line, "data: ") {
				continue
			}

			data := strings.TrimPrefix(line, "data: ")
			if data == "[DONE]" {
				flushToolCalls()
				return
			}

			var streamResp timewebStreamResponse
			if err := json.Unmarshal([]byte(data), &streamResp); err != nil {
				// Skip malformed keep-alive or partial frames.
				continue
			}

			if len(streamResp.Choices) == 0 {
				continue
			}

			delta := streamResp.Choices[0].Delta

			if delta.Content != "" {
				ch <- StreamChunk{ContentChunk: delta.Content}
			}

			for _, tc := range delta.ToolCalls {
				// BUGFIX: previously idx was hardcoded to 0, which merged all
				// parallel tool calls into a single accumulator entry. Use the
				// server-provided index (falling back to 0 when absent) so each
				// tool call is accumulated separately, matching the OpenAI client.
				idx := 0
				if tc.Index != nil {
					idx = *tc.Index
				}
				if _, ok := toolCalls[idx]; !ok {
					toolCalls[idx] = &ToolCall{
						ID:        tc.ID,
						Name:      tc.Function.Name,
						Arguments: make(map[string]interface{}),
					}
				}

				if tc.Function.Arguments != "" {
					existing := toolCalls[idx]
					var args map[string]interface{}
					if err := json.Unmarshal([]byte(tc.Function.Arguments), &args); err == nil {
						for k, v := range args {
							existing.Arguments[k] = v
						}
					}
				}
			}

			if streamResp.Choices[0].FinishReason != "" {
				ch <- StreamChunk{FinishReason: streamResp.Choices[0].FinishReason}
			}
		}
	}()

	return ch, nil
}

// GenerateText issues a non-streaming chat-completions request and returns the
// first choice's content. Options and tools are mapped the same way as in
// StreamText (StopWords support added for consistency with the streaming path).
func (c *TimewebClient) GenerateText(ctx context.Context, req StreamRequest) (string, error) {
	messages := make([]timewebMessage, 0, len(req.Messages))
	for _, m := range req.Messages {
		msg := timewebMessage{
			Role:    string(m.Role),
			Content: m.Content,
		}
		if m.Name != "" {
			msg.Name = m.Name
		}
		if m.ToolCallID != "" {
			msg.ToolCallID = m.ToolCallID
		}
		messages = append(messages, msg)
	}

	chatReq := timewebChatRequest{
		Model:    c.modelKey,
		Messages: messages,
		Stream:   false,
	}

	if req.Options.MaxTokens > 0 {
		chatReq.MaxTokens = req.Options.MaxTokens
	}
	if req.Options.Temperature > 0 {
		chatReq.Temperature = req.Options.Temperature
	}
	if req.Options.TopP > 0 {
		chatReq.TopP = req.Options.TopP
	}
	if len(req.Options.StopWords) > 0 {
		chatReq.Stop = req.Options.StopWords
	}

	if len(req.Tools) > 0 {
		chatReq.Tools = make([]timewebTool, len(req.Tools))
		for i, t := range req.Tools {
			chatReq.Tools[i] = timewebTool{
				Type: "function",
				Function: timewebFunction{
					Name:        t.Name,
					Description: t.Description,
					Parameters:  t.Schema,
				},
			}
		}
	}

	body, err := json.Marshal(chatReq)
	if err != nil {
		return "", fmt.Errorf("failed to marshal request: %w", err)
	}

	url := fmt.Sprintf("%s/api/v1/cloud-ai/agents/%s/v1/chat/completions", c.baseURL, c.agentAccessID)
	httpReq, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewReader(body))
	if err != nil {
		return "", fmt.Errorf("failed to create request: %w", err)
	}

	httpReq.Header.Set("Content-Type", "application/json")
	httpReq.Header.Set("Authorization", "Bearer "+c.apiKey)
	httpReq.Header.Set("x-proxy-source", c.proxySource)

	resp, err := c.httpClient.Do(httpReq)
	if err != nil {
		return "", fmt.Errorf("request failed: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return "", fmt.Errorf("Timeweb API error: status %d, body: %s", resp.StatusCode,
string(body))
	}

	// Decode the non-streaming completion and return the first choice.
	var chatResp timewebChatResponse
	if err := json.NewDecoder(resp.Body).Decode(&chatResp); err != nil {
		return "", fmt.Errorf("failed to decode response: %w", err)
	}

	if len(chatResp.Choices) == 0 {
		return "", errors.New("no choices in response")
	}

	return chatResp.Choices[0].Message.Content, nil
}
diff --git a/backend/internal/pages/generator.go b/backend/internal/pages/generator.go
new file mode 100644
index 0000000..c6ac5f8
--- /dev/null
+++ b/backend/internal/pages/generator.go
@@ -0,0 +1,318 @@
package pages

import (
	"context"
	"encoding/json"
	"fmt"
	"strings"
	"time"

	"github.com/gooseek/backend/internal/llm"
	"github.com/gooseek/backend/internal/types"
	"github.com/google/uuid"
)

// Page is a generated long-form article assembled from a research thread:
// titled sections plus the cited sources, with sharing/visibility metadata.
type Page struct {
	ID        string        `json:"id"`
	UserID    string        `json:"userId"`
	ThreadID  string        `json:"threadId,omitempty"`
	Title     string        `json:"title"`
	Subtitle  string        `json:"subtitle,omitempty"`
	Sections  []PageSection `json:"sections"`
	Sources   []PageSource  `json:"sources"`
	Thumbnail string        `json:"thumbnail,omitempty"`
	IsPublic  bool          `json:"isPublic"`
	ShareID   string        `json:"shareId,omitempty"`
	ViewCount int           `json:"viewCount"`
	CreatedAt time.Time     `json:"createdAt"`
	UpdatedAt time.Time     `json:"updatedAt"`
}

// PageSection is one ordered block of a Page (currently type "text").
type PageSection struct {
	ID       string `json:"id"`
	Type     string `json:"type"`
	Title    string `json:"title,omitempty"`
	Content  string `json:"content"`
	ImageURL string `json:"imageUrl,omitempty"`
	Order    int    `json:"order"`
}

// PageSource is a numbered citation attached to a Page; Index matches the
// [n] citation markers used in section content.
type PageSource struct {
	Index   int    `json:"index"`
	URL     string `json:"url"`
	Title   string `json:"title"`
	Domain  string `json:"domain"`
	Favicon string `json:"favicon,omitempty"`
}

// PageGeneratorConfig configures page generation: the LLM used to draft the
// article plus locale/style/audience hints folded into the prompt.
type PageGeneratorConfig struct {
	LLMClient llm.Client
	Locale    string
	Style     string
	Audience  string
}

// PageGenerator turns a research thread (query, answer, sources) into a Page.
type PageGenerator struct {
	cfg PageGeneratorConfig
}

// NewPageGenerator returns a generator using cfg as-is.
func NewPageGenerator(cfg PageGeneratorConfig) *PageGenerator {
	return &PageGenerator{cfg: cfg}
}

func (g
*PageGenerator) GenerateFromThread(ctx context.Context, query string, answer string, sources []types.Chunk) (*Page, error) {
	// Ask the LLM for a structured article draft, then parse it into sections.
	prompt := g.buildStructurePrompt(query, answer, sources)

	raw, err := g.cfg.LLMClient.GenerateText(ctx, llm.StreamRequest{
		Messages: []llm.Message{
			{Role: "user", Content: prompt},
		},
	})
	if err != nil {
		return nil, fmt.Errorf("failed to generate structure: %w", err)
	}

	page := g.parseStructure(raw)
	page.ID = uuid.New().String()
	page.CreatedAt = time.Now()
	page.UpdatedAt = time.Now()

	// Attach up to 20 numbered citations pulled from the source metadata.
	limit := len(sources)
	if limit > 20 {
		limit = 20
	}
	for i, src := range sources[:limit] {
		url := src.Metadata["url"]
		title := src.Metadata["title"]
		page.Sources = append(page.Sources, PageSource{
			Index:  i + 1,
			URL:    url,
			Title:  title,
			Domain: extractDomain(url),
		})
	}

	return page, nil
}

// buildStructurePrompt assembles the article-drafting prompt from the query,
// the (truncated) research answer, and up to 15 truncated source excerpts.
func (g *PageGenerator) buildStructurePrompt(query, answer string, sources []types.Chunk) string {
	var srcBuf strings.Builder
	for i, s := range sources {
		if i >= 15 {
			break
		}
		srcBuf.WriteString(fmt.Sprintf("[%d] %s\n%s\n\n", i+1, s.Metadata["title"], truncate(s.Content, 300)))
	}

	langNote := ""
	if g.cfg.Locale == "ru" {
		langNote = "Write in Russian."
	}

	style := g.cfg.Style
	if style == "" {
		style = "informative"
	}

	audience := g.cfg.Audience
	if audience == "" {
		audience = "general"
	}

	return fmt.Sprintf(`Create a well-structured article from this research.

Topic: %s

Research findings:
%s

Sources:
%s

%s

Style: %s
Target audience: %s

Generate the article in this exact format:

TITLE: [compelling title]
SUBTITLE: [brief subtitle]

SECTION: Introduction
[2-3 paragraphs introducing the topic]

SECTION: [Topic Name 1]
[detailed content with citations [1], [2], etc.]

SECTION: [Topic Name 2]
[detailed content with citations]

SECTION: [Topic Name 3]
[detailed content with citations]

SECTION: Conclusion
[summary and key takeaways]

SECTION: Key Points
- [bullet point 1]
- [bullet point 2]
- [bullet point 3]

Requirements:
- Use citations [1], [2], etc. throughout
- Make it comprehensive but readable
- Include specific facts and data
- Keep sections focused and well-organized`, query, truncate(answer, 2000), srcBuf.String(), langNote, style, audience)
}

// parseStructure walks the LLM output line by line, splitting on the
// TITLE:/SUBTITLE:/SECTION: markers emitted by buildStructurePrompt.
func (g *PageGenerator) parseStructure(text string) *Page {
	page := &Page{
		Sections: make([]PageSection, 0),
	}

	var cur *PageSection
	var buf strings.Builder
	order := 0

	// flush finalizes the in-progress section, if any.
	flush := func() {
		if cur == nil {
			return
		}
		cur.Content = strings.TrimSpace(buf.String())
		page.Sections = append(page.Sections, *cur)
		buf.Reset()
	}

	for _, line := range strings.Split(text, "\n") {
		line = strings.TrimSpace(line)

		switch {
		case strings.HasPrefix(line, "TITLE:"):
			page.Title = strings.TrimSpace(strings.TrimPrefix(line, "TITLE:"))
		case strings.HasPrefix(line, "SUBTITLE:"):
			page.Subtitle = strings.TrimSpace(strings.TrimPrefix(line, "SUBTITLE:"))
		case strings.HasPrefix(line, "SECTION:"):
			flush()
			order++
			cur = &PageSection{
				ID:    uuid.New().String(),
				Type:  "text",
				Title: strings.TrimSpace(strings.TrimPrefix(line, "SECTION:")),
				Order: order,
			}
		default:
			if cur != nil {
				buf.WriteString(line)
				buf.WriteString("\n")
			}
		}
	}

	flush()

	return page
}

// ExportToMarkdown renders the page as a Markdown document with a trailing
// numbered sources list.
func (g *PageGenerator) ExportToMarkdown(page *Page) string {
	var md strings.Builder

	md.WriteString("# " + page.Title + "\n\n")
	if page.Subtitle != "" {
		md.WriteString("*" +
page.Subtitle + "*\n\n") + } + + for _, section := range page.Sections { + md.WriteString("## " + section.Title + "\n\n") + md.WriteString(section.Content + "\n\n") + } + + md.WriteString("---\n\n## Sources\n\n") + for _, src := range page.Sources { + md.WriteString(fmt.Sprintf("%d. [%s](%s)\n", src.Index, src.Title, src.URL)) + } + + return md.String() +} + +func (g *PageGenerator) ExportToHTML(page *Page) string { + var html strings.Builder + + html.WriteString("\n\n\n") + html.WriteString(fmt.Sprintf("%s\n", page.Title)) + html.WriteString("\n\n\n") + + html.WriteString(fmt.Sprintf("

%s

\n", page.Title)) + if page.Subtitle != "" { + html.WriteString(fmt.Sprintf("

%s

\n", page.Subtitle)) + } + + for _, section := range page.Sections { + html.WriteString(fmt.Sprintf("

%s

\n", section.Title)) + paragraphs := strings.Split(section.Content, "\n\n") + for _, p := range paragraphs { + p = strings.TrimSpace(p) + if p != "" { + if strings.HasPrefix(p, "- ") { + html.WriteString("
    \n") + for _, item := range strings.Split(p, "\n") { + item = strings.TrimPrefix(item, "- ") + html.WriteString(fmt.Sprintf("
  • %s
  • \n", item)) + } + html.WriteString("
\n") + } else { + html.WriteString(fmt.Sprintf("

%s

\n", p)) + } + } + } + } + + html.WriteString("
\n

Sources

\n
    \n") + for _, src := range page.Sources { + html.WriteString(fmt.Sprintf("
  1. %s (%s)
  2. \n", src.URL, src.Title, src.Domain)) + } + html.WriteString("
\n
\n") + + html.WriteString("\n") + + return html.String() +} + +func (g *PageGenerator) ToJSON(page *Page) (string, error) { + data, err := json.MarshalIndent(page, "", " ") + if err != nil { + return "", err + } + return string(data), nil +} + +func truncate(s string, maxLen int) string { + if len(s) <= maxLen { + return s + } + return s[:maxLen] + "..." +} + +func extractDomain(url string) string { + url = strings.TrimPrefix(url, "https://") + url = strings.TrimPrefix(url, "http://") + url = strings.TrimPrefix(url, "www.") + if idx := strings.Index(url, "/"); idx > 0 { + return url[:idx] + } + return url +} diff --git a/backend/internal/podcast/generator.go b/backend/internal/podcast/generator.go new file mode 100644 index 0000000..7fa250e --- /dev/null +++ b/backend/internal/podcast/generator.go @@ -0,0 +1,507 @@ +package podcast + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "strings" + "time" + + "github.com/gooseek/backend/internal/llm" + "github.com/google/uuid" +) + +type PodcastType string + +const ( + PodcastDaily PodcastType = "daily" + PodcastWeekly PodcastType = "weekly" + PodcastTopicDeep PodcastType = "topic_deep" + PodcastBreaking PodcastType = "breaking" +) + +type VoiceStyle string + +const ( + VoiceNeutral VoiceStyle = "neutral" + VoiceEnthusiastic VoiceStyle = "enthusiastic" + VoiceProfessional VoiceStyle = "professional" + VoiceCasual VoiceStyle = "casual" + VoiceStorytelling VoiceStyle = "storytelling" +) + +type Podcast struct { + ID string `json:"id"` + Title string `json:"title"` + Description string `json:"description"` + Type PodcastType `json:"type"` + Date time.Time `json:"date"` + Duration int `json:"durationSeconds"` + AudioURL string `json:"audioUrl,omitempty"` + Transcript string `json:"transcript"` + Segments []PodcastSegment `json:"segments"` + Topics []string `json:"topics"` + Sources []Source `json:"sources"` + Thumbnail string `json:"thumbnail,omitempty"` + Status PodcastStatus `json:"status"` + GeneratedAt 
time.Time `json:"generatedAt"` + PublishedAt *time.Time `json:"publishedAt,omitempty"` + Locale string `json:"locale"` + VoiceConfig VoiceConfig `json:"voiceConfig"` +} + +type PodcastStatus string + +const ( + StatusDraft PodcastStatus = "draft" + StatusGenerating PodcastStatus = "generating" + StatusReady PodcastStatus = "ready" + StatusPublished PodcastStatus = "published" + StatusFailed PodcastStatus = "failed" +) + +type PodcastSegment struct { + ID string `json:"id"` + Type string `json:"type"` + Title string `json:"title"` + Content string `json:"content"` + Duration int `json:"durationSeconds"` + StartTime int `json:"startTime"` + EndTime int `json:"endTime"` + Sources []Source `json:"sources,omitempty"` + Highlights []string `json:"highlights,omitempty"` +} + +type Source struct { + Title string `json:"title"` + URL string `json:"url"` + Publisher string `json:"publisher"` + Date string `json:"date,omitempty"` +} + +type VoiceConfig struct { + Provider string `json:"provider"` + VoiceID string `json:"voiceId"` + Style VoiceStyle `json:"style"` + Speed float64 `json:"speed"` + Pitch float64 `json:"pitch"` + Language string `json:"language"` +} + +type PodcastGenerator struct { + llm llm.Client + ttsClient TTSClient + httpClient *http.Client + config GeneratorConfig +} + +type GeneratorConfig struct { + DefaultDuration int + MaxDuration int + DefaultVoice VoiceConfig + OutputDir string +} + +type TTSClient interface { + GenerateSpeech(ctx context.Context, text string, config VoiceConfig) ([]byte, error) +} + +func NewPodcastGenerator(llmClient llm.Client, ttsClient TTSClient, cfg GeneratorConfig) *PodcastGenerator { + if cfg.DefaultDuration == 0 { + cfg.DefaultDuration = 300 + } + if cfg.MaxDuration == 0 { + cfg.MaxDuration = 1800 + } + if cfg.DefaultVoice.Provider == "" { + cfg.DefaultVoice = VoiceConfig{ + Provider: "elevenlabs", + VoiceID: "21m00Tcm4TlvDq8ikWAM", + Style: VoiceNeutral, + Speed: 1.0, + Pitch: 1.0, + Language: "ru", + } + } + + return 
&PodcastGenerator{ + llm: llmClient, + ttsClient: ttsClient, + httpClient: &http.Client{Timeout: 60 * time.Second}, + config: cfg, + } +} + +type GenerateOptions struct { + Type PodcastType + Topics []string + NewsItems []NewsItem + Date time.Time + Duration int + Locale string + VoiceConfig *VoiceConfig + IncludeIntro bool + IncludeOutro bool + PersonalizeFor string +} + +type NewsItem struct { + Title string `json:"title"` + Summary string `json:"summary"` + URL string `json:"url"` + Source string `json:"source"` + PublishedAt string `json:"publishedAt"` + Topics []string `json:"topics"` + Importance int `json:"importance"` +} + +func (g *PodcastGenerator) GenerateDailyPodcast(ctx context.Context, opts GenerateOptions) (*Podcast, error) { + if opts.Date.IsZero() { + opts.Date = time.Now() + } + if opts.Duration == 0 { + opts.Duration = g.config.DefaultDuration + } + if opts.Locale == "" { + opts.Locale = "ru" + } + + script, err := g.generateScript(ctx, opts) + if err != nil { + return nil, fmt.Errorf("failed to generate script: %w", err) + } + + podcast := &Podcast{ + ID: uuid.New().String(), + Title: script.Title, + Description: script.Description, + Type: opts.Type, + Date: opts.Date, + Duration: opts.Duration, + Transcript: script.FullText, + Segments: script.Segments, + Topics: opts.Topics, + Sources: script.Sources, + Status: StatusDraft, + GeneratedAt: time.Now(), + Locale: opts.Locale, + VoiceConfig: g.config.DefaultVoice, + } + + if opts.VoiceConfig != nil { + podcast.VoiceConfig = *opts.VoiceConfig + } + + return podcast, nil +} + +type PodcastScript struct { + Title string + Description string + FullText string + Segments []PodcastSegment + Sources []Source +} + +func (g *PodcastGenerator) generateScript(ctx context.Context, opts GenerateOptions) (*PodcastScript, error) { + locale := opts.Locale + langInstruction := "" + if locale == "ru" { + langInstruction = "Generate the entire script in Russian language. Use natural Russian speech patterns." 
+ } + + newsJSON, _ := json.Marshal(opts.NewsItems) + + prompt := fmt.Sprintf(`Create a podcast script for a daily news digest. + +Date: %s +Duration target: %d seconds (approximately %d minutes) +Topics: %v +%s + +News items to cover: +%s + +Create an engaging podcast script with these requirements: +1. Start with a catchy introduction greeting the audience +2. Cover the most important news first +3. Transition smoothly between stories +4. Add brief analysis or context where appropriate +5. End with a summary and sign-off + +The script should sound natural when read aloud - use conversational language, not formal news anchor style. + +Respond in JSON format: +{ + "title": "Podcast title for this episode", + "description": "Brief episode description", + "segments": [ + { + "type": "intro|news|analysis|transition|outro", + "title": "Segment title", + "content": "Full text to be spoken", + "highlights": ["Key point 1", "Key point 2"], + "sources": [{"title": "Source title", "url": "url", "publisher": "publisher"}] + } + ] +}`, opts.Date.Format("2006-01-02"), opts.Duration, opts.Duration/60, opts.Topics, langInstruction, string(newsJSON)) + + result, err := g.llm.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + if err != nil { + return nil, err + } + + jsonStr := extractJSON(result) + + var parsed struct { + Title string `json:"title"` + Description string `json:"description"` + Segments []struct { + Type string `json:"type"` + Title string `json:"title"` + Content string `json:"content"` + Highlights []string `json:"highlights"` + Sources []struct { + Title string `json:"title"` + URL string `json:"url"` + Publisher string `json:"publisher"` + } `json:"sources"` + } `json:"segments"` + } + + if err := json.Unmarshal([]byte(jsonStr), &parsed); err != nil { + return g.generateDefaultScript(opts) + } + + script := &PodcastScript{ + Title: parsed.Title, + Description: parsed.Description, + Segments: 
make([]PodcastSegment, 0), + Sources: make([]Source, 0), + } + + var fullTextBuilder strings.Builder + currentTime := 0 + avgWordsPerSecond := 2.5 + + for i, seg := range parsed.Segments { + wordCount := len(strings.Fields(seg.Content)) + segDuration := int(float64(wordCount) / avgWordsPerSecond) + if segDuration < 10 { + segDuration = 10 + } + + segment := PodcastSegment{ + ID: uuid.New().String(), + Type: seg.Type, + Title: seg.Title, + Content: seg.Content, + Duration: segDuration, + StartTime: currentTime, + EndTime: currentTime + segDuration, + Highlights: seg.Highlights, + } + + for _, src := range seg.Sources { + source := Source{ + Title: src.Title, + URL: src.URL, + Publisher: src.Publisher, + } + segment.Sources = append(segment.Sources, source) + script.Sources = append(script.Sources, source) + } + + script.Segments = append(script.Segments, segment) + + fullTextBuilder.WriteString(seg.Content) + if i < len(parsed.Segments)-1 { + fullTextBuilder.WriteString("\n\n") + } + + currentTime += segDuration + } + + script.FullText = fullTextBuilder.String() + + return script, nil +} + +func (g *PodcastGenerator) generateDefaultScript(opts GenerateOptions) (*PodcastScript, error) { + date := opts.Date.Format("2 January 2006") + + intro := fmt.Sprintf("Добрый день! С вами GooSeek Daily — ваш ежедневный подкаст с главными новостями. Сегодня %s, и вот что происходит в мире.", date) + + var newsContent strings.Builder + for i, news := range opts.NewsItems { + if i > 0 { + newsContent.WriteString("\n\n") + } + newsContent.WriteString(fmt.Sprintf("%s. %s", news.Title, news.Summary)) + } + + outro := "На этом всё на сегодня. Спасибо, что слушаете GooSeek Daily! Подписывайтесь на наш подкаст и до встречи завтра." 
+ + return &PodcastScript{ + Title: fmt.Sprintf("GooSeek Daily — %s", date), + Description: "Ежедневный подкаст с главными новостями", + FullText: fmt.Sprintf("%s\n\n%s\n\n%s", intro, newsContent.String(), outro), + Segments: []PodcastSegment{ + {ID: uuid.New().String(), Type: "intro", Title: "Вступление", Content: intro, Duration: 15}, + {ID: uuid.New().String(), Type: "news", Title: "Новости", Content: newsContent.String(), Duration: opts.Duration - 30}, + {ID: uuid.New().String(), Type: "outro", Title: "Завершение", Content: outro, Duration: 15}, + }, + }, nil +} + +func (g *PodcastGenerator) GenerateAudio(ctx context.Context, podcast *Podcast) ([]byte, error) { + if g.ttsClient == nil { + return nil, fmt.Errorf("TTS client not configured") + } + + podcast.Status = StatusGenerating + + audioData, err := g.ttsClient.GenerateSpeech(ctx, podcast.Transcript, podcast.VoiceConfig) + if err != nil { + podcast.Status = StatusFailed + return nil, fmt.Errorf("failed to generate audio: %w", err) + } + + podcast.Status = StatusReady + + return audioData, nil +} + +func (g *PodcastGenerator) GenerateWeeklySummary(ctx context.Context, weeklyNews []NewsItem, locale string) (*Podcast, error) { + return g.GenerateDailyPodcast(ctx, GenerateOptions{ + Type: PodcastWeekly, + NewsItems: weeklyNews, + Duration: 900, + Locale: locale, + IncludeIntro: true, + IncludeOutro: true, + }) +} + +func (g *PodcastGenerator) GenerateTopicDeepDive(ctx context.Context, topic string, articles []NewsItem, locale string) (*Podcast, error) { + return g.GenerateDailyPodcast(ctx, GenerateOptions{ + Type: PodcastTopicDeep, + Topics: []string{topic}, + NewsItems: articles, + Duration: 600, + Locale: locale, + IncludeIntro: true, + IncludeOutro: true, + }) +} + +func extractJSON(text string) string { + start := strings.Index(text, "{") + if start == -1 { + return "{}" + } + + depth := 0 + for i := start; i < len(text); i++ { + if text[i] == '{' { + depth++ + } else if text[i] == '}' { + depth-- + if depth 
== 0 { + return text[start : i+1] + } + } + } + + return "{}" +} + +func (p *Podcast) ToJSON() ([]byte, error) { + return json.Marshal(p) +} + +func ParsePodcast(data []byte) (*Podcast, error) { + var podcast Podcast + if err := json.Unmarshal(data, &podcast); err != nil { + return nil, err + } + return &podcast, nil +} + +type ElevenLabsTTS struct { + apiKey string + httpClient *http.Client + baseURL string +} + +func NewElevenLabsTTS(apiKey string) *ElevenLabsTTS { + return &ElevenLabsTTS{ + apiKey: apiKey, + httpClient: &http.Client{Timeout: 120 * time.Second}, + baseURL: "https://api.elevenlabs.io/v1", + } +} + +func (t *ElevenLabsTTS) GenerateSpeech(ctx context.Context, text string, config VoiceConfig) ([]byte, error) { + voiceID := config.VoiceID + if voiceID == "" { + voiceID = "21m00Tcm4TlvDq8ikWAM" + } + + url := fmt.Sprintf("%s/text-to-speech/%s", t.baseURL, voiceID) + + body := map[string]interface{}{ + "text": text, + "model_id": "eleven_multilingual_v2", + "voice_settings": map[string]interface{}{ + "stability": 0.5, + "similarity_boost": 0.75, + "style": 0.5, + "use_speaker_boost": true, + }, + } + + bodyJSON, _ := json.Marshal(body) + + req, err := http.NewRequestWithContext(ctx, "POST", url, strings.NewReader(string(bodyJSON))) + if err != nil { + return nil, err + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("xi-api-key", t.apiKey) + req.Header.Set("Accept", "audio/mpeg") + + resp, err := t.httpClient.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("ElevenLabs API error: %d", resp.StatusCode) + } + + var audioData []byte + buf := make([]byte, 32*1024) + for { + n, err := resp.Body.Read(buf) + if n > 0 { + audioData = append(audioData, buf[:n]...) 
+ } + if err != nil { + break + } + } + + return audioData, nil +} + +type DummyTTS struct{} + +func (t *DummyTTS) GenerateSpeech(ctx context.Context, text string, config VoiceConfig) ([]byte, error) { + return []byte{}, nil +} diff --git a/backend/internal/prompts/classifier.go b/backend/internal/prompts/classifier.go new file mode 100644 index 0000000..5bf83e1 --- /dev/null +++ b/backend/internal/prompts/classifier.go @@ -0,0 +1,50 @@ +package prompts + +import "strings" + +func GetClassifierPrompt(locale, detectedLang string) string { + langInstruction := "Respond in the same language as the user's query." + if detectedLang == "ru" { + langInstruction = "The user is writing in Russian. Process accordingly." + } + + return strings.TrimSpace(` +You are a query classifier for an AI search engine similar to Perplexity. + +Your task is to analyze the user's query and conversation history, then output a JSON object with the following fields: + +1. "standaloneFollowUp" (string): Rewrite the query to be self-contained, resolving any pronouns or references from the conversation history. If the query is already standalone, return it as-is. + +2. "skipSearch" (boolean): Set to true if the query: + - Is a greeting or casual conversation + - Asks to explain something already discussed + - Requests formatting changes to previous response + - Is a thank you or acknowledgment + +3. "topics" (array of strings): Key topics or entities mentioned in the query. + +4. "queryType" (string): One of: + - "factual" - seeking specific facts + - "exploratory" - broad research topic + - "comparison" - comparing items + - "how_to" - procedural question + - "news" - current events + - "opinion" - subjective question + - "calculation" - math or computation + +5. "engines" (array of strings): Suggested search engines based on query type. + +` + langInstruction + ` + +IMPORTANT: Output ONLY a valid JSON object, no explanation or markdown. 
+ +Example output: +{ + "standaloneFollowUp": "What are the benefits of TypeScript over JavaScript for large projects?", + "skipSearch": false, + "topics": ["TypeScript", "JavaScript", "programming"], + "queryType": "comparison", + "engines": ["google", "duckduckgo"] +} +`) +} diff --git a/backend/internal/prompts/researcher.go b/backend/internal/prompts/researcher.go new file mode 100644 index 0000000..dedb05b --- /dev/null +++ b/backend/internal/prompts/researcher.go @@ -0,0 +1,127 @@ +package prompts + +import ( + "fmt" + "strings" +) + +type ResearcherConfig struct { + AvailableActions string + Mode string + Iteration int + MaxIterations int + Locale string + DetectedLanguage string + IsArticleSummary bool +} + +func GetResearcherPrompt(cfg ResearcherConfig) string { + var sb strings.Builder + + sb.WriteString("You are a research agent for GooSeek, an AI search engine.\n\n") + + sb.WriteString("## Your Role\n\n") + sb.WriteString("You gather information to answer user queries by:\n") + sb.WriteString("1. Searching the web for relevant information\n") + sb.WriteString("2. Scraping specific pages for detailed content\n") + sb.WriteString("3. Deciding when you have enough information\n\n") + + sb.WriteString("## Available Actions\n\n") + sb.WriteString(cfg.AvailableActions) + sb.WriteString("\n\n") + + sb.WriteString("## Progress\n\n") + sb.WriteString(fmt.Sprintf("Current iteration: %d / %d\n\n", cfg.Iteration+1, cfg.MaxIterations)) + + switch cfg.Mode { + case "speed": + sb.WriteString("## Speed Mode\n\n") + sb.WriteString("- Perform ONE search and call done\n") + sb.WriteString("- Do NOT scrape pages\n") + sb.WriteString("- Use snippets from search results\n\n") + case "balanced": + sb.WriteString("## Balanced Mode\n\n") + sb.WriteString("- Perform 1-3 searches\n") + sb.WriteString("- Scrape top 3-5 relevant pages\n") + sb.WriteString("- Balance depth vs. 
speed\n\n") + case "quality": + sb.WriteString("## Quality Mode\n\n") + sb.WriteString("- Perform multiple searches with different queries\n") + sb.WriteString("- Scrape 10-15 relevant pages\n") + sb.WriteString("- Verify information across sources\n") + sb.WriteString("- Be thorough and comprehensive\n\n") + } + + if cfg.IsArticleSummary { + sb.WriteString("## Article Summary Task (Perplexity Discover-style)\n\n") + sb.WriteString("The user requested an article summary (Summary: ). This is a multi-source digest request.\n\n") + sb.WriteString("**Your goals:**\n") + sb.WriteString("1. The main article is already pre-scraped and will be in context\n") + sb.WriteString("2. Search for 3-5 related sources that provide context\n") + sb.WriteString("3. Look for: related news, background, analysis, reactions\n") + sb.WriteString("4. Use news categories: `news`, `science` engines\n") + sb.WriteString("5. Max 5 additional sources (article itself is [1])\n\n") + sb.WriteString("**Search strategy:**\n") + sb.WriteString("- Extract key entities/topics from article title\n") + sb.WriteString("- Search for recent news on those topics\n") + sb.WriteString("- Find expert opinions or analysis\n") + sb.WriteString("- Look for official statements if relevant\n\n") + } + + if cfg.DetectedLanguage == "ru" { + sb.WriteString("## Language\n\n") + sb.WriteString("Пользователь пишет на русском. Формулируй поисковые запросы на русском языке.\n\n") + } + + sb.WriteString("## Instructions\n\n") + sb.WriteString("1. Analyze the user's query and conversation history\n") + sb.WriteString("2. Plan what information you need to gather\n") + sb.WriteString("3. Execute actions to gather that information\n") + sb.WriteString("4. 
Call 'done' when you have sufficient information\n\n") + + sb.WriteString("## Important Rules\n\n") + sb.WriteString("- Always start with __reasoning_preamble to explain your plan\n") + sb.WriteString("- Formulate specific, targeted search queries\n") + sb.WriteString("- Avoid redundant searches\n") + sb.WriteString("- Call 'done' when information is sufficient\n") + sb.WriteString("- Don't exceed the iteration limit\n\n") + + sb.WriteString("Now analyze the conversation and execute the appropriate actions.") + + return sb.String() +} + +func GetAvailableActionsDescription() string { + return strings.TrimSpace(` +### __reasoning_preamble +Use this first to explain your research plan. +Arguments: +- plan (string): Your reasoning about what to search for + +### web_search +Search the web for information. +Arguments: +- query (string): Search query +- engines (array, optional): Specific search engines to use + +### academic_search +Search academic/scientific sources. +Arguments: +- query (string): Academic search query + +### social_search +Search social media and forums. +Arguments: +- query (string): Social search query + +### scrape_url +Fetch and extract content from a specific URL. +Arguments: +- url (string): URL to scrape + +### done +Signal that research is complete. 
+Arguments: +- reason (string): Why research is sufficient +`) +} diff --git a/backend/internal/prompts/writer.go b/backend/internal/prompts/writer.go new file mode 100644 index 0000000..960bde1 --- /dev/null +++ b/backend/internal/prompts/writer.go @@ -0,0 +1,146 @@ +package prompts + +import ( + "fmt" + "strings" +) + +type WriterConfig struct { + Context string + SystemInstructions string + Mode string + Locale string + MemoryContext string + AnswerMode string + ResponsePrefs *ResponsePrefs + DetectedLanguage string + IsArticleSummary bool + LearningMode bool +} + +type ResponsePrefs struct { + Format string + Length string + Tone string +} + +func GetWriterPrompt(cfg WriterConfig) string { + var sb strings.Builder + + sb.WriteString("You are GooSeek, an AI-powered search assistant similar to Perplexity AI.\n\n") + + if cfg.DetectedLanguage == "ru" { + sb.WriteString("ВАЖНО: Пользователь пишет на русском языке. Отвечай ТОЛЬКО на русском языке.\n\n") + } + + sb.WriteString("## Core Instructions\n\n") + sb.WriteString("1. **Always cite sources** using [number] format, e.g., [1], [2]. Citations must reference the search results provided.\n") + sb.WriteString("2. **Be comprehensive** but concise. Provide thorough answers with key information.\n") + sb.WriteString("3. **Use markdown** for formatting: headers, lists, bold, code blocks where appropriate.\n") + sb.WriteString("4. **Be objective** and factual. Present information neutrally.\n") + sb.WriteString("5. **Acknowledge limitations** if search results are insufficient.\n\n") + + if cfg.IsArticleSummary { + sb.WriteString("## Article Summary Mode (Perplexity-style Digest)\n\n") + sb.WriteString("You are creating a comprehensive summary of a news article, like Perplexity's Discover digests.\n\n") + sb.WriteString("**Structure your response as:**\n") + sb.WriteString("1. **Headline summary** (1-2 sentences capturing the essence)\n") + sb.WriteString("2. 
**Key points** with citations [1], [2], etc.\n") + sb.WriteString("3. **Context and background** from related sources\n") + sb.WriteString("4. **Analysis/implications** if relevant\n") + sb.WriteString("5. **Related questions** the reader might have (as > quoted lines)\n\n") + sb.WriteString("**Rules:**\n") + sb.WriteString("- Always cite sources [1], [2], etc.\n") + sb.WriteString("- First source [1] is usually the main article\n") + sb.WriteString("- Add context from other sources [2], [3], etc.\n") + sb.WriteString("- End with 2-3 follow-up questions prefixed with >\n") + sb.WriteString("- Write in the user's language (Russian if they use Russian)\n\n") + } + + switch cfg.Mode { + case "speed": + sb.WriteString("## Speed Mode\n\n") + sb.WriteString("Provide a quick, focused answer. Be concise (2-3 paragraphs max).\n") + sb.WriteString("Prioritize the most relevant information.\n\n") + case "balanced": + sb.WriteString("## Balanced Mode\n\n") + sb.WriteString("Provide a well-rounded answer with moderate detail.\n") + sb.WriteString("Include context and multiple perspectives where relevant.\n\n") + case "quality": + sb.WriteString("## Quality Mode\n\n") + sb.WriteString("Provide a comprehensive, in-depth analysis.\n") + sb.WriteString("Include detailed explanations, examples, and nuances.\n") + sb.WriteString("Cover multiple aspects of the topic.\n\n") + } + + if cfg.AnswerMode != "" && cfg.AnswerMode != "standard" { + sb.WriteString(fmt.Sprintf("## Answer Mode: %s\n\n", cfg.AnswerMode)) + sb.WriteString(getAnswerModeInstructions(cfg.AnswerMode)) + } + + if cfg.ResponsePrefs != nil { + sb.WriteString("## Response Preferences\n\n") + if cfg.ResponsePrefs.Format != "" { + sb.WriteString(fmt.Sprintf("- Format: %s\n", cfg.ResponsePrefs.Format)) + } + if cfg.ResponsePrefs.Length != "" { + sb.WriteString(fmt.Sprintf("- Length: %s\n", cfg.ResponsePrefs.Length)) + } + if cfg.ResponsePrefs.Tone != "" { + sb.WriteString(fmt.Sprintf("- Tone: %s\n", cfg.ResponsePrefs.Tone)) + 
} + sb.WriteString("\n") + } + + if cfg.MemoryContext != "" { + sb.WriteString("## User Context (from memory)\n\n") + sb.WriteString(cfg.MemoryContext) + sb.WriteString("\n\n") + } + + if cfg.SystemInstructions != "" && cfg.SystemInstructions != "None" { + sb.WriteString("## Custom Instructions\n\n") + sb.WriteString(cfg.SystemInstructions) + sb.WriteString("\n\n") + } + + if cfg.LearningMode { + sb.WriteString("## Learning Mode\n\n") + sb.WriteString("The user is in learning mode. Explain concepts thoroughly.\n") + sb.WriteString("Use analogies, examples, and break down complex topics.\n") + sb.WriteString("Ask clarifying questions if the topic is ambiguous.\n\n") + } + + sb.WriteString("## Search Results\n\n") + sb.WriteString(cfg.Context) + sb.WriteString("\n\n") + + sb.WriteString("## Citation Rules\n\n") + sb.WriteString("- Use [1], [2], etc. to cite sources from the search results\n") + sb.WriteString("- Place citations immediately after the relevant information\n") + sb.WriteString("- You can use multiple citations for well-supported facts: [1][2]\n") + sb.WriteString("- Do not cite widgets or generated content\n") + sb.WriteString("- If no relevant source exists, don't make up citations\n\n") + + sb.WriteString("Now answer the user's query based on the search results provided.") + + return sb.String() +} + +func getAnswerModeInstructions(mode string) string { + instructions := map[string]string{ + "academic": "Focus on scholarly sources, research papers, and academic perspectives. Use formal language and cite peer-reviewed sources when available.\n\n", + "writing": "Help with writing tasks. Provide suggestions for structure, style, and content. Be creative and helpful.\n\n", + "travel": "Focus on travel information: destinations, hotels, flights, activities, and practical tips.\n\n", + "finance": "Provide financial information carefully. Include disclaimers about not being financial advice. 
Focus on factual data.\n\n", + "health": "Provide health information from reliable sources. Always recommend consulting healthcare professionals. Be cautious and accurate.\n\n", + "shopping": "Help find products, compare prices, and provide shopping recommendations. Include product features and user reviews.\n\n", + "news": "Focus on current events and recent news. Provide multiple perspectives and fact-check information.\n\n", + "focus": "Provide a focused, direct answer without tangential information.\n\n", + } + + if inst, ok := instructions[mode]; ok { + return inst + } + return "" +} diff --git a/backend/internal/search/media.go b/backend/internal/search/media.go new file mode 100644 index 0000000..549a2f0 --- /dev/null +++ b/backend/internal/search/media.go @@ -0,0 +1,215 @@ +package search + +import ( + "context" + "regexp" + "strconv" + "strings" + + "github.com/gooseek/backend/internal/types" +) + +type MediaSearchOptions struct { + MaxImages int + MaxVideos int +} + +type MediaSearchResult struct { + Images []types.ImageData `json:"images"` + Videos []types.VideoData `json:"videos"` +} + +func (c *SearXNGClient) SearchMedia(ctx context.Context, query string, opts *MediaSearchOptions) (*MediaSearchResult, error) { + if opts == nil { + opts = &MediaSearchOptions{MaxImages: 8, MaxVideos: 6} + } + + result := &MediaSearchResult{ + Images: make([]types.ImageData, 0), + Videos: make([]types.VideoData, 0), + } + + imageCh := make(chan []types.ImageData, 1) + videoCh := make(chan []types.VideoData, 1) + errCh := make(chan error, 2) + + go func() { + images, err := c.searchImages(ctx, query, opts.MaxImages) + if err != nil { + errCh <- err + imageCh <- nil + return + } + errCh <- nil + imageCh <- images + }() + + go func() { + videos, err := c.searchVideos(ctx, query, opts.MaxVideos) + if err != nil { + errCh <- err + videoCh <- nil + return + } + errCh <- nil + videoCh <- videos + }() + + <-errCh + <-errCh + result.Images = <-imageCh + result.Videos = <-videoCh + 
+ if result.Images == nil { + result.Images = make([]types.ImageData, 0) + } + if result.Videos == nil { + result.Videos = make([]types.VideoData, 0) + } + + return result, nil +} + +func (c *SearXNGClient) searchImages(ctx context.Context, query string, max int) ([]types.ImageData, error) { + resp, err := c.Search(ctx, query, &SearchOptions{ + Categories: []string{"images"}, + PageNo: 1, + }) + if err != nil { + return nil, err + } + + images := make([]types.ImageData, 0, max) + seen := make(map[string]bool) + + for _, r := range resp.Results { + if len(images) >= max { + break + } + + imgURL := r.ImgSrc + if imgURL == "" { + imgURL = r.ThumbnailSrc + } + if imgURL == "" { + imgURL = r.Thumbnail + } + if imgURL == "" { + continue + } + + if seen[imgURL] { + continue + } + seen[imgURL] = true + + images = append(images, types.ImageData{ + URL: imgURL, + Title: r.Title, + Source: extractDomain(r.URL), + SourceURL: r.URL, + }) + } + + return images, nil +} + +func (c *SearXNGClient) searchVideos(ctx context.Context, query string, max int) ([]types.VideoData, error) { + resp, err := c.Search(ctx, query, &SearchOptions{ + Categories: []string{"videos"}, + PageNo: 1, + }) + if err != nil { + return nil, err + } + + videos := make([]types.VideoData, 0, max) + seen := make(map[string]bool) + + for _, r := range resp.Results { + if len(videos) >= max { + break + } + + if seen[r.URL] { + continue + } + seen[r.URL] = true + + platform := detectVideoPlatform(r.URL) + + video := types.VideoData{ + Title: r.Title, + URL: r.URL, + Thumbnail: r.Thumbnail, + Duration: toInt(r.Duration), + Views: toInt(r.Views), + Author: r.Author, + Platform: platform, + EmbedURL: r.IframeSrc, + } + + videos = append(videos, video) + } + + return videos, nil +} + +var ( + youtubePattern = regexp.MustCompile(`youtube\.com|youtu\.be`) + rutubePattern = regexp.MustCompile(`rutube\.ru`) + vkPattern = regexp.MustCompile(`vk\.com`) + dzenPattern = regexp.MustCompile(`dzen\.ru`) +) + +func 
detectVideoPlatform(url string) string { + urlLower := strings.ToLower(url) + + if youtubePattern.MatchString(urlLower) { + return "youtube" + } + if rutubePattern.MatchString(urlLower) { + return "rutube" + } + if vkPattern.MatchString(urlLower) { + return "vk" + } + if dzenPattern.MatchString(urlLower) { + return "dzen" + } + + return "other" +} + +func extractDomain(rawURL string) string { + rawURL = strings.TrimPrefix(rawURL, "https://") + rawURL = strings.TrimPrefix(rawURL, "http://") + rawURL = strings.TrimPrefix(rawURL, "www.") + + if idx := strings.Index(rawURL, "/"); idx > 0 { + rawURL = rawURL[:idx] + } + + return rawURL +} + +func toInt(v interface{}) int { + if v == nil { + return 0 + } + switch val := v.(type) { + case int: + return val + case int64: + return int(val) + case float64: + return int(val) + case string: + if i, err := strconv.Atoi(val); err == nil { + return i + } + return 0 + default: + return 0 + } +} diff --git a/backend/internal/search/reranker.go b/backend/internal/search/reranker.go new file mode 100644 index 0000000..babe028 --- /dev/null +++ b/backend/internal/search/reranker.go @@ -0,0 +1,163 @@ +package search + +import ( + "math" + "sort" + "strings" + "unicode" + + "github.com/gooseek/backend/internal/types" +) + +type RankedItem struct { + Chunk types.Chunk + Score float64 +} + +func RerankBM25(chunks []types.Chunk, query string, topK int) []types.Chunk { + if len(chunks) == 0 { + return chunks + } + + queryTerms := tokenize(query) + if len(queryTerms) == 0 { + return chunks + } + + df := make(map[string]int) + for _, chunk := range chunks { + seen := make(map[string]bool) + terms := tokenize(chunk.Content + " " + chunk.Metadata["title"]) + for _, term := range terms { + if !seen[term] { + df[term]++ + seen[term] = true + } + } + } + + avgDocLen := 0.0 + for _, chunk := range chunks { + avgDocLen += float64(len(tokenize(chunk.Content))) + } + avgDocLen /= float64(len(chunks)) + + k1 := 1.5 + b := 0.75 + n := 
float64(len(chunks)) + + ranked := make([]RankedItem, len(chunks)) + for i, chunk := range chunks { + docTerms := tokenize(chunk.Content + " " + chunk.Metadata["title"]) + docLen := float64(len(docTerms)) + + tf := make(map[string]int) + for _, term := range docTerms { + tf[term]++ + } + + score := 0.0 + for _, qterm := range queryTerms { + if termFreq, ok := tf[qterm]; ok { + docFreq := float64(df[qterm]) + idf := math.Log((n - docFreq + 0.5) / (docFreq + 0.5)) + if idf < 0 { + idf = 0 + } + + tfNorm := float64(termFreq) * (k1 + 1) / + (float64(termFreq) + k1*(1-b+b*docLen/avgDocLen)) + + score += idf * tfNorm + } + } + + if title, ok := chunk.Metadata["title"]; ok { + titleLower := strings.ToLower(title) + for _, qterm := range queryTerms { + if strings.Contains(titleLower, qterm) { + score += 2.0 + } + } + } + + ranked[i] = RankedItem{Chunk: chunk, Score: score} + } + + sort.Slice(ranked, func(i, j int) bool { + return ranked[i].Score > ranked[j].Score + }) + + if topK > len(ranked) { + topK = len(ranked) + } + + result := make([]types.Chunk, topK) + for i := 0; i < topK; i++ { + result[i] = ranked[i].Chunk + } + + return result +} + +func tokenize(text string) []string { + text = strings.ToLower(text) + + var tokens []string + var current strings.Builder + + for _, r := range text { + if unicode.IsLetter(r) || unicode.IsDigit(r) { + current.WriteRune(r) + } else { + if current.Len() >= 2 { + tokens = append(tokens, current.String()) + } + current.Reset() + } + } + + if current.Len() >= 2 { + tokens = append(tokens, current.String()) + } + + return tokens +} + +func EstimateQueryComplexity(query string) float64 { + terms := tokenize(query) + complexity := float64(len(terms)) / 5.0 + + if strings.Contains(query, "?") { + complexity += 0.2 + } + if strings.Contains(query, " и ") || strings.Contains(query, " или ") { + complexity += 0.3 + } + + if complexity > 1.0 { + complexity = 1.0 + } + return complexity +} + +func ComputeAdaptiveTopK(totalResults int, 
complexity float64, mode string) int { + baseK := 15 + + switch mode { + case "speed": + baseK = 10 + case "balanced": + baseK = 20 + case "quality": + baseK = 30 + } + + adaptiveK := int(float64(baseK) * (1 + complexity*0.5)) + + if adaptiveK > totalResults { + adaptiveK = totalResults + } + + return adaptiveK +} diff --git a/backend/internal/search/searxng.go b/backend/internal/search/searxng.go new file mode 100644 index 0000000..8406afc --- /dev/null +++ b/backend/internal/search/searxng.go @@ -0,0 +1,177 @@ +package search + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + "regexp" + "strings" + "time" + + "github.com/gooseek/backend/internal/types" + "github.com/gooseek/backend/pkg/config" +) + +type SearXNGClient struct { + primaryURL string + fallbackURLs []string + client *http.Client + timeout time.Duration +} + +func NewSearXNGClient(cfg *config.Config) *SearXNGClient { + return &SearXNGClient{ + primaryURL: cfg.SearXNGURL, + fallbackURLs: cfg.SearXNGFallbackURL, + client: &http.Client{Timeout: cfg.SearchTimeout}, + timeout: cfg.SearchTimeout, + } +} + +type SearchOptions struct { + Engines []string + Categories []string + PageNo int + Language string +} + +func (c *SearXNGClient) Search(ctx context.Context, query string, opts *SearchOptions) (*types.SearchResponse, error) { + candidates := c.buildCandidates() + if len(candidates) == 0 { + return nil, fmt.Errorf("no SearXNG URLs configured") + } + + var lastErr error + for _, baseURL := range candidates { + result, err := c.searchWithURL(ctx, baseURL, query, opts) + if err == nil { + return result, nil + } + lastErr = err + } + + return nil, fmt.Errorf("all SearXNG instances failed: %w", lastErr) +} + +func (c *SearXNGClient) buildCandidates() []string { + candidates := make([]string, 0) + + if c.primaryURL != "" { + u := strings.TrimSuffix(c.primaryURL, "/") + if !strings.HasPrefix(u, "http") { + u = "http://" + u + } + candidates = append(candidates, u) + } + + for _, fb := 
range c.fallbackURLs { + u := strings.TrimSpace(fb) + if u == "" { + continue + } + u = strings.TrimSuffix(u, "/") + if !strings.HasPrefix(u, "http") { + u = "https://" + u + } + if !contains(candidates, u) { + candidates = append(candidates, u) + } + } + + return candidates +} + +func (c *SearXNGClient) searchWithURL(ctx context.Context, baseURL, query string, opts *SearchOptions) (*types.SearchResponse, error) { + params := url.Values{} + params.Set("format", "json") + params.Set("q", query) + + if opts != nil { + if len(opts.Engines) > 0 { + params.Set("engines", strings.Join(opts.Engines, ",")) + } + if len(opts.Categories) > 0 { + params.Set("categories", strings.Join(opts.Categories, ",")) + } + if opts.PageNo > 0 { + params.Set("pageno", fmt.Sprintf("%d", opts.PageNo)) + } + if opts.Language != "" { + params.Set("language", opts.Language) + } + } + + reqURL := fmt.Sprintf("%s/search?%s", baseURL, params.Encode()) + + req, err := http.NewRequestWithContext(ctx, "GET", reqURL, nil) + if err != nil { + return nil, err + } + + resp, err := c.client.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("SearXNG returned status %d", resp.StatusCode) + } + + var result struct { + Results []types.SearchResult `json:"results"` + Suggestions []string `json:"suggestions"` + } + + if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { + return nil, err + } + + return &types.SearchResponse{ + Results: result.Results, + Suggestions: result.Suggestions, + }, nil +} + +var ( + productPattern = regexp.MustCompile(`ozon\.ru/product|wildberries\.ru/catalog/\d|aliexpress\.(ru|com)/item|market\.yandex`) + videoPattern = regexp.MustCompile(`rutube\.ru/video|vk\.com/video|vk\.com/clip|youtube\.com/watch|youtu\.be|dzen\.ru/video`) + vkProfilePattern = regexp.MustCompile(`vk\.com/[a-zA-Z0-9_.]+$`) + tgProfilePattern = regexp.MustCompile(`t\.me/[a-zA-Z0-9_]+$`) +) + +func 
CategorizeResult(result *types.SearchResult) types.ContentCategory { + urlLower := strings.ToLower(result.URL) + + if productPattern.MatchString(urlLower) { + return types.CategoryProduct + } + + if videoPattern.MatchString(urlLower) || result.IframeSrc != "" || result.Category == "videos" { + return types.CategoryVideo + } + + if tgProfilePattern.MatchString(urlLower) { + return types.CategoryProfile + } + if vkProfilePattern.MatchString(urlLower) && !videoPattern.MatchString(urlLower) { + return types.CategoryProfile + } + + if result.ImgSrc != "" && result.Category == "images" { + return types.CategoryImage + } + + return types.CategoryArticle +} + +func contains(slice []string, item string) bool { + for _, s := range slice { + if s == item { + return true + } + } + return false +} diff --git a/backend/internal/session/manager.go b/backend/internal/session/manager.go new file mode 100644 index 0000000..d292782 --- /dev/null +++ b/backend/internal/session/manager.go @@ -0,0 +1,183 @@ +package session + +import ( + "encoding/json" + "sync" + + "github.com/gooseek/backend/internal/types" + "github.com/google/uuid" +) + +type EventType string + +const ( + EventData EventType = "data" + EventEnd EventType = "end" + EventError EventType = "error" +) + +type Event struct { + Type EventType `json:"type"` + Data interface{} `json:"data"` +} + +type Subscriber func(event EventType, data interface{}) + +type Session struct { + id string + blocks map[string]*types.Block + subscribers []Subscriber + mu sync.RWMutex + closed bool +} + +func NewSession() *Session { + return &Session{ + id: uuid.New().String(), + blocks: make(map[string]*types.Block), + subscribers: make([]Subscriber, 0), + } +} + +func (s *Session) ID() string { + return s.id +} + +func (s *Session) Subscribe(fn Subscriber) func() { + s.mu.Lock() + s.subscribers = append(s.subscribers, fn) + idx := len(s.subscribers) - 1 + s.mu.Unlock() + + return func() { + s.mu.Lock() + defer s.mu.Unlock() + if idx < 
len(s.subscribers) { + s.subscribers = append(s.subscribers[:idx], s.subscribers[idx+1:]...) + } + } +} + +func (s *Session) Emit(eventType EventType, data interface{}) { + s.mu.RLock() + if s.closed { + s.mu.RUnlock() + return + } + subs := make([]Subscriber, len(s.subscribers)) + copy(subs, s.subscribers) + s.mu.RUnlock() + + for _, sub := range subs { + sub(eventType, data) + } +} + +func (s *Session) EmitBlock(block *types.Block) { + s.mu.Lock() + s.blocks[block.ID] = block + s.mu.Unlock() + + s.Emit(EventData, map[string]interface{}{ + "type": "block", + "block": block, + }) +} + +func (s *Session) UpdateBlock(blockID string, patches []Patch) { + s.mu.Lock() + block, ok := s.blocks[blockID] + if !ok { + s.mu.Unlock() + return + } + + for _, patch := range patches { + applyPatch(block, patch) + } + s.mu.Unlock() + + s.Emit(EventData, map[string]interface{}{ + "type": "updateBlock", + "blockId": blockID, + "patch": patches, + }) +} + +func (s *Session) EmitTextChunk(blockID, chunk string) { + s.Emit(EventData, map[string]interface{}{ + "type": "textChunk", + "blockId": blockID, + "chunk": chunk, + }) +} + +func (s *Session) EmitResearchComplete() { + s.Emit(EventData, map[string]interface{}{ + "type": "researchComplete", + }) +} + +func (s *Session) EmitEnd() { + s.Emit(EventData, map[string]interface{}{ + "type": "messageEnd", + }) + s.Emit(EventEnd, nil) +} + +func (s *Session) EmitError(err error) { + s.Emit(EventData, map[string]interface{}{ + "type": "error", + "data": err.Error(), + }) + s.Emit(EventError, map[string]interface{}{ + "data": err.Error(), + }) +} + +func (s *Session) GetBlock(id string) *types.Block { + s.mu.RLock() + defer s.mu.RUnlock() + return s.blocks[id] +} + +func (s *Session) Close() { + s.mu.Lock() + s.closed = true + s.subscribers = nil + s.mu.Unlock() +} + +func (s *Session) RemoveAllListeners() { + s.mu.Lock() + s.subscribers = nil + s.mu.Unlock() +} + +type Patch struct { + Op string `json:"op"` + Path string `json:"path"` + 
Value interface{} `json:"value"` +} + +func applyPatch(block *types.Block, patch Patch) { + if patch.Op != "replace" { + return + } + + switch patch.Path { + case "/data": + block.Data = patch.Value + case "/data/subSteps": + if rd, ok := block.Data.(types.ResearchData); ok { + if steps, ok := patch.Value.([]types.ResearchSubStep); ok { + rd.SubSteps = steps + block.Data = rd + } + } + } +} + +func MarshalEvent(data interface{}) ([]byte, error) { + return json.Marshal(data) +} diff --git a/backend/internal/types/blocks.go b/backend/internal/types/blocks.go new file mode 100644 index 0000000..7b70afe --- /dev/null +++ b/backend/internal/types/blocks.go @@ -0,0 +1,102 @@ +package types + +type BlockType string + +const ( + BlockTypeText BlockType = "text" + BlockTypeResearch BlockType = "research" + BlockTypeSource BlockType = "source" + BlockTypeWidget BlockType = "widget" + BlockTypeThinking BlockType = "thinking" +) + +type Block struct { + ID string `json:"id"` + Type BlockType `json:"type"` + Data interface{} `json:"data"` +} + +type TextBlock struct { + ID string `json:"id"` + Type string `json:"type"` + Data string `json:"data"` +} + +type ResearchBlock struct { + ID string `json:"id"` + Type string `json:"type"` + Data ResearchData `json:"data"` +} + +type ResearchData struct { + SubSteps []ResearchSubStep `json:"subSteps"` +} + +type ResearchSubStep struct { + ID string `json:"id"` + Type string `json:"type"` + Reasoning string `json:"reasoning,omitempty"` + Searching []string `json:"searching,omitempty"` + Reading []Chunk `json:"reading,omitempty"` +} + +type SourceBlock struct { + ID string `json:"id"` + Type string `json:"type"` + Data []Chunk `json:"data"` +} + +type WidgetBlock struct { + ID string `json:"id"` + Type string `json:"type"` + Data WidgetData `json:"data"` +} + +type WidgetData struct { + WidgetType string `json:"widgetType"` + Params interface{} `json:"params"` +} + +type StreamEvent struct { + Type string `json:"type"` + Block *Block 
`json:"block,omitempty"` + BlockID string `json:"blockId,omitempty"` + Chunk string `json:"chunk,omitempty"` + Patch interface{} `json:"patch,omitempty"` + Data interface{} `json:"data,omitempty"` +} + +func NewTextBlock(id, content string) *Block { + return &Block{ + ID: id, + Type: BlockTypeText, + Data: content, + } +} + +func NewResearchBlock(id string) *Block { + return &Block{ + ID: id, + Type: BlockTypeResearch, + Data: ResearchData{SubSteps: []ResearchSubStep{}}, + } +} + +func NewSourceBlock(id string, chunks []Chunk) *Block { + return &Block{ + ID: id, + Type: BlockTypeSource, + Data: chunks, + } +} + +func NewWidgetBlock(id, widgetType string, params interface{}) *Block { + return &Block{ + ID: id, + Type: BlockTypeWidget, + Data: WidgetData{ + WidgetType: widgetType, + Params: params, + }, + } +} diff --git a/backend/internal/types/chunks.go b/backend/internal/types/chunks.go new file mode 100644 index 0000000..a9a6b34 --- /dev/null +++ b/backend/internal/types/chunks.go @@ -0,0 +1,75 @@ +package types + +type Chunk struct { + Content string `json:"content"` + Metadata map[string]string `json:"metadata,omitempty"` +} + +type SearchResult struct { + Title string `json:"title"` + URL string `json:"url"` + Content string `json:"content,omitempty"` + Thumbnail string `json:"thumbnail,omitempty"` + ImgSrc string `json:"img_src,omitempty"` + ThumbnailSrc string `json:"thumbnail_src,omitempty"` + IframeSrc string `json:"iframe_src,omitempty"` + Author string `json:"author,omitempty"` + PublishedDate string `json:"publishedDate,omitempty"` + Engine string `json:"engine,omitempty"` + Category string `json:"category,omitempty"` + Score float64 `json:"score,omitempty"` + Price string `json:"price,omitempty"` + Currency string `json:"currency,omitempty"` + Duration interface{} `json:"duration,omitempty"` + Views interface{} `json:"views,omitempty"` +} + +type SearchResponse struct { + Results []SearchResult `json:"results"` + Suggestions []string 
`json:"suggestions,omitempty"` +} + +type ContentCategory string + +const ( + CategoryProduct ContentCategory = "product" + CategoryVideo ContentCategory = "video" + CategoryProfile ContentCategory = "profile" + CategoryPromo ContentCategory = "promo" + CategoryImage ContentCategory = "image" + CategoryArticle ContentCategory = "article" +) + +func (r *SearchResult) ToChunk() Chunk { + metadata := map[string]string{ + "title": r.Title, + "url": r.URL, + } + if r.Thumbnail != "" { + metadata["thumbnail"] = r.Thumbnail + } + if r.Author != "" { + metadata["author"] = r.Author + } + if r.PublishedDate != "" { + metadata["publishedDate"] = r.PublishedDate + } + + content := r.Content + if content == "" { + content = r.Title + } + + return Chunk{ + Content: content, + Metadata: metadata, + } +} + +func SearchResultsToChunks(results []SearchResult) []Chunk { + chunks := make([]Chunk, 0, len(results)) + for _, r := range results { + chunks = append(chunks, r.ToChunk()) + } + return chunks +} diff --git a/backend/internal/types/widgets.go b/backend/internal/types/widgets.go new file mode 100644 index 0000000..a9ccf67 --- /dev/null +++ b/backend/internal/types/widgets.go @@ -0,0 +1,145 @@ +package types + +type WidgetType string + +const ( + WidgetWeather WidgetType = "weather" + WidgetCalculator WidgetType = "calculator" + WidgetProducts WidgetType = "products" + WidgetVideos WidgetType = "videos" + WidgetProfiles WidgetType = "profiles" + WidgetPromos WidgetType = "promos" + WidgetImageGallery WidgetType = "image_gallery" + WidgetVideoEmbed WidgetType = "video_embed" + WidgetKnowledge WidgetType = "knowledge_card" +) + +type ProductData struct { + Title string `json:"title"` + URL string `json:"url"` + Price float64 `json:"price"` + OldPrice float64 `json:"oldPrice,omitempty"` + Currency string `json:"currency"` + Discount int `json:"discount,omitempty"` + Rating float64 `json:"rating,omitempty"` + ReviewCount int `json:"reviewCount,omitempty"` + ImageURL string 
`json:"imageUrl,omitempty"` + Marketplace string `json:"marketplace"` + InStock bool `json:"inStock"` + Badges []Badge `json:"badges,omitempty"` +} + +type VideoData struct { + Title string `json:"title"` + URL string `json:"url"` + Thumbnail string `json:"thumbnail,omitempty"` + Duration int `json:"duration,omitempty"` + Views int `json:"views,omitempty"` + Likes int `json:"likes,omitempty"` + Author string `json:"author,omitempty"` + Platform string `json:"platform"` + EmbedURL string `json:"embedUrl,omitempty"` +} + +type ProfileData struct { + Name string `json:"name"` + Username string `json:"username,omitempty"` + URL string `json:"url"` + AvatarURL string `json:"avatarUrl,omitempty"` + Bio string `json:"bio,omitempty"` + Followers int `json:"followers,omitempty"` + Following int `json:"following,omitempty"` + Platform string `json:"platform"` + Verified bool `json:"verified"` + IsOnline bool `json:"isOnline,omitempty"` + LastOnline string `json:"lastOnline,omitempty"` +} + +type PromoData struct { + Code string `json:"code"` + Description string `json:"description"` + Discount string `json:"discount"` + Store string `json:"store"` + StoreURL string `json:"storeUrl"` + LogoURL string `json:"logoUrl,omitempty"` + ExpiresAt string `json:"expiresAt,omitempty"` + Conditions string `json:"conditions,omitempty"` + Verified bool `json:"verified"` +} + +type ImageData struct { + URL string `json:"url"` + Title string `json:"title,omitempty"` + Source string `json:"source,omitempty"` + SourceURL string `json:"sourceUrl,omitempty"` + Width int `json:"width,omitempty"` + Height int `json:"height,omitempty"` +} + +type Badge struct { + Text string `json:"text"` + Type string `json:"type"` + Color string `json:"color,omitempty"` +} + +type KnowledgeCardData struct { + Type string `json:"type"` + Title string `json:"title,omitempty"` + Content interface{} `json:"content"` +} + +type ComparisonTable struct { + Headers []string `json:"headers"` + Rows [][]string 
`json:"rows"` +} + +type StatCard struct { + Label string `json:"label"` + Value string `json:"value"` + Change float64 `json:"change,omitempty"` + Unit string `json:"unit,omitempty"` +} + +type Timeline struct { + Events []TimelineEvent `json:"events"` +} + +type TimelineEvent struct { + Date string `json:"date"` + Title string `json:"title"` + Description string `json:"description,omitempty"` +} + +type WeatherParams struct { + Location string `json:"location"` + Current WeatherCurrent `json:"current"` + Forecast []WeatherDay `json:"forecast,omitempty"` + LastUpdated string `json:"lastUpdated,omitempty"` +} + +type WeatherCurrent struct { + Temp float64 `json:"temp"` + FeelsLike float64 `json:"feelsLike"` + Humidity int `json:"humidity"` + WindSpeed float64 `json:"windSpeed"` + Description string `json:"description"` + Icon string `json:"icon"` +} + +type WeatherDay struct { + Date string `json:"date"` + TempMax float64 `json:"tempMax"` + TempMin float64 `json:"tempMin"` + Icon string `json:"icon"` +} + +type CalculatorParams struct { + Expression string `json:"expression"` + Result float64 `json:"result"` + Steps []Step `json:"steps,omitempty"` +} + +type Step struct { + Description string `json:"description"` + Value string `json:"value"` +} diff --git a/backend/pkg/cache/redis.go b/backend/pkg/cache/redis.go new file mode 100644 index 0000000..8a0f13c --- /dev/null +++ b/backend/pkg/cache/redis.go @@ -0,0 +1,183 @@ +package cache + +import ( + "context" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "time" + + "github.com/redis/go-redis/v9" +) + +type RedisCache struct { + client *redis.Client + prefix string +} + +func NewRedisCache(redisURL, prefix string) (*RedisCache, error) { + opts, err := redis.ParseURL(redisURL) + if err != nil { + return nil, err + } + + client := redis.NewClient(opts) + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + if err := client.Ping(ctx).Err(); err != nil { + return nil, 
err + } + + return &RedisCache{ + client: client, + prefix: prefix, + }, nil +} + +func (c *RedisCache) Close() error { + return c.client.Close() +} + +func (c *RedisCache) Set(ctx context.Context, key string, value interface{}, ttl time.Duration) error { + data, err := json.Marshal(value) + if err != nil { + return err + } + return c.client.Set(ctx, c.prefix+":"+key, data, ttl).Err() +} + +func (c *RedisCache) Get(ctx context.Context, key string, dest interface{}) error { + data, err := c.client.Get(ctx, c.prefix+":"+key).Bytes() + if err != nil { + return err + } + return json.Unmarshal(data, dest) +} + +func (c *RedisCache) Exists(ctx context.Context, key string) (bool, error) { + n, err := c.client.Exists(ctx, c.prefix+":"+key).Result() + if err != nil { + return false, err + } + return n > 0, nil +} + +func (c *RedisCache) Delete(ctx context.Context, key string) error { + return c.client.Del(ctx, c.prefix+":"+key).Err() +} + +func (c *RedisCache) SetJSON(ctx context.Context, key string, value interface{}, ttl time.Duration) error { + return c.Set(ctx, key, value, ttl) +} + +func (c *RedisCache) GetJSON(ctx context.Context, key string, dest interface{}) error { + return c.Get(ctx, key, dest) +} + +type CacheKey string + +const ( + KeySearchResults CacheKey = "search" + KeyArticleSummary CacheKey = "summary" + KeyDigest CacheKey = "digest" + KeyChatResponse CacheKey = "chat" +) + +func HashKey(parts ...string) string { + combined := "" + for _, p := range parts { + combined += p + ":" + } + hash := sha256.Sum256([]byte(combined)) + return hex.EncodeToString(hash[:16]) +} + +func (c *RedisCache) CacheSearch(ctx context.Context, query string, results interface{}, ttl time.Duration) error { + key := string(KeySearchResults) + ":" + HashKey(query) + return c.Set(ctx, key, results, ttl) +} + +func (c *RedisCache) GetCachedSearch(ctx context.Context, query string, dest interface{}) error { + key := string(KeySearchResults) + ":" + HashKey(query) + return c.Get(ctx, 
key, dest) +} + +func (c *RedisCache) CacheArticleSummary(ctx context.Context, url string, events []string, ttl time.Duration) error { + key := string(KeyArticleSummary) + ":" + HashKey(url) + return c.Set(ctx, key, events, ttl) +} + +func (c *RedisCache) GetCachedArticleSummary(ctx context.Context, url string) ([]string, error) { + key := string(KeyArticleSummary) + ":" + HashKey(url) + var events []string + if err := c.Get(ctx, key, &events); err != nil { + return nil, err + } + return events, nil +} + +func (c *RedisCache) CacheDigest(ctx context.Context, topic, region, title string, digest interface{}, ttl time.Duration) error { + key := string(KeyDigest) + ":" + HashKey(topic, region, title) + return c.Set(ctx, key, digest, ttl) +} + +func (c *RedisCache) GetCachedDigest(ctx context.Context, topic, region, title string, dest interface{}) error { + key := string(KeyDigest) + ":" + HashKey(topic, region, title) + return c.Get(ctx, key, dest) +} + +type MemoryCache struct { + data map[string]cacheEntry +} + +type cacheEntry struct { + value interface{} + expiresAt time.Time +} + +func NewMemoryCache() *MemoryCache { + return &MemoryCache{ + data: make(map[string]cacheEntry), + } +} + +func (c *MemoryCache) Set(key string, value interface{}, ttl time.Duration) { + c.data[key] = cacheEntry{ + value: value, + expiresAt: time.Now().Add(ttl), + } +} + +func (c *MemoryCache) Get(key string) (interface{}, bool) { + entry, ok := c.data[key] + if !ok { + return nil, false + } + if time.Now().After(entry.expiresAt) { + delete(c.data, key) + return nil, false + } + return entry.value, true +} + +func (c *MemoryCache) Delete(key string) { + delete(c.data, key) +} + +func (c *MemoryCache) Clear() { + c.data = make(map[string]cacheEntry) +} + +func (c *MemoryCache) Cleanup() int { + count := 0 + now := time.Now() + for k, v := range c.data { + if now.After(v.expiresAt) { + delete(c.data, k) + count++ + } + } + return count +} diff --git a/backend/pkg/config/config.go 
b/backend/pkg/config/config.go new file mode 100644 index 0000000..2bda6d6 --- /dev/null +++ b/backend/pkg/config/config.go @@ -0,0 +1,173 @@ +package config + +import ( + "os" + "strconv" + "strings" + "time" + + "github.com/joho/godotenv" +) + +type Config struct { + Environment string + LogLevel string + + // Service ports + APIGatewayPort int + ChatSvcPort int + AgentSvcPort int + SearchSvcPort int + LLMSvcPort int + ScraperSvcPort int + + // Service URLs + ChatSvcURL string + AgentSvcURL string + SearchSvcURL string + LLMSvcURL string + ScraperSvcURL string + MemorySvcURL string + LibrarySvcURL string + + // External services + SearXNGURL string + SearXNGFallbackURL []string + Crawl4AIURL string + RedisURL string + DatabaseURL string + DiscoverSvcURL string + CollectionSvcURL string + FileSvcURL string + ThreadSvcURL string + ComputerSvcURL string + FinanceHeatmapURL string + LearningSvcURL string + + // Auth + JWTSecret string + AuthSvcURL string + + // LLM defaults + DefaultLLMProvider string + DefaultLLMModel string + OpenAIAPIKey string + AnthropicAPIKey string + GeminiAPIKey string + + // Timeweb Cloud AI + TimewebAPIBaseURL string + TimewebAgentAccessID string + TimewebAPIKey string + TimewebProxySource string + + // Timeouts + HTTPTimeout time.Duration + LLMTimeout time.Duration + ScrapeTimeout time.Duration + SearchTimeout time.Duration + + // CORS + AllowedOrigins []string +} + +var cfg *Config + +func Load() (*Config, error) { + _ = godotenv.Load() + _ = godotenv.Load("../.env") + _ = godotenv.Load("../../.env") + + cfg = &Config{ + Environment: getEnv("ENVIRONMENT", "production"), + LogLevel: getEnv("LOG_LEVEL", "info"), + + APIGatewayPort: getEnvInt("API_GATEWAY_PORT", 3015), + ChatSvcPort: getEnvInt("CHAT_SVC_PORT", 3005), + AgentSvcPort: getEnvInt("AGENT_SVC_PORT", 3018), + SearchSvcPort: getEnvInt("SEARCH_SVC_PORT", 3001), + LLMSvcPort: getEnvInt("LLM_SVC_PORT", 3020), + ScraperSvcPort: getEnvInt("SCRAPER_SVC_PORT", 3021), + + ChatSvcURL: 
getEnv("CHAT_SVC_URL", "http://localhost:3005"), + AgentSvcURL: getEnv("MASTER_AGENTS_SVC_URL", "http://localhost:3018"), + SearchSvcURL: getEnv("SEARCH_SVC_URL", "http://localhost:3001"), + LLMSvcURL: getEnv("LLM_SVC_URL", "http://localhost:3020"), + ScraperSvcURL: getEnv("SCRAPER_SVC_URL", "http://localhost:3021"), + MemorySvcURL: getEnv("MEMORY_SVC_URL", ""), + LibrarySvcURL: getEnv("LIBRARY_SVC_URL", "http://localhost:3009"), + + SearXNGURL: getEnv("SEARXNG_URL", "http://searxng:8080"), + SearXNGFallbackURL: strings.Split(getEnv("SEARXNG_FALLBACK_URL", ""), ","), + Crawl4AIURL: getEnv("CRAWL4AI_URL", "http://crawl4ai:11235"), + RedisURL: getEnv("REDIS_URL", "redis://localhost:6379"), + DatabaseURL: getEnv("DATABASE_URL", ""), + DiscoverSvcURL: getEnv("DISCOVER_SVC_URL", "http://localhost:3002"), + CollectionSvcURL: getEnv("COLLECTION_SVC_URL", "http://localhost:3025"), + FileSvcURL: getEnv("FILE_SVC_URL", "http://localhost:3026"), + ThreadSvcURL: getEnv("THREAD_SVC_URL", "http://localhost:3027"), + ComputerSvcURL: getEnv("COMPUTER_SVC_URL", "http://localhost:3030"), + FinanceHeatmapURL: getEnv("FINANCE_HEATMAP_SVC_URL", "http://localhost:3033"), + LearningSvcURL: getEnv("LEARNING_SVC_URL", "http://localhost:3034"), + + JWTSecret: getEnv("JWT_SECRET", ""), + AuthSvcURL: getEnv("AUTH_SVC_URL", ""), + + DefaultLLMProvider: getEnv("DEFAULT_LLM_PROVIDER", "openai"), + DefaultLLMModel: getEnv("DEFAULT_LLM_MODEL", "gpt-4o-mini"), + OpenAIAPIKey: getEnv("OPENAI_API_KEY", ""), + AnthropicAPIKey: getEnv("ANTHROPIC_API_KEY", ""), + GeminiAPIKey: getEnv("GEMINI_API_KEY", ""), + + TimewebAPIBaseURL: getEnv("TIMEWEB_API_BASE_URL", "https://api.timeweb.cloud"), + TimewebAgentAccessID: getEnv("TIMEWEB_AGENT_ACCESS_ID", ""), + TimewebAPIKey: getEnv("TIMEWEB_API_KEY", ""), + TimewebProxySource: getEnv("TIMEWEB_X_PROXY_SOURCE", "gooseek"), + + HTTPTimeout: time.Duration(getEnvInt("HTTP_TIMEOUT_MS", 60000)) * time.Millisecond, + LLMTimeout: 
time.Duration(getEnvInt("LLM_TIMEOUT_MS", 120000)) * time.Millisecond, + ScrapeTimeout: time.Duration(getEnvInt("SCRAPE_TIMEOUT_MS", 25000)) * time.Millisecond, + SearchTimeout: time.Duration(getEnvInt("SEARCH_TIMEOUT_MS", 10000)) * time.Millisecond, + + AllowedOrigins: parseOrigins(getEnv("ALLOWED_ORIGINS", "*")), + } + + return cfg, nil +} + +func Get() *Config { + if cfg == nil { + cfg, _ = Load() + } + return cfg +} + +func getEnv(key, defaultValue string) string { + if value := os.Getenv(key); value != "" { + return value + } + return defaultValue +} + +func getEnvInt(key string, defaultValue int) int { + if value := os.Getenv(key); value != "" { + if i, err := strconv.Atoi(value); err == nil { + return i + } + } + return defaultValue +} + +func parseOrigins(s string) []string { + if s == "*" { + return []string{"*"} + } + origins := strings.Split(s, ",") + result := make([]string, 0, len(origins)) + for _, o := range origins { + if trimmed := strings.TrimSpace(o); trimmed != "" { + result = append(result, trimmed) + } + } + return result +} diff --git a/backend/pkg/middleware/auth.go b/backend/pkg/middleware/auth.go new file mode 100644 index 0000000..7566e40 --- /dev/null +++ b/backend/pkg/middleware/auth.go @@ -0,0 +1,50 @@ +package middleware + +import ( + "strings" + + "github.com/gofiber/fiber/v2" +) + +type AuthConfig struct { + RequireAuth bool + SkipPaths []string +} + +func Auth(config AuthConfig) fiber.Handler { + return func(c *fiber.Ctx) error { + if !config.RequireAuth { + return c.Next() + } + + path := c.Path() + for _, skip := range config.SkipPaths { + if strings.HasPrefix(path, skip) { + return c.Next() + } + } + + auth := c.Get("Authorization") + if auth == "" { + return c.Status(401).JSON(fiber.Map{ + "error": "Unauthorized", + }) + } + + if !strings.HasPrefix(auth, "Bearer ") { + return c.Status(401).JSON(fiber.Map{ + "error": "Invalid authorization format", + }) + } + + return c.Next() + } +} + +func ExtractToken(c *fiber.Ctx) string { + 
auth := c.Get("Authorization") + if strings.HasPrefix(auth, "Bearer ") { + return strings.TrimPrefix(auth, "Bearer ") + } + return "" +} diff --git a/backend/pkg/middleware/jwt.go b/backend/pkg/middleware/jwt.go new file mode 100644 index 0000000..8c1fde8 --- /dev/null +++ b/backend/pkg/middleware/jwt.go @@ -0,0 +1,280 @@ +package middleware + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "strings" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/golang-jwt/jwt/v5" +) + +type JWTConfig struct { + Secret string + AuthSvcURL string + SkipPaths []string + AllowGuest bool + CacheDuration time.Duration +} + +type UserClaims struct { + UserID string `json:"userId"` + Email string `json:"email"` + Role string `json:"role"` + Tier string `json:"tier"` + IsGuest bool `json:"isGuest"` + ExpiresAt int64 `json:"exp"` +} + +type contextKey string + +const UserContextKey contextKey = "user" + +func JWT(cfg JWTConfig) fiber.Handler { + skipMap := make(map[string]bool) + for _, path := range cfg.SkipPaths { + skipMap[path] = true + } + + if cfg.CacheDuration == 0 { + cfg.CacheDuration = 5 * time.Minute + } + + return func(c *fiber.Ctx) error { + if skipMap[c.Path()] { + return c.Next() + } + + if strings.HasPrefix(c.Path(), "/health") || strings.HasPrefix(c.Path(), "/ready") { + return c.Next() + } + + token := ExtractToken(c) + + if token == "" { + if cfg.AllowGuest { + c.Locals(string(UserContextKey), &UserClaims{ + IsGuest: true, + Role: "guest", + Tier: "free", + }) + return c.Next() + } + return c.Status(401).JSON(fiber.Map{ + "error": "Unauthorized", + "message": "Missing authorization token", + }) + } + + var claims *UserClaims + var err error + + if cfg.Secret != "" { + claims, err = validateLocalJWT(token, cfg.Secret) + } else if cfg.AuthSvcURL != "" { + claims, err = validateWithAuthService(c.Context(), token, cfg.AuthSvcURL) + } else { + return c.Status(500).JSON(fiber.Map{ + "error": "Configuration Error", + "message": "JWT validation not 
configured", + }) + } + + if err != nil { + return c.Status(401).JSON(fiber.Map{ + "error": "Unauthorized", + "message": err.Error(), + }) + } + + c.Locals(string(UserContextKey), claims) + c.Locals("userId", claims.UserID) + c.Locals("userRole", claims.Role) + c.Locals("userTier", claims.Tier) + + return c.Next() + } +} + +func validateLocalJWT(tokenString, secret string) (*UserClaims, error) { + token, err := jwt.Parse(tokenString, func(token *jwt.Token) (interface{}, error) { + if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { + return nil, fmt.Errorf("unexpected signing method: %v", token.Header["alg"]) + } + return []byte(secret), nil + }) + + if err != nil { + return nil, fmt.Errorf("invalid token: %w", err) + } + + if !token.Valid { + return nil, fmt.Errorf("token is not valid") + } + + mapClaims, ok := token.Claims.(jwt.MapClaims) + if !ok { + return nil, fmt.Errorf("invalid claims format") + } + + claims := &UserClaims{} + + if v, ok := mapClaims["userId"].(string); ok { + claims.UserID = v + } else if v, ok := mapClaims["sub"].(string); ok { + claims.UserID = v + } + + if v, ok := mapClaims["email"].(string); ok { + claims.Email = v + } + + if v, ok := mapClaims["role"].(string); ok { + claims.Role = v + } else { + claims.Role = "user" + } + + if v, ok := mapClaims["tier"].(string); ok { + claims.Tier = v + } else { + claims.Tier = "free" + } + + if v, ok := mapClaims["exp"].(float64); ok { + claims.ExpiresAt = int64(v) + } + + return claims, nil +} + +func validateWithAuthService(ctx context.Context, token, authURL string) (*UserClaims, error) { + reqURL := strings.TrimSuffix(authURL, "/") + "/api/v1/auth/validate" + + req, err := http.NewRequestWithContext(ctx, "GET", reqURL, nil) + if err != nil { + return nil, err + } + + req.Header.Set("Authorization", "Bearer "+token) + + client := &http.Client{Timeout: 5 * time.Second} + resp, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("auth service unavailable: %w", err) + } + defer 
resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("token validation failed: status %d", resp.StatusCode) + } + + var result struct { + Valid bool `json:"valid"` + User UserClaims `json:"user"` + } + + if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { + return nil, fmt.Errorf("failed to decode auth response: %w", err) + } + + if !result.Valid { + return nil, fmt.Errorf("token is not valid") + } + + return &result.User, nil +} + +func GetUser(c *fiber.Ctx) *UserClaims { + user, ok := c.Locals(string(UserContextKey)).(*UserClaims) + if !ok { + return nil + } + return user +} + +func GetUserID(c *fiber.Ctx) string { + user := GetUser(c) + if user == nil { + return "" + } + return user.UserID +} + +func GetUserTier(c *fiber.Ctx) string { + user := GetUser(c) + if user == nil { + return "free" + } + return user.Tier +} + +func RequireAuth() fiber.Handler { + return func(c *fiber.Ctx) error { + user := GetUser(c) + if user == nil || user.IsGuest { + return c.Status(401).JSON(fiber.Map{ + "error": "Unauthorized", + "message": "Authentication required", + }) + } + return c.Next() + } +} + +func RequireRole(roles ...string) fiber.Handler { + roleMap := make(map[string]bool) + for _, r := range roles { + roleMap[r] = true + } + + return func(c *fiber.Ctx) error { + user := GetUser(c) + if user == nil { + return c.Status(401).JSON(fiber.Map{ + "error": "Unauthorized", + "message": "Authentication required", + }) + } + + if !roleMap[user.Role] { + return c.Status(403).JSON(fiber.Map{ + "error": "Forbidden", + "message": "Insufficient permissions", + }) + } + + return c.Next() + } +} + +func RequireTier(tiers ...string) fiber.Handler { + tierMap := make(map[string]bool) + for _, t := range tiers { + tierMap[t] = true + } + + return func(c *fiber.Ctx) error { + user := GetUser(c) + if user == nil { + return c.Status(401).JSON(fiber.Map{ + "error": "Unauthorized", + "message": "Authentication required", + }) + } + + if 
!tierMap[user.Tier] { + return c.Status(403).JSON(fiber.Map{ + "error": "Forbidden", + "message": "This feature requires a higher tier subscription", + "current": user.Tier, + "required": tiers, + }) + } + + return c.Next() + } +} diff --git a/backend/pkg/middleware/logging.go b/backend/pkg/middleware/logging.go new file mode 100644 index 0000000..e3ba5f5 --- /dev/null +++ b/backend/pkg/middleware/logging.go @@ -0,0 +1,49 @@ +package middleware + +import ( + "time" + + "github.com/gofiber/fiber/v2" + "go.uber.org/zap" +) + +type LoggingConfig struct { + Logger *zap.Logger + SkipPaths []string +} + +func Logging(config LoggingConfig) fiber.Handler { + logger := config.Logger + if logger == nil { + logger, _ = zap.NewProduction() + } + + skipPaths := make(map[string]bool) + for _, path := range config.SkipPaths { + skipPaths[path] = true + } + + return func(c *fiber.Ctx) error { + path := c.Path() + if skipPaths[path] { + return c.Next() + } + + start := time.Now() + + err := c.Next() + + duration := time.Since(start) + + logger.Info("request", + zap.String("method", c.Method()), + zap.String("path", path), + zap.Int("status", c.Response().StatusCode()), + zap.Duration("latency", duration), + zap.String("ip", c.IP()), + zap.String("user-agent", c.Get("User-Agent")), + ) + + return err + } +} diff --git a/backend/pkg/middleware/ratelimit.go b/backend/pkg/middleware/ratelimit.go new file mode 100644 index 0000000..aed631f --- /dev/null +++ b/backend/pkg/middleware/ratelimit.go @@ -0,0 +1,106 @@ +package middleware + +import ( + "sync" + "time" + + "github.com/gofiber/fiber/v2" +) + +type RateLimitConfig struct { + Max int + WindowSecs int + KeyFunc func(*fiber.Ctx) string +} + +type rateLimiter struct { + requests map[string][]time.Time + mu sync.RWMutex + max int + window time.Duration +} + +func newRateLimiter(max int, windowSecs int) *rateLimiter { + rl := &rateLimiter{ + requests: make(map[string][]time.Time), + max: max, + window: time.Duration(windowSecs) * 
time.Second, + } + + go rl.cleanup() + return rl +} + +func (rl *rateLimiter) cleanup() { + ticker := time.NewTicker(time.Minute) + for range ticker.C { + rl.mu.Lock() + now := time.Now() + for key, times := range rl.requests { + var valid []time.Time + for _, t := range times { + if now.Sub(t) < rl.window { + valid = append(valid, t) + } + } + if len(valid) == 0 { + delete(rl.requests, key) + } else { + rl.requests[key] = valid + } + } + rl.mu.Unlock() + } +} + +func (rl *rateLimiter) allow(key string) bool { + rl.mu.Lock() + defer rl.mu.Unlock() + + now := time.Now() + windowStart := now.Add(-rl.window) + + times := rl.requests[key] + var valid []time.Time + for _, t := range times { + if t.After(windowStart) { + valid = append(valid, t) + } + } + + if len(valid) >= rl.max { + rl.requests[key] = valid + return false + } + + rl.requests[key] = append(valid, now) + return true +} + +func RateLimit(config RateLimitConfig) fiber.Handler { + if config.Max == 0 { + config.Max = 100 + } + if config.WindowSecs == 0 { + config.WindowSecs = 60 + } + if config.KeyFunc == nil { + config.KeyFunc = func(c *fiber.Ctx) string { + return c.IP() + } + } + + limiter := newRateLimiter(config.Max, config.WindowSecs) + + return func(c *fiber.Ctx) error { + key := config.KeyFunc(c) + + if !limiter.allow(key) { + return c.Status(429).JSON(fiber.Map{ + "error": "Too many requests", + }) + } + + return c.Next() + } +} diff --git a/backend/pkg/middleware/ratelimit_redis.go b/backend/pkg/middleware/ratelimit_redis.go new file mode 100644 index 0000000..b5846e3 --- /dev/null +++ b/backend/pkg/middleware/ratelimit_redis.go @@ -0,0 +1,213 @@ +package middleware + +import ( + "context" + "fmt" + "strconv" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/redis/go-redis/v9" +) + +type RedisRateLimiterConfig struct { + RedisClient *redis.Client + KeyPrefix string + Max int + Window time.Duration + KeyFunc func(*fiber.Ctx) string + SkipPaths []string +} + +func RedisRateLimit(cfg 
RedisRateLimiterConfig) fiber.Handler { + if cfg.KeyPrefix == "" { + cfg.KeyPrefix = "ratelimit" + } + if cfg.Max == 0 { + cfg.Max = 100 + } + if cfg.Window == 0 { + cfg.Window = time.Minute + } + if cfg.KeyFunc == nil { + cfg.KeyFunc = func(c *fiber.Ctx) string { + return c.IP() + } + } + + skipMap := make(map[string]bool) + for _, path := range cfg.SkipPaths { + skipMap[path] = true + } + + return func(c *fiber.Ctx) error { + if skipMap[c.Path()] { + return c.Next() + } + + ctx := context.Background() + key := fmt.Sprintf("%s:%s", cfg.KeyPrefix, cfg.KeyFunc(c)) + + pipe := cfg.RedisClient.Pipeline() + incr := pipe.Incr(ctx, key) + pipe.Expire(ctx, key, cfg.Window) + _, err := pipe.Exec(ctx) + if err != nil { + return c.Next() + } + + current := incr.Val() + + c.Set("X-RateLimit-Limit", strconv.Itoa(cfg.Max)) + c.Set("X-RateLimit-Remaining", strconv.Itoa(max(0, cfg.Max-int(current)))) + + ttl, _ := cfg.RedisClient.TTL(ctx, key).Result() + c.Set("X-RateLimit-Reset", strconv.FormatInt(time.Now().Add(ttl).Unix(), 10)) + + if int(current) > cfg.Max { + c.Set("Retry-After", strconv.FormatInt(int64(ttl.Seconds()), 10)) + return c.Status(429).JSON(fiber.Map{ + "error": "Too Many Requests", + "retry_after": int64(ttl.Seconds()), + }) + } + + return c.Next() + } +} + +type SlidingWindowConfig struct { + RedisClient *redis.Client + KeyPrefix string + Max int + Window time.Duration + KeyFunc func(*fiber.Ctx) string +} + +func SlidingWindowRateLimit(cfg SlidingWindowConfig) fiber.Handler { + if cfg.KeyPrefix == "" { + cfg.KeyPrefix = "ratelimit:sliding" + } + if cfg.Max == 0 { + cfg.Max = 100 + } + if cfg.Window == 0 { + cfg.Window = time.Minute + } + if cfg.KeyFunc == nil { + cfg.KeyFunc = func(c *fiber.Ctx) string { + return c.IP() + } + } + + return func(c *fiber.Ctx) error { + ctx := context.Background() + key := fmt.Sprintf("%s:%s", cfg.KeyPrefix, cfg.KeyFunc(c)) + now := time.Now() + windowStart := now.Add(-cfg.Window).UnixMicro() + + pipe := cfg.RedisClient.Pipeline() 
+ + pipe.ZRemRangeByScore(ctx, key, "0", strconv.FormatInt(windowStart, 10)) + + pipe.ZAdd(ctx, key, redis.Z{ + Score: float64(now.UnixMicro()), + Member: fmt.Sprintf("%d", now.UnixNano()), + }) + + countCmd := pipe.ZCard(ctx, key) + + pipe.Expire(ctx, key, cfg.Window) + + _, err := pipe.Exec(ctx) + if err != nil { + return c.Next() + } + + count := countCmd.Val() + + c.Set("X-RateLimit-Limit", strconv.Itoa(cfg.Max)) + c.Set("X-RateLimit-Remaining", strconv.Itoa(max(0, cfg.Max-int(count)))) + + if int(count) > cfg.Max { + return c.Status(429).JSON(fiber.Map{ + "error": "Too Many Requests", + "retry_after": int64(cfg.Window.Seconds()), + }) + } + + return c.Next() + } +} + +type TieredRateLimitConfig struct { + RedisClient *redis.Client + KeyPrefix string + Tiers map[string]TierConfig + GetTierFunc func(*fiber.Ctx) string + KeyFunc func(*fiber.Ctx) string +} + +type TierConfig struct { + Max int + Window time.Duration +} + +func TieredRateLimit(cfg TieredRateLimitConfig) fiber.Handler { + if cfg.KeyPrefix == "" { + cfg.KeyPrefix = "ratelimit:tiered" + } + if cfg.GetTierFunc == nil { + cfg.GetTierFunc = func(c *fiber.Ctx) string { return "default" } + } + if cfg.KeyFunc == nil { + cfg.KeyFunc = func(c *fiber.Ctx) string { return c.IP() } + } + + defaultTier := TierConfig{Max: 60, Window: time.Minute} + if _, ok := cfg.Tiers["default"]; !ok { + cfg.Tiers["default"] = defaultTier + } + + return func(c *fiber.Ctx) error { + ctx := context.Background() + tier := cfg.GetTierFunc(c) + tierCfg, ok := cfg.Tiers[tier] + if !ok { + tierCfg = cfg.Tiers["default"] + } + + key := fmt.Sprintf("%s:%s:%s", cfg.KeyPrefix, tier, cfg.KeyFunc(c)) + + pipe := cfg.RedisClient.Pipeline() + incr := pipe.Incr(ctx, key) + pipe.Expire(ctx, key, tierCfg.Window) + _, err := pipe.Exec(ctx) + if err != nil { + return c.Next() + } + + current := incr.Val() + + c.Set("X-RateLimit-Tier", tier) + c.Set("X-RateLimit-Limit", strconv.Itoa(tierCfg.Max)) + c.Set("X-RateLimit-Remaining", strconv.Itoa(max(0, 
tierCfg.Max-int(current)))) + + if int(current) > tierCfg.Max { + return c.Status(429).JSON(fiber.Map{ + "error": "Too Many Requests", + "tier": tier, + "limit": tierCfg.Max, + }) + } + + return c.Next() + } +} + +func max(a, b int) int { + if a > b { + return a + } + return b +} diff --git a/backend/pkg/ndjson/writer.go b/backend/pkg/ndjson/writer.go new file mode 100644 index 0000000..3c59b11 --- /dev/null +++ b/backend/pkg/ndjson/writer.go @@ -0,0 +1,113 @@ +package ndjson + +import ( + "bufio" + "encoding/json" + "io" + "sync" +) + +type Writer struct { + w io.Writer + buf *bufio.Writer + mu sync.Mutex +} + +func NewWriter(w io.Writer) *Writer { + return &Writer{ + w: w, + buf: bufio.NewWriter(w), + } +} + +func (w *Writer) Write(v interface{}) error { + w.mu.Lock() + defer w.mu.Unlock() + + data, err := json.Marshal(v) + if err != nil { + return err + } + + if _, err := w.buf.Write(data); err != nil { + return err + } + + if err := w.buf.WriteByte('\n'); err != nil { + return err + } + + return w.buf.Flush() +} + +func (w *Writer) WriteRaw(data []byte) error { + w.mu.Lock() + defer w.mu.Unlock() + + if _, err := w.buf.Write(data); err != nil { + return err + } + + if err := w.buf.WriteByte('\n'); err != nil { + return err + } + + return w.buf.Flush() +} + +func (w *Writer) Flush() error { + w.mu.Lock() + defer w.mu.Unlock() + return w.buf.Flush() +} + +type StreamEvent struct { + Type string `json:"type"` + Block interface{} `json:"block,omitempty"` + BlockID string `json:"blockId,omitempty"` + Chunk string `json:"chunk,omitempty"` + Patch interface{} `json:"patch,omitempty"` + Data interface{} `json:"data,omitempty"` +} + +func WriteBlock(w *Writer, block interface{}) error { + return w.Write(StreamEvent{ + Type: "block", + Block: block, + }) +} + +func WriteTextChunk(w *Writer, blockID, chunk string) error { + return w.Write(StreamEvent{ + Type: "textChunk", + BlockID: blockID, + Chunk: chunk, + }) +} + +func WriteUpdateBlock(w *Writer, blockID string, patch 
interface{}) error { + return w.Write(StreamEvent{ + Type: "updateBlock", + BlockID: blockID, + Patch: patch, + }) +} + +func WriteResearchComplete(w *Writer) error { + return w.Write(StreamEvent{ + Type: "researchComplete", + }) +} + +func WriteMessageEnd(w *Writer) error { + return w.Write(StreamEvent{ + Type: "messageEnd", + }) +} + +func WriteError(w *Writer, err error) error { + return w.Write(StreamEvent{ + Type: "error", + Data: err.Error(), + }) +} diff --git a/backend/webui/.env.example b/backend/webui/.env.example new file mode 100644 index 0000000..5342e98 --- /dev/null +++ b/backend/webui/.env.example @@ -0,0 +1,7 @@ +# GooSeek WebUI Configuration + +# API Gateway URL (internal Docker network) +API_URL=http://api-gateway:3015 + +# Public API URL (for browser requests) +NEXT_PUBLIC_API_URL= diff --git a/backend/webui/Dockerfile b/backend/webui/Dockerfile new file mode 100644 index 0000000..701bf90 --- /dev/null +++ b/backend/webui/Dockerfile @@ -0,0 +1,44 @@ +FROM node:20-alpine AS base + +FROM base AS deps +RUN apk add --no-cache libc6-compat +WORKDIR /app + +COPY package.json package-lock.json* ./ +RUN npm ci --only=production + +FROM base AS builder +WORKDIR /app +COPY --from=deps /app/node_modules ./node_modules +COPY . . 
+ +ENV NEXT_TELEMETRY_DISABLED=1 +ENV NODE_ENV=production + +RUN npm run build + +FROM base AS runner +WORKDIR /app + +ENV NODE_ENV=production +ENV NEXT_TELEMETRY_DISABLED=1 + +RUN addgroup --system --gid 1001 nodejs +RUN adduser --system --uid 1001 nextjs + +COPY --from=builder /app/public ./public + +RUN mkdir .next +RUN chown nextjs:nodejs .next + +COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./ +COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static + +USER nextjs + +EXPOSE 3000 + +ENV PORT=3000 +ENV HOSTNAME="0.0.0.0" + +CMD ["node", "server.js"] diff --git a/backend/webui/next-env.d.ts b/backend/webui/next-env.d.ts new file mode 100644 index 0000000..40c3d68 --- /dev/null +++ b/backend/webui/next-env.d.ts @@ -0,0 +1,5 @@ +/// +/// + +// NOTE: This file should not be edited +// see https://nextjs.org/docs/app/building-your-application/configuring/typescript for more information. diff --git a/backend/webui/next.config.mjs b/backend/webui/next.config.mjs new file mode 100644 index 0000000..88f4a36 --- /dev/null +++ b/backend/webui/next.config.mjs @@ -0,0 +1,18 @@ +/** @type {import('next').NextConfig} */ +const nextConfig = { + output: 'standalone', + reactStrictMode: true, + env: { + API_URL: process.env.API_URL || 'http://api-gateway:3015', + }, + async rewrites() { + return [ + { + source: '/api/:path*', + destination: `${process.env.API_URL || 'http://api-gateway:3015'}/api/:path*`, + }, + ]; + }, +}; + +export default nextConfig; diff --git a/backend/webui/package-lock.json b/backend/webui/package-lock.json new file mode 100644 index 0000000..2bbdd71 --- /dev/null +++ b/backend/webui/package-lock.json @@ -0,0 +1,4408 @@ +{ + "name": "gooseek-webui", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "gooseek-webui", + "version": "1.0.0", + "dependencies": { + "@radix-ui/react-avatar": "^1.1.11", + "@radix-ui/react-dialog": "^1.1.15", + "@radix-ui/react-dropdown-menu": 
"^2.1.16", + "@radix-ui/react-scroll-area": "^1.2.10", + "@radix-ui/react-separator": "^1.1.8", + "@radix-ui/react-switch": "^1.2.6", + "@radix-ui/react-tabs": "^1.1.13", + "@radix-ui/react-tooltip": "^1.2.8", + "clsx": "^2.1.1", + "framer-motion": "^12.34.3", + "lucide-react": "^0.454.0", + "next": "^14.2.26", + "react": "^18.3.1", + "react-dom": "^18.3.1", + "react-markdown": "^9.0.1", + "tailwind-merge": "^2.5.4" + }, + "devDependencies": { + "@types/node": "^22.9.0", + "@types/react": "^18.3.12", + "@types/react-dom": "^18.3.1", + "autoprefixer": "^10.4.20", + "postcss": "^8.4.47", + "tailwindcss": "^3.4.14", + "typescript": "^5.6.3" + } + }, + "node_modules/@alloc/quick-lru": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", + "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@floating-ui/core": { + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.4.tgz", + "integrity": "sha512-C3HlIdsBxszvm5McXlB8PeOEWfBhcGBTZGkGlWc2U0KFY5IwG5OQEuQ8rq52DZmcHDlPLd+YFBK+cZcytwIFWg==", + "license": "MIT", + "dependencies": { + "@floating-ui/utils": "^0.2.10" + } + }, + "node_modules/@floating-ui/dom": { + "version": "1.7.5", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.5.tgz", + "integrity": "sha512-N0bD2kIPInNHUHehXhMke1rBGs1dwqvC9O9KYMyyjK7iXt7GAhnro7UlcuYcGdS/yYOlq0MAVgrow8IbWJwyqg==", + "license": "MIT", + "dependencies": { + "@floating-ui/core": "^1.7.4", + "@floating-ui/utils": "^0.2.10" + } + }, + "node_modules/@floating-ui/react-dom": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.1.7.tgz", + "integrity": 
"sha512-0tLRojf/1Go2JgEVm+3Frg9A3IW8bJgKgdO0BN5RkF//ufuz2joZM63Npau2ff3J6lUVYgDSNzNkR+aH3IVfjg==", + "license": "MIT", + "dependencies": { + "@floating-ui/dom": "^1.7.5" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, + "node_modules/@floating-ui/utils": { + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.10.tgz", + "integrity": "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==", + "license": "MIT" + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": 
"^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@next/env": { + "version": "14.2.26", + "resolved": "https://registry.npmjs.org/@next/env/-/env-14.2.26.tgz", + "integrity": "sha512-vO//GJ/YBco+H7xdQhzJxF7ub3SUwft76jwaeOyVVQFHCi5DCnkP16WHB+JBylo4vOKPoZBlR94Z8xBxNBdNJA==", + "license": "MIT" + }, + "node_modules/@next/swc-darwin-arm64": { + "version": "14.2.26", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.2.26.tgz", + "integrity": "sha512-zDJY8gsKEseGAxG+C2hTMT0w9Nk9N1Sk1qV7vXYz9MEiyRoF5ogQX2+vplyUMIfygnjn9/A04I6yrUTRTuRiyQ==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-darwin-x64": { + "version": "14.2.26", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.2.26.tgz", + "integrity": "sha512-U0adH5ryLfmTDkahLwG9sUQG2L0a9rYux8crQeC92rPhi3jGQEY47nByQHrVrt3prZigadwj/2HZ1LUUimuSbg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-arm64-gnu": { + "version": "14.2.26", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.2.26.tgz", + "integrity": "sha512-SINMl1I7UhfHGM7SoRiw0AbwnLEMUnJ/3XXVmhyptzriHbWvPPbbm0OEVG24uUKhuS1t0nvN/DBvm5kz6ZIqpg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-arm64-musl": { + "version": "14.2.26", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.2.26.tgz", + "integrity": "sha512-s6JaezoyJK2DxrwHWxLWtJKlqKqTdi/zaYigDXUJ/gmx/72CrzdVZfMvUc6VqnZ7YEvRijvYo+0o4Z9DencduA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + 
"node_modules/@next/swc-linux-x64-gnu": { + "version": "14.2.26", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.2.26.tgz", + "integrity": "sha512-FEXeUQi8/pLr/XI0hKbe0tgbLmHFRhgXOUiPScz2hk0hSmbGiU8aUqVslj/6C6KA38RzXnWoJXo4FMo6aBxjzg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-x64-musl": { + "version": "14.2.26", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.2.26.tgz", + "integrity": "sha512-BUsomaO4d2DuXhXhgQCVt2jjX4B4/Thts8nDoIruEJkhE5ifeQFtvW5c9JkdOtYvE5p2G0hcwQ0UbRaQmQwaVg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-arm64-msvc": { + "version": "14.2.26", + "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.2.26.tgz", + "integrity": "sha512-5auwsMVzT7wbB2CZXQxDctpWbdEnEW/e66DyXO1DcgHxIyhP06awu+rHKshZE+lPLIGiwtjo7bsyeuubewwxMw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-ia32-msvc": { + "version": "14.2.26", + "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.2.26.tgz", + "integrity": "sha512-GQWg/Vbz9zUGi9X80lOeGsz1rMH/MtFO/XqigDznhhhTfDlDoynCM6982mPCbSlxJ/aveZcKtTlwfAjwhyxDpg==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-x64-msvc": { + "version": "14.2.26", + "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.26.tgz", + "integrity": "sha512-2rdB3T1/Gp7bv1eQTTm9d1Y1sv9UuJ2LAwOE0Pe2prHKe32UNscj7YS13fRB37d0GAiGNR+Y7ZcW8YjDI8Ns0w==", + "cpu": [ + "x64" + ], + "license": "MIT", 
+ "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@radix-ui/number": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/number/-/number-1.1.1.tgz", + "integrity": "sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g==", + "license": "MIT" + }, + "node_modules/@radix-ui/primitive": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz", + "integrity": "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==", + "license": "MIT" + }, + "node_modules/@radix-ui/react-arrow": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.7.tgz", + "integrity": 
"sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-arrow/node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-avatar": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-avatar/-/react-avatar-1.1.11.tgz", + "integrity": "sha512-0Qk603AHGV28BOBO34p7IgD5m+V5Sg/YovfayABkoDDBM5d3NCx0Mp4gGrjzLGes1jV5eNOE1r3itqOR33VC6Q==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-context": "1.1.3", + "@radix-ui/react-primitive": "2.1.4", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-is-hydrated": "0.1.0", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + 
"peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collection": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.1.7.tgz", + "integrity": "sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collection/node_modules/@radix-ui/react-context": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", + "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collection/node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || 
^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-compose-refs": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.2.tgz", + "integrity": "sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-context": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.3.tgz", + "integrity": "sha512-ieIFACdMpYfMEjF0rEf5KLvfVyIkOz6PDGyNnP+u+4xQ6jny3VCgA4OgXOwNx2aUkxn8zx9fiVcM8CfFYv9Lxw==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dialog": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.1.15.tgz", + "integrity": "sha512-TCglVRtzlffRNxRMEyR36DGBLJpeusFcgMVD9PZEzAKnUs1lKCgX5u9BmC2Yg+LL9MgZDugFFs1Vl+Jp4t/PGw==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + 
"aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-context": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", + "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-direction": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.1.1.tgz", + "integrity": "sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + 
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dismissable-layer": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.1.11.tgz", + "integrity": "sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-escape-keydown": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dismissable-layer/node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dropdown-menu": { + "version": "2.1.16", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.1.16.tgz", + "integrity": 
"sha512-1PLGQEynI/3OX/ftV54COn+3Sud/Mn8vALg2rWnBLnRaGtJDduNW/22XjlGgPdpcIbiQxjKtb7BkcjP00nqfJw==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-menu": "2.1.16", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-context": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", + "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dropdown-menu/node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": 
true + } + } + }, + "node_modules/@radix-ui/react-focus-guards": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.1.3.tgz", + "integrity": "sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-focus-scope": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.1.7.tgz", + "integrity": "sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-focus-scope/node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + 
"@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-id": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.1.1.tgz", + "integrity": "sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-menu": { + "version": "2.1.16", + "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.1.16.tgz", + "integrity": "sha512-72F2T+PLlphrqLcAotYPp0uJMr5SjP5SL01wfEspJbru5Zs5vQaSHb4VB3ZMJPimgHHCHG7gMOeOB9H3Hdmtxg==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.11", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + 
"node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-context": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", + "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-menu/node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popper": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.2.8.tgz", + "integrity": "sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw==", + "license": "MIT", + "dependencies": { + "@floating-ui/react-dom": "^2.0.0", + "@radix-ui/react-arrow": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-layout-effect": "1.1.1", + "@radix-ui/react-use-rect": "1.1.1", + "@radix-ui/react-use-size": "1.1.1", + "@radix-ui/rect": "1.1.1" + }, + "peerDependencies": { + "@types/react": 
"*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popper/node_modules/@radix-ui/react-context": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", + "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popper/node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-portal": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.1.9.tgz", + "integrity": "sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + 
"@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-portal/node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-presence": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.5.tgz", + "integrity": "sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-primitive": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.4.tgz", + "integrity": 
"sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.4" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-primitive/node_modules/@radix-ui/react-slot": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.4.tgz", + "integrity": "sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-roving-focus": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.11.tgz", + "integrity": "sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + 
}, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-roving-focus/node_modules/@radix-ui/react-context": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", + "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-roving-focus/node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-scroll-area": { + "version": "1.2.10", + "resolved": "https://registry.npmjs.org/@radix-ui/react-scroll-area/-/react-scroll-area-1.2.10.tgz", + "integrity": "sha512-tAXIa1g3sM5CGpVT0uIbUx/U3Gs5N8T52IICuCtObaos1S8fzsrPXG5WObkQN3S6NVl6wKgPhAIiBGbWnvc97A==", + "license": "MIT", + "dependencies": { + "@radix-ui/number": "1.1.1", + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + 
"@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-scroll-area/node_modules/@radix-ui/react-context": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", + "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-scroll-area/node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-separator": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-separator/-/react-separator-1.1.8.tgz", + "integrity": "sha512-sDvqVY4itsKwwSMEe0jtKgfTh+72Sy3gPmQpjqcQneqQ4PFmr/1I0YA+2/puilhggCe2gJcx5EBAYFkWkdpa5g==", + "license": "MIT", + 
"dependencies": { + "@radix-ui/react-primitive": "2.1.4" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-slot": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz", + "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-switch": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@radix-ui/react-switch/-/react-switch-1.2.6.tgz", + "integrity": "sha512-bByzr1+ep1zk4VubeEVViV592vu2lHE2BZY5OnzehZqOOgogN80+mNtCqPkhn2gklJqOpxWgPoYTSnhBCqpOXQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-previous": "1.1.1", + "@radix-ui/react-use-size": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-switch/node_modules/@radix-ui/react-context": { + "version": "1.1.2", + "resolved": 
"https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", + "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-switch/node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-tabs": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/@radix-ui/react-tabs/-/react-tabs-1.1.13.tgz", + "integrity": "sha512-7xdcatg7/U+7+Udyoj2zodtI9H/IIopqo+YOIcZOq1nJwXWBZ9p8xiu5llXlekDbZkca79a/fozEYQXIA4sW6A==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.11", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + 
"@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-tabs/node_modules/@radix-ui/react-context": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", + "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-tabs/node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-tooltip": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.2.8.tgz", + "integrity": "sha512-tY7sVt1yL9ozIxvmbtN5qtmH2krXcBCfjEiCgKGLqunJHvgvZG2Pcl2oQ3kbcZARb1BGEHdkLzcYGO8ynVlieg==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": 
"2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-visually-hidden": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-tooltip/node_modules/@radix-ui/react-context": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", + "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-tooltip/node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-callback-ref": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.1.tgz", + "integrity": 
"sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-controllable-state": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.2.2.tgz", + "integrity": "sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-effect-event": "0.0.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-effect-event": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-effect-event/-/react-use-effect-event-0.0.2.tgz", + "integrity": "sha512-Qp8WbZOBe+blgpuUT+lw2xheLP8q0oatc9UpmiemEICxGvFLYmHm9QowVZGHtJlGbS6A6yJ3iViad/2cVjnOiA==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-escape-keydown": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.1.1.tgz", + "integrity": "sha512-Il0+boE7w/XebUHyBjroE+DbByORGR9KKmITzbR7MyQ4akpORYP/ZmbhAr0DG7RmmBqoOnZdy2QlvajJ2QA59g==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-callback-ref": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + 
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-is-hydrated": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-is-hydrated/-/react-use-is-hydrated-0.1.0.tgz", + "integrity": "sha512-U+UORVEq+cTnRIaostJv9AGdV3G6Y+zbVd+12e18jQ5A3c0xL03IhnHuiU4UV69wolOQp5GfR58NW/EgdQhwOA==", + "license": "MIT", + "dependencies": { + "use-sync-external-store": "^1.5.0" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-layout-effect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.1.1.tgz", + "integrity": "sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-previous": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-previous/-/react-use-previous-1.1.1.tgz", + "integrity": "sha512-2dHfToCj/pzca2Ck724OZ5L0EVrr3eHRNsG/b3xQJLA2hZpVCS99bLAX+hm1IHXDEnzU6by5z/5MIY794/a8NQ==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-rect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.1.1.tgz", + "integrity": 
"sha512-QTYuDesS0VtuHNNvMh+CjlKJ4LJickCMUAqjlE3+j8w+RlRpwyX3apEQKGFzbZGdo7XNG1tXa+bQqIE7HIXT2w==", + "license": "MIT", + "dependencies": { + "@radix-ui/rect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-size": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.1.1.tgz", + "integrity": "sha512-ewrXRDTAqAXlkl6t/fkXWNAhFX9I+CkKlw6zjEwk86RSPKwZr3xpBRso655aqYafwtnbpHLj6toFzmd6xdVptQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-visually-hidden": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.2.3.tgz", + "integrity": "sha512-pzJq12tEaaIhqjbzpCuv/OypJY/BPavOofm+dbab+MHLajy277+1lLm6JFcGgF5eskJ6mquGirhXY2GD/8u8Ug==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-visually-hidden/node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + 
"dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/rect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/rect/-/rect-1.1.1.tgz", + "integrity": "sha512-HPwpGIzkl28mWyZqG52jiqDJ12waP11Pa1lGoiyUkIEuMLBP0oeK/C89esbXrxsky5we7dfd8U58nm0SgAWpVw==", + "license": "MIT" + }, + "node_modules/@swc/counter": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", + "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==", + "license": "Apache-2.0" + }, + "node_modules/@swc/helpers": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.5.tgz", + "integrity": "sha512-KGYxvIOXcceOAbEk4bi/dVLEK9z8sZ0uBB3Il5b1rhfClSpcX0yfRO0KmTkqR2cnQDymwLB+25ZyMzICg/cm/A==", + "license": "Apache-2.0", + "dependencies": { + "@swc/counter": "^0.1.3", + "tslib": "^2.4.0" + } + }, + "node_modules/@types/debug": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", + "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "license": "MIT", + "dependencies": { + "@types/ms": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "license": "MIT" + }, + "node_modules/@types/estree-jsx": { + "version": "1.0.5", + "resolved": 
"https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.5.tgz", + "integrity": "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==", + "license": "MIT", + "dependencies": { + "@types/estree": "*" + } + }, + "node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/mdast": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", + "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/ms": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", + "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "22.19.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.13.tgz", + "integrity": "sha512-akNQMv0wW5uyRpD2v2IEyRSZiR+BeGuoB6L310EgGObO44HSMNT8z1xzio28V8qOrgYaopIDNA18YgdXd+qTiw==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@types/prop-types": { + "version": "15.7.15", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz", + "integrity": "sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==", + "license": "MIT" + }, + "node_modules/@types/react": { + "version": "18.3.28", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.28.tgz", + "integrity": 
"sha512-z9VXpC7MWrhfWipitjNdgCauoMLRdIILQsAEV+ZesIzBq/oUlxk0m3ApZuMFCXdnS4U7KrI+l3WRUEGQ8K1QKw==", + "license": "MIT", + "dependencies": { + "@types/prop-types": "*", + "csstype": "^3.2.2" + } + }, + "node_modules/@types/react-dom": { + "version": "18.3.7", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.7.tgz", + "integrity": "sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==", + "devOptional": true, + "license": "MIT", + "peerDependencies": { + "@types/react": "^18.0.0" + } + }, + "node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" + }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "license": "ISC" + }, + "node_modules/any-promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", + "dev": true, + "license": "MIT" + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arg": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": 
"sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", + "dev": true, + "license": "MIT" + }, + "node_modules/aria-hidden": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.6.tgz", + "integrity": "sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/autoprefixer": { + "version": "10.4.27", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.27.tgz", + "integrity": "sha512-NP9APE+tO+LuJGn7/9+cohklunJsXWiaWEfV3si4Gi/XHDwVNgkwr1J3RQYFIvPy76GmJ9/bW8vyoU1LcxwKHA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "browserslist": "^4.28.1", + "caniuse-lite": "^1.0.30001774", + "fraction.js": "^5.3.4", + "picocolors": "^1.1.1", + "postcss-value-parser": "^4.2.0" + }, + "bin": { + "autoprefixer": "bin/autoprefixer" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/bail": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz", + "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/baseline-browser-mapping": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.10.0.tgz", + "integrity": 
"sha512-lIyg0szRfYbiy67j9KN8IyeD7q7hcmqnJ1ddWmNt19ItGpNN64mnllmxUNFIOdOm6by97jlL6wfpTTJrmnjWAA==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.cjs" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/busboy": { + "version": "1.6.0", + "resolved": 
"https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", + "integrity": "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==", + "dependencies": { + "streamsearch": "^1.1.0" + }, + "engines": { + "node": ">=10.16.0" + } + }, + "node_modules/camelcase-css": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", + "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001774", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001774.tgz", + "integrity": "sha512-DDdwPGz99nmIEv216hKSgLD+D4ikHQHjBC/seF98N9CPqRX4M5mSxT9eTV6oyisnJcuzxtZy4n17yKKQYmYQOA==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/ccount": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", + "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz", + "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-html4": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz", + "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-legacy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", + "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-reference-invalid": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz", + "integrity": "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": 
"sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/client-only": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz", + "integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==", + "license": "MIT" + }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/csstype": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": 
"sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decode-named-character-reference": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.3.0.tgz", + "integrity": "sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q==", + "license": "MIT", + "dependencies": { + "character-entities": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/detect-node-es": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", + "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==", + "license": "MIT" + }, + "node_modules/devlop": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", + "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", + "license": "MIT", + "dependencies": { + "dequal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + 
"node_modules/didyoumean": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", + "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/dlv": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", + "dev": true, + "license": "MIT" + }, + "node_modules/electron-to-chromium": { + "version": "1.5.302", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.302.tgz", + "integrity": "sha512-sM6HAN2LyK82IyPBpznDRqlTQAtuSaO+ShzFiWTvoMJLHyZ+Y39r8VMfHzwbU8MVBzQ4Wdn85+wlZl2TLGIlwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/estree-util-is-identifier-name": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz", + "integrity": "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + 
"integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fastq": { + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz", + "integrity": "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fraction.js": { + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz", + "integrity": "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/framer-motion": { + "version": "12.34.3", + "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-12.34.3.tgz", + 
"integrity": "sha512-v81ecyZKYO/DfpTwHivqkxSUBzvceOpoI+wLfgCgoUIKxlFKEXdg0oR9imxwXumT4SFy8vRk9xzJ5l3/Du/55Q==", + "license": "MIT", + "dependencies": { + "motion-dom": "^12.34.3", + "motion-utils": "^12.29.2", + "tslib": "^2.4.0" + }, + "peerDependencies": { + "@emotion/is-prop-valid": "*", + "react": "^18.0.0 || ^19.0.0", + "react-dom": "^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@emotion/is-prop-valid": { + "optional": true + }, + "react": { + "optional": true + }, + "react-dom": { + "optional": true + } + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-nonce": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz", + "integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + 
"node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "license": "ISC" + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hast-util-to-jsx-runtime": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.6.tgz", + "integrity": "sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "devlop": "^1.0.0", + "estree-util-is-identifier-name": "^3.0.0", + "hast-util-whitespace": "^3.0.0", + "mdast-util-mdx-expression": "^2.0.0", + "mdast-util-mdx-jsx": "^3.0.0", + "mdast-util-mdxjs-esm": "^2.0.0", + "property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0", + "style-to-js": "^1.0.0", + "unist-util-position": "^5.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-whitespace": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz", + "integrity": "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/unified" + } + }, + "node_modules/html-url-attributes": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/html-url-attributes/-/html-url-attributes-3.0.1.tgz", + "integrity": "sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/inline-style-parser": { + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.7.tgz", + "integrity": "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA==", + "license": "MIT" + }, + "node_modules/is-alphabetical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz", + "integrity": "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-alphanumerical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz", + "integrity": "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==", + "license": "MIT", + "dependencies": { + "is-alphabetical": "^2.0.0", + "is-decimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-core-module": { + "version": 
"2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-decimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz", + "integrity": "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-hexadecimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz", + "integrity": "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": 
"sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jiti": { + "version": "1.21.7", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz", + "integrity": "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==", + "dev": true, + "license": "MIT", + "bin": { + "jiti": "bin/jiti.js" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "license": "MIT" + }, + "node_modules/lilconfig": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", + "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": 
"sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lucide-react": { + "version": "0.454.0", + "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.454.0.tgz", + "integrity": "sha512-hw7zMDwykCLnEzgncEEjHeA6+45aeEzRYuKHuyRSOPkhko+J3ySGjGIzu+mmMfDFG1vazHepMaYFYHbTFAZAAQ==", + "license": "ISC", + "peerDependencies": { + "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0-rc" + } + }, + "node_modules/mdast-util-from-markdown": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.3.tgz", + "integrity": "sha512-W4mAWTvSlKvf8L6J+VN9yLSqQ9AOAAvHuoDAmPkz4dHf553m5gVj2ejadHJhoJmcmxEnOv6Pa8XJhpxE93kb8Q==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark": "^4.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.1.tgz", + "integrity": "sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.2.0.tgz", + "integrity": "sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "ccount": "^2.0.0", + "devlop": "^1.1.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "parse-entities": "^4.0.0", + "stringify-entities": "^4.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz", + "integrity": "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-phrasing": { + "version": "4.1.0", + 
"resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz", + "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-hast": { + "version": "13.2.1", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.1.tgz", + "integrity": "sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@ungap/structured-clone": "^1.0.0", + "devlop": "^1.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "trim-lines": "^3.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-markdown": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz", + "integrity": "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^4.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "unist-util-visit": "^5.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz", + "integrity": 
"sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromark": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz", + "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-core-commonmark": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz", + "integrity": 
"sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-factory-destination": "^2.0.0", + "micromark-factory-label": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-factory-title": "^2.0.0", + "micromark-factory-whitespace": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-html-tag-name": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-destination": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz", + "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-label": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz", + "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": 
"https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-space": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", + "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-title": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz", + "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-whitespace": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz", + "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": 
"OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-character": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", + "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-chunked": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz", + "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-classify-character": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz", + "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": 
"^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-combine-extensions": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz", + "integrity": "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-chunked": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-decode-numeric-character-reference": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz", + "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-decode-string": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz", + "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "micromark-util-character": "^2.0.0", + 
"micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-encode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz", + "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-html-tag-name": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz", + "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-normalize-identifier": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz", + "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-resolve-all": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz", + "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==", + 
"funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-sanitize-uri": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz", + "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-subtokenize": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz", + "integrity": "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-symbol": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", + "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + 
"url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-types": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz", + "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/motion-dom": { + "version": "12.34.3", + "resolved": "https://registry.npmjs.org/motion-dom/-/motion-dom-12.34.3.tgz", + "integrity": "sha512-sYgFe+pR9aIM7o4fhs2aXtOI+oqlUd33N9Yoxcgo1Fv7M20sRkHtCmzE/VRNIcq7uNJ+qio+Xubt1FXH3pQ+eQ==", + "license": "MIT", + "dependencies": { + "motion-utils": "^12.29.2" + } + }, + "node_modules/motion-utils": { + "version": "12.29.2", + "resolved": "https://registry.npmjs.org/motion-utils/-/motion-utils-12.29.2.tgz", + "integrity": "sha512-G3kc34H2cX2gI63RqU+cZq+zWRRPSsNIOjpdl9TN4AQwC4sgwYPl/Q/Obf/d53nOm569T0fYK+tcoSV50BWx8A==", + "license": "MIT" + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/mz": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": 
"sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + } + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/next": { + "version": "14.2.26", + "resolved": "https://registry.npmjs.org/next/-/next-14.2.26.tgz", + "integrity": "sha512-b81XSLihMwCfwiUVRRja3LphLo4uBBMZEzBBWMaISbKTwOmq3wPknIETy/8000tr7Gq4WmbuFYPS7jOYIf+ZJw==", + "deprecated": "This version has a security vulnerability. Please upgrade to a patched version. 
See https://nextjs.org/blog/security-update-2025-12-11 for more details.", + "license": "MIT", + "dependencies": { + "@next/env": "14.2.26", + "@swc/helpers": "0.5.5", + "busboy": "1.6.0", + "caniuse-lite": "^1.0.30001579", + "graceful-fs": "^4.2.11", + "postcss": "8.4.31", + "styled-jsx": "5.1.1" + }, + "bin": { + "next": "dist/bin/next" + }, + "engines": { + "node": ">=18.17.0" + }, + "optionalDependencies": { + "@next/swc-darwin-arm64": "14.2.26", + "@next/swc-darwin-x64": "14.2.26", + "@next/swc-linux-arm64-gnu": "14.2.26", + "@next/swc-linux-arm64-musl": "14.2.26", + "@next/swc-linux-x64-gnu": "14.2.26", + "@next/swc-linux-x64-musl": "14.2.26", + "@next/swc-win32-arm64-msvc": "14.2.26", + "@next/swc-win32-ia32-msvc": "14.2.26", + "@next/swc-win32-x64-msvc": "14.2.26" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.1.0", + "@playwright/test": "^1.41.2", + "react": "^18.2.0", + "react-dom": "^18.2.0", + "sass": "^1.3.0" + }, + "peerDependenciesMeta": { + "@opentelemetry/api": { + "optional": true + }, + "@playwright/test": { + "optional": true + }, + "sass": { + "optional": true + } + } + }, + "node_modules/next/node_modules/postcss": { + "version": "8.4.31", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", + "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.6", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/node-releases": { + "version": "2.0.27", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", + "integrity": 
"sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/parse-entities": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.2.tgz", + "integrity": "sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0", + "character-entities-legacy": "^3.0.0", + "character-reference-invalid": "^2.0.0", + "decode-named-character-reference": "^1.0.0", + "is-alphanumerical": "^2.0.0", + "is-decimal": "^2.0.0", + "is-hexadecimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/parse-entities/node_modules/@types/unist": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", + "integrity": 
"sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==", + "license": "MIT" + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pirates": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": 
"https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-import": { + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", + "integrity": "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", + "dev": true, + "license": "MIT", + "dependencies": { + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "postcss": "^8.0.0" + } + }, + "node_modules/postcss-js": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.1.0.tgz", + "integrity": "sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "camelcase-css": "^2.0.1" + }, + "engines": { + "node": "^12 || ^14 || >= 16" + }, + "peerDependencies": { + "postcss": "^8.4.21" + } + }, + "node_modules/postcss-load-config": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-6.0.1.tgz", + "integrity": "sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { 
+ "lilconfig": "^3.1.1" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "jiti": ">=1.21.0", + "postcss": ">=8.0.9", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + }, + "postcss": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/postcss-nested": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz", + "integrity": "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "postcss-selector-parser": "^6.1.1" + }, + "engines": { + "node": ">=12.0" + }, + "peerDependencies": { + "postcss": "^8.2.14" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", + "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/property-information": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-7.1.0.tgz", + "integrity": 
"sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/react": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "peerDependencies": { + "react": "^18.3.1" + } + }, + "node_modules/react-markdown": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/react-markdown/-/react-markdown-9.1.0.tgz", + "integrity": "sha512-xaijuJB0kzGiUdG7nc2MOMDUDBWPyGAjZtUrow9XxUeua8IqeP+VlIfAZ3bphpcLTnSZXz6z9jcVC/TCwbfgdw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "hast-util-to-jsx-runtime": "^2.0.0", + "html-url-attributes": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + "remark-parse": 
"^11.0.0", + "remark-rehype": "^11.0.0", + "unified": "^11.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "peerDependencies": { + "@types/react": ">=18", + "react": ">=18" + } + }, + "node_modules/react-remove-scroll": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.7.2.tgz", + "integrity": "sha512-Iqb9NjCCTt6Hf+vOdNIZGdTiH1QSqr27H/Ek9sv/a97gfueI/5h1s3yRi1nngzMUaOOToin5dI1dXKdXiF+u0Q==", + "license": "MIT", + "dependencies": { + "react-remove-scroll-bar": "^2.3.7", + "react-style-singleton": "^2.2.3", + "tslib": "^2.1.0", + "use-callback-ref": "^1.3.3", + "use-sidecar": "^1.1.3" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/react-remove-scroll-bar": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.8.tgz", + "integrity": "sha512-9r+yi9+mgU33AKcj6IbT9oRCO78WriSj6t/cF8DWBZJ9aOGPOTEDvdUDz1FwKim7QXWwmHqtdHnRJfhAxEG46Q==", + "license": "MIT", + "dependencies": { + "react-style-singleton": "^2.2.2", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/react-style-singleton": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.3.tgz", + "integrity": "sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ==", + "license": "MIT", + "dependencies": { + "get-nonce": "^1.0.0", + "tslib": "^2.0.0" + }, + "engines": { + 
"node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/read-cache": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", + "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pify": "^2.3.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/remark-parse": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz", + "integrity": "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype": { + "version": "11.1.2", + "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.2.tgz", + "integrity": "sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "mdast-util-to-hast": "^13.0.0", + "unified": "^11.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/resolve": { 
+ "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/scheduler": { + "version": "0.23.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", + "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": 
"sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/streamsearch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz", + "integrity": "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/stringify-entities": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz", + "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==", + "license": "MIT", + "dependencies": { + "character-entities-html4": "^2.0.0", + "character-entities-legacy": "^3.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/style-to-js": { + "version": "1.1.21", + "resolved": "https://registry.npmjs.org/style-to-js/-/style-to-js-1.1.21.tgz", + "integrity": "sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ==", + "license": "MIT", + "dependencies": { + "style-to-object": "1.0.14" + } + }, + "node_modules/style-to-object": { + "version": "1.0.14", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.14.tgz", + "integrity": "sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw==", + "license": "MIT", + "dependencies": { + "inline-style-parser": 
"0.2.7" + } + }, + "node_modules/styled-jsx": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.1.1.tgz", + "integrity": "sha512-pW7uC1l4mBZ8ugbiZrcIsiIvVx1UmTfw7UkC3Um2tmfUq9Bhk8IiyEIPl6F8agHgjzku6j0xQEZbfA5uSgSaCw==", + "license": "MIT", + "dependencies": { + "client-only": "0.0.1" + }, + "engines": { + "node": ">= 12.0.0" + }, + "peerDependencies": { + "react": ">= 16.8.0 || 17.x.x || ^18.0.0-0" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/sucrase": { + "version": "3.35.1", + "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.1.tgz", + "integrity": "sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.2", + "commander": "^4.0.0", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.1", + "tinyglobby": "^0.2.11", + "ts-interface-checker": "^0.1.9" + }, + "bin": { + "sucrase": "bin/sucrase", + "sucrase-node": "bin/sucrase-node" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/tailwind-merge": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-2.6.1.tgz", + "integrity": "sha512-Oo6tHdpZsGpkKG88HJ8RR1rg/RdnEkQEfMoEk2x1XRI3F1AxeU+ijRXpiVUF4UbLfcxxRGw6TbUINKYdWVsQTQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": 
"https://github.com/sponsors/dcastil" + } + }, + "node_modules/tailwindcss": { + "version": "3.4.19", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.19.tgz", + "integrity": "sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@alloc/quick-lru": "^5.2.0", + "arg": "^5.0.2", + "chokidar": "^3.6.0", + "didyoumean": "^1.2.2", + "dlv": "^1.1.3", + "fast-glob": "^3.3.2", + "glob-parent": "^6.0.2", + "is-glob": "^4.0.3", + "jiti": "^1.21.7", + "lilconfig": "^3.1.3", + "micromatch": "^4.0.8", + "normalize-path": "^3.0.0", + "object-hash": "^3.0.0", + "picocolors": "^1.1.1", + "postcss": "^8.4.47", + "postcss-import": "^15.1.0", + "postcss-js": "^4.0.1", + "postcss-load-config": "^4.0.2 || ^5.0 || ^6.0", + "postcss-nested": "^6.2.0", + "postcss-selector-parser": "^6.1.2", + "resolve": "^1.22.8", + "sucrase": "^3.35.0" + }, + "bin": { + "tailwind": "lib/cli.js", + "tailwindcss": "lib/cli.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/thenify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0" + } + }, + "node_modules/thenify-all": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": 
"sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/trim-lines": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz", + "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/trough": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/trough/-/trough-2.2.0.tgz", + "integrity": "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/ts-interface-checker": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", + "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/unified": { + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz", + "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "bail": "^2.0.0", + "devlop": "^1.0.0", + "extend": "^3.0.0", + "is-plain-obj": "^4.0.0", + "trough": "^2.0.0", + "vfile": "^6.0.0" + }, + 
"funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-is": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.1.tgz", + "integrity": "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-position": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", + "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.1.0.tgz", + "integrity": "sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit-parents": { + "version": 
"6.0.2", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.2.tgz", + "integrity": "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/use-callback-ref": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.3.tgz", + "integrity": "sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/use-sidecar": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.3.tgz", + "integrity": 
"sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ==", + "license": "MIT", + "dependencies": { + "detect-node-es": "^1.1.0", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/use-sync-external-store": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.6.0.tgz", + "integrity": "sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==", + "license": "MIT", + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/vfile": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz", + "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vfile-message": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz", + "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/unified" + } + }, + "node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + } + } +} diff --git a/backend/webui/package.json b/backend/webui/package.json new file mode 100644 index 0000000..99ffbcc --- /dev/null +++ b/backend/webui/package.json @@ -0,0 +1,38 @@ +{ + "name": "gooseek-webui", + "version": "1.0.0", + "private": true, + "scripts": { + "dev": "next dev -p 3000", + "build": "next build", + "start": "next start -p 3000", + "lint": "next lint" + }, + "dependencies": { + "@radix-ui/react-avatar": "^1.1.11", + "@radix-ui/react-dialog": "^1.1.15", + "@radix-ui/react-dropdown-menu": "^2.1.16", + "@radix-ui/react-scroll-area": "^1.2.10", + "@radix-ui/react-separator": "^1.1.8", + "@radix-ui/react-switch": "^1.2.6", + "@radix-ui/react-tabs": "^1.1.13", + "@radix-ui/react-tooltip": "^1.2.8", + "clsx": "^2.1.1", + "framer-motion": "^12.34.3", + "lucide-react": "^0.454.0", + "next": "^14.2.26", + "react": "^18.3.1", + "react-dom": "^18.3.1", + "react-markdown": "^9.0.1", + "tailwind-merge": "^2.5.4" + }, + "devDependencies": { + "@types/node": "^22.9.0", + "@types/react": "^18.3.12", + "@types/react-dom": "^18.3.1", + "autoprefixer": "^10.4.20", + "postcss": "^8.4.47", + "tailwindcss": "^3.4.14", + "typescript": "^5.6.3" + } +} diff --git a/backend/webui/postcss.config.js b/backend/webui/postcss.config.js new file mode 100644 index 0000000..12a703d --- /dev/null +++ b/backend/webui/postcss.config.js @@ -0,0 +1,6 @@ +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/backend/webui/public/logo.svg b/backend/webui/public/logo.svg new file mode 100644 index 0000000..e4f39b1 --- /dev/null +++ b/backend/webui/public/logo.svg 
@@ -0,0 +1,12 @@ + + + + + + + + + + + + diff --git a/backend/webui/src/app/(main)/computer/page.tsx b/backend/webui/src/app/(main)/computer/page.tsx new file mode 100644 index 0000000..c9ec83a --- /dev/null +++ b/backend/webui/src/app/(main)/computer/page.tsx @@ -0,0 +1,752 @@ +'use client'; + +import { useState, useEffect, useCallback, useRef } from 'react'; +import { motion, AnimatePresence } from 'framer-motion'; +import { + Cpu, + Play, + Pause, + Square, + Clock, + Zap, + Calendar, + CheckCircle2, + XCircle, + Loader2, + ChevronRight, + FileCode, + FileText, + Image as ImageIcon, + Database, + Download, + RefreshCw, + AlertCircle, + Timer, + DollarSign, + Layers, + Bot, + Sparkles, + ArrowRight, + Settings2, + Send, + Globe, + Code2, + BarChart3, + Mail, + MessageCircle, + Webhook, + HardDrive, + Menu, + ArrowLeft, + X, +} from 'lucide-react'; +import type { + ComputerTask, + ComputerTaskStatus, + DurationMode, + Artifact, + ComputerTaskEvent, +} from '@/lib/types'; +import { + executeComputerTask, + fetchComputerTasks, + cancelComputerTask, + streamComputerTask, + fetchComputerArtifacts, + downloadArtifact, +} from '@/lib/api'; + +const durationModes: { value: DurationMode; label: string; desc: string; icon: React.ElementType }[] = [ + { value: 'short', label: '30 мин', desc: 'Быстрые', icon: Zap }, + { value: 'medium', label: '4 часа', desc: 'Стандартные', icon: Clock }, + { value: 'long', label: '24 часа', desc: 'Комплексные', icon: Calendar }, + { value: 'extended', label: '7 дней', desc: 'Мониторинг', icon: Timer }, + { value: 'unlimited', label: '∞', desc: 'Без лимита', icon: Sparkles }, +]; + +const taskExamples = [ + { + icon: BarChart3, + title: 'Исследование конкурентов', + query: 'Проанализируй топ-5 конкурентов в сфере e-commerce в России', + color: 'text-blue-400', + bgColor: 'bg-blue-500/10', + }, + { + icon: Code2, + title: 'Разработка дашборда', + query: 'Создай дашборд для отслеживания курсов криптовалют', + color: 'text-emerald-400', + 
bgColor: 'bg-emerald-500/10', + }, + { + icon: Globe, + title: 'Мониторинг новостей', + query: 'Мониторь новости по теме AI в медицине каждые 6 часов', + color: 'text-purple-400', + bgColor: 'bg-purple-500/10', + }, + { + icon: FileText, + title: 'Генерация отчёта', + query: 'Исследуй рынок EdTech и создай PDF-отчёт с визуализациями', + color: 'text-orange-400', + bgColor: 'bg-orange-500/10', + }, +]; + +const statusConfig: Record = { + pending: { color: 'text-secondary', bg: 'bg-surface/60', icon: Clock, label: 'Ожидание' }, + planning: { color: 'text-blue-400', bg: 'bg-blue-400/10', icon: Bot, label: 'Планирование' }, + executing: { color: 'text-success', bg: 'bg-success/10', icon: Play, label: 'Выполнение' }, + long_running: { color: 'text-success', bg: 'bg-success/10', icon: Loader2, label: 'Долгая задача' }, + waiting_user: { color: 'text-warning', bg: 'bg-warning/10', icon: AlertCircle, label: 'Ожидает ввода' }, + completed: { color: 'text-success', bg: 'bg-success/10', icon: CheckCircle2, label: 'Завершено' }, + failed: { color: 'text-error', bg: 'bg-error/10', icon: XCircle, label: 'Ошибка' }, + cancelled: { color: 'text-muted', bg: 'bg-surface/40', icon: Square, label: 'Отменено' }, + scheduled: { color: 'text-accent-secondary', bg: 'bg-accent-secondary/10', icon: Calendar, label: 'Запланировано' }, + paused: { color: 'text-warning', bg: 'bg-warning/10', icon: Pause, label: 'Пауза' }, + checkpoint: { color: 'text-accent', bg: 'bg-accent/10', icon: RefreshCw, label: 'Чекпоинт' }, +}; + +const artifactIcons: Record = { + file: FileText, + code: FileCode, + report: FileText, + deployment: Globe, + image: ImageIcon, + data: Database, +}; + +export default function ComputerPage() { + const [tasks, setTasks] = useState([]); + const [selectedTask, setSelectedTask] = useState(null); + const [query, setQuery] = useState(''); + const [durationMode, setDurationMode] = useState('medium'); + const [isLoading, setIsLoading] = useState(false); + const [isExecuting, 
setIsExecuting] = useState(false); + const [showAdvanced, setShowAdvanced] = useState(false); + const [artifacts, setArtifacts] = useState([]); + const [events, setEvents] = useState([]); + const [showTaskList, setShowTaskList] = useState(false); + const textareaRef = useRef(null); + const eventsEndRef = useRef(null); + + const loadTasks = useCallback(async () => { + setIsLoading(true); + try { + const result = await fetchComputerTasks(undefined, 50); + setTasks(result.tasks || []); + } catch (error) { + console.error('Failed to load tasks:', error); + } finally { + setIsLoading(false); + } + }, []); + + useEffect(() => { + loadTasks(); + }, [loadTasks]); + + useEffect(() => { + eventsEndRef.current?.scrollIntoView({ behavior: 'smooth' }); + }, [events]); + + const handleExecute = async () => { + if (!query.trim()) return; + + setIsExecuting(true); + setEvents([]); + try { + const task = await executeComputerTask({ + query: query.trim(), + options: { + async: true, + durationMode, + enableSandbox: true, + enableBrowser: true, + }, + }); + + setSelectedTask(task); + setTasks((prev) => [task, ...prev]); + setQuery(''); + setShowTaskList(false); + + streamTaskEvents(task.id); + } catch (error) { + console.error('Failed to execute task:', error); + } finally { + setIsExecuting(false); + } + }; + + const streamTaskEvents = async (taskId: string) => { + try { + for await (const event of streamComputerTask(taskId)) { + setEvents((prev) => [...prev, event]); + + if (event.status) { + setSelectedTask((prev) => + prev?.id === taskId ? { ...prev, status: event.status!, progress: event.progress ?? prev.progress } : prev + ); + setTasks((prev) => + prev.map((t) => + t.id === taskId ? { ...t, status: event.status!, progress: event.progress ?? 
t.progress } : t + ) + ); + } + + if (event.type === 'task_completed' || event.type === 'task_failed') { + loadArtifacts(taskId); + } + } + } catch (error) { + console.error('Stream error:', error); + } + }; + + const loadArtifacts = async (taskId: string) => { + try { + const result = await fetchComputerArtifacts(taskId); + setArtifacts(result.artifacts || []); + } catch (error) { + console.error('Failed to load artifacts:', error); + } + }; + + const handleCancel = async (taskId: string) => { + try { + await cancelComputerTask(taskId); + setTasks((prev) => prev.map((t) => (t.id === taskId ? { ...t, status: 'cancelled' } : t))); + if (selectedTask?.id === taskId) { + setSelectedTask((prev) => (prev ? { ...prev, status: 'cancelled' } : null)); + } + } catch (error) { + console.error('Failed to cancel task:', error); + } + }; + + const handleDownload = async (artifact: Artifact) => { + try { + const blob = await downloadArtifact(artifact.id); + const url = URL.createObjectURL(blob); + const a = document.createElement('a'); + a.href = url; + a.download = artifact.name; + a.click(); + URL.revokeObjectURL(url); + } catch (error) { + console.error('Failed to download artifact:', error); + } + }; + + const handleExample = (exampleQuery: string) => { + setQuery(exampleQuery); + textareaRef.current?.focus(); + }; + + const handleSelectTask = (task: ComputerTask) => { + setSelectedTask(task); + setShowTaskList(false); + if (isTaskActive(task.status)) { + setEvents([]); + streamTaskEvents(task.id); + } else { + loadArtifacts(task.id); + } + }; + + const formatDuration = (ms: number): string => { + if (ms < 1000) return `${ms}мс`; + if (ms < 60000) return `${Math.round(ms / 1000)}с`; + if (ms < 3600000) return `${Math.round(ms / 60000)}м`; + return `${Math.round(ms / 3600000)}ч`; + }; + + const formatCost = (cost: number): string => { + return `$${cost.toFixed(4)}`; + }; + + const isTaskActive = (status: ComputerTaskStatus): boolean => { + return ['pending', 'planning', 
'executing', 'long_running'].includes(status); + }; + + return ( +
+ {/* Mobile Task List Overlay */} + + {showTaskList && ( + <> + setShowTaskList(false)} + className="fixed inset-0 z-40 bg-base/80 backdrop-blur-sm md:hidden" + /> + +
+

+ + Задачи +

+ +
+ +
+ + )} +
+ + {/* Desktop Left Panel - Task List */} +
+
+

+ + Computer +

+

Автономные AI-задачи

+
+ +
+ + {/* Main Content */} +
+ + {selectedTask ? ( + + {/* Task Header */} +
+
+ +
+

{selectedTask.query}

+
+ {(() => { + const config = statusConfig[selectedTask.status]; + const StatusIcon = config.icon; + return ( + + + {config.label} + + ); + })()} + + + {formatDuration(selectedTask.totalRuntime)} + + + + {formatCost(selectedTask.totalCost)} + +
+
+ {isTaskActive(selectedTask.status) && ( + + )} +
+ + {/* Progress Bar */} + {isTaskActive(selectedTask.status) && ( +
+
+ +
+
+ Прогресс + {selectedTask.progress}% +
+
+ )} +
+ + {/* Task Content */} +
+ {/* Sub Tasks */} + {selectedTask.subTasks && selectedTask.subTasks.length > 0 && ( +
+

+ + Подзадачи ({selectedTask.subTasks.length}) +

+
+ {selectedTask.subTasks.map((subtask) => { + const config = statusConfig[subtask.status]; + const StatusIcon = config.icon; + return ( +
+
+ +
+

{subtask.description}

+
+ {subtask.type} + {subtask.cost > 0 && ( + {formatCost(subtask.cost)} + )} +
+
+
+
+ ); + })} +
+
+ )} + + {/* Events Log */} + {events.length > 0 && ( +
+

+ + События +

+
+ {events.map((event, i) => ( +
+ + {new Date(event.timestamp).toLocaleTimeString()} + + {event.message || event.type} +
+ ))} +
+
+
+ )} + + {/* Artifacts */} + {artifacts.length > 0 && ( +
+

+ + Артефакты ({artifacts.length}) +

+
+ {artifacts.map((artifact) => { + const Icon = artifactIcons[artifact.type] || FileText; + return ( +
+
+
+ +
+
+

{artifact.name}

+
+ {artifact.type} + + {(artifact.size / 1024).toFixed(1)} KB + +
+
+ +
+
+ ); + })} +
+
+ )} +
+ + ) : ( + +
+ {/* Mobile task list button */} +
+ +
+ + {/* Header */} +
+

+ GooSeek Computer +

+

+ Автономный AI-агент для сложных задач: исследования, код, мониторинг. +

+
+ + {/* Task Input */} +
+
+