From a0e3748dde3a8f0f6b4861ec7294152fcd499c73 Mon Sep 17 00:00:00 2001 From: home Date: Sat, 28 Feb 2026 01:33:49 +0300 Subject: [PATCH] feat: auth service + security audit fixes + cleanup legacy services Major changes: - Add auth-svc: JWT auth, register/login/refresh, password reset - Add auth UI: modals, pages (/login, /register, /forgot-password) - Add usage tracking (usage_metrics table, daily limits) - Add tiered rate limiting (free/pro/business) - Add LLM usage limits per tier Security fixes: - All repos now require userID for Update/Delete operations - JWT middleware in chat-svc, llm-svc, agent-svc, discover-svc - ErrNotFound/ErrForbidden errors for proper access control Cleanup: - Remove legacy TypeScript services/ directory - Remove computer-svc (to be reimplemented) - Remove old deploy/docker configs New files: - backend/cmd/auth-svc/main.go - backend/internal/auth/{types,repository}.go - backend/internal/usage/{types,repository}.go - backend/pkg/middleware/{llm_limits,ratelimit_tiered}.go - backend/webui/src/components/auth/* - backend/webui/src/app/(auth)/* Made-with: Cursor --- CONTINUE.md | 930 +----- TEST_RESULTS.md | 280 -- api-1.json | 2896 ----------------- backend/.env.computer-svc | 65 - backend/Makefile | 2 +- backend/cmd/admin-svc/main.go | 440 +++ backend/cmd/agent-svc/main.go | 13 +- backend/cmd/api-gateway/main.go | 60 +- backend/cmd/auth-svc/main.go | 460 +++ backend/cmd/browser-svc/main.go | 53 - backend/cmd/chat-svc/main.go | 9 +- backend/cmd/collection-svc/main.go | 31 +- backend/cmd/computer-svc/main.go | 552 ---- backend/cmd/discover-svc/main.go | 120 +- backend/cmd/file-svc/main.go | 9 +- backend/cmd/llm-svc/main.go | 52 +- backend/cmd/thread-svc/main.go | 88 +- backend/deploy/docker/Dockerfile.all | 10 +- backend/deploy/docker/Dockerfile.computer-svc | 32 - backend/deploy/docker/docker-compose.yml | 125 +- backend/deploy/k8s/computer-svc.yaml | 137 - backend/deploy/k8s/kustomization.yaml | 1 - backend/go.mod | 7 +- backend/go.sum | 
6 +- backend/internal/admin/migrations.go | 153 + backend/internal/admin/repositories.go | 984 ++++++ backend/internal/admin/types.go | 256 ++ backend/internal/auth/repository.go | 455 +++ backend/internal/auth/types.go | 118 + backend/internal/computer/browser/browser.go | 587 ---- backend/internal/computer/browser/server.go | 555 ---- backend/internal/computer/computer.go | 763 ----- .../internal/computer/connectors/connector.go | 104 - backend/internal/computer/connectors/email.go | 215 -- .../internal/computer/connectors/storage.go | 432 --- .../internal/computer/connectors/telegram.go | 263 -- .../internal/computer/connectors/webhook.go | 275 -- backend/internal/computer/executor.go | 574 ---- backend/internal/computer/memory.go | 377 --- backend/internal/computer/planner.go | 371 --- backend/internal/computer/router.go | 244 -- backend/internal/computer/sandbox.go | 431 --- backend/internal/computer/scheduler.go | 386 --- backend/internal/computer/types.go | 376 --- backend/internal/db/collection_repo.go | 67 +- backend/internal/db/computer_artifact_repo.go | 322 -- backend/internal/db/computer_memory_repo.go | 306 -- backend/internal/db/computer_task_repo.go | 411 --- backend/internal/db/file_repo.go | 36 +- backend/internal/db/memory_repo.go | 30 +- backend/internal/db/page_repo.go | 47 +- backend/internal/db/space_repo.go | 30 +- backend/internal/db/thread_repo.go | 99 +- backend/internal/usage/repository.go | 177 + backend/internal/usage/types.go | 56 + backend/pkg/config/config.go | 21 + backend/pkg/middleware/llm_limits.go | 106 + backend/pkg/middleware/ratelimit_tiered.go | 158 + backend/pkg/storage/minio.go | 228 ++ backend/webui/.env.example | 9 + backend/webui/Dockerfile | 4 + .../src/app/(auth)/forgot-password/page.tsx | 33 + backend/webui/src/app/(auth)/layout.tsx | 5 + backend/webui/src/app/(auth)/login/page.tsx | 62 + .../webui/src/app/(auth)/register/page.tsx | 58 + .../src/app/(auth)/reset-password/page.tsx | 208 ++ 
.../webui/src/app/(main)/admin/audit/page.tsx | 225 ++ .../src/app/(main)/admin/discover/page.tsx | 513 +++ backend/webui/src/app/(main)/admin/layout.tsx | 125 + backend/webui/src/app/(main)/admin/page.tsx | 180 + .../webui/src/app/(main)/admin/posts/page.tsx | 415 +++ .../src/app/(main)/admin/settings/page.tsx | 424 +++ .../webui/src/app/(main)/admin/users/page.tsx | 386 +++ .../webui/src/app/(main)/computer/page.tsx | 752 ----- .../webui/src/app/(main)/discover/page.tsx | 182 +- backend/webui/src/app/(main)/layout.tsx | 2 +- .../webui/src/app/(main)/medicine/page.tsx | 298 ++ backend/webui/src/app/(main)/page.tsx | 18 +- .../webui/src/app/(main)/settings/page.tsx | 43 +- backend/webui/src/app/(main)/travel/page.tsx | 316 ++ backend/webui/src/app/globals.css | 179 + backend/webui/src/app/layout.tsx | 5 +- backend/webui/src/app/providers.tsx | 17 + backend/webui/src/components/ChatInput.tsx | 172 +- backend/webui/src/components/Sidebar.tsx | 105 +- .../webui/src/components/auth/AuthModal.tsx | 72 + .../components/auth/ForgotPasswordForm.tsx | 132 + .../webui/src/components/auth/LoginForm.tsx | 148 + .../src/components/auth/RegisterForm.tsx | 252 ++ .../webui/src/components/auth/UserMenu.tsx | 140 + backend/webui/src/components/auth/index.ts | 5 + backend/webui/src/components/index.ts | 1 + .../settings/ConnectorsSettings.tsx | 695 ++++ backend/webui/src/lib/api.ts | 476 ++- backend/webui/src/lib/auth.ts | 276 ++ backend/webui/src/lib/config/menu.ts | 56 + .../webui/src/lib/contexts/AuthContext.tsx | 141 + .../src/lib/contexts/LanguageContext.tsx | 61 + backend/webui/src/lib/hooks/useChat.ts | 38 +- backend/webui/src/lib/types.ts | 325 +- deploy/docker/BUILD.md | 34 - deploy/docker/Caddyfile | 38 - deploy/docker/Dockerfile.caddy | 7 - deploy/docker/docker-compose.yml | 724 ----- deploy/docker/run.sh | 171 - deploy/docker/searxng/limiter.toml | 18 - deploy/docker/searxng/settings.yml | 107 - deploy/docker/vendor.sh | 99 - services/api-gateway/Dockerfile | 19 - 
services/api-gateway/package.json | 22 - services/api-gateway/src/index.ts | 175 - services/api-gateway/tsconfig.json | 15 - services/audit-svc/Dockerfile | 18 - services/audit-svc/package.json | 20 - services/audit-svc/src/index.ts | 43 - services/audit-svc/tsconfig.json | 13 - services/auth-svc/Dockerfile | 26 - services/auth-svc/data/auth.db | Bin 135168 -> 0 bytes services/auth-svc/package.json | 33 - services/auth-svc/src/index.ts | 63 - services/auth-svc/src/lib/auth.ts | 103 - services/auth-svc/src/lib/db.ts | 15 - services/auth-svc/tsconfig.json | 13 - services/billing-svc/Dockerfile | 20 - services/billing-svc/drizzle/0000_init.sql | 36 - services/billing-svc/package.json | 24 - services/billing-svc/src/db/index.ts | 8 - services/billing-svc/src/db/push.ts | 18 - services/billing-svc/src/db/schema.ts | 35 - services/billing-svc/src/index.ts | 321 -- services/billing-svc/src/plans.ts | 56 - services/billing-svc/tsconfig.json | 16 - services/cache-worker/Dockerfile | 18 - services/cache-worker/package.json | 23 - .../cache-worker/src/lib/crawl4ai-client.ts | 154 - services/cache-worker/src/run.ts | 201 -- services/cache-worker/src/tasks/discover.ts | 519 --- services/cache-worker/src/tasks/finance.ts | 55 - services/cache-worker/src/tasks/travel.ts | 103 - services/cache-worker/tsconfig.json | 13 - services/chat-svc/Dockerfile | 20 - services/chat-svc/data/config.json | 21 - services/chat-svc/package.json | 39 - services/chat-svc/src/index.ts | 423 --- .../chat-svc/src/lib/config/clientRegistry.ts | 29 - services/chat-svc/src/lib/config/index.ts | 535 --- .../src/lib/config/providersLoader.ts | 6 - .../chat-svc/src/lib/config/serverRegistry.ts | 17 - services/chat-svc/src/lib/config/types.ts | 109 - services/chat-svc/src/lib/embedding-client.ts | 43 - services/chat-svc/src/lib/serverUtils.ts | 7 - services/chat-svc/src/lib/types.ts | 127 - services/chat-svc/src/lib/uploads/manager.ts | 221 -- services/chat-svc/src/lib/utils/files.ts | 17 - 
services/chat-svc/src/lib/utils/splitText.ts | 74 - services/chat-svc/tsconfig.json | 15 - services/create-svc/Dockerfile | 18 - services/create-svc/package.json | 22 - services/create-svc/src/index.ts | 399 --- services/create-svc/tsconfig.json | 13 - services/discover-svc/Dockerfile | 20 - services/discover-svc/package.json | 25 - services/discover-svc/src/discover-db.ts | 276 -- services/discover-svc/src/index.ts | 926 ------ services/discover-svc/src/searxng.ts | 133 - services/discover-svc/tsconfig.json | 16 - services/geo-device-svc/.gitignore | 5 - services/geo-device-svc/Dockerfile | 18 - services/geo-device-svc/README.md | 57 - services/geo-device-svc/package.json | 26 - services/geo-device-svc/src/index.ts | 21 - services/geo-device-svc/src/lib/context.ts | 69 - services/geo-device-svc/src/lib/device.ts | 35 - services/geo-device-svc/src/lib/geo.ts | 22 - services/geo-device-svc/src/routes/context.ts | 54 - services/geo-device-svc/src/types.ts | 56 - services/geo-device-svc/tsconfig.json | 13 - services/library-svc/Dockerfile | 19 - services/library-svc/drizzle/0000_init.sql | 8 - .../drizzle/0001_thread_messages.sql | 13 - .../library-svc/drizzle/0002_project_id.sql | 2 - services/library-svc/package.json | 24 - services/library-svc/src/db/index.ts | 7 - services/library-svc/src/db/push.ts | 21 - services/library-svc/src/db/schema.ts | 24 - services/library-svc/src/index.ts | 367 --- services/library-svc/tsconfig.json | 13 - services/llm-svc/Dockerfile | 20 - services/llm-svc/data/llm-providers.json | 25 - services/llm-svc/package.json | 28 - services/llm-svc/src/index.ts | 466 --- .../llm-svc/src/lib/config/ProvidersConfig.ts | 227 -- services/llm-svc/src/lib/config/index.ts | 2 - .../llm-svc/src/lib/config/serverRegistry.ts | 15 - services/llm-svc/src/lib/config/types.ts | 28 - .../llm-svc/src/lib/models/base/embedding.ts | 8 - services/llm-svc/src/lib/models/base/llm.ts | 19 - .../llm-svc/src/lib/models/base/provider.ts | 44 - 
.../providers/anthropic/anthropicLLM.ts | 5 - .../lib/models/providers/anthropic/index.ts | 115 - .../providers/gemini/geminiEmbedding.ts | 5 - .../lib/models/providers/gemini/geminiLLM.ts | 5 - .../src/lib/models/providers/gemini/index.ts | 144 - .../src/lib/models/providers/groq/groqLLM.ts | 5 - .../src/lib/models/providers/groq/index.ts | 113 - .../llm-svc/src/lib/models/providers/index.ts | 20 - .../lib/models/providers/lemonade/index.ts | 153 - .../providers/lemonade/lemonadeEmbedding.ts | 5 - .../models/providers/lemonade/lemonadeLLM.ts | 5 - .../lib/models/providers/lmstudio/index.ts | 143 - .../providers/lmstudio/lmstudioEmbedding.ts | 5 - .../models/providers/lmstudio/lmstudioLLM.ts | 5 - .../src/lib/models/providers/ollama/index.ts | 133 - .../providers/ollama/ollamaEmbedding.ts | 37 - .../lib/models/providers/ollama/ollamaLLM.ts | 251 -- .../src/lib/models/providers/openai/index.ts | 226 -- .../providers/openai/openaiEmbedding.ts | 42 - .../lib/models/providers/openai/openaiLLM.ts | 275 -- .../src/lib/models/providers/timeweb/index.ts | 165 - .../models/providers/timeweb/timewebLLM.ts | 245 -- services/llm-svc/src/lib/models/registry.ts | 224 -- services/llm-svc/src/lib/models/types.ts | 86 - services/llm-svc/src/lib/serverUtils.ts | 6 - services/llm-svc/src/lib/types.ts | 39 - services/llm-svc/tsconfig.json | 15 - services/localization-svc/Dockerfile | 18 - services/localization-svc/package.json | 22 - services/localization-svc/src/index.ts | 27 - .../src/lib/countryToLocale.ts | 80 - .../localization-svc/src/lib/geoClient.ts | 31 - .../localization-svc/src/lib/resolveLocale.ts | 40 - .../localization-svc/src/routes/locale.ts | 41 - .../src/routes/translations.ts | 34 - .../src/translations/index.ts | 175 - services/localization-svc/src/types.ts | 34 - services/localization-svc/tsconfig.json | 13 - services/master-agents-svc/Dockerfile | 18 - services/master-agents-svc/package.json | 34 - services/master-agents-svc/src/index.ts | 200 -- 
.../src/lib/actions/__reasoning_preamble.ts | 28 - .../src/lib/actions/academic_search.ts | 93 - .../master-agents-svc/src/lib/actions/done.ts | 16 - .../src/lib/actions/registry.ts | 57 - .../src/lib/actions/scrape_url.ts | 163 - .../src/lib/actions/social_search.ts | 93 - .../src/lib/actions/types.ts | 58 - .../src/lib/actions/web_search.ts | 186 -- .../src/lib/agent/classifier.ts | 40 - .../src/lib/agent/fastClassifier.ts | 109 - .../master-agents-svc/src/lib/agent/master.ts | 106 - .../src/lib/agent/researcher.ts | 308 -- .../src/lib/agent/searchOrchestrator.ts | 624 ---- services/master-agents-svc/src/lib/config.ts | 36 - .../src/lib/content-classifier.ts | 515 --- .../src/lib/crawl4ai-client.ts | 292 -- .../src/lib/embedding-client.ts | 37 - .../master-agents-svc/src/lib/llm-client.ts | 100 - .../master-agents-svc/src/lib/media-search.ts | 176 - .../src/lib/models/base/llm.ts | 28 - .../src/lib/models/ollama.ts | 74 - .../src/lib/models/openai.ts | 90 - .../src/lib/models/registry.ts | 61 - .../src/lib/prompts/classifier.ts | 64 - .../src/lib/prompts/detectLanguage.ts | 29 - .../src/lib/prompts/locale.ts | 17 - .../src/lib/prompts/master.ts | 22 - .../src/lib/prompts/researcher.ts | 173 - .../src/lib/prompts/writer.ts | 115 - .../src/lib/proxy-manager.ts | 324 -- .../master-agents-svc/src/lib/reranker.ts | 314 -- services/master-agents-svc/src/lib/searxng.ts | 112 - services/master-agents-svc/src/lib/session.ts | 68 - .../src/lib/stealth-crawler.ts | 394 --- .../src/lib/tools/calculator.ts | 22 - .../src/lib/tools/finance.ts | 26 - .../master-agents-svc/src/lib/tools/media.ts | 36 - .../src/lib/tools/registry.ts | 30 - .../master-agents-svc/src/lib/tools/scrape.ts | 79 - .../master-agents-svc/src/lib/tools/search.ts | 51 - .../master-agents-svc/src/lib/tools/types.ts | 13 - services/master-agents-svc/src/lib/types.ts | 47 - .../src/lib/types/widgets.ts | 414 --- .../src/lib/utils/formatHistory.ts | 7 - .../src/lib/widgets/calculationWidget.ts | 49 - 
.../src/lib/widgets/executor.ts | 26 - .../src/lib/widgets/index.ts | 10 - .../src/lib/widgets/stockWidget.ts | 103 - .../src/lib/widgets/types.ts | 20 - .../src/lib/widgets/weatherWidget.ts | 99 - services/master-agents-svc/src/turndown.d.ts | 5 - services/master-agents-svc/tsconfig.json | 13 - services/media-svc/DEPRECATED.md | 24 - services/media-svc/package.json | 26 - services/media-svc/src/index.ts | 85 - services/media-svc/src/lib/agents/image.ts | 38 - services/media-svc/src/lib/agents/video.ts | 47 - services/media-svc/src/lib/config.ts | 44 - services/media-svc/src/lib/models/base/llm.ts | 11 - services/media-svc/src/lib/models/ollama.ts | 48 - services/media-svc/src/lib/models/openai.ts | 49 - services/media-svc/src/lib/models/registry.ts | 65 - services/media-svc/src/lib/prompts/image.ts | 29 - services/media-svc/src/lib/prompts/video.ts | 29 - services/media-svc/src/lib/search.ts | 58 - services/media-svc/src/lib/types.ts | 6 - .../media-svc/src/lib/utils/formatHistory.ts | 7 - services/media-svc/tsconfig.json | 15 - services/memory-svc/Dockerfile | 20 - services/memory-svc/drizzle/0000_init.sql | 12 - services/memory-svc/package.json | 23 - services/memory-svc/src/db/index.ts | 8 - services/memory-svc/src/db/push.ts | 18 - services/memory-svc/src/db/schema.ts | 11 - services/memory-svc/src/index.ts | 138 - services/memory-svc/tsconfig.json | 13 - services/notifications-svc/Dockerfile | 21 - .../notifications-svc/drizzle/0000_init.sql | 30 - services/notifications-svc/package.json | 27 - services/notifications-svc/src/db/index.ts | 8 - services/notifications-svc/src/db/push.ts | 18 - services/notifications-svc/src/db/schema.ts | 28 - services/notifications-svc/src/index.ts | 304 -- services/notifications-svc/src/lib/email.ts | 63 - services/notifications-svc/tsconfig.json | 14 - services/package.json | 6 - services/profile-svc/Dockerfile | 21 - services/profile-svc/drizzle/0000_init.sql | 12 - services/profile-svc/package.json | 23 - 
services/profile-svc/src/db/index.ts | 8 - services/profile-svc/src/db/push.ts | 18 - services/profile-svc/src/db/schema.ts | 25 - services/profile-svc/src/index.ts | 196 -- services/profile-svc/tsconfig.json | 13 - services/projects-svc/Dockerfile | 18 - services/projects-svc/package.json | 22 - services/projects-svc/src/index.ts | 212 -- services/projects-svc/tsconfig.json | 13 - services/search-svc/Dockerfile | 18 - services/search-svc/package.json | 22 - services/search-svc/src/index.ts | 170 - services/search-svc/tsconfig.json | 15 - services/suggestions-svc/package.json | 26 - services/suggestions-svc/src/index.ts | 57 - .../src/lib/agents/suggestions.ts | 33 - services/suggestions-svc/src/lib/config.ts | 37 - .../src/lib/models/base/llm.ts | 11 - .../suggestions-svc/src/lib/models/ollama.ts | 45 - .../suggestions-svc/src/lib/models/openai.ts | 49 - .../src/lib/models/registry.ts | 65 - .../src/lib/prompts/detectLanguage.ts | 29 - .../suggestions-svc/src/lib/prompts/locale.ts | 17 - .../src/lib/prompts/suggestions.ts | 17 - services/suggestions-svc/src/lib/types.ts | 6 - .../src/lib/utils/formatHistory.ts | 7 - services/suggestions-svc/tsconfig.json | 13 - services/web-svc/.eslintrc.json | 3 - services/web-svc/Dockerfile | 27 - services/web-svc/data/.gitignore | 2 - services/web-svc/next-env.d.ts | 6 - services/web-svc/next.config.mjs | 76 - services/web-svc/package.json | 75 - services/web-svc/postcss.config.js | 6 - services/web-svc/public/fonts/pp-ed-ul.otf | Bin 57792 -> 0 bytes services/web-svc/public/icon-100.png | Bin 916 -> 0 bytes services/web-svc/public/icon-50.png | Bin 515 -> 0 bytes services/web-svc/public/icon.png | Bin 30498 -> 0 bytes services/web-svc/public/logo.svg | 11 - services/web-svc/public/next.svg | 1 - services/web-svc/public/screenshots/p1.png | Bin 187470 -> 0 bytes .../web-svc/public/screenshots/p1_small.png | Bin 133129 -> 0 bytes services/web-svc/public/screenshots/p2.png | Bin 642123 -> 0 bytes 
.../web-svc/public/screenshots/p2_small.png | Bin 206329 -> 0 bytes services/web-svc/public/vercel.svg | 1 - .../web-svc/public/weather-ico/clear-day.svg | 131 - .../public/weather-ico/clear-night.svg | 159 - .../public/weather-ico/cloudy-1-day.svg | 178 - .../public/weather-ico/cloudy-1-night.svg | 206 -- .../web-svc/public/weather-ico/fog-day.svg | 244 -- .../web-svc/public/weather-ico/fog-night.svg | 309 -- .../web-svc/public/weather-ico/frost-day.svg | 204 -- .../public/weather-ico/frost-night.svg | 269 -- .../public/weather-ico/rain-and-sleet-mix.svg | 141 - .../public/weather-ico/rainy-1-day.svg | 179 - .../public/weather-ico/rainy-1-night.svg | 243 -- .../public/weather-ico/rainy-2-day.svg | 204 -- .../public/weather-ico/rainy-2-night.svg | 256 -- .../public/weather-ico/rainy-3-day.svg | 206 -- .../public/weather-ico/rainy-3-night.svg | 270 -- .../scattered-thunderstorms-day.svg | 374 --- .../scattered-thunderstorms-night.svg | 283 -- .../weather-ico/severe-thunderstorm.svg | 307 -- .../public/weather-ico/snowy-1-day.svg | 241 -- .../public/weather-ico/snowy-1-night.svg | 269 -- .../public/weather-ico/snowy-2-day.svg | 273 -- .../public/weather-ico/snowy-2-night.svg | 301 -- .../public/weather-ico/snowy-3-day.svg | 334 -- .../public/weather-ico/snowy-3-night.svg | 361 -- services/web-svc/src/app/api/chat/route.ts | 85 - services/web-svc/src/app/api/health/route.ts | 11 - services/web-svc/src/app/api/metrics/route.ts | 17 - services/web-svc/src/app/api/ready/route.ts | 12 - .../src/app/api/reconnect/[id]/route.ts | 94 - services/web-svc/src/app/api/uploads/route.ts | 34 - services/web-svc/src/app/c/[chatId]/page.tsx | 5 - .../web-svc/src/app/collections/[id]/page.tsx | 80 - services/web-svc/src/app/discover/page.tsx | 680 ---- .../web-svc/src/app/finance/[ticker]/page.tsx | 243 -- services/web-svc/src/app/finance/page.tsx | 386 --- .../src/app/finance/predictions/[id]/page.tsx | 103 - services/web-svc/src/app/globals.css | 98 - 
services/web-svc/src/app/icon.svg | 4 - services/web-svc/src/app/layout.tsx | 96 - services/web-svc/src/app/library/layout.tsx | 12 - services/web-svc/src/app/library/page.tsx | 309 -- services/web-svc/src/app/manifest.ts | 54 - services/web-svc/src/app/offline/page.tsx | 29 - services/web-svc/src/app/page.tsx | 14 - services/web-svc/src/app/profile/page.tsx | 726 ----- services/web-svc/src/app/sign-in/page.tsx | 123 - services/web-svc/src/app/sign-up/page.tsx | 146 - .../app/spaces/[projectId]/SpaceCarousels.tsx | 47 - .../spaces/[projectId]/chat/[chatId]/page.tsx | 33 - .../src/app/spaces/[projectId]/page.tsx | 583 ---- services/web-svc/src/app/spaces/page.tsx | 202 -- .../web-svc/src/app/spaces/templates/page.tsx | 93 - services/web-svc/src/app/travel/page.tsx | 184 -- .../web-svc/src/components/AssistantSteps.tsx | 359 -- services/web-svc/src/components/Chat.tsx | 112 - .../web-svc/src/components/ChatWindow.tsx | 90 - .../web-svc/src/components/ClientOnly.tsx | 25 - .../web-svc/src/components/DataFetchError.tsx | 30 - .../web-svc/src/components/DeleteChat.tsx | 138 - .../src/components/Discover/MajorNewsCard.tsx | 87 - .../src/components/Discover/SmallNewsCard.tsx | 84 - services/web-svc/src/components/EmptyChat.tsx | 77 - .../src/components/EmptyChatMessageInput.tsx | 90 - .../web-svc/src/components/GuestMigration.tsx | 48 - .../src/components/GuestWarningBanner.tsx | 51 - services/web-svc/src/components/Layout.tsx | 9 - .../src/components/MessageActions/Copy.tsx | 51 - .../src/components/MessageActions/Rewrite.tsx | 20 - .../web-svc/src/components/MessageBox.tsx | 359 -- .../src/components/MessageBoxLoading.tsx | 11 - .../web-svc/src/components/MessageInput.tsx | 104 - .../MessageInputActions/AnswerMode.tsx | 128 - .../components/MessageInputActions/Attach.tsx | 218 -- .../MessageInputActions/AttachSmall.tsx | 186 -- .../MessageInputActions/InputBarPlus.tsx | 169 - .../components/MessageRenderer/Citation.tsx | 161 - .../CodeBlock/CodeBlockDarkTheme.ts | 102 - 
.../CodeBlock/CodeBlockLightTheme.ts | 102 - .../MessageRenderer/CodeBlock/index.tsx | 64 - .../web-svc/src/components/MessageSources.tsx | 175 - services/web-svc/src/components/Navbar.tsx | 403 --- .../src/components/NewsArticleWidget.tsx | 80 - .../src/components/RelatedQuestions.tsx | 194 -- .../web-svc/src/components/SearchImages.tsx | 165 - .../web-svc/src/components/SearchVideos.tsx | 240 -- .../Sections/Models/AddModelDialog.tsx | 159 - .../Sections/Models/AddProviderDialog.tsx | 212 -- .../Sections/Models/DeleteProviderDialog.tsx | 115 - .../Sections/Models/ModelProvider.tsx | 224 -- .../Settings/Sections/Models/ModelSelect.tsx | 98 - .../Settings/Sections/Models/Section.tsx | 91 - .../Sections/Models/UpdateProviderDialog.tsx | 184 -- .../Settings/Sections/Personalization.tsx | 32 - .../Settings/Sections/Preferences.tsx | 32 - .../components/Settings/Sections/Search.tsx | 32 - .../components/Settings/SettingsButton.tsx | 29 - .../Settings/SettingsButtonMobile.tsx | 26 - .../components/Settings/SettingsDialogue.tsx | 256 -- .../src/components/Settings/SettingsField.tsx | 410 --- services/web-svc/src/components/Sidebar.tsx | 806 ----- .../components/Sidebar/MenuSettingsPanel.tsx | 220 -- .../web-svc/src/components/SourcesPanel.tsx | 264 -- services/web-svc/src/components/ThinkBox.tsx | 51 - .../web-svc/src/components/TravelStepper.tsx | 445 --- .../web-svc/src/components/UnregisterSW.tsx | 19 - .../web-svc/src/components/WeatherWidget.tsx | 238 -- .../src/components/Widgets/Calculation.tsx | 46 - .../src/components/Widgets/CardGallery.tsx | 203 -- .../components/Widgets/InlineImageGallery.tsx | 266 -- .../src/components/Widgets/KnowledgeCard.tsx | 464 --- .../src/components/Widgets/ProductCard.tsx | 231 -- .../src/components/Widgets/ProfileCard.tsx | 216 -- .../src/components/Widgets/PromoCard.tsx | 245 -- .../src/components/Widgets/Renderer.tsx | 246 -- .../web-svc/src/components/Widgets/Stock.tsx | 517 --- .../src/components/Widgets/UnifiedCard.tsx | 229 -- 
.../src/components/Widgets/VideoCard.tsx | 234 -- .../src/components/Widgets/VideoEmbed.tsx | 250 -- .../src/components/Widgets/Weather.tsx | 422 --- .../web-svc/src/components/theme/Provider.tsx | 16 - .../web-svc/src/components/theme/Switcher.tsx | 60 - services/web-svc/src/components/ui/Loader.tsx | 22 - services/web-svc/src/components/ui/Select.tsx | 46 - services/web-svc/src/instrumentation.ts | 6 - services/web-svc/src/lib/actions.ts | 26 - services/web-svc/src/lib/auth-client.ts | 36 - .../web-svc/src/lib/config/clientRegistry.ts | 29 - .../web-svc/src/lib/config/sidebarMenu.ts | 87 - services/web-svc/src/lib/config/types.ts | 109 - services/web-svc/src/lib/geoDevice.ts | 131 - services/web-svc/src/lib/guest-migration.ts | 87 - services/web-svc/src/lib/guest-storage.ts | 167 - services/web-svc/src/lib/hooks/useChat.tsx | 1098 ------- services/web-svc/src/lib/localization.ts | 98 - .../web-svc/src/lib/localization/context.tsx | 229 -- .../src/lib/localization/countryToLocale.ts | 62 - .../lib/localization/embeddedTranslations.ts | 640 ---- services/web-svc/src/lib/project-files-db.ts | 103 - services/web-svc/src/lib/project-storage.ts | 79 - services/web-svc/src/lib/serverUtils.ts | 7 - services/web-svc/src/lib/session.ts | 105 - services/web-svc/src/lib/types-ui.ts | 15 - services/web-svc/src/lib/types.ts | 148 - services/web-svc/src/lib/utils.ts | 27 - services/web-svc/tailwind.config.ts | 57 - services/web-svc/tsconfig.json | 28 - services/web-svc/tsconfig.tsbuildinfo | 1 - test-chat-proper.sh | 70 - test-chat-via-webui.sh | 58 - test-chat.sh | 42 - 523 files changed, 10776 insertions(+), 59630 deletions(-) delete mode 100644 TEST_RESULTS.md delete mode 100644 api-1.json delete mode 100644 backend/.env.computer-svc create mode 100644 backend/cmd/admin-svc/main.go create mode 100644 backend/cmd/auth-svc/main.go delete mode 100644 backend/cmd/browser-svc/main.go delete mode 100644 backend/cmd/computer-svc/main.go delete mode 100644 
backend/deploy/docker/Dockerfile.computer-svc delete mode 100644 backend/deploy/k8s/computer-svc.yaml create mode 100644 backend/internal/admin/migrations.go create mode 100644 backend/internal/admin/repositories.go create mode 100644 backend/internal/admin/types.go create mode 100644 backend/internal/auth/repository.go create mode 100644 backend/internal/auth/types.go delete mode 100644 backend/internal/computer/browser/browser.go delete mode 100644 backend/internal/computer/browser/server.go delete mode 100644 backend/internal/computer/computer.go delete mode 100644 backend/internal/computer/connectors/connector.go delete mode 100644 backend/internal/computer/connectors/email.go delete mode 100644 backend/internal/computer/connectors/storage.go delete mode 100644 backend/internal/computer/connectors/telegram.go delete mode 100644 backend/internal/computer/connectors/webhook.go delete mode 100644 backend/internal/computer/executor.go delete mode 100644 backend/internal/computer/memory.go delete mode 100644 backend/internal/computer/planner.go delete mode 100644 backend/internal/computer/router.go delete mode 100644 backend/internal/computer/sandbox.go delete mode 100644 backend/internal/computer/scheduler.go delete mode 100644 backend/internal/computer/types.go delete mode 100644 backend/internal/db/computer_artifact_repo.go delete mode 100644 backend/internal/db/computer_memory_repo.go delete mode 100644 backend/internal/db/computer_task_repo.go create mode 100644 backend/internal/usage/repository.go create mode 100644 backend/internal/usage/types.go create mode 100644 backend/pkg/middleware/llm_limits.go create mode 100644 backend/pkg/middleware/ratelimit_tiered.go create mode 100644 backend/pkg/storage/minio.go create mode 100644 backend/webui/src/app/(auth)/forgot-password/page.tsx create mode 100644 backend/webui/src/app/(auth)/layout.tsx create mode 100644 backend/webui/src/app/(auth)/login/page.tsx create mode 100644 
backend/webui/src/app/(auth)/register/page.tsx create mode 100644 backend/webui/src/app/(auth)/reset-password/page.tsx create mode 100644 backend/webui/src/app/(main)/admin/audit/page.tsx create mode 100644 backend/webui/src/app/(main)/admin/discover/page.tsx create mode 100644 backend/webui/src/app/(main)/admin/layout.tsx create mode 100644 backend/webui/src/app/(main)/admin/page.tsx create mode 100644 backend/webui/src/app/(main)/admin/posts/page.tsx create mode 100644 backend/webui/src/app/(main)/admin/settings/page.tsx create mode 100644 backend/webui/src/app/(main)/admin/users/page.tsx delete mode 100644 backend/webui/src/app/(main)/computer/page.tsx create mode 100644 backend/webui/src/app/(main)/medicine/page.tsx create mode 100644 backend/webui/src/app/(main)/travel/page.tsx create mode 100644 backend/webui/src/app/providers.tsx create mode 100644 backend/webui/src/components/auth/AuthModal.tsx create mode 100644 backend/webui/src/components/auth/ForgotPasswordForm.tsx create mode 100644 backend/webui/src/components/auth/LoginForm.tsx create mode 100644 backend/webui/src/components/auth/RegisterForm.tsx create mode 100644 backend/webui/src/components/auth/UserMenu.tsx create mode 100644 backend/webui/src/components/auth/index.ts create mode 100644 backend/webui/src/components/settings/ConnectorsSettings.tsx create mode 100644 backend/webui/src/lib/auth.ts create mode 100644 backend/webui/src/lib/config/menu.ts create mode 100644 backend/webui/src/lib/contexts/AuthContext.tsx create mode 100644 backend/webui/src/lib/contexts/LanguageContext.tsx delete mode 100644 deploy/docker/BUILD.md delete mode 100644 deploy/docker/Caddyfile delete mode 100644 deploy/docker/Dockerfile.caddy delete mode 100644 deploy/docker/docker-compose.yml delete mode 100755 deploy/docker/run.sh delete mode 100644 deploy/docker/searxng/limiter.toml delete mode 100644 deploy/docker/searxng/settings.yml delete mode 100755 deploy/docker/vendor.sh delete mode 100644 
services/api-gateway/Dockerfile delete mode 100644 services/api-gateway/package.json delete mode 100644 services/api-gateway/src/index.ts delete mode 100644 services/api-gateway/tsconfig.json delete mode 100644 services/audit-svc/Dockerfile delete mode 100644 services/audit-svc/package.json delete mode 100644 services/audit-svc/src/index.ts delete mode 100644 services/audit-svc/tsconfig.json delete mode 100644 services/auth-svc/Dockerfile delete mode 100644 services/auth-svc/data/auth.db delete mode 100644 services/auth-svc/package.json delete mode 100644 services/auth-svc/src/index.ts delete mode 100644 services/auth-svc/src/lib/auth.ts delete mode 100644 services/auth-svc/src/lib/db.ts delete mode 100644 services/auth-svc/tsconfig.json delete mode 100644 services/billing-svc/Dockerfile delete mode 100644 services/billing-svc/drizzle/0000_init.sql delete mode 100644 services/billing-svc/package.json delete mode 100644 services/billing-svc/src/db/index.ts delete mode 100644 services/billing-svc/src/db/push.ts delete mode 100644 services/billing-svc/src/db/schema.ts delete mode 100644 services/billing-svc/src/index.ts delete mode 100644 services/billing-svc/src/plans.ts delete mode 100644 services/billing-svc/tsconfig.json delete mode 100644 services/cache-worker/Dockerfile delete mode 100644 services/cache-worker/package.json delete mode 100644 services/cache-worker/src/lib/crawl4ai-client.ts delete mode 100644 services/cache-worker/src/run.ts delete mode 100644 services/cache-worker/src/tasks/discover.ts delete mode 100644 services/cache-worker/src/tasks/finance.ts delete mode 100644 services/cache-worker/src/tasks/travel.ts delete mode 100644 services/cache-worker/tsconfig.json delete mode 100644 services/chat-svc/Dockerfile delete mode 100644 services/chat-svc/data/config.json delete mode 100644 services/chat-svc/package.json delete mode 100644 services/chat-svc/src/index.ts delete mode 100644 services/chat-svc/src/lib/config/clientRegistry.ts delete mode 100644 
services/chat-svc/src/lib/config/index.ts delete mode 100644 services/chat-svc/src/lib/config/providersLoader.ts delete mode 100644 services/chat-svc/src/lib/config/serverRegistry.ts delete mode 100644 services/chat-svc/src/lib/config/types.ts delete mode 100644 services/chat-svc/src/lib/embedding-client.ts delete mode 100644 services/chat-svc/src/lib/serverUtils.ts delete mode 100644 services/chat-svc/src/lib/types.ts delete mode 100644 services/chat-svc/src/lib/uploads/manager.ts delete mode 100644 services/chat-svc/src/lib/utils/files.ts delete mode 100644 services/chat-svc/src/lib/utils/splitText.ts delete mode 100644 services/chat-svc/tsconfig.json delete mode 100644 services/create-svc/Dockerfile delete mode 100644 services/create-svc/package.json delete mode 100644 services/create-svc/src/index.ts delete mode 100644 services/create-svc/tsconfig.json delete mode 100644 services/discover-svc/Dockerfile delete mode 100644 services/discover-svc/package.json delete mode 100644 services/discover-svc/src/discover-db.ts delete mode 100644 services/discover-svc/src/index.ts delete mode 100644 services/discover-svc/src/searxng.ts delete mode 100644 services/discover-svc/tsconfig.json delete mode 100644 services/geo-device-svc/.gitignore delete mode 100644 services/geo-device-svc/Dockerfile delete mode 100644 services/geo-device-svc/README.md delete mode 100644 services/geo-device-svc/package.json delete mode 100644 services/geo-device-svc/src/index.ts delete mode 100644 services/geo-device-svc/src/lib/context.ts delete mode 100644 services/geo-device-svc/src/lib/device.ts delete mode 100644 services/geo-device-svc/src/lib/geo.ts delete mode 100644 services/geo-device-svc/src/routes/context.ts delete mode 100644 services/geo-device-svc/src/types.ts delete mode 100644 services/geo-device-svc/tsconfig.json delete mode 100644 services/library-svc/Dockerfile delete mode 100644 services/library-svc/drizzle/0000_init.sql delete mode 100644 
services/library-svc/drizzle/0001_thread_messages.sql delete mode 100644 services/library-svc/drizzle/0002_project_id.sql delete mode 100644 services/library-svc/package.json delete mode 100644 services/library-svc/src/db/index.ts delete mode 100644 services/library-svc/src/db/push.ts delete mode 100644 services/library-svc/src/db/schema.ts delete mode 100644 services/library-svc/src/index.ts delete mode 100644 services/library-svc/tsconfig.json delete mode 100644 services/llm-svc/Dockerfile delete mode 100644 services/llm-svc/data/llm-providers.json delete mode 100644 services/llm-svc/package.json delete mode 100644 services/llm-svc/src/index.ts delete mode 100644 services/llm-svc/src/lib/config/ProvidersConfig.ts delete mode 100644 services/llm-svc/src/lib/config/index.ts delete mode 100644 services/llm-svc/src/lib/config/serverRegistry.ts delete mode 100644 services/llm-svc/src/lib/config/types.ts delete mode 100644 services/llm-svc/src/lib/models/base/embedding.ts delete mode 100644 services/llm-svc/src/lib/models/base/llm.ts delete mode 100644 services/llm-svc/src/lib/models/base/provider.ts delete mode 100644 services/llm-svc/src/lib/models/providers/anthropic/anthropicLLM.ts delete mode 100644 services/llm-svc/src/lib/models/providers/anthropic/index.ts delete mode 100644 services/llm-svc/src/lib/models/providers/gemini/geminiEmbedding.ts delete mode 100644 services/llm-svc/src/lib/models/providers/gemini/geminiLLM.ts delete mode 100644 services/llm-svc/src/lib/models/providers/gemini/index.ts delete mode 100644 services/llm-svc/src/lib/models/providers/groq/groqLLM.ts delete mode 100644 services/llm-svc/src/lib/models/providers/groq/index.ts delete mode 100644 services/llm-svc/src/lib/models/providers/index.ts delete mode 100644 services/llm-svc/src/lib/models/providers/lemonade/index.ts delete mode 100644 services/llm-svc/src/lib/models/providers/lemonade/lemonadeEmbedding.ts delete mode 100644 
services/llm-svc/src/lib/models/providers/lemonade/lemonadeLLM.ts delete mode 100644 services/llm-svc/src/lib/models/providers/lmstudio/index.ts delete mode 100644 services/llm-svc/src/lib/models/providers/lmstudio/lmstudioEmbedding.ts delete mode 100644 services/llm-svc/src/lib/models/providers/lmstudio/lmstudioLLM.ts delete mode 100644 services/llm-svc/src/lib/models/providers/ollama/index.ts delete mode 100644 services/llm-svc/src/lib/models/providers/ollama/ollamaEmbedding.ts delete mode 100644 services/llm-svc/src/lib/models/providers/ollama/ollamaLLM.ts delete mode 100644 services/llm-svc/src/lib/models/providers/openai/index.ts delete mode 100644 services/llm-svc/src/lib/models/providers/openai/openaiEmbedding.ts delete mode 100644 services/llm-svc/src/lib/models/providers/openai/openaiLLM.ts delete mode 100644 services/llm-svc/src/lib/models/providers/timeweb/index.ts delete mode 100644 services/llm-svc/src/lib/models/providers/timeweb/timewebLLM.ts delete mode 100644 services/llm-svc/src/lib/models/registry.ts delete mode 100644 services/llm-svc/src/lib/models/types.ts delete mode 100644 services/llm-svc/src/lib/serverUtils.ts delete mode 100644 services/llm-svc/src/lib/types.ts delete mode 100644 services/llm-svc/tsconfig.json delete mode 100644 services/localization-svc/Dockerfile delete mode 100644 services/localization-svc/package.json delete mode 100644 services/localization-svc/src/index.ts delete mode 100644 services/localization-svc/src/lib/countryToLocale.ts delete mode 100644 services/localization-svc/src/lib/geoClient.ts delete mode 100644 services/localization-svc/src/lib/resolveLocale.ts delete mode 100644 services/localization-svc/src/routes/locale.ts delete mode 100644 services/localization-svc/src/routes/translations.ts delete mode 100644 services/localization-svc/src/translations/index.ts delete mode 100644 services/localization-svc/src/types.ts delete mode 100644 services/localization-svc/tsconfig.json delete mode 100644 
services/master-agents-svc/Dockerfile delete mode 100644 services/master-agents-svc/package.json delete mode 100644 services/master-agents-svc/src/index.ts delete mode 100644 services/master-agents-svc/src/lib/actions/__reasoning_preamble.ts delete mode 100644 services/master-agents-svc/src/lib/actions/academic_search.ts delete mode 100644 services/master-agents-svc/src/lib/actions/done.ts delete mode 100644 services/master-agents-svc/src/lib/actions/registry.ts delete mode 100644 services/master-agents-svc/src/lib/actions/scrape_url.ts delete mode 100644 services/master-agents-svc/src/lib/actions/social_search.ts delete mode 100644 services/master-agents-svc/src/lib/actions/types.ts delete mode 100644 services/master-agents-svc/src/lib/actions/web_search.ts delete mode 100644 services/master-agents-svc/src/lib/agent/classifier.ts delete mode 100644 services/master-agents-svc/src/lib/agent/fastClassifier.ts delete mode 100644 services/master-agents-svc/src/lib/agent/master.ts delete mode 100644 services/master-agents-svc/src/lib/agent/researcher.ts delete mode 100644 services/master-agents-svc/src/lib/agent/searchOrchestrator.ts delete mode 100644 services/master-agents-svc/src/lib/config.ts delete mode 100644 services/master-agents-svc/src/lib/content-classifier.ts delete mode 100644 services/master-agents-svc/src/lib/crawl4ai-client.ts delete mode 100644 services/master-agents-svc/src/lib/embedding-client.ts delete mode 100644 services/master-agents-svc/src/lib/llm-client.ts delete mode 100644 services/master-agents-svc/src/lib/media-search.ts delete mode 100644 services/master-agents-svc/src/lib/models/base/llm.ts delete mode 100644 services/master-agents-svc/src/lib/models/ollama.ts delete mode 100644 services/master-agents-svc/src/lib/models/openai.ts delete mode 100644 services/master-agents-svc/src/lib/models/registry.ts delete mode 100644 services/master-agents-svc/src/lib/prompts/classifier.ts delete mode 100644 
services/master-agents-svc/src/lib/prompts/detectLanguage.ts delete mode 100644 services/master-agents-svc/src/lib/prompts/locale.ts delete mode 100644 services/master-agents-svc/src/lib/prompts/master.ts delete mode 100644 services/master-agents-svc/src/lib/prompts/researcher.ts delete mode 100644 services/master-agents-svc/src/lib/prompts/writer.ts delete mode 100644 services/master-agents-svc/src/lib/proxy-manager.ts delete mode 100644 services/master-agents-svc/src/lib/reranker.ts delete mode 100644 services/master-agents-svc/src/lib/searxng.ts delete mode 100644 services/master-agents-svc/src/lib/session.ts delete mode 100644 services/master-agents-svc/src/lib/stealth-crawler.ts delete mode 100644 services/master-agents-svc/src/lib/tools/calculator.ts delete mode 100644 services/master-agents-svc/src/lib/tools/finance.ts delete mode 100644 services/master-agents-svc/src/lib/tools/media.ts delete mode 100644 services/master-agents-svc/src/lib/tools/registry.ts delete mode 100644 services/master-agents-svc/src/lib/tools/scrape.ts delete mode 100644 services/master-agents-svc/src/lib/tools/search.ts delete mode 100644 services/master-agents-svc/src/lib/tools/types.ts delete mode 100644 services/master-agents-svc/src/lib/types.ts delete mode 100644 services/master-agents-svc/src/lib/types/widgets.ts delete mode 100644 services/master-agents-svc/src/lib/utils/formatHistory.ts delete mode 100644 services/master-agents-svc/src/lib/widgets/calculationWidget.ts delete mode 100644 services/master-agents-svc/src/lib/widgets/executor.ts delete mode 100644 services/master-agents-svc/src/lib/widgets/index.ts delete mode 100644 services/master-agents-svc/src/lib/widgets/stockWidget.ts delete mode 100644 services/master-agents-svc/src/lib/widgets/types.ts delete mode 100644 services/master-agents-svc/src/lib/widgets/weatherWidget.ts delete mode 100644 services/master-agents-svc/src/turndown.d.ts delete mode 100644 services/master-agents-svc/tsconfig.json delete mode 100644 
services/media-svc/DEPRECATED.md delete mode 100644 services/media-svc/package.json delete mode 100644 services/media-svc/src/index.ts delete mode 100644 services/media-svc/src/lib/agents/image.ts delete mode 100644 services/media-svc/src/lib/agents/video.ts delete mode 100644 services/media-svc/src/lib/config.ts delete mode 100644 services/media-svc/src/lib/models/base/llm.ts delete mode 100644 services/media-svc/src/lib/models/ollama.ts delete mode 100644 services/media-svc/src/lib/models/openai.ts delete mode 100644 services/media-svc/src/lib/models/registry.ts delete mode 100644 services/media-svc/src/lib/prompts/image.ts delete mode 100644 services/media-svc/src/lib/prompts/video.ts delete mode 100644 services/media-svc/src/lib/search.ts delete mode 100644 services/media-svc/src/lib/types.ts delete mode 100644 services/media-svc/src/lib/utils/formatHistory.ts delete mode 100644 services/media-svc/tsconfig.json delete mode 100644 services/memory-svc/Dockerfile delete mode 100644 services/memory-svc/drizzle/0000_init.sql delete mode 100644 services/memory-svc/package.json delete mode 100644 services/memory-svc/src/db/index.ts delete mode 100644 services/memory-svc/src/db/push.ts delete mode 100644 services/memory-svc/src/db/schema.ts delete mode 100644 services/memory-svc/src/index.ts delete mode 100644 services/memory-svc/tsconfig.json delete mode 100644 services/notifications-svc/Dockerfile delete mode 100644 services/notifications-svc/drizzle/0000_init.sql delete mode 100644 services/notifications-svc/package.json delete mode 100644 services/notifications-svc/src/db/index.ts delete mode 100644 services/notifications-svc/src/db/push.ts delete mode 100644 services/notifications-svc/src/db/schema.ts delete mode 100644 services/notifications-svc/src/index.ts delete mode 100644 services/notifications-svc/src/lib/email.ts delete mode 100644 services/notifications-svc/tsconfig.json delete mode 100644 services/package.json delete mode 100644 
services/profile-svc/Dockerfile delete mode 100644 services/profile-svc/drizzle/0000_init.sql delete mode 100644 services/profile-svc/package.json delete mode 100644 services/profile-svc/src/db/index.ts delete mode 100644 services/profile-svc/src/db/push.ts delete mode 100644 services/profile-svc/src/db/schema.ts delete mode 100644 services/profile-svc/src/index.ts delete mode 100644 services/profile-svc/tsconfig.json delete mode 100644 services/projects-svc/Dockerfile delete mode 100644 services/projects-svc/package.json delete mode 100644 services/projects-svc/src/index.ts delete mode 100644 services/projects-svc/tsconfig.json delete mode 100644 services/search-svc/Dockerfile delete mode 100644 services/search-svc/package.json delete mode 100644 services/search-svc/src/index.ts delete mode 100644 services/search-svc/tsconfig.json delete mode 100644 services/suggestions-svc/package.json delete mode 100644 services/suggestions-svc/src/index.ts delete mode 100644 services/suggestions-svc/src/lib/agents/suggestions.ts delete mode 100644 services/suggestions-svc/src/lib/config.ts delete mode 100644 services/suggestions-svc/src/lib/models/base/llm.ts delete mode 100644 services/suggestions-svc/src/lib/models/ollama.ts delete mode 100644 services/suggestions-svc/src/lib/models/openai.ts delete mode 100644 services/suggestions-svc/src/lib/models/registry.ts delete mode 100644 services/suggestions-svc/src/lib/prompts/detectLanguage.ts delete mode 100644 services/suggestions-svc/src/lib/prompts/locale.ts delete mode 100644 services/suggestions-svc/src/lib/prompts/suggestions.ts delete mode 100644 services/suggestions-svc/src/lib/types.ts delete mode 100644 services/suggestions-svc/src/lib/utils/formatHistory.ts delete mode 100644 services/suggestions-svc/tsconfig.json delete mode 100644 services/web-svc/.eslintrc.json delete mode 100644 services/web-svc/Dockerfile delete mode 100644 services/web-svc/data/.gitignore delete mode 100644 services/web-svc/next-env.d.ts delete 
mode 100644 services/web-svc/next.config.mjs delete mode 100644 services/web-svc/package.json delete mode 100644 services/web-svc/postcss.config.js delete mode 100644 services/web-svc/public/fonts/pp-ed-ul.otf delete mode 100644 services/web-svc/public/icon-100.png delete mode 100644 services/web-svc/public/icon-50.png delete mode 100644 services/web-svc/public/icon.png delete mode 100644 services/web-svc/public/logo.svg delete mode 100644 services/web-svc/public/next.svg delete mode 100644 services/web-svc/public/screenshots/p1.png delete mode 100644 services/web-svc/public/screenshots/p1_small.png delete mode 100644 services/web-svc/public/screenshots/p2.png delete mode 100644 services/web-svc/public/screenshots/p2_small.png delete mode 100644 services/web-svc/public/vercel.svg delete mode 100644 services/web-svc/public/weather-ico/clear-day.svg delete mode 100644 services/web-svc/public/weather-ico/clear-night.svg delete mode 100644 services/web-svc/public/weather-ico/cloudy-1-day.svg delete mode 100644 services/web-svc/public/weather-ico/cloudy-1-night.svg delete mode 100644 services/web-svc/public/weather-ico/fog-day.svg delete mode 100644 services/web-svc/public/weather-ico/fog-night.svg delete mode 100644 services/web-svc/public/weather-ico/frost-day.svg delete mode 100644 services/web-svc/public/weather-ico/frost-night.svg delete mode 100644 services/web-svc/public/weather-ico/rain-and-sleet-mix.svg delete mode 100644 services/web-svc/public/weather-ico/rainy-1-day.svg delete mode 100644 services/web-svc/public/weather-ico/rainy-1-night.svg delete mode 100644 services/web-svc/public/weather-ico/rainy-2-day.svg delete mode 100644 services/web-svc/public/weather-ico/rainy-2-night.svg delete mode 100644 services/web-svc/public/weather-ico/rainy-3-day.svg delete mode 100644 services/web-svc/public/weather-ico/rainy-3-night.svg delete mode 100644 services/web-svc/public/weather-ico/scattered-thunderstorms-day.svg delete mode 100644 
services/web-svc/public/weather-ico/scattered-thunderstorms-night.svg delete mode 100644 services/web-svc/public/weather-ico/severe-thunderstorm.svg delete mode 100644 services/web-svc/public/weather-ico/snowy-1-day.svg delete mode 100644 services/web-svc/public/weather-ico/snowy-1-night.svg delete mode 100644 services/web-svc/public/weather-ico/snowy-2-day.svg delete mode 100644 services/web-svc/public/weather-ico/snowy-2-night.svg delete mode 100644 services/web-svc/public/weather-ico/snowy-3-day.svg delete mode 100644 services/web-svc/public/weather-ico/snowy-3-night.svg delete mode 100644 services/web-svc/src/app/api/chat/route.ts delete mode 100644 services/web-svc/src/app/api/health/route.ts delete mode 100644 services/web-svc/src/app/api/metrics/route.ts delete mode 100644 services/web-svc/src/app/api/ready/route.ts delete mode 100644 services/web-svc/src/app/api/reconnect/[id]/route.ts delete mode 100644 services/web-svc/src/app/api/uploads/route.ts delete mode 100644 services/web-svc/src/app/c/[chatId]/page.tsx delete mode 100644 services/web-svc/src/app/collections/[id]/page.tsx delete mode 100644 services/web-svc/src/app/discover/page.tsx delete mode 100644 services/web-svc/src/app/finance/[ticker]/page.tsx delete mode 100644 services/web-svc/src/app/finance/page.tsx delete mode 100644 services/web-svc/src/app/finance/predictions/[id]/page.tsx delete mode 100644 services/web-svc/src/app/globals.css delete mode 100644 services/web-svc/src/app/icon.svg delete mode 100644 services/web-svc/src/app/layout.tsx delete mode 100644 services/web-svc/src/app/library/layout.tsx delete mode 100644 services/web-svc/src/app/library/page.tsx delete mode 100644 services/web-svc/src/app/manifest.ts delete mode 100644 services/web-svc/src/app/offline/page.tsx delete mode 100644 services/web-svc/src/app/page.tsx delete mode 100644 services/web-svc/src/app/profile/page.tsx delete mode 100644 services/web-svc/src/app/sign-in/page.tsx delete mode 100644 
services/web-svc/src/app/sign-up/page.tsx delete mode 100644 services/web-svc/src/app/spaces/[projectId]/SpaceCarousels.tsx delete mode 100644 services/web-svc/src/app/spaces/[projectId]/chat/[chatId]/page.tsx delete mode 100644 services/web-svc/src/app/spaces/[projectId]/page.tsx delete mode 100644 services/web-svc/src/app/spaces/page.tsx delete mode 100644 services/web-svc/src/app/spaces/templates/page.tsx delete mode 100644 services/web-svc/src/app/travel/page.tsx delete mode 100644 services/web-svc/src/components/AssistantSteps.tsx delete mode 100644 services/web-svc/src/components/Chat.tsx delete mode 100644 services/web-svc/src/components/ChatWindow.tsx delete mode 100644 services/web-svc/src/components/ClientOnly.tsx delete mode 100644 services/web-svc/src/components/DataFetchError.tsx delete mode 100644 services/web-svc/src/components/DeleteChat.tsx delete mode 100644 services/web-svc/src/components/Discover/MajorNewsCard.tsx delete mode 100644 services/web-svc/src/components/Discover/SmallNewsCard.tsx delete mode 100644 services/web-svc/src/components/EmptyChat.tsx delete mode 100644 services/web-svc/src/components/EmptyChatMessageInput.tsx delete mode 100644 services/web-svc/src/components/GuestMigration.tsx delete mode 100644 services/web-svc/src/components/GuestWarningBanner.tsx delete mode 100644 services/web-svc/src/components/Layout.tsx delete mode 100644 services/web-svc/src/components/MessageActions/Copy.tsx delete mode 100644 services/web-svc/src/components/MessageActions/Rewrite.tsx delete mode 100644 services/web-svc/src/components/MessageBox.tsx delete mode 100644 services/web-svc/src/components/MessageBoxLoading.tsx delete mode 100644 services/web-svc/src/components/MessageInput.tsx delete mode 100644 services/web-svc/src/components/MessageInputActions/AnswerMode.tsx delete mode 100644 services/web-svc/src/components/MessageInputActions/Attach.tsx delete mode 100644 services/web-svc/src/components/MessageInputActions/AttachSmall.tsx delete 
mode 100644 services/web-svc/src/components/MessageInputActions/InputBarPlus.tsx delete mode 100644 services/web-svc/src/components/MessageRenderer/Citation.tsx delete mode 100644 services/web-svc/src/components/MessageRenderer/CodeBlock/CodeBlockDarkTheme.ts delete mode 100644 services/web-svc/src/components/MessageRenderer/CodeBlock/CodeBlockLightTheme.ts delete mode 100644 services/web-svc/src/components/MessageRenderer/CodeBlock/index.tsx delete mode 100644 services/web-svc/src/components/MessageSources.tsx delete mode 100644 services/web-svc/src/components/Navbar.tsx delete mode 100644 services/web-svc/src/components/NewsArticleWidget.tsx delete mode 100644 services/web-svc/src/components/RelatedQuestions.tsx delete mode 100644 services/web-svc/src/components/SearchImages.tsx delete mode 100644 services/web-svc/src/components/SearchVideos.tsx delete mode 100644 services/web-svc/src/components/Settings/Sections/Models/AddModelDialog.tsx delete mode 100644 services/web-svc/src/components/Settings/Sections/Models/AddProviderDialog.tsx delete mode 100644 services/web-svc/src/components/Settings/Sections/Models/DeleteProviderDialog.tsx delete mode 100644 services/web-svc/src/components/Settings/Sections/Models/ModelProvider.tsx delete mode 100644 services/web-svc/src/components/Settings/Sections/Models/ModelSelect.tsx delete mode 100644 services/web-svc/src/components/Settings/Sections/Models/Section.tsx delete mode 100644 services/web-svc/src/components/Settings/Sections/Models/UpdateProviderDialog.tsx delete mode 100644 services/web-svc/src/components/Settings/Sections/Personalization.tsx delete mode 100644 services/web-svc/src/components/Settings/Sections/Preferences.tsx delete mode 100644 services/web-svc/src/components/Settings/Sections/Search.tsx delete mode 100644 services/web-svc/src/components/Settings/SettingsButton.tsx delete mode 100644 services/web-svc/src/components/Settings/SettingsButtonMobile.tsx delete mode 100644 
services/web-svc/src/components/Settings/SettingsDialogue.tsx delete mode 100644 services/web-svc/src/components/Settings/SettingsField.tsx delete mode 100644 services/web-svc/src/components/Sidebar.tsx delete mode 100644 services/web-svc/src/components/Sidebar/MenuSettingsPanel.tsx delete mode 100644 services/web-svc/src/components/SourcesPanel.tsx delete mode 100644 services/web-svc/src/components/ThinkBox.tsx delete mode 100644 services/web-svc/src/components/TravelStepper.tsx delete mode 100644 services/web-svc/src/components/UnregisterSW.tsx delete mode 100644 services/web-svc/src/components/WeatherWidget.tsx delete mode 100644 services/web-svc/src/components/Widgets/Calculation.tsx delete mode 100644 services/web-svc/src/components/Widgets/CardGallery.tsx delete mode 100644 services/web-svc/src/components/Widgets/InlineImageGallery.tsx delete mode 100644 services/web-svc/src/components/Widgets/KnowledgeCard.tsx delete mode 100644 services/web-svc/src/components/Widgets/ProductCard.tsx delete mode 100644 services/web-svc/src/components/Widgets/ProfileCard.tsx delete mode 100644 services/web-svc/src/components/Widgets/PromoCard.tsx delete mode 100644 services/web-svc/src/components/Widgets/Renderer.tsx delete mode 100644 services/web-svc/src/components/Widgets/Stock.tsx delete mode 100644 services/web-svc/src/components/Widgets/UnifiedCard.tsx delete mode 100644 services/web-svc/src/components/Widgets/VideoCard.tsx delete mode 100644 services/web-svc/src/components/Widgets/VideoEmbed.tsx delete mode 100644 services/web-svc/src/components/Widgets/Weather.tsx delete mode 100644 services/web-svc/src/components/theme/Provider.tsx delete mode 100644 services/web-svc/src/components/theme/Switcher.tsx delete mode 100644 services/web-svc/src/components/ui/Loader.tsx delete mode 100644 services/web-svc/src/components/ui/Select.tsx delete mode 100644 services/web-svc/src/instrumentation.ts delete mode 100644 services/web-svc/src/lib/actions.ts delete mode 100644 
services/web-svc/src/lib/auth-client.ts delete mode 100644 services/web-svc/src/lib/config/clientRegistry.ts delete mode 100644 services/web-svc/src/lib/config/sidebarMenu.ts delete mode 100644 services/web-svc/src/lib/config/types.ts delete mode 100644 services/web-svc/src/lib/geoDevice.ts delete mode 100644 services/web-svc/src/lib/guest-migration.ts delete mode 100644 services/web-svc/src/lib/guest-storage.ts delete mode 100644 services/web-svc/src/lib/hooks/useChat.tsx delete mode 100644 services/web-svc/src/lib/localization.ts delete mode 100644 services/web-svc/src/lib/localization/context.tsx delete mode 100644 services/web-svc/src/lib/localization/countryToLocale.ts delete mode 100644 services/web-svc/src/lib/localization/embeddedTranslations.ts delete mode 100644 services/web-svc/src/lib/project-files-db.ts delete mode 100644 services/web-svc/src/lib/project-storage.ts delete mode 100644 services/web-svc/src/lib/serverUtils.ts delete mode 100644 services/web-svc/src/lib/session.ts delete mode 100644 services/web-svc/src/lib/types-ui.ts delete mode 100644 services/web-svc/src/lib/types.ts delete mode 100644 services/web-svc/src/lib/utils.ts delete mode 100644 services/web-svc/tailwind.config.ts delete mode 100644 services/web-svc/tsconfig.json delete mode 100644 services/web-svc/tsconfig.tsbuildinfo delete mode 100755 test-chat-proper.sh delete mode 100755 test-chat-via-webui.sh delete mode 100755 test-chat.sh diff --git a/CONTINUE.md b/CONTINUE.md index f2d08f0..a005550 100644 --- a/CONTINUE.md +++ b/CONTINUE.md @@ -1,810 +1,144 @@ -# Статус проекта — ВСЁ РАБОТАЕТ - -## Последнее обновление: 27.02.2026 - -### Исправлена ошибка 500 в learning-svc (27.02.2026) - -**Проблема:** При создании урока `POST /api/v1/learning/lesson` возвращалась ошибка 500. - -**Причина:** `learning-svc` пытался использовать Timeweb JWT-токен (в переменной `OPENAI_API_KEY`) как ключ OpenAI API. Timeweb JWT имеет формат `eyJ...`, что не является валидным OpenAI ключом. 
- -**Решение:** - -1. **backend/cmd/learning-svc/main.go** — изменена логика инициализации LLM клиента: - - Приоритет 1: Timeweb Cloud AI (если `TIMEWEB_AGENT_ACCESS_ID` + `TIMEWEB_API_KEY` установлены) - - Приоритет 2: Anthropic (только если ключ не JWT) - - Приоритет 3: OpenAI (только если ключ не JWT) - - Добавлена функция `isJWT()` для детекции JWT-токенов - -2. **backend/deploy/docker/docker-compose.yml** — добавлены env vars для learning-svc: - - `TIMEWEB_API_BASE_URL` - - `TIMEWEB_AGENT_ACCESS_ID` - - `TIMEWEB_API_KEY` - - `DEFAULT_LLM_MODEL` - -**Результат:** -- `POST /api/v1/learning/lesson` → 200 (урок создаётся успешно) -- learning-svc использует Timeweb Cloud AI как LLM провайдер - ---- - -### Исправлены API ошибки 503/404 (27.02.2026) - -**Проблема:** Frontend получал ошибки: -- `api/v1/spaces` - 503 Service Unavailable -- `api/v1/computer/tasks` - 404 Not Found -- `api/v1/computer/execute` - 404 Not Found - -**Причины:** -1. `api-gateway` не имел переменных окружения для новых сервисов (THREAD_SVC_URL, COMPUTER_SVC_URL и др.) -2. `computer-svc` падал при попытке использовать "anonymous" как UUID в PostgreSQL - -**Решение:** - -1. **docker-compose.yml** - добавлены env vars в api-gateway: - - THREAD_SVC_URL=http://thread-svc:3027 - - COMPUTER_SVC_URL=http://computer-svc:3030 - - DISCOVER_SVC_URL=http://discover-svc:3002 - - FINANCE_HEATMAP_SVC_URL=http://finance-heatmap-svc:3033 - - LEARNING_SVC_URL=http://learning-svc:3034 - -2. **computer-svc/main.go** - исправлена обработка anonymous пользователей: - - GET /tasks возвращает пустой список для anonymous - - POST /execute возвращает 401 для anonymous - -**Результат:** -- `GET /api/v1/spaces` → 401 (требует auth, frontend обрабатывает) -- `GET /api/v1/computer/tasks` → 200 (пустой список для anonymous) -- `POST /api/v1/computer/execute` → 401 (требует auth) - ---- - -### Исправлен баг с пустым ответом в Chat (27.02.2026) - -**Проблема:** При отправке сообщения в UI приходил пустой ответ. 
- -**Причина:** В `backend/cmd/api-gateway/main.go` функция `handleChat` использовала `defer resp.Body.Close()` до `SetBodyStreamWriter`. Поскольку `SetBodyStreamWriter` выполняется асинхронно, defer закрывал body сразу после return из функции, до того как данные успевали передаться клиенту. - -**Решение:** Перенёс `resp.Body.Close()` внутрь callback `SetBodyStreamWriter`: -```go -c.Context().SetBodyStreamWriter(func(w *bufio.Writer) { - defer resp.Body.Close() // <-- теперь здесь - // ... streaming logic -}) -``` - -**Файл:** `backend/cmd/api-gateway/main.go` - ---- - -### Адаптивность и удаление модалок (27.02.2026) - -**Задача:** Адаптировать под мобильные, убрать Dialog (модалки), сделать отдельные страницы. - -**Сделано:** - -1. **Мобильный Sidebar** (`layout.tsx`, `Sidebar.tsx`): - - На мобильных (<768px) sidebar скрыт, есть гамбургер-меню - - Появляется mobile header с кнопкой меню - - Sidebar открывается как overlay с анимацией - - Автозакрытие при навигации - -2. **Удалены все Dialog/модальные окна:** - - `spaces/page.tsx` — убран Dialog для создания/редактирования - - `learning/page.tsx` — убран Dialog для создания урока - -3. **Созданы отдельные страницы вместо модалок:** - - `/spaces/new` — создание пространства - - `/spaces/[id]/edit` — редактирование пространства - - `/learning/new` — создание урока - -4. **Адаптивность всех страниц:** - - `/` — responsive grid для suggestions, адаптивные padding/размеры - - `/discover` — горизонтальный скролл табов, адаптивные карточки - - `/history` — адаптивные padding, видимые кнопки действий на мобильных - - `/spaces` — адаптивные карточки, dropdown всегда видим на мобильных - - `/finance` — адаптивный time range, компактные StockRow - - `/learning` — адаптивные stats cards, уроки - - `/settings` — компактные селекторы, toggle switches - - `/computer` — мобильная панель задач, адаптивные примеры - -5. 
**Общие улучшения:** - - `h-[100dvh]` вместо `h-screen` для лучшей работы на iOS - - Overflow-x-auto для табов с горизонтальной прокруткой - - Адаптивные padding: `px-4 sm:px-6`, `py-6 sm:py-8` - - Адаптивные размеры текста: `text-xl sm:text-2xl` - - Видимость кнопок на мобильных (без hover-only) - ---- - -### Редизайн WebUI в стиле Cursor IDE 2026 (27.02.2026) - -**Задача:** Редизайн backend/webui в стиле Cursor IDE 2026 — современная тёмная тема с индиговыми акцентами. - -**Сделано:** - -1. **Глобальные стили** (`globals.css`): - - Новая цветовая палитра: base/elevated/surface/overlay для фонов - - Текст: primary/secondary/muted/faint (zinc-based) - - Акценты: indigo (accent) + cyan (accent-secondary) - - CSS переменные для всех цветов - - JetBrains Mono для кода - - Классы: .btn-primary, .btn-primary-solid, .btn-secondary, .btn-ghost, .btn-icon - - .nav-item, .nav-item-active, .card, .card-interactive, .glass-card, .surface-card - - .badge, .badge-accent, .badge-success, .badge-warning, .badge-error - - Анимации: fade-in, fade-in-up, slide-in-right, slide-in-left, scale-in, glow-pulse - -2. **Tailwind Config** (`tailwind.config.ts`): - - Semantic colors: base, elevated, surface, overlay, primary, secondary, muted, faint - - accent с оттенками: DEFAULT, hover, muted, subtle - - border с оттенками: DEFAULT, hover, focus - - success, warning, error с muted вариантами - - Кастомные тени: glow-sm/md/lg, elevated, card, dropdown - - Расширенные анимации - -3. **Компоненты** (Cursor-style): - - `Sidebar.tsx` — минималистичный с индиговыми акцентами, collapsible - - `ChatInput.tsx` — современный инпут с glow эффектом, ArrowUp кнопка отправки - - `ChatMessage.tsx` — чистый рендер, индиговые цитаты и код - - `DiscoverCard.tsx` — карточки с цветными иконками - -4. 
**Страницы** (все обновлены и адаптированы): - - `/` — главная с suggestions grid - - `/discover` — лента новостей - - `/history` — история поиска - - `/spaces` — пространства (+ /new, /[id]/edit) - - `/finance` — котировки - - `/learning` — уроки (+ /new) - - `/settings` — настройки - - `/computer` — Computer page - -**Цветовая схема (Cursor IDE 2026):** -- Background: base (#0a0a0c) → elevated (#121217) → surface (#1a1a1f) -- Text: primary (#e9e9eb) → secondary (#a8a8af) → muted (#6b6b75) → faint (#4a4a54) -- Accent: indigo (#818cf8) — основной акцент -- Accent Secondary: cyan (#22d3ee) — дополнительный -- Borders: #26262e → #32323d (hover) -- Success: green (#22c55e) -- Error: red (#ef4444) -- Warning: amber (#f59e0b) - ---- - -### Интеграция Frontend с Backend (27.02.2026) - -**Задача:** Подключить backend к frontend в `backend/webui/`, убрать моковые данные. - -**Сделано:** - -1. **API Gateway обновлён** (`backend/cmd/api-gateway/main.go`): - - Добавлен роутинг для threads, spaces, pages, share → thread-svc - - Добавлен роутинг для discover → discover-svc - - Добавлен роутинг для heatmap, movers, markets → finance-heatmap-svc - - Добавлен роутинг для learning → learning-svc - -2. **Config обновлён** (`backend/pkg/config/config.go`): - - Добавлены FinanceHeatmapURL, LearningSvcURL - -3. **Frontend Types** (`backend/webui/src/lib/types.ts`): - - Добавлены Thread, ThreadMessage, ThreadSource - - Добавлены FinanceStock, HeatmapData, HeatmapSector, TopMovers, FinanceMarket - - Добавлены Lesson, LessonProgress, LessonStep, CodeExample, QuizQuestion, PracticeExercise - -4. 
**Frontend API** (`backend/webui/src/lib/api.ts`): - - `fetchThreads()`, `fetchThread()`, `createThread()`, `deleteThread()`, `shareThread()` - - `fetchSpaces()`, `fetchSpace()`, `createSpace()`, `updateSpace()`, `deleteSpace()` - - `fetchMarkets()`, `fetchHeatmap()`, `fetchTopMovers()` - - `fetchLessons()`, `fetchLesson()`, `createLesson()`, `completeStep()`, `submitQuizAnswer()`, `deleteLesson()` - - Добавлен `getAuthHeaders()` для JWT авторизации - -5. **History Page** (`backend/webui/src/app/(main)/history/page.tsx`): - - Загрузка из threads API - - Удаление тредов - - Шаринг тредов - - Группировка по датам - - Поиск - -6. **Spaces Page** (`backend/webui/src/app/(main)/spaces/page.tsx`): - - Загрузка из spaces API - - Создание/редактирование/удаление пространств - - Выбор focus mode - - Кастомные AI инструкции - -7. **Finance Page** (`backend/webui/src/app/(main)/finance/page.tsx`): - - Загрузка рынков из API - - Heatmap данные по секторам - - Top movers (gainers/losers/most active) - - Переключение timeRange (1d/1w/1m/3m/1y) - -8. **Learning Page** (`backend/webui/src/app/(main)/learning/page.tsx`): - - Загрузка уроков из API - - Создание уроков с выбором сложности и режима - - Отображение прогресса - - Удаление уроков - ---- - -### Новые функции (добавлено в этой сессии): - -#### 0. 
WebUI — Интерфейс бэкенда (Perplexity-style) -- **Папка**: `backend/webui/` -- **Технологии**: Next.js 14, React 18, Tailwind CSS, TypeScript -- **Цветовая схема**: Глубокий синий (#0a1929) + золотистые акценты (#d4a373) -- **Порт**: 3000 -- **Страницы**: - - `/` — Главная страница с чатом и подсказками - - `/discover` — Лента новостей с AI-саммари - - `/spaces` — Пространства (workspaces) для организации исследований - - `/history` — История поисков - - `/finance` — Котировки MOEX, крипто, валюты - - `/learning` — Интерактивные курсы с AI-наставником - - `/settings` — Настройки (тема, язык, приватность) -- **Компоненты**: - - `Sidebar` — Навигация с коллапсом - - `ChatInput` — Ввод с режимами (speed/balanced/quality) - - `ChatMessage` — Рендер markdown + citations - - `Citation` — Hover preview источников - - `DiscoverCard` — Карточки новостей (large/medium/small) -- **Docker**: `backend/webui/Dockerfile`, добавлен в `docker-compose.yml` -- **Запуск**: `cd backend/webui && npm run dev` или через Docker - -#### 1. Computer с долгосрочными задачами (Perplexity-style) -- **Файлы**: `backend/internal/computer/types.go`, `backend/internal/computer/computer.go` -- **Duration Modes**: Short (30 мин), Medium (4 часа), Long (24 часа), Extended (7 дней), Unlimited (365 дней) -- **Checkpoints**: автоматическое сохранение состояния для возобновления задач -- **Heartbeat**: периодические уведомления о статусе -- **Resource Limits**: контроль CPU, памяти, бюджета - -#### 2. Browser Automation через Playwright -- **Файлы**: `backend/internal/computer/browser/browser.go`, `backend/internal/computer/browser/server.go` -- **Действия**: navigate, click, type, fill, screenshot, scroll, evaluate, PDF, extract text/HTML -- **Session management**: пул сессий, автоочистка, управление viewport -- **Сервис**: browser-svc (порт 3050) - -#### 3. 
Персонализация Discover — "For You" feed -- **Файлы**: `backend/internal/discover/personalization.go`, `backend/internal/db/user_interests_repo.go` -- **User Interests**: topics, sources, keywords с decay-механизмом -- **Feed Algorithm**: relevance scoring, diversity, recency, trending boost -- **Blocking**: блокировка источников и тем -- **Custom Categories**: пользовательские категории с ключевыми словами - -#### 4. Labs Service — интерактивные отчёты -- **Файлы**: `backend/internal/labs/types.go`, `backend/internal/labs/generator.go`, `backend/cmd/labs-svc/main.go` -- **Визуализации**: bar/line/pie/donut charts, tables, stat cards, KPI, heatmap, timeline, comparison -- **UI-элементы**: tabs, accordion, collapsible, stepper, code blocks, markdown -- **API**: генерация отчётов, экспорт в HTML/JSON -- **Сервис**: labs-svc (порт 3031) - -#### 5. Finance Heatmaps -- **Файлы**: `backend/internal/finance/heatmap.go`, `backend/cmd/finance-heatmap-svc/main.go` -- **Markets**: S&P 500, NASDAQ, Dow Jones, MOEX, Crypto, Forex -- **Views**: treemap, grid -- **Data**: sectors, tickers, top movers (gainers/losers/most active) -- **Colorscale**: красно-зелёная шкала по изменению цены -- **Сервис**: finance-heatmap-svc (порт 3033) - -#### 6. Step-by-step Learning Mode -- **Файлы**: `backend/internal/learning/stepper.go`, `backend/cmd/learning-svc/main.go` -- **Modes**: explain, guided, interactive, practice, quiz -- **Difficulty**: beginner, intermediate, advanced, expert -- **Content**: explanations, code, visualizations, quiz questions, practice exercises -- **Progress tracking**: step completion, score, time spent -- **Сервис**: learning-svc (порт 3034) - -#### 7. 
Discover Daily Podcast Generation -- **Файлы**: `backend/internal/podcast/generator.go`, `backend/cmd/podcast-svc/main.go` -- **Types**: daily, weekly, topic_deep, breaking -- **Script Generation**: intro, news segments, analysis, outro -- **TTS Integration**: ElevenLabs API для генерации аудио -- **RSS Feed**: автоматическая генерация RSS для подкаст-платформ -- **Сервис**: podcast-svc (порт 3032) - -### Все 21 сервис: -- postgres, redis, searxng, crawl4ai (инфраструктура) -- search-svc (3001) ✅ -- discover-svc (3002) ✅ -- chat-svc (3005) ✅ -- api-gateway (3015) ✅ -- agent-svc (3018) ✅ -- llm-svc (3020) ✅ -- scraper-svc (3021) ✅ -- collection-svc (3025) ✅ -- file-svc (3026) ✅ -- thread-svc (3027) ✅ -- computer-svc (3030) ✅ -- **browser-svc (3050) ✅ NEW** -- **labs-svc (3031) ✅ NEW** -- **podcast-svc (3032) ✅ NEW** -- **finance-heatmap-svc (3033) ✅ NEW** -- **learning-svc (3034) ✅ NEW** - ---- - -## Задача (исходный запрос) -Система виджетов в стиле Perplexity для российского рынка: карточки товаров/видео/профилей/промокодов, Knowledge Cards, улучшенные citations, sources panel, image/video gallery + интеграция с Яндекс, Рутуб, ВКонтакте. - -## Сделано (ВСЁ ЗАВЕРШЕНО) - -### 1. SearXNG — Российские engines -- **Файл**: `deploy/docker/searxng/settings.yml` -- Добавлены: yandex, yandex images, yandex videos, yandex news, yandex music -- Добавлены: vk (через xpath), rutube (через json_engine), dzen -- Добавлены: youtube, aliexpress - -### 2. Типы данных виджетов (Backend) -- **Файл**: `services/master-agents-svc/src/lib/types/widgets.ts` -- ProductData, VideoData, ProfileData, PromoData -- ImageData, KnowledgeCardData (comparison_table, charts, timeline, quote, definition) -- SourceData, ClassifiedResult -- Вспомогательные функции: formatPrice, formatViews, formatDuration, formatFollowers - -### 3. 
Content Classifier (Backend) -- **Файл**: `services/master-agents-svc/src/lib/content-classifier.ts` -- Определение типа по URL паттернам (товары, видео, профили, промокоды) -- Определение платформы (yandex, vk, rutube, ozon, wildberries и др.) -- Классификация по ключевым словам в запросе -- Функции: classifySearchResult, groupResultsByType, detectKnowledgeCardType - -### 4. UI Компоненты (Frontend) - -#### Улучшенный Citation -- **Файл**: `services/web-svc/src/components/MessageRenderer/Citation.tsx` -- Расширенный hover preview с заголовком, описанием, favicon -- Иконки платформ (Яндекс, VK, Rutube, YouTube и др.) -- Улучшенный дизайн badges - -#### UnifiedCard (базовая карточка) -- **Файл**: `services/web-svc/src/components/Widgets/UnifiedCard.tsx` -- Универсальный компонент для всех типов карточек -- Поддержка compact/horizontal режимов -- Badges, meta, actions - -#### ProductCard (товары) -- **Файл**: `services/web-svc/src/components/Widgets/ProductCard.tsx` -- Цена, скидка, старая цена -- Рейтинг со звёздами, отзывы -- Маркетплейс (Ozon, WB, AliExpress, Яндекс Маркет) -- Наличие, доставка, badges - -#### VideoCard (видео) -- **Файл**: `services/web-svc/src/components/Widgets/VideoCard.tsx` -- Thumbnail с длительностью -- Автор, views, likes -- Платформа (Rutube, VK, YouTube, Дзен) -- Horizontal и compact режимы - -#### ProfileCard (профили) -- **Файл**: `services/web-svc/src/components/Widgets/ProfileCard.tsx` -- Аватар, имя, username -- Verified badge, online статус -- Followers/following -- Платформа (VK, Telegram, YouTube, Дзен) - -#### PromoCard (промокоды) -- **Файл**: `services/web-svc/src/components/Widgets/PromoCard.tsx` -- Код с кнопкой копирования -- Скидка, магазин, логотип -- Срок действия, условия -- Verified badge - -#### CardGallery (карусель) -- **Файл**: `services/web-svc/src/components/Widgets/CardGallery.tsx` -- Горизонтальный скролл с навигацией -- Поддержка всех типов карточек -- Индикаторы, gradient fade - -#### KnowledgeCard 
(Tako-style) -- **Файл**: `services/web-svc/src/components/Widgets/KnowledgeCard.tsx` -- Сравнительные таблицы -- Bar chart, Pie chart -- Stat cards с изменениями (+/-) -- Timeline -- Quote, Definition - -#### InlineImageGallery -- **Файл**: `services/web-svc/src/components/Widgets/InlineImageGallery.tsx` -- Grid, carousel, masonry layouts -- Lightbox с навигацией -- Lazy loading, источники - -#### VideoEmbed -- **Файл**: `services/web-svc/src/components/Widgets/VideoEmbed.tsx` -- Embed для YouTube, Rutube -- Play preview для VK, Дзен -- Метаданные: views, likes, автор - -#### SourcesPanel -- **Файл**: `services/web-svc/src/components/SourcesPanel.tsx` -- Expand/collapse -- Группировка по домену -- Поиск по источникам -- Favicon, snippets - -#### RelatedQuestions -- **Файл**: `services/web-svc/src/components/RelatedQuestions.tsx` -- 3 стиля: inline, panel, chips -- Expand/collapse -- Иконки, анимации - -### 5. Renderer обновлён -- **Файл**: `services/web-svc/src/components/Widgets/Renderer.tsx` -- Все новые типы виджетов: products, videos, profiles, promos -- knowledge_card, image_gallery, video_embed -- Одиночные карточки: product, video, profile, promo - -### 6. Синхронный медиа-поиск (images/videos) -- **Файл**: `services/master-agents-svc/src/lib/media-search.ts` -- Создан модуль синхронного поиска через SearXNG (без LLM) -- Функции `searchImages()`, `searchVideos()`, `searchMedia()` -- Автодетект платформы видео (YouTube, Rutube, VK, Дзен) - -### 7. Интеграция медиа в searchOrchestrator -- **Файл**: `services/master-agents-svc/src/lib/agent/searchOrchestrator.ts` -- Добавлен `mediaPromise` параллельно с `widgetPromise` и `searchPromise` -- Эмит виджетов `image_gallery` (carousel) и `videos` (CardGallery) -- Медиа загружается одновременно с research - -### 8. 
Backend: Интеграция парсинга и эмиссии виджетов (ГОТОВО) -- **Файл**: `services/master-agents-svc/src/lib/searxng.ts` - - Расширены типы SearxngSearchResult (thumbnail, views, price, duration, author) - - Добавлена функция categorizeResult для автоопределения типа контента - -- **Файл**: `services/master-agents-svc/src/lib/agent/researcher.ts` - - Интегрирован ContentClassifier - - Классификация результатов по типам (products, videos, profiles, promos, images) - - Автоматическая эмиссия WidgetBlock для каждого типа контента - - Эвристика для KnowledgeCard (comparison_table, stat_card) - -### 9. Frontend: Типы для Widget (ГОТОВО) -- **Файл**: `services/web-svc/src/lib/types.ts` - - Добавлен WidgetType union type со всеми типами виджетов - - Типизированы params - -- **Файл**: `services/web-svc/src/components/ChatWindow.tsx` - - Расширен интерфейс Widget новыми типами - -### 10. Улучшение ReasoningSteps (ГОТОВО) -- **Файл**: `services/web-svc/src/components/AssistantSteps.tsx` - - Progress bar с процентами - - Таймер времени выполнения - - CheckCircle2 при завершении - - Loader2 spinner во время работы - - Улучшенная визуализация состояний +# Недоделки — начать отсюда + +## Последнее изменение (28.02.2026) +**Создан полный UI для авторизации:** +- Модальные окна login/register +- Страницы /login, /register, /forgot-password, /reset-password +- UserMenu в сайдбаре +- AuthContext для управления состоянием + +## Сделано (полностью) + +### 1. 
Auth UI (NEW) + +**Новые файлы:** + +**API клиент:** +- `backend/webui/src/lib/auth.ts` — функции для работы с auth API, хранение токенов + +**Контекст:** +- `backend/webui/src/lib/contexts/AuthContext.tsx` — React контекст для auth состояния + +**Компоненты:** +- `backend/webui/src/components/auth/AuthModal.tsx` — модальное окно +- `backend/webui/src/components/auth/LoginForm.tsx` — форма входа +- `backend/webui/src/components/auth/RegisterForm.tsx` — форма регистрации +- `backend/webui/src/components/auth/ForgotPasswordForm.tsx` — сброс пароля +- `backend/webui/src/components/auth/UserMenu.tsx` — меню пользователя +- `backend/webui/src/components/auth/index.ts` — экспорты + +**Страницы:** +- `backend/webui/src/app/(auth)/login/page.tsx` +- `backend/webui/src/app/(auth)/register/page.tsx` +- `backend/webui/src/app/(auth)/forgot-password/page.tsx` +- `backend/webui/src/app/(auth)/reset-password/page.tsx` +- `backend/webui/src/app/(auth)/layout.tsx` + +**Интеграция:** +- `providers.tsx` — добавлен AuthProvider и AuthModal +- `Sidebar.tsx` — кнопки входа/регистрации для гостей, профиль для авторизованных + +**Функционал:** +- Модальные окна для быстрого входа без перехода на страницу +- Отдельные страницы для полноценного входа +- Валидация паролей в реальном времени +- Показ/скрытие пароля +- Запоминание пользователя +- Сброс пароля по email +- Автоматический refresh токенов +- Отображение tier (Free/Pro/Business) + +### 2. 
Auth Service (Backend) + +**Файлы:** +- `backend/internal/auth/types.go` +- `backend/internal/auth/repository.go` +- `backend/cmd/auth-svc/main.go` + +**Эндпоинты:** +| Method | Path | Описание | +|--------|------|----------| +| POST | `/api/v1/auth/register` | Регистрация | +| POST | `/api/v1/auth/login` | Вход | +| POST | `/api/v1/auth/refresh` | Обновление токена | +| POST | `/api/v1/auth/logout` | Выход | +| POST | `/api/v1/auth/logout-all` | Выход со всех устройств | +| GET | `/api/v1/auth/validate` | Валидация токена | +| GET | `/api/v1/auth/me` | Профиль | +| PUT | `/api/v1/auth/me` | Обновить профиль | +| POST | `/api/v1/auth/change-password` | Смена пароля | +| POST | `/api/v1/auth/forgot-password` | Запрос сброса | +| POST | `/api/v1/auth/reset-password` | Сброс пароля | + +### 3. Ранее — Аудит безопасности + +- Репозитории с фильтрацией по user_id +- JWT middleware во всех сервисах +- Tiered rate limiting (free/pro/business) +- Usage tracking +- LLM limits ## Осталось сделать -### Go Backend (НОВОЕ) -Создан бэкенд на Go в `backend/` — альтернатива TypeScript сервисам для продакшена. +### Высокий приоритет: +1. **Проверить компиляцию** — `cd backend && go build ./...` +2. **Протестировать auth flow** — регистрация → логин → refresh → logout +3. **Добавить billing-svc** — интеграция с ЮKassa -**Структура:** +### Средний приоритет: +4. **OAuth провайдеры** — Google, GitHub, Yandex +5. **Email уведомления** — подтверждение email, сброс пароля +6. **Страница настроек профиля** — редактирование name, avatar +7. **Страница подписки** — /settings/billing с выбором тарифа + +### Низкий приоритет: +8. **2FA** — TOTP аутентификация +9. **Session management** — список активных сессий +10. 
**Account deletion** — удаление аккаунта + +## Контекст + +### Новые файлы UI: ``` -backend/ -├── cmd/ # Точки входа (9 сервисов) -│ ├── api-gateway/ # HTTP proxy + routing (port 3015) -│ ├── chat-svc/ # Chat orchestration (port 3005) -│ ├── agent-svc/ # AI Agent / Search Orchestrator (port 3018) -│ ├── search-svc/ # SearXNG wrapper (port 3001) -│ ├── llm-svc/ # LLM providers (port 3020) -│ ├── scraper-svc/ # Web scraping (port 3021) -│ ├── discover-svc/ # Perplexity Discover (port 3002) -│ ├── collection-svc/ # Collections API (port 3025) -│ └── file-svc/ # File upload & analysis (port 3026) -├── internal/ -│ ├── agent/ # Orchestrator, Classifier, Researcher, DeepResearch, FocusModes -│ ├── db/ # PostgreSQL repos (digest, article_summary, collection, file) -│ ├── files/ # File analyzer (PDF, images) -│ ├── llm/ # OpenAI, Anthropic, Gemini clients -│ ├── search/ # SearXNG, Media, BM25 Reranker -│ ├── session/ # Session manager + events -│ ├── prompts/ # LLM prompts -│ └── types/ # Blocks, Chunks, Widgets -├── pkg/ -│ ├── config/ # Configuration -│ ├── ndjson/ # NDJSON streaming -│ ├── cache/ # Redis кэширование -│ └── middleware/ # Auth, RateLimit, Logging, JWT -└── deploy/ - ├── docker/ # Dockerfiles + docker-compose - └── k8s/ # Kubernetes manifests +backend/webui/src/ +├── lib/ +│ ├── auth.ts (NEW) +│ └── contexts/ +│ └── AuthContext.tsx (NEW) +├── components/ +│ └── auth/ +│ ├── AuthModal.tsx (NEW) +│ ├── LoginForm.tsx (NEW) +│ ├── RegisterForm.tsx (NEW) +│ ├── ForgotPasswordForm.tsx (NEW) +│ ├── UserMenu.tsx (NEW) +│ └── index.ts (NEW) +└── app/ + └── (auth)/ + ├── layout.tsx (NEW) + ├── login/page.tsx (NEW) + ├── register/page.tsx (NEW) + ├── forgot-password/page.tsx (NEW) + └── reset-password/page.tsx (NEW) ``` -**Для запуска:** -```bash -cd backend -make deps -make build -make dev # или docker-compose +### Обновлённые файлы: +``` +backend/webui/src/app/providers.tsx — AuthProvider + AuthModal +backend/webui/src/components/Sidebar.tsx — кнопки auth + профиль 
``` -**Добавлено (Perplexity-style Article Summary):** -- `fetchPreGeneratedDigest()` — проверка готового дайджеста из discover-svc -- `preScrapeArticleURL()` — параллельный скрейпинг через Crawl4AI -- Обработка `Summary: ` запросов как в Perplexity Discover -- Writer prompt для article digest (структура, follow-up вопросы) -- Researcher prompt для поиска related sources - -**Добавлено (26.02.2026):** -- `discover-svc` на Go (port 3002) — полный функционал Perplexity Discover -- Redis кэширование article summaries (`pkg/cache/redis.go`) -- Сохранение саммари в discover-svc после генерации из chat-svc -- Article summary flow: chat-svc -> agent-svc -> discover-svc - -**Добавлено (26.02.2026 - Perplexity Parity):** -- PostgreSQL персистентность (`internal/db/`) — digests, article_summaries, collections, files -- Redis-based rate limiting (`pkg/middleware/ratelimit_redis.go`) -- JWT валидация токенов (`pkg/middleware/jwt.go`) -- Focus Modes (`internal/agent/focus_modes.go`) — Academic, YouTube, Reddit, Code, News, Math, Finance -- Deep Research (`internal/agent/deep_research.go`) — multi-agent, 30+ searches, insights synthesis -- Collections service (`cmd/collection-svc/`) — saved research projects -- File service (`cmd/file-svc/`) — PDF/image upload & analysis - -**Добавлено (26.02.2026 - Full Perplexity Feature Parity):** -- Thread service (`cmd/thread-svc/`) — Threads, Spaces, Memory, Pages (port 3027) -- **Threads** — сохранение истории чатов, sharing по публичной ссылке -- **Spaces** — workspaces с custom AI instructions, default focus mode -- **Pages** — генерация статей из threads (`internal/pages/generator.go`) -- **Memory** — персонализация (preferences, facts, instructions, interests) -- Clarifying Questions — уточняющие вопросы перед Pro Search (Quality mode) -- Related Questions — связанные вопросы после ответа -- Enhanced Context — buildEnhancedContext() объединяет User Memory + Collection + Files -- runDeepResearchMode() — интеграция Deep Research в 
orchestrator -- DB repositories: `internal/db/thread_repo.go`, `space_repo.go`, `memory_repo.go`, `page_repo.go` - -**TODO для Go бэкенда:** -1. [ ] Тестирование интеграции с web-svc -2. [x] Добавить Redis кэширование ответов -3. [x] Добавить сохранение саммари в discover-svc после генерации -4. [ ] Unit тесты -5. [x] K8s манифесты для Go сервисов (`backend/deploy/k8s/`) -6. [x] PostgreSQL персистентность -7. [x] Focus Modes (Perplexity-style) -8. [x] Deep Research (multi-agent) -9. [x] Collections API -10. [x] File Analysis (PDF, images) -11. [x] Thread History (chat persistence) -12. [x] Spaces (workspaces) -13. [x] Pages (article generation) -14. [x] Memory (personalization) -15. [x] Clarifying Questions -16. [x] Related Questions -17. [x] Sharing (public links) -18. [x] **GooSeek Computer** (Perplexity Computer аналог) - -### GooSeek Computer — Perplexity Computer Clone (26.02.2026) - -**Новый сервис:** `cmd/computer-svc/main.go` (port 3030) - -Автономная платформа для долгосрочных задач — аналог Perplexity Computer. - -**Архитектура:** +### Хранение токенов: ``` -backend/internal/computer/ -├── types.go # ComputerTask, SubTask, Artifact, Schedule, Memory -├── computer.go # Главный оркестратор + EventBus -├── planner.go # AI планировщик задач -├── router.go # Роутинг на нужную модель -├── executor.go # Исполнение задач (research, code, deploy, report...) 
-├── sandbox.go # Docker sandbox для исполнения кода -├── scheduler.go # Планировщик (cron, interval, once, daily, weekly) -├── memory.go # Персистентная память пользователя -└── connectors/ # Внешние интеграции - ├── connector.go # Интерфейс коннектора - ├── email.go # SMTP отправка - ├── telegram.go # Telegram Bot API - ├── webhook.go # HTTP webhooks - └── storage.go # S3/MinIO хранилище +localStorage: +- token: JWT access token +- refreshToken: refresh token +- user: JSON с данными пользователя ``` -**Model Registry:** `internal/llm/registry.go` -- Capabilities: reasoning, coding, search, creative, fast, long_context, vision, math -- Роутинг задач на оптимальную модель (Claude для reasoning, Gemini для research, GPT для fast) - -**DB Repositories:** -- `internal/db/computer_task_repo.go` — задачи -- `internal/db/computer_memory_repo.go` — память -- `internal/db/computer_artifact_repo.go` — артефакты (код, отчёты, файлы) - -**API Endpoints:** -- `POST /api/v1/computer/execute` — запуск задачи -- `GET /api/v1/computer/tasks` — список задач пользователя -- `GET /api/v1/computer/tasks/:id` — статус задачи -- `GET /api/v1/computer/tasks/:id/stream` — SSE стрим событий -- `POST /api/v1/computer/tasks/:id/resume` — продолжить с user input -- `DELETE /api/v1/computer/tasks/:id` — отмена -- `GET /api/v1/computer/tasks/:id/artifacts` — артефакты -- `GET /api/v1/computer/models` — доступные модели -- `GET /api/v1/computer/connectors` — доступные интеграции -- `POST /api/v1/computer/connectors/:id/execute` — выполнить action - -**Типы задач:** -- `research` — веб-исследование, сбор данных -- `code` — генерация кода -- `analysis` — анализ данных -- `design` — проектирование архитектуры -- `deploy` — запуск кода в sandbox -- `report` — генерация отчётов -- `communicate` — отправка email/telegram -- `transform` — преобразование данных -- `validate` — валидация результатов - -**Планирование:** -- `cron` — cron-выражения -- `interval` — каждые N секунд -- `once` — одноразовый 
запуск -- `daily/hourly/weekly/monthly` — preset'ы - -**Пример использования:** -```json -POST /api/v1/computer/execute -{ - "userId": "user-123", - "query": "Мониторь курс доллара каждый час и отправляй мне в Telegram если изменение > 1%", - "options": { - "async": true, - "schedule": { "type": "hourly" } - } -} -``` - -Нет критичных задач - система работает стабильно. - -### GooSeek Computer UI (27.02.2026) - -**Задача:** Создать UI для Computer в стиле Perplexity Computer. - -**Сделано:** - -1. **Типы** (`backend/webui/src/lib/types.ts`): - - ComputerTask, ComputerTaskStatus, DurationMode, TaskPriority - - TaskPlan, SubTask, Artifact, Checkpoint - - ComputerSchedule, ResourceLimits - - ComputerTaskEvent, ComputerExecuteRequest - - ComputerModel, ComputerConnector - -2. **API функции** (`backend/webui/src/lib/api.ts`): - - `executeComputerTask()` — запуск задачи - - `fetchComputerTasks()` — список задач - - `fetchComputerTask()` — статус задачи - - `streamComputerTask()` — SSE стрим событий - - `resumeComputerTask()` — продолжить с user input - - `cancelComputerTask()` — отменить - - `fetchComputerArtifacts()` — артефакты - - `downloadArtifact()` — скачать артефакт - - `fetchComputerModels()` — доступные модели - - `fetchComputerConnectors()` — коннекторы - - `executeConnectorAction()` — выполнить action - -3. **UI страница** (`backend/webui/src/app/(main)/computer/page.tsx`): - - Ввод задачи (textarea с placeholder) - - Duration mode selector (short/medium/long/extended/unlimited) - - Примеры задач (4 карточки) - - Список задач (левая панель) - - Детальный просмотр задачи: - - Progress bar - - Статус с иконками - - Подзадачи с прогрессом - - События (real-time log) - - Артефакты с кнопкой скачивания - - Расширенные настройки (коннекторы) - -4. 
**Навигация** (`backend/webui/src/components/Sidebar.tsx`): - - Добавлен Computer в раздел "Инструменты" - - Иконка Cpu - -### Оптимизация Speed Mode 2.0 (26.02.2026): - -**Проблема:** Запросы обрабатывались >2 минут из-за 5 последовательных LLM вызовов. - -**Решение — Fast Path для Speed Mode:** -1. **Rule-based classifier** — без LLM вызова (`fastClassifier.ts`) -2. **Direct SearXNG search** — без researcher loop (без LLM) -3. **No scraping** — только сниппеты, без Crawl4AI -4. **Parallel execution** — search + media + widgets одновременно -5. **Single LLM call** — только Writer для финального ответа - -**Новый файл:** `services/master-agents-svc/src/lib/agent/fastClassifier.ts` - -**Изменённые файлы:** -- `services/master-agents-svc/src/lib/agent/searchOrchestrator.ts` — добавлен `runSpeedMode()` -- `services/master-agents-svc/src/lib/actions/scrape_url.ts` — отключен для speed mode - -**Ожидаемое время ответа:** -- Speed mode: 8-15 сек (было >2 мин) -- Balanced mode: 20-40 сек -- Quality mode: 60-120 сек - -### Исправлено 26.02.2026: - -1. **SearXNG падал** (ambiguous shortcut errors): - - Файл: `deploy/docker/searxng/settings.yml` - - Убраны дублирующие shortcuts для yandex engines (ya, yai, yav, yan, yam) - - Убраны конфликтующие кастомные engines (vk, rutube, dzen, aliexpress) - -2. **Ghost перезапускался каждые 2 минуты** (HTTP 503): - - Файл: `deploy/docker/docker-compose.yml` - - Исправлен healthcheck (wget --spider не следует за 301 redirect) - - Увеличены start_period (90s), retries (5), interval (30s) - -## Контекст для продолжения - -### Новые файлы (созданы): -- `backend/cmd/computer-svc/main.go` — GooSeek Computer service (port 3030) -- `backend/internal/computer/types.go` — Computer types (Task, SubTask, Artifact, etc.) 
-- `backend/internal/computer/computer.go` — Main orchestrator + EventBus -- `backend/internal/computer/planner.go` — AI task planner -- `backend/internal/computer/router.go` — Model routing -- `backend/internal/computer/executor.go` — Task execution -- `backend/internal/computer/sandbox.go` — Docker sandbox manager -- `backend/internal/computer/scheduler.go` — Cron/interval scheduler -- `backend/internal/computer/memory.go` — Persistent user memory -- `backend/internal/computer/connectors/connector.go` — Connector interface -- `backend/internal/computer/connectors/email.go` — SMTP connector -- `backend/internal/computer/connectors/telegram.go` — Telegram Bot connector -- `backend/internal/computer/connectors/webhook.go` — HTTP webhook connector -- `backend/internal/computer/connectors/storage.go` — S3/MinIO storage connector -- `backend/internal/llm/registry.go` — Model registry with capabilities -- `backend/internal/db/computer_task_repo.go` — Computer task repository -- `backend/internal/db/computer_memory_repo.go` — Computer memory repository -- `backend/internal/db/computer_artifact_repo.go` — Computer artifact repository -- `backend/deploy/k8s/computer-svc.yaml` — K8s manifest for computer-svc -- `backend/cmd/discover-svc/main.go` — Discover service на Go -- `backend/cmd/collection-svc/main.go` — Collections service -- `backend/cmd/file-svc/main.go` — File upload & analysis service -- `backend/internal/db/postgres.go` — PostgreSQL connection + migrations -- `backend/internal/db/digest_repo.go` — Digest repository -- `backend/internal/db/article_summary_repo.go` — Article summary repository -- `backend/internal/db/collection_repo.go` — Collection repository -- `backend/internal/db/file_repo.go` — Uploaded file repository -- `backend/internal/agent/focus_modes.go` — Perplexity Focus Modes -- `backend/internal/agent/deep_research.go` — Deep Research multi-agent -- `backend/internal/files/analyzer.go` — PDF/image analyzer -- `backend/pkg/cache/redis.go` — Redis 
кэш клиент -- `backend/pkg/middleware/ratelimit_redis.go` — Redis-based rate limiting -- `backend/pkg/middleware/jwt.go` — JWT validation middleware -- `backend/deploy/k8s/postgres.yaml` — PostgreSQL StatefulSet -- `backend/deploy/k8s/collection-svc.yaml` — Collection service K8s -- `backend/deploy/k8s/file-svc.yaml` — File service K8s -- `backend/deploy/k8s/thread-svc.yaml` — Thread service K8s -- `backend/cmd/thread-svc/main.go` — Thread/Spaces/Memory/Pages service -- `backend/internal/db/thread_repo.go` — Thread repository -- `backend/internal/db/space_repo.go` — Space repository -- `backend/internal/db/memory_repo.go` — Memory repository -- `backend/internal/db/page_repo.go` — Page repository -- `backend/internal/pages/generator.go` — Page generator (Markdown/HTML export) -- `services/master-agents-svc/src/lib/media-search.ts` — синхронный медиа-поиск -- `services/media-svc/DEPRECATED.md` — пометка устаревшего сервиса -- `services/master-agents-svc/src/lib/types/widgets.ts` -- `services/master-agents-svc/src/lib/content-classifier.ts` -- `services/web-svc/src/components/Widgets/UnifiedCard.tsx` -- `services/web-svc/src/components/Widgets/ProductCard.tsx` -- `services/web-svc/src/components/Widgets/VideoCard.tsx` -- `services/web-svc/src/components/Widgets/ProfileCard.tsx` -- `services/web-svc/src/components/Widgets/PromoCard.tsx` -- `services/web-svc/src/components/Widgets/CardGallery.tsx` -- `services/web-svc/src/components/Widgets/KnowledgeCard.tsx` -- `services/web-svc/src/components/Widgets/InlineImageGallery.tsx` -- `services/web-svc/src/components/Widgets/VideoEmbed.tsx` -- `services/web-svc/src/components/SourcesPanel.tsx` -- `services/web-svc/src/components/RelatedQuestions.tsx` - -### Изменённые файлы: -- `backend/internal/agent/orchestrator.go` — Clarifying/Related Questions, Deep Research mode, Enhanced Context -- `backend/pkg/config/config.go` — ThreadSvcURL, ComputerSvcURL -- `backend/go.mod` — добавлены minio-go, robfig/cron -- `backend/Makefile` 
— добавлен computer-svc -- `backend/deploy/docker/Dockerfile.all` — добавлен build computer-svc -- `backend/deploy/k8s/kustomization.yaml` — добавлен computer-svc -- `backend/Makefile` — thread-svc -- `backend/deploy/docker/docker-compose.yml` — thread-svc -- `backend/deploy/docker/Dockerfile.all` — thread-svc build -- `backend/deploy/k8s/kustomization.yaml` — thread-svc -- `backend/deploy/k8s/configmap.yaml` — THREAD_SVC_URL -- `services/master-agents-svc/src/lib/agent/searchOrchestrator.ts` — интеграция медиа-поиска -- `services/master-agents-svc/src/lib/searxng.ts` — расширенные типы и категоризация -- `services/master-agents-svc/src/lib/agent/researcher.ts` — интеграция classifier + эмиссия виджетов -- `deploy/docker/searxng/settings.yml` — российские engines -- `services/web-svc/src/components/MessageRenderer/Citation.tsx` — enhanced hover -- `services/web-svc/src/components/Widgets/Renderer.tsx` — новые типы виджетов -- `services/web-svc/src/lib/types.ts` — WidgetType -- `services/web-svc/src/components/ChatWindow.tsx` — Widget interface -- `services/web-svc/src/components/AssistantSteps.tsx` — progress bar, timer, icons +### Auth flow: +1. Гость видит кнопки "Войти" / "Регистрация" в сайдбаре +2. Клик открывает модальное окно (или переход на страницу) +3. После успешного входа — сохранение токенов, обновление UI +4. При истечении access token — автоматический refresh +5. При logout — очистка localStorage diff --git a/TEST_RESULTS.md b/TEST_RESULTS.md deleted file mode 100644 index a08a4d6..0000000 --- a/TEST_RESULTS.md +++ /dev/null @@ -1,280 +0,0 @@ -# Результаты тестирования https://gooseek.ru/ - -**Дата:** 27 февраля 2026, 02:40 МСК - -## ❌ Основная проблема - -**Сайт https://gooseek.ru/ НЕДОСТУПЕН** - -``` -$ curl https://gooseek.ru/ -curl: (7) Failed to connect to gooseek.ru port 443: Couldn't connect to server -``` - -## Причины - -### 1. 
Reverse-proxy (Caddy) не запущен - -Файл `deploy/docker/docker-compose.yml` содержит конфигурацию для Caddy, который должен: -- Слушать порты 80/443 -- Автоматически получать SSL от Let's Encrypt -- Проксировать `gooseek.ru` → `web-svc:3000` - -**Но этот контейнер НЕ ЗАПУЩЕН.** - -### 2. Запущена неправильная конфигурация - -Сейчас работают контейнеры из `/backend/deploy/docker/docker-compose.yml` (Go backend): - -``` -$ docker ps -NAME STATUS -docker-api-gateway-1 Up 21 minutes (unhealthy) -docker-chat-svc-1 Up 21 minutes (unhealthy) -docker-agent-svc-1 Up 21 minutes (unhealthy) -docker-webui-1 Up 20 minutes -docker-llm-svc-1 Up 21 minutes (unhealthy) -docker-search-svc-1 Up 21 minutes (unhealthy) -docker-scraper-svc-1 Up 21 minutes (unhealthy) -... -``` - -**Все сервисы помечены как "unhealthy"**, кроме базовых (postgres, redis, crawl4ai). - -### 3. Chat API не работает - -#### Тест 1: Прямой доступ к API Gateway -```bash -$ curl http://localhost:3015/health -curl: (7) Connection refused -``` -**Результат:** API Gateway недоступен на localhost, хотя порт должен быть проброшен. - -#### Тест 2: Через WebUI -```bash -$ curl http://localhost:3000 -✅ OK - WebUI возвращает HTML -``` - -```bash -$ curl -X POST http://localhost:3000/api/chat \ - -H "Content-Type: application/json" \ - -d '{ - "message": { - "messageId": "msg-123", - "chatId": "chat-123", - "content": "Привет, как дела?" - }, - "optimizationMode": "balanced", - "history": [], - "locale": "ru" - }' - -Internal Server Error -``` - -**Результат:** WebUI работает, но chat API возвращает ошибку. - -**Причина:** WebUI пытается проксировать запрос на `api-gateway:3015` (через Next.js rewrites), но API Gateway недоступен или не отвечает. - -## Что работает ✅ - -1. **WebUI доступен локально** - `http://localhost:3000` возвращает интерфейс -2. **Docker контейнеры запущены** - все сервисы работают (но unhealthy) -3. 
**Базовые сервисы здоровы:** - - ✅ postgres (healthy) - - ✅ redis (healthy) - - ✅ crawl4ai (healthy) - - ✅ searxng (running) - -## Что НЕ работает ❌ - -1. **Reverse-proxy (Caddy)** - не запущен -2. **API Gateway** - unhealthy, порт 3015 недоступен -3. **Chat сервис** - unhealthy -4. **Agent сервис** - unhealthy -5. **LLM сервис** - unhealthy -6. **Search сервис** - unhealthy -7. **Все остальные backend Go сервисы** - unhealthy - -## Диагностика - -### Проверка портов -```bash -$ lsof -nP -iTCP -sTCP:LISTEN | grep -E "3000|3015|3005" -node 31314 home 16u IPv6 *:3000 (LISTEN) -``` - -Только порт 3000 (webui) слушает. Порты 3015 (api-gateway) и 3005 (chat-svc) не доступны на хосте. - -### Архитектура - -Текущая конфигурация: -``` -[Browser] - ↓ -❌ gooseek.ru:443 (Caddy не запущен) - ↓ -✅ localhost:3000 (WebUI - Next.js) - ↓ (Next.js rewrites /api/* → api-gateway:3015) -❌ api-gateway:3015 (unhealthy, недоступен) - ↓ -❌ chat-svc:3005 (unhealthy) - ↓ -❌ agent-svc:3018 (unhealthy) -``` - -## Рекомендации по исправлению - -### 1. Запустить правильный docker-compose - -```bash -cd /Users/home/Desktop/GooSeek/deploy/docker -./run.sh -``` - -Это должно запустить полный стек включая reverse-proxy. - -### 2. Проверить логи unhealthy контейнеров - -```bash -docker logs docker-api-gateway-1 --tail 100 -docker logs docker-chat-svc-1 --tail 100 -docker logs docker-agent-svc-1 --tail 100 -docker logs docker-llm-svc-1 --tail 100 -``` - -Искать ошибки: -- Отсутствующие переменные окружения -- Проблемы с подключением к зависимостям -- Ошибки при старте сервиса - -### 3. Проверить переменные окружения - -Убедиться что `.env` файл существует и содержит: -```bash -# LLM API Keys -OPENAI_API_KEY=sk-... -ANTHROPIC_API_KEY=sk-ant-... -GEMINI_API_KEY=... -TIMEWEB_API_KEY=... -TIMEWEB_AGENT_ACCESS_ID=... -TIMEWEB_API_BASE_URL=... - -# Ghost CMS -GHOST_CONTENT_API_KEY=... -GHOST_DB_PASSWORD=... -GHOST_MAIL_USER=... -GHOST_MAIL_PASSWORD=... -``` - -### 4. 
Проверить healthcheck'и - -Каждый сервис имеет healthcheck. Например, для api-gateway: -```yaml -healthcheck: - test: ["CMD", "wget", "-q", "--spider", "http://127.0.0.1:3015/health"] - interval: 15s - timeout: 5s - retries: 3 - start_period: 10s -``` - -Если сервис не отвечает на `/health` в течение 10s после старта → unhealthy. - -Возможные причины unhealthy: -- Сервис не стартует из-за ошибки -- Сервис стартует медленно (нужно увеличить `start_period`) -- Зависимости недоступны (другие сервисы) -- Отсутствуют переменные окружения - -### 5. Проверить сеть Docker - -```bash -docker network ls -docker network inspect gooseek -``` - -Убедиться что все контейнеры в одной сети и могут общаться. - -### 6. Проверить DNS и порты - -```bash -# DNS -nslookup gooseek.ru - -# Порты на роутере -# Убедиться что 80 и 443 проброшены на ПК -``` - -### 7. Перезапустить с нуля - -```bash -# Остановить все -cd /Users/home/Desktop/GooSeek/backend/deploy/docker -docker compose down - -# Запустить правильный compose -cd /Users/home/Desktop/GooSeek/deploy/docker -docker compose down -docker compose up -d - -# Проверить логи -docker compose logs -f reverse-proxy -docker compose logs -f web-svc -docker compose logs -f api-gateway -``` - -## Тестовые скрипты - -Созданы скрипты для тестирования: - -1. **test-chat.sh** - базовая проверка сервисов -2. **test-chat-proper.sh** - тест с правильным payload -3. 
**test-chat-via-webui.sh** - тест через webui - -Использование: -```bash -cd /Users/home/Desktop/GooSeek -./test-chat-via-webui.sh -``` - -## Скриншоты и ошибки - -### Консоль браузера (ожидаемые ошибки) - -Если открыть https://gooseek.ru/ в браузере: -- **Network:** ERR_CONNECTION_REFUSED или ERR_NAME_NOT_RESOLVED -- **Console:** Нет ошибок (страница не загружается) - -Если открыть http://localhost:3000/ в браузере: -- **Network:** ✅ 200 OK для главной страницы -- **Network:** ❌ 500 Internal Server Error для `/api/chat` -- **Console:** Ошибка при попытке отправить сообщение - -### Пример ошибки в консоли (если бы сайт был доступен) - -```javascript -POST http://localhost:3000/api/chat 500 (Internal Server Error) - -Error: Chat request failed: 500 - at streamChat (api.ts:43) - at sendMessage (useChat.ts:53) -``` - -## Итоговый вывод - -**Сайт https://gooseek.ru/ полностью недоступен.** - -**Причины:** -1. ❌ Reverse-proxy (Caddy) не запущен → нет HTTPS доступа -2. ❌ API Gateway unhealthy → chat не работает даже локально -3. ❌ Все backend сервисы unhealthy → нет функциональности - -**Что нужно сделать:** -1. Запустить правильный docker-compose с reverse-proxy -2. Исправить проблемы с unhealthy контейнерами (проверить логи) -3. Убедиться что все переменные окружения настроены -4. Проверить DNS и проброс портов - -**Приоритет:** КРИТИЧЕСКИЙ - сайт полностью не работает. 
diff --git a/api-1.json b/api-1.json deleted file mode 100644 index acd49e0..0000000 --- a/api-1.json +++ /dev/null @@ -1,2896 +0,0 @@ -{ - "openapi": "3.0.0", - "paths": { - "/api/v1/cloud-ai/agents/{agent_access_id}/call": { - "post": { - "operationId": "CloudAiAgentsClientController_callAgent", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "Agent access ID", - "schema": { - "type": "string" - } - }, - { - "name": "Authorization", - "required": true, - "in": "header", - "description": "Bearer token", - "schema": { - "type": "string", - "example": "Bearer " - } - }, - { - "name": "x-proxy-source", - "required": true, - "in": "header", - "schema": { - "type": "string" - } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/AgentCallDto" - } - } - } - }, - "responses": { - "200": { - "description": "Agent response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/AgentCallResponseDto" - } - } - } - }, - "401": { - "description": "Unauthorized" - }, - "404": { - "description": "Agent not found" - } - }, - "summary": "Call AI agent", - "tags": [ - "ai-agents-client" - ] - } - }, - "/api/v1/cloud-ai/agents/{agent_access_id}/v1/chat/completions": { - "post": { - "description": "\nThis endpoint supports both simple text messages and multimodal content:\n\n**Simple text message:**\n```json\n{\n \"model\": \"gpt-4\",\n \"messages\": [\n {\n \"role\": \"user\",\n \"content\": \"Hello, how are you?\"\n }\n ]\n}\n```\n\n**Multimodal message with text and image:**\n```json\n{\n \"model\": \"gpt-4\",\n \"messages\": [\n {\n \"role\": \"user\",\n \"content\": [\n { \"type\": \"text\", \"text\": \"What is in this image?\" },\n { \"type\": \"image_url\", \"image_url\": { \"url\": \"https://example.com/image.jpg\" } }\n ]\n }\n ]\n}\n```\n\n**Message with audio input:**\n```json\n{\n \"model\": \"gpt-4\",\n 
\"messages\": [\n {\n \"role\": \"user\",\n \"content\": [\n { \"type\": \"text\", \"text\": \"Please transcribe this audio:\" },\n { \"type\": \"input_audio\", \"input_audio\": { \"data\": \"base64_encoded_audio_data\", \"format\": \"wav\" } }\n ]\n }\n ]\n}\n```\n ", - "operationId": "CloudAiAgentsClientController_chatCompletions", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "Agent access ID", - "schema": { - "type": "string" - } - }, - { - "name": "Authorization", - "required": true, - "in": "header", - "description": "Bearer token", - "schema": { - "type": "string", - "example": "Bearer " - } - }, - { - "name": "x-proxy-source", - "required": true, - "in": "header", - "schema": { - "type": "string" - } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ChatCompletionCreateParamsDto" - } - } - } - }, - "responses": { - "200": { - "description": "Chat completion response (non-streaming)", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/OpenAiChatCompletionResponseDto" - } - } - } - }, - "401": { - "description": "Unauthorized" - }, - "404": { - "description": "Agent not found" - } - }, - "security": [ - { - "bearer": [] - } - ], - "summary": "OpenAI-compatible chat completions endpoint for AI agent", - "tags": [ - "ai-agents-client" - ] - } - }, - "/api/v1/cloud-ai/agents/{agent_access_id}/v1/completions": { - "post": { - "deprecated": true, - "operationId": "CloudAiAgentsClientController_textCompletions", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "Agent access ID", - "schema": { - "type": "string" - } - }, - { - "name": "Authorization", - "required": true, - "in": "header", - "description": "Bearer token", - "schema": { - "type": "string", - "example": "Bearer " - } - }, - { - "name": "x-proxy-source", - "required": true, - 
"in": "header", - "schema": { - "type": "string" - } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/OpenAiTextCompletionRequestDto" - } - } - } - }, - "responses": { - "200": { - "description": "Text completion response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/OpenAiTextCompletionResponseDto" - } - } - } - }, - "401": { - "description": "Unauthorized" - }, - "404": { - "description": "Agent not found" - } - }, - "summary": "OpenAI-compatible text completions endpoint for AI agent (legacy)", - "tags": [ - "ai-agents-client" - ] - } - }, - "/api/v1/cloud-ai/agents/{agent_access_id}/v1/models": { - "get": { - "operationId": "CloudAiAgentsClientController_getModels", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "Agent access ID", - "schema": { - "type": "string" - } - }, - { - "name": "Authorization", - "required": true, - "in": "header", - "description": "Bearer token", - "schema": { - "type": "string", - "example": "Bearer " - } - } - ], - "responses": { - "200": { - "description": "List of available models", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/OpenAiModelsResponseDto" - } - } - } - }, - "401": { - "description": "Unauthorized" - }, - "404": { - "description": "Agent not found" - } - }, - "summary": "OpenAI-compatible models endpoint for AI agent", - "tags": [ - "ai-agents-client" - ] - } - }, - "/api/v1/cloud-ai/agents/{agent_access_id}/embed.js": { - "get": { - "operationId": "CloudAiAgentsClientController_getAgentEmbedCode", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "Agent Access ID", - "schema": { - "type": "string" - } - }, - { - "name": "collapsed", - "required": false, - "in": "query", - "description": "Start widget in collapsed mode (default: true)", - "schema": 
{ - "default": true, - "type": "boolean" - } - }, - { - "name": "referer", - "required": true, - "in": "header", - "schema": { - "type": "string" - } - }, - { - "name": "origin", - "required": true, - "in": "header", - "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { - "description": "JavaScript code for embedding the chat widget", - "content": { - "application/javascript": { - "schema": { - "type": "string" - } - } - } - }, - "403": { - "description": "Domain not whitelisted or agent suspended" - }, - "404": { - "description": "Agent not found" - } - }, - "summary": "Get widget embed JavaScript code", - "tags": [ - "ai-agents-client" - ] - } - }, - "/api/v1/cloud-ai/agents/{agent_access_id}/v1/responses": { - "post": { - "operationId": "CloudAiAgentsResponsesController_createResponse", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "ID доступа агента", - "schema": { - "type": "string" - } - }, - { - "name": "authorization", - "in": "header", - "description": "Токен для приватных агентов", - "required": true, - "schema": { - "type": "string" - } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateResponseDto" - } - } - } - }, - "responses": { - "200": { - "description": "Ответ создан успешно", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/OpenAiResponseDto" - } - } - } - }, - "401": { - "description": "Unauthorized" - }, - "404": { - "description": "Agent not found" - } - }, - "summary": "OpenAI-compatible create a response", - "tags": [ - "ai-agents-responses" - ] - } - }, - "/api/v1/cloud-ai/agents/{agent_access_id}/v1/responses/{response_id}": { - "get": { - "operationId": "CloudAiAgentsResponsesController_getResponse", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "Agent Access ID", - "schema": { - 
"type": "string" - } - }, - { - "name": "response_id", - "required": true, - "in": "path", - "description": "Response ID", - "schema": { - "type": "string" - } - }, - { - "name": "include", - "required": false, - "in": "query", - "description": "Дополнительные поля для включения в ответ", - "schema": { - "example": [ - "web_search_call.action.sources", - "code_interpreter_call.outputs" - ], - "type": "array", - "items": { - "type": "string" - } - } - }, - { - "name": "include_obfuscation", - "required": false, - "in": "query", - "description": "Включить обфускацию потока для защиты от атак по побочным каналам", - "schema": { - "example": true, - "type": "boolean" - } - }, - { - "name": "starting_after", - "required": false, - "in": "query", - "description": "Номер последовательности события, после которого начать потоковую передачу", - "schema": { - "minimum": 0, - "example": 0, - "type": "number" - } - }, - { - "name": "stream", - "required": false, - "in": "query", - "description": "Потоковая передача данных ответа модели", - "schema": { - "example": false, - "type": "boolean" - } - }, - { - "name": "authorization", - "required": true, - "in": "header", - "description": "Bearer token for private agents", - "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { - "description": "Response retrieved successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/OpenAiResponseDto" - } - } - } - }, - "404": { - "description": "Response not found" - } - }, - "summary": "OpenAI-compatible get a response", - "tags": [ - "ai-agents-responses" - ] - }, - "delete": { - "operationId": "CloudAiAgentsResponsesController_deleteResponse", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "Agent Access ID", - "schema": { - "type": "string" - } - }, - { - "name": "response_id", - "required": true, - "in": "path", - "description": "Response ID", - "schema": { - "type": 
"string" - } - }, - { - "name": "authorization", - "required": true, - "in": "header", - "description": "Bearer token for private agents", - "schema": { - "type": "string" - } - } - ], - "responses": { - "204": { - "description": "Ответ удален успешно" - }, - "404": { - "description": "Response not found" - } - }, - "summary": "OpenAI-compatible delete a response", - "tags": [ - "ai-agents-responses" - ] - } - }, - "/api/v1/cloud-ai/agents/{agent_access_id}/v1/responses/{response_id}/cancel": { - "post": { - "operationId": "CloudAiAgentsResponsesController_cancelResponse", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "Agent Access ID", - "schema": { - "type": "string" - } - }, - { - "name": "response_id", - "required": true, - "in": "path", - "description": "Response ID", - "schema": { - "type": "string" - } - }, - { - "name": "authorization", - "required": true, - "in": "header", - "description": "Bearer token for private agents", - "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { - "description": "Отмена ответа выполнена успешно", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/OpenAiResponseDto" - } - } - } - }, - "404": { - "description": "Response not found" - } - }, - "summary": "OpenAI-compatible cancel a response", - "tags": [ - "ai-agents-responses" - ] - } - }, - "/api/v1/cloud-ai/agents/{agent_access_id}/v1/conversations": { - "post": { - "operationId": "CloudAiAgentsConversationsController_createConversation", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "Agent Access ID", - "schema": { - "type": "string" - } - }, - { - "name": "authorization", - "required": true, - "in": "header", - "description": "Bearer token for private agents", - "schema": { - "type": "string" - } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": 
"#/components/schemas/CreateConversationDto" - } - } - } - }, - "responses": { - "200": { - "description": "Conversation created successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ConversationDto" - } - } - } - }, - "401": { - "description": "Unauthorized" - }, - "404": { - "description": "Agent not found" - } - }, - "summary": "OpenAI-compatible create a conversation", - "tags": [ - "ai-agents-conversations" - ] - } - }, - "/api/v1/cloud-ai/agents/{agent_access_id}/v1/conversations/{conversation_id}": { - "get": { - "operationId": "CloudAiAgentsConversationsController_getConversation", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "Agent Access ID", - "schema": { - "type": "string" - } - }, - { - "name": "conversation_id", - "required": true, - "in": "path", - "description": "Conversation ID", - "schema": { - "type": "string" - } - }, - { - "name": "authorization", - "required": true, - "in": "header", - "description": "Bearer token for private agents", - "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { - "description": "Conversation retrieved successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ConversationDto" - } - } - } - }, - "404": { - "description": "Conversation not found" - } - }, - "summary": "OpenAI-compatible get a conversation", - "tags": [ - "ai-agents-conversations" - ] - }, - "post": { - "operationId": "CloudAiAgentsConversationsController_updateConversation", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "Agent Access ID", - "schema": { - "type": "string" - } - }, - { - "name": "conversation_id", - "required": true, - "in": "path", - "description": "Conversation ID", - "schema": { - "type": "string" - } - }, - { - "name": "authorization", - "required": true, - "in": "header", - "description": "Bearer token for 
private agents", - "schema": { - "type": "string" - } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/UpdateConversationDto" - } - } - } - }, - "responses": { - "200": { - "description": "Conversation updated successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ConversationDto" - } - } - } - }, - "404": { - "description": "Conversation not found" - } - }, - "summary": "OpenAI-compatible update a conversation", - "tags": [ - "ai-agents-conversations" - ] - }, - "delete": { - "operationId": "CloudAiAgentsConversationsController_deleteConversation", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "Agent Access ID", - "schema": { - "type": "string" - } - }, - { - "name": "conversation_id", - "required": true, - "in": "path", - "description": "Conversation ID", - "schema": { - "type": "string" - } - }, - { - "name": "authorization", - "required": true, - "in": "header", - "description": "Bearer token for private agents", - "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { - "description": "Conversation deleted successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ConversationDeletedDto" - } - } - } - }, - "404": { - "description": "Conversation not found" - } - }, - "summary": "OpenAI-compatible delete a conversation", - "tags": [ - "ai-agents-conversations" - ] - } - }, - "/api/v1/cloud-ai/agents/{agent_access_id}/v1/conversations/{conversation_id}/items": { - "get": { - "operationId": "CloudAiAgentsConversationsController_listItems", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "Agent Access ID", - "schema": { - "type": "string" - } - }, - { - "name": "conversation_id", - "required": true, - "in": "path", - "description": "Conversation ID", - "schema": { - 
"type": "string" - } - }, - { - "name": "after", - "required": false, - "in": "query", - "description": "An item ID to list items after, used in pagination", - "schema": { - "type": "string" - } - }, - { - "name": "include", - "required": false, - "in": "query", - "description": "Specify additional output data to include in the model response", - "schema": { - "type": "array", - "items": { - "type": "string" - } - } - }, - { - "name": "limit", - "required": false, - "in": "query", - "description": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20", - "schema": { - "minimum": 1, - "maximum": 100, - "default": 20, - "type": "number" - } - }, - { - "name": "order", - "required": false, - "in": "query", - "description": "The order to return the input items in", - "schema": { - "default": "desc", - "enum": [ - "asc", - "desc" - ], - "type": "string" - } - }, - { - "name": "authorization", - "required": true, - "in": "header", - "description": "Bearer token for private agents", - "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { - "description": "Items retrieved successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ConversationItemListDto" - } - } - } - }, - "404": { - "description": "Conversation not found" - } - }, - "summary": "OpenAI-compatible list items in a conversation", - "tags": [ - "ai-agents-conversations" - ] - }, - "post": { - "operationId": "CloudAiAgentsConversationsController_createItems", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "Agent Access ID", - "schema": { - "type": "string" - } - }, - { - "name": "conversation_id", - "required": true, - "in": "path", - "description": "Conversation ID", - "schema": { - "type": "string" - } - }, - { - "name": "include", - "required": false, - "in": "query", - "description": "Additional fields to include in the response", - 
"schema": { - "type": "array", - "items": { - "type": "string" - } - } - }, - { - "name": "authorization", - "required": true, - "in": "header", - "description": "Bearer token for private agents", - "schema": { - "type": "string" - } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateItemsDto" - } - } - } - }, - "responses": { - "200": { - "description": "Items created successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ConversationItemListDto" - } - } - } - }, - "404": { - "description": "Conversation not found" - } - }, - "summary": "OpenAI-compatible create items in a conversation", - "tags": [ - "ai-agents-conversations" - ] - } - }, - "/api/v1/cloud-ai/agents/{agent_access_id}/v1/conversations/{conversation_id}/items/{item_id}": { - "get": { - "operationId": "CloudAiAgentsConversationsController_getItem", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "Agent Access ID", - "schema": { - "type": "string" - } - }, - { - "name": "conversation_id", - "required": true, - "in": "path", - "description": "Conversation ID", - "schema": { - "type": "string" - } - }, - { - "name": "item_id", - "required": true, - "in": "path", - "description": "Item ID", - "schema": { - "type": "string" - } - }, - { - "name": "include", - "required": false, - "in": "query", - "description": "Specify additional output data to include in the model response", - "schema": { - "type": "array", - "items": { - "type": "string" - } - } - }, - { - "name": "authorization", - "required": true, - "in": "header", - "description": "Bearer token for private agents", - "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { - "description": "Item retrieved successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ConversationItemDto" - } - } - } - }, - 
"404": { - "description": "Item not found" - } - }, - "summary": "OpenAI-compatible get a conversation item", - "tags": [ - "ai-agents-conversations" - ] - }, - "delete": { - "operationId": "CloudAiAgentsConversationsController_deleteItem", - "parameters": [ - { - "name": "agent_access_id", - "required": true, - "in": "path", - "description": "Agent Access ID", - "schema": { - "type": "string" - } - }, - { - "name": "conversation_id", - "required": true, - "in": "path", - "description": "Conversation ID", - "schema": { - "type": "string" - } - }, - { - "name": "item_id", - "required": true, - "in": "path", - "description": "Item ID", - "schema": { - "type": "string" - } - }, - { - "name": "authorization", - "required": true, - "in": "header", - "description": "Bearer token for private agents", - "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { - "description": "Item deleted successfully", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ConversationDto" - } - } - } - }, - "404": { - "description": "Item not found" - } - }, - "summary": "OpenAI-compatible delete a conversation item", - "tags": [ - "ai-agents-conversations" - ] - } - } - }, - "info": { - "title": "Cloud AI API", - "description": "API docs for Cloud AI", - "version": "1.0", - "contact": {} - }, - "tags": [ - { - "name": "ai-agents-client", - "description": "API for working with AI agents" - }, - { - "name": "ai-agents-responses", - "description": "API for managing AI agents responses" - }, - { - "name": "ai-agents-conversations", - "description": "API for managing AI agents conversations" - } - ], - "servers": [ - { - "url": "https://agent.timeweb.cloud", - "description": "Timeweb Cloud AI API" - } - ], - "components": { - "securitySchemes": { - "JWT-auth": { - "scheme": "bearer", - "bearerFormat": "JWT", - "type": "http", - "name": "JWT", - "description": "Enter JWT token", - "in": "header" - } - }, - "schemas": { - "AgentCallDto": { - 
"type": "object", - "properties": { - "message": { - "type": "string", - "description": "The message to send to the agent (can be empty if files are provided)", - "default": "" - }, - "parent_message_id": { - "type": "string", - "description": "Optional parent message ID for conversation context" - }, - "file_ids": { - "description": "Optional array of file IDs to attach to the message", - "type": "array", - "items": { - "type": "string" - } - } - } - }, - "AgentCallResponseDto": { - "type": "object", - "properties": { - "message": { - "type": "string", - "description": "The response message from the agent" - }, - "id": { - "type": "string", - "description": "Unique ID of the message" - }, - "finish_reason": { - "type": "object", - "description": "The reason why the response was finished" - } - }, - "required": [ - "message", - "id", - "finish_reason" - ] - }, - "OpenAiChatCompletionMessageDto": { - "type": "object", - "properties": { - "role": { - "type": "string", - "description": "The role of the author of this message" - }, - "content": { - "type": "string", - "description": "The contents of the message" - } - }, - "required": [ - "role", - "content" - ] - }, - "OpenAiChatCompletionChoiceDto": { - "type": "object", - "properties": { - "index": { - "type": "number", - "description": "The index of the choice in the list of choices" - }, - "message": { - "description": "A chat completion message generated by the model", - "allOf": [ - { - "$ref": "#/components/schemas/OpenAiChatCompletionMessageDto" - } - ] - }, - "finish_reason": { - "type": "string", - "description": "The reason the model stopped generating tokens", - "enum": [ - "stop", - "length", - "content_filter", - "tool_calls" - ] - } - }, - "required": [ - "index", - "message", - "finish_reason" - ] - }, - "OpenAiUsageDto": { - "type": "object", - "properties": { - "prompt_tokens": { - "type": "number", - "description": "Number of tokens in the prompt" - }, - "completion_tokens": { - "type": "number", - 
"description": "Number of tokens in the generated completion" - }, - "total_tokens": { - "type": "number", - "description": "Total number of tokens used in the request (prompt + completion)" - } - }, - "required": [ - "prompt_tokens", - "completion_tokens", - "total_tokens" - ] - }, - "OpenAiChatCompletionResponseDto": { - "type": "object", - "properties": { - "id": { - "type": "string", - "description": "A unique identifier for the chat completion" - }, - "object": { - "type": "string", - "description": "The object type, which is always \"chat.completion\"", - "example": "chat.completion" - }, - "created": { - "type": "number", - "description": "The Unix timestamp (in seconds) of when the chat completion was created" - }, - "model": { - "type": "string", - "description": "The model used for the chat completion" - }, - "choices": { - "description": "A list of chat completion choices", - "type": "array", - "items": { - "$ref": "#/components/schemas/OpenAiChatCompletionChoiceDto" - } - }, - "usage": { - "description": "Usage statistics for the completion request", - "allOf": [ - { - "$ref": "#/components/schemas/OpenAiUsageDto" - } - ] - }, - "system_fingerprint": { - "type": "string", - "description": "System fingerprint" - } - }, - "required": [ - "id", - "object", - "created", - "model", - "choices", - "usage" - ] - }, - "OpenAiStreamDeltaDto": { - "type": "object", - "properties": { - "content": { - "type": "string", - "description": "The content delta for the message" - }, - "role": { - "type": "string", - "description": "The role of the message author (only in first chunk)" - } - } - }, - "OpenAiStreamChoiceDto": { - "type": "object", - "properties": { - "index": { - "type": "number", - "description": "The index of the choice in the list of choices" - }, - "delta": { - "description": "A chat completion delta generated by the model", - "allOf": [ - { - "$ref": "#/components/schemas/OpenAiStreamDeltaDto" - } - ] - }, - "finish_reason": { - "type": "string", - 
"description": "The reason the model stopped generating tokens", - "enum": [ - "stop", - "length", - "content_filter", - "tool_calls", - null - ] - } - }, - "required": [ - "index", - "delta" - ] - }, - "OpenAiChatCompletionStreamResponseDto": { - "type": "object", - "properties": { - "id": { - "type": "string", - "description": "A unique identifier for the chat completion" - }, - "object": { - "type": "string", - "description": "The object type, which is always \"chat.completion.chunk\"", - "example": "chat.completion.chunk" - }, - "created": { - "type": "number", - "description": "The Unix timestamp (in seconds) of when the chat completion was created" - }, - "model": { - "type": "string", - "description": "The model used for the chat completion" - }, - "choices": { - "description": "A list of chat completion choices", - "type": "array", - "items": { - "$ref": "#/components/schemas/OpenAiStreamChoiceDto" - } - }, - "system_fingerprint": { - "type": "string", - "description": "System fingerprint" - } - }, - "required": [ - "id", - "object", - "created", - "model", - "choices" - ] - }, - "OpenAiModelDto": { - "type": "object", - "properties": { - "id": { - "type": "string", - "description": "Model identifier", - "example": "gpt-4o-2024-08-06" - }, - "object": { - "type": "string", - "description": "Object type, always \"model\"", - "example": "model" - }, - "created": { - "type": "number", - "description": "Unix timestamp when the model was created", - "example": 1692901427 - }, - "owned_by": { - "type": "string", - "description": "Organization that owns the model", - "example": "openai" - } - }, - "required": [ - "id", - "object", - "created", - "owned_by" - ] - }, - "OpenAiModelsResponseDto": { - "type": "object", - "properties": { - "object": { - "type": "string", - "description": "Object type, always \"list\"", - "example": "list" - }, - "data": { - "description": "Array of available models", - "type": "array", - "items": { - "$ref": 
"#/components/schemas/OpenAiModelDto" - } - } - }, - "required": [ - "object", - "data" - ] - }, - "OpenAiTextCompletionRequestDto": { - "type": "object", - "properties": { - "prompt": { - "type": "string", - "description": "The prompt to generate completions for", - "example": "Write a short story about a robot" - }, - "model": { - "type": "string", - "description": "The model to use for completion", - "example": "gpt-3.5-turbo-instruct", - "default": "gpt-3.5-turbo-instruct" - }, - "max_tokens": { - "type": "number", - "description": "The maximum number of tokens to generate", - "example": 100, - "minimum": 1, - "maximum": 4096, - "default": 16 - }, - "temperature": { - "type": "number", - "description": "Controls randomness in the output", - "example": 0.7, - "minimum": 0, - "maximum": 2, - "default": 1 - }, - "top_p": { - "type": "number", - "description": "Controls diversity via nucleus sampling", - "example": 0.9, - "minimum": 0, - "maximum": 1, - "default": 1 - }, - "n": { - "type": "number", - "description": "How many completions to generate", - "example": 1, - "minimum": 1, - "maximum": 128, - "default": 1 - }, - "stream": { - "type": "boolean", - "description": "Whether to stream back partial progress", - "example": false, - "default": false - }, - "logprobs": { - "type": "number", - "description": "Include the log probabilities on the logprobs most likely tokens", - "example": null, - "minimum": 0, - "maximum": 5 - }, - "echo": { - "type": "boolean", - "description": "Echo back the prompt in addition to the completion", - "example": false, - "default": false - }, - "stop": { - "description": "Up to 4 sequences where the API will stop generating further tokens", - "example": [ - "\n" - ], - "type": "array", - "items": { - "type": "string" - } - }, - "presence_penalty": { - "type": "number", - "description": "Number between -2.0 and 2.0. 
Positive values penalize new tokens based on whether they appear in the text so far", - "example": 0, - "minimum": -2, - "maximum": 2, - "default": 0 - }, - "frequency_penalty": { - "type": "number", - "description": "Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far", - "example": 0, - "minimum": -2, - "maximum": 2, - "default": 0 - }, - "best_of": { - "type": "number", - "description": "Generates best_of completions server-side and returns the \"best\"", - "example": 1, - "minimum": 1, - "maximum": 20, - "default": 1 - }, - "user": { - "type": "string", - "description": "A unique identifier representing your end-user", - "example": "user-123" - } - }, - "required": [ - "prompt" - ] - }, - "TextCompletionLogprobs": { - "type": "object", - "properties": { - "tokens": { - "description": "The tokens chosen by the model", - "type": "array", - "items": { - "type": "string" - } - }, - "token_logprobs": { - "description": "The log probability of each token", - "type": "array", - "items": { - "type": "number" - } - }, - "top_logprobs": { - "type": "object", - "description": "The top logprobs for each token", - "additionalProperties": true - }, - "text_offset": { - "description": "The character offsets for each token", - "type": "array", - "items": { - "type": "number" - } - } - } - }, - "TextCompletionChoice": { - "type": "object", - "properties": { - "text": { - "type": "string", - "description": "The generated text", - "example": "Once upon a time, there was a robot named R2D2..." 
- }, - "index": { - "type": "number", - "description": "The index of this choice", - "example": 0 - }, - "logprobs": { - "description": "Log probability information for the choice", - "allOf": [ - { - "$ref": "#/components/schemas/TextCompletionLogprobs" - } - ] - }, - "finish_reason": { - "type": "string", - "description": "The reason the model stopped generating tokens", - "example": "stop", - "enum": [ - "stop", - "length", - "content_filter" - ] - } - }, - "required": [ - "text", - "index", - "finish_reason" - ] - }, - "TextCompletionUsage": { - "type": "object", - "properties": { - "prompt_tokens": { - "type": "number", - "description": "Number of tokens in the prompt", - "example": 10 - }, - "completion_tokens": { - "type": "number", - "description": "Number of tokens in the generated completion", - "example": 50 - }, - "total_tokens": { - "type": "number", - "description": "Total number of tokens used", - "example": 60 - } - }, - "required": [ - "prompt_tokens", - "completion_tokens", - "total_tokens" - ] - }, - "OpenAiTextCompletionResponseDto": { - "type": "object", - "properties": { - "id": { - "type": "string", - "description": "Unique identifier for the completion", - "example": "cmpl-7QyqpwdfhqwajicIEznoc6Q47XAyW" - }, - "object": { - "type": "string", - "description": "The object type, always \"text_completion\"", - "example": "text_completion" - }, - "created": { - "type": "number", - "description": "Unix timestamp when the completion was created", - "example": 1692901427 - }, - "model": { - "type": "string", - "description": "The model used for completion", - "example": "gpt-3.5-turbo-instruct" - }, - "choices": { - "description": "Array of completion choices", - "type": "array", - "items": { - "$ref": "#/components/schemas/TextCompletionChoice" - } - }, - "usage": { - "description": "Usage statistics for the completion", - "allOf": [ - { - "$ref": "#/components/schemas/TextCompletionUsage" - } - ] - } - }, - "required": [ - "id", - "object", - 
"created", - "model", - "choices", - "usage" - ] - }, - "FunctionCallDto": { - "type": "object", - "properties": { - "name": { - "type": "string", - "description": "The name of the function to call", - "example": "get_weather" - } - }, - "required": [ - "name" - ] - }, - "ChatMessageDto": { - "type": "object", - "properties": { - "role": { - "type": "string", - "enum": [ - "system", - "user", - "assistant", - "tool", - "function", - "developer" - ], - "description": "The role of the message author" - }, - "content": { - "description": "The contents of the message - can be a string or array of content items", - "oneOf": [ - { - "type": "string" - }, - { - "type": "array", - "items": { - "oneOf": [ - { - "$ref": "#/components/schemas/TextContentDto" - }, - { - "$ref": "#/components/schemas/ImageUrlContentDto" - }, - { - "$ref": "#/components/schemas/InputAudioContentDto" - }, - { - "$ref": "#/components/schemas/FileContentDto" - }, - { - "$ref": "#/components/schemas/RefusalContentDto" - } - ] - } - } - ], - "examples": [ - "Simple text message", - [ - { - "type": "text", - "text": "What is in this image?" 
- }, - { - "type": "image_url", - "image_url": { - "url": "https://example.com/image.jpg" - } - } - ] - ] - }, - "name": { - "type": "object", - "description": "The name of the author of this message (required for function role)", - "example": "user123" - }, - "function_call": { - "description": "The name and arguments of a function that should be called", - "allOf": [ - { - "$ref": "#/components/schemas/FunctionCallDto" - } - ] - }, - "tool_calls": { - "type": "object", - "description": "Tool call information" - }, - "tool_call_id": { - "type": "object", - "description": "Tool call ID (required for tool role messages)", - "example": "call_abc123" - } - }, - "required": [ - "role", - "content" - ] - }, - "StreamOptionsDto": { - "type": "object", - "properties": { - "include_usage": { - "type": "boolean", - "description": "Whether to include usage information in streaming responses", - "example": true - } - } - }, - "ChatCompletionCreateParamsDto": { - "type": "object", - "properties": { - "model": { - "type": "object", - "description": "ID of the model to use. This field is ignored as the agent has its own model configuration.", - "example": "gpt-4" - }, - "messages": { - "description": "A list of messages comprising the conversation so far", - "type": "array", - "items": { - "$ref": "#/components/schemas/ChatMessageDto" - } - }, - "temperature": { - "type": "object", - "description": "What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.", - "example": 0.7, - "minimum": 0, - "maximum": 2 - }, - "top_p": { - "type": "object", - "description": "An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass.", - "example": 1, - "minimum": 0, - "maximum": 1 - }, - "n": { - "type": "object", - "description": "How many chat completion choices to generate for each input message", - "example": 1, - "minimum": 1, - "maximum": 128 - }, - "stream": { - "type": "object", - "description": "Whether to stream back partial responses", - "example": false, - "default": false - }, - "stop": { - "description": "Up to 4 sequences where the API will stop generating further tokens", - "oneOf": [ - { - "type": "string" - }, - { - "type": "array", - "items": { - "type": "string" - } - } - ], - "example": [ - "\n", - "Human:" - ] - }, - "max_tokens": { - "type": "object", - "deprecated": true, - "description": "The maximum number of tokens to generate in the chat completion", - "example": 100, - "minimum": 1 - }, - "max_completion_tokens": { - "type": "object", - "description": "The maximum number of tokens to generate in the chat completion (alternative to max_tokens)", - "example": 100, - "minimum": 1 - }, - "presence_penalty": { - "type": "object", - "description": "Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.", - "example": 0, - "minimum": -2, - "maximum": 2 - }, - "frequency_penalty": { - "type": "object", - "description": "Number between -2.0 and 2.0. 
Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.", - "example": 0, - "minimum": -2, - "maximum": 2 - }, - "logit_bias": { - "type": "object", - "description": "Modify the likelihood of specified tokens appearing in the completion", - "additionalProperties": { - "type": "number" - }, - "example": { - "50256": -100 - } - }, - "user": { - "type": "string", - "description": "A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse", - "example": "user-1234" - }, - "response_format": { - "oneOf": [ - { - "$ref": "#/components/schemas/ResponseFormatTextDto" - }, - { - "$ref": "#/components/schemas/ResponseFormatJsonObjectDto" - }, - { - "$ref": "#/components/schemas/ResponseFormatJsonSchemaDto" - } - ], - "description": "An object specifying the format that the model must output" - }, - "tools": { - "oneOf": [ - { - "$ref": "#/components/schemas/FunctionToolDto" - }, - { - "$ref": "#/components/schemas/CustomToolDto" - } - ], - "description": "A list of tools the model may call" - }, - "tool_choice": { - "description": "Controls which (if any) tool is called by the model", - "oneOf": [ - { - "type": "string", - "enum": [ - "none", - "auto", - "required" - ] - }, - { - "type": "object" - } - ], - "example": "auto" - }, - "stream_options": { - "description": "Options for streaming response", - "allOf": [ - { - "$ref": "#/components/schemas/StreamOptionsDto" - } - ] - }, - "logprobs": { - "type": "object", - "description": "Whether to return log probabilities of the output tokens", - "example": false - }, - "top_logprobs": { - "type": "object", - "description": "An integer between 0 and 5 specifying the number of most likely tokens to return at each token position", - "example": 0, - "minimum": 0, - "maximum": 5 - } - }, - "required": [ - "messages" - ] - }, - "ResponseFormatTextDto": { - "type": "object", - "properties": { - 
"type": { - "type": "string", - "enum": [ - "text" - ], - "description": "The type of response format", - "example": "text" - } - }, - "required": [ - "type" - ] - }, - "ResponseFormatJsonObjectDto": { - "type": "object", - "properties": { - "type": { - "type": "string", - "enum": [ - "json_object" - ], - "description": "The type of response format", - "example": "json_object" - } - }, - "required": [ - "type" - ] - }, - "ResponseFormatJsonSchemaDto": { - "type": "object", - "properties": { - "type": { - "type": "string", - "enum": [ - "json_schema" - ], - "description": "The type of response format", - "example": "json_schema" - }, - "json_schema": { - "type": "object", - "description": "JSON schema for json_schema type", - "additionalProperties": true - } - }, - "required": [ - "type", - "json_schema" - ] - }, - "FunctionToolDto": { - "type": "object", - "properties": { - "type": { - "type": "string", - "enum": [ - "function" - ], - "description": "The type of tool", - "example": "function" - }, - "function": { - "type": "object", - "description": "The function definition", - "additionalProperties": true - } - }, - "required": [ - "type", - "function" - ] - }, - "CustomToolDto": { - "type": "object", - "properties": { - "type": { - "type": "string", - "enum": [ - "custom" - ], - "description": "The type of tool", - "example": "custom" - }, - "custom": { - "type": "object", - "description": "Custom tool definition", - "additionalProperties": true - } - }, - "required": [ - "type", - "custom" - ] - }, - "TextContentDto": { - "type": "object", - "properties": { - "type": { - "type": "string", - "enum": [ - "text" - ], - "description": "Content type - always \"text\"", - "example": "text" - }, - "text": { - "type": "string", - "description": "The text content", - "example": "What is in this image?" 
- } - }, - "required": [ - "type", - "text" - ] - }, - "ImageUrlDto": { - "type": "object", - "properties": { - "url": { - "type": "string", - "description": "The URL of the image", - "example": "https://example.com/image.jpg" - }, - "detail": { - "type": "string", - "description": "The detail level of the image", - "enum": [ - "low", - "high", - "auto" - ], - "example": "auto" - } - }, - "required": [ - "url" - ] - }, - "ImageUrlContentDto": { - "type": "object", - "properties": { - "type": { - "type": "string", - "enum": [ - "image_url" - ], - "description": "Content type - always \"image_url\"", - "example": "image_url" - }, - "image_url": { - "description": "Image URL object", - "example": { - "url": "https://example.com/image.jpg", - "detail": "auto" - }, - "allOf": [ - { - "$ref": "#/components/schemas/ImageUrlDto" - } - ] - } - }, - "required": [ - "type", - "image_url" - ] - }, - "InputAudioDto": { - "type": "object", - "properties": { - "data": { - "type": "string", - "description": "Base64 encoded audio data", - "example": "UklGRnoGAABXQVZFZm10IBAAAAABAAEAQB8AAEAfAAABAAgAZGF0YQoGAACBhYqFbF1fdJivrJBhNjVgodDbq2EcBj+a2/LDciUFLIHO8tiJNwgZaLvt559NEAxQp+PwtmMcBjiR1/LMeSwFJHfH8N2QQAoUXrTp66hVFApGn+DyvmwhBSuBzvLZiTYIG2m98OScTgwOUarm7bllHgU7jdXzzn0uBSF+zO/eizEIHWq+8+OWT" - }, - "format": { - "type": "string", - "description": "Audio format", - "enum": [ - "wav", - "mp3", - "m4a", - "ogg", - "flac", - "webm" - ], - "example": "wav" - } - }, - "required": [ - "data", - "format" - ] - }, - "InputAudioContentDto": { - "type": "object", - "properties": { - "type": { - "type": "string", - "enum": [ - "input_audio" - ], - "description": "Content type - always \"input_audio\"", - "example": "input_audio" - }, - "input_audio": { - "description": "Input audio object", - "example": { - "data": 
"UklGRnoGAABXQVZFZm10IBAAAAABAAEAQB8AAEAfAAABAAgAZGF0YQoGAACBhYqFbF1fdJivrJBhNjVgodDbq2EcBj+a2/LDciUFLIHO8tiJNwgZaLvt559NEAxQp+PwtmMcBjiR1/LMeSwFJHfH8N2QQAoUXrTp66hVFApGn+DyvmwhBSuBzvLZiTYIG2m98OScTgwOUarm7bllHgU7jdXzzn0uBSF+zO/eizEIHWq+8+OWT", - "format": "wav" - }, - "allOf": [ - { - "$ref": "#/components/schemas/InputAudioDto" - } - ] - } - }, - "required": [ - "type", - "input_audio" - ] - }, - "FileContentDto": { - "type": "object", - "properties": { - "type": { - "type": "string", - "enum": [ - "file" - ], - "description": "Content type - always \"file\"", - "example": "file" - }, - "file": { - "type": "object", - "description": "File object (OpenAI File type)", - "additionalProperties": true - } - }, - "required": [ - "type", - "file" - ] - }, - "RefusalContentDto": { - "type": "object", - "properties": { - "type": { - "type": "string", - "enum": [ - "refusal" - ], - "description": "Content type - always \"refusal\"", - "example": "refusal" - }, - "refusal": { - "type": "string", - "description": "Refusal message", - "example": "I cannot help with that request" - } - }, - "required": [ - "type", - "refusal" - ] - }, - "CreateResponseDto": { - "type": "object", - "properties": { - "model": { - "type": "string", - "description": "Модель для использования в ответе. Это поле игнорируется, так как агент имеет свою конфигурацию модели.", - "example": "gpt-4" - }, - "instructions": { - "type": "string", - "description": "Инструкции для генерации ответа", - "example": "Ты - полезный помощник." 
- }, - "input": { - "description": "Входной текст или сообщения для ответа", - "oneOf": [ - { - "type": "string" - }, - { - "type": "array", - "items": { - "type": "object" - } - } - ] - }, - "max_output_tokens": { - "type": "number", - "description": "Максимальное количество токенов для генерации в ответе", - "example": 1000, - "minimum": 1 - }, - "temperature": { - "type": "number", - "description": "Температура для генерации ответа", - "example": 0.7, - "minimum": 0, - "maximum": 2 - }, - "metadata": { - "type": "object", - "description": "Дополнительные метаданные для ответа", - "additionalProperties": true - }, - "tools": { - "type": "array", - "description": "Инструменты, доступные для модели" - }, - "stream": { - "type": "boolean", - "description": "Флаг, указывающий на то, что ответ нужно передавать по частям", - "example": false, - "default": false - }, - "stream_options": { - "type": "object", - "description": "Опции для потоковой передачи. Устанавливается только при stream: true", - "additionalProperties": true - }, - "background": { - "type": "boolean", - "description": "Запуск модели в фоновом режиме", - "example": false, - "default": false - }, - "text": { - "type": "object", - "description": "Конфигурация для текстового ответа от модели", - "additionalProperties": true - }, - "tool_choice": { - "description": "Как модель должна выбирать инструменты при генерации ответа", - "oneOf": [ - { - "type": "string" - }, - { - "type": "object" - } - ] - }, - "parallel_tool_calls": { - "type": "boolean", - "description": "Разрешить модели выполнять вызовы инструментов параллельно", - "example": true, - "default": true - }, - "max_tool_calls": { - "type": "number", - "description": "Максимальное количество вызовов встроенных инструментов", - "example": 10 - }, - "previous_response_id": { - "type": "string", - "description": "ID предыдущего ответа для создания многоходовых диалогов", - "example": "resp_abc123" - }, - "conversation": { - "description": "Диалог, к 
которому принадлежит этот ответ", - "oneOf": [ - { - "type": "string" - }, - { - "type": "object" - } - ] - }, - "include": { - "type": "array", - "description": "Дополнительные выходные данные для включения в ответ модели", - "items": { - "type": "string" - }, - "example": [ - "web_search_call.action.sources", - "code_interpreter_call.outputs" - ] - }, - "store": { - "type": "boolean", - "description": "Хранить ли сгенерированный ответ модели для последующего получения через API", - "example": true, - "default": true - }, - "top_p": { - "type": "number", - "description": "Альтернатива temperature. Модель рассматривает токены с top_p вероятностной массой", - "example": 1, - "minimum": 0, - "maximum": 1, - "default": 1 - }, - "top_logprobs": { - "type": "number", - "description": "Количество наиболее вероятных токенов для возврата на каждой позиции токена", - "example": 5, - "minimum": 0, - "maximum": 20 - }, - "truncation": { - "type": "string", - "description": "Стратегия усечения для ответа модели", - "enum": [ - "auto", - "disabled" - ], - "default": "disabled" - }, - "service_tier": { - "type": "string", - "description": "Уровень обслуживания для обработки запроса", - "enum": [ - "auto", - "default", - "flex", - "priority" - ], - "default": "auto" - }, - "safety_identifier": { - "type": "string", - "description": "Стабильный идентификатор для обнаружения пользователей, нарушающих политику использования", - "example": "user-hash-12345" - }, - "prompt_cache_key": { - "type": "string", - "description": "Используется OpenAI для кэширования ответов на похожие запросы", - "example": "cache-key-12345" - }, - "prompt": { - "type": "object", - "description": "Ссылка на шаблон промпта и его переменные", - "additionalProperties": true - }, - "reasoning": { - "type": "object", - "description": "Конфигурация для reasoning моделей (gpt-5 и o-series)", - "additionalProperties": true - }, - "user": { - "type": "string", - "description": "Устарело. 
Используйте safety_identifier или prompt_cache_key", - "example": "user-12345", - "deprecated": true - } - } - }, - "GetResponseQueryDto": { - "type": "object", - "properties": { - "include": { - "description": "Дополнительные поля для включения в ответ", - "example": [ - "web_search_call.action.sources", - "code_interpreter_call.outputs" - ], - "type": "array", - "items": { - "type": "string" - } - }, - "include_obfuscation": { - "type": "boolean", - "description": "Включить обфускацию потока для защиты от атак по побочным каналам", - "example": true - }, - "starting_after": { - "type": "number", - "description": "Номер последовательности события, после которого начать потоковую передачу", - "example": 0, - "minimum": 0 - }, - "stream": { - "type": "boolean", - "description": "Потоковая передача данных ответа модели", - "example": false - } - } - }, - "ResponseUsageDto": { - "type": "object", - "properties": { - "prompt_tokens": { - "type": "number", - "description": "Количество токенов в промпте", - "example": 150 - }, - "completion_tokens": { - "type": "number", - "description": "Количество токенов в ответе", - "example": 300 - }, - "total_tokens": { - "type": "number", - "description": "Общее количество токенов", - "example": 450 - } - }, - "required": [ - "prompt_tokens", - "completion_tokens", - "total_tokens" - ] - }, - "OpenAiResponseDto": { - "type": "object", - "properties": { - "id": { - "type": "string", - "description": "Уникальный идентификатор ответа", - "example": "resp_abc123" - }, - "object": { - "type": "string", - "description": "Тип объекта", - "example": "response" - }, - "created_at": { - "type": "number", - "description": "Временная метка создания", - "example": 1234567890 - }, - "model": { - "type": "string", - "description": "Идентификатор модели", - "example": "gpt-4" - }, - "status": { - "type": "string", - "description": "Статус ответа", - "example": "completed" - }, - "usage": { - "description": "Информация об использовании токенов", - 
"allOf": [ - { - "$ref": "#/components/schemas/ResponseUsageDto" - } - ] - } - }, - "required": [ - "id", - "object", - "created_at", - "model", - "status" - ] - }, - "ConversationItemContentInputText": { - "type": "object", - "properties": { - "type": { - "type": "string", - "description": "Content type", - "example": "input_text" - }, - "text": { - "type": "string", - "description": "Text content", - "example": "Hello!" - } - }, - "required": [ - "type", - "text" - ] - }, - "ConversationItemMessage": { - "type": "object", - "properties": { - "type": { - "type": "string", - "description": "Item type", - "example": "message" - }, - "role": { - "type": "string", - "description": "Role of the message", - "example": "user", - "enum": [ - "user", - "assistant" - ] - }, - "content": { - "description": "Content of the message", - "type": "array", - "items": { - "$ref": "#/components/schemas/ConversationItemContentInputText" - } - } - }, - "required": [ - "type", - "role" - ] - }, - "CreateConversationDto": { - "type": "object", - "properties": { - "items": { - "description": "Initial items to include in the conversation context. 
You may add up to 20 items at a time.", - "type": "array", - "items": { - "$ref": "#/components/schemas/ConversationItemMessage" - } - }, - "metadata": { - "type": "object", - "description": "Set of 16 key-value pairs that can be attached to an object", - "example": { - "topic": "demo" - }, - "nullable": true - } - } - }, - "UpdateConversationDto": { - "type": "object", - "properties": { - "metadata": { - "type": "object", - "description": "Set of 16 key-value pairs that can be attached to an object", - "example": { - "topic": "project-x" - } - } - }, - "required": [ - "metadata" - ] - }, - "ConversationDto": { - "type": "object", - "properties": { - "id": { - "type": "string", - "description": "The unique ID of the conversation", - "example": "conv_123" - }, - "object": { - "type": "string", - "description": "The object type, which is always conversation", - "example": "conversation" - }, - "created_at": { - "type": "number", - "description": "The time at which the conversation was created, measured in seconds since the Unix epoch", - "example": 1741900000 - }, - "metadata": { - "type": "object", - "description": "Set of 16 key-value pairs that can be attached to an object", - "example": { - "topic": "demo" - }, - "nullable": true - } - }, - "required": [ - "id", - "object", - "created_at" - ] - }, - "ConversationDeletedDto": { - "type": "object", - "properties": { - "id": { - "type": "string", - "description": "The ID of the deleted conversation", - "example": "conv_123" - }, - "object": { - "type": "string", - "description": "The object type", - "example": "conversation.deleted" - }, - "deleted": { - "type": "boolean", - "description": "Whether the conversation was deleted", - "example": true - } - }, - "required": [ - "id", - "object", - "deleted" - ] - }, - "ConversationItemContentDto": { - "type": "object", - "properties": { - "type": { - "type": "string", - "description": "Content type", - "example": "input_text" - }, - "text": { - "type": "string", - 
"description": "Text content", - "example": "Hello!" - } - }, - "required": [ - "type" - ] - }, - "ConversationItemDto": { - "type": "object", - "properties": { - "type": { - "type": "string", - "description": "Item type", - "example": "message" - }, - "id": { - "type": "string", - "description": "The unique ID of the item", - "example": "msg_abc" - }, - "status": { - "type": "string", - "description": "Status of the item", - "example": "completed" - }, - "role": { - "type": "string", - "description": "Role of the message", - "example": "user" - }, - "content": { - "description": "Content of the item", - "type": "array", - "items": { - "$ref": "#/components/schemas/ConversationItemContentDto" - } - } - }, - "required": [ - "type", - "id", - "status" - ] - }, - "ConversationItemListDto": { - "type": "object", - "properties": { - "object": { - "type": "string", - "description": "The type of object returned", - "example": "list" - }, - "data": { - "description": "A list of conversation items", - "type": "array", - "items": { - "$ref": "#/components/schemas/ConversationItemDto" - } - }, - "first_id": { - "type": "string", - "description": "The ID of the first item in the list", - "example": "msg_abc" - }, - "last_id": { - "type": "string", - "description": "The ID of the last item in the list", - "example": "msg_abc" - }, - "has_more": { - "type": "boolean", - "description": "Whether there are more items available", - "example": false - } - }, - "required": [ - "object", - "data", - "first_id", - "last_id", - "has_more" - ] - }, - "ListItemsQueryDto": { - "type": "object", - "properties": { - "after": { - "type": "string", - "description": "An item ID to list items after, used in pagination" - }, - "include": { - "description": "Specify additional output data to include in the model response", - "type": "array", - "items": { - "type": "string" - } - }, - "limit": { - "type": "number", - "description": "A limit on the number of objects to be returned. 
Limit can range between 1 and 100, and the default is 20", - "default": 20, - "minimum": 1, - "maximum": 100 - }, - "order": { - "type": "string", - "description": "The order to return the input items in", - "enum": [ - "asc", - "desc" - ], - "default": "desc" - } - } - }, - "GetItemQueryDto": { - "type": "object", - "properties": { - "include": { - "description": "Specify additional output data to include in the model response", - "type": "array", - "items": { - "type": "string" - } - } - } - }, - "ItemContentDto": { - "type": "object", - "properties": { - "type": { - "type": "string", - "description": "Content type", - "example": "input_text" - }, - "text": { - "type": "string", - "description": "Text content", - "example": "Hello!" - } - }, - "required": [ - "type", - "text" - ] - }, - "CreateItemDto": { - "type": "object", - "properties": { - "type": { - "type": "string", - "description": "Item type", - "example": "message" - }, - "role": { - "type": "string", - "description": "Role of the message", - "example": "user", - "enum": [ - "user", - "assistant" - ] - }, - "content": { - "description": "Content of the message", - "type": "array", - "items": { - "$ref": "#/components/schemas/ItemContentDto" - } - } - }, - "required": [ - "type", - "role", - "content" - ] - }, - "CreateItemsDto": { - "type": "object", - "properties": { - "items": { - "description": "The items to add to the conversation. 
You may add up to 20 items at a time", - "type": "array", - "items": { - "$ref": "#/components/schemas/CreateItemDto" - } - } - }, - "required": [ - "items" - ] - }, - "CreateItemsQueryDto": { - "type": "object", - "properties": { - "include": { - "description": "Additional fields to include in the response", - "type": "array", - "items": { - "type": "string" - } - } - } - } - } - } -} \ No newline at end of file diff --git a/backend/.env.computer-svc b/backend/.env.computer-svc deleted file mode 100644 index 6a7e3f4..0000000 --- a/backend/.env.computer-svc +++ /dev/null @@ -1,65 +0,0 @@ -# ============================================ -# GooSeek Computer Service — Environment -# ============================================ -# Минимальный .env для запуска computer-svc -# Копировать в .env и заполнить значения - -# ============================================ -# ОБЯЗАТЕЛЬНЫЕ -# ============================================ - -# PostgreSQL — хранение задач, памяти, артефактов -DATABASE_URL=postgres://gooseek:password@postgres:5432/gooseek?sslmode=disable - -# Минимум один LLM провайдер -OPENAI_API_KEY=sk-xxx - -# ============================================ -# РЕКОМЕНДУЕМЫЕ (multi-model routing) -# ============================================ - -# Claude — лучший для reasoning и coding -ANTHROPIC_API_KEY=sk-ant-xxx - -# Gemini — лучший для long context и research -GEMINI_API_KEY=xxx - -# Redis — кэширование -REDIS_URL=redis://redis:6379 - -# ============================================ -# ПОРТ СЕРВИСА -# ============================================ - -COMPUTER_SVC_PORT=3030 - -# ============================================ -# SANDBOX (Docker для исполнения кода) -# ============================================ - -SANDBOX_IMAGE=gooseek/sandbox:latest - -# ============================================ -# CONNECTORS (опционально) -# ============================================ - -# --- Telegram Bot --- -# Получить токен: @BotFather → /newbot -TELEGRAM_BOT_TOKEN= - -# --- 
Email SMTP --- -SMTP_HOST= -SMTP_PORT=587 -SMTP_USERNAME= -SMTP_PASSWORD= -SMTP_FROM= -SMTP_FROM_NAME=GooSeek Computer - -# --- S3/MinIO Storage --- -S3_ENDPOINT= -S3_ACCESS_KEY= -S3_SECRET_KEY= -S3_BUCKET=gooseek-artifacts -S3_USE_SSL=false -S3_REGION=us-east-1 -S3_PUBLIC_URL= diff --git a/backend/Makefile b/backend/Makefile index 01d82ab..473be13 100644 --- a/backend/Makefile +++ b/backend/Makefile @@ -3,7 +3,7 @@ GO=go GOFLAGS=-ldflags="-s -w" -SERVICES=api-gateway chat-svc agent-svc search-svc llm-svc scraper-svc discover-svc collection-svc file-svc thread-svc computer-svc +SERVICES=api-gateway auth-svc chat-svc agent-svc search-svc llm-svc scraper-svc discover-svc collection-svc file-svc thread-svc admin-svc all: build diff --git a/backend/cmd/admin-svc/main.go b/backend/cmd/admin-svc/main.go new file mode 100644 index 0000000..322b1f3 --- /dev/null +++ b/backend/cmd/admin-svc/main.go @@ -0,0 +1,440 @@ +package main + +import ( + "context" + "fmt" + "log" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/internal/admin" + "github.com/gooseek/backend/internal/db" + "github.com/gooseek/backend/pkg/config" + "github.com/gooseek/backend/pkg/middleware" +) + +func main() { + cfg, err := config.Load() + if err != nil { + log.Fatal("Failed to load config:", err) + } + + var database *db.PostgresDB + if cfg.DatabaseURL != "" { + database, err = db.NewPostgresDB(cfg.DatabaseURL) + if err != nil { + log.Fatal("Failed to connect to database:", err) + } + defer database.Close() + + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + if err := database.RunMigrations(ctx); err != nil { + log.Printf("Migration warning: %v", err) + } + if err := admin.RunAdminMigrations(ctx, database.DB()); err != nil { + log.Printf("Admin migrations warning: %v", err) + } + cancel() + log.Println("PostgreSQL connected") + } else { + 
log.Fatal("DATABASE_URL is required for admin-svc") + } + + userRepo := admin.NewUserRepository(database.DB()) + postRepo := admin.NewPostRepository(database.DB()) + settingsRepo := admin.NewSettingsRepository(database.DB()) + discoverRepo := admin.NewDiscoverConfigRepository(database.DB()) + auditRepo := admin.NewAuditRepository(database.DB()) + + app := fiber.New(fiber.Config{ + BodyLimit: 50 * 1024 * 1024, + ReadTimeout: 30 * time.Second, + WriteTimeout: 30 * time.Second, + IdleTimeout: 60 * time.Second, + }) + + app.Use(logger.New()) + app.Use(cors.New()) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + app.Get("/ready", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ready"}) + }) + + api := app.Group("/api/v1/admin") + + api.Use(middleware.JWT(middleware.JWTConfig{ + Secret: cfg.JWTSecret, + AuthSvcURL: cfg.AuthSvcURL, + AllowGuest: false, + })) + api.Use(middleware.RequireRole("admin")) + + api.Get("/dashboard", func(c *fiber.Ctx) error { + stats, err := getDashboardStats(c.Context(), userRepo, postRepo) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + return c.JSON(stats) + }) + + usersGroup := api.Group("/users") + { + usersGroup.Get("/", func(c *fiber.Ctx) error { + page := c.QueryInt("page", 1) + perPage := c.QueryInt("perPage", 20) + search := c.Query("search") + + users, total, err := userRepo.List(c.Context(), page, perPage, search) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(admin.UserListResponse{ + Users: users, + Total: total, + Page: page, + PerPage: perPage, + }) + }) + + usersGroup.Get("/:id", func(c *fiber.Ctx) error { + user, err := userRepo.GetByID(c.Context(), c.Params("id")) + if err != nil { + return c.Status(404).JSON(fiber.Map{"error": "User not found"}) + } + return c.JSON(user) + }) + + usersGroup.Post("/", func(c *fiber.Ctx) error { + var req admin.UserCreateRequest + if err 
:= c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + user, err := userRepo.Create(c.Context(), &req) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + logAudit(c, auditRepo, "create", "user", user.ID) + return c.Status(201).JSON(user) + }) + + usersGroup.Patch("/:id", func(c *fiber.Ctx) error { + var req admin.UserUpdateRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + user, err := userRepo.Update(c.Context(), c.Params("id"), &req) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + logAudit(c, auditRepo, "update", "user", user.ID) + return c.JSON(user) + }) + + usersGroup.Delete("/:id", func(c *fiber.Ctx) error { + if err := userRepo.Delete(c.Context(), c.Params("id")); err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + logAudit(c, auditRepo, "delete", "user", c.Params("id")) + return c.SendStatus(204) + }) + } + + postsGroup := api.Group("/posts") + { + postsGroup.Get("/", func(c *fiber.Ctx) error { + page := c.QueryInt("page", 1) + perPage := c.QueryInt("perPage", 20) + status := c.Query("status") + category := c.Query("category") + + posts, total, err := postRepo.List(c.Context(), page, perPage, status, category) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(admin.PostListResponse{ + Posts: posts, + Total: total, + Page: page, + PerPage: perPage, + }) + }) + + postsGroup.Get("/:id", func(c *fiber.Ctx) error { + post, err := postRepo.GetByID(c.Context(), c.Params("id")) + if err != nil { + return c.Status(404).JSON(fiber.Map{"error": "Post not found"}) + } + return c.JSON(post) + }) + + postsGroup.Post("/", func(c *fiber.Ctx) error { + var req admin.PostCreateRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": 
"Invalid request body"}) + } + + authorID := middleware.GetUserID(c) + post, err := postRepo.Create(c.Context(), authorID, &req) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + logAudit(c, auditRepo, "create", "post", post.ID) + return c.Status(201).JSON(post) + }) + + postsGroup.Patch("/:id", func(c *fiber.Ctx) error { + var req admin.PostUpdateRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + post, err := postRepo.Update(c.Context(), c.Params("id"), &req) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + logAudit(c, auditRepo, "update", "post", post.ID) + return c.JSON(post) + }) + + postsGroup.Delete("/:id", func(c *fiber.Ctx) error { + if err := postRepo.Delete(c.Context(), c.Params("id")); err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + logAudit(c, auditRepo, "delete", "post", c.Params("id")) + return c.SendStatus(204) + }) + + postsGroup.Post("/:id/publish", func(c *fiber.Ctx) error { + post, err := postRepo.Publish(c.Context(), c.Params("id")) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + logAudit(c, auditRepo, "publish", "post", post.ID) + return c.JSON(post) + }) + } + + settingsGroup := api.Group("/settings") + { + settingsGroup.Get("/", func(c *fiber.Ctx) error { + settings, err := settingsRepo.Get(c.Context()) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + return c.JSON(settings) + }) + + settingsGroup.Patch("/", func(c *fiber.Ctx) error { + var settings admin.PlatformSettings + if err := c.BodyParser(&settings); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + updated, err := settingsRepo.Update(c.Context(), &settings) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + logAudit(c, auditRepo, "update", 
"settings", "platform") + return c.JSON(updated) + }) + + settingsGroup.Get("/features", func(c *fiber.Ctx) error { + features, err := settingsRepo.GetFeatures(c.Context()) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + return c.JSON(features) + }) + + settingsGroup.Patch("/features", func(c *fiber.Ctx) error { + var features admin.FeatureFlags + if err := c.BodyParser(&features); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if err := settingsRepo.UpdateFeatures(c.Context(), &features); err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + logAudit(c, auditRepo, "update", "settings", "features") + return c.JSON(features) + }) + } + + discoverGroup := api.Group("/discover") + { + discoverGroup.Get("/categories", func(c *fiber.Ctx) error { + categories, err := discoverRepo.ListCategories(c.Context()) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + return c.JSON(fiber.Map{"categories": categories}) + }) + + discoverGroup.Post("/categories", func(c *fiber.Ctx) error { + var req admin.DiscoverCategoryCreateRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + category, err := discoverRepo.CreateCategory(c.Context(), &req) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + logAudit(c, auditRepo, "create", "discover_category", category.ID) + return c.Status(201).JSON(category) + }) + + discoverGroup.Patch("/categories/:id", func(c *fiber.Ctx) error { + var req admin.DiscoverCategoryUpdateRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + category, err := discoverRepo.UpdateCategory(c.Context(), c.Params("id"), &req) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + logAudit(c, auditRepo, 
"update", "discover_category", category.ID) + return c.JSON(category) + }) + + discoverGroup.Delete("/categories/:id", func(c *fiber.Ctx) error { + if err := discoverRepo.DeleteCategory(c.Context(), c.Params("id")); err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + logAudit(c, auditRepo, "delete", "discover_category", c.Params("id")) + return c.SendStatus(204) + }) + + discoverGroup.Post("/categories/reorder", func(c *fiber.Ctx) error { + var req struct { + Order []string `json:"order"` + } + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if err := discoverRepo.ReorderCategories(c.Context(), req.Order); err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + logAudit(c, auditRepo, "reorder", "discover_categories", "") + return c.SendStatus(204) + }) + + discoverGroup.Get("/sources", func(c *fiber.Ctx) error { + sources, err := discoverRepo.ListSources(c.Context()) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + return c.JSON(fiber.Map{"sources": sources}) + }) + + discoverGroup.Post("/sources", func(c *fiber.Ctx) error { + var req admin.DiscoverSourceCreateRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + source, err := discoverRepo.CreateSource(c.Context(), &req) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + logAudit(c, auditRepo, "create", "discover_source", source.ID) + return c.Status(201).JSON(source) + }) + + discoverGroup.Delete("/sources/:id", func(c *fiber.Ctx) error { + if err := discoverRepo.DeleteSource(c.Context(), c.Params("id")); err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + logAudit(c, auditRepo, "delete", "discover_source", c.Params("id")) + return c.SendStatus(204) + }) + } + + auditGroup := api.Group("/audit") + { + 
auditGroup.Get("/", func(c *fiber.Ctx) error { + page := c.QueryInt("page", 1) + perPage := c.QueryInt("perPage", 50) + action := c.Query("action") + resource := c.Query("resource") + + logs, total, err := auditRepo.List(c.Context(), page, perPage, action, resource) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(fiber.Map{ + "logs": logs, + "total": total, + "page": page, + "perPage": perPage, + }) + }) + } + + port := config.GetEnvInt("ADMIN_SVC_PORT", 3040) + log.Printf("admin-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func getDashboardStats(ctx context.Context, userRepo *admin.UserRepository, postRepo *admin.PostRepository) (*admin.DashboardStats, error) { + totalUsers, _ := userRepo.Count(ctx, "") + activeUsers, _ := userRepo.CountActive(ctx) + totalPosts, _ := postRepo.Count(ctx, "") + publishedPosts, _ := postRepo.Count(ctx, "published") + + return &admin.DashboardStats{ + TotalUsers: totalUsers, + ActiveUsers: activeUsers, + TotalPosts: totalPosts, + PublishedPosts: publishedPosts, + StorageUsedMB: 0, + StorageLimitMB: 10240, + }, nil +} + +func logAudit(c *fiber.Ctx, repo *admin.AuditRepository, action, resource, resourceID string) { + user := middleware.GetUser(c) + if user == nil { + return + } + + log := &admin.AuditLog{ + UserID: user.UserID, + UserEmail: user.Email, + Action: action, + Resource: resource, + ResourceID: resourceID, + IPAddress: c.IP(), + UserAgent: c.Get("User-Agent"), + } + + go repo.Create(context.Background(), log) +} diff --git a/backend/cmd/agent-svc/main.go b/backend/cmd/agent-svc/main.go index e295e64..5aeb85d 100644 --- a/backend/cmd/agent-svc/main.go +++ b/backend/cmd/agent-svc/main.go @@ -16,6 +16,7 @@ import ( "github.com/gooseek/backend/internal/search" "github.com/gooseek/backend/internal/session" "github.com/gooseek/backend/pkg/config" + "github.com/gooseek/backend/pkg/middleware" "github.com/gooseek/backend/pkg/ndjson" ) @@ 
-69,7 +70,13 @@ func main() { return c.JSON(fiber.Map{"status": "ok"}) }) - app.Post("/api/v1/agents/search", func(c *fiber.Ctx) error { + agents := app.Group("/api/v1/agents", middleware.JWT(middleware.JWTConfig{ + Secret: cfg.JWTSecret, + AuthSvcURL: cfg.AuthSvcURL, + AllowGuest: false, + })) + + agents.Post("/search", func(c *fiber.Ctx) error { var req SearchRequest if err := c.BodyParser(&req); err != nil { return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) @@ -182,6 +189,10 @@ func main() { return nil }) + agents.Get("/status", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ready", "user": middleware.GetUserID(c)}) + }) + port := cfg.AgentSvcPort log.Printf("agent-svc listening on :%d", port) log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) diff --git a/backend/cmd/api-gateway/main.go b/backend/cmd/api-gateway/main.go index 195732c..668999a 100644 --- a/backend/cmd/api-gateway/main.go +++ b/backend/cmd/api-gateway/main.go @@ -14,6 +14,7 @@ import ( "github.com/gofiber/fiber/v2/middleware/cors" "github.com/gofiber/fiber/v2/middleware/logger" "github.com/gooseek/backend/pkg/config" + "github.com/gooseek/backend/pkg/middleware" ) var svcURLs map[string]string @@ -25,6 +26,7 @@ func main() { } svcURLs = map[string]string{ + "auth": cfg.AuthSvcURL, "chat": cfg.ChatSvcURL, "agents": cfg.AgentSvcURL, "search": cfg.SearchSvcURL, @@ -36,7 +38,7 @@ func main() { "discover": cfg.DiscoverSvcURL, "finance": cfg.FinanceHeatmapURL, "learning": cfg.LearningSvcURL, - "computer": cfg.ComputerSvcURL, + "admin": cfg.AdminSvcURL, } app := fiber.New(fiber.Config{ @@ -54,6 +56,21 @@ func main() { AllowMethods: "GET, POST, PUT, PATCH, DELETE, OPTIONS", })) + app.Use(middleware.JWT(middleware.JWTConfig{ + Secret: cfg.JWTSecret, + AuthSvcURL: cfg.AuthSvcURL, + AllowGuest: true, + })) + + app.Use(middleware.TieredRateLimit(middleware.TieredRateLimitConfig{ + Tiers: map[string]middleware.TierConfig{ + "free": {Max: 60, Window: time.Minute}, + "pro": 
{Max: 300, Window: time.Minute}, + "business": {Max: 1000, Window: time.Minute}, + }, + DefaultTier: "free", + })) + app.Get("/health", func(c *fiber.Ctx) error { return c.JSON(fiber.Map{"status": "ok"}) }) @@ -72,6 +89,8 @@ func main() { func getTarget(path string) (base, rewrite string) { switch { + case strings.HasPrefix(path, "/api/v1/auth"): + return svcURLs["auth"], path case path == "/api/chat" || strings.HasPrefix(path, "/api/chat?"): return svcURLs["chat"], "/api/v1/chat" case strings.HasPrefix(path, "/api/v1/agents"): @@ -102,8 +121,8 @@ func getTarget(path string) (base, rewrite string) { return svcURLs["finance"], path case strings.HasPrefix(path, "/api/v1/learning"): return svcURLs["learning"], path - case strings.HasPrefix(path, "/api/v1/computer"): - return svcURLs["computer"], path + case strings.HasPrefix(path, "/api/v1/admin"): + return svcURLs["admin"], path default: return "", "" } @@ -195,11 +214,44 @@ func handleProxy(c *fiber.Ctx) error { } } - client := &http.Client{Timeout: time.Minute} + isSSE := strings.Contains(path, "/stream") || + c.Get("Accept") == "text/event-stream" + + timeout := time.Minute + if isSSE { + timeout = 30 * time.Minute + } + + client := &http.Client{Timeout: timeout} resp, err := client.Do(req) if err != nil { return handleFallback(c, path) } + + if isSSE && resp.Header.Get("Content-Type") == "text/event-stream" { + c.Set("Content-Type", "text/event-stream") + c.Set("Cache-Control", "no-cache") + c.Set("Connection", "keep-alive") + c.Set("Transfer-Encoding", "chunked") + c.Set("X-Accel-Buffering", "no") + + c.Context().SetBodyStreamWriter(func(w *bufio.Writer) { + defer resp.Body.Close() + buf := make([]byte, 4096) + for { + n, readErr := resp.Body.Read(buf) + if n > 0 { + w.Write(buf[:n]) + w.Flush() + } + if readErr != nil { + return + } + } + }) + return nil + } + defer resp.Body.Close() for _, h := range []string{"Content-Type", "Cache-Control", "Set-Cookie"} { diff --git a/backend/cmd/auth-svc/main.go 
b/backend/cmd/auth-svc/main.go new file mode 100644 index 0000000..f579a41 --- /dev/null +++ b/backend/cmd/auth-svc/main.go @@ -0,0 +1,460 @@ +package main + +import ( + "context" + "database/sql" + "errors" + "fmt" + "log" + "os" + "regexp" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/logger" + "github.com/golang-jwt/jwt/v5" + "github.com/gooseek/backend/internal/auth" + "github.com/gooseek/backend/pkg/config" + _ "github.com/lib/pq" +) + +var ( + jwtSecret string + accessTokenTTL = 15 * time.Minute + refreshTokenTTL = 7 * 24 * time.Hour + emailRegex = regexp.MustCompile(`^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$`) +) + +type JWTClaims struct { + UserID string `json:"userId"` + Email string `json:"email"` + Role string `json:"role"` + Tier string `json:"tier"` + jwt.RegisteredClaims +} + +func main() { + cfg, err := config.Load() + if err != nil { + log.Fatal("Failed to load config:", err) + } + + jwtSecret = cfg.JWTSecret + if jwtSecret == "" { + jwtSecret = os.Getenv("JWT_SECRET") + if jwtSecret == "" { + log.Fatal("JWT_SECRET is required") + } + } + + if cfg.DatabaseURL == "" { + log.Fatal("DATABASE_URL is required") + } + + db, err := sql.Open("postgres", cfg.DatabaseURL) + if err != nil { + log.Fatal("Failed to connect to database:", err) + } + defer db.Close() + + db.SetMaxOpenConns(25) + db.SetMaxIdleConns(5) + db.SetConnMaxLifetime(5 * time.Minute) + + maxRetries := 30 + for i := 0; i < maxRetries; i++ { + if err := db.Ping(); err == nil { + break + } + log.Printf("Waiting for database (attempt %d/%d)...", i+1, maxRetries) + time.Sleep(2 * time.Second) + } + + authRepo := auth.NewRepository(db) + + ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) + if err := authRepo.RunMigrations(ctx); err != nil { + log.Printf("Migration warning: %v", err) + } + cancel() + log.Println("Auth database ready") + + go func() { + ticker := time.NewTicker(1 * 
time.Hour) + for range ticker.C { + authRepo.CleanupExpiredTokens(context.Background()) + } + }() + + app := fiber.New(fiber.Config{ + BodyLimit: 10 * 1024 * 1024, + ReadTimeout: 30 * time.Second, + WriteTimeout: 30 * time.Second, + }) + + app.Use(logger.New()) + app.Use(cors.New(cors.Config{ + AllowOrigins: "*", + AllowHeaders: "Origin, Content-Type, Accept, Authorization", + AllowMethods: "GET, POST, PUT, DELETE, OPTIONS", + AllowCredentials: true, + })) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + app.Get("/ready", func(c *fiber.Ctx) error { + if err := db.Ping(); err != nil { + return c.Status(503).JSON(fiber.Map{"status": "database unavailable"}) + } + return c.JSON(fiber.Map{"status": "ready"}) + }) + + api := app.Group("/api/v1/auth") + + api.Post("/register", func(c *fiber.Ctx) error { + var req auth.RegisterRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if req.Email == "" || req.Password == "" || req.Name == "" { + return c.Status(400).JSON(fiber.Map{"error": "Email, password and name are required"}) + } + + if !emailRegex.MatchString(req.Email) { + return c.Status(400).JSON(fiber.Map{"error": "Invalid email format"}) + } + + if len(req.Password) < 8 { + return c.Status(400).JSON(fiber.Map{"error": "Password must be at least 8 characters"}) + } + + user, err := authRepo.CreateUser(c.Context(), req.Email, req.Password, req.Name) + if err != nil { + if errors.Is(err, auth.ErrEmailExists) { + return c.Status(409).JSON(fiber.Map{"error": "Email already registered"}) + } + if errors.Is(err, auth.ErrWeakPassword) { + return c.Status(400).JSON(fiber.Map{"error": "Password too weak"}) + } + log.Printf("Register error: %v", err) + return c.Status(500).JSON(fiber.Map{"error": "Registration failed"}) + } + + tokens, err := generateTokens(c, authRepo, user) + if err != nil { + log.Printf("Token generation error: %v", err) + 
return c.Status(500).JSON(fiber.Map{"error": "Failed to generate tokens"}) + } + + return c.Status(201).JSON(tokens) + }) + + api.Post("/login", func(c *fiber.Ctx) error { + var req auth.LoginRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if req.Email == "" || req.Password == "" { + return c.Status(400).JSON(fiber.Map{"error": "Email and password are required"}) + } + + user, err := authRepo.ValidatePassword(c.Context(), req.Email, req.Password) + if err != nil { + if errors.Is(err, auth.ErrUserNotFound) || errors.Is(err, auth.ErrInvalidPassword) { + return c.Status(401).JSON(fiber.Map{"error": "Invalid email or password"}) + } + log.Printf("Login error: %v", err) + return c.Status(500).JSON(fiber.Map{"error": "Login failed"}) + } + + tokens, err := generateTokens(c, authRepo, user) + if err != nil { + log.Printf("Token generation error: %v", err) + return c.Status(500).JSON(fiber.Map{"error": "Failed to generate tokens"}) + } + + return c.JSON(tokens) + }) + + api.Post("/refresh", func(c *fiber.Ctx) error { + var req auth.RefreshRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if req.RefreshToken == "" { + return c.Status(400).JSON(fiber.Map{"error": "Refresh token is required"}) + } + + rt, err := authRepo.ValidateRefreshToken(c.Context(), req.RefreshToken) + if err != nil { + if errors.Is(err, auth.ErrTokenExpired) { + return c.Status(401).JSON(fiber.Map{"error": "Refresh token expired"}) + } + if errors.Is(err, auth.ErrTokenInvalid) { + return c.Status(401).JSON(fiber.Map{"error": "Invalid refresh token"}) + } + log.Printf("Refresh error: %v", err) + return c.Status(500).JSON(fiber.Map{"error": "Token refresh failed"}) + } + + authRepo.RevokeRefreshToken(c.Context(), req.RefreshToken) + + user, err := authRepo.GetUserByID(c.Context(), rt.UserID) + if err != nil { + return 
c.Status(401).JSON(fiber.Map{"error": "User not found"}) + } + + tokens, err := generateTokens(c, authRepo, user) + if err != nil { + log.Printf("Token generation error: %v", err) + return c.Status(500).JSON(fiber.Map{"error": "Failed to generate tokens"}) + } + + return c.JSON(tokens) + }) + + api.Post("/logout", jwtMiddleware, func(c *fiber.Ctx) error { + var req auth.RefreshRequest + if err := c.BodyParser(&req); err == nil && req.RefreshToken != "" { + authRepo.RevokeRefreshToken(c.Context(), req.RefreshToken) + } + return c.JSON(fiber.Map{"message": "Logged out successfully"}) + }) + + api.Post("/logout-all", jwtMiddleware, func(c *fiber.Ctx) error { + userID := c.Locals("userId").(string) + authRepo.RevokeAllRefreshTokens(c.Context(), userID) + return c.JSON(fiber.Map{"message": "Logged out from all devices"}) + }) + + api.Get("/validate", func(c *fiber.Ctx) error { + tokenString := extractToken(c) + if tokenString == "" { + return c.JSON(auth.ValidateResponse{Valid: false}) + } + + claims, err := validateJWT(tokenString) + if err != nil { + return c.JSON(auth.ValidateResponse{Valid: false}) + } + + user, err := authRepo.GetUserByID(c.Context(), claims.UserID) + if err != nil { + return c.JSON(auth.ValidateResponse{Valid: false}) + } + + return c.JSON(auth.ValidateResponse{ + Valid: true, + User: user, + }) + }) + + api.Get("/me", jwtMiddleware, func(c *fiber.Ctx) error { + userID := c.Locals("userId").(string) + + user, err := authRepo.GetUserByID(c.Context(), userID) + if err != nil { + return c.Status(404).JSON(fiber.Map{"error": "User not found"}) + } + + return c.JSON(user) + }) + + api.Put("/me", jwtMiddleware, func(c *fiber.Ctx) error { + userID := c.Locals("userId").(string) + + var req auth.UpdateProfileRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if err := authRepo.UpdateProfile(c.Context(), userID, req.Name, req.Avatar); err != nil { + return 
c.Status(500).JSON(fiber.Map{"error": "Failed to update profile"}) + } + + user, _ := authRepo.GetUserByID(c.Context(), userID) + return c.JSON(user) + }) + + api.Post("/change-password", jwtMiddleware, func(c *fiber.Ctx) error { + userID := c.Locals("userId").(string) + + var req auth.ChangePasswordRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if req.CurrentPassword == "" || req.NewPassword == "" { + return c.Status(400).JSON(fiber.Map{"error": "Current and new passwords are required"}) + } + + if len(req.NewPassword) < 8 { + return c.Status(400).JSON(fiber.Map{"error": "New password must be at least 8 characters"}) + } + + user, err := authRepo.GetUserByID(c.Context(), userID) + if err != nil { + return c.Status(404).JSON(fiber.Map{"error": "User not found"}) + } + + _, err = authRepo.ValidatePassword(c.Context(), user.Email, req.CurrentPassword) + if err != nil { + return c.Status(401).JSON(fiber.Map{"error": "Current password is incorrect"}) + } + + if err := authRepo.UpdatePassword(c.Context(), userID, req.NewPassword); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to change password"}) + } + + authRepo.RevokeAllRefreshTokens(c.Context(), userID) + + return c.JSON(fiber.Map{"message": "Password changed successfully"}) + }) + + api.Post("/forgot-password", func(c *fiber.Ctx) error { + var req auth.ResetPasswordRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if req.Email == "" { + return c.Status(400).JSON(fiber.Map{"error": "Email is required"}) + } + + user, err := authRepo.GetUserByEmail(c.Context(), req.Email) + if err == nil && user != nil { + token, err := authRepo.CreatePasswordResetToken(c.Context(), user.ID) + if err == nil { + log.Printf("Password reset token for %s: %s", req.Email, token.Token) + } + } + + return c.JSON(fiber.Map{"message": "If the email 
exists, a reset link has been sent"}) + }) + + api.Post("/reset-password", func(c *fiber.Ctx) error { + var req auth.ResetPasswordConfirm + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + + if req.Token == "" || req.NewPassword == "" { + return c.Status(400).JSON(fiber.Map{"error": "Token and new password are required"}) + } + + if len(req.NewPassword) < 8 { + return c.Status(400).JSON(fiber.Map{"error": "Password must be at least 8 characters"}) + } + + prt, err := authRepo.ValidatePasswordResetToken(c.Context(), req.Token) + if err != nil { + if errors.Is(err, auth.ErrTokenExpired) { + return c.Status(400).JSON(fiber.Map{"error": "Reset token has expired"}) + } + return c.Status(400).JSON(fiber.Map{"error": "Invalid reset token"}) + } + + if err := authRepo.UpdatePassword(c.Context(), prt.UserID, req.NewPassword); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to reset password"}) + } + + authRepo.MarkPasswordResetTokenUsed(c.Context(), prt.ID) + authRepo.RevokeAllRefreshTokens(c.Context(), prt.UserID) + + return c.JSON(fiber.Map{"message": "Password has been reset successfully"}) + }) + + port := config.GetEnvInt("AUTH_SVC_PORT", 3050) + log.Printf("auth-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func generateTokens(c *fiber.Ctx, repo *auth.Repository, user *auth.User) (*auth.TokenResponse, error) { + claims := JWTClaims{ + UserID: user.ID, + Email: user.Email, + Role: user.Role, + Tier: user.Tier, + RegisteredClaims: jwt.RegisteredClaims{ + Subject: user.ID, + IssuedAt: jwt.NewNumericDate(time.Now()), + ExpiresAt: jwt.NewNumericDate(time.Now().Add(accessTokenTTL)), + Issuer: "gooseek", + }, + } + + token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) + accessToken, err := token.SignedString([]byte(jwtSecret)) + if err != nil { + return nil, err + } + + userAgent := c.Get("User-Agent") + ip := c.IP() + + refreshToken, err 
:= repo.CreateRefreshToken(c.Context(), user.ID, userAgent, ip, refreshTokenTTL) + if err != nil { + return nil, err + } + + return &auth.TokenResponse{ + AccessToken: accessToken, + RefreshToken: refreshToken.Token, + ExpiresIn: int(accessTokenTTL.Seconds()), + TokenType: "Bearer", + User: user, + }, nil +} + +func validateJWT(tokenString string) (*JWTClaims, error) { + token, err := jwt.ParseWithClaims(tokenString, &JWTClaims{}, func(token *jwt.Token) (interface{}, error) { + if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { + return nil, fmt.Errorf("unexpected signing method: %v", token.Header["alg"]) + } + return []byte(jwtSecret), nil + }) + + if err != nil { + return nil, err + } + + claims, ok := token.Claims.(*JWTClaims) + if !ok || !token.Valid { + return nil, fmt.Errorf("invalid token") + } + + return claims, nil +} + +func extractToken(c *fiber.Ctx) string { + auth := c.Get("Authorization") + if len(auth) > 7 && auth[:7] == "Bearer " { + return auth[7:] + } + return c.Query("token") +} + +func jwtMiddleware(c *fiber.Ctx) error { + tokenString := extractToken(c) + if tokenString == "" { + return c.Status(401).JSON(fiber.Map{"error": "Missing authorization token"}) + } + + claims, err := validateJWT(tokenString) + if err != nil { + return c.Status(401).JSON(fiber.Map{"error": "Invalid token"}) + } + + c.Locals("userId", claims.UserID) + c.Locals("userEmail", claims.Email) + c.Locals("userRole", claims.Role) + c.Locals("userTier", claims.Tier) + + return c.Next() +} diff --git a/backend/cmd/browser-svc/main.go b/backend/cmd/browser-svc/main.go deleted file mode 100644 index 2902f05..0000000 --- a/backend/cmd/browser-svc/main.go +++ /dev/null @@ -1,53 +0,0 @@ -package main - -import ( - "context" - "log" - "os" - "os/signal" - "strconv" - "syscall" - "time" - - "github.com/gooseek/backend/internal/computer/browser" -) - -func main() { - port := 3050 - if p := os.Getenv("PORT"); p != "" { - if parsed, err := strconv.Atoi(p); err == nil { - port = 
parsed - } - } - if p := os.Getenv("BROWSER_SVC_PORT"); p != "" { - if parsed, err := strconv.Atoi(p); err == nil { - port = parsed - } - } - - cfg := browser.ServerConfig{ - Port: port, - MaxSessions: 20, - SessionTimeout: 30 * time.Minute, - CleanupInterval: 5 * time.Minute, - } - - server := browser.NewBrowserServer(cfg) - - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - sigCh := make(chan os.Signal, 1) - signal.Notify(sigCh, syscall.SIGINT, syscall.SIGTERM) - - go func() { - <-sigCh - log.Println("[browser-svc] Shutting down...") - cancel() - }() - - log.Printf("[browser-svc] Starting browser service on port %d", port) - if err := server.Start(ctx); err != nil { - log.Fatalf("[browser-svc] Server error: %v", err) - } -} diff --git a/backend/cmd/chat-svc/main.go b/backend/cmd/chat-svc/main.go index 1d0f741..048ca62 100644 --- a/backend/cmd/chat-svc/main.go +++ b/backend/cmd/chat-svc/main.go @@ -16,6 +16,7 @@ import ( "github.com/gofiber/fiber/v2/middleware/cors" "github.com/gofiber/fiber/v2/middleware/logger" "github.com/gooseek/backend/pkg/config" + "github.com/gooseek/backend/pkg/middleware" ) type ChatRequest struct { @@ -93,7 +94,13 @@ func main() { }) }) - app.Post("/api/v1/chat", func(c *fiber.Ctx) error { + chat := app.Group("/api/v1/chat", middleware.JWT(middleware.JWTConfig{ + Secret: cfg.JWTSecret, + AuthSvcURL: cfg.AuthSvcURL, + AllowGuest: false, + })) + + chat.Post("/", func(c *fiber.Ctx) error { var req ChatRequest if err := c.BodyParser(&req); err != nil { return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) diff --git a/backend/cmd/collection-svc/main.go b/backend/cmd/collection-svc/main.go index 5f0bfc3..5a2bff9 100644 --- a/backend/cmd/collection-svc/main.go +++ b/backend/cmd/collection-svc/main.go @@ -153,8 +153,11 @@ func main() { return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) } - items, err := collectionRepo.GetItems(c.Context(), collectionID) + items, err := 
collectionRepo.GetItems(c.Context(), collectionID, userID) if err != nil { + if err == db.ErrForbidden { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } return c.Status(500).JSON(fiber.Map{"error": "Failed to get items"}) } collection.Items = items @@ -195,7 +198,10 @@ func main() { collection.IsPublic = req.IsPublic collection.ContextEnabled = req.ContextEnabled - if err := collectionRepo.Update(c.Context(), collection); err != nil { + if err := collectionRepo.Update(c.Context(), collection, userID); err != nil { + if err == db.ErrNotFound { + return c.Status(404).JSON(fiber.Map{"error": "Collection not found"}) + } return c.Status(500).JSON(fiber.Map{"error": "Failed to update collection"}) } @@ -210,16 +216,10 @@ func main() { collectionID := c.Params("id") userID := middleware.GetUserID(c) - collection, err := collectionRepo.GetByID(c.Context(), collectionID) - if err != nil || collection == nil { - return c.Status(404).JSON(fiber.Map{"error": "Collection not found"}) - } - - if collection.UserID != userID { - return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) - } - - if err := collectionRepo.Delete(c.Context(), collectionID); err != nil { + if err := collectionRepo.Delete(c.Context(), collectionID, userID); err != nil { + if err == db.ErrNotFound { + return c.Status(404).JSON(fiber.Map{"error": "Collection not found"}) + } return c.Status(500).JSON(fiber.Map{"error": "Failed to delete collection"}) } @@ -293,7 +293,10 @@ func main() { return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) } - if err := collectionRepo.RemoveItem(c.Context(), itemID); err != nil { + if err := collectionRepo.RemoveItem(c.Context(), itemID, userID); err != nil { + if err == db.ErrNotFound { + return c.Status(404).JSON(fiber.Map{"error": "Item not found"}) + } return c.Status(500).JSON(fiber.Map{"error": "Failed to remove item"}) } @@ -321,7 +324,7 @@ func main() { return c.JSON(fiber.Map{"context": "", "enabled": false}) } - context, err := 
collectionRepo.GetCollectionContext(c.Context(), collectionID) + context, err := collectionRepo.GetCollectionContext(c.Context(), collectionID, userID) if err != nil { return c.Status(500).JSON(fiber.Map{"error": "Failed to get context"}) } diff --git a/backend/cmd/computer-svc/main.go b/backend/cmd/computer-svc/main.go deleted file mode 100644 index 84e95e0..0000000 --- a/backend/cmd/computer-svc/main.go +++ /dev/null @@ -1,552 +0,0 @@ -package main - -import ( - "bufio" - "context" - "encoding/json" - "fmt" - "log" - "os" - "os/signal" - "syscall" - "time" - - "github.com/gooseek/backend/internal/computer" - "github.com/gooseek/backend/internal/computer/connectors" - "github.com/gooseek/backend/internal/db" - "github.com/gooseek/backend/internal/llm" - "github.com/gooseek/backend/pkg/config" - "github.com/gooseek/backend/pkg/middleware" - "github.com/gofiber/fiber/v2" - "github.com/gofiber/fiber/v2/middleware/cors" - "github.com/gofiber/fiber/v2/middleware/recover" -) - -func main() { - cfg, err := config.Load() - if err != nil { - log.Fatalf("Failed to load config: %v", err) - } - - var database *db.PostgresDB - maxRetries := 30 - for i := 0; i < maxRetries; i++ { - database, err = db.NewPostgresDB(cfg.DatabaseURL) - if err == nil { - log.Println("PostgreSQL connected successfully") - break - } - log.Printf("Waiting for database (attempt %d/%d): %v", i+1, maxRetries, err) - time.Sleep(2 * time.Second) - } - if err != nil { - log.Fatalf("Failed to connect to database after %d attempts: %v", maxRetries, err) - } - - taskRepo := db.NewComputerTaskRepo(database.DB()) - memoryRepo := db.NewComputerMemoryRepo(database.DB()) - artifactRepo := db.NewComputerArtifactRepo(database.DB()) - - if err := taskRepo.Migrate(); err != nil { - log.Printf("Task repo migration warning: %v", err) - } - if err := memoryRepo.Migrate(); err != nil { - log.Printf("Memory repo migration warning: %v", err) - } - if err := artifactRepo.Migrate(); err != nil { - log.Printf("Artifact repo 
migration warning: %v", err) - } - - registry := llm.NewModelRegistry() - setupModels(registry, cfg) - - connectorHub := connectors.NewConnectorHub() - setupConnectors(connectorHub, cfg) - - comp := computer.NewComputer(computer.ComputerConfig{ - MaxParallelTasks: 10, - MaxSubTasks: 20, - TaskTimeout: 30 * time.Minute, - SubTaskTimeout: 5 * time.Minute, - TotalBudget: 1.0, - EnableSandbox: true, - EnableScheduling: true, - SandboxImage: getEnv("SANDBOX_IMAGE", "gooseek/sandbox:latest"), - }, computer.Dependencies{ - Registry: registry, - TaskRepo: taskRepo, - MemoryRepo: memoryRepo, - }) - - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - comp.StartScheduler(ctx) - - app := fiber.New(fiber.Config{ - ErrorHandler: func(c *fiber.Ctx, err error) error { - return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{ - "error": err.Error(), - }) - }, - }) - - app.Use(recover.New()) - app.Use(cors.New(cors.Config{ - AllowOrigins: "*", - AllowHeaders: "Origin, Content-Type, Accept, Authorization", - AllowMethods: "GET, POST, PUT, DELETE, OPTIONS", - })) - app.Use(middleware.Logging(middleware.LoggingConfig{})) - - app.Get("/health", func(c *fiber.Ctx) error { - return c.JSON(fiber.Map{ - "status": "ok", - "service": "computer-svc", - "models": registry.Count(), - }) - }) - - api := app.Group("/api/v1/computer") - - api.Post("/execute", func(c *fiber.Ctx) error { - var req struct { - Query string `json:"query"` - UserID string `json:"userId"` - Options computer.ExecuteOptions `json:"options"` - } - - if err := c.BodyParser(&req); err != nil { - return c.Status(400).JSON(fiber.Map{"error": "invalid request body"}) - } - - if req.Query == "" { - return c.Status(400).JSON(fiber.Map{"error": "query is required"}) - } - - if req.UserID == "" || req.UserID == "anonymous" { - req.UserID = "00000000-0000-0000-0000-000000000000" - } - - task, err := comp.Execute(c.Context(), req.UserID, req.Query, req.Options) - if err != nil { - return 
c.Status(500).JSON(fiber.Map{"error": err.Error()}) - } - - return c.JSON(task) - }) - - api.Get("/tasks", func(c *fiber.Ctx) error { - userID := c.Query("userId", "") - limit := c.QueryInt("limit", 20) - offset := c.QueryInt("offset", 0) - - if userID == "" || userID == "anonymous" { - userID = "00000000-0000-0000-0000-000000000000" - } - - tasks, err := comp.GetUserTasks(c.Context(), userID, limit, offset) - if err != nil { - return c.Status(500).JSON(fiber.Map{"error": err.Error()}) - } - - return c.JSON(fiber.Map{ - "tasks": tasks, - "count": len(tasks), - }) - }) - - api.Get("/tasks/:id", func(c *fiber.Ctx) error { - taskID := c.Params("id") - - task, err := comp.GetStatus(c.Context(), taskID) - if err != nil { - return c.Status(404).JSON(fiber.Map{"error": "task not found"}) - } - - return c.JSON(task) - }) - - api.Get("/tasks/:id/stream", func(c *fiber.Ctx) error { - taskID := c.Params("id") - - c.Set("Content-Type", "text/event-stream") - c.Set("Cache-Control", "no-cache") - c.Set("Connection", "keep-alive") - c.Set("Transfer-Encoding", "chunked") - - eventCh, err := comp.Stream(c.Context(), taskID) - if err != nil { - return c.Status(500).JSON(fiber.Map{"error": err.Error()}) - } - - c.Context().SetBodyStreamWriter(func(w *bufio.Writer) { - for event := range eventCh { - data, _ := json.Marshal(event) - fmt.Fprintf(w, "data: %s\n\n", data) - w.Flush() - } - }) - - return nil - }) - - api.Post("/tasks/:id/resume", func(c *fiber.Ctx) error { - taskID := c.Params("id") - - var req struct { - UserInput string `json:"userInput"` - } - - if err := c.BodyParser(&req); err != nil { - return c.Status(400).JSON(fiber.Map{"error": "invalid request body"}) - } - - if err := comp.Resume(c.Context(), taskID, req.UserInput); err != nil { - return c.Status(500).JSON(fiber.Map{"error": err.Error()}) - } - - return c.JSON(fiber.Map{"status": "resumed"}) - }) - - api.Delete("/tasks/:id", func(c *fiber.Ctx) error { - taskID := c.Params("id") - - if err := 
comp.Cancel(c.Context(), taskID); err != nil { - return c.Status(500).JSON(fiber.Map{"error": err.Error()}) - } - - return c.JSON(fiber.Map{"status": "cancelled"}) - }) - - api.Get("/tasks/:id/artifacts", func(c *fiber.Ctx) error { - taskID := c.Params("id") - - artifacts, err := artifactRepo.GetByTaskID(c.Context(), taskID) - if err != nil { - return c.Status(500).JSON(fiber.Map{"error": err.Error()}) - } - - return c.JSON(fiber.Map{ - "artifacts": artifacts, - "count": len(artifacts), - }) - }) - - api.Get("/artifacts/:id", func(c *fiber.Ctx) error { - artifactID := c.Params("id") - - artifact, err := artifactRepo.GetByID(c.Context(), artifactID) - if err != nil { - return c.Status(404).JSON(fiber.Map{"error": "artifact not found"}) - } - - return c.JSON(artifact) - }) - - api.Get("/artifacts/:id/download", func(c *fiber.Ctx) error { - artifactID := c.Params("id") - - artifact, err := artifactRepo.GetByID(c.Context(), artifactID) - if err != nil { - return c.Status(404).JSON(fiber.Map{"error": "artifact not found"}) - } - - if artifact.MimeType != "" { - c.Set("Content-Type", artifact.MimeType) - } else { - c.Set("Content-Type", "application/octet-stream") - } - c.Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", artifact.Name)) - - return c.Send(artifact.Content) - }) - - api.Get("/models", func(c *fiber.Ctx) error { - models := registry.GetAll() - return c.JSON(fiber.Map{ - "models": models, - "count": len(models), - }) - }) - - api.Get("/connectors", func(c *fiber.Ctx) error { - info := connectorHub.GetInfo() - return c.JSON(fiber.Map{ - "connectors": info, - "count": len(info), - }) - }) - - api.Post("/connectors/:id/execute", func(c *fiber.Ctx) error { - connectorID := c.Params("id") - - var req struct { - Action string `json:"action"` - Params map[string]interface{} `json:"params"` - } - - if err := c.BodyParser(&req); err != nil { - return c.Status(400).JSON(fiber.Map{"error": "invalid request body"}) - } - - result, err := 
connectorHub.Execute(c.Context(), connectorID, req.Action, req.Params) - if err != nil { - return c.Status(500).JSON(fiber.Map{"error": err.Error()}) - } - - return c.JSON(result) - }) - - port := getEnv("COMPUTER_SVC_PORT", "3030") - addr := ":" + port - - go func() { - log.Printf("Computer service starting on %s", addr) - if err := app.Listen(addr); err != nil { - log.Fatalf("Failed to start server: %v", err) - } - }() - - quit := make(chan os.Signal, 1) - signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM) - <-quit - - log.Println("Shutting down...") - comp.StopScheduler() - app.Shutdown() -} - -func setupModels(registry *llm.ModelRegistry, cfg *config.Config) { - // Timeweb Cloud AI (приоритетный провайдер для России) - if cfg.TimewebAgentAccessID != "" && cfg.TimewebAPIKey != "" { - timewebClient, err := llm.NewTimewebClient(llm.TimewebConfig{ - ProviderID: "timeweb", - ModelKey: "gpt-4o", - BaseURL: cfg.TimewebAPIBaseURL, - AgentAccessID: cfg.TimewebAgentAccessID, - APIKey: cfg.TimewebAPIKey, - ProxySource: cfg.TimewebProxySource, - }) - if err == nil { - registry.Register(llm.ModelSpec{ - ID: "timeweb-gpt-4o", - Provider: "timeweb", - Model: "gpt-4o", - Capabilities: []llm.ModelCapability{llm.CapSearch, llm.CapFast, llm.CapVision, llm.CapCoding, llm.CapCreative, llm.CapReasoning}, - CostPer1K: 0.005, - MaxContext: 128000, - MaxTokens: 16384, - Priority: 0, - Description: "GPT-4o via Timeweb Cloud AI", - }, timewebClient) - log.Println("Timeweb GPT-4o registered") - } else { - log.Printf("Failed to create Timeweb client: %v", err) - } - - timewebMiniClient, err := llm.NewTimewebClient(llm.TimewebConfig{ - ProviderID: "timeweb", - ModelKey: "gpt-4o-mini", - BaseURL: cfg.TimewebAPIBaseURL, - AgentAccessID: cfg.TimewebAgentAccessID, - APIKey: cfg.TimewebAPIKey, - ProxySource: cfg.TimewebProxySource, - }) - if err == nil { - registry.Register(llm.ModelSpec{ - ID: "timeweb-gpt-4o-mini", - Provider: "timeweb", - Model: "gpt-4o-mini", - Capabilities: 
[]llm.ModelCapability{llm.CapFast, llm.CapCoding}, - CostPer1K: 0.00015, - MaxContext: 128000, - MaxTokens: 16384, - Priority: 0, - Description: "GPT-4o-mini via Timeweb Cloud AI", - }, timewebMiniClient) - log.Println("Timeweb GPT-4o-mini registered") - } - } - - // OpenAI прямой (fallback если Timeweb недоступен) - if cfg.OpenAIAPIKey != "" { - openaiClient, err := llm.NewOpenAIClient(llm.ProviderConfig{ - ProviderID: "openai", - ModelKey: "gpt-4o", - APIKey: cfg.OpenAIAPIKey, - }) - if err == nil { - registry.Register(llm.ModelSpec{ - ID: "gpt-4o", - Provider: "openai", - Model: "gpt-4o", - Capabilities: []llm.ModelCapability{llm.CapSearch, llm.CapFast, llm.CapVision, llm.CapCoding, llm.CapCreative}, - CostPer1K: 0.005, - MaxContext: 128000, - MaxTokens: 16384, - Priority: 10, - }, openaiClient) - } - - miniClient, err := llm.NewOpenAIClient(llm.ProviderConfig{ - ProviderID: "openai", - ModelKey: "gpt-4o-mini", - APIKey: cfg.OpenAIAPIKey, - }) - if err == nil { - registry.Register(llm.ModelSpec{ - ID: "gpt-4o-mini", - Provider: "openai", - Model: "gpt-4o-mini", - Capabilities: []llm.ModelCapability{llm.CapFast, llm.CapCoding}, - CostPer1K: 0.00015, - MaxContext: 128000, - MaxTokens: 16384, - Priority: 10, - }, miniClient) - } - } - - if cfg.AnthropicAPIKey != "" { - opusClient, err := llm.NewAnthropicClient(llm.ProviderConfig{ - ProviderID: "anthropic", - ModelKey: "claude-3-opus-20240229", - APIKey: cfg.AnthropicAPIKey, - }) - if err == nil { - registry.Register(llm.ModelSpec{ - ID: "claude-3-opus", - Provider: "anthropic", - Model: "claude-3-opus-20240229", - Capabilities: []llm.ModelCapability{llm.CapReasoning, llm.CapCoding, llm.CapCreative, llm.CapLongContext}, - CostPer1K: 0.015, - MaxContext: 200000, - MaxTokens: 4096, - Priority: 1, - }, opusClient) - } - - sonnetClient, err := llm.NewAnthropicClient(llm.ProviderConfig{ - ProviderID: "anthropic", - ModelKey: "claude-3-5-sonnet-20241022", - APIKey: cfg.AnthropicAPIKey, - }) - if err == nil { - 
registry.Register(llm.ModelSpec{ - ID: "claude-3-sonnet", - Provider: "anthropic", - Model: "claude-3-5-sonnet-20241022", - Capabilities: []llm.ModelCapability{llm.CapCoding, llm.CapCreative, llm.CapFast}, - CostPer1K: 0.003, - MaxContext: 200000, - MaxTokens: 8192, - Priority: 1, - }, sonnetClient) - } - } - - if cfg.GeminiAPIKey != "" { - geminiClient, err := llm.NewGeminiClient(llm.ProviderConfig{ - ProviderID: "gemini", - ModelKey: "gemini-1.5-pro", - APIKey: cfg.GeminiAPIKey, - }) - if err == nil { - registry.Register(llm.ModelSpec{ - ID: "gemini-1.5-pro", - Provider: "gemini", - Model: "gemini-1.5-pro", - Capabilities: []llm.ModelCapability{llm.CapLongContext, llm.CapSearch, llm.CapVision, llm.CapMath}, - CostPer1K: 0.00125, - MaxContext: 2000000, - MaxTokens: 8192, - Priority: 1, - }, geminiClient) - } - - flashClient, err := llm.NewGeminiClient(llm.ProviderConfig{ - ProviderID: "gemini", - ModelKey: "gemini-1.5-flash", - APIKey: cfg.GeminiAPIKey, - }) - if err == nil { - registry.Register(llm.ModelSpec{ - ID: "gemini-1.5-flash", - Provider: "gemini", - Model: "gemini-1.5-flash", - Capabilities: []llm.ModelCapability{llm.CapFast, llm.CapVision}, - CostPer1K: 0.000075, - MaxContext: 1000000, - MaxTokens: 8192, - Priority: 2, - }, flashClient) - } - } - - log.Printf("Registered %d models", registry.Count()) -} - -func setupConnectors(hub *connectors.ConnectorHub, cfg *config.Config) { - if smtpHost := getEnv("SMTP_HOST", ""); smtpHost != "" { - emailConn := connectors.NewEmailConnector(connectors.EmailConfig{ - SMTPHost: smtpHost, - SMTPPort: getEnvInt("SMTP_PORT", 587), - Username: getEnv("SMTP_USERNAME", ""), - Password: getEnv("SMTP_PASSWORD", ""), - FromAddress: getEnv("SMTP_FROM", ""), - FromName: getEnv("SMTP_FROM_NAME", "GooSeek Computer"), - UseTLS: true, - AllowHTML: true, - }) - hub.Register(emailConn) - log.Println("Email connector registered") - } - - if botToken := getEnv("TELEGRAM_BOT_TOKEN", ""); botToken != "" { - tgConn := 
connectors.NewTelegramConnector(connectors.TelegramConfig{ - BotToken: botToken, - }) - hub.Register(tgConn) - log.Println("Telegram connector registered") - } - - webhookConn := connectors.NewWebhookConnector(connectors.WebhookConfig{ - Timeout: 30 * time.Second, - MaxRetries: 3, - }) - hub.Register(webhookConn) - log.Println("Webhook connector registered") - - if s3Endpoint := getEnv("S3_ENDPOINT", ""); s3Endpoint != "" { - storageConn, err := connectors.NewStorageConnector(connectors.StorageConfig{ - Endpoint: s3Endpoint, - AccessKeyID: getEnv("S3_ACCESS_KEY", ""), - SecretAccessKey: getEnv("S3_SECRET_KEY", ""), - BucketName: getEnv("S3_BUCKET", "gooseek-artifacts"), - UseSSL: getEnv("S3_USE_SSL", "true") == "true", - Region: getEnv("S3_REGION", "us-east-1"), - PublicURL: getEnv("S3_PUBLIC_URL", ""), - }) - if err == nil { - hub.Register(storageConn) - log.Println("Storage connector registered") - } - } -} - -func getEnv(key, defaultValue string) string { - if value := os.Getenv(key); value != "" { - return value - } - return defaultValue -} - -func getEnvInt(key string, defaultValue int) int { - if value := os.Getenv(key); value != "" { - var i int - fmt.Sscanf(value, "%d", &i) - return i - } - return defaultValue -} diff --git a/backend/cmd/discover-svc/main.go b/backend/cmd/discover-svc/main.go index f10ab2d..865327d 100644 --- a/backend/cmd/discover-svc/main.go +++ b/backend/cmd/discover-svc/main.go @@ -18,6 +18,7 @@ import ( "github.com/gooseek/backend/internal/search" "github.com/gooseek/backend/pkg/cache" "github.com/gooseek/backend/pkg/config" + "github.com/gooseek/backend/pkg/middleware" ) type DigestCitation struct { @@ -237,7 +238,9 @@ func main() { ) }) - app.Get("/api/v1/discover/digest", func(c *fiber.Ctx) error { + discover := app.Group("/api/v1/discover") + + discover.Get("/digest", func(c *fiber.Ctx) error { url := c.Query("url") if url != "" { digest := store.GetDigestByURL(url) @@ -263,7 +266,13 @@ func main() { return c.JSON(digest) }) - 
app.Post("/api/v1/discover/digest", func(c *fiber.Ctx) error { + discoverAuth := app.Group("/api/v1/discover", middleware.JWT(middleware.JWTConfig{ + Secret: cfg.JWTSecret, + AuthSvcURL: cfg.AuthSvcURL, + AllowGuest: false, + })) + + discoverAuth.Post("/digest", func(c *fiber.Ctx) error { var d Digest if err := c.BodyParser(&d); err != nil { return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) @@ -277,7 +286,7 @@ func main() { return c.Status(204).Send(nil) }) - app.Delete("/api/v1/discover/digest", func(c *fiber.Ctx) error { + discoverAuth.Delete("/digest", func(c *fiber.Ctx) error { topic := c.Query("topic") region := c.Query("region") @@ -289,7 +298,7 @@ func main() { return c.JSON(fiber.Map{"deleted": deleted}) }) - app.Get("/api/v1/discover/article-summary", func(c *fiber.Ctx) error { + discover.Get("/article-summary", func(c *fiber.Ctx) error { url := c.Query("url") if url == "" { return c.Status(400).JSON(fiber.Map{"message": "url required"}) @@ -320,7 +329,7 @@ func main() { return c.JSON(fiber.Map{"events": summary.Events}) }) - app.Post("/api/v1/discover/article-summary", func(c *fiber.Ctx) error { + discoverAuth.Post("/article-summary", func(c *fiber.Ctx) error { var body struct { URL string `json:"url"` Events []string `json:"events"` @@ -354,7 +363,7 @@ func main() { return c.Status(204).Send(nil) }) - app.Delete("/api/v1/discover/article-summary", func(c *fiber.Ctx) error { + discoverAuth.Delete("/article-summary", func(c *fiber.Ctx) error { url := c.Query("url") if url == "" { return c.Status(400).JSON(fiber.Map{"message": "url required"}) @@ -365,7 +374,7 @@ func main() { return c.Status(204).Send(nil) }) - app.Get("/api/v1/discover/search", func(c *fiber.Ctx) error { + discover.Get("/search", func(c *fiber.Ctx) error { q := c.Query("q") if q == "" { return c.Status(400).JSON(fiber.Map{"message": "Query q is required"}) @@ -386,14 +395,38 @@ func main() { return c.JSON(fiber.Map{"results": result.Results}) }) - 
app.Get("/api/v1/discover", func(c *fiber.Ctx) error { + discover.Get("/", func(c *fiber.Ctx) error { topic := c.Query("topic", "tech") region := c.Query("region", "world") + page := c.QueryInt("page", 1) + limit := c.QueryInt("limit", 10) + + if page < 1 { + page = 1 + } + if limit < 1 || limit > 30 { + limit = 10 + } digests := store.GetDigests(topic, region) if len(digests) > 0 { - blogs := make([]fiber.Map, len(digests)) - for i, d := range digests { + start := (page - 1) * limit + end := start + limit + if start >= len(digests) { + return c.JSON(fiber.Map{ + "blogs": []fiber.Map{}, + "hasMore": false, + "page": page, + "total": len(digests), + }) + } + if end > len(digests) { + end = len(digests) + } + + pagedDigests := digests[start:end] + blogs := make([]fiber.Map, len(pagedDigests)) + for i, d := range pagedDigests { content := d.ShortDescription if content == "" && len(d.SummaryRu) > 200 { content = d.SummaryRu[:200] + "…" @@ -410,7 +443,12 @@ func main() { "digestId": fmt.Sprintf("%s:%s:%s", d.Topic, d.Region, d.ClusterTitle), } } - return c.JSON(fiber.Map{"blogs": blogs}) + return c.JSON(fiber.Map{ + "blogs": blogs, + "hasMore": end < len(digests), + "page": page, + "total": len(digests), + }) } ctx, cancel := context.WithTimeout(context.Background(), cfg.SearchTimeout*2) @@ -419,15 +457,15 @@ func main() { queries := getQueriesForTopic(topic, region) results, err := searchClient.Search(ctx, queries[0], &search.SearchOptions{ Categories: []string{"news"}, - PageNo: 1, + PageNo: page, }) if err != nil { return c.Status(503).JSON(fiber.Map{"message": "Search failed"}) } - blogs := make([]fiber.Map, 0, 7) + blogs := make([]fiber.Map, 0, limit) for i, r := range results.Results { - if i >= 7 { + if i >= limit { break } thumbnail := r.Thumbnail @@ -454,7 +492,12 @@ func main() { }) } - return c.JSON(fiber.Map{"blogs": blogs}) + hasMore := len(results.Results) > limit + return c.JSON(fiber.Map{ + "blogs": blogs, + "hasMore": hasMore, + "page": page, + }) }) 
port := getEnvInt("DISCOVER_SVC_PORT", 3002) @@ -466,19 +509,54 @@ func getQueriesForTopic(topic, region string) []string { queries := map[string]map[string][]string{ "tech": { "world": {"technology news AI innovation"}, - "russia": {"технологии новости IT инновации"}, + "russia": {"технологии новости IT инновации искусственный интеллект"}, "eu": {"technology news Europe AI"}, }, "finance": { - "world": {"finance news economy markets"}, - "russia": {"финансы новости экономика рынки"}, - "eu": {"finance news Europe economy"}, + "world": {"finance news economy markets stocks"}, + "russia": {"финансы новости экономика рынки акции"}, + "eu": {"finance news Europe economy markets"}, }, "sports": { - "world": {"sports news football Olympics"}, - "russia": {"спорт новости футбол хоккей"}, + "world": {"sports news football basketball Olympics"}, + "russia": {"спорт новости футбол хоккей КХЛ РПЛ"}, "eu": {"sports news football Champions League"}, }, + "politics": { + "world": {"politics news government elections policy"}, + "russia": {"политика новости Россия правительство законы"}, + "eu": {"politics news Europe EU parliament"}, + }, + "science": { + "world": {"science news research discovery space"}, + "russia": {"наука новости исследования открытия космос"}, + "eu": {"science news Europe research discovery"}, + }, + "health": { + "world": {"health news medicine medical research"}, + "russia": {"здоровье новости медицина лечение"}, + "eu": {"health news Europe medicine healthcare"}, + }, + "entertainment": { + "world": {"entertainment news movies music celebrities"}, + "russia": {"развлечения новости кино музыка шоу-бизнес"}, + "eu": {"entertainment news Europe movies music"}, + }, + "world": { + "world": {"world news international global events"}, + "russia": {"мировые новости международные события"}, + "eu": {"world news Europe international"}, + }, + "business": { + "world": {"business news companies startups industry"}, + "russia": {"бизнес новости компании стартапы 
предпринимательство"}, + "eu": {"business news Europe companies industry"}, + }, + "culture": { + "world": {"culture news art exhibitions theatre"}, + "russia": {"культура новости искусство выставки театр"}, + "eu": {"culture news Europe art exhibitions"}, + }, } if topicQueries, ok := queries[topic]; ok { diff --git a/backend/cmd/file-svc/main.go b/backend/cmd/file-svc/main.go index 1b31a8c..c389a3a 100644 --- a/backend/cmd/file-svc/main.go +++ b/backend/cmd/file-svc/main.go @@ -161,7 +161,7 @@ func main() { return } - fileRepo.UpdateExtractedText(ctx, uploadedFile.ID, result.ExtractedText) + fileRepo.UpdateExtractedText(ctx, uploadedFile.ID, result.ExtractedText, uploadedFile.UserID) }() return c.Status(201).JSON(fiber.Map{ @@ -260,7 +260,7 @@ func main() { return c.Status(500).JSON(fiber.Map{"error": "Analysis failed: " + err.Error()}) } - fileRepo.UpdateExtractedText(c.Context(), fileID, result.ExtractedText) + fileRepo.UpdateExtractedText(c.Context(), fileID, result.ExtractedText, userID) return c.JSON(result) }) @@ -284,7 +284,10 @@ func main() { fileAnalyzer.DeleteFile(file.StoragePath) - if err := fileRepo.Delete(c.Context(), fileID); err != nil { + if err := fileRepo.Delete(c.Context(), fileID, userID); err != nil { + if err == db.ErrNotFound { + return c.Status(404).JSON(fiber.Map{"error": "File not found"}) + } return c.Status(500).JSON(fiber.Map{"error": "Failed to delete file"}) } diff --git a/backend/cmd/llm-svc/main.go b/backend/cmd/llm-svc/main.go index e13acf1..5853484 100644 --- a/backend/cmd/llm-svc/main.go +++ b/backend/cmd/llm-svc/main.go @@ -3,6 +3,7 @@ package main import ( "bufio" "context" + "database/sql" "fmt" "log" "os" @@ -12,8 +13,11 @@ import ( "github.com/gofiber/fiber/v2/middleware/cors" "github.com/gofiber/fiber/v2/middleware/logger" "github.com/gooseek/backend/internal/llm" + "github.com/gooseek/backend/internal/usage" "github.com/gooseek/backend/pkg/config" + "github.com/gooseek/backend/pkg/middleware" 
"github.com/gooseek/backend/pkg/ndjson" + _ "github.com/lib/pq" ) type GenerateRequest struct { @@ -36,6 +40,26 @@ func main() { log.Fatal("Failed to load config:", err) } + var usageRepo *usage.Repository + if cfg.DatabaseURL != "" { + db, err := sql.Open("postgres", cfg.DatabaseURL) + if err != nil { + log.Printf("Usage tracking unavailable: %v", err) + } else { + db.SetMaxOpenConns(5) + db.SetMaxIdleConns(2) + defer db.Close() + + usageRepo = usage.NewRepository(db) + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + if err := usageRepo.RunMigrations(ctx); err != nil { + log.Printf("Usage migrations warning: %v", err) + } + cancel() + log.Println("Usage tracking enabled") + } + } + app := fiber.New(fiber.Config{ StreamRequestBody: true, BodyLimit: 10 * 1024 * 1024, @@ -90,7 +114,20 @@ func main() { }) }) - app.Post("/api/v1/generate", func(c *fiber.Ctx) error { + llmAPI := app.Group("/api/v1", middleware.JWT(middleware.JWTConfig{ + Secret: cfg.JWTSecret, + AuthSvcURL: cfg.AuthSvcURL, + AllowGuest: false, + }), middleware.LLMLimits(middleware.LLMLimitsConfig{ + UsageRepo: usageRepo, + })) + + llmAPI.Post("/generate", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + tier := middleware.GetUserTier(c) + if tier == "" { + tier = "free" + } var req GenerateRequest if err := c.BodyParser(&req); err != nil { return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) @@ -100,6 +137,11 @@ func main() { return c.Status(400).JSON(fiber.Map{"error": "Messages required"}) } + limits := usage.GetLimits(tier) + if req.Options.MaxTokens == 0 || req.Options.MaxTokens > limits.MaxTokensPerReq { + req.Options.MaxTokens = limits.MaxTokensPerReq + } + client, err := llm.NewClient(llm.ProviderConfig{ ProviderID: req.ProviderID, ModelKey: req.ModelKey, @@ -161,11 +203,19 @@ func main() { return c.Status(500).JSON(fiber.Map{"error": err.Error()}) } + if usageRepo != nil { + go usageRepo.IncrementLLMUsage(context.Background(), userID, 
tier, len(response)/4) + } + return c.JSON(fiber.Map{ "content": response, }) }) + llmAPI.Post("/embed", func(c *fiber.Ctx) error { + return c.Status(501).JSON(fiber.Map{"error": "Not implemented"}) + }) + port := cfg.LLMSvcPort log.Printf("llm-svc listening on :%d", port) log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) diff --git a/backend/cmd/thread-svc/main.go b/backend/cmd/thread-svc/main.go index fb28c64..02b38f5 100644 --- a/backend/cmd/thread-svc/main.go +++ b/backend/cmd/thread-svc/main.go @@ -181,7 +181,10 @@ func main() { return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) } - messages, _ := threadRepo.GetMessages(c.Context(), threadID, 100, 0) + messages, err := threadRepo.GetMessages(c.Context(), threadID, userID, 100, 0) + if err != nil && err != db.ErrForbidden { + return c.Status(500).JSON(fiber.Map{"error": "Failed to get messages"}) + } thread.Messages = messages return c.JSON(thread) @@ -225,12 +228,15 @@ func main() { TokensUsed: req.TokensUsed, } - if err := threadRepo.AddMessage(c.Context(), msg); err != nil { + if err := threadRepo.AddMessage(c.Context(), msg, userID); err != nil { + if err == db.ErrForbidden { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } return c.Status(500).JSON(fiber.Map{"error": "Failed to add message"}) } if thread.Title == "New Thread" && req.Role == "user" { - threadRepo.GenerateTitle(c.Context(), threadID, req.Content) + threadRepo.GenerateTitle(c.Context(), threadID, req.Content, userID) } return c.Status(201).JSON(msg) @@ -250,7 +256,10 @@ func main() { } shareID := generateShareID() - if err := threadRepo.SetShareID(c.Context(), threadID, shareID); err != nil { + if err := threadRepo.SetShareID(c.Context(), threadID, shareID, userID); err != nil { + if err == db.ErrNotFound { + return c.Status(404).JSON(fiber.Map{"error": "Thread not found"}) + } return c.Status(500).JSON(fiber.Map{"error": "Failed to share thread"}) } @@ -264,16 +273,10 @@ func main() { threadID := c.Params("id") 
userID := middleware.GetUserID(c) - thread, err := threadRepo.GetByID(c.Context(), threadID) - if err != nil || thread == nil { - return c.Status(404).JSON(fiber.Map{"error": "Thread not found"}) - } - - if thread.UserID != userID { - return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) - } - - if err := threadRepo.Delete(c.Context(), threadID); err != nil { + if err := threadRepo.Delete(c.Context(), threadID, userID); err != nil { + if err == db.ErrNotFound { + return c.Status(404).JSON(fiber.Map{"error": "Thread not found"}) + } return c.Status(500).JSON(fiber.Map{"error": "Failed to delete thread"}) } @@ -290,7 +293,7 @@ func main() { return c.Status(404).JSON(fiber.Map{"error": "Shared thread not found"}) } - messages, _ := threadRepo.GetMessages(c.Context(), thread.ID, 100, 0) + messages, _ := threadRepo.GetMessages(c.Context(), thread.ID, thread.UserID, 100, 0) thread.Messages = messages return c.JSON(thread) @@ -353,15 +356,6 @@ func main() { spaceID := c.Params("id") userID := middleware.GetUserID(c) - space, err := spaceRepo.GetByID(c.Context(), spaceID) - if err != nil || space == nil { - return c.Status(404).JSON(fiber.Map{"error": "Space not found"}) - } - - if space.UserID != userID { - return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) - } - var req db.Space if err := c.BodyParser(&req); err != nil { return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) @@ -370,7 +364,10 @@ func main() { req.ID = spaceID req.UserID = userID - if err := spaceRepo.Update(c.Context(), &req); err != nil { + if err := spaceRepo.Update(c.Context(), &req, userID); err != nil { + if err == db.ErrNotFound { + return c.Status(404).JSON(fiber.Map{"error": "Space not found"}) + } return c.Status(500).JSON(fiber.Map{"error": "Failed to update space"}) } @@ -381,16 +378,10 @@ func main() { spaceID := c.Params("id") userID := middleware.GetUserID(c) - space, err := spaceRepo.GetByID(c.Context(), spaceID) - if err != nil || space == nil { - return 
c.Status(404).JSON(fiber.Map{"error": "Space not found"}) - } - - if space.UserID != userID { - return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) - } - - if err := spaceRepo.Delete(c.Context(), spaceID); err != nil { + if err := spaceRepo.Delete(c.Context(), spaceID, userID); err != nil { + if err == db.ErrNotFound { + return c.Status(404).JSON(fiber.Map{"error": "Space not found"}) + } return c.Status(500).JSON(fiber.Map{"error": "Failed to delete space"}) } @@ -445,8 +436,12 @@ func main() { memory.Delete("/:id", func(c *fiber.Ctx) error { memID := c.Params("id") + userID := middleware.GetUserID(c) - if err := memoryRepo.Delete(c.Context(), memID); err != nil { + if err := memoryRepo.Delete(c.Context(), memID, userID); err != nil { + if err == db.ErrNotFound { + return c.Status(404).JSON(fiber.Map{"error": "Memory not found"}) + } return c.Status(500).JSON(fiber.Map{"error": "Failed to delete memory"}) } @@ -493,7 +488,7 @@ func main() { return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) } - messages, _ := threadRepo.GetMessages(c.Context(), threadID, 100, 0) + messages, _ := threadRepo.GetMessages(c.Context(), threadID, userID, 100, 0) var query, answer string for _, msg := range messages { @@ -559,7 +554,10 @@ func main() { } shareID := generateShareID() - if err := pageRepo.SetShareID(c.Context(), pageID, shareID); err != nil { + if err := pageRepo.SetShareID(c.Context(), pageID, shareID, userID); err != nil { + if err == db.ErrNotFound { + return c.Status(404).JSON(fiber.Map{"error": "Page not found"}) + } return c.Status(500).JSON(fiber.Map{"error": "Failed to share page"}) } @@ -586,16 +584,10 @@ func main() { pageID := c.Params("id") userID := middleware.GetUserID(c) - page, err := pageRepo.GetByID(c.Context(), pageID) - if err != nil || page == nil { - return c.Status(404).JSON(fiber.Map{"error": "Page not found"}) - } - - if page.UserID != userID { - return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) - } - - if err := 
pageRepo.Delete(c.Context(), pageID); err != nil { + if err := pageRepo.Delete(c.Context(), pageID, userID); err != nil { + if err == db.ErrNotFound { + return c.Status(404).JSON(fiber.Map{"error": "Page not found"}) + } return c.Status(500).JSON(fiber.Map{"error": "Failed to delete page"}) } diff --git a/backend/deploy/docker/Dockerfile.all b/backend/deploy/docker/Dockerfile.all index 134c19d..b155475 100644 --- a/backend/deploy/docker/Dockerfile.all +++ b/backend/deploy/docker/Dockerfile.all @@ -1,5 +1,5 @@ # Multi-service Dockerfile - builds all services -FROM golang:1.22-alpine AS builder +FROM golang:1.24-alpine AS builder RUN apk add --no-cache git ca-certificates @@ -21,17 +21,17 @@ RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/discover-svc ./cm RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/collection-svc ./cmd/collection-svc RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/file-svc ./cmd/file-svc RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/thread-svc ./cmd/thread-svc -RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/computer-svc ./cmd/computer-svc RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/finance-heatmap-svc ./cmd/finance-heatmap-svc RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/learning-svc ./cmd/learning-svc RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/labs-svc ./cmd/labs-svc RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/podcast-svc ./cmd/podcast-svc -RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/browser-svc ./cmd/browser-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/admin-svc ./cmd/admin-svc # Runtime stage FROM alpine:3.19 -RUN apk add --no-cache ca-certificates tzdata +RUN apk add --no-cache ca-certificates tzdata docker-cli python3 py3-pip && \ + pip3 install --break-system-packages openpyxl python-docx reportlab pillow pandas WORKDIR /app @@ -41,7 +41,7 @@ COPY 
--from=builder /bin/* /app/ ENV SERVICE=api-gateway ENV PORT=3015 -EXPOSE 3015 3018 3005 3001 3020 3021 3002 3025 3026 3027 3030 +EXPOSE 3015 3018 3005 3001 3020 3021 3002 3025 3026 3027 3040 HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \ CMD wget --no-verbose --tries=1 --spider http://localhost:${PORT}/health || exit 1 diff --git a/backend/deploy/docker/Dockerfile.computer-svc b/backend/deploy/docker/Dockerfile.computer-svc deleted file mode 100644 index 94a7387..0000000 --- a/backend/deploy/docker/Dockerfile.computer-svc +++ /dev/null @@ -1,32 +0,0 @@ -# Dockerfile for computer-svc only -FROM golang:1.22-alpine AS builder - -RUN apk add --no-cache git ca-certificates - -WORKDIR /app - -COPY go.mod go.sum ./ -RUN go mod download - -COPY . . - -# Build only computer-svc -RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/computer-svc ./cmd/computer-svc - -# Runtime stage -FROM alpine:3.19 - -RUN apk add --no-cache ca-certificates tzdata docker-cli - -WORKDIR /app - -COPY --from=builder /bin/computer-svc /app/computer-svc - -ENV PORT=3030 - -EXPOSE 3030 - -HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \ - CMD wget --no-verbose --tries=1 --spider http://localhost:3030/health || exit 1 - -CMD ["/app/computer-svc"] diff --git a/backend/deploy/docker/docker-compose.yml b/backend/deploy/docker/docker-compose.yml index 2bf15d3..bc3f3cd 100644 --- a/backend/deploy/docker/docker-compose.yml +++ b/backend/deploy/docker/docker-compose.yml @@ -1,6 +1,27 @@ version: '3.8' services: + auth-svc: + build: + context: ../.. 
+ dockerfile: deploy/docker/Dockerfile.all + environment: + - SERVICE=auth-svc + - PORT=3050 + - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable + - JWT_SECRET=${JWT_SECRET} + ports: + - "3050:3050" + depends_on: + - postgres + healthcheck: + test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3050/health"] + interval: 30s + timeout: 10s + retries: 3 + networks: + - gooseek + api-gateway: build: context: ../.. @@ -8,23 +29,26 @@ services: environment: - SERVICE=api-gateway - PORT=3015 + - AUTH_SVC_URL=http://auth-svc:3050 - CHAT_SVC_URL=http://chat-svc:3005 - MASTER_AGENTS_SVC_URL=http://agent-svc:3018 - SEARCH_SVC_URL=http://search-svc:3001 - LLM_SVC_URL=http://llm-svc:3020 - SCRAPER_SVC_URL=http://scraper-svc:3021 - THREAD_SVC_URL=http://thread-svc:3027 - - COMPUTER_SVC_URL=http://computer-svc:3030 - DISCOVER_SVC_URL=http://discover-svc:3002 - FINANCE_HEATMAP_SVC_URL=http://finance-heatmap-svc:3033 - LEARNING_SVC_URL=http://learning-svc:3034 + - ADMIN_SVC_URL=http://admin-svc:3040 + - JWT_SECRET=${JWT_SECRET} ports: - "3015:3015" depends_on: + - auth-svc - chat-svc - agent-svc - thread-svc - - computer-svc + - admin-svc networks: - gooseek @@ -187,53 +211,6 @@ services: - computer-svc: - build: - context: ../..
- dockerfile: deploy/docker/Dockerfile.all - environment: - - SERVICE=computer-svc - - PORT=3030 - - COMPUTER_SVC_PORT=3030 - - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable - - REDIS_URL=redis://redis:6379 - - OPENAI_API_KEY=${OPENAI_API_KEY} - - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} - - GEMINI_API_KEY=${GEMINI_API_KEY} - - TIMEWEB_API_BASE_URL=${TIMEWEB_API_BASE_URL} - - TIMEWEB_AGENT_ACCESS_ID=${TIMEWEB_AGENT_ACCESS_ID} - - TIMEWEB_API_KEY=${TIMEWEB_API_KEY} - - TELEGRAM_BOT_TOKEN=${TELEGRAM_BOT_TOKEN} - - SANDBOX_IMAGE=python:3.11-slim - - BROWSER_SVC_URL=http://browser-svc:3050 - ports: - - "3030:3030" - volumes: - - /var/run/docker.sock:/var/run/docker.sock - - checkpoints:/data/checkpoints - depends_on: - - postgres - - redis - - browser-svc - networks: - - gooseek - - browser-svc: - build: - context: ../.. - dockerfile: deploy/docker/Dockerfile.all - environment: - - SERVICE=browser-svc - - PORT=3050 - - BROWSER_SVC_PORT=3050 - ports: - - "3050:3050" - volumes: - - screenshots:/tmp/gooseek-screenshots - - recordings:/tmp/gooseek-recordings - networks: - - gooseek - labs-svc: build: context: ../.. @@ -304,14 +281,60 @@ services: networks: - gooseek + admin-svc: + build: + context: ../.. 
+ dockerfile: deploy/docker/Dockerfile.all + environment: + - SERVICE=admin-svc + - PORT=3040 + - ADMIN_SVC_PORT=3040 + - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable + - JWT_SECRET=${JWT_SECRET} + - AUTH_SVC_URL=http://auth-svc:3050 + - MINIO_ENDPOINT=minio:9000 + - MINIO_ACCESS_KEY=minioadmin + - MINIO_SECRET_KEY=minioadmin + - MINIO_BUCKET=gooseek + - MINIO_USE_SSL=false + ports: + - "3040:3040" + depends_on: + - postgres + - minio + networks: + - gooseek + + minio: + image: minio/minio:latest + command: server /data --console-address ":9001" + environment: + - MINIO_ROOT_USER=minioadmin + - MINIO_ROOT_PASSWORD=minioadmin + volumes: + - minio-data:/data + ports: + - "9000:9000" + - "9001:9001" + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"] + interval: 30s + timeout: 10s + retries: 3 + networks: + - gooseek + webui: build: context: ../../webui dockerfile: Dockerfile + args: + - NEXT_PUBLIC_DISABLED_ROUTES=${NEXT_PUBLIC_DISABLED_ROUTES:-/travel,/medicine,/finance,/learning,/spaces,/history} environment: - NODE_ENV=production - API_URL=http://api-gateway:3015 - NEXT_PUBLIC_API_URL= + - NEXT_PUBLIC_DISABLED_ROUTES=${NEXT_PUBLIC_DISABLED_ROUTES:-/travel,/medicine,/finance,/learning,/spaces,/history} ports: - "3000:3000" depends_on: @@ -379,7 +402,5 @@ volumes: postgres-data: redis-data: file-storage: - checkpoints: - screenshots: - recordings: podcasts: + minio-data: diff --git a/backend/deploy/k8s/computer-svc.yaml b/backend/deploy/k8s/computer-svc.yaml deleted file mode 100644 index 305523b..0000000 --- a/backend/deploy/k8s/computer-svc.yaml +++ /dev/null @@ -1,137 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: computer-svc - namespace: gooseek - labels: - app: computer-svc -spec: - replicas: 1 - selector: - matchLabels: - app: computer-svc - template: - metadata: - labels: - app: computer-svc - spec: - containers: - - name: computer-svc - image: gooseek/backend:latest -
command: ["/app/computer-svc"] - ports: - - containerPort: 3030 - env: - - name: COMPUTER_SVC_PORT - value: "3030" - - name: DATABASE_URL - valueFrom: - secretKeyRef: - name: gooseek-secrets - key: DATABASE_URL - - name: REDIS_URL - valueFrom: - configMapKeyRef: - name: gooseek-config - key: REDIS_URL - - name: OPENAI_API_KEY - valueFrom: - secretKeyRef: - name: gooseek-secrets - key: OPENAI_API_KEY - - name: ANTHROPIC_API_KEY - valueFrom: - secretKeyRef: - name: gooseek-secrets - key: ANTHROPIC_API_KEY - optional: true - - name: GEMINI_API_KEY - valueFrom: - secretKeyRef: - name: gooseek-secrets - key: GEMINI_API_KEY - optional: true - - name: TELEGRAM_BOT_TOKEN - valueFrom: - secretKeyRef: - name: gooseek-secrets - key: TELEGRAM_BOT_TOKEN - optional: true - - name: SMTP_HOST - valueFrom: - secretKeyRef: - name: gooseek-secrets - key: SMTP_HOST - optional: true - - name: SMTP_USERNAME - valueFrom: - secretKeyRef: - name: gooseek-secrets - key: SMTP_USERNAME - optional: true - - name: SMTP_PASSWORD - valueFrom: - secretKeyRef: - name: gooseek-secrets - key: SMTP_PASSWORD - optional: true - - name: S3_ENDPOINT - valueFrom: - secretKeyRef: - name: gooseek-secrets - key: S3_ENDPOINT - optional: true - - name: S3_ACCESS_KEY - valueFrom: - secretKeyRef: - name: gooseek-secrets - key: S3_ACCESS_KEY - optional: true - - name: S3_SECRET_KEY - valueFrom: - secretKeyRef: - name: gooseek-secrets - key: S3_SECRET_KEY - optional: true - - name: SANDBOX_IMAGE - value: "gooseek/sandbox:latest" - resources: - requests: - memory: "256Mi" - cpu: "100m" - limits: - memory: "1Gi" - cpu: "1000m" - livenessProbe: - httpGet: - path: /health - port: 3030 - initialDelaySeconds: 10 - periodSeconds: 30 - readinessProbe: - httpGet: - path: /health - port: 3030 - initialDelaySeconds: 5 - periodSeconds: 10 - volumeMounts: - - name: docker-socket - mountPath: /var/run/docker.sock - volumes: - - name: docker-socket - hostPath: - path: /var/run/docker.sock - type: Socket ---- -apiVersion: v1 
-kind: Service -metadata: - name: computer-svc - namespace: gooseek -spec: - selector: - app: computer-svc - ports: - - port: 3030 - targetPort: 3030 - type: ClusterIP diff --git a/backend/deploy/k8s/kustomization.yaml b/backend/deploy/k8s/kustomization.yaml index d6866cf..14128db 100644 --- a/backend/deploy/k8s/kustomization.yaml +++ b/backend/deploy/k8s/kustomization.yaml @@ -18,7 +18,6 @@ resources: - collection-svc.yaml - file-svc.yaml - thread-svc.yaml - - computer-svc.yaml - ingress.yaml commonLabels: diff --git a/backend/go.mod b/backend/go.mod index 05e29a4..41fa558 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -1,6 +1,8 @@ module github.com/gooseek/backend -go 1.22 +go 1.24 + +toolchain go1.24.13 require ( github.com/gofiber/fiber/v2 v2.52.0 @@ -11,7 +13,6 @@ require ( github.com/lib/pq v1.10.9 github.com/minio/minio-go/v7 v7.0.70 github.com/redis/go-redis/v9 v9.4.0 - github.com/robfig/cron/v3 v3.0.1 github.com/sashabaranov/go-openai v1.20.0 go.uber.org/zap v1.27.0 golang.org/x/sync v0.6.0 @@ -29,8 +30,10 @@ require ( github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-runewidth v0.0.15 // indirect github.com/minio/md5-simd v1.1.2 // indirect + github.com/philhofer/fwd v1.2.0 // indirect github.com/rivo/uniseg v0.4.6 // indirect github.com/rs/xid v1.5.0 // indirect + github.com/tinylib/msgp v1.6.3 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/fasthttp v1.52.0 // indirect github.com/valyala/tcplisten v1.0.0 // indirect diff --git a/backend/go.sum b/backend/go.sum index 2d15a09..d8b960d 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -42,6 +42,8 @@ github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34= github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM= github.com/minio/minio-go/v7 v7.0.70 h1:1u9NtMgfK1U42kUxcsl5v0yj6TEOPR497OAQxpJnn2g= github.com/minio/minio-go/v7 v7.0.70/go.mod h1:4yBA8v80xGA30cfM3fz0DKYMXunWl/AV/6tWEs9ryzo= 
+github.com/philhofer/fwd v1.2.0 h1:e6DnBTl7vGY+Gz322/ASL4Gyp1FspeMvx1RNDoToZuM= +github.com/philhofer/fwd v1.2.0/go.mod h1:RqIHx9QI14HlwKwm98g9Re5prTQ6LdeRQn+gXJFxsJM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/redis/go-redis/v9 v9.4.0 h1:Yzoz33UZw9I/mFhx4MNrB6Fk+XHO1VukNcCa1+lwyKk= @@ -49,14 +51,14 @@ github.com/redis/go-redis/v9 v9.4.0/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0 github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.6 h1:Sovz9sDSwbOz9tgUy8JpT+KgCkPYJEN/oYzlJiYTNLg= github.com/rivo/uniseg v0.4.6/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= -github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= -github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc= github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/sashabaranov/go-openai v1.20.0 h1:r9WiwJY6Q2aPDhVyfOSKm83Gs04ogN1yaaBoQOnusS4= github.com/sashabaranov/go-openai v1.20.0/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg= github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/tinylib/msgp v1.6.3 h1:bCSxiTz386UTgyT1i0MSCvdbWjVW+8sG3PjkGsZQt4s= +github.com/tinylib/msgp v1.6.3/go.mod h1:RSp0LW9oSxFut3KzESt5Voq4GVWyS+PSulT77roAqEA= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasthttp v1.52.0 h1:wqBQpxH71XW0e2g+Og4dzQM8pk34aFYlA1Ga8db7gU0= diff --git a/backend/internal/admin/migrations.go b/backend/internal/admin/migrations.go new file 
mode 100644 index 0000000..d657932 --- /dev/null +++ b/backend/internal/admin/migrations.go @@ -0,0 +1,153 @@ +package admin + +import ( + "context" + "database/sql" +) + +func RunAdminMigrations(ctx context.Context, db *sql.DB) error { + migrations := []string{ + `CREATE TABLE IF NOT EXISTS users ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + email VARCHAR(255) NOT NULL UNIQUE, + password_hash VARCHAR(255) NOT NULL, + display_name VARCHAR(255) NOT NULL, + avatar_url TEXT, + role VARCHAR(50) NOT NULL DEFAULT 'user', + tier VARCHAR(50) NOT NULL DEFAULT 'free', + is_active BOOLEAN NOT NULL DEFAULT true, + last_login_at TIMESTAMPTZ, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_users_email ON users(email)`, + `CREATE INDEX IF NOT EXISTS idx_users_role ON users(role)`, + `CREATE INDEX IF NOT EXISTS idx_users_is_active ON users(is_active)`, + + `CREATE TABLE IF NOT EXISTS posts ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + title VARCHAR(500) NOT NULL, + slug VARCHAR(255) NOT NULL UNIQUE, + content TEXT NOT NULL, + excerpt TEXT, + cover_image TEXT, + author_id UUID REFERENCES users(id) ON DELETE SET NULL, + category VARCHAR(100) NOT NULL DEFAULT 'general', + tags JSONB DEFAULT '[]', + status VARCHAR(50) NOT NULL DEFAULT 'draft', + view_count INT NOT NULL DEFAULT 0, + published_at TIMESTAMPTZ, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_posts_slug ON posts(slug)`, + `CREATE INDEX IF NOT EXISTS idx_posts_author ON posts(author_id)`, + `CREATE INDEX IF NOT EXISTS idx_posts_status ON posts(status)`, + `CREATE INDEX IF NOT EXISTS idx_posts_category ON posts(category)`, + `CREATE INDEX IF NOT EXISTS idx_posts_published_at ON posts(published_at DESC)`, + + `CREATE TABLE IF NOT EXISTS platform_settings ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + site_name VARCHAR(255) NOT NULL 
DEFAULT 'GooSeek', + site_url VARCHAR(500) NOT NULL DEFAULT 'https://gooseek.ru', + logo_url TEXT, + favicon_url TEXT, + description TEXT, + support_email VARCHAR(255), + features JSONB NOT NULL DEFAULT '{}', + llm_settings JSONB NOT NULL DEFAULT '{}', + search_settings JSONB NOT NULL DEFAULT '{}', + metadata JSONB DEFAULT '{}', + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + )`, + + `CREATE TABLE IF NOT EXISTS discover_categories ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + name VARCHAR(100) NOT NULL UNIQUE, + name_ru VARCHAR(100) NOT NULL, + icon VARCHAR(10) NOT NULL DEFAULT '📰', + color VARCHAR(20) NOT NULL DEFAULT '#6B7280', + keywords JSONB NOT NULL DEFAULT '[]', + regions JSONB NOT NULL DEFAULT '["world"]', + is_active BOOLEAN NOT NULL DEFAULT true, + sort_order INT NOT NULL DEFAULT 0, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_discover_categories_name ON discover_categories(name)`, + `CREATE INDEX IF NOT EXISTS idx_discover_categories_sort ON discover_categories(sort_order)`, + + `CREATE TABLE IF NOT EXISTS discover_sources ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + name VARCHAR(255) NOT NULL, + url VARCHAR(500) NOT NULL UNIQUE, + logo_url TEXT, + categories JSONB NOT NULL DEFAULT '[]', + trust_score DECIMAL(3,2) NOT NULL DEFAULT 0.5, + is_active BOOLEAN NOT NULL DEFAULT true, + description TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_discover_sources_url ON discover_sources(url)`, + `CREATE INDEX IF NOT EXISTS idx_discover_sources_trust ON discover_sources(trust_score DESC)`, + + `CREATE TABLE IF NOT EXISTS audit_logs ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL, + user_email VARCHAR(255) NOT NULL, + action VARCHAR(100) NOT NULL, + resource VARCHAR(100) NOT NULL, + resource_id VARCHAR(255), + details JSONB, + ip_address 
VARCHAR(45), + user_agent TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_audit_logs_user ON audit_logs(user_id)`, + `CREATE INDEX IF NOT EXISTS idx_audit_logs_action ON audit_logs(action)`, + `CREATE INDEX IF NOT EXISTS idx_audit_logs_resource ON audit_logs(resource)`, + `CREATE INDEX IF NOT EXISTS idx_audit_logs_created ON audit_logs(created_at DESC)`, + + `CREATE TABLE IF NOT EXISTS connectors ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + type VARCHAR(50) NOT NULL, + name VARCHAR(255) NOT NULL, + config JSONB NOT NULL DEFAULT '{}', + is_active BOOLEAN NOT NULL DEFAULT true, + last_sync_at TIMESTAMPTZ, + status VARCHAR(50) NOT NULL DEFAULT 'pending', + error_msg TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_connectors_type ON connectors(type)`, + `CREATE INDEX IF NOT EXISTS idx_connectors_status ON connectors(status)`, + + `CREATE TABLE IF NOT EXISTS user_files ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + filename VARCHAR(500) NOT NULL, + original_name VARCHAR(500) NOT NULL, + file_type VARCHAR(100) NOT NULL, + file_size BIGINT NOT NULL, + bucket VARCHAR(100) NOT NULL DEFAULT 'user-files', + storage_key TEXT NOT NULL, + mime_type VARCHAR(100), + metadata JSONB DEFAULT '{}', + is_public BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_user_files_user ON user_files(user_id)`, + `CREATE INDEX IF NOT EXISTS idx_user_files_type ON user_files(file_type)`, + `CREATE INDEX IF NOT EXISTS idx_user_files_bucket ON user_files(bucket)`, + } + + for _, migration := range migrations { + if _, err := db.ExecContext(ctx, migration); err != nil { + return err + } + } + + return nil +} diff --git a/backend/internal/admin/repositories.go b/backend/internal/admin/repositories.go new file mode 
100644 index 0000000..0c3e624 --- /dev/null +++ b/backend/internal/admin/repositories.go @@ -0,0 +1,984 @@ +package admin + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + "strings" + "time" + + "github.com/google/uuid" + "golang.org/x/crypto/bcrypt" +) + +type UserRepository struct { + db *sql.DB +} + +func NewUserRepository(db *sql.DB) *UserRepository { + return &UserRepository{db: db} +} + +func (r *UserRepository) List(ctx context.Context, page, perPage int, search string) ([]User, int, error) { + offset := (page - 1) * perPage + + countQuery := `SELECT COUNT(*) FROM users WHERE 1=1` + listQuery := `SELECT id, email, display_name, avatar_url, role, tier, is_active, last_login_at, created_at, updated_at + FROM users WHERE 1=1` + + args := []interface{}{} + argIndex := 1 + + if search != "" { + searchCondition := fmt.Sprintf(` AND (email ILIKE $%d OR display_name ILIKE $%d)`, argIndex, argIndex+1) + countQuery += searchCondition + listQuery += searchCondition + searchPattern := "%" + search + "%" + args = append(args, searchPattern, searchPattern) + argIndex += 2 + } + + listQuery += fmt.Sprintf(` ORDER BY created_at DESC LIMIT $%d OFFSET $%d`, argIndex, argIndex+1) + args = append(args, perPage, offset) + + var total int + if err := r.db.QueryRowContext(ctx, countQuery, args[:len(args)-2]...).Scan(&total); err != nil { + return nil, 0, err + } + + rows, err := r.db.QueryContext(ctx, listQuery, args...) 
+ if err != nil { + return nil, 0, err + } + defer rows.Close() + + var users []User + for rows.Next() { + var u User + var lastLoginAt sql.NullTime + var avatarURL sql.NullString + if err := rows.Scan(&u.ID, &u.Email, &u.DisplayName, &avatarURL, &u.Role, &u.Tier, &u.IsActive, &lastLoginAt, &u.CreatedAt, &u.UpdatedAt); err != nil { + return nil, 0, err + } + if lastLoginAt.Valid { + u.LastLoginAt = &lastLoginAt.Time + } + if avatarURL.Valid { + u.AvatarURL = avatarURL.String + } + users = append(users, u) + } + + return users, total, nil +} + +func (r *UserRepository) GetByID(ctx context.Context, id string) (*User, error) { + query := `SELECT id, email, display_name, avatar_url, role, tier, is_active, last_login_at, created_at, updated_at + FROM users WHERE id = $1` + + var u User + var lastLoginAt sql.NullTime + var avatarURL sql.NullString + err := r.db.QueryRowContext(ctx, query, id).Scan(&u.ID, &u.Email, &u.DisplayName, &avatarURL, &u.Role, &u.Tier, &u.IsActive, &lastLoginAt, &u.CreatedAt, &u.UpdatedAt) + if err != nil { + return nil, err + } + + if lastLoginAt.Valid { + u.LastLoginAt = &lastLoginAt.Time + } + if avatarURL.Valid { + u.AvatarURL = avatarURL.String + } + + return &u, nil +} + +func (r *UserRepository) GetByEmail(ctx context.Context, email string) (*User, error) { + query := `SELECT id, email, password_hash, display_name, avatar_url, role, tier, is_active, last_login_at, created_at, updated_at + FROM users WHERE email = $1` + + var u User + var lastLoginAt sql.NullTime + var avatarURL sql.NullString + err := r.db.QueryRowContext(ctx, query, email).Scan(&u.ID, &u.Email, &u.PasswordHash, &u.DisplayName, &avatarURL, &u.Role, &u.Tier, &u.IsActive, &lastLoginAt, &u.CreatedAt, &u.UpdatedAt) + if err != nil { + return nil, err + } + + if lastLoginAt.Valid { + u.LastLoginAt = &lastLoginAt.Time + } + if avatarURL.Valid { + u.AvatarURL = avatarURL.String + } + + return &u, nil +} + +func (r *UserRepository) Create(ctx context.Context, req 
*UserCreateRequest) (*User, error) { + id := uuid.New().String() + + hashedPassword, err := bcrypt.GenerateFromPassword([]byte(req.Password), bcrypt.DefaultCost) + if err != nil { + return nil, fmt.Errorf("failed to hash password: %w", err) + } + + role := req.Role + if role == "" { + role = RoleUser + } + tier := req.Tier + if tier == "" { + tier = TierFree + } + + query := `INSERT INTO users (id, email, password_hash, display_name, role, tier, is_active, created_at, updated_at) + VALUES ($1, $2, $3, $4, $5, $6, true, NOW(), NOW()) + RETURNING id, email, display_name, role, tier, is_active, created_at, updated_at` + + var u User + err = r.db.QueryRowContext(ctx, query, id, req.Email, string(hashedPassword), req.DisplayName, role, tier). + Scan(&u.ID, &u.Email, &u.DisplayName, &u.Role, &u.Tier, &u.IsActive, &u.CreatedAt, &u.UpdatedAt) + if err != nil { + return nil, err + } + + return &u, nil +} + +func (r *UserRepository) Update(ctx context.Context, id string, req *UserUpdateRequest) (*User, error) { + sets := []string{} + args := []interface{}{} + argIndex := 1 + + if req.Email != nil { + sets = append(sets, fmt.Sprintf("email = $%d", argIndex)) + args = append(args, *req.Email) + argIndex++ + } + if req.DisplayName != nil { + sets = append(sets, fmt.Sprintf("display_name = $%d", argIndex)) + args = append(args, *req.DisplayName) + argIndex++ + } + if req.Role != nil { + sets = append(sets, fmt.Sprintf("role = $%d", argIndex)) + args = append(args, *req.Role) + argIndex++ + } + if req.Tier != nil { + sets = append(sets, fmt.Sprintf("tier = $%d", argIndex)) + args = append(args, *req.Tier) + argIndex++ + } + if req.IsActive != nil { + sets = append(sets, fmt.Sprintf("is_active = $%d", argIndex)) + args = append(args, *req.IsActive) + argIndex++ + } + + if len(sets) == 0 { + return r.GetByID(ctx, id) + } + + sets = append(sets, "updated_at = NOW()") + args = append(args, id) + + query := fmt.Sprintf(`UPDATE users SET %s WHERE id = $%d + RETURNING id, email, 
display_name, avatar_url, role, tier, is_active, last_login_at, created_at, updated_at`, + strings.Join(sets, ", "), argIndex) + + var u User + var lastLoginAt sql.NullTime + var avatarURL sql.NullString + err := r.db.QueryRowContext(ctx, query, args...).Scan(&u.ID, &u.Email, &u.DisplayName, &avatarURL, &u.Role, &u.Tier, &u.IsActive, &lastLoginAt, &u.CreatedAt, &u.UpdatedAt) + if err != nil { + return nil, err + } + + if lastLoginAt.Valid { + u.LastLoginAt = &lastLoginAt.Time + } + if avatarURL.Valid { + u.AvatarURL = avatarURL.String + } + + return &u, nil +} + +func (r *UserRepository) Delete(ctx context.Context, id string) error { + _, err := r.db.ExecContext(ctx, `DELETE FROM users WHERE id = $1`, id) + return err +} + +func (r *UserRepository) Count(ctx context.Context, filter string) (int, error) { + query := `SELECT COUNT(*) FROM users` + if filter != "" { + query += ` WHERE role = $1` + var count int + err := r.db.QueryRowContext(ctx, query, filter).Scan(&count) + return count, err + } + var count int + err := r.db.QueryRowContext(ctx, query).Scan(&count) + return count, err +} + +func (r *UserRepository) CountActive(ctx context.Context) (int, error) { + query := `SELECT COUNT(*) FROM users WHERE is_active = true AND last_login_at > NOW() - INTERVAL '30 days'` + var count int + err := r.db.QueryRowContext(ctx, query).Scan(&count) + return count, err +} + +type PostRepository struct { + db *sql.DB +} + +func NewPostRepository(db *sql.DB) *PostRepository { + return &PostRepository{db: db} +} + +func (r *PostRepository) List(ctx context.Context, page, perPage int, status, category string) ([]Post, int, error) { + offset := (page - 1) * perPage + + countQuery := `SELECT COUNT(*) FROM posts WHERE 1=1` + listQuery := `SELECT p.id, p.title, p.slug, p.content, p.excerpt, p.cover_image, p.author_id, + COALESCE(u.display_name, u.email, 'Unknown') as author_name, p.category, p.tags, p.status, + p.view_count, p.published_at, p.created_at, p.updated_at + FROM posts p 
LEFT JOIN users u ON p.author_id = u.id WHERE 1=1` + + args := []interface{}{} + argIndex := 1 + + if status != "" { + condition := fmt.Sprintf(` AND p.status = $%d`, argIndex) + countQuery += strings.Replace(condition, "p.", "", 1) + listQuery += condition + args = append(args, status) + argIndex++ + } + if category != "" { + condition := fmt.Sprintf(` AND p.category = $%d`, argIndex) + countQuery += strings.Replace(condition, "p.", "", 1) + listQuery += condition + args = append(args, category) + argIndex++ + } + + listQuery += fmt.Sprintf(` ORDER BY p.created_at DESC LIMIT $%d OFFSET $%d`, argIndex, argIndex+1) + args = append(args, perPage, offset) + + var total int + countArgs := args[:len(args)-2] + if len(countArgs) == 0 { + countArgs = nil + } + if err := r.db.QueryRowContext(ctx, countQuery, countArgs...).Scan(&total); err != nil { + return nil, 0, err + } + + rows, err := r.db.QueryContext(ctx, listQuery, args...) + if err != nil { + return nil, 0, err + } + defer rows.Close() + + var posts []Post + for rows.Next() { + var p Post + var publishedAt sql.NullTime + var excerpt, coverImage sql.NullString + var tagsJSON []byte + + if err := rows.Scan(&p.ID, &p.Title, &p.Slug, &p.Content, &excerpt, &coverImage, &p.AuthorID, &p.AuthorName, &p.Category, &tagsJSON, &p.Status, &p.ViewCount, &publishedAt, &p.CreatedAt, &p.UpdatedAt); err != nil { + return nil, 0, err + } + + if publishedAt.Valid { + p.PublishedAt = &publishedAt.Time + } + if excerpt.Valid { + p.Excerpt = excerpt.String + } + if coverImage.Valid { + p.CoverImage = coverImage.String + } + if tagsJSON != nil { + json.Unmarshal(tagsJSON, &p.Tags) + } + + posts = append(posts, p) + } + + return posts, total, nil +} + +func (r *PostRepository) GetByID(ctx context.Context, id string) (*Post, error) { + query := `SELECT p.id, p.title, p.slug, p.content, p.excerpt, p.cover_image, p.author_id, + COALESCE(u.display_name, u.email, 'Unknown') as author_name, p.category, p.tags, p.status, + p.view_count, 
p.published_at, p.created_at, p.updated_at + FROM posts p LEFT JOIN users u ON p.author_id = u.id WHERE p.id = $1` + + var p Post + var publishedAt sql.NullTime + var excerpt, coverImage sql.NullString + var tagsJSON []byte + + err := r.db.QueryRowContext(ctx, query, id).Scan(&p.ID, &p.Title, &p.Slug, &p.Content, &excerpt, &coverImage, &p.AuthorID, &p.AuthorName, &p.Category, &tagsJSON, &p.Status, &p.ViewCount, &publishedAt, &p.CreatedAt, &p.UpdatedAt) + if err != nil { + return nil, err + } + + if publishedAt.Valid { + p.PublishedAt = &publishedAt.Time + } + if excerpt.Valid { + p.Excerpt = excerpt.String + } + if coverImage.Valid { + p.CoverImage = coverImage.String + } + if tagsJSON != nil { + json.Unmarshal(tagsJSON, &p.Tags) + } + + return &p, nil +} + +func (r *PostRepository) Create(ctx context.Context, authorID string, req *PostCreateRequest) (*Post, error) { + id := uuid.New().String() + slug := generateSlug(req.Title) + + tagsJSON, _ := json.Marshal(req.Tags) + + status := req.Status + if status == "" { + status = string(PostStatusDraft) + } + + query := `INSERT INTO posts (id, title, slug, content, excerpt, cover_image, author_id, category, tags, status, created_at, updated_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, NOW(), NOW()) + RETURNING id, title, slug, content, excerpt, cover_image, author_id, category, tags, status, view_count, published_at, created_at, updated_at` + + var p Post + var publishedAt sql.NullTime + var excerpt, coverImage sql.NullString + var returnedTags []byte + + err := r.db.QueryRowContext(ctx, query, id, req.Title, slug, req.Content, req.Excerpt, req.CoverImage, authorID, req.Category, tagsJSON, status). 
+ Scan(&p.ID, &p.Title, &p.Slug, &p.Content, &excerpt, &coverImage, &p.AuthorID, &p.Category, &returnedTags, &p.Status, &p.ViewCount, &publishedAt, &p.CreatedAt, &p.UpdatedAt) + if err != nil { + return nil, err + } + + if publishedAt.Valid { + p.PublishedAt = &publishedAt.Time + } + if excerpt.Valid { + p.Excerpt = excerpt.String + } + if coverImage.Valid { + p.CoverImage = coverImage.String + } + if returnedTags != nil { + json.Unmarshal(returnedTags, &p.Tags) + } + + return &p, nil +} + +func (r *PostRepository) Update(ctx context.Context, id string, req *PostUpdateRequest) (*Post, error) { + sets := []string{} + args := []interface{}{} + argIndex := 1 + + if req.Title != nil { + sets = append(sets, fmt.Sprintf("title = $%d", argIndex)) + args = append(args, *req.Title) + argIndex++ + sets = append(sets, fmt.Sprintf("slug = $%d", argIndex)) + args = append(args, generateSlug(*req.Title)) + argIndex++ + } + if req.Content != nil { + sets = append(sets, fmt.Sprintf("content = $%d", argIndex)) + args = append(args, *req.Content) + argIndex++ + } + if req.Excerpt != nil { + sets = append(sets, fmt.Sprintf("excerpt = $%d", argIndex)) + args = append(args, *req.Excerpt) + argIndex++ + } + if req.CoverImage != nil { + sets = append(sets, fmt.Sprintf("cover_image = $%d", argIndex)) + args = append(args, *req.CoverImage) + argIndex++ + } + if req.Category != nil { + sets = append(sets, fmt.Sprintf("category = $%d", argIndex)) + args = append(args, *req.Category) + argIndex++ + } + if req.Tags != nil { + tagsJSON, _ := json.Marshal(*req.Tags) + sets = append(sets, fmt.Sprintf("tags = $%d", argIndex)) + args = append(args, tagsJSON) + argIndex++ + } + if req.Status != nil { + sets = append(sets, fmt.Sprintf("status = $%d", argIndex)) + args = append(args, *req.Status) + argIndex++ + } + + if len(sets) == 0 { + return r.GetByID(ctx, id) + } + + sets = append(sets, "updated_at = NOW()") + args = append(args, id) + + query := fmt.Sprintf(`UPDATE posts SET %s WHERE id = $%d 
RETURNING id`, strings.Join(sets, ", "), argIndex) + + var returnedID string + if err := r.db.QueryRowContext(ctx, query, args...).Scan(&returnedID); err != nil { + return nil, err + } + + return r.GetByID(ctx, id) +} + +func (r *PostRepository) Delete(ctx context.Context, id string) error { + _, err := r.db.ExecContext(ctx, `DELETE FROM posts WHERE id = $1`, id) + return err +} + +func (r *PostRepository) Publish(ctx context.Context, id string) (*Post, error) { + query := `UPDATE posts SET status = 'published', published_at = NOW(), updated_at = NOW() WHERE id = $1 RETURNING id` + var returnedID string + if err := r.db.QueryRowContext(ctx, query, id).Scan(&returnedID); err != nil { + return nil, err + } + return r.GetByID(ctx, id) +} + +func (r *PostRepository) Count(ctx context.Context, status string) (int, error) { + query := `SELECT COUNT(*) FROM posts` + if status != "" { + query += ` WHERE status = $1` + var count int + err := r.db.QueryRowContext(ctx, query, status).Scan(&count) + return count, err + } + var count int + err := r.db.QueryRowContext(ctx, query).Scan(&count) + return count, err +} + +type SettingsRepository struct { + db *sql.DB +} + +func NewSettingsRepository(db *sql.DB) *SettingsRepository { + return &SettingsRepository{db: db} +} + +func (r *SettingsRepository) Get(ctx context.Context) (*PlatformSettings, error) { + query := `SELECT id, site_name, site_url, logo_url, favicon_url, description, support_email, features, llm_settings, search_settings, metadata, updated_at FROM platform_settings LIMIT 1` + + var s PlatformSettings + var logoURL, faviconURL, description, supportEmail sql.NullString + var featuresJSON, llmJSON, searchJSON, metadataJSON []byte + + err := r.db.QueryRowContext(ctx, query).Scan(&s.ID, &s.SiteName, &s.SiteURL, &logoURL, &faviconURL, &description, &supportEmail, &featuresJSON, &llmJSON, &searchJSON, &metadataJSON, &s.UpdatedAt) + if err == sql.ErrNoRows { + return r.createDefault(ctx) + } + if err != nil { + return nil, 
err + } + + if logoURL.Valid { + s.LogoURL = logoURL.String + } + if faviconURL.Valid { + s.FaviconURL = faviconURL.String + } + if description.Valid { + s.Description = description.String + } + if supportEmail.Valid { + s.SupportEmail = supportEmail.String + } + + json.Unmarshal(featuresJSON, &s.Features) + json.Unmarshal(llmJSON, &s.LLMSettings) + json.Unmarshal(searchJSON, &s.SearchSettings) + json.Unmarshal(metadataJSON, &s.Metadata) + + return &s, nil +} + +func (r *SettingsRepository) createDefault(ctx context.Context) (*PlatformSettings, error) { + s := &PlatformSettings{ + ID: uuid.New().String(), + SiteName: "GooSeek", + SiteURL: "https://gooseek.ru", + Features: FeatureFlags{ + EnableRegistration: true, + EnableDiscover: true, + EnableFinance: true, + EnableLearning: true, + EnableTravel: true, + EnableMedicine: true, + EnableFileUploads: true, + MaintenanceMode: false, + }, + LLMSettings: LLMSettings{ + DefaultProvider: "timeweb", + DefaultModel: "gpt-4o-mini", + MaxTokens: 4096, + Temperature: 0.7, + }, + SearchSettings: SearchSettings{ + DefaultEngine: "searxng", + SafeSearch: true, + MaxResults: 10, + EnabledCategories: []string{"general", "news", "images"}, + }, + UpdatedAt: time.Now(), + } + + featuresJSON, _ := json.Marshal(s.Features) + llmJSON, _ := json.Marshal(s.LLMSettings) + searchJSON, _ := json.Marshal(s.SearchSettings) + metadataJSON, _ := json.Marshal(s.Metadata) + + query := `INSERT INTO platform_settings (id, site_name, site_url, features, llm_settings, search_settings, metadata, updated_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, NOW())` + + _, err := r.db.ExecContext(ctx, query, s.ID, s.SiteName, s.SiteURL, featuresJSON, llmJSON, searchJSON, metadataJSON) + if err != nil { + return nil, err + } + + return s, nil +} + +func (r *SettingsRepository) Update(ctx context.Context, s *PlatformSettings) (*PlatformSettings, error) { + featuresJSON, _ := json.Marshal(s.Features) + llmJSON, _ := json.Marshal(s.LLMSettings) + searchJSON, _ := 
json.Marshal(s.SearchSettings)
	metadataJSON, _ := json.Marshal(s.Metadata)

	// COALESCE(NULLIF(...)) keeps the stored name/url when the caller sends
	// an empty string; the remaining columns are overwritten unconditionally.
	query := `UPDATE platform_settings SET
		site_name = COALESCE(NULLIF($1, ''), site_name),
		site_url = COALESCE(NULLIF($2, ''), site_url),
		logo_url = $3,
		favicon_url = $4,
		description = $5,
		support_email = $6,
		features = $7,
		llm_settings = $8,
		search_settings = $9,
		metadata = $10,
		updated_at = NOW()`

	_, err := r.db.ExecContext(ctx, query, s.SiteName, s.SiteURL, s.LogoURL, s.FaviconURL, s.Description, s.SupportEmail, featuresJSON, llmJSON, searchJSON, metadataJSON)
	if err != nil {
		return nil, err
	}

	return r.Get(ctx)
}

// GetFeatures returns only the feature-flag section of the settings.
func (r *SettingsRepository) GetFeatures(ctx context.Context) (*FeatureFlags, error) {
	settings, err := r.Get(ctx)
	if err != nil {
		return nil, err
	}
	return &settings.Features, nil
}

// UpdateFeatures overwrites the feature flags. No WHERE clause: the table
// is single-row by construction (see Get/createDefault).
func (r *SettingsRepository) UpdateFeatures(ctx context.Context, features *FeatureFlags) error {
	featuresJSON, _ := json.Marshal(features)
	_, err := r.db.ExecContext(ctx, `UPDATE platform_settings SET features = $1, updated_at = NOW()`, featuresJSON)
	return err
}

// DiscoverConfigRepository manages discover-feed categories and sources.
type DiscoverConfigRepository struct {
	db *sql.DB
}

func NewDiscoverConfigRepository(db *sql.DB) *DiscoverConfigRepository {
	return &DiscoverConfigRepository{db: db}
}

// ListCategories returns all categories in display order, seeding the
// built-in defaults on first use when the table is empty.
func (r *DiscoverConfigRepository) ListCategories(ctx context.Context) ([]DiscoverCategory, error) {
	query := `SELECT id, name, name_ru, icon, color, keywords, regions, is_active, sort_order, created_at, updated_at
		FROM discover_categories ORDER BY sort_order ASC, name ASC`

	rows, err := r.db.QueryContext(ctx, query)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var categories []DiscoverCategory
	for rows.Next() {
		var c DiscoverCategory
		var keywordsJSON, regionsJSON []byte
		// FIX: "&regionsJSON" had been mangled to "®ionsJSON" (the "&reg"
		// prefix was swallowed by an HTML-entity pass); restored so the
		// Scan call compiles and the regions column is actually read.
		if err := rows.Scan(&c.ID, &c.Name, &c.NameRu, &c.Icon, &c.Color, &keywordsJSON, &regionsJSON, &c.IsActive, &c.SortOrder, &c.CreatedAt, &c.UpdatedAt); err != nil {
			return nil, err
		}
json.Unmarshal(keywordsJSON, &c.Keywords) + json.Unmarshal(regionsJSON, &c.Regions) + categories = append(categories, c) + } + + if len(categories) == 0 { + return r.seedDefaultCategories(ctx) + } + + return categories, nil +} + +func (r *DiscoverConfigRepository) seedDefaultCategories(ctx context.Context) ([]DiscoverCategory, error) { + defaults := []DiscoverCategoryCreateRequest{ + {Name: "tech", NameRu: "Технологии", Icon: "💻", Color: "#3B82F6", Keywords: []string{"technology", "AI", "software"}, Regions: []string{"world", "russia"}}, + {Name: "finance", NameRu: "Финансы", Icon: "💰", Color: "#10B981", Keywords: []string{"finance", "economy", "stocks"}, Regions: []string{"world", "russia"}}, + {Name: "sports", NameRu: "Спорт", Icon: "⚽", Color: "#F59E0B", Keywords: []string{"sports", "football", "hockey"}, Regions: []string{"world", "russia"}}, + {Name: "politics", NameRu: "Политика", Icon: "🏛️", Color: "#6366F1", Keywords: []string{"politics", "government"}, Regions: []string{"world", "russia"}}, + {Name: "science", NameRu: "Наука", Icon: "🔬", Color: "#8B5CF6", Keywords: []string{"science", "research"}, Regions: []string{"world", "russia"}}, + {Name: "health", NameRu: "Здоровье", Icon: "🏥", Color: "#EC4899", Keywords: []string{"health", "medicine"}, Regions: []string{"world", "russia"}}, + {Name: "entertainment", NameRu: "Развлечения", Icon: "🎬", Color: "#F97316", Keywords: []string{"entertainment", "movies"}, Regions: []string{"world", "russia"}}, + {Name: "world", NameRu: "В мире", Icon: "🌍", Color: "#14B8A6", Keywords: []string{"world", "international"}, Regions: []string{"world"}}, + {Name: "business", NameRu: "Бизнес", Icon: "📊", Color: "#6B7280", Keywords: []string{"business", "startups"}, Regions: []string{"world", "russia"}}, + {Name: "culture", NameRu: "Культура", Icon: "🎭", Color: "#A855F7", Keywords: []string{"culture", "art"}, Regions: []string{"world", "russia"}}, + } + + var categories []DiscoverCategory + for i, d := range defaults { + c, err := 
r.createCategoryInternal(ctx, &d, i) + if err != nil { + continue + } + categories = append(categories, *c) + } + + return categories, nil +} + +func (r *DiscoverConfigRepository) createCategoryInternal(ctx context.Context, req *DiscoverCategoryCreateRequest, sortOrder int) (*DiscoverCategory, error) { + id := uuid.New().String() + keywordsJSON, _ := json.Marshal(req.Keywords) + regionsJSON, _ := json.Marshal(req.Regions) + + query := `INSERT INTO discover_categories (id, name, name_ru, icon, color, keywords, regions, is_active, sort_order, created_at, updated_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, true, $8, NOW(), NOW()) + RETURNING id, name, name_ru, icon, color, keywords, regions, is_active, sort_order, created_at, updated_at` + + var c DiscoverCategory + var retKeywords, retRegions []byte + err := r.db.QueryRowContext(ctx, query, id, req.Name, req.NameRu, req.Icon, req.Color, keywordsJSON, regionsJSON, sortOrder). + Scan(&c.ID, &c.Name, &c.NameRu, &c.Icon, &c.Color, &retKeywords, &retRegions, &c.IsActive, &c.SortOrder, &c.CreatedAt, &c.UpdatedAt) + if err != nil { + return nil, err + } + + json.Unmarshal(retKeywords, &c.Keywords) + json.Unmarshal(retRegions, &c.Regions) + + return &c, nil +} + +func (r *DiscoverConfigRepository) CreateCategory(ctx context.Context, req *DiscoverCategoryCreateRequest) (*DiscoverCategory, error) { + var maxOrder int + r.db.QueryRowContext(ctx, `SELECT COALESCE(MAX(sort_order), 0) FROM discover_categories`).Scan(&maxOrder) + return r.createCategoryInternal(ctx, req, maxOrder+1) +} + +func (r *DiscoverConfigRepository) UpdateCategory(ctx context.Context, id string, req *DiscoverCategoryUpdateRequest) (*DiscoverCategory, error) { + sets := []string{} + args := []interface{}{} + argIndex := 1 + + if req.Name != nil { + sets = append(sets, fmt.Sprintf("name = $%d", argIndex)) + args = append(args, *req.Name) + argIndex++ + } + if req.NameRu != nil { + sets = append(sets, fmt.Sprintf("name_ru = $%d", argIndex)) + args = append(args, 
*req.NameRu)
		argIndex++
	}
	if req.Icon != nil {
		sets = append(sets, fmt.Sprintf("icon = $%d", argIndex))
		args = append(args, *req.Icon)
		argIndex++
	}
	if req.Color != nil {
		sets = append(sets, fmt.Sprintf("color = $%d", argIndex))
		args = append(args, *req.Color)
		argIndex++
	}
	if req.Keywords != nil {
		keywordsJSON, _ := json.Marshal(*req.Keywords)
		sets = append(sets, fmt.Sprintf("keywords = $%d", argIndex))
		args = append(args, keywordsJSON)
		argIndex++
	}
	if req.Regions != nil {
		regionsJSON, _ := json.Marshal(*req.Regions)
		sets = append(sets, fmt.Sprintf("regions = $%d", argIndex))
		args = append(args, regionsJSON)
		argIndex++
	}
	if req.IsActive != nil {
		sets = append(sets, fmt.Sprintf("is_active = $%d", argIndex))
		args = append(args, *req.IsActive)
		argIndex++
	}
	if req.SortOrder != nil {
		sets = append(sets, fmt.Sprintf("sort_order = $%d", argIndex))
		args = append(args, *req.SortOrder)
		argIndex++
	}

	// No fields supplied: nothing to update, return current state.
	if len(sets) == 0 {
		return r.getCategoryByID(ctx, id)
	}

	sets = append(sets, "updated_at = NOW()")
	args = append(args, id)

	query := fmt.Sprintf(`UPDATE discover_categories SET %s WHERE id = $%d RETURNING id`, strings.Join(sets, ", "), argIndex)

	var returnedID string
	if err := r.db.QueryRowContext(ctx, query, args...).Scan(&returnedID); err != nil {
		return nil, err
	}

	return r.getCategoryByID(ctx, id)
}

// getCategoryByID loads a single category row by primary key.
func (r *DiscoverConfigRepository) getCategoryByID(ctx context.Context, id string) (*DiscoverCategory, error) {
	query := `SELECT id, name, name_ru, icon, color, keywords, regions, is_active, sort_order, created_at, updated_at
		FROM discover_categories WHERE id = $1`

	var c DiscoverCategory
	var keywordsJSON, regionsJSON []byte
	// FIX: "&regionsJSON" had been mangled to "®ionsJSON" (the "&reg"
	// prefix was swallowed by an HTML-entity pass); restored so the Scan
	// call compiles and the regions column is actually read.
	err := r.db.QueryRowContext(ctx, query, id).Scan(&c.ID, &c.Name, &c.NameRu, &c.Icon, &c.Color, &keywordsJSON, &regionsJSON, &c.IsActive, &c.SortOrder, &c.CreatedAt, &c.UpdatedAt)
	if err != nil {
		return nil, err
	}

	json.Unmarshal(keywordsJSON,
&c.Keywords) + json.Unmarshal(regionsJSON, &c.Regions) + + return &c, nil +} + +func (r *DiscoverConfigRepository) DeleteCategory(ctx context.Context, id string) error { + _, err := r.db.ExecContext(ctx, `DELETE FROM discover_categories WHERE id = $1`, id) + return err +} + +func (r *DiscoverConfigRepository) ReorderCategories(ctx context.Context, order []string) error { + tx, err := r.db.BeginTx(ctx, nil) + if err != nil { + return err + } + defer tx.Rollback() + + for i, id := range order { + if _, err := tx.ExecContext(ctx, `UPDATE discover_categories SET sort_order = $1 WHERE id = $2`, i, id); err != nil { + return err + } + } + + return tx.Commit() +} + +func (r *DiscoverConfigRepository) ListSources(ctx context.Context) ([]DiscoverSource, error) { + query := `SELECT id, name, url, logo_url, categories, trust_score, is_active, description, created_at, updated_at + FROM discover_sources ORDER BY trust_score DESC, name ASC` + + rows, err := r.db.QueryContext(ctx, query) + if err != nil { + return nil, err + } + defer rows.Close() + + var sources []DiscoverSource + for rows.Next() { + var s DiscoverSource + var logoURL, description sql.NullString + var categoriesJSON []byte + if err := rows.Scan(&s.ID, &s.Name, &s.URL, &logoURL, &categoriesJSON, &s.TrustScore, &s.IsActive, &description, &s.CreatedAt, &s.UpdatedAt); err != nil { + return nil, err + } + if logoURL.Valid { + s.LogoURL = logoURL.String + } + if description.Valid { + s.Description = description.String + } + json.Unmarshal(categoriesJSON, &s.Categories) + sources = append(sources, s) + } + + return sources, nil +} + +func (r *DiscoverConfigRepository) CreateSource(ctx context.Context, req *DiscoverSourceCreateRequest) (*DiscoverSource, error) { + id := uuid.New().String() + categoriesJSON, _ := json.Marshal(req.Categories) + + query := `INSERT INTO discover_sources (id, name, url, logo_url, categories, trust_score, is_active, description, created_at, updated_at) + VALUES ($1, $2, $3, $4, $5, $6, true, 
$7, NOW(), NOW()) + RETURNING id, name, url, logo_url, categories, trust_score, is_active, description, created_at, updated_at` + + var s DiscoverSource + var logoURL, description sql.NullString + var retCategories []byte + + err := r.db.QueryRowContext(ctx, query, id, req.Name, req.URL, req.LogoURL, categoriesJSON, req.TrustScore, req.Description). + Scan(&s.ID, &s.Name, &s.URL, &logoURL, &retCategories, &s.TrustScore, &s.IsActive, &description, &s.CreatedAt, &s.UpdatedAt) + if err != nil { + return nil, err + } + + if logoURL.Valid { + s.LogoURL = logoURL.String + } + if description.Valid { + s.Description = description.String + } + json.Unmarshal(retCategories, &s.Categories) + + return &s, nil +} + +func (r *DiscoverConfigRepository) DeleteSource(ctx context.Context, id string) error { + _, err := r.db.ExecContext(ctx, `DELETE FROM discover_sources WHERE id = $1`, id) + return err +} + +type AuditRepository struct { + db *sql.DB +} + +func NewAuditRepository(db *sql.DB) *AuditRepository { + return &AuditRepository{db: db} +} + +func (r *AuditRepository) Create(ctx context.Context, log *AuditLog) error { + id := uuid.New().String() + detailsJSON, _ := json.Marshal(log.Details) + + query := `INSERT INTO audit_logs (id, user_id, user_email, action, resource, resource_id, details, ip_address, user_agent, created_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, NOW())` + + _, err := r.db.ExecContext(ctx, query, id, log.UserID, log.UserEmail, log.Action, log.Resource, log.ResourceID, detailsJSON, log.IPAddress, log.UserAgent) + return err +} + +func (r *AuditRepository) List(ctx context.Context, page, perPage int, action, resource string) ([]AuditLog, int, error) { + offset := (page - 1) * perPage + + countQuery := `SELECT COUNT(*) FROM audit_logs WHERE 1=1` + listQuery := `SELECT id, user_id, user_email, action, resource, resource_id, details, ip_address, user_agent, created_at + FROM audit_logs WHERE 1=1` + + args := []interface{}{} + argIndex := 1 + + if action 
!= "" { + condition := fmt.Sprintf(` AND action = $%d`, argIndex) + countQuery += condition + listQuery += condition + args = append(args, action) + argIndex++ + } + if resource != "" { + condition := fmt.Sprintf(` AND resource = $%d`, argIndex) + countQuery += condition + listQuery += condition + args = append(args, resource) + argIndex++ + } + + listQuery += fmt.Sprintf(` ORDER BY created_at DESC LIMIT $%d OFFSET $%d`, argIndex, argIndex+1) + args = append(args, perPage, offset) + + var total int + countArgs := args[:len(args)-2] + if len(countArgs) == 0 { + countArgs = nil + } + if err := r.db.QueryRowContext(ctx, countQuery, countArgs...).Scan(&total); err != nil { + return nil, 0, err + } + + rows, err := r.db.QueryContext(ctx, listQuery, args...) + if err != nil { + return nil, 0, err + } + defer rows.Close() + + var logs []AuditLog + for rows.Next() { + var l AuditLog + var resourceID, ipAddress, userAgent sql.NullString + var detailsJSON []byte + + if err := rows.Scan(&l.ID, &l.UserID, &l.UserEmail, &l.Action, &l.Resource, &resourceID, &detailsJSON, &ipAddress, &userAgent, &l.CreatedAt); err != nil { + return nil, 0, err + } + + if resourceID.Valid { + l.ResourceID = resourceID.String + } + if ipAddress.Valid { + l.IPAddress = ipAddress.String + } + if userAgent.Valid { + l.UserAgent = userAgent.String + } + if detailsJSON != nil { + json.Unmarshal(detailsJSON, &l.Details) + } + + logs = append(logs, l) + } + + return logs, total, nil +} + +func generateSlug(title string) string { + slug := strings.ToLower(title) + slug = strings.ReplaceAll(slug, " ", "-") + slug = strings.Map(func(r rune) rune { + if (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || r == '-' || (r >= 0x0400 && r <= 0x04FF) { + return r + } + return -1 + }, slug) + + for strings.Contains(slug, "--") { + slug = strings.ReplaceAll(slug, "--", "-") + } + slug = strings.Trim(slug, "-") + + if len(slug) > 100 { + slug = slug[:100] + } + + return slug + "-" + uuid.New().String()[:8] +} diff 
--git a/backend/internal/admin/types.go b/backend/internal/admin/types.go new file mode 100644 index 0000000..05b0fd1 --- /dev/null +++ b/backend/internal/admin/types.go @@ -0,0 +1,256 @@ +package admin + +import ( + "time" +) + +type UserRole string + +const ( + RoleUser UserRole = "user" + RoleAdmin UserRole = "admin" +) + +type UserTier string + +const ( + TierFree UserTier = "free" + TierPro UserTier = "pro" + TierBusiness UserTier = "business" +) + +type User struct { + ID string `json:"id"` + Email string `json:"email"` + PasswordHash string `json:"-"` + DisplayName string `json:"displayName"` + AvatarURL string `json:"avatarUrl,omitempty"` + Role UserRole `json:"role"` + Tier UserTier `json:"tier"` + IsActive bool `json:"isActive"` + LastLoginAt *time.Time `json:"lastLoginAt,omitempty"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` +} + +type UserCreateRequest struct { + Email string `json:"email"` + Password string `json:"password"` + DisplayName string `json:"displayName"` + Role UserRole `json:"role"` + Tier UserTier `json:"tier"` +} + +type UserUpdateRequest struct { + Email *string `json:"email,omitempty"` + DisplayName *string `json:"displayName,omitempty"` + Role *UserRole `json:"role,omitempty"` + Tier *UserTier `json:"tier,omitempty"` + IsActive *bool `json:"isActive,omitempty"` +} + +type UserListResponse struct { + Users []User `json:"users"` + Total int `json:"total"` + Page int `json:"page"` + PerPage int `json:"perPage"` +} + +type Post struct { + ID string `json:"id"` + Title string `json:"title"` + Slug string `json:"slug"` + Content string `json:"content"` + Excerpt string `json:"excerpt,omitempty"` + CoverImage string `json:"coverImage,omitempty"` + AuthorID string `json:"authorId"` + AuthorName string `json:"authorName,omitempty"` + Category string `json:"category"` + Tags []string `json:"tags"` + Status string `json:"status"` + ViewCount int `json:"viewCount"` + PublishedAt *time.Time 
`json:"publishedAt,omitempty"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` +} + +type PostStatus string + +const ( + PostStatusDraft PostStatus = "draft" + PostStatusPublished PostStatus = "published" + PostStatusArchived PostStatus = "archived" +) + +type PostCreateRequest struct { + Title string `json:"title"` + Content string `json:"content"` + Excerpt string `json:"excerpt,omitempty"` + CoverImage string `json:"coverImage,omitempty"` + Category string `json:"category"` + Tags []string `json:"tags,omitempty"` + Status string `json:"status"` +} + +type PostUpdateRequest struct { + Title *string `json:"title,omitempty"` + Content *string `json:"content,omitempty"` + Excerpt *string `json:"excerpt,omitempty"` + CoverImage *string `json:"coverImage,omitempty"` + Category *string `json:"category,omitempty"` + Tags *[]string `json:"tags,omitempty"` + Status *string `json:"status,omitempty"` +} + +type PostListResponse struct { + Posts []Post `json:"posts"` + Total int `json:"total"` + Page int `json:"page"` + PerPage int `json:"perPage"` +} + +type PlatformSettings struct { + ID string `json:"id"` + SiteName string `json:"siteName"` + SiteURL string `json:"siteUrl"` + LogoURL string `json:"logoUrl,omitempty"` + FaviconURL string `json:"faviconUrl,omitempty"` + Description string `json:"description,omitempty"` + SupportEmail string `json:"supportEmail,omitempty"` + Features FeatureFlags `json:"features"` + LLMSettings LLMSettings `json:"llmSettings"` + SearchSettings SearchSettings `json:"searchSettings"` + Metadata map[string]interface{} `json:"metadata,omitempty"` + UpdatedAt time.Time `json:"updatedAt"` +} + +type FeatureFlags struct { + EnableRegistration bool `json:"enableRegistration"` + EnableDiscover bool `json:"enableDiscover"` + EnableFinance bool `json:"enableFinance"` + EnableLearning bool `json:"enableLearning"` + EnableTravel bool `json:"enableTravel"` + EnableMedicine bool `json:"enableMedicine"` + EnableFileUploads 
bool `json:"enableFileUploads"` + MaintenanceMode bool `json:"maintenanceMode"` +} + +type LLMSettings struct { + DefaultProvider string `json:"defaultProvider"` + DefaultModel string `json:"defaultModel"` + MaxTokens int `json:"maxTokens"` + Temperature float64 `json:"temperature"` + APIKeys map[string]string `json:"-"` +} + +type SearchSettings struct { + DefaultEngine string `json:"defaultEngine"` + SafeSearch bool `json:"safeSearch"` + MaxResults int `json:"maxResults"` + EnabledCategories []string `json:"enabledCategories"` +} + +type DiscoverCategory struct { + ID string `json:"id"` + Name string `json:"name"` + NameRu string `json:"nameRu"` + Icon string `json:"icon"` + Color string `json:"color"` + Keywords []string `json:"keywords"` + Regions []string `json:"regions"` + IsActive bool `json:"isActive"` + SortOrder int `json:"sortOrder"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` +} + +type DiscoverCategoryCreateRequest struct { + Name string `json:"name"` + NameRu string `json:"nameRu"` + Icon string `json:"icon"` + Color string `json:"color"` + Keywords []string `json:"keywords"` + Regions []string `json:"regions"` +} + +type DiscoverCategoryUpdateRequest struct { + Name *string `json:"name,omitempty"` + NameRu *string `json:"nameRu,omitempty"` + Icon *string `json:"icon,omitempty"` + Color *string `json:"color,omitempty"` + Keywords *[]string `json:"keywords,omitempty"` + Regions *[]string `json:"regions,omitempty"` + IsActive *bool `json:"isActive,omitempty"` + SortOrder *int `json:"sortOrder,omitempty"` +} + +type DiscoverSource struct { + ID string `json:"id"` + Name string `json:"name"` + URL string `json:"url"` + LogoURL string `json:"logoUrl,omitempty"` + Categories []string `json:"categories"` + TrustScore float64 `json:"trustScore"` + IsActive bool `json:"isActive"` + Description string `json:"description,omitempty"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` +} + +type 
DiscoverSourceCreateRequest struct { + Name string `json:"name"` + URL string `json:"url"` + LogoURL string `json:"logoUrl,omitempty"` + Categories []string `json:"categories"` + TrustScore float64 `json:"trustScore"` + Description string `json:"description,omitempty"` +} + +type DashboardStats struct { + TotalUsers int `json:"totalUsers"` + ActiveUsers int `json:"activeUsers"` + TotalPosts int `json:"totalPosts"` + PublishedPosts int `json:"publishedPosts"` + TotalSearches int `json:"totalSearches"` + TodaySearches int `json:"todaySearches"` + StorageUsedMB int `json:"storageUsedMb"` + StorageLimitMB int `json:"storageLimitMb"` +} + +type AuditLog struct { + ID string `json:"id"` + UserID string `json:"userId"` + UserEmail string `json:"userEmail"` + Action string `json:"action"` + Resource string `json:"resource"` + ResourceID string `json:"resourceId,omitempty"` + Details map[string]interface{} `json:"details,omitempty"` + IPAddress string `json:"ipAddress,omitempty"` + UserAgent string `json:"userAgent,omitempty"` + CreatedAt time.Time `json:"createdAt"` +} + +type Connector struct { + ID string `json:"id"` + Type string `json:"type"` + Name string `json:"name"` + Config map[string]interface{} `json:"config"` + IsActive bool `json:"isActive"` + LastSyncAt *time.Time `json:"lastSyncAt,omitempty"` + Status string `json:"status"` + ErrorMsg string `json:"errorMsg,omitempty"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` +} + +type ConnectorType string + +const ( + ConnectorTypeOpenAI ConnectorType = "openai" + ConnectorTypeAnthropic ConnectorType = "anthropic" + ConnectorTypeGemini ConnectorType = "gemini" + ConnectorTypeTimeweb ConnectorType = "timeweb" + ConnectorTypeCustomLLM ConnectorType = "custom_llm" + ConnectorTypeS3 ConnectorType = "s3" + ConnectorTypeMinio ConnectorType = "minio" +) diff --git a/backend/internal/auth/repository.go b/backend/internal/auth/repository.go new file mode 100644 index 0000000..02932f7 --- 
/dev/null +++ b/backend/internal/auth/repository.go @@ -0,0 +1,455 @@ +package auth + +import ( + "context" + "crypto/rand" + "database/sql" + "encoding/hex" + "errors" + "time" + + "golang.org/x/crypto/bcrypt" +) + +var ( + ErrUserNotFound = errors.New("user not found") + ErrEmailExists = errors.New("email already exists") + ErrInvalidPassword = errors.New("invalid password") + ErrTokenExpired = errors.New("token expired") + ErrTokenInvalid = errors.New("invalid token") + ErrWeakPassword = errors.New("password too weak") +) + +type Repository struct { + db *sql.DB +} + +func NewRepository(db *sql.DB) *Repository { + return &Repository{db: db} +} + +func (r *Repository) RunMigrations(ctx context.Context) error { + migrations := []string{ + `CREATE TABLE IF NOT EXISTS auth_users ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + email VARCHAR(255) UNIQUE NOT NULL, + password_hash VARCHAR(255), + name VARCHAR(255) NOT NULL, + avatar TEXT, + role VARCHAR(50) DEFAULT 'user', + tier VARCHAR(50) DEFAULT 'free', + email_verified BOOLEAN DEFAULT FALSE, + provider VARCHAR(50) DEFAULT 'local', + provider_id VARCHAR(255), + last_login_at TIMESTAMPTZ, + created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_auth_users_email ON auth_users(email)`, + `CREATE INDEX IF NOT EXISTS idx_auth_users_provider ON auth_users(provider, provider_id)`, + + `CREATE TABLE IF NOT EXISTS refresh_tokens ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL REFERENCES auth_users(id) ON DELETE CASCADE, + token VARCHAR(255) UNIQUE NOT NULL, + user_agent TEXT, + ip VARCHAR(50), + expires_at TIMESTAMPTZ NOT NULL, + created_at TIMESTAMPTZ DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_refresh_tokens_user ON refresh_tokens(user_id)`, + `CREATE INDEX IF NOT EXISTS idx_refresh_tokens_token ON refresh_tokens(token)`, + + `CREATE TABLE IF NOT EXISTS password_reset_tokens ( + id UUID PRIMARY KEY DEFAULT 
gen_random_uuid(), + user_id UUID NOT NULL REFERENCES auth_users(id) ON DELETE CASCADE, + token VARCHAR(255) UNIQUE NOT NULL, + expires_at TIMESTAMPTZ NOT NULL, + used BOOLEAN DEFAULT FALSE, + created_at TIMESTAMPTZ DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_password_reset_tokens ON password_reset_tokens(token)`, + } + + for _, m := range migrations { + if _, err := r.db.ExecContext(ctx, m); err != nil { + return err + } + } + return nil +} + +func (r *Repository) CreateUser(ctx context.Context, email, password, name string) (*User, error) { + var exists bool + err := r.db.QueryRowContext(ctx, "SELECT EXISTS(SELECT 1 FROM auth_users WHERE email = $1)", email).Scan(&exists) + if err != nil { + return nil, err + } + if exists { + return nil, ErrEmailExists + } + + if len(password) < 8 { + return nil, ErrWeakPassword + } + + hash, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost) + if err != nil { + return nil, err + } + + user := &User{ + Email: email, + PasswordHash: string(hash), + Name: name, + Role: string(RoleUser), + Tier: string(TierFree), + Provider: ProviderLocal, + } + + query := ` + INSERT INTO auth_users (email, password_hash, name, role, tier, provider) + VALUES ($1, $2, $3, $4, $5, $6) + RETURNING id, created_at, updated_at + ` + err = r.db.QueryRowContext(ctx, query, + user.Email, user.PasswordHash, user.Name, user.Role, user.Tier, user.Provider, + ).Scan(&user.ID, &user.CreatedAt, &user.UpdatedAt) + + if err != nil { + return nil, err + } + + return user, nil +} + +func (r *Repository) GetUserByEmail(ctx context.Context, email string) (*User, error) { + query := ` + SELECT id, email, password_hash, name, avatar, role, tier, email_verified, + provider, provider_id, last_login_at, created_at, updated_at + FROM auth_users WHERE email = $1 + ` + + user := &User{} + var lastLogin, avatar, providerID sql.NullString + var lastLoginTime sql.NullTime + + err := r.db.QueryRowContext(ctx, query, email).Scan( + &user.ID, 
&user.Email, &user.PasswordHash, &user.Name, &avatar, + &user.Role, &user.Tier, &user.EmailVerified, &user.Provider, + &providerID, &lastLoginTime, &user.CreatedAt, &user.UpdatedAt, + ) + + if err == sql.ErrNoRows { + return nil, ErrUserNotFound + } + if err != nil { + return nil, err + } + + if avatar.Valid { + user.Avatar = avatar.String + } + if providerID.Valid { + user.ProviderID = providerID.String + } + if lastLoginTime.Valid { + user.LastLoginAt = lastLoginTime.Time + } + _ = lastLogin + + return user, nil +} + +func (r *Repository) GetUserByID(ctx context.Context, id string) (*User, error) { + query := ` + SELECT id, email, password_hash, name, avatar, role, tier, email_verified, + provider, provider_id, last_login_at, created_at, updated_at + FROM auth_users WHERE id = $1 + ` + + user := &User{} + var avatar, providerID sql.NullString + var lastLoginTime sql.NullTime + + err := r.db.QueryRowContext(ctx, query, id).Scan( + &user.ID, &user.Email, &user.PasswordHash, &user.Name, &avatar, + &user.Role, &user.Tier, &user.EmailVerified, &user.Provider, + &providerID, &lastLoginTime, &user.CreatedAt, &user.UpdatedAt, + ) + + if err == sql.ErrNoRows { + return nil, ErrUserNotFound + } + if err != nil { + return nil, err + } + + if avatar.Valid { + user.Avatar = avatar.String + } + if providerID.Valid { + user.ProviderID = providerID.String + } + if lastLoginTime.Valid { + user.LastLoginAt = lastLoginTime.Time + } + + return user, nil +} + +func (r *Repository) ValidatePassword(ctx context.Context, email, password string) (*User, error) { + user, err := r.GetUserByEmail(ctx, email) + if err != nil { + return nil, err + } + + if err := bcrypt.CompareHashAndPassword([]byte(user.PasswordHash), []byte(password)); err != nil { + return nil, ErrInvalidPassword + } + + r.db.ExecContext(ctx, "UPDATE auth_users SET last_login_at = NOW() WHERE id = $1", user.ID) + + return user, nil +} + +func (r *Repository) UpdatePassword(ctx context.Context, userID, newPassword string) 
error { + if len(newPassword) < 8 { + return ErrWeakPassword + } + + hash, err := bcrypt.GenerateFromPassword([]byte(newPassword), bcrypt.DefaultCost) + if err != nil { + return err + } + + _, err = r.db.ExecContext(ctx, + "UPDATE auth_users SET password_hash = $2, updated_at = NOW() WHERE id = $1", + userID, string(hash), + ) + return err +} + +func (r *Repository) UpdateProfile(ctx context.Context, userID, name, avatar string) error { + _, err := r.db.ExecContext(ctx, + "UPDATE auth_users SET name = $2, avatar = $3, updated_at = NOW() WHERE id = $1", + userID, name, avatar, + ) + return err +} + +func (r *Repository) UpdateTier(ctx context.Context, userID string, tier UserTier) error { + _, err := r.db.ExecContext(ctx, + "UPDATE auth_users SET tier = $2, updated_at = NOW() WHERE id = $1", + userID, string(tier), + ) + return err +} + +func (r *Repository) UpdateRole(ctx context.Context, userID string, role UserRole) error { + _, err := r.db.ExecContext(ctx, + "UPDATE auth_users SET role = $2, updated_at = NOW() WHERE id = $1", + userID, string(role), + ) + return err +} + +func (r *Repository) CreateRefreshToken(ctx context.Context, userID, userAgent, ip string, duration time.Duration) (*RefreshToken, error) { + token := generateSecureToken(32) + + rt := &RefreshToken{ + UserID: userID, + Token: token, + UserAgent: userAgent, + IP: ip, + ExpiresAt: time.Now().Add(duration), + } + + query := ` + INSERT INTO refresh_tokens (user_id, token, user_agent, ip, expires_at) + VALUES ($1, $2, $3, $4, $5) + RETURNING id, created_at + ` + err := r.db.QueryRowContext(ctx, query, + rt.UserID, rt.Token, rt.UserAgent, rt.IP, rt.ExpiresAt, + ).Scan(&rt.ID, &rt.CreatedAt) + + if err != nil { + return nil, err + } + + return rt, nil +} + +func (r *Repository) ValidateRefreshToken(ctx context.Context, token string) (*RefreshToken, error) { + query := ` + SELECT id, user_id, token, user_agent, ip, expires_at, created_at + FROM refresh_tokens WHERE token = $1 + ` + + rt := 
&RefreshToken{} + err := r.db.QueryRowContext(ctx, query, token).Scan( + &rt.ID, &rt.UserID, &rt.Token, &rt.UserAgent, &rt.IP, &rt.ExpiresAt, &rt.CreatedAt, + ) + + if err == sql.ErrNoRows { + return nil, ErrTokenInvalid + } + if err != nil { + return nil, err + } + + if time.Now().After(rt.ExpiresAt) { + r.db.ExecContext(ctx, "DELETE FROM refresh_tokens WHERE id = $1", rt.ID) + return nil, ErrTokenExpired + } + + return rt, nil +} + +func (r *Repository) RevokeRefreshToken(ctx context.Context, token string) error { + _, err := r.db.ExecContext(ctx, "DELETE FROM refresh_tokens WHERE token = $1", token) + return err +} + +func (r *Repository) RevokeAllRefreshTokens(ctx context.Context, userID string) error { + _, err := r.db.ExecContext(ctx, "DELETE FROM refresh_tokens WHERE user_id = $1", userID) + return err +} + +func (r *Repository) CreatePasswordResetToken(ctx context.Context, userID string) (*PasswordResetToken, error) { + token := generateSecureToken(32) + + prt := &PasswordResetToken{ + UserID: userID, + Token: token, + ExpiresAt: time.Now().Add(1 * time.Hour), + } + + query := ` + INSERT INTO password_reset_tokens (user_id, token, expires_at) + VALUES ($1, $2, $3) + RETURNING id, created_at + ` + err := r.db.QueryRowContext(ctx, query, + prt.UserID, prt.Token, prt.ExpiresAt, + ).Scan(&prt.ID, &prt.CreatedAt) + + if err != nil { + return nil, err + } + + return prt, nil +} + +func (r *Repository) ValidatePasswordResetToken(ctx context.Context, token string) (*PasswordResetToken, error) { + query := ` + SELECT id, user_id, token, expires_at, used, created_at + FROM password_reset_tokens WHERE token = $1 + ` + + prt := &PasswordResetToken{} + err := r.db.QueryRowContext(ctx, query, token).Scan( + &prt.ID, &prt.UserID, &prt.Token, &prt.ExpiresAt, &prt.Used, &prt.CreatedAt, + ) + + if err == sql.ErrNoRows { + return nil, ErrTokenInvalid + } + if err != nil { + return nil, err + } + + if prt.Used { + return nil, ErrTokenInvalid + } + + if 
time.Now().After(prt.ExpiresAt) { + return nil, ErrTokenExpired + } + + return prt, nil +} + +func (r *Repository) MarkPasswordResetTokenUsed(ctx context.Context, tokenID string) error { + _, err := r.db.ExecContext(ctx, "UPDATE password_reset_tokens SET used = TRUE WHERE id = $1", tokenID) + return err +} + +func (r *Repository) CreateOrUpdateOAuthUser(ctx context.Context, provider, providerID, email, name, avatar string) (*User, error) { + query := ` + SELECT id FROM auth_users WHERE provider = $1 AND provider_id = $2 + ` + var existingID string + err := r.db.QueryRowContext(ctx, query, provider, providerID).Scan(&existingID) + + if err == sql.ErrNoRows { + var emailExists bool + r.db.QueryRowContext(ctx, "SELECT EXISTS(SELECT 1 FROM auth_users WHERE email = $1)", email).Scan(&emailExists) + if emailExists { + _, err := r.db.ExecContext(ctx, + "UPDATE auth_users SET provider = $1, provider_id = $2, last_login_at = NOW() WHERE email = $3", + provider, providerID, email, + ) + if err != nil { + return nil, err + } + return r.GetUserByEmail(ctx, email) + } + + user := &User{ + Email: email, + Name: name, + Avatar: avatar, + Role: string(RoleUser), + Tier: string(TierFree), + Provider: provider, + ProviderID: providerID, + EmailVerified: true, + } + + insertQuery := ` + INSERT INTO auth_users (email, name, avatar, role, tier, provider, provider_id, email_verified) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8) + RETURNING id, created_at, updated_at + ` + err = r.db.QueryRowContext(ctx, insertQuery, + user.Email, user.Name, user.Avatar, user.Role, user.Tier, user.Provider, user.ProviderID, user.EmailVerified, + ).Scan(&user.ID, &user.CreatedAt, &user.UpdatedAt) + + if err != nil { + return nil, err + } + + return user, nil + } + + if err != nil { + return nil, err + } + + _, err = r.db.ExecContext(ctx, + "UPDATE auth_users SET name = $2, avatar = $3, last_login_at = NOW() WHERE id = $1", + existingID, name, avatar, + ) + if err != nil { + return nil, err + } + + return 
r.GetUserByID(ctx, existingID) +} + +func (r *Repository) CleanupExpiredTokens(ctx context.Context) error { + _, err := r.db.ExecContext(ctx, "DELETE FROM refresh_tokens WHERE expires_at < NOW()") + if err != nil { + return err + } + _, err = r.db.ExecContext(ctx, "DELETE FROM password_reset_tokens WHERE expires_at < NOW()") + return err +} + +func generateSecureToken(length int) string { + b := make([]byte, length) + rand.Read(b) + return hex.EncodeToString(b) +} diff --git a/backend/internal/auth/types.go b/backend/internal/auth/types.go new file mode 100644 index 0000000..fa32b54 --- /dev/null +++ b/backend/internal/auth/types.go @@ -0,0 +1,118 @@ +package auth + +import ( + "time" +) + +type User struct { + ID string `json:"id"` + Email string `json:"email"` + PasswordHash string `json:"-"` + Name string `json:"name"` + Avatar string `json:"avatar,omitempty"` + Role string `json:"role"` + Tier string `json:"tier"` + EmailVerified bool `json:"emailVerified"` + Provider string `json:"provider"` + ProviderID string `json:"providerId,omitempty"` + LastLoginAt time.Time `json:"lastLoginAt,omitempty"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` +} + +type RefreshToken struct { + ID string `json:"id"` + UserID string `json:"userId"` + Token string `json:"token"` + UserAgent string `json:"userAgent"` + IP string `json:"ip"` + ExpiresAt time.Time `json:"expiresAt"` + CreatedAt time.Time `json:"createdAt"` +} + +type RegisterRequest struct { + Email string `json:"email"` + Password string `json:"password"` + Name string `json:"name"` +} + +type LoginRequest struct { + Email string `json:"email"` + Password string `json:"password"` +} + +type TokenResponse struct { + AccessToken string `json:"accessToken"` + RefreshToken string `json:"refreshToken"` + ExpiresIn int `json:"expiresIn"` + TokenType string `json:"tokenType"` + User *User `json:"user"` +} + +type RefreshRequest struct { + RefreshToken string `json:"refreshToken"` +} + 
+type ChangePasswordRequest struct { + CurrentPassword string `json:"currentPassword"` + NewPassword string `json:"newPassword"` +} + +type ResetPasswordRequest struct { + Email string `json:"email"` +} + +type ResetPasswordConfirm struct { + Token string `json:"token"` + NewPassword string `json:"newPassword"` +} + +type PasswordResetToken struct { + ID string `json:"id"` + UserID string `json:"userId"` + Token string `json:"token"` + ExpiresAt time.Time `json:"expiresAt"` + Used bool `json:"used"` + CreatedAt time.Time `json:"createdAt"` +} + +type OAuthProvider string + +const ( + ProviderLocal = "local" + ProviderGoogle = "google" + ProviderGitHub = "github" + ProviderYandex = "yandex" +) + +type UserRole string + +const ( + RoleUser UserRole = "user" + RoleAdmin UserRole = "admin" +) + +type UserTier string + +const ( + TierFree UserTier = "free" + TierPro UserTier = "pro" + TierBusiness UserTier = "business" +) + +type JWTClaims struct { + UserID string `json:"userId"` + Email string `json:"email"` + Role string `json:"role"` + Tier string `json:"tier"` +} + +type ValidateResponse struct { + Valid bool `json:"valid"` + User *User `json:"user,omitempty"` +} + +type UpdateProfileRequest struct { + Name string `json:"name"` + Avatar string `json:"avatar"` +} diff --git a/backend/internal/computer/browser/browser.go b/backend/internal/computer/browser/browser.go deleted file mode 100644 index 31849a5..0000000 --- a/backend/internal/computer/browser/browser.go +++ /dev/null @@ -1,587 +0,0 @@ -package browser - -import ( - "context" - "encoding/base64" - "encoding/json" - "errors" - "fmt" - "io" - "net/http" - "os" - "os/exec" - "strings" - "sync" - "time" - - "github.com/google/uuid" -) - -type PlaywrightBrowser struct { - cmd *exec.Cmd - serverURL string - client *http.Client - sessions map[string]*BrowserSession - mu sync.RWMutex - config Config -} - -type Config struct { - PlaywrightServerURL string - DefaultTimeout time.Duration - Headless bool - UserAgent 
string - ProxyURL string - ScreenshotsDir string - RecordingsDir string -} - -type BrowserSession struct { - ID string - ContextID string - PageID string - CreatedAt time.Time - LastAction time.Time - Screenshots []string - Recordings []string - Closed bool -} - -type ActionRequest struct { - SessionID string `json:"sessionId"` - Action string `json:"action"` - Params map[string]interface{} `json:"params"` -} - -type ActionResponse struct { - Success bool `json:"success"` - Data interface{} `json:"data,omitempty"` - Screenshot string `json:"screenshot,omitempty"` - Error string `json:"error,omitempty"` - PageTitle string `json:"pageTitle,omitempty"` - PageURL string `json:"pageUrl,omitempty"` -} - -func NewPlaywrightBrowser(cfg Config) *PlaywrightBrowser { - if cfg.DefaultTimeout == 0 { - cfg.DefaultTimeout = 30 * time.Second - } - if cfg.PlaywrightServerURL == "" { - cfg.PlaywrightServerURL = "http://localhost:3050" - } - if cfg.ScreenshotsDir == "" { - cfg.ScreenshotsDir = "/tmp/gooseek-screenshots" - } - if cfg.RecordingsDir == "" { - cfg.RecordingsDir = "/tmp/gooseek-recordings" - } - - os.MkdirAll(cfg.ScreenshotsDir, 0755) - os.MkdirAll(cfg.RecordingsDir, 0755) - - return &PlaywrightBrowser{ - serverURL: cfg.PlaywrightServerURL, - client: &http.Client{ - Timeout: cfg.DefaultTimeout, - }, - sessions: make(map[string]*BrowserSession), - config: cfg, - } -} - -func (b *PlaywrightBrowser) NewSession(ctx context.Context, opts SessionOptions) (*BrowserSession, error) { - sessionID := uuid.New().String() - - params := map[string]interface{}{ - "headless": b.config.Headless, - "sessionId": sessionID, - } - - if opts.Viewport != nil { - params["viewport"] = opts.Viewport - } - if opts.UserAgent != "" { - params["userAgent"] = opts.UserAgent - } else if b.config.UserAgent != "" { - params["userAgent"] = b.config.UserAgent - } - if opts.ProxyURL != "" { - params["proxy"] = opts.ProxyURL - } else if b.config.ProxyURL != "" { - params["proxy"] = b.config.ProxyURL - } - if 
opts.RecordVideo { - params["recordVideo"] = map[string]interface{}{ - "dir": b.config.RecordingsDir, - } - } - - resp, err := b.sendCommand(ctx, "browser.newContext", params) - if err != nil { - return nil, fmt.Errorf("failed to create browser context: %w", err) - } - - contextID, _ := resp["contextId"].(string) - pageID, _ := resp["pageId"].(string) - - session := &BrowserSession{ - ID: sessionID, - ContextID: contextID, - PageID: pageID, - CreatedAt: time.Now(), - LastAction: time.Now(), - } - - b.mu.Lock() - b.sessions[sessionID] = session - b.mu.Unlock() - - return session, nil -} - -func (b *PlaywrightBrowser) CloseSession(ctx context.Context, sessionID string) error { - b.mu.Lock() - session, ok := b.sessions[sessionID] - if !ok { - b.mu.Unlock() - return errors.New("session not found") - } - session.Closed = true - delete(b.sessions, sessionID) - b.mu.Unlock() - - _, err := b.sendCommand(ctx, "browser.closeContext", map[string]interface{}{ - "sessionId": sessionID, - }) - return err -} - -func (b *PlaywrightBrowser) Navigate(ctx context.Context, sessionID, url string, opts NavigateOptions) (*ActionResponse, error) { - params := map[string]interface{}{ - "sessionId": sessionID, - "url": url, - } - if opts.Timeout > 0 { - params["timeout"] = opts.Timeout - } - if opts.WaitUntil != "" { - params["waitUntil"] = opts.WaitUntil - } - - resp, err := b.sendCommand(ctx, "page.goto", params) - if err != nil { - return &ActionResponse{Success: false, Error: err.Error()}, err - } - - result := &ActionResponse{ - Success: true, - PageURL: getString(resp, "url"), - PageTitle: getString(resp, "title"), - } - - if opts.Screenshot { - screenshot, _ := b.Screenshot(ctx, sessionID, ScreenshotOptions{FullPage: false}) - if screenshot != nil { - result.Screenshot = screenshot.Data - } - } - - return result, nil -} - -func (b *PlaywrightBrowser) Click(ctx context.Context, sessionID, selector string, opts ClickOptions) (*ActionResponse, error) { - params := 
map[string]interface{}{ - "sessionId": sessionID, - "selector": selector, - } - if opts.Button != "" { - params["button"] = opts.Button - } - if opts.ClickCount > 0 { - params["clickCount"] = opts.ClickCount - } - if opts.Timeout > 0 { - params["timeout"] = opts.Timeout - } - if opts.Force { - params["force"] = true - } - - _, err := b.sendCommand(ctx, "page.click", params) - if err != nil { - return &ActionResponse{Success: false, Error: err.Error()}, err - } - - result := &ActionResponse{Success: true} - - if opts.WaitAfter > 0 { - time.Sleep(time.Duration(opts.WaitAfter) * time.Millisecond) - } - - if opts.Screenshot { - screenshot, _ := b.Screenshot(ctx, sessionID, ScreenshotOptions{FullPage: false}) - if screenshot != nil { - result.Screenshot = screenshot.Data - } - } - - return result, nil -} - -func (b *PlaywrightBrowser) Type(ctx context.Context, sessionID, selector, text string, opts TypeOptions) (*ActionResponse, error) { - params := map[string]interface{}{ - "sessionId": sessionID, - "selector": selector, - "text": text, - } - if opts.Delay > 0 { - params["delay"] = opts.Delay - } - if opts.Timeout > 0 { - params["timeout"] = opts.Timeout - } - if opts.Clear { - b.sendCommand(ctx, "page.fill", map[string]interface{}{ - "sessionId": sessionID, - "selector": selector, - "value": "", - }) - } - - _, err := b.sendCommand(ctx, "page.type", params) - if err != nil { - return &ActionResponse{Success: false, Error: err.Error()}, err - } - - return &ActionResponse{Success: true}, nil -} - -func (b *PlaywrightBrowser) Fill(ctx context.Context, sessionID, selector, value string) (*ActionResponse, error) { - params := map[string]interface{}{ - "sessionId": sessionID, - "selector": selector, - "value": value, - } - - _, err := b.sendCommand(ctx, "page.fill", params) - if err != nil { - return &ActionResponse{Success: false, Error: err.Error()}, err - } - - return &ActionResponse{Success: true}, nil -} - -func (b *PlaywrightBrowser) Screenshot(ctx context.Context, 
sessionID string, opts ScreenshotOptions) (*ScreenshotResult, error) { - params := map[string]interface{}{ - "sessionId": sessionID, - "fullPage": opts.FullPage, - } - if opts.Selector != "" { - params["selector"] = opts.Selector - } - if opts.Quality > 0 { - params["quality"] = opts.Quality - } - params["type"] = "png" - if opts.Format != "" { - params["type"] = opts.Format - } - - resp, err := b.sendCommand(ctx, "page.screenshot", params) - if err != nil { - return nil, err - } - - data, _ := resp["data"].(string) - - filename := fmt.Sprintf("%s/%s-%d.png", b.config.ScreenshotsDir, sessionID, time.Now().UnixNano()) - if decoded, err := base64.StdEncoding.DecodeString(data); err == nil { - os.WriteFile(filename, decoded, 0644) - } - - b.mu.Lock() - if session, ok := b.sessions[sessionID]; ok { - session.Screenshots = append(session.Screenshots, filename) - } - b.mu.Unlock() - - return &ScreenshotResult{ - Data: data, - Path: filename, - MimeType: "image/png", - }, nil -} - -func (b *PlaywrightBrowser) ExtractText(ctx context.Context, sessionID, selector string) (string, error) { - params := map[string]interface{}{ - "sessionId": sessionID, - "selector": selector, - } - - resp, err := b.sendCommand(ctx, "page.textContent", params) - if err != nil { - return "", err - } - - return getString(resp, "text"), nil -} - -func (b *PlaywrightBrowser) ExtractHTML(ctx context.Context, sessionID, selector string) (string, error) { - params := map[string]interface{}{ - "sessionId": sessionID, - "selector": selector, - } - - resp, err := b.sendCommand(ctx, "page.innerHTML", params) - if err != nil { - return "", err - } - - return getString(resp, "html"), nil -} - -func (b *PlaywrightBrowser) WaitForSelector(ctx context.Context, sessionID, selector string, opts WaitOptions) error { - params := map[string]interface{}{ - "sessionId": sessionID, - "selector": selector, - } - if opts.Timeout > 0 { - params["timeout"] = opts.Timeout - } - if opts.State != "" { - params["state"] = 
opts.State - } - - _, err := b.sendCommand(ctx, "page.waitForSelector", params) - return err -} - -func (b *PlaywrightBrowser) WaitForNavigation(ctx context.Context, sessionID string, opts WaitOptions) error { - params := map[string]interface{}{ - "sessionId": sessionID, - } - if opts.Timeout > 0 { - params["timeout"] = opts.Timeout - } - if opts.WaitUntil != "" { - params["waitUntil"] = opts.WaitUntil - } - - _, err := b.sendCommand(ctx, "page.waitForNavigation", params) - return err -} - -func (b *PlaywrightBrowser) Scroll(ctx context.Context, sessionID string, opts ScrollOptions) (*ActionResponse, error) { - script := fmt.Sprintf("window.scrollBy(%d, %d)", opts.X, opts.Y) - if opts.Selector != "" { - script = fmt.Sprintf(`document.querySelector('%s').scrollBy(%d, %d)`, opts.Selector, opts.X, opts.Y) - } - if opts.ToBottom { - script = "window.scrollTo(0, document.body.scrollHeight)" - } - if opts.ToTop { - script = "window.scrollTo(0, 0)" - } - - _, err := b.Evaluate(ctx, sessionID, script) - if err != nil { - return &ActionResponse{Success: false, Error: err.Error()}, err - } - - if opts.WaitAfter > 0 { - time.Sleep(time.Duration(opts.WaitAfter) * time.Millisecond) - } - - return &ActionResponse{Success: true}, nil -} - -func (b *PlaywrightBrowser) Evaluate(ctx context.Context, sessionID, script string) (interface{}, error) { - params := map[string]interface{}{ - "sessionId": sessionID, - "expression": script, - } - - resp, err := b.sendCommand(ctx, "page.evaluate", params) - if err != nil { - return nil, err - } - - return resp["result"], nil -} - -func (b *PlaywrightBrowser) Select(ctx context.Context, sessionID, selector string, values []string) (*ActionResponse, error) { - params := map[string]interface{}{ - "sessionId": sessionID, - "selector": selector, - "values": values, - } - - _, err := b.sendCommand(ctx, "page.selectOption", params) - if err != nil { - return &ActionResponse{Success: false, Error: err.Error()}, err - } - - return 
&ActionResponse{Success: true}, nil -} - -func (b *PlaywrightBrowser) GetPageInfo(ctx context.Context, sessionID string) (*PageInfo, error) { - params := map[string]interface{}{ - "sessionId": sessionID, - } - - resp, err := b.sendCommand(ctx, "page.info", params) - if err != nil { - return nil, err - } - - return &PageInfo{ - URL: getString(resp, "url"), - Title: getString(resp, "title"), - Content: getString(resp, "content"), - }, nil -} - -func (b *PlaywrightBrowser) PDF(ctx context.Context, sessionID string, opts PDFOptions) ([]byte, error) { - params := map[string]interface{}{ - "sessionId": sessionID, - } - if opts.Format != "" { - params["format"] = opts.Format - } - if opts.Landscape { - params["landscape"] = true - } - if opts.PrintBackground { - params["printBackground"] = true - } - - resp, err := b.sendCommand(ctx, "page.pdf", params) - if err != nil { - return nil, err - } - - data, _ := resp["data"].(string) - return base64.StdEncoding.DecodeString(data) -} - -func (b *PlaywrightBrowser) sendCommand(ctx context.Context, method string, params map[string]interface{}) (map[string]interface{}, error) { - body := map[string]interface{}{ - "method": method, - "params": params, - } - - jsonBody, err := json.Marshal(body) - if err != nil { - return nil, err - } - - req, err := http.NewRequestWithContext(ctx, "POST", b.serverURL+"/api/browser", strings.NewReader(string(jsonBody))) - if err != nil { - return nil, err - } - req.Header.Set("Content-Type", "application/json") - - resp, err := b.client.Do(req) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - respBody, err := io.ReadAll(resp.Body) - if err != nil { - return nil, err - } - - var result map[string]interface{} - if err := json.Unmarshal(respBody, &result); err != nil { - return nil, err - } - - if errMsg, ok := result["error"].(string); ok && errMsg != "" { - return result, errors.New(errMsg) - } - - return result, nil -} - -func getString(m map[string]interface{}, key string) 
string { - if v, ok := m[key].(string); ok { - return v - } - return "" -} - -type SessionOptions struct { - Headless bool - Viewport *Viewport - UserAgent string - ProxyURL string - RecordVideo bool - BlockAds bool -} - -type Viewport struct { - Width int `json:"width"` - Height int `json:"height"` -} - -type NavigateOptions struct { - Timeout int - WaitUntil string - Screenshot bool -} - -type ClickOptions struct { - Button string - ClickCount int - Timeout int - Force bool - WaitAfter int - Screenshot bool -} - -type TypeOptions struct { - Delay int - Timeout int - Clear bool -} - -type ScreenshotOptions struct { - FullPage bool - Selector string - Format string - Quality int -} - -type ScreenshotResult struct { - Data string - Path string - MimeType string -} - -type WaitOptions struct { - Timeout int - State string - WaitUntil string -} - -type ScrollOptions struct { - X int - Y int - Selector string - ToBottom bool - ToTop bool - WaitAfter int -} - -type PageInfo struct { - URL string - Title string - Content string -} - -type PDFOptions struct { - Format string - Landscape bool - PrintBackground bool -} diff --git a/backend/internal/computer/browser/server.go b/backend/internal/computer/browser/server.go deleted file mode 100644 index 300c8e8..0000000 --- a/backend/internal/computer/browser/server.go +++ /dev/null @@ -1,555 +0,0 @@ -package browser - -import ( - "context" - "encoding/json" - "fmt" - "log" - "net/http" - "sync" - "time" - - "github.com/gofiber/fiber/v2" - "github.com/gofiber/fiber/v2/middleware/cors" - "github.com/gofiber/fiber/v2/middleware/logger" -) - -type BrowserServer struct { - browser *PlaywrightBrowser - sessions map[string]*ManagedSession - mu sync.RWMutex - config ServerConfig -} - -type ServerConfig struct { - Port int - MaxSessions int - SessionTimeout time.Duration - CleanupInterval time.Duration -} - -type ManagedSession struct { - *BrowserSession - LastActive time.Time - Actions []ActionLog -} - -type ActionLog struct { - 
Action string `json:"action"` - Params string `json:"params"` - Success bool `json:"success"` - Error string `json:"error,omitempty"` - Duration int64 `json:"durationMs"` - Timestamp time.Time `json:"timestamp"` -} - -type BrowserRequest struct { - Method string `json:"method"` - Params map[string]interface{} `json:"params"` -} - -func NewBrowserServer(cfg ServerConfig) *BrowserServer { - if cfg.Port == 0 { - cfg.Port = 3050 - } - if cfg.MaxSessions == 0 { - cfg.MaxSessions = 20 - } - if cfg.SessionTimeout == 0 { - cfg.SessionTimeout = 30 * time.Minute - } - if cfg.CleanupInterval == 0 { - cfg.CleanupInterval = 5 * time.Minute - } - - return &BrowserServer{ - browser: NewPlaywrightBrowser(Config{ - DefaultTimeout: 30 * time.Second, - Headless: true, - }), - sessions: make(map[string]*ManagedSession), - config: cfg, - } -} - -func (s *BrowserServer) Start(ctx context.Context) error { - go s.cleanupLoop(ctx) - - app := fiber.New(fiber.Config{ - BodyLimit: 50 * 1024 * 1024, - ReadTimeout: 2 * time.Minute, - WriteTimeout: 2 * time.Minute, - }) - - app.Use(logger.New()) - app.Use(cors.New()) - - app.Get("/health", func(c *fiber.Ctx) error { - return c.JSON(fiber.Map{"status": "ok", "sessions": len(s.sessions)}) - }) - - app.Post("/api/browser", s.handleBrowserCommand) - - app.Post("/api/session/new", s.handleNewSession) - app.Delete("/api/session/:id", s.handleCloseSession) - app.Get("/api/session/:id", s.handleGetSession) - app.Get("/api/sessions", s.handleListSessions) - - app.Post("/api/action", s.handleAction) - - log.Printf("[BrowserServer] Starting on port %d", s.config.Port) - return app.Listen(fmt.Sprintf(":%d", s.config.Port)) -} - -func (s *BrowserServer) handleBrowserCommand(c *fiber.Ctx) error { - var req BrowserRequest - if err := c.BodyParser(&req); err != nil { - return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) - } - - ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) - defer cancel() - - sessionID, _ := 
req.Params["sessionId"].(string) - - s.mu.Lock() - if session, ok := s.sessions[sessionID]; ok { - session.LastActive = time.Now() - } - s.mu.Unlock() - - start := time.Now() - result, err := s.executeMethod(ctx, req.Method, req.Params) - - s.mu.Lock() - if session, ok := s.sessions[sessionID]; ok { - paramsJSON, _ := json.Marshal(req.Params) - session.Actions = append(session.Actions, ActionLog{ - Action: req.Method, - Params: string(paramsJSON), - Success: err == nil, - Error: errToString(err), - Duration: time.Since(start).Milliseconds(), - Timestamp: time.Now(), - }) - } - s.mu.Unlock() - - if err != nil { - return c.JSON(fiber.Map{ - "success": false, - "error": err.Error(), - }) - } - - return c.JSON(result) -} - -func (s *BrowserServer) executeMethod(ctx context.Context, method string, params map[string]interface{}) (map[string]interface{}, error) { - sessionID, _ := params["sessionId"].(string) - - switch method { - case "browser.newContext": - opts := SessionOptions{ - Headless: getBool(params, "headless"), - } - if viewport, ok := params["viewport"].(map[string]interface{}); ok { - opts.Viewport = &Viewport{ - Width: getInt(viewport, "width"), - Height: getInt(viewport, "height"), - } - } - if ua, ok := params["userAgent"].(string); ok { - opts.UserAgent = ua - } - if proxy, ok := params["proxy"].(string); ok { - opts.ProxyURL = proxy - } - if rv, ok := params["recordVideo"].(map[string]interface{}); ok { - _ = rv - opts.RecordVideo = true - } - - session, err := s.browser.NewSession(ctx, opts) - if err != nil { - return nil, err - } - - s.mu.Lock() - s.sessions[session.ID] = &ManagedSession{ - BrowserSession: session, - LastActive: time.Now(), - Actions: make([]ActionLog, 0), - } - s.mu.Unlock() - - return map[string]interface{}{ - "sessionId": session.ID, - "contextId": session.ContextID, - "pageId": session.PageID, - }, nil - - case "browser.closeContext": - err := s.browser.CloseSession(ctx, sessionID) - s.mu.Lock() - delete(s.sessions, sessionID) - 
s.mu.Unlock() - return map[string]interface{}{"success": err == nil}, err - - case "page.goto": - url, _ := params["url"].(string) - opts := NavigateOptions{ - Timeout: getInt(params, "timeout"), - WaitUntil: getString(params, "waitUntil"), - } - result, err := s.browser.Navigate(ctx, sessionID, url, opts) - if err != nil { - return nil, err - } - return map[string]interface{}{ - "success": result.Success, - "url": result.PageURL, - "title": result.PageTitle, - }, nil - - case "page.click": - selector, _ := params["selector"].(string) - opts := ClickOptions{ - Button: getString(params, "button"), - ClickCount: getInt(params, "clickCount"), - Timeout: getInt(params, "timeout"), - Force: getBool(params, "force"), - } - result, err := s.browser.Click(ctx, sessionID, selector, opts) - if err != nil { - return nil, err - } - return map[string]interface{}{ - "success": result.Success, - "screenshot": result.Screenshot, - }, nil - - case "page.type": - selector, _ := params["selector"].(string) - text, _ := params["text"].(string) - opts := TypeOptions{ - Delay: getInt(params, "delay"), - Timeout: getInt(params, "timeout"), - } - _, err := s.browser.Type(ctx, sessionID, selector, text, opts) - return map[string]interface{}{"success": err == nil}, err - - case "page.fill": - selector, _ := params["selector"].(string) - value, _ := params["value"].(string) - _, err := s.browser.Fill(ctx, sessionID, selector, value) - return map[string]interface{}{"success": err == nil}, err - - case "page.screenshot": - opts := ScreenshotOptions{ - FullPage: getBool(params, "fullPage"), - Selector: getString(params, "selector"), - Format: getString(params, "type"), - Quality: getInt(params, "quality"), - } - result, err := s.browser.Screenshot(ctx, sessionID, opts) - if err != nil { - return nil, err - } - return map[string]interface{}{ - "data": result.Data, - "path": result.Path, - }, nil - - case "page.textContent": - selector, _ := params["selector"].(string) - text, err := 
s.browser.ExtractText(ctx, sessionID, selector) - return map[string]interface{}{"text": text}, err - - case "page.innerHTML": - selector, _ := params["selector"].(string) - html, err := s.browser.ExtractHTML(ctx, sessionID, selector) - return map[string]interface{}{"html": html}, err - - case "page.waitForSelector": - selector, _ := params["selector"].(string) - opts := WaitOptions{ - Timeout: getInt(params, "timeout"), - State: getString(params, "state"), - } - err := s.browser.WaitForSelector(ctx, sessionID, selector, opts) - return map[string]interface{}{"success": err == nil}, err - - case "page.waitForNavigation": - opts := WaitOptions{ - Timeout: getInt(params, "timeout"), - WaitUntil: getString(params, "waitUntil"), - } - err := s.browser.WaitForNavigation(ctx, sessionID, opts) - return map[string]interface{}{"success": err == nil}, err - - case "page.evaluate": - expression, _ := params["expression"].(string) - result, err := s.browser.Evaluate(ctx, sessionID, expression) - return map[string]interface{}{"result": result}, err - - case "page.selectOption": - selector, _ := params["selector"].(string) - values := getStringArray(params, "values") - _, err := s.browser.Select(ctx, sessionID, selector, values) - return map[string]interface{}{"success": err == nil}, err - - case "page.info": - info, err := s.browser.GetPageInfo(ctx, sessionID) - if err != nil { - return nil, err - } - return map[string]interface{}{ - "url": info.URL, - "title": info.Title, - "content": info.Content, - }, nil - - case "page.pdf": - opts := PDFOptions{ - Format: getString(params, "format"), - Landscape: getBool(params, "landscape"), - PrintBackground: getBool(params, "printBackground"), - } - data, err := s.browser.PDF(ctx, sessionID, opts) - if err != nil { - return nil, err - } - return map[string]interface{}{ - "data": data, - }, nil - - default: - return nil, fmt.Errorf("unknown method: %s", method) - } -} - -func (s *BrowserServer) handleNewSession(c *fiber.Ctx) error { - var 
req struct { - Headless bool `json:"headless"` - Viewport *Viewport `json:"viewport,omitempty"` - UserAgent string `json:"userAgent,omitempty"` - ProxyURL string `json:"proxyUrl,omitempty"` - } - - if err := c.BodyParser(&req); err != nil { - req.Headless = true - } - - s.mu.RLock() - if len(s.sessions) >= s.config.MaxSessions { - s.mu.RUnlock() - return c.Status(http.StatusTooManyRequests).JSON(fiber.Map{ - "error": "Maximum sessions limit reached", - }) - } - s.mu.RUnlock() - - ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) - defer cancel() - - session, err := s.browser.NewSession(ctx, SessionOptions{ - Headless: req.Headless, - Viewport: req.Viewport, - UserAgent: req.UserAgent, - ProxyURL: req.ProxyURL, - }) - if err != nil { - return c.Status(500).JSON(fiber.Map{"error": err.Error()}) - } - - s.mu.Lock() - s.sessions[session.ID] = &ManagedSession{ - BrowserSession: session, - LastActive: time.Now(), - Actions: make([]ActionLog, 0), - } - s.mu.Unlock() - - return c.JSON(fiber.Map{ - "sessionId": session.ID, - "contextId": session.ContextID, - "pageId": session.PageID, - }) -} - -func (s *BrowserServer) handleCloseSession(c *fiber.Ctx) error { - sessionID := c.Params("id") - - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) - defer cancel() - - err := s.browser.CloseSession(ctx, sessionID) - if err != nil { - return c.Status(404).JSON(fiber.Map{"error": err.Error()}) - } - - s.mu.Lock() - delete(s.sessions, sessionID) - s.mu.Unlock() - - return c.JSON(fiber.Map{"success": true}) -} - -func (s *BrowserServer) handleGetSession(c *fiber.Ctx) error { - sessionID := c.Params("id") - - s.mu.RLock() - session, ok := s.sessions[sessionID] - s.mu.RUnlock() - - if !ok { - return c.Status(404).JSON(fiber.Map{"error": "Session not found"}) - } - - return c.JSON(fiber.Map{ - "sessionId": session.ID, - "createdAt": session.CreatedAt, - "lastActive": session.LastActive, - "screenshots": session.Screenshots, - "actions": 
len(session.Actions), - }) -} - -func (s *BrowserServer) handleListSessions(c *fiber.Ctx) error { - s.mu.RLock() - defer s.mu.RUnlock() - - sessions := make([]map[string]interface{}, 0, len(s.sessions)) - for _, session := range s.sessions { - sessions = append(sessions, map[string]interface{}{ - "sessionId": session.ID, - "createdAt": session.CreatedAt, - "lastActive": session.LastActive, - "actions": len(session.Actions), - }) - } - - return c.JSON(fiber.Map{"sessions": sessions, "count": len(sessions)}) -} - -func (s *BrowserServer) handleAction(c *fiber.Ctx) error { - var req struct { - SessionID string `json:"sessionId"` - Action string `json:"action"` - Selector string `json:"selector,omitempty"` - URL string `json:"url,omitempty"` - Value string `json:"value,omitempty"` - Screenshot bool `json:"screenshot"` - } - - if err := c.BodyParser(&req); err != nil { - return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) - } - - ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) - defer cancel() - - s.mu.Lock() - if session, ok := s.sessions[req.SessionID]; ok { - session.LastActive = time.Now() - } - s.mu.Unlock() - - var result *ActionResponse - var err error - - switch req.Action { - case "navigate": - result, err = s.browser.Navigate(ctx, req.SessionID, req.URL, NavigateOptions{Screenshot: req.Screenshot}) - case "click": - result, err = s.browser.Click(ctx, req.SessionID, req.Selector, ClickOptions{Screenshot: req.Screenshot}) - case "type": - result, err = s.browser.Type(ctx, req.SessionID, req.Selector, req.Value, TypeOptions{}) - case "fill": - result, err = s.browser.Fill(ctx, req.SessionID, req.Selector, req.Value) - case "screenshot": - var screenshot *ScreenshotResult - screenshot, err = s.browser.Screenshot(ctx, req.SessionID, ScreenshotOptions{}) - if err == nil { - result = &ActionResponse{Success: true, Screenshot: screenshot.Data} - } - case "extract": - var text string - text, err = s.browser.ExtractText(ctx, 
req.SessionID, req.Selector) - result = &ActionResponse{Success: err == nil, Data: text} - default: - return c.Status(400).JSON(fiber.Map{"error": "Unknown action: " + req.Action}) - } - - if err != nil { - return c.Status(500).JSON(fiber.Map{"error": err.Error(), "success": false}) - } - - return c.JSON(result) -} - -func (s *BrowserServer) cleanupLoop(ctx context.Context) { - ticker := time.NewTicker(s.config.CleanupInterval) - defer ticker.Stop() - - for { - select { - case <-ctx.Done(): - return - case <-ticker.C: - s.cleanupExpiredSessions() - } - } -} - -func (s *BrowserServer) cleanupExpiredSessions() { - s.mu.Lock() - defer s.mu.Unlock() - - now := time.Now() - for sessionID, session := range s.sessions { - if now.Sub(session.LastActive) > s.config.SessionTimeout { - log.Printf("[BrowserServer] Cleaning up expired session: %s", sessionID) - ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) - s.browser.CloseSession(ctx, sessionID) - cancel() - delete(s.sessions, sessionID) - } - } -} - -func errToString(err error) string { - if err == nil { - return "" - } - return err.Error() -} - -func getBool(m map[string]interface{}, key string) bool { - if v, ok := m[key].(bool); ok { - return v - } - return false -} - -func getInt(m map[string]interface{}, key string) int { - if v, ok := m[key].(float64); ok { - return int(v) - } - if v, ok := m[key].(int); ok { - return v - } - return 0 -} - -func getStringArray(m map[string]interface{}, key string) []string { - if v, ok := m[key].([]interface{}); ok { - result := make([]string, len(v)) - for i, item := range v { - result[i], _ = item.(string) - } - return result - } - return nil -} diff --git a/backend/internal/computer/computer.go b/backend/internal/computer/computer.go deleted file mode 100644 index 1df73bd..0000000 --- a/backend/internal/computer/computer.go +++ /dev/null @@ -1,763 +0,0 @@ -package computer - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "sync" - "time" - - 
"github.com/gooseek/backend/internal/computer/connectors" - "github.com/gooseek/backend/internal/llm" - "github.com/google/uuid" -) - -type ComputerConfig struct { - MaxParallelTasks int - MaxSubTasks int - TaskTimeout time.Duration - SubTaskTimeout time.Duration - TotalBudget float64 - EnableSandbox bool - EnableScheduling bool - EnableBrowser bool - SandboxImage string - ArtifactStorageURL string - BrowserServerURL string - CheckpointStorePath string - MaxConcurrentTasks int - HeartbeatInterval time.Duration - CheckpointInterval time.Duration -} - -func DefaultConfig() ComputerConfig { - return ComputerConfig{ - MaxParallelTasks: 10, - MaxSubTasks: 100, - TaskTimeout: 365 * 24 * time.Hour, - SubTaskTimeout: 2 * time.Hour, - TotalBudget: 100.0, - EnableSandbox: true, - EnableScheduling: true, - EnableBrowser: true, - SandboxImage: "gooseek/sandbox:latest", - BrowserServerURL: "http://browser-svc:3050", - CheckpointStorePath: "/data/checkpoints", - MaxConcurrentTasks: 50, - HeartbeatInterval: 30 * time.Second, - CheckpointInterval: 15 * time.Minute, - } -} - -func GetDurationConfig(mode DurationMode) (maxDuration, checkpointFreq, heartbeatFreq time.Duration, maxIter int) { - cfg, ok := DurationModeConfigs[mode] - if !ok { - cfg = DurationModeConfigs[DurationMedium] - } - return cfg.MaxDuration, cfg.CheckpointFreq, cfg.HeartbeatFreq, cfg.MaxIterations -} - -type Dependencies struct { - Registry *llm.ModelRegistry - TaskRepo TaskRepository - MemoryRepo MemoryRepository - ArtifactRepo ArtifactRepository -} - -type TaskRepository interface { - Create(ctx context.Context, task *ComputerTask) error - Update(ctx context.Context, task *ComputerTask) error - GetByID(ctx context.Context, id string) (*ComputerTask, error) - GetByUserID(ctx context.Context, userID string, limit, offset int) ([]ComputerTask, error) - GetScheduled(ctx context.Context) ([]ComputerTask, error) - Delete(ctx context.Context, id string) error -} - -type MemoryRepository interface { - Store(ctx 
context.Context, entry *MemoryEntry) error - GetByUser(ctx context.Context, userID string, limit int) ([]MemoryEntry, error) - GetByTask(ctx context.Context, taskID string) ([]MemoryEntry, error) - Search(ctx context.Context, userID, query string, limit int) ([]MemoryEntry, error) - Delete(ctx context.Context, id string) error -} - -type ArtifactRepository interface { - Create(ctx context.Context, artifact *Artifact) error - GetByID(ctx context.Context, id string) (*Artifact, error) - GetByTaskID(ctx context.Context, taskID string) ([]Artifact, error) - Delete(ctx context.Context, id string) error -} - -type Computer struct { - cfg ComputerConfig - planner *Planner - router *Router - executor *Executor - sandbox *SandboxManager - memory *MemoryStore - scheduler *Scheduler - connectors *connectors.ConnectorHub - registry *llm.ModelRegistry - taskRepo TaskRepository - eventBus *EventBus - mu sync.RWMutex - tasks map[string]*ComputerTask -} - -func NewComputer(cfg ComputerConfig, deps Dependencies) *Computer { - eventBus := NewEventBus() - - c := &Computer{ - cfg: cfg, - registry: deps.Registry, - taskRepo: deps.TaskRepo, - eventBus: eventBus, - tasks: make(map[string]*ComputerTask), - } - - c.planner = NewPlanner(deps.Registry) - c.router = NewRouter(deps.Registry) - c.executor = NewExecutor(c.router, cfg.MaxParallelTasks) - c.memory = NewMemoryStore(deps.MemoryRepo) - c.connectors = connectors.NewConnectorHub() - - if cfg.EnableSandbox { - c.sandbox = NewSandboxManager(SandboxConfig{ - Image: cfg.SandboxImage, - Timeout: cfg.SubTaskTimeout, - }) - c.executor.SetSandbox(c.sandbox) - } - - if cfg.EnableScheduling { - c.scheduler = NewScheduler(deps.TaskRepo, c) - } - - return c -} - -func (c *Computer) Execute(ctx context.Context, userID, query string, opts ExecuteOptions) (*ComputerTask, error) { - if opts.ResumeFromID != "" { - return c.resumeFromCheckpoint(ctx, opts.ResumeFromID, opts) - } - - durationMode := opts.DurationMode - if durationMode == "" { - 
durationMode = DurationMedium - } - - maxDuration, _, _, maxIter := GetDurationConfig(durationMode) - - task := &ComputerTask{ - ID: uuid.New().String(), - UserID: userID, - Query: query, - Status: StatusPending, - Memory: make(map[string]interface{}), - CreatedAt: time.Now(), - UpdatedAt: time.Now(), - DurationMode: durationMode, - MaxDuration: maxDuration, - MaxIterations: maxIter, - Priority: opts.Priority, - } - - if opts.Priority == "" { - task.Priority = PriorityNormal - } - - if opts.ResourceLimits != nil { - task.ResourceLimits = opts.ResourceLimits - } - - if opts.Schedule != nil { - task.Schedule = opts.Schedule - task.Status = StatusScheduled - } - - if opts.Context != nil { - task.Memory = opts.Context - } - - estimatedEnd := time.Now().Add(maxDuration) - task.EstimatedEnd = &estimatedEnd - - if err := c.taskRepo.Create(ctx, task); err != nil { - return nil, fmt.Errorf("failed to create task: %w", err) - } - - c.mu.Lock() - c.tasks[task.ID] = task - c.mu.Unlock() - - c.emitEvent(TaskEvent{ - Type: EventTaskCreated, - TaskID: task.ID, - Status: task.Status, - Message: fmt.Sprintf("Task created (mode: %s, max duration: %v)", durationMode, maxDuration), - Timestamp: time.Now(), - Data: map[string]interface{}{ - "durationMode": durationMode, - "maxDuration": maxDuration.String(), - "maxIterations": maxIter, - }, - }) - - if opts.Async { - go c.executeTaskWithCheckpoints(context.Background(), task, opts) - return task, nil - } - - return c.executeTaskWithCheckpoints(ctx, task, opts) -} - -func (c *Computer) resumeFromCheckpoint(ctx context.Context, checkpointID string, opts ExecuteOptions) (*ComputerTask, error) { - task, err := c.taskRepo.GetByID(ctx, checkpointID) - if err != nil { - return nil, fmt.Errorf("task not found: %w", err) - } - - if task.Checkpoint == nil { - return nil, errors.New("no checkpoint found for this task") - } - - task.Status = StatusExecuting - now := time.Now() - task.ResumedAt = &now - task.UpdatedAt = now - - 
c.emitEvent(TaskEvent{ - Type: EventResumed, - TaskID: task.ID, - Status: task.Status, - Message: fmt.Sprintf("Resumed from checkpoint (wave: %d, subtask: %d)", task.Checkpoint.WaveIndex, task.Checkpoint.SubTaskIndex), - Progress: task.Checkpoint.Progress, - Timestamp: time.Now(), - }) - - c.mu.Lock() - c.tasks[task.ID] = task - c.mu.Unlock() - - if opts.Async { - go c.executeTaskWithCheckpoints(context.Background(), task, opts) - return task, nil - } - - return c.executeTaskWithCheckpoints(ctx, task, opts) -} - -func (c *Computer) executeTask(ctx context.Context, task *ComputerTask, opts ExecuteOptions) (*ComputerTask, error) { - return c.executeTaskWithCheckpoints(ctx, task, opts) -} - -func (c *Computer) executeTaskWithCheckpoints(ctx context.Context, task *ComputerTask, opts ExecuteOptions) (*ComputerTask, error) { - maxDuration, checkpointFreq, heartbeatFreq, _ := GetDurationConfig(task.DurationMode) - - if opts.Timeout > 0 { - maxDuration = time.Duration(opts.Timeout) * time.Second - } - - ctx, cancel := context.WithTimeout(ctx, maxDuration) - defer cancel() - - budget := c.cfg.TotalBudget - if opts.MaxCost > 0 { - budget = opts.MaxCost - } - if task.ResourceLimits != nil && task.ResourceLimits.MaxTotalCost > 0 { - budget = task.ResourceLimits.MaxTotalCost - } - - startWave := 0 - if task.Checkpoint != nil { - startWave = task.Checkpoint.WaveIndex - for k, v := range task.Checkpoint.Memory { - task.Memory[k] = v - } - } - - if task.Plan == nil { - task.Status = StatusPlanning - task.UpdatedAt = time.Now() - c.updateTask(ctx, task) - - c.emitEvent(TaskEvent{ - Type: EventTaskStarted, - TaskID: task.ID, - Status: StatusPlanning, - Message: "Planning task execution", - Timestamp: time.Now(), - }) - - userMemory, _ := c.memory.GetUserContext(ctx, task.UserID) - memoryContext := make(map[string]interface{}) - for k, v := range userMemory { - memoryContext[k] = v - } - for k, v := range task.Memory { - memoryContext[k] = v - } - - plan, err := c.planner.Plan(ctx, 
task.Query, memoryContext) - if err != nil { - task.Status = StatusFailed - task.Error = fmt.Sprintf("Planning failed: %v", err) - task.UpdatedAt = time.Now() - c.updateTask(ctx, task) - c.emitEvent(TaskEvent{ - Type: EventTaskFailed, - TaskID: task.ID, - Status: StatusFailed, - Message: task.Error, - Timestamp: time.Now(), - }) - return task, err - } - - task.Plan = plan - task.SubTasks = plan.SubTasks - } - - task.Status = StatusLongRunning - task.UpdatedAt = time.Now() - c.updateTask(ctx, task) - - c.emitEvent(TaskEvent{ - Type: EventTaskProgress, - TaskID: task.ID, - Status: StatusLongRunning, - Progress: 10, - Message: fmt.Sprintf("Executing %d subtasks (long-running mode)", len(task.Plan.SubTasks)), - Data: map[string]interface{}{ - "plan": task.Plan, - "durationMode": task.DurationMode, - "checkpointFreq": checkpointFreq.String(), - }, - Timestamp: time.Now(), - }) - - heartbeatTicker := time.NewTicker(heartbeatFreq) - defer heartbeatTicker.Stop() - - checkpointTicker := time.NewTicker(checkpointFreq) - defer checkpointTicker.Stop() - - go func() { - for { - select { - case <-ctx.Done(): - return - case <-heartbeatTicker.C: - now := time.Now() - task.HeartbeatAt = &now - c.emitEvent(TaskEvent{ - Type: EventHeartbeat, - TaskID: task.ID, - Progress: task.Progress, - Message: fmt.Sprintf("Heartbeat: %d%% complete, cost: $%.4f", task.Progress, task.TotalCost), - Data: map[string]interface{}{ - "runtime": time.Since(task.CreatedAt).String(), - "cost": task.TotalCost, - }, - Timestamp: now, - }) - } - } - }() - - totalSubTasks := len(task.Plan.ExecutionOrder) - for waveIdx := startWave; waveIdx < totalSubTasks; waveIdx++ { - select { - case <-ctx.Done(): - c.saveCheckpoint(task, waveIdx, 0, "context_timeout") - return task, ctx.Err() - case <-checkpointTicker.C: - c.saveCheckpoint(task, waveIdx, 0, "periodic") - default: - } - - if budget > 0 && task.TotalCost >= budget { - c.saveCheckpoint(task, waveIdx, 0, "budget_exceeded") - task.Status = StatusPaused - 
task.Message = fmt.Sprintf("Paused: budget exceeded ($%.2f / $%.2f)", task.TotalCost, budget) - c.updateTask(ctx, task) - return task, nil - } - - wave := task.Plan.ExecutionOrder[waveIdx] - waveTasks := make([]SubTask, 0) - for _, subTaskID := range wave { - for i := range task.SubTasks { - if task.SubTasks[i].ID == subTaskID { - waveTasks = append(waveTasks, task.SubTasks[i]) - break - } - } - } - - results, err := c.executor.ExecuteGroup(ctx, waveTasks, budget-task.TotalCost) - if err != nil { - c.saveCheckpoint(task, waveIdx, 0, "execution_error") - task.Status = StatusFailed - task.Error = fmt.Sprintf("Execution failed at wave %d: %v", waveIdx, err) - task.UpdatedAt = time.Now() - c.updateTask(ctx, task) - return task, err - } - - for _, result := range results { - for i := range task.SubTasks { - if task.SubTasks[i].ID == result.SubTaskID { - task.SubTasks[i].Output = result.Output - task.SubTasks[i].Cost = result.Cost - task.SubTasks[i].Status = StatusCompleted - now := time.Now() - task.SubTasks[i].CompletedAt = &now - if result.Error != nil { - task.SubTasks[i].Status = StatusFailed - task.SubTasks[i].Error = result.Error.Error() - } - break - } - } - - task.TotalCost += result.Cost - task.TotalRuntime = time.Since(task.CreatedAt) - - for _, artifact := range result.Artifacts { - task.Artifacts = append(task.Artifacts, artifact) - c.emitEvent(TaskEvent{ - Type: EventArtifact, - TaskID: task.ID, - SubTaskID: result.SubTaskID, - Data: map[string]interface{}{ - "artifact": artifact, - }, - Timestamp: time.Now(), - }) - } - } - - progress := 10 + int(float64(waveIdx+1)/float64(totalSubTasks)*80) - task.Progress = progress - task.Iterations = waveIdx + 1 - task.UpdatedAt = time.Now() - c.updateTask(ctx, task) - - c.emitEvent(TaskEvent{ - Type: EventIteration, - TaskID: task.ID, - Progress: progress, - Message: fmt.Sprintf("Completed wave %d/%d (runtime: %v)", waveIdx+1, totalSubTasks, time.Since(task.CreatedAt).Round(time.Second)), - Data: 
map[string]interface{}{ - "wave": waveIdx + 1, - "total": totalSubTasks, - "cost": task.TotalCost, - "runtime": time.Since(task.CreatedAt).String(), - "artifacts": len(task.Artifacts), - }, - Timestamp: time.Now(), - }) - } - - task.Status = StatusCompleted - task.Progress = 100 - now := time.Now() - task.CompletedAt = &now - task.UpdatedAt = now - task.TotalRuntime = time.Since(task.CreatedAt) - c.updateTask(ctx, task) - - c.emitEvent(TaskEvent{ - Type: EventTaskCompleted, - TaskID: task.ID, - Status: StatusCompleted, - Progress: 100, - Message: fmt.Sprintf("Task completed (runtime: %v, cost: $%.4f)", task.TotalRuntime.Round(time.Second), task.TotalCost), - Data: map[string]interface{}{ - "artifacts": task.Artifacts, - "totalCost": task.TotalCost, - "totalRuntime": task.TotalRuntime.String(), - "iterations": task.Iterations, - }, - Timestamp: time.Now(), - }) - - c.storeTaskResults(ctx, task) - - return task, nil -} - -func (c *Computer) saveCheckpoint(task *ComputerTask, waveIdx, subTaskIdx int, reason string) { - checkpoint := Checkpoint{ - ID: uuid.New().String(), - TaskID: task.ID, - WaveIndex: waveIdx, - SubTaskIndex: subTaskIdx, - State: make(map[string]interface{}), - Progress: task.Progress, - Memory: task.Memory, - CreatedAt: time.Now(), - RuntimeSoFar: time.Since(task.CreatedAt), - CostSoFar: task.TotalCost, - Reason: reason, - } - - for _, artifact := range task.Artifacts { - checkpoint.Artifacts = append(checkpoint.Artifacts, artifact.ID) - } - - task.Checkpoint = &checkpoint - task.Checkpoints = append(task.Checkpoints, checkpoint) - task.UpdatedAt = time.Now() - - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) - defer cancel() - c.taskRepo.Update(ctx, task) - - c.emitEvent(TaskEvent{ - Type: EventCheckpointSaved, - TaskID: task.ID, - Progress: task.Progress, - Message: fmt.Sprintf("Checkpoint saved: %s (wave %d)", reason, waveIdx), - Data: map[string]interface{}{ - "checkpointId": checkpoint.ID, - "waveIndex": waveIdx, - 
"subTaskIndex": subTaskIdx, - "reason": reason, - "runtime": checkpoint.RuntimeSoFar.String(), - "cost": checkpoint.CostSoFar, - }, - Timestamp: time.Now(), - }) -} - -func (c *Computer) Pause(ctx context.Context, taskID string) error { - c.mu.Lock() - task, ok := c.tasks[taskID] - if !ok { - c.mu.Unlock() - var err error - task, err = c.taskRepo.GetByID(ctx, taskID) - if err != nil { - return err - } - c.mu.Lock() - } - - if task.Status != StatusExecuting && task.Status != StatusLongRunning { - c.mu.Unlock() - return errors.New("task is not running") - } - - now := time.Now() - task.Status = StatusPaused - task.PausedAt = &now - task.UpdatedAt = now - c.mu.Unlock() - - c.saveCheckpoint(task, task.Iterations, 0, "user_paused") - - c.emitEvent(TaskEvent{ - Type: EventPaused, - TaskID: taskID, - Status: StatusPaused, - Progress: task.Progress, - Message: "Task paused by user", - Timestamp: now, - }) - - return c.taskRepo.Update(ctx, task) -} - -func (c *Computer) Resume(ctx context.Context, taskID string, userInput string) error { - c.mu.RLock() - task, ok := c.tasks[taskID] - c.mu.RUnlock() - - if !ok { - var err error - task, err = c.taskRepo.GetByID(ctx, taskID) - if err != nil { - return fmt.Errorf("task not found: %w", err) - } - } - - if task.Status != StatusWaiting { - return errors.New("task is not waiting for user input") - } - - task.Memory["user_input"] = userInput - task.Status = StatusExecuting - task.UpdatedAt = time.Now() - - go c.executeTask(context.Background(), task, ExecuteOptions{Async: true}) - - return nil -} - -func (c *Computer) Cancel(ctx context.Context, taskID string) error { - c.mu.Lock() - task, ok := c.tasks[taskID] - if ok { - task.Status = StatusCancelled - task.UpdatedAt = time.Now() - } - c.mu.Unlock() - - if !ok { - task, err := c.taskRepo.GetByID(ctx, taskID) - if err != nil { - return fmt.Errorf("task not found: %w", err) - } - task.Status = StatusCancelled - task.UpdatedAt = time.Now() - return c.taskRepo.Update(ctx, task) - } - 
- c.emitEvent(TaskEvent{ - Type: EventTaskFailed, - TaskID: taskID, - Status: StatusCancelled, - Message: "Task cancelled by user", - Timestamp: time.Now(), - }) - - return c.taskRepo.Update(ctx, task) -} - -func (c *Computer) GetStatus(ctx context.Context, taskID string) (*ComputerTask, error) { - c.mu.RLock() - task, ok := c.tasks[taskID] - c.mu.RUnlock() - - if ok { - return task, nil - } - - return c.taskRepo.GetByID(ctx, taskID) -} - -func (c *Computer) GetUserTasks(ctx context.Context, userID string, limit, offset int) ([]ComputerTask, error) { - return c.taskRepo.GetByUserID(ctx, userID, limit, offset) -} - -func (c *Computer) Stream(ctx context.Context, taskID string) (<-chan TaskEvent, error) { - task, err := c.taskRepo.GetByID(ctx, taskID) - if err != nil { - return nil, fmt.Errorf("task not found: %w", err) - } - - if task.Status == StatusCompleted || task.Status == StatusFailed || task.Status == StatusCancelled { - ch := make(chan TaskEvent, 1) - go func() { - eventType := EventTaskCompleted - if task.Status == StatusFailed { - eventType = EventTaskFailed - } - ch <- TaskEvent{ - TaskID: taskID, - Type: eventType, - Status: task.Status, - Progress: task.Progress, - Message: task.Message, - Timestamp: time.Now(), - } - close(ch) - }() - return ch, nil - } - - return c.eventBus.Subscribe(taskID), nil -} - -func (c *Computer) updateTask(ctx context.Context, task *ComputerTask) { - c.mu.Lock() - c.tasks[task.ID] = task - c.mu.Unlock() - - _ = c.taskRepo.Update(ctx, task) -} - -func (c *Computer) emitEvent(event TaskEvent) { - c.eventBus.Publish(event.TaskID, event) -} - -func (c *Computer) storeTaskResults(ctx context.Context, task *ComputerTask) { - for _, st := range task.SubTasks { - if st.Output != nil { - outputJSON, _ := json.Marshal(st.Output) - entry := &MemoryEntry{ - ID: uuid.New().String(), - UserID: task.UserID, - TaskID: task.ID, - Key: fmt.Sprintf("subtask_%s_result", st.ID), - Value: string(outputJSON), - Type: MemoryTypeResult, - CreatedAt: 
time.Now(), - } - _ = c.memory.Store(ctx, task.UserID, entry) - } - } -} - -func (c *Computer) StartScheduler(ctx context.Context) { - if c.scheduler != nil { - c.scheduler.Start(ctx) - } -} - -func (c *Computer) StopScheduler() { - if c.scheduler != nil { - c.scheduler.Stop() - } -} - -type EventBus struct { - subscribers map[string][]chan TaskEvent - mu sync.RWMutex -} - -func NewEventBus() *EventBus { - return &EventBus{ - subscribers: make(map[string][]chan TaskEvent), - } -} - -func (eb *EventBus) Subscribe(taskID string) <-chan TaskEvent { - eb.mu.Lock() - defer eb.mu.Unlock() - - ch := make(chan TaskEvent, 100) - eb.subscribers[taskID] = append(eb.subscribers[taskID], ch) - return ch -} - -func (eb *EventBus) Unsubscribe(taskID string, ch <-chan TaskEvent) { - eb.mu.Lock() - defer eb.mu.Unlock() - - subs := eb.subscribers[taskID] - for i, sub := range subs { - if sub == ch { - eb.subscribers[taskID] = append(subs[:i], subs[i+1:]...) - close(sub) - break - } - } -} - -func (eb *EventBus) Publish(taskID string, event TaskEvent) { - eb.mu.RLock() - subs := eb.subscribers[taskID] - eb.mu.RUnlock() - - for _, ch := range subs { - select { - case ch <- event: - default: - } - } -} diff --git a/backend/internal/computer/connectors/connector.go b/backend/internal/computer/connectors/connector.go deleted file mode 100644 index 4fa2902..0000000 --- a/backend/internal/computer/connectors/connector.go +++ /dev/null @@ -1,104 +0,0 @@ -package connectors - -import ( - "context" - "errors" - "sync" -) - -type Connector interface { - ID() string - Name() string - Description() string - Execute(ctx context.Context, action string, params map[string]interface{}) (interface{}, error) - GetActions() []Action - Validate(params map[string]interface{}) error -} - -type Action struct { - Name string `json:"name"` - Description string `json:"description"` - Schema map[string]interface{} `json:"schema"` - Required []string `json:"required"` -} - -type ConnectorHub struct { - 
connectors map[string]Connector - mu sync.RWMutex -} - -func NewConnectorHub() *ConnectorHub { - return &ConnectorHub{ - connectors: make(map[string]Connector), - } -} - -func (h *ConnectorHub) Register(connector Connector) { - h.mu.Lock() - defer h.mu.Unlock() - h.connectors[connector.ID()] = connector -} - -func (h *ConnectorHub) Unregister(id string) { - h.mu.Lock() - defer h.mu.Unlock() - delete(h.connectors, id) -} - -func (h *ConnectorHub) Get(id string) (Connector, error) { - h.mu.RLock() - defer h.mu.RUnlock() - - connector, ok := h.connectors[id] - if !ok { - return nil, errors.New("connector not found: " + id) - } - return connector, nil -} - -func (h *ConnectorHub) List() []Connector { - h.mu.RLock() - defer h.mu.RUnlock() - - result := make([]Connector, 0, len(h.connectors)) - for _, c := range h.connectors { - result = append(result, c) - } - return result -} - -func (h *ConnectorHub) Execute(ctx context.Context, connectorID, action string, params map[string]interface{}) (interface{}, error) { - connector, err := h.Get(connectorID) - if err != nil { - return nil, err - } - - if err := connector.Validate(params); err != nil { - return nil, err - } - - return connector.Execute(ctx, action, params) -} - -type ConnectorInfo struct { - ID string `json:"id"` - Name string `json:"name"` - Description string `json:"description"` - Actions []Action `json:"actions"` -} - -func (h *ConnectorHub) GetInfo() []ConnectorInfo { - h.mu.RLock() - defer h.mu.RUnlock() - - result := make([]ConnectorInfo, 0, len(h.connectors)) - for _, c := range h.connectors { - result = append(result, ConnectorInfo{ - ID: c.ID(), - Name: c.Name(), - Description: c.Description(), - Actions: c.GetActions(), - }) - } - return result -} diff --git a/backend/internal/computer/connectors/email.go b/backend/internal/computer/connectors/email.go deleted file mode 100644 index 7700fbb..0000000 --- a/backend/internal/computer/connectors/email.go +++ /dev/null @@ -1,215 +0,0 @@ -package connectors 
- -import ( - "context" - "crypto/tls" - "errors" - "fmt" - "net/smtp" - "strings" -) - -type EmailConfig struct { - SMTPHost string - SMTPPort int - Username string - Password string - FromAddress string - FromName string - UseTLS bool - AllowHTML bool -} - -type EmailConnector struct { - cfg EmailConfig -} - -func NewEmailConnector(cfg EmailConfig) *EmailConnector { - return &EmailConnector{cfg: cfg} -} - -func (e *EmailConnector) ID() string { - return "email" -} - -func (e *EmailConnector) Name() string { - return "Email" -} - -func (e *EmailConnector) Description() string { - return "Send emails via SMTP" -} - -func (e *EmailConnector) GetActions() []Action { - return []Action{ - { - Name: "send", - Description: "Send an email", - Schema: map[string]interface{}{ - "type": "object", - "properties": map[string]interface{}{ - "to": map[string]interface{}{"type": "string", "description": "Recipient email address"}, - "subject": map[string]interface{}{"type": "string", "description": "Email subject"}, - "body": map[string]interface{}{"type": "string", "description": "Email body"}, - "html": map[string]interface{}{"type": "boolean", "description": "Whether body is HTML"}, - "cc": map[string]interface{}{"type": "string", "description": "CC recipients (comma-separated)"}, - "bcc": map[string]interface{}{"type": "string", "description": "BCC recipients (comma-separated)"}, - }, - }, - Required: []string{"to", "subject", "body"}, - }, - } -} - -func (e *EmailConnector) Validate(params map[string]interface{}) error { - if _, ok := params["to"]; !ok { - return errors.New("'to' is required") - } - if _, ok := params["subject"]; !ok { - return errors.New("'subject' is required") - } - if _, ok := params["body"]; !ok { - return errors.New("'body' is required") - } - return nil -} - -func (e *EmailConnector) Execute(ctx context.Context, action string, params map[string]interface{}) (interface{}, error) { - switch action { - case "send": - return e.send(ctx, params) - default: 
- return nil, errors.New("unknown action: " + action) - } -} - -func (e *EmailConnector) send(ctx context.Context, params map[string]interface{}) (interface{}, error) { - to := params["to"].(string) - subject := params["subject"].(string) - body := params["body"].(string) - - isHTML := false - if html, ok := params["html"].(bool); ok { - isHTML = html && e.cfg.AllowHTML - } - - var cc, bcc []string - if ccStr, ok := params["cc"].(string); ok && ccStr != "" { - cc = strings.Split(ccStr, ",") - for i := range cc { - cc[i] = strings.TrimSpace(cc[i]) - } - } - if bccStr, ok := params["bcc"].(string); ok && bccStr != "" { - bcc = strings.Split(bccStr, ",") - for i := range bcc { - bcc[i] = strings.TrimSpace(bcc[i]) - } - } - - from := e.cfg.FromAddress - if e.cfg.FromName != "" { - from = fmt.Sprintf("%s <%s>", e.cfg.FromName, e.cfg.FromAddress) - } - - var msg strings.Builder - msg.WriteString(fmt.Sprintf("From: %s\r\n", from)) - msg.WriteString(fmt.Sprintf("To: %s\r\n", to)) - if len(cc) > 0 { - msg.WriteString(fmt.Sprintf("Cc: %s\r\n", strings.Join(cc, ", "))) - } - msg.WriteString(fmt.Sprintf("Subject: %s\r\n", subject)) - msg.WriteString("MIME-Version: 1.0\r\n") - - if isHTML { - msg.WriteString("Content-Type: text/html; charset=\"UTF-8\"\r\n") - } else { - msg.WriteString("Content-Type: text/plain; charset=\"UTF-8\"\r\n") - } - - msg.WriteString("\r\n") - msg.WriteString(body) - - recipients := []string{to} - recipients = append(recipients, cc...) - recipients = append(recipients, bcc...) 
- - addr := fmt.Sprintf("%s:%d", e.cfg.SMTPHost, e.cfg.SMTPPort) - - var auth smtp.Auth - if e.cfg.Username != "" && e.cfg.Password != "" { - auth = smtp.PlainAuth("", e.cfg.Username, e.cfg.Password, e.cfg.SMTPHost) - } - - var err error - if e.cfg.UseTLS { - err = e.sendWithTLS(addr, auth, e.cfg.FromAddress, recipients, []byte(msg.String())) - } else { - err = smtp.SendMail(addr, auth, e.cfg.FromAddress, recipients, []byte(msg.String())) - } - - if err != nil { - return map[string]interface{}{ - "success": false, - "error": err.Error(), - }, err - } - - return map[string]interface{}{ - "success": true, - "to": to, - "subject": subject, - "recipients": len(recipients), - }, nil -} - -func (e *EmailConnector) sendWithTLS(addr string, auth smtp.Auth, from string, to []string, msg []byte) error { - tlsConfig := &tls.Config{ - ServerName: e.cfg.SMTPHost, - } - - conn, err := tls.Dial("tcp", addr, tlsConfig) - if err != nil { - return err - } - defer conn.Close() - - client, err := smtp.NewClient(conn, e.cfg.SMTPHost) - if err != nil { - return err - } - defer client.Close() - - if auth != nil { - if err := client.Auth(auth); err != nil { - return err - } - } - - if err := client.Mail(from); err != nil { - return err - } - - for _, recipient := range to { - if err := client.Rcpt(recipient); err != nil { - return err - } - } - - w, err := client.Data() - if err != nil { - return err - } - - _, err = w.Write(msg) - if err != nil { - return err - } - - err = w.Close() - if err != nil { - return err - } - - return client.Quit() -} diff --git a/backend/internal/computer/connectors/storage.go b/backend/internal/computer/connectors/storage.go deleted file mode 100644 index 1872f13..0000000 --- a/backend/internal/computer/connectors/storage.go +++ /dev/null @@ -1,432 +0,0 @@ -package connectors - -import ( - "bytes" - "context" - "errors" - "fmt" - "io" - "os" - "path/filepath" - "strings" - "time" - - "github.com/minio/minio-go/v7" - 
"github.com/minio/minio-go/v7/pkg/credentials" -) - -type StorageConfig struct { - Endpoint string - AccessKeyID string - SecretAccessKey string - BucketName string - UseSSL bool - Region string - PublicURL string -} - -type StorageConnector struct { - cfg StorageConfig - client *minio.Client -} - -func NewStorageConnector(cfg StorageConfig) (*StorageConnector, error) { - client, err := minio.New(cfg.Endpoint, &minio.Options{ - Creds: credentials.NewStaticV4(cfg.AccessKeyID, cfg.SecretAccessKey, ""), - Secure: cfg.UseSSL, - Region: cfg.Region, - }) - if err != nil { - return nil, fmt.Errorf("failed to create storage client: %w", err) - } - - return &StorageConnector{ - cfg: cfg, - client: client, - }, nil -} - -func (s *StorageConnector) ID() string { - return "storage" -} - -func (s *StorageConnector) Name() string { - return "Storage" -} - -func (s *StorageConnector) Description() string { - return "Store and retrieve files from S3-compatible storage" -} - -func (s *StorageConnector) GetActions() []Action { - return []Action{ - { - Name: "upload", - Description: "Upload a file", - Schema: map[string]interface{}{ - "type": "object", - "properties": map[string]interface{}{ - "path": map[string]interface{}{"type": "string", "description": "Storage path/key"}, - "content": map[string]interface{}{"type": "string", "description": "File content (base64 or text)"}, - "content_type": map[string]interface{}{"type": "string", "description": "MIME type"}, - "public": map[string]interface{}{"type": "boolean", "description": "Make file publicly accessible"}, - }, - }, - Required: []string{"path", "content"}, - }, - { - Name: "download", - Description: "Download a file", - Schema: map[string]interface{}{ - "type": "object", - "properties": map[string]interface{}{ - "path": map[string]interface{}{"type": "string", "description": "Storage path/key"}, - }, - }, - Required: []string{"path"}, - }, - { - Name: "delete", - Description: "Delete a file", - Schema: 
map[string]interface{}{ - "type": "object", - "properties": map[string]interface{}{ - "path": map[string]interface{}{"type": "string", "description": "Storage path/key"}, - }, - }, - Required: []string{"path"}, - }, - { - Name: "list", - Description: "List files in a directory", - Schema: map[string]interface{}{ - "type": "object", - "properties": map[string]interface{}{ - "prefix": map[string]interface{}{"type": "string", "description": "Path prefix"}, - "limit": map[string]interface{}{"type": "integer", "description": "Max results"}, - }, - }, - }, - { - Name: "get_url", - Description: "Get a presigned URL for a file", - Schema: map[string]interface{}{ - "type": "object", - "properties": map[string]interface{}{ - "path": map[string]interface{}{"type": "string", "description": "Storage path/key"}, - "expires": map[string]interface{}{"type": "integer", "description": "URL expiry in seconds"}, - }, - }, - Required: []string{"path"}, - }, - } -} - -func (s *StorageConnector) Validate(params map[string]interface{}) error { - return nil -} - -func (s *StorageConnector) Execute(ctx context.Context, action string, params map[string]interface{}) (interface{}, error) { - switch action { - case "upload": - return s.upload(ctx, params) - case "download": - return s.download(ctx, params) - case "delete": - return s.deleteFile(ctx, params) - case "list": - return s.list(ctx, params) - case "get_url": - return s.getURL(ctx, params) - default: - return nil, errors.New("unknown action: " + action) - } -} - -func (s *StorageConnector) upload(ctx context.Context, params map[string]interface{}) (interface{}, error) { - path := params["path"].(string) - content := params["content"].(string) - - contentType := "application/octet-stream" - if ct, ok := params["content_type"].(string); ok { - contentType = ct - } - - if contentType == "" { - contentType = s.detectContentType(path) - } - - reader := bytes.NewReader([]byte(content)) - size := int64(len(content)) - - info, err := 
s.client.PutObject(ctx, s.cfg.BucketName, path, reader, size, minio.PutObjectOptions{ - ContentType: contentType, - }) - if err != nil { - return nil, fmt.Errorf("upload failed: %w", err) - } - - url := "" - if s.cfg.PublicURL != "" { - url = fmt.Sprintf("%s/%s/%s", strings.TrimSuffix(s.cfg.PublicURL, "/"), s.cfg.BucketName, path) - } - - return map[string]interface{}{ - "success": true, - "path": path, - "size": info.Size, - "etag": info.ETag, - "url": url, - }, nil -} - -func (s *StorageConnector) UploadBytes(ctx context.Context, path string, content []byte, contentType string) (string, error) { - if contentType == "" { - contentType = s.detectContentType(path) - } - - reader := bytes.NewReader(content) - size := int64(len(content)) - - _, err := s.client.PutObject(ctx, s.cfg.BucketName, path, reader, size, minio.PutObjectOptions{ - ContentType: contentType, - }) - if err != nil { - return "", err - } - - if s.cfg.PublicURL != "" { - return fmt.Sprintf("%s/%s/%s", strings.TrimSuffix(s.cfg.PublicURL, "/"), s.cfg.BucketName, path), nil - } - - return path, nil -} - -func (s *StorageConnector) download(ctx context.Context, params map[string]interface{}) (interface{}, error) { - path := params["path"].(string) - - obj, err := s.client.GetObject(ctx, s.cfg.BucketName, path, minio.GetObjectOptions{}) - if err != nil { - return nil, fmt.Errorf("download failed: %w", err) - } - defer obj.Close() - - content, err := io.ReadAll(obj) - if err != nil { - return nil, fmt.Errorf("read failed: %w", err) - } - - stat, _ := obj.Stat() - - return map[string]interface{}{ - "success": true, - "path": path, - "content": string(content), - "size": len(content), - "content_type": stat.ContentType, - "modified": stat.LastModified, - }, nil -} - -func (s *StorageConnector) DownloadBytes(ctx context.Context, path string) ([]byte, error) { - obj, err := s.client.GetObject(ctx, s.cfg.BucketName, path, minio.GetObjectOptions{}) - if err != nil { - return nil, err - } - defer obj.Close() - - 
return io.ReadAll(obj) -} - -func (s *StorageConnector) deleteFile(ctx context.Context, params map[string]interface{}) (interface{}, error) { - path := params["path"].(string) - - err := s.client.RemoveObject(ctx, s.cfg.BucketName, path, minio.RemoveObjectOptions{}) - if err != nil { - return nil, fmt.Errorf("delete failed: %w", err) - } - - return map[string]interface{}{ - "success": true, - "path": path, - }, nil -} - -func (s *StorageConnector) list(ctx context.Context, params map[string]interface{}) (interface{}, error) { - prefix := "" - if p, ok := params["prefix"].(string); ok { - prefix = p - } - - limit := 100 - if l, ok := params["limit"].(float64); ok { - limit = int(l) - } - - objects := s.client.ListObjects(ctx, s.cfg.BucketName, minio.ListObjectsOptions{ - Prefix: prefix, - Recursive: true, - }) - - var files []map[string]interface{} - count := 0 - - for obj := range objects { - if obj.Err != nil { - continue - } - - files = append(files, map[string]interface{}{ - "path": obj.Key, - "size": obj.Size, - "modified": obj.LastModified, - "etag": obj.ETag, - }) - - count++ - if count >= limit { - break - } - } - - return map[string]interface{}{ - "success": true, - "files": files, - "count": len(files), - }, nil -} - -func (s *StorageConnector) getURL(ctx context.Context, params map[string]interface{}) (interface{}, error) { - path := params["path"].(string) - - expires := 3600 - if e, ok := params["expires"].(float64); ok { - expires = int(e) - } - - url, err := s.client.PresignedGetObject(ctx, s.cfg.BucketName, path, time.Duration(expires)*time.Second, nil) - if err != nil { - return nil, fmt.Errorf("failed to generate URL: %w", err) - } - - return map[string]interface{}{ - "success": true, - "url": url.String(), - "expires": expires, - }, nil -} - -func (s *StorageConnector) GetPublicURL(path string) string { - if s.cfg.PublicURL != "" { - return fmt.Sprintf("%s/%s/%s", strings.TrimSuffix(s.cfg.PublicURL, "/"), s.cfg.BucketName, path) - } - return "" -} 
- -func (s *StorageConnector) detectContentType(path string) string { - ext := strings.ToLower(filepath.Ext(path)) - - contentTypes := map[string]string{ - ".html": "text/html", - ".css": "text/css", - ".js": "application/javascript", - ".json": "application/json", - ".xml": "application/xml", - ".pdf": "application/pdf", - ".zip": "application/zip", - ".png": "image/png", - ".jpg": "image/jpeg", - ".jpeg": "image/jpeg", - ".gif": "image/gif", - ".svg": "image/svg+xml", - ".mp4": "video/mp4", - ".mp3": "audio/mpeg", - ".txt": "text/plain", - ".md": "text/markdown", - ".csv": "text/csv", - ".py": "text/x-python", - ".go": "text/x-go", - ".rs": "text/x-rust", - } - - if ct, ok := contentTypes[ext]; ok { - return ct - } - - return "application/octet-stream" -} - -func (s *StorageConnector) EnsureBucket(ctx context.Context) error { - exists, err := s.client.BucketExists(ctx, s.cfg.BucketName) - if err != nil { - return err - } - - if !exists { - return s.client.MakeBucket(ctx, s.cfg.BucketName, minio.MakeBucketOptions{ - Region: s.cfg.Region, - }) - } - - return nil -} - -func NewLocalStorageConnector(basePath string) *LocalStorageConnector { - return &LocalStorageConnector{basePath: basePath} -} - -type LocalStorageConnector struct { - basePath string -} - -func (l *LocalStorageConnector) ID() string { - return "local_storage" -} - -func (l *LocalStorageConnector) Name() string { - return "Local Storage" -} - -func (l *LocalStorageConnector) Description() string { - return "Store files on local filesystem" -} - -func (l *LocalStorageConnector) GetActions() []Action { - return []Action{ - {Name: "upload", Description: "Upload a file"}, - {Name: "download", Description: "Download a file"}, - {Name: "delete", Description: "Delete a file"}, - {Name: "list", Description: "List files"}, - } -} - -func (l *LocalStorageConnector) Validate(params map[string]interface{}) error { - return nil -} - -func (l *LocalStorageConnector) Execute(ctx context.Context, action string, 
params map[string]interface{}) (interface{}, error) { - switch action { - case "upload": - path := params["path"].(string) - content := params["content"].(string) - fullPath := filepath.Join(l.basePath, path) - os.MkdirAll(filepath.Dir(fullPath), 0755) - err := os.WriteFile(fullPath, []byte(content), 0644) - return map[string]interface{}{"success": err == nil, "path": path}, err - - case "download": - path := params["path"].(string) - fullPath := filepath.Join(l.basePath, path) - content, err := os.ReadFile(fullPath) - return map[string]interface{}{"success": err == nil, "content": string(content)}, err - - case "delete": - path := params["path"].(string) - fullPath := filepath.Join(l.basePath, path) - err := os.Remove(fullPath) - return map[string]interface{}{"success": err == nil}, err - - default: - return nil, errors.New("unknown action") - } -} diff --git a/backend/internal/computer/connectors/telegram.go b/backend/internal/computer/connectors/telegram.go deleted file mode 100644 index a6cfc9e..0000000 --- a/backend/internal/computer/connectors/telegram.go +++ /dev/null @@ -1,263 +0,0 @@ -package connectors - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "io" - "mime/multipart" - "net/http" - "strconv" - "time" -) - -type TelegramConfig struct { - BotToken string - Timeout time.Duration -} - -type TelegramConnector struct { - cfg TelegramConfig - client *http.Client -} - -func NewTelegramConnector(cfg TelegramConfig) *TelegramConnector { - timeout := cfg.Timeout - if timeout == 0 { - timeout = 30 * time.Second - } - - return &TelegramConnector{ - cfg: cfg, - client: &http.Client{ - Timeout: timeout, - }, - } -} - -func (t *TelegramConnector) ID() string { - return "telegram" -} - -func (t *TelegramConnector) Name() string { - return "Telegram" -} - -func (t *TelegramConnector) Description() string { - return "Send messages via Telegram Bot API" -} - -func (t *TelegramConnector) GetActions() []Action { - return []Action{ - { - Name: 
"send_message", - Description: "Send a text message", - Schema: map[string]interface{}{ - "type": "object", - "properties": map[string]interface{}{ - "chat_id": map[string]interface{}{"type": "string", "description": "Chat ID or @username"}, - "text": map[string]interface{}{"type": "string", "description": "Message text"}, - "parse_mode": map[string]interface{}{"type": "string", "enum": []string{"HTML", "Markdown", "MarkdownV2"}}, - }, - }, - Required: []string{"chat_id", "text"}, - }, - { - Name: "send_document", - Description: "Send a document/file", - Schema: map[string]interface{}{ - "type": "object", - "properties": map[string]interface{}{ - "chat_id": map[string]interface{}{"type": "string", "description": "Chat ID"}, - "document": map[string]interface{}{"type": "string", "description": "File path or URL"}, - "caption": map[string]interface{}{"type": "string", "description": "Document caption"}, - }, - }, - Required: []string{"chat_id", "document"}, - }, - { - Name: "send_photo", - Description: "Send a photo", - Schema: map[string]interface{}{ - "type": "object", - "properties": map[string]interface{}{ - "chat_id": map[string]interface{}{"type": "string", "description": "Chat ID"}, - "photo": map[string]interface{}{"type": "string", "description": "Photo URL or file_id"}, - "caption": map[string]interface{}{"type": "string", "description": "Photo caption"}, - }, - }, - Required: []string{"chat_id", "photo"}, - }, - } -} - -func (t *TelegramConnector) Validate(params map[string]interface{}) error { - if _, ok := params["chat_id"]; !ok { - return errors.New("'chat_id' is required") - } - return nil -} - -func (t *TelegramConnector) Execute(ctx context.Context, action string, params map[string]interface{}) (interface{}, error) { - switch action { - case "send_message": - return t.sendMessage(ctx, params) - case "send_document": - return t.sendDocument(ctx, params) - case "send_photo": - return t.sendPhoto(ctx, params) - default: - return nil, errors.New("unknown 
action: " + action) - } -} - -func (t *TelegramConnector) sendMessage(ctx context.Context, params map[string]interface{}) (interface{}, error) { - chatID := params["chat_id"].(string) - text := params["text"].(string) - - payload := map[string]interface{}{ - "chat_id": chatID, - "text": text, - } - - if parseMode, ok := params["parse_mode"].(string); ok { - payload["parse_mode"] = parseMode - } - - return t.apiCall(ctx, "sendMessage", payload) -} - -func (t *TelegramConnector) sendDocument(ctx context.Context, params map[string]interface{}) (interface{}, error) { - chatID := params["chat_id"].(string) - document := params["document"].(string) - - payload := map[string]interface{}{ - "chat_id": chatID, - "document": document, - } - - if caption, ok := params["caption"].(string); ok { - payload["caption"] = caption - } - - return t.apiCall(ctx, "sendDocument", payload) -} - -func (t *TelegramConnector) sendPhoto(ctx context.Context, params map[string]interface{}) (interface{}, error) { - chatID := params["chat_id"].(string) - photo := params["photo"].(string) - - payload := map[string]interface{}{ - "chat_id": chatID, - "photo": photo, - } - - if caption, ok := params["caption"].(string); ok { - payload["caption"] = caption - } - - return t.apiCall(ctx, "sendPhoto", payload) -} - -func (t *TelegramConnector) apiCall(ctx context.Context, method string, payload map[string]interface{}) (interface{}, error) { - url := fmt.Sprintf("https://api.telegram.org/bot%s/%s", t.cfg.BotToken, method) - - body, err := json.Marshal(payload) - if err != nil { - return nil, err - } - - req, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewReader(body)) - if err != nil { - return nil, err - } - req.Header.Set("Content-Type", "application/json") - - resp, err := t.client.Do(req) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - respBody, err := io.ReadAll(resp.Body) - if err != nil { - return nil, err - } - - var result map[string]interface{} - if err := 
json.Unmarshal(respBody, &result); err != nil { - return nil, err - } - - if ok, exists := result["ok"].(bool); exists && !ok { - desc := "unknown error" - if d, exists := result["description"].(string); exists { - desc = d - } - return result, errors.New("Telegram API error: " + desc) - } - - return result, nil -} - -func (t *TelegramConnector) SendFileFromBytes(ctx context.Context, chatID string, filename string, content []byte, caption string) (interface{}, error) { - url := fmt.Sprintf("https://api.telegram.org/bot%s/sendDocument", t.cfg.BotToken) - - var b bytes.Buffer - w := multipart.NewWriter(&b) - - w.WriteField("chat_id", chatID) - - if caption != "" { - w.WriteField("caption", caption) - } - - fw, err := w.CreateFormFile("document", filename) - if err != nil { - return nil, err - } - fw.Write(content) - - w.Close() - - req, err := http.NewRequestWithContext(ctx, "POST", url, &b) - if err != nil { - return nil, err - } - req.Header.Set("Content-Type", w.FormDataContentType()) - - resp, err := t.client.Do(req) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - respBody, err := io.ReadAll(resp.Body) - if err != nil { - return nil, err - } - - var result map[string]interface{} - if err := json.Unmarshal(respBody, &result); err != nil { - return nil, err - } - - return result, nil -} - -func (t *TelegramConnector) GetChatID(chatIDOrUsername interface{}) string { - switch v := chatIDOrUsername.(type) { - case string: - return v - case int: - return strconv.Itoa(v) - case int64: - return strconv.FormatInt(v, 10) - case float64: - return strconv.FormatInt(int64(v), 10) - default: - return fmt.Sprintf("%v", v) - } -} diff --git a/backend/internal/computer/connectors/webhook.go b/backend/internal/computer/connectors/webhook.go deleted file mode 100644 index 314c37c..0000000 --- a/backend/internal/computer/connectors/webhook.go +++ /dev/null @@ -1,275 +0,0 @@ -package connectors - -import ( - "bytes" - "context" - "crypto/hmac" - "crypto/sha256" 
- "encoding/hex" - "encoding/json" - "errors" - "fmt" - "io" - "net/http" - "net/url" - "strings" - "time" -) - -type WebhookConfig struct { - Timeout time.Duration - MaxRetries int - RetryDelay time.Duration - DefaultSecret string -} - -type WebhookConnector struct { - cfg WebhookConfig - client *http.Client -} - -func NewWebhookConnector(cfg WebhookConfig) *WebhookConnector { - timeout := cfg.Timeout - if timeout == 0 { - timeout = 30 * time.Second - } - if cfg.MaxRetries == 0 { - cfg.MaxRetries = 3 - } - if cfg.RetryDelay == 0 { - cfg.RetryDelay = time.Second - } - - return &WebhookConnector{ - cfg: cfg, - client: &http.Client{ - Timeout: timeout, - }, - } -} - -func (w *WebhookConnector) ID() string { - return "webhook" -} - -func (w *WebhookConnector) Name() string { - return "Webhook" -} - -func (w *WebhookConnector) Description() string { - return "Send HTTP webhooks to external services" -} - -func (w *WebhookConnector) GetActions() []Action { - return []Action{ - { - Name: "post", - Description: "Send POST request", - Schema: map[string]interface{}{ - "type": "object", - "properties": map[string]interface{}{ - "url": map[string]interface{}{"type": "string", "description": "Webhook URL"}, - "body": map[string]interface{}{"type": "object", "description": "Request body (JSON)"}, - "headers": map[string]interface{}{"type": "object", "description": "Custom headers"}, - "secret": map[string]interface{}{"type": "string", "description": "HMAC secret for signing"}, - }, - }, - Required: []string{"url"}, - }, - { - Name: "get", - Description: "Send GET request", - Schema: map[string]interface{}{ - "type": "object", - "properties": map[string]interface{}{ - "url": map[string]interface{}{"type": "string", "description": "Request URL"}, - "params": map[string]interface{}{"type": "object", "description": "Query parameters"}, - "headers": map[string]interface{}{"type": "object", "description": "Custom headers"}, - }, - }, - Required: []string{"url"}, - }, - { - Name: 
"put", - Description: "Send PUT request", - Schema: map[string]interface{}{ - "type": "object", - "properties": map[string]interface{}{ - "url": map[string]interface{}{"type": "string", "description": "Request URL"}, - "body": map[string]interface{}{"type": "object", "description": "Request body (JSON)"}, - "headers": map[string]interface{}{"type": "object", "description": "Custom headers"}, - }, - }, - Required: []string{"url"}, - }, - } -} - -func (w *WebhookConnector) Validate(params map[string]interface{}) error { - urlStr, ok := params["url"].(string) - if !ok { - return errors.New("'url' is required") - } - - parsed, err := url.Parse(urlStr) - if err != nil { - return fmt.Errorf("invalid URL: %w", err) - } - - if parsed.Scheme != "http" && parsed.Scheme != "https" { - return errors.New("URL must use http or https scheme") - } - - return nil -} - -func (w *WebhookConnector) Execute(ctx context.Context, action string, params map[string]interface{}) (interface{}, error) { - switch action { - case "post": - return w.doRequest(ctx, "POST", params) - case "get": - return w.doRequest(ctx, "GET", params) - case "put": - return w.doRequest(ctx, "PUT", params) - case "delete": - return w.doRequest(ctx, "DELETE", params) - case "patch": - return w.doRequest(ctx, "PATCH", params) - default: - return nil, errors.New("unknown action: " + action) - } -} - -func (w *WebhookConnector) doRequest(ctx context.Context, method string, params map[string]interface{}) (interface{}, error) { - urlStr := params["url"].(string) - - if method == "GET" { - if queryParams, ok := params["params"].(map[string]interface{}); ok { - parsedURL, _ := url.Parse(urlStr) - q := parsedURL.Query() - for k, v := range queryParams { - q.Set(k, fmt.Sprintf("%v", v)) - } - parsedURL.RawQuery = q.Encode() - urlStr = parsedURL.String() - } - } - - var bodyReader io.Reader - var bodyBytes []byte - - if body, ok := params["body"]; ok && method != "GET" { - var err error - bodyBytes, err = json.Marshal(body) - 
if err != nil { - return nil, fmt.Errorf("failed to marshal body: %w", err) - } - bodyReader = bytes.NewReader(bodyBytes) - } - - var lastErr error - for attempt := 0; attempt <= w.cfg.MaxRetries; attempt++ { - if attempt > 0 { - time.Sleep(w.cfg.RetryDelay * time.Duration(attempt)) - if bodyBytes != nil { - bodyReader = bytes.NewReader(bodyBytes) - } - } - - req, err := http.NewRequestWithContext(ctx, method, urlStr, bodyReader) - if err != nil { - return nil, err - } - - req.Header.Set("Content-Type", "application/json") - req.Header.Set("User-Agent", "GooSeek-Computer/1.0") - - if headers, ok := params["headers"].(map[string]interface{}); ok { - for k, v := range headers { - req.Header.Set(k, fmt.Sprintf("%v", v)) - } - } - - if bodyBytes != nil { - secret := w.cfg.DefaultSecret - if s, ok := params["secret"].(string); ok { - secret = s - } - if secret != "" { - signature := w.signPayload(bodyBytes, secret) - req.Header.Set("X-Signature-256", "sha256="+signature) - } - } - - resp, err := w.client.Do(req) - if err != nil { - lastErr = err - continue - } - - respBody, err := io.ReadAll(resp.Body) - resp.Body.Close() - if err != nil { - lastErr = err - continue - } - - result := map[string]interface{}{ - "status_code": resp.StatusCode, - "headers": w.headersToMap(resp.Header), - } - - var jsonBody interface{} - if err := json.Unmarshal(respBody, &jsonBody); err == nil { - result["body"] = jsonBody - } else { - result["body"] = string(respBody) - } - - if resp.StatusCode >= 200 && resp.StatusCode < 300 { - result["success"] = true - return result, nil - } - - if resp.StatusCode >= 500 { - lastErr = fmt.Errorf("server error: %d", resp.StatusCode) - continue - } - - result["success"] = false - return result, nil - } - - return map[string]interface{}{ - "success": false, - "error": lastErr.Error(), - }, lastErr -} - -func (w *WebhookConnector) signPayload(payload []byte, secret string) string { - mac := hmac.New(sha256.New, []byte(secret)) - mac.Write(payload) - return 
hex.EncodeToString(mac.Sum(nil)) -} - -func (w *WebhookConnector) headersToMap(headers http.Header) map[string]string { - result := make(map[string]string) - for k, v := range headers { - result[k] = strings.Join(v, ", ") - } - return result -} - -func (w *WebhookConnector) PostJSON(ctx context.Context, webhookURL string, data interface{}) (interface{}, error) { - return w.Execute(ctx, "post", map[string]interface{}{ - "url": webhookURL, - "body": data, - }) -} - -func (w *WebhookConnector) GetJSON(ctx context.Context, webhookURL string, params map[string]interface{}) (interface{}, error) { - return w.Execute(ctx, "get", map[string]interface{}{ - "url": webhookURL, - "params": params, - }) -} diff --git a/backend/internal/computer/executor.go b/backend/internal/computer/executor.go deleted file mode 100644 index 6d19f4f..0000000 --- a/backend/internal/computer/executor.go +++ /dev/null @@ -1,574 +0,0 @@ -package computer - -import ( - "context" - "encoding/json" - "fmt" - "strings" - "sync" - "time" - - "github.com/gooseek/backend/internal/llm" - "github.com/google/uuid" - "golang.org/x/sync/errgroup" -) - -type Executor struct { - router *Router - sandbox *SandboxManager - maxWorkers int -} - -func NewExecutor(router *Router, maxWorkers int) *Executor { - if maxWorkers <= 0 { - maxWorkers = 5 - } - return &Executor{ - router: router, - maxWorkers: maxWorkers, - } -} - -func (e *Executor) SetSandbox(sandbox *SandboxManager) { - e.sandbox = sandbox -} - -func (e *Executor) ExecuteGroup(ctx context.Context, tasks []SubTask, budget float64) ([]ExecutionResult, error) { - results := make([]ExecutionResult, len(tasks)) - var mu sync.Mutex - - perTaskBudget := budget / float64(len(tasks)) - - g, gctx := errgroup.WithContext(ctx) - g.SetLimit(e.maxWorkers) - - for i, task := range tasks { - i, task := i, task - g.Go(func() error { - result, err := e.ExecuteTask(gctx, &task, perTaskBudget) - mu.Lock() - if err != nil { - results[i] = ExecutionResult{ - TaskID: task.ID, - 
SubTaskID: task.ID, - Error: err, - } - } else { - results[i] = *result - } - mu.Unlock() - return nil - }) - } - - if err := g.Wait(); err != nil { - return results, err - } - - return results, nil -} - -func (e *Executor) ExecuteTask(ctx context.Context, task *SubTask, budget float64) (*ExecutionResult, error) { - startTime := time.Now() - - client, spec, err := e.router.Route(task, budget) - if err != nil { - return nil, fmt.Errorf("routing failed: %w", err) - } - - task.ModelID = spec.ID - now := time.Now() - task.StartedAt = &now - - var result *ExecutionResult - - switch task.Type { - case TaskResearch: - result, err = e.executeResearch(ctx, client, task) - case TaskCode: - result, err = e.executeCode(ctx, client, task) - case TaskAnalysis: - result, err = e.executeAnalysis(ctx, client, task) - case TaskDesign: - result, err = e.executeDesign(ctx, client, task) - case TaskDeploy: - result, err = e.executeDeploy(ctx, client, task) - case TaskReport: - result, err = e.executeReport(ctx, client, task) - case TaskCommunicate: - result, err = e.executeCommunicate(ctx, client, task) - case TaskTransform: - result, err = e.executeTransform(ctx, client, task) - case TaskValidate: - result, err = e.executeValidate(ctx, client, task) - default: - result, err = e.executeGeneric(ctx, client, task) - } - - if err != nil { - return nil, err - } - - result.Duration = time.Since(startTime) - result.Cost = e.router.EstimateCost(task, 1000, 500) - - return result, nil -} - -func (e *Executor) executeResearch(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { - prompt := fmt.Sprintf(`You are a research assistant. Complete this research task: - -Task: %s - -Additional context: %v - -Provide a comprehensive research result with: -1. Key findings -2. Sources/references -3. 
Summary - -Respond in JSON: -{ - "findings": ["finding 1", "finding 2"], - "sources": ["source 1", "source 2"], - "summary": "...", - "data": {} -}`, task.Description, task.Input) - - response, err := client.GenerateText(ctx, llm.StreamRequest{ - Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, - Options: llm.StreamOptions{MaxTokens: 4096}, - }) - if err != nil { - return nil, err - } - - output := parseJSONOutput(response) - - return &ExecutionResult{ - TaskID: task.ID, - SubTaskID: task.ID, - Output: output, - }, nil -} - -func (e *Executor) executeCode(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { - inputContext := "" - if task.Input != nil { - inputJSON, _ := json.Marshal(task.Input) - inputContext = fmt.Sprintf("\n\nContext from previous tasks:\n%s", string(inputJSON)) - } - - prompt := fmt.Sprintf(`You are an expert programmer. Complete this coding task: - -Task: %s%s - -Requirements: -1. Write clean, production-ready code -2. Include error handling -3. Add necessary imports -4. Follow best practices - -Respond in JSON: -{ - "language": "python", - "code": "...", - "filename": "main.py", - "dependencies": ["package1", "package2"], - "explanation": "..." 
-}`, task.Description, inputContext) - - response, err := client.GenerateText(ctx, llm.StreamRequest{ - Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, - Options: llm.StreamOptions{MaxTokens: 8192}, - }) - if err != nil { - return nil, err - } - - output := parseJSONOutput(response) - - var artifacts []Artifact - if code, ok := output["code"].(string); ok { - filename := "main.py" - if fn, ok := output["filename"].(string); ok { - filename = fn - } - artifacts = append(artifacts, Artifact{ - ID: uuid.New().String(), - TaskID: task.ID, - Type: ArtifactTypeCode, - Name: filename, - Content: []byte(code), - Size: int64(len(code)), - CreatedAt: time.Now(), - }) - } - - return &ExecutionResult{ - TaskID: task.ID, - SubTaskID: task.ID, - Output: output, - Artifacts: artifacts, - }, nil -} - -func (e *Executor) executeAnalysis(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { - inputJSON, _ := json.Marshal(task.Input) - - prompt := fmt.Sprintf(`You are a data analyst. Analyze this data/information: - -Task: %s - -Input data: -%s - -Provide: -1. Key insights -2. Patterns observed -3. Recommendations -4. Visualizations needed (describe) - -Respond in JSON: -{ - "insights": ["insight 1", "insight 2"], - "patterns": ["pattern 1"], - "recommendations": ["rec 1"], - "visualizations": ["chart type 1"], - "summary": "..." 
-}`, task.Description, string(inputJSON)) - - response, err := client.GenerateText(ctx, llm.StreamRequest{ - Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, - Options: llm.StreamOptions{MaxTokens: 4096}, - }) - if err != nil { - return nil, err - } - - output := parseJSONOutput(response) - - return &ExecutionResult{ - TaskID: task.ID, - SubTaskID: task.ID, - Output: output, - }, nil -} - -func (e *Executor) executeDesign(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { - inputJSON, _ := json.Marshal(task.Input) - - prompt := fmt.Sprintf(`You are a software architect. Design a solution: - -Task: %s - -Context: -%s - -Provide: -1. Architecture overview -2. Components and their responsibilities -3. Data flow -4. Technology recommendations -5. Implementation plan - -Respond in JSON: -{ - "architecture": "...", - "components": [{"name": "...", "responsibility": "..."}], - "dataFlow": "...", - "technologies": ["tech1", "tech2"], - "implementationSteps": ["step1", "step2"], - "diagram": "mermaid diagram code" -}`, task.Description, string(inputJSON)) - - response, err := client.GenerateText(ctx, llm.StreamRequest{ - Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, - Options: llm.StreamOptions{MaxTokens: 4096}, - }) - if err != nil { - return nil, err - } - - output := parseJSONOutput(response) - - return &ExecutionResult{ - TaskID: task.ID, - SubTaskID: task.ID, - Output: output, - }, nil -} - -func (e *Executor) executeDeploy(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { - if e.sandbox == nil { - return e.executeGeneric(ctx, client, task) - } - - var code string - if task.Input != nil { - if c, ok := task.Input["code"].(string); ok { - code = c - } - } - - if code == "" { - return e.executeGeneric(ctx, client, task) - } - - sandbox, err := e.sandbox.Create(ctx, task.ID) - if err != nil { - return nil, fmt.Errorf("failed to create sandbox: %w", err) - } - defer 
e.sandbox.Destroy(ctx, sandbox) - - result, err := e.sandbox.Execute(ctx, sandbox, code, "python") - if err != nil { - return nil, fmt.Errorf("sandbox execution failed: %w", err) - } - - output := map[string]interface{}{ - "stdout": result.Stdout, - "stderr": result.Stderr, - "exitCode": result.ExitCode, - "duration": result.Duration.String(), - } - - var artifacts []Artifact - for name, content := range result.Files { - artifacts = append(artifacts, Artifact{ - ID: uuid.New().String(), - TaskID: task.ID, - Type: ArtifactTypeFile, - Name: name, - Content: content, - Size: int64(len(content)), - CreatedAt: time.Now(), - }) - } - - return &ExecutionResult{ - TaskID: task.ID, - SubTaskID: task.ID, - Output: output, - Artifacts: artifacts, - }, nil -} - -func (e *Executor) executeReport(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { - inputJSON, _ := json.Marshal(task.Input) - - prompt := fmt.Sprintf(`You are a report writer. Generate a comprehensive report: - -Task: %s - -Data/Context: -%s - -Create a well-structured report with: -1. Executive Summary -2. Key Findings -3. Detailed Analysis -4. Conclusions -5. 
Recommendations - -Use markdown formatting.`, task.Description, string(inputJSON)) - - response, err := client.GenerateText(ctx, llm.StreamRequest{ - Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, - Options: llm.StreamOptions{MaxTokens: 8192}, - }) - if err != nil { - return nil, err - } - - output := map[string]interface{}{ - "report": response, - "format": "markdown", - "wordCount": len(strings.Fields(response)), - } - - artifacts := []Artifact{ - { - ID: uuid.New().String(), - TaskID: task.ID, - Type: ArtifactTypeReport, - Name: "report.md", - Content: []byte(response), - MimeType: "text/markdown", - Size: int64(len(response)), - CreatedAt: time.Now(), - }, - } - - return &ExecutionResult{ - TaskID: task.ID, - SubTaskID: task.ID, - Output: output, - Artifacts: artifacts, - }, nil -} - -func (e *Executor) executeCommunicate(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { - inputJSON, _ := json.Marshal(task.Input) - - prompt := fmt.Sprintf(`Generate a message/notification: - -Task: %s - -Context: -%s - -Create an appropriate message. 
Respond in JSON: -{ - "subject": "...", - "body": "...", - "format": "text|html", - "priority": "low|normal|high" -}`, task.Description, string(inputJSON)) - - response, err := client.GenerateText(ctx, llm.StreamRequest{ - Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, - Options: llm.StreamOptions{MaxTokens: 2048}, - }) - if err != nil { - return nil, err - } - - output := parseJSONOutput(response) - output["status"] = "prepared" - - return &ExecutionResult{ - TaskID: task.ID, - SubTaskID: task.ID, - Output: output, - }, nil -} - -func (e *Executor) executeTransform(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { - inputJSON, _ := json.Marshal(task.Input) - - prompt := fmt.Sprintf(`Transform data as requested: - -Task: %s - -Input data: -%s - -Perform the transformation and return the result in JSON: -{ - "transformed": ..., - "format": "...", - "changes": ["change 1", "change 2"] -}`, task.Description, string(inputJSON)) - - response, err := client.GenerateText(ctx, llm.StreamRequest{ - Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, - Options: llm.StreamOptions{MaxTokens: 4096}, - }) - if err != nil { - return nil, err - } - - output := parseJSONOutput(response) - - return &ExecutionResult{ - TaskID: task.ID, - SubTaskID: task.ID, - Output: output, - }, nil -} - -func (e *Executor) executeValidate(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { - inputJSON, _ := json.Marshal(task.Input) - - prompt := fmt.Sprintf(`Validate the following: - -Task: %s - -Data to validate: -%s - -Check for: -1. Correctness -2. Completeness -3. Consistency -4. Quality - -Respond in JSON: -{ - "valid": true|false, - "score": 0-100, - "issues": ["issue 1", "issue 2"], - "suggestions": ["suggestion 1"], - "summary": "..." 
-}`, task.Description, string(inputJSON)) - - response, err := client.GenerateText(ctx, llm.StreamRequest{ - Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, - Options: llm.StreamOptions{MaxTokens: 2048}, - }) - if err != nil { - return nil, err - } - - output := parseJSONOutput(response) - - return &ExecutionResult{ - TaskID: task.ID, - SubTaskID: task.ID, - Output: output, - }, nil -} - -func (e *Executor) executeGeneric(ctx context.Context, client llm.Client, task *SubTask) (*ExecutionResult, error) { - inputJSON, _ := json.Marshal(task.Input) - - prompt := fmt.Sprintf(`Complete this task: - -Task type: %s -Description: %s - -Context: -%s - -Provide a comprehensive result in JSON format.`, task.Type, task.Description, string(inputJSON)) - - response, err := client.GenerateText(ctx, llm.StreamRequest{ - Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, - Options: llm.StreamOptions{MaxTokens: 4096}, - }) - if err != nil { - return nil, err - } - - output := parseJSONOutput(response) - if len(output) == 0 { - output = map[string]interface{}{ - "result": response, - } - } - - return &ExecutionResult{ - TaskID: task.ID, - SubTaskID: task.ID, - Output: output, - }, nil -} - -func parseJSONOutput(response string) map[string]interface{} { - startIdx := strings.Index(response, "{") - endIdx := strings.LastIndex(response, "}") - - if startIdx == -1 || endIdx == -1 || endIdx <= startIdx { - return map[string]interface{}{"raw": response} - } - - jsonStr := response[startIdx : endIdx+1] - - var output map[string]interface{} - if err := json.Unmarshal([]byte(jsonStr), &output); err != nil { - return map[string]interface{}{"raw": response} - } - - return output -} diff --git a/backend/internal/computer/memory.go b/backend/internal/computer/memory.go deleted file mode 100644 index 7e0134e..0000000 --- a/backend/internal/computer/memory.go +++ /dev/null @@ -1,377 +0,0 @@ -package computer - -import ( - "context" - "encoding/json" - "strings" - "sync" 
- "time" - - "github.com/google/uuid" -) - -type MemoryStore struct { - repo MemoryRepository - cache map[string][]MemoryEntry - mu sync.RWMutex -} - -func NewMemoryStore(repo MemoryRepository) *MemoryStore { - return &MemoryStore{ - repo: repo, - cache: make(map[string][]MemoryEntry), - } -} - -func (m *MemoryStore) Store(ctx context.Context, userID string, entry *MemoryEntry) error { - if entry.ID == "" { - entry.ID = uuid.New().String() - } - entry.UserID = userID - if entry.CreatedAt.IsZero() { - entry.CreatedAt = time.Now() - } - - if m.repo != nil { - if err := m.repo.Store(ctx, entry); err != nil { - return err - } - } - - m.mu.Lock() - m.cache[userID] = append(m.cache[userID], *entry) - if len(m.cache[userID]) > 1000 { - m.cache[userID] = m.cache[userID][len(m.cache[userID])-500:] - } - m.mu.Unlock() - - return nil -} - -func (m *MemoryStore) StoreResult(ctx context.Context, userID, taskID, key string, value interface{}) error { - valueJSON, _ := json.Marshal(value) - - entry := &MemoryEntry{ - UserID: userID, - TaskID: taskID, - Key: key, - Value: string(valueJSON), - Type: MemoryTypeResult, - CreatedAt: time.Now(), - } - - return m.Store(ctx, userID, entry) -} - -func (m *MemoryStore) StoreFact(ctx context.Context, userID, key string, value interface{}, tags []string) error { - entry := &MemoryEntry{ - UserID: userID, - Key: key, - Value: value, - Type: MemoryTypeFact, - Tags: tags, - CreatedAt: time.Now(), - } - - return m.Store(ctx, userID, entry) -} - -func (m *MemoryStore) StorePreference(ctx context.Context, userID, key string, value interface{}) error { - entry := &MemoryEntry{ - UserID: userID, - Key: key, - Value: value, - Type: MemoryTypePreference, - CreatedAt: time.Now(), - } - - return m.Store(ctx, userID, entry) -} - -func (m *MemoryStore) StoreContext(ctx context.Context, userID, taskID, key string, value interface{}, ttl time.Duration) error { - expiresAt := time.Now().Add(ttl) - - entry := &MemoryEntry{ - UserID: userID, - TaskID: taskID, 
- Key: key, - Value: value, - Type: MemoryTypeContext, - CreatedAt: time.Now(), - ExpiresAt: &expiresAt, - } - - return m.Store(ctx, userID, entry) -} - -func (m *MemoryStore) Recall(ctx context.Context, userID string, query string, limit int) ([]MemoryEntry, error) { - if m.repo != nil { - entries, err := m.repo.Search(ctx, userID, query, limit) - if err == nil && len(entries) > 0 { - return entries, nil - } - } - - m.mu.RLock() - cached := m.cache[userID] - m.mu.RUnlock() - - if len(cached) == 0 { - return nil, nil - } - - queryLower := strings.ToLower(query) - queryTerms := strings.Fields(queryLower) - - type scored struct { - entry MemoryEntry - score int - } - - var results []scored - now := time.Now() - - for _, entry := range cached { - if entry.ExpiresAt != nil && entry.ExpiresAt.Before(now) { - continue - } - - score := 0 - - keyLower := strings.ToLower(entry.Key) - for _, term := range queryTerms { - if strings.Contains(keyLower, term) { - score += 3 - } - } - - if valueStr, ok := entry.Value.(string); ok { - valueLower := strings.ToLower(valueStr) - for _, term := range queryTerms { - if strings.Contains(valueLower, term) { - score += 1 - } - } - } - - for _, tag := range entry.Tags { - tagLower := strings.ToLower(tag) - for _, term := range queryTerms { - if strings.Contains(tagLower, term) { - score += 2 - } - } - } - - if score > 0 { - results = append(results, scored{entry: entry, score: score}) - } - } - - for i := 0; i < len(results)-1; i++ { - for j := i + 1; j < len(results); j++ { - if results[j].score > results[i].score { - results[i], results[j] = results[j], results[i] - } - } - } - - if len(results) > limit { - results = results[:limit] - } - - entries := make([]MemoryEntry, len(results)) - for i, r := range results { - entries[i] = r.entry - } - - return entries, nil -} - -func (m *MemoryStore) GetByUser(ctx context.Context, userID string, limit int) ([]MemoryEntry, error) { - if m.repo != nil { - return m.repo.GetByUser(ctx, userID, limit) 
- } - - m.mu.RLock() - cached := m.cache[userID] - m.mu.RUnlock() - - if len(cached) > limit { - return cached[len(cached)-limit:], nil - } - - return cached, nil -} - -func (m *MemoryStore) GetByTask(ctx context.Context, taskID string) ([]MemoryEntry, error) { - if m.repo != nil { - return m.repo.GetByTask(ctx, taskID) - } - - var result []MemoryEntry - - m.mu.RLock() - for _, entries := range m.cache { - for _, e := range entries { - if e.TaskID == taskID { - result = append(result, e) - } - } - } - m.mu.RUnlock() - - return result, nil -} - -func (m *MemoryStore) GetTaskContext(ctx context.Context, taskID string) (map[string]interface{}, error) { - entries, err := m.GetByTask(ctx, taskID) - if err != nil { - return nil, err - } - - context := make(map[string]interface{}) - for _, e := range entries { - context[e.Key] = e.Value - } - - return context, nil -} - -func (m *MemoryStore) GetUserContext(ctx context.Context, userID string) (map[string]interface{}, error) { - entries, err := m.GetByUser(ctx, userID, 100) - if err != nil { - return nil, err - } - - context := make(map[string]interface{}) - for _, e := range entries { - if e.Type == MemoryTypePreference || e.Type == MemoryTypeFact { - context[e.Key] = e.Value - } - } - - return context, nil -} - -func (m *MemoryStore) GetPreferences(ctx context.Context, userID string) (map[string]interface{}, error) { - entries, err := m.GetByUser(ctx, userID, 100) - if err != nil { - return nil, err - } - - prefs := make(map[string]interface{}) - for _, e := range entries { - if e.Type == MemoryTypePreference { - prefs[e.Key] = e.Value - } - } - - return prefs, nil -} - -func (m *MemoryStore) GetFacts(ctx context.Context, userID string) ([]MemoryEntry, error) { - entries, err := m.GetByUser(ctx, userID, 100) - if err != nil { - return nil, err - } - - var facts []MemoryEntry - for _, e := range entries { - if e.Type == MemoryTypeFact { - facts = append(facts, e) - } - } - - return facts, nil -} - -func (m *MemoryStore) 
Delete(ctx context.Context, id string) error { - if m.repo != nil { - return m.repo.Delete(ctx, id) - } - - m.mu.Lock() - for userID, entries := range m.cache { - for i, e := range entries { - if e.ID == id { - m.cache[userID] = append(entries[:i], entries[i+1:]...) - break - } - } - } - m.mu.Unlock() - - return nil -} - -func (m *MemoryStore) Clear(ctx context.Context, userID string) error { - m.mu.Lock() - delete(m.cache, userID) - m.mu.Unlock() - - return nil -} - -func (m *MemoryStore) ClearTask(ctx context.Context, taskID string) error { - m.mu.Lock() - for userID, entries := range m.cache { - var filtered []MemoryEntry - for _, e := range entries { - if e.TaskID != taskID { - filtered = append(filtered, e) - } - } - m.cache[userID] = filtered - } - m.mu.Unlock() - - return nil -} - -func (m *MemoryStore) Cleanup(ctx context.Context) error { - now := time.Now() - - m.mu.Lock() - for userID, entries := range m.cache { - var valid []MemoryEntry - for _, e := range entries { - if e.ExpiresAt == nil || e.ExpiresAt.After(now) { - valid = append(valid, e) - } - } - m.cache[userID] = valid - } - m.mu.Unlock() - - return nil -} - -func (m *MemoryStore) Stats(userID string) map[string]int { - m.mu.RLock() - entries := m.cache[userID] - m.mu.RUnlock() - - stats := map[string]int{ - "total": len(entries), - "facts": 0, - "preferences": 0, - "context": 0, - "results": 0, - } - - for _, e := range entries { - switch e.Type { - case MemoryTypeFact: - stats["facts"]++ - case MemoryTypePreference: - stats["preferences"]++ - case MemoryTypeContext: - stats["context"]++ - case MemoryTypeResult: - stats["results"]++ - } - } - - return stats -} diff --git a/backend/internal/computer/planner.go b/backend/internal/computer/planner.go deleted file mode 100644 index 9aa68c3..0000000 --- a/backend/internal/computer/planner.go +++ /dev/null @@ -1,371 +0,0 @@ -package computer - -import ( - "context" - "encoding/json" - "fmt" - "regexp" - "strings" - - 
"github.com/gooseek/backend/internal/llm" - "github.com/google/uuid" -) - -type Planner struct { - registry *llm.ModelRegistry -} - -func NewPlanner(registry *llm.ModelRegistry) *Planner { - return &Planner{ - registry: registry, - } -} - -func (p *Planner) Plan(ctx context.Context, query string, memory map[string]interface{}) (*TaskPlan, error) { - client, _, err := p.registry.GetBest(llm.CapReasoning) - if err != nil { - client, _, err = p.registry.GetBest(llm.CapCoding) - if err != nil { - return nil, fmt.Errorf("no suitable model for planning: %w", err) - } - } - - memoryContext := "" - if len(memory) > 0 { - memoryJSON, _ := json.Marshal(memory) - memoryContext = fmt.Sprintf("\n\nUser context and memory:\n%s", string(memoryJSON)) - } - - prompt := fmt.Sprintf(`You are a task planning AI. Analyze this query and create an execution plan. - -Query: %s%s - -Break this into subtasks. Each subtask should be: -1. Atomic - one clear action -2. Independent where possible (for parallel execution) -3. 
Have clear dependencies when needed - -Available task types: -- research: Search web, gather information -- code: Write/generate code -- analysis: Analyze data, extract insights -- design: Design architecture, create plans -- deploy: Deploy applications, run code -- monitor: Set up monitoring, tracking -- report: Generate reports, summaries -- communicate: Send emails, messages -- transform: Convert data formats -- validate: Check, verify results - -For each subtask specify: -- type: one of the task types above -- description: what to do -- dependencies: list of subtask IDs this depends on (empty if none) -- capabilities: required AI capabilities (reasoning, coding, search, creative, fast, long_context, vision, math) - -Respond in JSON format: -{ - "summary": "Brief summary of the plan", - "subtasks": [ - { - "id": "1", - "type": "research", - "description": "Search for...", - "dependencies": [], - "capabilities": ["search"] - }, - { - "id": "2", - "type": "code", - "description": "Write code to...", - "dependencies": ["1"], - "capabilities": ["coding"] - } - ], - "estimatedCost": 0.05, - "estimatedTimeSeconds": 120 -} - -Create 3-10 subtasks. 
Be specific and actionable.`, query, memoryContext) - - messages := []llm.Message{ - {Role: llm.RoleUser, Content: prompt}, - } - - response, err := client.GenerateText(ctx, llm.StreamRequest{ - Messages: messages, - Options: llm.StreamOptions{MaxTokens: 4096}, - }) - if err != nil { - return p.createDefaultPlan(query), nil - } - - plan, err := p.parsePlanResponse(response) - if err != nil { - return p.createDefaultPlan(query), nil - } - - plan.Query = query - plan.ExecutionOrder = p.calculateExecutionOrder(plan.SubTasks) - - return plan, nil -} - -func (p *Planner) parsePlanResponse(response string) (*TaskPlan, error) { - jsonRegex := regexp.MustCompile(`\{[\s\S]*\}`) - jsonMatch := jsonRegex.FindString(response) - if jsonMatch == "" { - return nil, fmt.Errorf("no JSON found in response") - } - - var rawPlan struct { - Summary string `json:"summary"` - EstimatedCost float64 `json:"estimatedCost"` - EstimatedTimeSeconds int `json:"estimatedTimeSeconds"` - SubTasks []struct { - ID string `json:"id"` - Type string `json:"type"` - Description string `json:"description"` - Dependencies []string `json:"dependencies"` - Capabilities []string `json:"capabilities"` - } `json:"subtasks"` - } - - if err := json.Unmarshal([]byte(jsonMatch), &rawPlan); err != nil { - return nil, fmt.Errorf("failed to parse plan JSON: %w", err) - } - - plan := &TaskPlan{ - Summary: rawPlan.Summary, - EstimatedCost: rawPlan.EstimatedCost, - EstimatedTime: rawPlan.EstimatedTimeSeconds, - SubTasks: make([]SubTask, len(rawPlan.SubTasks)), - } - - for i, st := range rawPlan.SubTasks { - caps := make([]llm.ModelCapability, len(st.Capabilities)) - for j, c := range st.Capabilities { - caps[j] = llm.ModelCapability(c) - } - - plan.SubTasks[i] = SubTask{ - ID: st.ID, - Type: TaskType(st.Type), - Description: st.Description, - Dependencies: st.Dependencies, - RequiredCaps: caps, - Status: StatusPending, - MaxRetries: 3, - } - } - - return plan, nil -} - -func (p *Planner) calculateExecutionOrder(subTasks 
[]SubTask) [][]string { - taskMap := make(map[string]*SubTask) - for i := range subTasks { - taskMap[subTasks[i].ID] = &subTasks[i] - } - - inDegree := make(map[string]int) - for _, st := range subTasks { - if _, ok := inDegree[st.ID]; !ok { - inDegree[st.ID] = 0 - } - for _, dep := range st.Dependencies { - inDegree[st.ID]++ - if _, ok := inDegree[dep]; !ok { - inDegree[dep] = 0 - } - } - } - - var order [][]string - completed := make(map[string]bool) - - for len(completed) < len(subTasks) { - var wave []string - - for _, st := range subTasks { - if completed[st.ID] { - continue - } - - canExecute := true - for _, dep := range st.Dependencies { - if !completed[dep] { - canExecute = false - break - } - } - - if canExecute { - wave = append(wave, st.ID) - } - } - - if len(wave) == 0 { - for _, st := range subTasks { - if !completed[st.ID] { - wave = append(wave, st.ID) - } - } - } - - for _, id := range wave { - completed[id] = true - } - - order = append(order, wave) - } - - return order -} - -func (p *Planner) createDefaultPlan(query string) *TaskPlan { - queryLower := strings.ToLower(query) - - subTasks := []SubTask{ - { - ID: uuid.New().String(), - Type: TaskResearch, - Description: "Research and gather information about: " + query, - Dependencies: []string{}, - RequiredCaps: []llm.ModelCapability{llm.CapSearch}, - Status: StatusPending, - MaxRetries: 3, - }, - } - - if strings.Contains(queryLower, "код") || strings.Contains(queryLower, "code") || - strings.Contains(queryLower, "приложение") || strings.Contains(queryLower, "app") || - strings.Contains(queryLower, "скрипт") || strings.Contains(queryLower, "script") { - subTasks = append(subTasks, SubTask{ - ID: uuid.New().String(), - Type: TaskDesign, - Description: "Design architecture and structure", - Dependencies: []string{subTasks[0].ID}, - RequiredCaps: []llm.ModelCapability{llm.CapReasoning}, - Status: StatusPending, - MaxRetries: 3, - }) - subTasks = append(subTasks, SubTask{ - ID: uuid.New().String(), - 
Type: TaskCode, - Description: "Generate code implementation", - Dependencies: []string{subTasks[1].ID}, - RequiredCaps: []llm.ModelCapability{llm.CapCoding}, - Status: StatusPending, - MaxRetries: 3, - }) - } - - if strings.Contains(queryLower, "отчёт") || strings.Contains(queryLower, "report") || - strings.Contains(queryLower, "анализ") || strings.Contains(queryLower, "analysis") { - subTasks = append(subTasks, SubTask{ - ID: uuid.New().String(), - Type: TaskAnalysis, - Description: "Analyze gathered information", - Dependencies: []string{subTasks[0].ID}, - RequiredCaps: []llm.ModelCapability{llm.CapReasoning}, - Status: StatusPending, - MaxRetries: 3, - }) - subTasks = append(subTasks, SubTask{ - ID: uuid.New().String(), - Type: TaskReport, - Description: "Generate comprehensive report", - Dependencies: []string{subTasks[len(subTasks)-1].ID}, - RequiredCaps: []llm.ModelCapability{llm.CapCreative}, - Status: StatusPending, - MaxRetries: 3, - }) - } - - if strings.Contains(queryLower, "email") || strings.Contains(queryLower, "письмо") || - strings.Contains(queryLower, "telegram") || strings.Contains(queryLower, "отправ") { - subTasks = append(subTasks, SubTask{ - ID: uuid.New().String(), - Type: TaskCommunicate, - Description: "Send notification/message", - Dependencies: []string{subTasks[len(subTasks)-1].ID}, - RequiredCaps: []llm.ModelCapability{llm.CapFast}, - Status: StatusPending, - MaxRetries: 3, - }) - } - - plan := &TaskPlan{ - Query: query, - Summary: "Auto-generated plan for: " + query, - SubTasks: subTasks, - EstimatedCost: float64(len(subTasks)) * 0.01, - EstimatedTime: len(subTasks) * 30, - } - - plan.ExecutionOrder = p.calculateExecutionOrder(subTasks) - - return plan -} - -func (p *Planner) Replan(ctx context.Context, plan *TaskPlan, newContext string) (*TaskPlan, error) { - completedTasks := make([]SubTask, 0) - pendingTasks := make([]SubTask, 0) - - for _, st := range plan.SubTasks { - if st.Status == StatusCompleted { - completedTasks = 
append(completedTasks, st) - } else if st.Status == StatusPending || st.Status == StatusFailed { - pendingTasks = append(pendingTasks, st) - } - } - - completedJSON, _ := json.Marshal(completedTasks) - pendingJSON, _ := json.Marshal(pendingTasks) - - client, _, err := p.registry.GetBest(llm.CapReasoning) - if err != nil { - return plan, nil - } - - prompt := fmt.Sprintf(`You need to replan a task based on new context. - -Original query: %s - -Completed subtasks: -%s - -Pending subtasks: -%s - -New context/feedback: -%s - -Adjust the plan. Keep completed tasks, modify or remove pending tasks as needed. -Add new subtasks if the new context requires it. - -Respond in the same JSON format as before.`, plan.Query, string(completedJSON), string(pendingJSON), newContext) - - messages := []llm.Message{ - {Role: llm.RoleUser, Content: prompt}, - } - - response, err := client.GenerateText(ctx, llm.StreamRequest{ - Messages: messages, - Options: llm.StreamOptions{MaxTokens: 4096}, - }) - if err != nil { - return plan, nil - } - - newPlan, err := p.parsePlanResponse(response) - if err != nil { - return plan, nil - } - - newPlan.Query = plan.Query - newPlan.ExecutionOrder = p.calculateExecutionOrder(newPlan.SubTasks) - - return newPlan, nil -} diff --git a/backend/internal/computer/router.go b/backend/internal/computer/router.go deleted file mode 100644 index 6de0a47..0000000 --- a/backend/internal/computer/router.go +++ /dev/null @@ -1,244 +0,0 @@ -package computer - -import ( - "errors" - "sort" - - "github.com/gooseek/backend/internal/llm" -) - -type RoutingRule struct { - TaskType TaskType - Preferred []llm.ModelCapability - Fallback []string - MaxCost float64 - MaxLatency int -} - -type Router struct { - registry *llm.ModelRegistry - rules map[TaskType]RoutingRule -} - -func NewRouter(registry *llm.ModelRegistry) *Router { - r := &Router{ - registry: registry, - rules: make(map[TaskType]RoutingRule), - } - - r.rules[TaskResearch] = RoutingRule{ - TaskType: TaskResearch, - 
Preferred: []llm.ModelCapability{llm.CapSearch, llm.CapLongContext}, - Fallback: []string{"gemini-1.5-pro", "gpt-4o"}, - MaxCost: 0.1, - } - - r.rules[TaskCode] = RoutingRule{ - TaskType: TaskCode, - Preferred: []llm.ModelCapability{llm.CapCoding}, - Fallback: []string{"claude-3-sonnet", "claude-3-opus", "gpt-4o"}, - MaxCost: 0.2, - } - - r.rules[TaskAnalysis] = RoutingRule{ - TaskType: TaskAnalysis, - Preferred: []llm.ModelCapability{llm.CapReasoning, llm.CapMath}, - Fallback: []string{"claude-3-opus", "gpt-4o"}, - MaxCost: 0.15, - } - - r.rules[TaskDesign] = RoutingRule{ - TaskType: TaskDesign, - Preferred: []llm.ModelCapability{llm.CapReasoning, llm.CapCreative}, - Fallback: []string{"claude-3-opus", "gpt-4o"}, - MaxCost: 0.15, - } - - r.rules[TaskDeploy] = RoutingRule{ - TaskType: TaskDeploy, - Preferred: []llm.ModelCapability{llm.CapCoding, llm.CapFast}, - Fallback: []string{"claude-3-sonnet", "gpt-4o-mini"}, - MaxCost: 0.05, - } - - r.rules[TaskMonitor] = RoutingRule{ - TaskType: TaskMonitor, - Preferred: []llm.ModelCapability{llm.CapFast}, - Fallback: []string{"gpt-4o-mini", "gemini-1.5-flash"}, - MaxCost: 0.02, - } - - r.rules[TaskReport] = RoutingRule{ - TaskType: TaskReport, - Preferred: []llm.ModelCapability{llm.CapCreative, llm.CapLongContext}, - Fallback: []string{"claude-3-opus", "gpt-4o"}, - MaxCost: 0.1, - } - - r.rules[TaskCommunicate] = RoutingRule{ - TaskType: TaskCommunicate, - Preferred: []llm.ModelCapability{llm.CapFast, llm.CapCreative}, - Fallback: []string{"gpt-4o-mini", "gemini-1.5-flash"}, - MaxCost: 0.02, - } - - r.rules[TaskTransform] = RoutingRule{ - TaskType: TaskTransform, - Preferred: []llm.ModelCapability{llm.CapFast, llm.CapCoding}, - Fallback: []string{"gpt-4o-mini", "claude-3-sonnet"}, - MaxCost: 0.03, - } - - r.rules[TaskValidate] = RoutingRule{ - TaskType: TaskValidate, - Preferred: []llm.ModelCapability{llm.CapReasoning}, - Fallback: []string{"gpt-4o", "claude-3-sonnet"}, - MaxCost: 0.05, - } - - return r -} - -func (r 
*Router) Route(task *SubTask, budget float64) (llm.Client, llm.ModelSpec, error) { - if task.ModelID != "" { - client, spec, err := r.registry.GetByID(task.ModelID) - if err == nil && spec.CostPer1K <= budget { - return client, spec, nil - } - } - - if len(task.RequiredCaps) > 0 { - for _, cap := range task.RequiredCaps { - client, spec, err := r.registry.GetBest(cap) - if err == nil && spec.CostPer1K <= budget { - return client, spec, nil - } - } - } - - rule, ok := r.rules[task.Type] - if ok { - for _, cap := range rule.Preferred { - client, spec, err := r.registry.GetBest(cap) - if err == nil && spec.CostPer1K <= budget { - return client, spec, nil - } - } - - for _, modelID := range rule.Fallback { - client, spec, err := r.registry.GetByID(modelID) - if err == nil && spec.CostPer1K <= budget { - return client, spec, nil - } - } - } - - models := r.registry.GetAll() - if len(models) == 0 { - return nil, llm.ModelSpec{}, errors.New("no models available") - } - - sort.Slice(models, func(i, j int) bool { - return models[i].CostPer1K < models[j].CostPer1K - }) - - for _, spec := range models { - if spec.CostPer1K <= budget { - client, err := r.registry.GetClient(spec.ID) - if err == nil { - return client, spec, nil - } - } - } - - client, err := r.registry.GetClient(models[0].ID) - if err != nil { - return nil, llm.ModelSpec{}, err - } - return client, models[0], nil -} - -func (r *Router) RouteMultiple(task *SubTask, count int, budget float64) ([]llm.Client, []llm.ModelSpec, error) { - var clients []llm.Client - var specs []llm.ModelSpec - - usedModels := make(map[string]bool) - perModelBudget := budget / float64(count) - - rule, ok := r.rules[task.Type] - if !ok { - rule = RoutingRule{ - Preferred: []llm.ModelCapability{llm.CapReasoning, llm.CapCoding, llm.CapFast}, - } - } - - for _, cap := range rule.Preferred { - if len(clients) >= count { - break - } - - models := r.registry.GetAllWithCapability(cap) - for _, spec := range models { - if len(clients) >= count { 
- break - } - if usedModels[spec.ID] { - continue - } - if spec.CostPer1K > perModelBudget { - continue - } - - client, err := r.registry.GetClient(spec.ID) - if err == nil { - clients = append(clients, client) - specs = append(specs, spec) - usedModels[spec.ID] = true - } - } - } - - if len(clients) < count { - models := r.registry.GetAll() - for _, spec := range models { - if len(clients) >= count { - break - } - if usedModels[spec.ID] { - continue - } - - client, err := r.registry.GetClient(spec.ID) - if err == nil { - clients = append(clients, client) - specs = append(specs, spec) - usedModels[spec.ID] = true - } - } - } - - if len(clients) == 0 { - return nil, nil, errors.New("no models available for consensus") - } - - return clients, specs, nil -} - -func (r *Router) SetRule(taskType TaskType, rule RoutingRule) { - r.rules[taskType] = rule -} - -func (r *Router) GetRule(taskType TaskType) (RoutingRule, bool) { - rule, ok := r.rules[taskType] - return rule, ok -} - -func (r *Router) EstimateCost(task *SubTask, inputTokens, outputTokens int) float64 { - _, spec, err := r.Route(task, 1.0) - if err != nil { - return 0.01 - } - - totalTokens := inputTokens + outputTokens - return spec.CostPer1K * float64(totalTokens) / 1000.0 -} diff --git a/backend/internal/computer/sandbox.go b/backend/internal/computer/sandbox.go deleted file mode 100644 index b715266..0000000 --- a/backend/internal/computer/sandbox.go +++ /dev/null @@ -1,431 +0,0 @@ -package computer - -import ( - "bytes" - "context" - "fmt" - "io" - "os" - "os/exec" - "path/filepath" - "strings" - "sync" - "time" - - "github.com/google/uuid" -) - -type SandboxConfig struct { - Image string - Timeout time.Duration - MemoryLimit string - CPULimit string - NetworkMode string - WorkDir string - MaxFileSize int64 - AllowNetwork bool -} - -func DefaultSandboxConfig() SandboxConfig { - return SandboxConfig{ - Image: "gooseek/sandbox:latest", - Timeout: 5 * time.Minute, - MemoryLimit: "512m", - CPULimit: "1.0", - 
NetworkMode: "none", - WorkDir: "/workspace", - MaxFileSize: 10 * 1024 * 1024, - AllowNetwork: false, - } -} - -type Sandbox struct { - ID string - ContainerID string - WorkDir string - Status string - TaskID string - CreatedAt time.Time -} - -type SandboxManager struct { - cfg SandboxConfig - sandboxes map[string]*Sandbox - mu sync.RWMutex - useDocker bool -} - -func NewSandboxManager(cfg SandboxConfig) *SandboxManager { - if cfg.Timeout == 0 { - cfg.Timeout = 5 * time.Minute - } - if cfg.MemoryLimit == "" { - cfg.MemoryLimit = "512m" - } - if cfg.WorkDir == "" { - cfg.WorkDir = "/workspace" - } - - useDocker := isDockerAvailable() - - return &SandboxManager{ - cfg: cfg, - sandboxes: make(map[string]*Sandbox), - useDocker: useDocker, - } -} - -func isDockerAvailable() bool { - cmd := exec.Command("docker", "version") - return cmd.Run() == nil -} - -func (sm *SandboxManager) Create(ctx context.Context, taskID string) (*Sandbox, error) { - sandboxID := uuid.New().String()[:8] - - sandbox := &Sandbox{ - ID: sandboxID, - TaskID: taskID, - Status: "creating", - CreatedAt: time.Now(), - } - - if sm.useDocker { - workDir, err := os.MkdirTemp("", fmt.Sprintf("sandbox-%s-", sandboxID)) - if err != nil { - return nil, fmt.Errorf("failed to create temp dir: %w", err) - } - sandbox.WorkDir = workDir - - args := []string{ - "create", - "--name", fmt.Sprintf("gooseek-sandbox-%s", sandboxID), - "-v", fmt.Sprintf("%s:%s", workDir, sm.cfg.WorkDir), - "-w", sm.cfg.WorkDir, - "--memory", sm.cfg.MemoryLimit, - "--cpus", sm.cfg.CPULimit, - } - - if !sm.cfg.AllowNetwork { - args = append(args, "--network", "none") - } - - args = append(args, sm.cfg.Image, "tail", "-f", "/dev/null") - - cmd := exec.CommandContext(ctx, "docker", args...) 
- output, err := cmd.CombinedOutput() - if err != nil { - os.RemoveAll(workDir) - return nil, fmt.Errorf("failed to create container: %w - %s", err, string(output)) - } - - sandbox.ContainerID = strings.TrimSpace(string(output)) - - startCmd := exec.CommandContext(ctx, "docker", "start", sandbox.ContainerID) - if err := startCmd.Run(); err != nil { - sm.cleanupContainer(sandbox) - return nil, fmt.Errorf("failed to start container: %w", err) - } - } else { - workDir, err := os.MkdirTemp("", fmt.Sprintf("sandbox-%s-", sandboxID)) - if err != nil { - return nil, fmt.Errorf("failed to create temp dir: %w", err) - } - sandbox.WorkDir = workDir - } - - sandbox.Status = "running" - - sm.mu.Lock() - sm.sandboxes[sandboxID] = sandbox - sm.mu.Unlock() - - return sandbox, nil -} - -func (sm *SandboxManager) Execute(ctx context.Context, sandbox *Sandbox, code string, lang string) (*SandboxResult, error) { - ctx, cancel := context.WithTimeout(ctx, sm.cfg.Timeout) - defer cancel() - - startTime := time.Now() - - filename, err := sm.writeCodeFile(sandbox, code, lang) - if err != nil { - return nil, err - } - - var cmd *exec.Cmd - var stdout, stderr bytes.Buffer - - if sm.useDocker { - runCmd := sm.getRunCommand(lang, filename) - cmd = exec.CommandContext(ctx, "docker", "exec", sandbox.ContainerID, "sh", "-c", runCmd) - } else { - cmd = sm.getLocalCommand(ctx, lang, filepath.Join(sandbox.WorkDir, filename)) - } - - cmd.Stdout = &stdout - cmd.Stderr = &stderr - - err = cmd.Run() - exitCode := 0 - if err != nil { - if exitErr, ok := err.(*exec.ExitError); ok { - exitCode = exitErr.ExitCode() - } else if ctx.Err() == context.DeadlineExceeded { - return &SandboxResult{ - Stderr: "Execution timeout exceeded", - ExitCode: -1, - Duration: time.Since(startTime), - }, nil - } - } - - files, _ := sm.collectOutputFiles(sandbox) - - return &SandboxResult{ - Stdout: stdout.String(), - Stderr: stderr.String(), - ExitCode: exitCode, - Files: files, - Duration: time.Since(startTime), - }, nil -} 
- -func (sm *SandboxManager) RunCommand(ctx context.Context, sandbox *Sandbox, command string) (*SandboxResult, error) { - ctx, cancel := context.WithTimeout(ctx, sm.cfg.Timeout) - defer cancel() - - startTime := time.Now() - - var cmd *exec.Cmd - var stdout, stderr bytes.Buffer - - if sm.useDocker { - cmd = exec.CommandContext(ctx, "docker", "exec", sandbox.ContainerID, "sh", "-c", command) - } else { - cmd = exec.CommandContext(ctx, "sh", "-c", command) - cmd.Dir = sandbox.WorkDir - } - - cmd.Stdout = &stdout - cmd.Stderr = &stderr - - err := cmd.Run() - exitCode := 0 - if err != nil { - if exitErr, ok := err.(*exec.ExitError); ok { - exitCode = exitErr.ExitCode() - } - } - - return &SandboxResult{ - Stdout: stdout.String(), - Stderr: stderr.String(), - ExitCode: exitCode, - Duration: time.Since(startTime), - }, nil -} - -func (sm *SandboxManager) WriteFile(ctx context.Context, sandbox *Sandbox, path string, content []byte) error { - if int64(len(content)) > sm.cfg.MaxFileSize { - return fmt.Errorf("file size exceeds limit: %d > %d", len(content), sm.cfg.MaxFileSize) - } - - fullPath := filepath.Join(sandbox.WorkDir, path) - dir := filepath.Dir(fullPath) - - if err := os.MkdirAll(dir, 0755); err != nil { - return fmt.Errorf("failed to create directory: %w", err) - } - - return os.WriteFile(fullPath, content, 0644) -} - -func (sm *SandboxManager) ReadFile(ctx context.Context, sandbox *Sandbox, path string) ([]byte, error) { - fullPath := filepath.Join(sandbox.WorkDir, path) - return os.ReadFile(fullPath) -} - -func (sm *SandboxManager) Destroy(ctx context.Context, sandbox *Sandbox) error { - sm.mu.Lock() - delete(sm.sandboxes, sandbox.ID) - sm.mu.Unlock() - - if sm.useDocker && sandbox.ContainerID != "" { - sm.cleanupContainer(sandbox) - } - - if sandbox.WorkDir != "" { - os.RemoveAll(sandbox.WorkDir) - } - - return nil -} - -func (sm *SandboxManager) cleanupContainer(sandbox *Sandbox) { - exec.Command("docker", "stop", sandbox.ContainerID).Run() - 
exec.Command("docker", "rm", "-f", sandbox.ContainerID).Run() -} - -func (sm *SandboxManager) writeCodeFile(sandbox *Sandbox, code string, lang string) (string, error) { - var filename string - switch lang { - case "python", "py": - filename = "main.py" - case "javascript", "js", "node": - filename = "main.js" - case "typescript", "ts": - filename = "main.ts" - case "go", "golang": - filename = "main.go" - case "bash", "sh", "shell": - filename = "script.sh" - case "ruby", "rb": - filename = "main.rb" - default: - filename = "main.txt" - } - - fullPath := filepath.Join(sandbox.WorkDir, filename) - if err := os.WriteFile(fullPath, []byte(code), 0755); err != nil { - return "", fmt.Errorf("failed to write code file: %w", err) - } - - return filename, nil -} - -func (sm *SandboxManager) getRunCommand(lang, filename string) string { - switch lang { - case "python", "py": - return fmt.Sprintf("python3 %s/%s", sm.cfg.WorkDir, filename) - case "javascript", "js", "node": - return fmt.Sprintf("node %s/%s", sm.cfg.WorkDir, filename) - case "typescript", "ts": - return fmt.Sprintf("npx ts-node %s/%s", sm.cfg.WorkDir, filename) - case "go", "golang": - return fmt.Sprintf("go run %s/%s", sm.cfg.WorkDir, filename) - case "bash", "sh", "shell": - return fmt.Sprintf("bash %s/%s", sm.cfg.WorkDir, filename) - case "ruby", "rb": - return fmt.Sprintf("ruby %s/%s", sm.cfg.WorkDir, filename) - default: - return fmt.Sprintf("cat %s/%s", sm.cfg.WorkDir, filename) - } -} - -func (sm *SandboxManager) getLocalCommand(ctx context.Context, lang, filepath string) *exec.Cmd { - switch lang { - case "python", "py": - return exec.CommandContext(ctx, "python3", filepath) - case "javascript", "js", "node": - return exec.CommandContext(ctx, "node", filepath) - case "go", "golang": - return exec.CommandContext(ctx, "go", "run", filepath) - case "bash", "sh", "shell": - return exec.CommandContext(ctx, "bash", filepath) - case "ruby", "rb": - return exec.CommandContext(ctx, "ruby", filepath) - default: 
- return exec.CommandContext(ctx, "cat", filepath) - } -} - -func (sm *SandboxManager) collectOutputFiles(sandbox *Sandbox) (map[string][]byte, error) { - files := make(map[string][]byte) - - err := filepath.Walk(sandbox.WorkDir, func(path string, info os.FileInfo, err error) error { - if err != nil { - return nil - } - if info.IsDir() { - return nil - } - - relPath, err := filepath.Rel(sandbox.WorkDir, path) - if err != nil { - return nil - } - - if strings.HasPrefix(relPath, "main.") || strings.HasPrefix(relPath, "script.") { - return nil - } - - if info.Size() > sm.cfg.MaxFileSize { - return nil - } - - content, err := os.ReadFile(path) - if err != nil { - return nil - } - - files[relPath] = content - return nil - }) - - return files, err -} - -func (sm *SandboxManager) ListSandboxes() []*Sandbox { - sm.mu.RLock() - defer sm.mu.RUnlock() - - result := make([]*Sandbox, 0, len(sm.sandboxes)) - for _, s := range sm.sandboxes { - result = append(result, s) - } - return result -} - -func (sm *SandboxManager) GetSandbox(id string) (*Sandbox, bool) { - sm.mu.RLock() - defer sm.mu.RUnlock() - s, ok := sm.sandboxes[id] - return s, ok -} - -func (sm *SandboxManager) CopyToContainer(ctx context.Context, sandbox *Sandbox, src string, dst string) error { - if !sm.useDocker { - srcData, err := os.ReadFile(src) - if err != nil { - return err - } - return sm.WriteFile(ctx, sandbox, dst, srcData) - } - - cmd := exec.CommandContext(ctx, "docker", "cp", src, fmt.Sprintf("%s:%s", sandbox.ContainerID, dst)) - return cmd.Run() -} - -func (sm *SandboxManager) CopyFromContainer(ctx context.Context, sandbox *Sandbox, src string, dst string) error { - if !sm.useDocker { - srcPath := filepath.Join(sandbox.WorkDir, src) - srcData, err := os.ReadFile(srcPath) - if err != nil { - return err - } - return os.WriteFile(dst, srcData, 0644) - } - - cmd := exec.CommandContext(ctx, "docker", "cp", fmt.Sprintf("%s:%s", sandbox.ContainerID, src), dst) - return cmd.Run() -} - -func (sm 
*SandboxManager) StreamLogs(ctx context.Context, sandbox *Sandbox) (io.ReadCloser, error) { - if !sm.useDocker { - return nil, fmt.Errorf("streaming not supported without Docker") - } - - cmd := exec.CommandContext(ctx, "docker", "logs", "-f", sandbox.ContainerID) - stdout, err := cmd.StdoutPipe() - if err != nil { - return nil, err - } - - if err := cmd.Start(); err != nil { - return nil, err - } - - return stdout, nil -} diff --git a/backend/internal/computer/scheduler.go b/backend/internal/computer/scheduler.go deleted file mode 100644 index 9d76e56..0000000 --- a/backend/internal/computer/scheduler.go +++ /dev/null @@ -1,386 +0,0 @@ -package computer - -import ( - "context" - "log" - "sync" - "time" - - "github.com/robfig/cron/v3" -) - -type Scheduler struct { - taskRepo TaskRepository - computer *Computer - cron *cron.Cron - jobs map[string]cron.EntryID - running map[string]bool - mu sync.RWMutex - stopCh chan struct{} -} - -func NewScheduler(taskRepo TaskRepository, computer *Computer) *Scheduler { - return &Scheduler{ - taskRepo: taskRepo, - computer: computer, - cron: cron.New(cron.WithSeconds()), - jobs: make(map[string]cron.EntryID), - running: make(map[string]bool), - stopCh: make(chan struct{}), - } -} - -func (s *Scheduler) Start(ctx context.Context) { - s.cron.Start() - - go s.pollScheduledTasks(ctx) - - log.Println("[Scheduler] Started") -} - -func (s *Scheduler) Stop() { - close(s.stopCh) - s.cron.Stop() - log.Println("[Scheduler] Stopped") -} - -func (s *Scheduler) pollScheduledTasks(ctx context.Context) { - ticker := time.NewTicker(30 * time.Second) - defer ticker.Stop() - - s.loadScheduledTasks(ctx) - - for { - select { - case <-ctx.Done(): - return - case <-s.stopCh: - return - case <-ticker.C: - s.checkAndExecute(ctx) - } - } -} - -func (s *Scheduler) loadScheduledTasks(ctx context.Context) { - tasks, err := s.taskRepo.GetScheduled(ctx) - if err != nil { - log.Printf("[Scheduler] Failed to load scheduled tasks: %v", err) - return - } - - for _, 
task := range tasks { - if task.Schedule != nil && task.Schedule.Enabled { - s.scheduleTask(&task) - } - } - - log.Printf("[Scheduler] Loaded %d scheduled tasks", len(tasks)) -} - -func (s *Scheduler) scheduleTask(task *ComputerTask) error { - s.mu.Lock() - defer s.mu.Unlock() - - if oldID, exists := s.jobs[task.ID]; exists { - s.cron.Remove(oldID) - } - - if task.Schedule == nil || !task.Schedule.Enabled { - return nil - } - - var entryID cron.EntryID - var err error - - switch task.Schedule.Type { - case "cron": - if task.Schedule.CronExpr == "" { - return nil - } - entryID, err = s.cron.AddFunc(task.Schedule.CronExpr, func() { - s.executeScheduledTask(task.ID) - }) - - case "interval": - if task.Schedule.Interval <= 0 { - return nil - } - cronExpr := s.intervalToCron(task.Schedule.Interval) - entryID, err = s.cron.AddFunc(cronExpr, func() { - s.executeScheduledTask(task.ID) - }) - - case "once": - go func() { - if task.Schedule.NextRun.After(time.Now()) { - time.Sleep(time.Until(task.Schedule.NextRun)) - } - s.executeScheduledTask(task.ID) - }() - return nil - - case "daily": - entryID, err = s.cron.AddFunc("0 0 9 * * *", func() { - s.executeScheduledTask(task.ID) - }) - - case "hourly": - entryID, err = s.cron.AddFunc("0 0 * * * *", func() { - s.executeScheduledTask(task.ID) - }) - - case "weekly": - entryID, err = s.cron.AddFunc("0 0 9 * * 1", func() { - s.executeScheduledTask(task.ID) - }) - - case "monthly": - entryID, err = s.cron.AddFunc("0 0 9 1 * *", func() { - s.executeScheduledTask(task.ID) - }) - - default: - return nil - } - - if err != nil { - log.Printf("[Scheduler] Failed to schedule task %s: %v", task.ID, err) - return err - } - - s.jobs[task.ID] = entryID - log.Printf("[Scheduler] Scheduled task %s with type %s", task.ID, task.Schedule.Type) - - return nil -} - -func (s *Scheduler) intervalToCron(seconds int) string { - if seconds < 60 { - return "*/30 * * * * *" - } - if seconds < 3600 { - minutes := seconds / 60 - return "0 */" + itoa(minutes) 
+ " * * * *" - } - if seconds < 86400 { - hours := seconds / 3600 - return "0 0 */" + itoa(hours) + " * * *" - } - return "0 0 0 * * *" -} - -func itoa(i int) string { - if i < 10 { - return string(rune('0' + i)) - } - return "" -} - -func (s *Scheduler) executeScheduledTask(taskID string) { - s.mu.Lock() - if s.running[taskID] { - s.mu.Unlock() - log.Printf("[Scheduler] Task %s is already running, skipping", taskID) - return - } - s.running[taskID] = true - s.mu.Unlock() - - defer func() { - s.mu.Lock() - delete(s.running, taskID) - s.mu.Unlock() - }() - - ctx, cancel := context.WithTimeout(context.Background(), 30*time.Minute) - defer cancel() - - task, err := s.taskRepo.GetByID(ctx, taskID) - if err != nil { - log.Printf("[Scheduler] Failed to get task %s: %v", taskID, err) - return - } - - if task.Schedule != nil { - if task.Schedule.ExpiresAt != nil && time.Now().After(*task.Schedule.ExpiresAt) { - log.Printf("[Scheduler] Task %s has expired, removing", taskID) - s.Cancel(taskID) - return - } - - if task.Schedule.MaxRuns > 0 && task.Schedule.RunCount >= task.Schedule.MaxRuns { - log.Printf("[Scheduler] Task %s reached max runs (%d), removing", taskID, task.Schedule.MaxRuns) - s.Cancel(taskID) - return - } - } - - log.Printf("[Scheduler] Executing scheduled task %s (run #%d)", taskID, task.RunCount+1) - - _, err = s.computer.Execute(ctx, task.UserID, task.Query, ExecuteOptions{ - Async: false, - Context: task.Memory, - }) - - if err != nil { - log.Printf("[Scheduler] Task %s execution failed: %v", taskID, err) - } else { - log.Printf("[Scheduler] Task %s completed successfully", taskID) - } - - task.RunCount++ - if task.Schedule != nil { - task.Schedule.RunCount = task.RunCount - task.Schedule.NextRun = s.calculateNextRun(task.Schedule) - task.NextRunAt = &task.Schedule.NextRun - } - task.UpdatedAt = time.Now() - - if err := s.taskRepo.Update(ctx, task); err != nil { - log.Printf("[Scheduler] Failed to update task %s: %v", taskID, err) - } -} - -func (s 
*Scheduler) calculateNextRun(schedule *Schedule) time.Time { - switch schedule.Type { - case "interval": - return time.Now().Add(time.Duration(schedule.Interval) * time.Second) - case "hourly": - return time.Now().Add(time.Hour).Truncate(time.Hour) - case "daily": - next := time.Now().Add(24 * time.Hour) - return time.Date(next.Year(), next.Month(), next.Day(), 9, 0, 0, 0, next.Location()) - case "weekly": - next := time.Now().Add(7 * 24 * time.Hour) - return time.Date(next.Year(), next.Month(), next.Day(), 9, 0, 0, 0, next.Location()) - case "monthly": - next := time.Now().AddDate(0, 1, 0) - return time.Date(next.Year(), next.Month(), 1, 9, 0, 0, 0, next.Location()) - default: - return time.Now().Add(time.Hour) - } -} - -func (s *Scheduler) checkAndExecute(ctx context.Context) { - tasks, err := s.taskRepo.GetScheduled(ctx) - if err != nil { - return - } - - now := time.Now() - for _, task := range tasks { - if task.NextRunAt != nil && task.NextRunAt.Before(now) { - if task.Schedule != nil && task.Schedule.Enabled { - go s.executeScheduledTask(task.ID) - } - } - } -} - -func (s *Scheduler) Schedule(taskID string, schedule Schedule) error { - ctx := context.Background() - task, err := s.taskRepo.GetByID(ctx, taskID) - if err != nil { - return err - } - - task.Schedule = &schedule - task.Schedule.Enabled = true - task.Schedule.NextRun = s.calculateNextRun(&schedule) - task.NextRunAt = &task.Schedule.NextRun - task.Status = StatusScheduled - task.UpdatedAt = time.Now() - - if err := s.taskRepo.Update(ctx, task); err != nil { - return err - } - - return s.scheduleTask(task) -} - -func (s *Scheduler) Cancel(taskID string) error { - s.mu.Lock() - defer s.mu.Unlock() - - if entryID, exists := s.jobs[taskID]; exists { - s.cron.Remove(entryID) - delete(s.jobs, taskID) - } - - ctx := context.Background() - task, err := s.taskRepo.GetByID(ctx, taskID) - if err != nil { - return err - } - - if task.Schedule != nil { - task.Schedule.Enabled = false - } - task.Status = 
StatusCancelled - task.UpdatedAt = time.Now() - - return s.taskRepo.Update(ctx, task) -} - -func (s *Scheduler) Pause(taskID string) error { - s.mu.Lock() - defer s.mu.Unlock() - - if entryID, exists := s.jobs[taskID]; exists { - s.cron.Remove(entryID) - delete(s.jobs, taskID) - } - - ctx := context.Background() - task, err := s.taskRepo.GetByID(ctx, taskID) - if err != nil { - return err - } - - if task.Schedule != nil { - task.Schedule.Enabled = false - } - task.UpdatedAt = time.Now() - - return s.taskRepo.Update(ctx, task) -} - -func (s *Scheduler) Resume(taskID string) error { - ctx := context.Background() - task, err := s.taskRepo.GetByID(ctx, taskID) - if err != nil { - return err - } - - if task.Schedule != nil { - task.Schedule.Enabled = true - task.Schedule.NextRun = s.calculateNextRun(task.Schedule) - task.NextRunAt = &task.Schedule.NextRun - } - task.Status = StatusScheduled - task.UpdatedAt = time.Now() - - if err := s.taskRepo.Update(ctx, task); err != nil { - return err - } - - return s.scheduleTask(task) -} - -func (s *Scheduler) GetScheduledTasks() []string { - s.mu.RLock() - defer s.mu.RUnlock() - - result := make([]string, 0, len(s.jobs)) - for taskID := range s.jobs { - result = append(result, taskID) - } - return result -} - -func (s *Scheduler) IsRunning(taskID string) bool { - s.mu.RLock() - defer s.mu.RUnlock() - return s.running[taskID] -} diff --git a/backend/internal/computer/types.go b/backend/internal/computer/types.go deleted file mode 100644 index 3a3ba0b..0000000 --- a/backend/internal/computer/types.go +++ /dev/null @@ -1,376 +0,0 @@ -package computer - -import ( - "time" - - "github.com/gooseek/backend/internal/llm" -) - -type TaskStatus string - -const ( - StatusPending TaskStatus = "pending" - StatusPlanning TaskStatus = "planning" - StatusExecuting TaskStatus = "executing" - StatusWaiting TaskStatus = "waiting_user" - StatusCompleted TaskStatus = "completed" - StatusFailed TaskStatus = "failed" - StatusCancelled TaskStatus = 
"cancelled" - StatusScheduled TaskStatus = "scheduled" - StatusPaused TaskStatus = "paused" - StatusCheckpoint TaskStatus = "checkpoint" - StatusLongRunning TaskStatus = "long_running" -) - -type TaskType string - -const ( - TaskResearch TaskType = "research" - TaskCode TaskType = "code" - TaskAnalysis TaskType = "analysis" - TaskDesign TaskType = "design" - TaskDeploy TaskType = "deploy" - TaskMonitor TaskType = "monitor" - TaskReport TaskType = "report" - TaskCommunicate TaskType = "communicate" - TaskSchedule TaskType = "schedule" - TaskTransform TaskType = "transform" - TaskValidate TaskType = "validate" -) - -type ComputerTask struct { - ID string `json:"id"` - UserID string `json:"userId"` - Query string `json:"query"` - Status TaskStatus `json:"status"` - Plan *TaskPlan `json:"plan,omitempty"` - SubTasks []SubTask `json:"subTasks,omitempty"` - Artifacts []Artifact `json:"artifacts,omitempty"` - Memory map[string]interface{} `json:"memory,omitempty"` - Progress int `json:"progress"` - Message string `json:"message,omitempty"` - Error string `json:"error,omitempty"` - Schedule *Schedule `json:"schedule,omitempty"` - NextRunAt *time.Time `json:"nextRunAt,omitempty"` - RunCount int `json:"runCount"` - TotalCost float64 `json:"totalCost"` - CreatedAt time.Time `json:"createdAt"` - UpdatedAt time.Time `json:"updatedAt"` - CompletedAt *time.Time `json:"completedAt,omitempty"` - - DurationMode DurationMode `json:"durationMode"` - Checkpoint *Checkpoint `json:"checkpoint,omitempty"` - Checkpoints []Checkpoint `json:"checkpoints,omitempty"` - MaxDuration time.Duration `json:"maxDuration"` - EstimatedEnd *time.Time `json:"estimatedEnd,omitempty"` - Iterations int `json:"iterations"` - MaxIterations int `json:"maxIterations"` - PausedAt *time.Time `json:"pausedAt,omitempty"` - ResumedAt *time.Time `json:"resumedAt,omitempty"` - TotalRuntime time.Duration `json:"totalRuntime"` - HeartbeatAt *time.Time `json:"heartbeatAt,omitempty"` - Priority TaskPriority 
`json:"priority"` - ResourceLimits *ResourceLimits `json:"resourceLimits,omitempty"` -} - -type DurationMode string - -const ( - DurationShort DurationMode = "short" - DurationMedium DurationMode = "medium" - DurationLong DurationMode = "long" - DurationExtended DurationMode = "extended" - DurationUnlimited DurationMode = "unlimited" -) - -type TaskPriority string - -const ( - PriorityLow TaskPriority = "low" - PriorityNormal TaskPriority = "normal" - PriorityHigh TaskPriority = "high" - PriorityCritical TaskPriority = "critical" -) - -type Checkpoint struct { - ID string `json:"id"` - TaskID string `json:"taskId"` - SubTaskIndex int `json:"subTaskIndex"` - WaveIndex int `json:"waveIndex"` - State map[string]interface{} `json:"state"` - Progress int `json:"progress"` - Artifacts []string `json:"artifacts"` - Memory map[string]interface{} `json:"memory"` - CreatedAt time.Time `json:"createdAt"` - RuntimeSoFar time.Duration `json:"runtimeSoFar"` - CostSoFar float64 `json:"costSoFar"` - Reason string `json:"reason"` -} - -type ResourceLimits struct { - MaxCPU float64 `json:"maxCpu"` - MaxMemoryMB int `json:"maxMemoryMb"` - MaxDiskMB int `json:"maxDiskMb"` - MaxNetworkMbps int `json:"maxNetworkMbps"` - MaxCostPerHour float64 `json:"maxCostPerHour"` - MaxTotalCost float64 `json:"maxTotalCost"` - MaxConcurrent int `json:"maxConcurrent"` - IdleTimeoutMins int `json:"idleTimeoutMins"` -} - -var DurationModeConfigs = map[DurationMode]struct { - MaxDuration time.Duration - CheckpointFreq time.Duration - HeartbeatFreq time.Duration - MaxIterations int -}{ - DurationShort: {30 * time.Minute, 5 * time.Minute, 30 * time.Second, 10}, - DurationMedium: {4 * time.Hour, 15 * time.Minute, time.Minute, 50}, - DurationLong: {24 * time.Hour, 30 * time.Minute, 2 * time.Minute, 200}, - DurationExtended: {7 * 24 * time.Hour, time.Hour, 5 * time.Minute, 1000}, - DurationUnlimited: {365 * 24 * time.Hour, 4 * time.Hour, 10 * time.Minute, 0}, -} - -type SubTask struct { - ID string `json:"id"` 
- Type TaskType `json:"type"` - Description string `json:"description"` - Dependencies []string `json:"dependencies,omitempty"` - ModelID string `json:"modelId,omitempty"` - RequiredCaps []llm.ModelCapability `json:"requiredCaps,omitempty"` - Input map[string]interface{} `json:"input,omitempty"` - Output map[string]interface{} `json:"output,omitempty"` - Status TaskStatus `json:"status"` - Progress int `json:"progress"` - Error string `json:"error,omitempty"` - Cost float64 `json:"cost"` - StartedAt *time.Time `json:"startedAt,omitempty"` - CompletedAt *time.Time `json:"completedAt,omitempty"` - Retries int `json:"retries"` - MaxRetries int `json:"maxRetries"` -} - -type TaskPlan struct { - Query string `json:"query"` - Summary string `json:"summary"` - SubTasks []SubTask `json:"subTasks"` - ExecutionOrder [][]string `json:"executionOrder"` - EstimatedCost float64 `json:"estimatedCost"` - EstimatedTime int `json:"estimatedTimeSeconds"` -} - -type Artifact struct { - ID string `json:"id"` - TaskID string `json:"taskId"` - Type string `json:"type"` - Name string `json:"name"` - Content []byte `json:"-"` - URL string `json:"url,omitempty"` - Size int64 `json:"size"` - MimeType string `json:"mimeType,omitempty"` - Metadata map[string]interface{} `json:"metadata,omitempty"` - CreatedAt time.Time `json:"createdAt"` -} - -type Schedule struct { - Type string `json:"type"` - CronExpr string `json:"cronExpr,omitempty"` - Interval int `json:"intervalSeconds,omitempty"` - NextRun time.Time `json:"nextRun"` - MaxRuns int `json:"maxRuns"` - RunCount int `json:"runCount"` - ExpiresAt *time.Time `json:"expiresAt,omitempty"` - Enabled bool `json:"enabled"` - - DurationMode DurationMode `json:"durationMode,omitempty"` - RetryOnFail bool `json:"retryOnFail"` - MaxRetries int `json:"maxRetries"` - RetryDelay time.Duration `json:"retryDelay"` - Timezone string `json:"timezone,omitempty"` - WindowStart string `json:"windowStart,omitempty"` - WindowEnd string 
`json:"windowEnd,omitempty"` - Conditions []Condition `json:"conditions,omitempty"` -} - -type Condition struct { - Type string `json:"type"` - Field string `json:"field"` - Operator string `json:"operator"` - Value interface{} `json:"value"` - Params map[string]interface{} `json:"params,omitempty"` -} - -const ( - ScheduleOnce = "once" - ScheduleInterval = "interval" - ScheduleCron = "cron" - ScheduleHourly = "hourly" - ScheduleDaily = "daily" - ScheduleWeekly = "weekly" - ScheduleMonthly = "monthly" - ScheduleQuarterly = "quarterly" - ScheduleYearly = "yearly" - ScheduleContinuous = "continuous" - ScheduleOnCondition = "on_condition" -) - -type TaskEvent struct { - Type string `json:"type"` - TaskID string `json:"taskId"` - SubTaskID string `json:"subTaskId,omitempty"` - Status TaskStatus `json:"status,omitempty"` - Progress int `json:"progress,omitempty"` - Message string `json:"message,omitempty"` - Data map[string]interface{} `json:"data,omitempty"` - Timestamp time.Time `json:"timestamp"` -} - -type ExecuteOptions struct { - Async bool `json:"async"` - MaxCost float64 `json:"maxCost"` - Timeout int `json:"timeoutSeconds"` - EnableSandbox bool `json:"enableSandbox"` - Schedule *Schedule `json:"schedule,omitempty"` - Context map[string]interface{} `json:"context,omitempty"` - - DurationMode DurationMode `json:"durationMode,omitempty"` - Priority TaskPriority `json:"priority,omitempty"` - ResourceLimits *ResourceLimits `json:"resourceLimits,omitempty"` - ResumeFromID string `json:"resumeFromId,omitempty"` - EnableBrowser bool `json:"enableBrowser"` - BrowserOptions *BrowserOptions `json:"browserOptions,omitempty"` - NotifyOnEvents []string `json:"notifyOnEvents,omitempty"` - WebhookURL string `json:"webhookUrl,omitempty"` - Tags []string `json:"tags,omitempty"` -} - -type BrowserOptions struct { - Headless bool `json:"headless"` - UserAgent string `json:"userAgent,omitempty"` - Viewport *Viewport `json:"viewport,omitempty"` - ProxyURL string 
`json:"proxyUrl,omitempty"` - Timeout int `json:"timeout"` - Screenshots bool `json:"screenshots"` - RecordVideo bool `json:"recordVideo"` - BlockAds bool `json:"blockAds"` - AcceptCookies bool `json:"acceptCookies"` -} - -type Viewport struct { - Width int `json:"width"` - Height int `json:"height"` -} - -type ExecutionResult struct { - TaskID string - SubTaskID string - Output map[string]interface{} - Artifacts []Artifact - Duration time.Duration - Cost float64 - Error error -} - -type SandboxResult struct { - Stdout string - Stderr string - ExitCode int - Files map[string][]byte - Duration time.Duration -} - -type MemoryEntry struct { - ID string `json:"id"` - UserID string `json:"userId"` - TaskID string `json:"taskId,omitempty"` - Key string `json:"key"` - Value interface{} `json:"value"` - Type string `json:"type"` - Tags []string `json:"tags,omitempty"` - CreatedAt time.Time `json:"createdAt"` - ExpiresAt *time.Time `json:"expiresAt,omitempty"` -} - -const ( - EventTaskCreated = "task_created" - EventTaskStarted = "task_started" - EventTaskProgress = "task_progress" - EventTaskCompleted = "task_completed" - EventTaskFailed = "task_failed" - EventSubTaskStart = "subtask_start" - EventSubTaskDone = "subtask_done" - EventSubTaskFail = "subtask_fail" - EventArtifact = "artifact" - EventMessage = "message" - EventUserInput = "user_input_required" - EventCheckpoint = "checkpoint" - EventCheckpointSaved = "checkpoint_saved" - EventResumed = "resumed" - EventPaused = "paused" - EventHeartbeat = "heartbeat" - EventIteration = "iteration" - EventBrowserAction = "browser_action" - EventScreenshot = "screenshot" - EventResourceAlert = "resource_alert" - EventScheduleUpdate = "schedule_update" -) - -type BrowserAction struct { - ID string `json:"id"` - Type BrowserActionType `json:"type"` - Selector string `json:"selector,omitempty"` - URL string `json:"url,omitempty"` - Value string `json:"value,omitempty"` - Options map[string]interface{} `json:"options,omitempty"` - 
Screenshot bool `json:"screenshot"` - WaitAfter int `json:"waitAfterMs"` - Timeout int `json:"timeoutMs"` - Result *BrowserActionResult `json:"result,omitempty"` -} - -type BrowserActionType string - -const ( - BrowserNavigate BrowserActionType = "navigate" - BrowserClick BrowserActionType = "click" - BrowserType BrowserActionType = "type" - BrowserScroll BrowserActionType = "scroll" - BrowserScreenshot BrowserActionType = "screenshot" - BrowserWait BrowserActionType = "wait" - BrowserWaitSelector BrowserActionType = "wait_selector" - BrowserExtract BrowserActionType = "extract" - BrowserEval BrowserActionType = "eval" - BrowserSelect BrowserActionType = "select" - BrowserUpload BrowserActionType = "upload" - BrowserDownload BrowserActionType = "download" - BrowserPDF BrowserActionType = "pdf" - BrowserClose BrowserActionType = "close" -) - -type BrowserActionResult struct { - Success bool `json:"success"` - Data interface{} `json:"data,omitempty"` - Screenshot string `json:"screenshot,omitempty"` - Error string `json:"error,omitempty"` - Duration time.Duration `json:"duration"` - PageTitle string `json:"pageTitle,omitempty"` - PageURL string `json:"pageUrl,omitempty"` - Cookies []map[string]string `json:"cookies,omitempty"` - LocalStorage map[string]string `json:"localStorage,omitempty"` -} - -const ( - ArtifactTypeFile = "file" - ArtifactTypeCode = "code" - ArtifactTypeReport = "report" - ArtifactTypeDeployment = "deployment" - ArtifactTypeImage = "image" - ArtifactTypeData = "data" -) - -const ( - MemoryTypeFact = "fact" - MemoryTypePreference = "preference" - MemoryTypeContext = "context" - MemoryTypeResult = "result" -) diff --git a/backend/internal/db/collection_repo.go b/backend/internal/db/collection_repo.go index b6fbcc6..dfdf400 100644 --- a/backend/internal/db/collection_repo.go +++ b/backend/internal/db/collection_repo.go @@ -105,21 +105,35 @@ func (r *CollectionRepository) GetByUserID(ctx context.Context, userID string, l return collections, nil } 
-func (r *CollectionRepository) Update(ctx context.Context, c *Collection) error { +func (r *CollectionRepository) Update(ctx context.Context, c *Collection, userID string) error { query := ` UPDATE collections SET name = $2, description = $3, is_public = $4, context_enabled = $5, updated_at = NOW() - WHERE id = $1 + WHERE id = $1 AND user_id = $6 ` - _, err := r.db.db.ExecContext(ctx, query, - c.ID, c.Name, c.Description, c.IsPublic, c.ContextEnabled, + result, err := r.db.db.ExecContext(ctx, query, + c.ID, c.Name, c.Description, c.IsPublic, c.ContextEnabled, userID, ) - return err + if err != nil { + return err + } + rows, _ := result.RowsAffected() + if rows == 0 { + return ErrNotFound + } + return nil } -func (r *CollectionRepository) Delete(ctx context.Context, id string) error { - _, err := r.db.db.ExecContext(ctx, "DELETE FROM collections WHERE id = $1", id) - return err +func (r *CollectionRepository) Delete(ctx context.Context, id, userID string) error { + result, err := r.db.db.ExecContext(ctx, "DELETE FROM collections WHERE id = $1 AND user_id = $2", id, userID) + if err != nil { + return err + } + rows, _ := result.RowsAffected() + if rows == 0 { + return ErrNotFound + } + return nil } func (r *CollectionRepository) AddItem(ctx context.Context, item *CollectionItem) error { @@ -135,7 +149,20 @@ func (r *CollectionRepository) AddItem(ctx context.Context, item *CollectionItem ).Scan(&item.ID, &item.CreatedAt, &item.SortOrder) } -func (r *CollectionRepository) GetItems(ctx context.Context, collectionID string) ([]CollectionItem, error) { +func (r *CollectionRepository) GetItems(ctx context.Context, collectionID, userID string) ([]CollectionItem, error) { + var ownerID string + var isPublic bool + err := r.db.db.QueryRowContext(ctx, "SELECT user_id, is_public FROM collections WHERE id = $1", collectionID).Scan(&ownerID, &isPublic) + if err == sql.ErrNoRows { + return nil, ErrNotFound + } + if err != nil { + return nil, err + } + if ownerID != userID && 
!isPublic { + return nil, ErrForbidden + } + query := ` SELECT id, collection_id, item_type, title, content, url, metadata, created_at, sort_order FROM collection_items @@ -168,13 +195,25 @@ func (r *CollectionRepository) GetItems(ctx context.Context, collectionID string return items, nil } -func (r *CollectionRepository) RemoveItem(ctx context.Context, itemID string) error { - _, err := r.db.db.ExecContext(ctx, "DELETE FROM collection_items WHERE id = $1", itemID) - return err +func (r *CollectionRepository) RemoveItem(ctx context.Context, itemID, userID string) error { + query := ` + DELETE FROM collection_items + WHERE id = $1 + AND collection_id IN (SELECT id FROM collections WHERE user_id = $2) + ` + result, err := r.db.db.ExecContext(ctx, query, itemID, userID) + if err != nil { + return err + } + rows, _ := result.RowsAffected() + if rows == 0 { + return ErrNotFound + } + return nil } -func (r *CollectionRepository) GetCollectionContext(ctx context.Context, collectionID string) (string, error) { - items, err := r.GetItems(ctx, collectionID) +func (r *CollectionRepository) GetCollectionContext(ctx context.Context, collectionID, userID string) (string, error) { + items, err := r.GetItems(ctx, collectionID, userID) if err != nil { return "", err } diff --git a/backend/internal/db/computer_artifact_repo.go b/backend/internal/db/computer_artifact_repo.go deleted file mode 100644 index 1181044..0000000 --- a/backend/internal/db/computer_artifact_repo.go +++ /dev/null @@ -1,322 +0,0 @@ -package db - -import ( - "context" - "database/sql" - "encoding/json" - "time" - - "github.com/gooseek/backend/internal/computer" -) - -type ComputerArtifactRepo struct { - db *sql.DB -} - -func NewComputerArtifactRepo(db *sql.DB) *ComputerArtifactRepo { - return &ComputerArtifactRepo{db: db} -} - -func (r *ComputerArtifactRepo) Migrate() error { - query := ` - CREATE TABLE IF NOT EXISTS computer_artifacts ( - id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - task_id UUID NOT NULL, - 
type VARCHAR(50) NOT NULL, - name VARCHAR(255), - content BYTEA, - url TEXT, - size BIGINT DEFAULT 0, - mime_type VARCHAR(100), - metadata JSONB, - created_at TIMESTAMPTZ DEFAULT NOW() - ); - - CREATE INDEX IF NOT EXISTS idx_computer_artifacts_task_id ON computer_artifacts(task_id); - CREATE INDEX IF NOT EXISTS idx_computer_artifacts_type ON computer_artifacts(type); - CREATE INDEX IF NOT EXISTS idx_computer_artifacts_created ON computer_artifacts(created_at DESC); - ` - - _, err := r.db.Exec(query) - return err -} - -func (r *ComputerArtifactRepo) Create(ctx context.Context, artifact *computer.Artifact) error { - metadataJSON, _ := json.Marshal(artifact.Metadata) - - query := ` - INSERT INTO computer_artifacts (id, task_id, type, name, content, url, size, mime_type, metadata, created_at) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) - ` - - _, err := r.db.ExecContext(ctx, query, - artifact.ID, - artifact.TaskID, - artifact.Type, - artifact.Name, - artifact.Content, - artifact.URL, - artifact.Size, - artifact.MimeType, - metadataJSON, - artifact.CreatedAt, - ) - - return err -} - -func (r *ComputerArtifactRepo) GetByID(ctx context.Context, id string) (*computer.Artifact, error) { - query := ` - SELECT id, task_id, type, name, content, url, size, mime_type, metadata, created_at - FROM computer_artifacts - WHERE id = $1 - ` - - var artifact computer.Artifact - var content []byte - var url, mimeType sql.NullString - var metadataJSON []byte - - err := r.db.QueryRowContext(ctx, query, id).Scan( - &artifact.ID, - &artifact.TaskID, - &artifact.Type, - &artifact.Name, - &content, - &url, - &artifact.Size, - &mimeType, - &metadataJSON, - &artifact.CreatedAt, - ) - - if err != nil { - return nil, err - } - - artifact.Content = content - if url.Valid { - artifact.URL = url.String - } - if mimeType.Valid { - artifact.MimeType = mimeType.String - } - if len(metadataJSON) > 0 { - json.Unmarshal(metadataJSON, &artifact.Metadata) - } - - return &artifact, nil -} - -func (r 
*ComputerArtifactRepo) GetByTaskID(ctx context.Context, taskID string) ([]computer.Artifact, error) { - query := ` - SELECT id, task_id, type, name, url, size, mime_type, metadata, created_at - FROM computer_artifacts - WHERE task_id = $1 - ORDER BY created_at ASC - ` - - rows, err := r.db.QueryContext(ctx, query, taskID) - if err != nil { - return nil, err - } - defer rows.Close() - - var artifacts []computer.Artifact - - for rows.Next() { - var artifact computer.Artifact - var url, mimeType sql.NullString - var metadataJSON []byte - - err := rows.Scan( - &artifact.ID, - &artifact.TaskID, - &artifact.Type, - &artifact.Name, - &url, - &artifact.Size, - &mimeType, - &metadataJSON, - &artifact.CreatedAt, - ) - if err != nil { - continue - } - - if url.Valid { - artifact.URL = url.String - } - if mimeType.Valid { - artifact.MimeType = mimeType.String - } - if len(metadataJSON) > 0 { - json.Unmarshal(metadataJSON, &artifact.Metadata) - } - - artifacts = append(artifacts, artifact) - } - - return artifacts, nil -} - -func (r *ComputerArtifactRepo) GetByType(ctx context.Context, taskID, artifactType string) ([]computer.Artifact, error) { - query := ` - SELECT id, task_id, type, name, url, size, mime_type, metadata, created_at - FROM computer_artifacts - WHERE task_id = $1 AND type = $2 - ORDER BY created_at ASC - ` - - rows, err := r.db.QueryContext(ctx, query, taskID, artifactType) - if err != nil { - return nil, err - } - defer rows.Close() - - var artifacts []computer.Artifact - - for rows.Next() { - var artifact computer.Artifact - var url, mimeType sql.NullString - var metadataJSON []byte - - err := rows.Scan( - &artifact.ID, - &artifact.TaskID, - &artifact.Type, - &artifact.Name, - &url, - &artifact.Size, - &mimeType, - &metadataJSON, - &artifact.CreatedAt, - ) - if err != nil { - continue - } - - if url.Valid { - artifact.URL = url.String - } - if mimeType.Valid { - artifact.MimeType = mimeType.String - } - if len(metadataJSON) > 0 { - json.Unmarshal(metadataJSON, 
&artifact.Metadata) - } - - artifacts = append(artifacts, artifact) - } - - return artifacts, nil -} - -func (r *ComputerArtifactRepo) GetContent(ctx context.Context, id string) ([]byte, error) { - query := `SELECT content FROM computer_artifacts WHERE id = $1` - var content []byte - err := r.db.QueryRowContext(ctx, query, id).Scan(&content) - return content, err -} - -func (r *ComputerArtifactRepo) UpdateURL(ctx context.Context, id, url string) error { - query := `UPDATE computer_artifacts SET url = $1 WHERE id = $2` - _, err := r.db.ExecContext(ctx, query, url, id) - return err -} - -func (r *ComputerArtifactRepo) Delete(ctx context.Context, id string) error { - query := `DELETE FROM computer_artifacts WHERE id = $1` - _, err := r.db.ExecContext(ctx, query, id) - return err -} - -func (r *ComputerArtifactRepo) DeleteByTaskID(ctx context.Context, taskID string) error { - query := `DELETE FROM computer_artifacts WHERE task_id = $1` - _, err := r.db.ExecContext(ctx, query, taskID) - return err -} - -func (r *ComputerArtifactRepo) DeleteOlderThan(ctx context.Context, days int) (int64, error) { - query := ` - DELETE FROM computer_artifacts - WHERE created_at < NOW() - INTERVAL '1 day' * $1 - ` - result, err := r.db.ExecContext(ctx, query, days) - if err != nil { - return 0, err - } - return result.RowsAffected() -} - -func (r *ComputerArtifactRepo) GetTotalSize(ctx context.Context, taskID string) (int64, error) { - query := `SELECT COALESCE(SUM(size), 0) FROM computer_artifacts WHERE task_id = $1` - var size int64 - err := r.db.QueryRowContext(ctx, query, taskID).Scan(&size) - return size, err -} - -func (r *ComputerArtifactRepo) Count(ctx context.Context, taskID string) (int64, error) { - query := `SELECT COUNT(*) FROM computer_artifacts WHERE task_id = $1` - var count int64 - err := r.db.QueryRowContext(ctx, query, taskID).Scan(&count) - return count, err -} - -type ArtifactSummary struct { - ID string `json:"id"` - TaskID string `json:"taskId"` - Type string 
`json:"type"` - Name string `json:"name"` - URL string `json:"url"` - Size int64 `json:"size"` - MimeType string `json:"mimeType"` - CreatedAt time.Time `json:"createdAt"` -} - -func (r *ComputerArtifactRepo) GetSummaries(ctx context.Context, taskID string) ([]ArtifactSummary, error) { - query := ` - SELECT id, task_id, type, name, url, size, mime_type, created_at - FROM computer_artifacts - WHERE task_id = $1 - ORDER BY created_at ASC - ` - - rows, err := r.db.QueryContext(ctx, query, taskID) - if err != nil { - return nil, err - } - defer rows.Close() - - var summaries []ArtifactSummary - - for rows.Next() { - var s ArtifactSummary - var url, mimeType sql.NullString - - err := rows.Scan( - &s.ID, - &s.TaskID, - &s.Type, - &s.Name, - &url, - &s.Size, - &mimeType, - &s.CreatedAt, - ) - if err != nil { - continue - } - - if url.Valid { - s.URL = url.String - } - if mimeType.Valid { - s.MimeType = mimeType.String - } - - summaries = append(summaries, s) - } - - return summaries, nil -} diff --git a/backend/internal/db/computer_memory_repo.go b/backend/internal/db/computer_memory_repo.go deleted file mode 100644 index 1dc9e31..0000000 --- a/backend/internal/db/computer_memory_repo.go +++ /dev/null @@ -1,306 +0,0 @@ -package db - -import ( - "context" - "database/sql" - "encoding/json" - "strings" - "time" - - "github.com/gooseek/backend/internal/computer" -) - -type ComputerMemoryRepo struct { - db *sql.DB -} - -func NewComputerMemoryRepo(db *sql.DB) *ComputerMemoryRepo { - return &ComputerMemoryRepo{db: db} -} - -func (r *ComputerMemoryRepo) Migrate() error { - query := ` - CREATE TABLE IF NOT EXISTS computer_memory ( - id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - user_id UUID NOT NULL, - task_id UUID, - key VARCHAR(255) NOT NULL, - value JSONB NOT NULL, - type VARCHAR(50), - tags TEXT[], - created_at TIMESTAMPTZ DEFAULT NOW(), - expires_at TIMESTAMPTZ - ); - - CREATE INDEX IF NOT EXISTS idx_computer_memory_user_id ON computer_memory(user_id); - CREATE INDEX IF 
NOT EXISTS idx_computer_memory_task_id ON computer_memory(task_id); - CREATE INDEX IF NOT EXISTS idx_computer_memory_type ON computer_memory(type); - CREATE INDEX IF NOT EXISTS idx_computer_memory_expires ON computer_memory(expires_at) WHERE expires_at IS NOT NULL; - CREATE INDEX IF NOT EXISTS idx_computer_memory_key ON computer_memory(key); - ` - - _, err := r.db.Exec(query) - return err -} - -func (r *ComputerMemoryRepo) Store(ctx context.Context, entry *computer.MemoryEntry) error { - valueJSON, err := json.Marshal(entry.Value) - if err != nil { - return err - } - - query := ` - INSERT INTO computer_memory (id, user_id, task_id, key, value, type, tags, created_at, expires_at) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) - ON CONFLICT (id) DO UPDATE SET - value = EXCLUDED.value, - type = EXCLUDED.type, - tags = EXCLUDED.tags, - expires_at = EXCLUDED.expires_at - ` - - var taskID interface{} - if entry.TaskID != "" { - taskID = entry.TaskID - } - - _, err = r.db.ExecContext(ctx, query, - entry.ID, - entry.UserID, - taskID, - entry.Key, - valueJSON, - entry.Type, - entry.Tags, - entry.CreatedAt, - entry.ExpiresAt, - ) - - return err -} - -func (r *ComputerMemoryRepo) GetByUser(ctx context.Context, userID string, limit int) ([]computer.MemoryEntry, error) { - query := ` - SELECT id, user_id, task_id, key, value, type, tags, created_at, expires_at - FROM computer_memory - WHERE user_id = $1 - AND (expires_at IS NULL OR expires_at > NOW()) - ORDER BY created_at DESC - LIMIT $2 - ` - - rows, err := r.db.QueryContext(ctx, query, userID, limit) - if err != nil { - return nil, err - } - defer rows.Close() - - return r.scanEntries(rows) -} - -func (r *ComputerMemoryRepo) GetByTask(ctx context.Context, taskID string) ([]computer.MemoryEntry, error) { - query := ` - SELECT id, user_id, task_id, key, value, type, tags, created_at, expires_at - FROM computer_memory - WHERE task_id = $1 - AND (expires_at IS NULL OR expires_at > NOW()) - ORDER BY created_at ASC - ` - - rows, 
err := r.db.QueryContext(ctx, query, taskID) - if err != nil { - return nil, err - } - defer rows.Close() - - return r.scanEntries(rows) -} - -func (r *ComputerMemoryRepo) Search(ctx context.Context, userID, query string, limit int) ([]computer.MemoryEntry, error) { - searchTerms := strings.Fields(strings.ToLower(query)) - if len(searchTerms) == 0 { - return r.GetByUser(ctx, userID, limit) - } - - likePatterns := make([]string, len(searchTerms)) - args := make([]interface{}, len(searchTerms)+2) - args[0] = userID - - for i, term := range searchTerms { - likePatterns[i] = "%" + term + "%" - args[i+1] = likePatterns[i] - } - args[len(args)-1] = limit - - var conditions []string - for i := range searchTerms { - conditions = append(conditions, "(LOWER(key) LIKE $"+string(rune('2'+i))+" OR LOWER(value::text) LIKE $"+string(rune('2'+i))+")") - } - - sqlQuery := ` - SELECT id, user_id, task_id, key, value, type, tags, created_at, expires_at - FROM computer_memory - WHERE user_id = $1 - AND (expires_at IS NULL OR expires_at > NOW()) - AND (` + strings.Join(conditions, " OR ") + `) - ORDER BY created_at DESC - LIMIT $` + string(rune('2'+len(searchTerms))) - - rows, err := r.db.QueryContext(ctx, sqlQuery, args...) 
- if err != nil { - return r.GetByUser(ctx, userID, limit) - } - defer rows.Close() - - return r.scanEntries(rows) -} - -func (r *ComputerMemoryRepo) GetByType(ctx context.Context, userID, memType string, limit int) ([]computer.MemoryEntry, error) { - query := ` - SELECT id, user_id, task_id, key, value, type, tags, created_at, expires_at - FROM computer_memory - WHERE user_id = $1 AND type = $2 - AND (expires_at IS NULL OR expires_at > NOW()) - ORDER BY created_at DESC - LIMIT $3 - ` - - rows, err := r.db.QueryContext(ctx, query, userID, memType, limit) - if err != nil { - return nil, err - } - defer rows.Close() - - return r.scanEntries(rows) -} - -func (r *ComputerMemoryRepo) GetByKey(ctx context.Context, userID, key string) (*computer.MemoryEntry, error) { - query := ` - SELECT id, user_id, task_id, key, value, type, tags, created_at, expires_at - FROM computer_memory - WHERE user_id = $1 AND key = $2 - AND (expires_at IS NULL OR expires_at > NOW()) - ORDER BY created_at DESC - LIMIT 1 - ` - - var entry computer.MemoryEntry - var valueJSON []byte - var taskID sql.NullString - var expiresAt sql.NullTime - var tags []string - - err := r.db.QueryRowContext(ctx, query, userID, key).Scan( - &entry.ID, - &entry.UserID, - &taskID, - &entry.Key, - &valueJSON, - &entry.Type, - &tags, - &entry.CreatedAt, - &expiresAt, - ) - - if err != nil { - return nil, err - } - - if taskID.Valid { - entry.TaskID = taskID.String - } - if expiresAt.Valid { - entry.ExpiresAt = &expiresAt.Time - } - entry.Tags = tags - - json.Unmarshal(valueJSON, &entry.Value) - - return &entry, nil -} - -func (r *ComputerMemoryRepo) Delete(ctx context.Context, id string) error { - query := `DELETE FROM computer_memory WHERE id = $1` - _, err := r.db.ExecContext(ctx, query, id) - return err -} - -func (r *ComputerMemoryRepo) DeleteByUser(ctx context.Context, userID string) error { - query := `DELETE FROM computer_memory WHERE user_id = $1` - _, err := r.db.ExecContext(ctx, query, userID) - return err -} 
- -func (r *ComputerMemoryRepo) DeleteByTask(ctx context.Context, taskID string) error { - query := `DELETE FROM computer_memory WHERE task_id = $1` - _, err := r.db.ExecContext(ctx, query, taskID) - return err -} - -func (r *ComputerMemoryRepo) DeleteExpired(ctx context.Context) (int64, error) { - query := `DELETE FROM computer_memory WHERE expires_at IS NOT NULL AND expires_at < NOW()` - result, err := r.db.ExecContext(ctx, query) - if err != nil { - return 0, err - } - return result.RowsAffected() -} - -func (r *ComputerMemoryRepo) scanEntries(rows *sql.Rows) ([]computer.MemoryEntry, error) { - var entries []computer.MemoryEntry - - for rows.Next() { - var entry computer.MemoryEntry - var valueJSON []byte - var taskID sql.NullString - var expiresAt sql.NullTime - var tags []string - - err := rows.Scan( - &entry.ID, - &entry.UserID, - &taskID, - &entry.Key, - &valueJSON, - &entry.Type, - &tags, - &entry.CreatedAt, - &expiresAt, - ) - if err != nil { - continue - } - - if taskID.Valid { - entry.TaskID = taskID.String - } - if expiresAt.Valid { - entry.ExpiresAt = &expiresAt.Time - } - entry.Tags = tags - - json.Unmarshal(valueJSON, &entry.Value) - - entries = append(entries, entry) - } - - return entries, nil -} - -func (r *ComputerMemoryRepo) Count(ctx context.Context, userID string) (int64, error) { - query := ` - SELECT COUNT(*) - FROM computer_memory - WHERE user_id = $1 - AND (expires_at IS NULL OR expires_at > NOW()) - ` - var count int64 - err := r.db.QueryRowContext(ctx, query, userID).Scan(&count) - return count, err -} - -func (r *ComputerMemoryRepo) UpdateExpiry(ctx context.Context, id string, expiresAt time.Time) error { - query := `UPDATE computer_memory SET expires_at = $1 WHERE id = $2` - _, err := r.db.ExecContext(ctx, query, expiresAt, id) - return err -} diff --git a/backend/internal/db/computer_task_repo.go b/backend/internal/db/computer_task_repo.go deleted file mode 100644 index d1dbb2c..0000000 --- a/backend/internal/db/computer_task_repo.go 
+++ /dev/null @@ -1,411 +0,0 @@ -package db - -import ( - "context" - "database/sql" - "encoding/json" - "fmt" - "time" - - "github.com/gooseek/backend/internal/computer" -) - -type ComputerTaskRepo struct { - db *sql.DB -} - -func NewComputerTaskRepo(db *sql.DB) *ComputerTaskRepo { - return &ComputerTaskRepo{db: db} -} - -func (r *ComputerTaskRepo) Migrate() error { - query := ` - CREATE TABLE IF NOT EXISTS computer_tasks ( - id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - user_id UUID NOT NULL, - query TEXT NOT NULL, - status VARCHAR(20) NOT NULL DEFAULT 'pending', - plan JSONB, - sub_tasks JSONB, - artifacts JSONB, - memory JSONB, - progress INT DEFAULT 0, - message TEXT, - error TEXT, - schedule JSONB, - next_run_at TIMESTAMPTZ, - run_count INT DEFAULT 0, - total_cost DECIMAL(10,6) DEFAULT 0, - created_at TIMESTAMPTZ DEFAULT NOW(), - updated_at TIMESTAMPTZ DEFAULT NOW(), - completed_at TIMESTAMPTZ - ); - - CREATE INDEX IF NOT EXISTS idx_computer_tasks_user_id ON computer_tasks(user_id); - CREATE INDEX IF NOT EXISTS idx_computer_tasks_status ON computer_tasks(status); - CREATE INDEX IF NOT EXISTS idx_computer_tasks_next_run ON computer_tasks(next_run_at) WHERE next_run_at IS NOT NULL; - CREATE INDEX IF NOT EXISTS idx_computer_tasks_created ON computer_tasks(created_at DESC); - ` - - _, err := r.db.Exec(query) - return err -} - -func (r *ComputerTaskRepo) Create(ctx context.Context, task *computer.ComputerTask) error { - planJSON, _ := json.Marshal(task.Plan) - subTasksJSON, _ := json.Marshal(task.SubTasks) - artifactsJSON, _ := json.Marshal(task.Artifacts) - memoryJSON, _ := json.Marshal(task.Memory) - scheduleJSON, _ := json.Marshal(task.Schedule) - - query := ` - INSERT INTO computer_tasks ( - id, user_id, query, status, plan, sub_tasks, artifacts, memory, - progress, message, error, schedule, next_run_at, run_count, total_cost, - created_at, updated_at, completed_at - ) VALUES ( - $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, 
$18 - ) - ` - - _, err := r.db.ExecContext(ctx, query, - task.ID, - task.UserID, - task.Query, - task.Status, - planJSON, - subTasksJSON, - artifactsJSON, - memoryJSON, - task.Progress, - task.Message, - task.Error, - scheduleJSON, - task.NextRunAt, - task.RunCount, - task.TotalCost, - task.CreatedAt, - task.UpdatedAt, - task.CompletedAt, - ) - - return err -} - -func (r *ComputerTaskRepo) Update(ctx context.Context, task *computer.ComputerTask) error { - planJSON, _ := json.Marshal(task.Plan) - subTasksJSON, _ := json.Marshal(task.SubTasks) - artifactsJSON, _ := json.Marshal(task.Artifacts) - memoryJSON, _ := json.Marshal(task.Memory) - scheduleJSON, _ := json.Marshal(task.Schedule) - - query := ` - UPDATE computer_tasks SET - status = $1, - plan = $2, - sub_tasks = $3, - artifacts = $4, - memory = $5, - progress = $6, - message = $7, - error = $8, - schedule = $9, - next_run_at = $10, - run_count = $11, - total_cost = $12, - updated_at = $13, - completed_at = $14 - WHERE id = $15 - ` - - _, err := r.db.ExecContext(ctx, query, - task.Status, - planJSON, - subTasksJSON, - artifactsJSON, - memoryJSON, - task.Progress, - task.Message, - task.Error, - scheduleJSON, - task.NextRunAt, - task.RunCount, - task.TotalCost, - time.Now(), - task.CompletedAt, - task.ID, - ) - - return err -} - -func (r *ComputerTaskRepo) GetByID(ctx context.Context, id string) (*computer.ComputerTask, error) { - query := ` - SELECT id, user_id, query, status, plan, sub_tasks, artifacts, memory, - progress, message, error, schedule, next_run_at, run_count, total_cost, - created_at, updated_at, completed_at - FROM computer_tasks - WHERE id = $1 - ` - - var task computer.ComputerTask - var planJSON, subTasksJSON, artifactsJSON, memoryJSON, scheduleJSON []byte - var message, errStr sql.NullString - var nextRunAt, completedAt sql.NullTime - - err := r.db.QueryRowContext(ctx, query, id).Scan( - &task.ID, - &task.UserID, - &task.Query, - &task.Status, - &planJSON, - &subTasksJSON, - &artifactsJSON, - 
&memoryJSON, - &task.Progress, - &message, - &errStr, - &scheduleJSON, - &nextRunAt, - &task.RunCount, - &task.TotalCost, - &task.CreatedAt, - &task.UpdatedAt, - &completedAt, - ) - - if err != nil { - return nil, err - } - - if len(planJSON) > 0 { - json.Unmarshal(planJSON, &task.Plan) - } - if len(subTasksJSON) > 0 { - json.Unmarshal(subTasksJSON, &task.SubTasks) - } - if len(artifactsJSON) > 0 { - json.Unmarshal(artifactsJSON, &task.Artifacts) - } - if len(memoryJSON) > 0 { - json.Unmarshal(memoryJSON, &task.Memory) - } - if len(scheduleJSON) > 0 { - json.Unmarshal(scheduleJSON, &task.Schedule) - } - - if message.Valid { - task.Message = message.String - } - if errStr.Valid { - task.Error = errStr.String - } - if nextRunAt.Valid { - task.NextRunAt = &nextRunAt.Time - } - if completedAt.Valid { - task.CompletedAt = &completedAt.Time - } - - return &task, nil -} - -func (r *ComputerTaskRepo) GetByUserID(ctx context.Context, userID string, limit, offset int) ([]computer.ComputerTask, error) { - query := ` - SELECT id, user_id, query, status, plan, sub_tasks, artifacts, memory, - progress, message, error, schedule, next_run_at, run_count, total_cost, - created_at, updated_at, completed_at - FROM computer_tasks - WHERE user_id = $1 - ORDER BY created_at DESC - LIMIT $2 OFFSET $3 - ` - - rows, err := r.db.QueryContext(ctx, query, userID, limit, offset) - if err != nil { - return nil, err - } - defer rows.Close() - - var tasks []computer.ComputerTask - - for rows.Next() { - var task computer.ComputerTask - var planJSON, subTasksJSON, artifactsJSON, memoryJSON, scheduleJSON []byte - var message, errStr sql.NullString - var nextRunAt, completedAt sql.NullTime - - err := rows.Scan( - &task.ID, - &task.UserID, - &task.Query, - &task.Status, - &planJSON, - &subTasksJSON, - &artifactsJSON, - &memoryJSON, - &task.Progress, - &message, - &errStr, - &scheduleJSON, - &nextRunAt, - &task.RunCount, - &task.TotalCost, - &task.CreatedAt, - &task.UpdatedAt, - &completedAt, - ) - if 
err != nil { - continue - } - - if len(planJSON) > 0 { - json.Unmarshal(planJSON, &task.Plan) - } - if len(subTasksJSON) > 0 { - json.Unmarshal(subTasksJSON, &task.SubTasks) - } - if len(artifactsJSON) > 0 { - json.Unmarshal(artifactsJSON, &task.Artifacts) - } - if len(memoryJSON) > 0 { - json.Unmarshal(memoryJSON, &task.Memory) - } - if len(scheduleJSON) > 0 { - json.Unmarshal(scheduleJSON, &task.Schedule) - } - - if message.Valid { - task.Message = message.String - } - if errStr.Valid { - task.Error = errStr.String - } - if nextRunAt.Valid { - task.NextRunAt = &nextRunAt.Time - } - if completedAt.Valid { - task.CompletedAt = &completedAt.Time - } - - tasks = append(tasks, task) - } - - return tasks, nil -} - -func (r *ComputerTaskRepo) GetScheduled(ctx context.Context) ([]computer.ComputerTask, error) { - query := ` - SELECT id, user_id, query, status, plan, sub_tasks, artifacts, memory, - progress, message, error, schedule, next_run_at, run_count, total_cost, - created_at, updated_at, completed_at - FROM computer_tasks - WHERE status = 'scheduled' AND schedule IS NOT NULL - ORDER BY next_run_at ASC - ` - - rows, err := r.db.QueryContext(ctx, query) - if err != nil { - return nil, err - } - defer rows.Close() - - var tasks []computer.ComputerTask - - for rows.Next() { - var task computer.ComputerTask - var planJSON, subTasksJSON, artifactsJSON, memoryJSON, scheduleJSON []byte - var message, errStr sql.NullString - var nextRunAt, completedAt sql.NullTime - - err := rows.Scan( - &task.ID, - &task.UserID, - &task.Query, - &task.Status, - &planJSON, - &subTasksJSON, - &artifactsJSON, - &memoryJSON, - &task.Progress, - &message, - &errStr, - &scheduleJSON, - &nextRunAt, - &task.RunCount, - &task.TotalCost, - &task.CreatedAt, - &task.UpdatedAt, - &completedAt, - ) - if err != nil { - continue - } - - if len(planJSON) > 0 { - json.Unmarshal(planJSON, &task.Plan) - } - if len(subTasksJSON) > 0 { - json.Unmarshal(subTasksJSON, &task.SubTasks) - } - if len(artifactsJSON) > 
0 { - json.Unmarshal(artifactsJSON, &task.Artifacts) - } - if len(memoryJSON) > 0 { - json.Unmarshal(memoryJSON, &task.Memory) - } - if len(scheduleJSON) > 0 { - json.Unmarshal(scheduleJSON, &task.Schedule) - } - - if message.Valid { - task.Message = message.String - } - if errStr.Valid { - task.Error = errStr.String - } - if nextRunAt.Valid { - task.NextRunAt = &nextRunAt.Time - } - if completedAt.Valid { - task.CompletedAt = &completedAt.Time - } - - tasks = append(tasks, task) - } - - return tasks, nil -} - -func (r *ComputerTaskRepo) Delete(ctx context.Context, id string) error { - query := `DELETE FROM computer_tasks WHERE id = $1` - _, err := r.db.ExecContext(ctx, query, id) - return err -} - -func (r *ComputerTaskRepo) DeleteOlderThan(ctx context.Context, days int) (int64, error) { - query := ` - DELETE FROM computer_tasks - WHERE created_at < NOW() - INTERVAL '%d days' - AND status IN ('completed', 'failed', 'cancelled') - ` - result, err := r.db.ExecContext(ctx, fmt.Sprintf(query, days)) - if err != nil { - return 0, err - } - return result.RowsAffected() -} - -func (r *ComputerTaskRepo) CountByUser(ctx context.Context, userID string) (int64, error) { - query := `SELECT COUNT(*) FROM computer_tasks WHERE user_id = $1` - var count int64 - err := r.db.QueryRowContext(ctx, query, userID).Scan(&count) - return count, err -} - -func (r *ComputerTaskRepo) CountByStatus(ctx context.Context, status string) (int64, error) { - query := `SELECT COUNT(*) FROM computer_tasks WHERE status = $1` - var count int64 - err := r.db.QueryRowContext(ctx, query, status).Scan(&count) - return count, err -} diff --git a/backend/internal/db/file_repo.go b/backend/internal/db/file_repo.go index 326b3d7..1d521d3 100644 --- a/backend/internal/db/file_repo.go +++ b/backend/internal/db/file_repo.go @@ -99,20 +99,34 @@ func (r *FileRepository) GetByUserID(ctx context.Context, userID string, limit, return files, nil } -func (r *FileRepository) UpdateExtractedText(ctx context.Context, id, 
text string) error { - _, err := r.db.db.ExecContext(ctx, - "UPDATE uploaded_files SET extracted_text = $2 WHERE id = $1", - id, text, +func (r *FileRepository) UpdateExtractedText(ctx context.Context, id, text, userID string) error { + result, err := r.db.db.ExecContext(ctx, + "UPDATE uploaded_files SET extracted_text = $2 WHERE id = $1 AND user_id = $3", + id, text, userID, ) - return err + if err != nil { + return err + } + rows, _ := result.RowsAffected() + if rows == 0 { + return ErrNotFound + } + return nil } -func (r *FileRepository) Delete(ctx context.Context, id string) error { - _, err := r.db.db.ExecContext(ctx, "DELETE FROM uploaded_files WHERE id = $1", id) - return err +func (r *FileRepository) Delete(ctx context.Context, id, userID string) error { + result, err := r.db.db.ExecContext(ctx, "DELETE FROM uploaded_files WHERE id = $1 AND user_id = $2", id, userID) + if err != nil { + return err + } + rows, _ := result.RowsAffected() + if rows == 0 { + return ErrNotFound + } + return nil } -func (r *FileRepository) GetByIDs(ctx context.Context, ids []string) ([]*UploadedFile, error) { +func (r *FileRepository) GetByIDs(ctx context.Context, ids []string, userID string) ([]*UploadedFile, error) { if len(ids) == 0 { return nil, nil } @@ -120,10 +134,10 @@ func (r *FileRepository) GetByIDs(ctx context.Context, ids []string) ([]*Uploade query := ` SELECT id, user_id, filename, file_type, file_size, storage_path, extracted_text, metadata, created_at FROM uploaded_files - WHERE id = ANY($1) + WHERE id = ANY($1) AND user_id = $2 ` - rows, err := r.db.db.QueryContext(ctx, query, ids) + rows, err := r.db.db.QueryContext(ctx, query, ids, userID) if err != nil { return nil, err } diff --git a/backend/internal/db/memory_repo.go b/backend/internal/db/memory_repo.go index 6640b83..52e3afc 100644 --- a/backend/internal/db/memory_repo.go +++ b/backend/internal/db/memory_repo.go @@ -147,17 +147,31 @@ func (r *MemoryRepository) GetContextForUser(ctx context.Context, userID 
string) return context, nil } -func (r *MemoryRepository) IncrementUseCount(ctx context.Context, id string) error { - _, err := r.db.db.ExecContext(ctx, - "UPDATE user_memories SET use_count = use_count + 1, last_used = NOW() WHERE id = $1", - id, +func (r *MemoryRepository) IncrementUseCount(ctx context.Context, id, userID string) error { + result, err := r.db.db.ExecContext(ctx, + "UPDATE user_memories SET use_count = use_count + 1, last_used = NOW() WHERE id = $1 AND user_id = $2", + id, userID, ) - return err + if err != nil { + return err + } + rows, _ := result.RowsAffected() + if rows == 0 { + return ErrNotFound + } + return nil } -func (r *MemoryRepository) Delete(ctx context.Context, id string) error { - _, err := r.db.db.ExecContext(ctx, "DELETE FROM user_memories WHERE id = $1", id) - return err +func (r *MemoryRepository) Delete(ctx context.Context, id, userID string) error { + result, err := r.db.db.ExecContext(ctx, "DELETE FROM user_memories WHERE id = $1 AND user_id = $2", id, userID) + if err != nil { + return err + } + rows, _ := result.RowsAffected() + if rows == 0 { + return ErrNotFound + } + return nil } func (r *MemoryRepository) DeleteByUserID(ctx context.Context, userID string) error { diff --git a/backend/internal/db/page_repo.go b/backend/internal/db/page_repo.go index c4c5b7a..b8c51c4 100644 --- a/backend/internal/db/page_repo.go +++ b/backend/internal/db/page_repo.go @@ -182,27 +182,41 @@ func (r *PageRepository) GetByUserID(ctx context.Context, userID string, limit, return pagesList, nil } -func (r *PageRepository) Update(ctx context.Context, p *pages.Page) error { +func (r *PageRepository) Update(ctx context.Context, p *pages.Page, userID string) error { sectionsJSON, _ := json.Marshal(p.Sections) sourcesJSON, _ := json.Marshal(p.Sources) query := ` UPDATE pages SET title = $2, subtitle = $3, sections = $4, sources = $5, thumbnail = $6, is_public = $7, updated_at = NOW() - WHERE id = $1 + WHERE id = $1 AND user_id = $8 ` - _, err := 
r.db.db.ExecContext(ctx, query, - p.ID, p.Title, p.Subtitle, sectionsJSON, sourcesJSON, p.Thumbnail, p.IsPublic, + result, err := r.db.db.ExecContext(ctx, query, + p.ID, p.Title, p.Subtitle, sectionsJSON, sourcesJSON, p.Thumbnail, p.IsPublic, userID, ) - return err + if err != nil { + return err + } + rows, _ := result.RowsAffected() + if rows == 0 { + return ErrNotFound + } + return nil } -func (r *PageRepository) SetShareID(ctx context.Context, pageID, shareID string) error { - _, err := r.db.db.ExecContext(ctx, - "UPDATE pages SET share_id = $2, is_public = true WHERE id = $1", - pageID, shareID, +func (r *PageRepository) SetShareID(ctx context.Context, pageID, shareID, userID string) error { + result, err := r.db.db.ExecContext(ctx, + "UPDATE pages SET share_id = $2, is_public = true WHERE id = $1 AND user_id = $3", + pageID, shareID, userID, ) - return err + if err != nil { + return err + } + rows, _ := result.RowsAffected() + if rows == 0 { + return ErrNotFound + } + return nil } func (r *PageRepository) IncrementViewCount(ctx context.Context, id string) error { @@ -213,7 +227,14 @@ func (r *PageRepository) IncrementViewCount(ctx context.Context, id string) erro return err } -func (r *PageRepository) Delete(ctx context.Context, id string) error { - _, err := r.db.db.ExecContext(ctx, "DELETE FROM pages WHERE id = $1", id) - return err +func (r *PageRepository) Delete(ctx context.Context, id, userID string) error { + result, err := r.db.db.ExecContext(ctx, "DELETE FROM pages WHERE id = $1 AND user_id = $2", id, userID) + if err != nil { + return err + } + rows, _ := result.RowsAffected() + if rows == 0 { + return ErrNotFound + } + return nil } diff --git a/backend/internal/db/space_repo.go b/backend/internal/db/space_repo.go index 0d20bb4..fba4999 100644 --- a/backend/internal/db/space_repo.go +++ b/backend/internal/db/space_repo.go @@ -139,7 +139,7 @@ func (r *SpaceRepository) GetByUserID(ctx context.Context, userID string) ([]*Sp return spaces, nil } -func (r 
*SpaceRepository) Update(ctx context.Context, s *Space) error { +func (r *SpaceRepository) Update(ctx context.Context, s *Space, userID string) error { settingsJSON, _ := json.Marshal(s.Settings) query := ` @@ -147,17 +147,31 @@ func (r *SpaceRepository) Update(ctx context.Context, s *Space) error { SET name = $2, description = $3, icon = $4, color = $5, custom_instructions = $6, default_focus_mode = $7, default_model = $8, is_public = $9, settings = $10, updated_at = NOW() - WHERE id = $1 + WHERE id = $1 AND user_id = $11 ` - _, err := r.db.db.ExecContext(ctx, query, + result, err := r.db.db.ExecContext(ctx, query, s.ID, s.Name, s.Description, s.Icon, s.Color, s.CustomInstructions, s.DefaultFocusMode, s.DefaultModel, - s.IsPublic, settingsJSON, + s.IsPublic, settingsJSON, userID, ) - return err + if err != nil { + return err + } + rows, _ := result.RowsAffected() + if rows == 0 { + return ErrNotFound + } + return nil } -func (r *SpaceRepository) Delete(ctx context.Context, id string) error { - _, err := r.db.db.ExecContext(ctx, "DELETE FROM spaces WHERE id = $1", id) - return err +func (r *SpaceRepository) Delete(ctx context.Context, id, userID string) error { + result, err := r.db.db.ExecContext(ctx, "DELETE FROM spaces WHERE id = $1 AND user_id = $2", id, userID) + if err != nil { + return err + } + rows, _ := result.RowsAffected() + if rows == 0 { + return ErrNotFound + } + return nil } diff --git a/backend/internal/db/thread_repo.go b/backend/internal/db/thread_repo.go index 0d39c13..8c4c837 100644 --- a/backend/internal/db/thread_repo.go +++ b/backend/internal/db/thread_repo.go @@ -4,9 +4,15 @@ import ( "context" "database/sql" "encoding/json" + "errors" "time" ) +var ( + ErrNotFound = errors.New("resource not found") + ErrForbidden = errors.New("access denied") +) + type Thread struct { ID string `json:"id"` UserID string `json:"userId"` @@ -176,30 +182,63 @@ func (r *ThreadRepository) GetByUserID(ctx context.Context, userID string, limit return threads, nil 
} -func (r *ThreadRepository) Update(ctx context.Context, t *Thread) error { +func (r *ThreadRepository) Update(ctx context.Context, t *Thread, userID string) error { query := ` UPDATE threads SET title = $2, focus_mode = $3, is_public = $4, updated_at = NOW() - WHERE id = $1 + WHERE id = $1 AND user_id = $5 ` - _, err := r.db.db.ExecContext(ctx, query, t.ID, t.Title, t.FocusMode, t.IsPublic) - return err + result, err := r.db.db.ExecContext(ctx, query, t.ID, t.Title, t.FocusMode, t.IsPublic, userID) + if err != nil { + return err + } + rows, _ := result.RowsAffected() + if rows == 0 { + return ErrNotFound + } + return nil } -func (r *ThreadRepository) SetShareID(ctx context.Context, threadID, shareID string) error { - _, err := r.db.db.ExecContext(ctx, - "UPDATE threads SET share_id = $2, is_public = true WHERE id = $1", - threadID, shareID, +func (r *ThreadRepository) SetShareID(ctx context.Context, threadID, shareID, userID string) error { + result, err := r.db.db.ExecContext(ctx, + "UPDATE threads SET share_id = $2, is_public = true WHERE id = $1 AND user_id = $3", + threadID, shareID, userID, ) - return err + if err != nil { + return err + } + rows, _ := result.RowsAffected() + if rows == 0 { + return ErrNotFound + } + return nil } -func (r *ThreadRepository) Delete(ctx context.Context, id string) error { - _, err := r.db.db.ExecContext(ctx, "DELETE FROM threads WHERE id = $1", id) - return err +func (r *ThreadRepository) Delete(ctx context.Context, id, userID string) error { + result, err := r.db.db.ExecContext(ctx, "DELETE FROM threads WHERE id = $1 AND user_id = $2", id, userID) + if err != nil { + return err + } + rows, _ := result.RowsAffected() + if rows == 0 { + return ErrNotFound + } + return nil } -func (r *ThreadRepository) AddMessage(ctx context.Context, msg *ThreadMessage) error { +func (r *ThreadRepository) AddMessage(ctx context.Context, msg *ThreadMessage, userID string) error { + var ownerID string + err := r.db.db.QueryRowContext(ctx, "SELECT 
user_id FROM threads WHERE id = $1", msg.ThreadID).Scan(&ownerID) + if err == sql.ErrNoRows { + return ErrNotFound + } + if err != nil { + return err + } + if ownerID != userID { + return ErrForbidden + } + sourcesJSON, _ := json.Marshal(msg.Sources) widgetsJSON, _ := json.Marshal(msg.Widgets) relatedJSON, _ := json.Marshal(msg.RelatedQuestions) @@ -209,7 +248,7 @@ func (r *ThreadRepository) AddMessage(ctx context.Context, msg *ThreadMessage) e VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING id, created_at ` - err := r.db.db.QueryRowContext(ctx, query, + err = r.db.db.QueryRowContext(ctx, query, msg.ThreadID, msg.Role, msg.Content, sourcesJSON, widgetsJSON, relatedJSON, msg.Model, msg.TokensUsed, ).Scan(&msg.ID, &msg.CreatedAt) @@ -220,7 +259,20 @@ func (r *ThreadRepository) AddMessage(ctx context.Context, msg *ThreadMessage) e return err } -func (r *ThreadRepository) GetMessages(ctx context.Context, threadID string, limit, offset int) ([]ThreadMessage, error) { +func (r *ThreadRepository) GetMessages(ctx context.Context, threadID, userID string, limit, offset int) ([]ThreadMessage, error) { + var ownerID string + var isPublic bool + err := r.db.db.QueryRowContext(ctx, "SELECT user_id, is_public FROM threads WHERE id = $1", threadID).Scan(&ownerID, &isPublic) + if err == sql.ErrNoRows { + return nil, ErrNotFound + } + if err != nil { + return nil, err + } + if ownerID != userID && !isPublic { + return nil, ErrForbidden + } + query := ` SELECT id, thread_id, role, content, sources, widgets, related_questions, model, tokens_used, created_at FROM thread_messages @@ -257,14 +309,21 @@ func (r *ThreadRepository) GetMessages(ctx context.Context, threadID string, lim return messages, nil } -func (r *ThreadRepository) GenerateTitle(ctx context.Context, threadID, firstMessage string) error { +func (r *ThreadRepository) GenerateTitle(ctx context.Context, threadID, firstMessage, userID string) error { title := firstMessage if len(title) > 100 { title = title[:97] + "..." 
} - _, err := r.db.db.ExecContext(ctx, - "UPDATE threads SET title = $2 WHERE id = $1", - threadID, title, + result, err := r.db.db.ExecContext(ctx, + "UPDATE threads SET title = $2 WHERE id = $1 AND user_id = $3", + threadID, title, userID, ) - return err + if err != nil { + return err + } + rows, _ := result.RowsAffected() + if rows == 0 { + return ErrNotFound + } + return nil } diff --git a/backend/internal/usage/repository.go b/backend/internal/usage/repository.go new file mode 100644 index 0000000..e513d29 --- /dev/null +++ b/backend/internal/usage/repository.go @@ -0,0 +1,177 @@ +package usage + +import ( + "context" + "database/sql" + "time" +) + +type Repository struct { + db *sql.DB +} + +func NewRepository(db *sql.DB) *Repository { + return &Repository{db: db} +} + +func (r *Repository) RunMigrations(ctx context.Context) error { + migrations := []string{ + `CREATE TABLE IF NOT EXISTS usage_metrics ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL, + date DATE NOT NULL, + tier VARCHAR(50) NOT NULL DEFAULT 'free', + api_requests INT DEFAULT 0, + llm_requests INT DEFAULT 0, + llm_tokens INT DEFAULT 0, + search_requests INT DEFAULT 0, + storage_used BIGINT DEFAULT 0, + created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW(), + UNIQUE(user_id, date) + )`, + `CREATE INDEX IF NOT EXISTS idx_usage_user_date ON usage_metrics(user_id, date DESC)`, + `CREATE INDEX IF NOT EXISTS idx_usage_date ON usage_metrics(date)`, + } + + for _, m := range migrations { + if _, err := r.db.ExecContext(ctx, m); err != nil { + return err + } + } + return nil +} + +func (r *Repository) GetTodayUsage(ctx context.Context, userID string) (*UsageMetric, error) { + today := time.Now().Format("2006-01-02") + return r.GetUsageByDate(ctx, userID, today) +} + +func (r *Repository) GetUsageByDate(ctx context.Context, userID, date string) (*UsageMetric, error) { + query := ` + SELECT id, user_id, date, tier, api_requests, llm_requests, llm_tokens, + 
search_requests, storage_used, created_at, updated_at + FROM usage_metrics + WHERE user_id = $1 AND date = $2 + ` + + var m UsageMetric + var dateVal time.Time + err := r.db.QueryRowContext(ctx, query, userID, date).Scan( + &m.ID, &m.UserID, &dateVal, &m.Tier, + &m.APIRequests, &m.LLMRequests, &m.LLMTokens, + &m.SearchReqs, &m.StorageUsed, &m.CreatedAt, &m.UpdatedAt, + ) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + m.Date = dateVal.Format("2006-01-02") + return &m, nil +} + +func (r *Repository) IncrementAPIRequests(ctx context.Context, userID, tier string) error { + today := time.Now().Format("2006-01-02") + query := ` + INSERT INTO usage_metrics (user_id, date, tier, api_requests) + VALUES ($1, $2, $3, 1) + ON CONFLICT (user_id, date) + DO UPDATE SET api_requests = usage_metrics.api_requests + 1, updated_at = NOW() + ` + _, err := r.db.ExecContext(ctx, query, userID, today, tier) + return err +} + +func (r *Repository) IncrementLLMUsage(ctx context.Context, userID, tier string, tokens int) error { + today := time.Now().Format("2006-01-02") + query := ` + INSERT INTO usage_metrics (user_id, date, tier, llm_requests, llm_tokens) + VALUES ($1, $2, $3, 1, $4) + ON CONFLICT (user_id, date) + DO UPDATE SET + llm_requests = usage_metrics.llm_requests + 1, + llm_tokens = usage_metrics.llm_tokens + $4, + updated_at = NOW() + ` + _, err := r.db.ExecContext(ctx, query, userID, today, tier, tokens) + return err +} + +func (r *Repository) IncrementSearchRequests(ctx context.Context, userID, tier string) error { + today := time.Now().Format("2006-01-02") + query := ` + INSERT INTO usage_metrics (user_id, date, tier, search_requests) + VALUES ($1, $2, $3, 1) + ON CONFLICT (user_id, date) + DO UPDATE SET search_requests = usage_metrics.search_requests + 1, updated_at = NOW() + ` + _, err := r.db.ExecContext(ctx, query, userID, today, tier) + return err +} + +func (r *Repository) UpdateStorageUsed(ctx context.Context, userID, tier 
string, bytes int64) error { + today := time.Now().Format("2006-01-02") + query := ` + INSERT INTO usage_metrics (user_id, date, tier, storage_used) + VALUES ($1, $2, $3, $4) + ON CONFLICT (user_id, date) + DO UPDATE SET storage_used = $4, updated_at = NOW() + ` + _, err := r.db.ExecContext(ctx, query, userID, today, tier, bytes) + return err +} + +func (r *Repository) GetUsageHistory(ctx context.Context, userID string, days int) ([]*UsageMetric, error) { + query := ` + SELECT id, user_id, date, tier, api_requests, llm_requests, llm_tokens, + search_requests, storage_used, created_at, updated_at + FROM usage_metrics + WHERE user_id = $1 AND date >= CURRENT_DATE - $2::int + ORDER BY date DESC + ` + + rows, err := r.db.QueryContext(ctx, query, userID, days) + if err != nil { + return nil, err + } + defer rows.Close() + + var metrics []*UsageMetric + for rows.Next() { + var m UsageMetric + var dateVal time.Time + if err := rows.Scan( + &m.ID, &m.UserID, &dateVal, &m.Tier, + &m.APIRequests, &m.LLMRequests, &m.LLMTokens, + &m.SearchReqs, &m.StorageUsed, &m.CreatedAt, &m.UpdatedAt, + ); err != nil { + return nil, err + } + m.Date = dateVal.Format("2006-01-02") + metrics = append(metrics, &m) + } + + return metrics, nil +} + +func (r *Repository) CheckLLMLimits(ctx context.Context, userID, tier string) (bool, string) { + usage, err := r.GetTodayUsage(ctx, userID) + if err != nil { + return true, "" + } + + limits := GetLimits(tier) + + if usage != nil { + if usage.LLMRequests >= limits.LLMRequestsPerDay { + return false, "Daily LLM request limit exceeded" + } + if usage.LLMTokens >= limits.LLMTokensPerDay { + return false, "Daily LLM token limit exceeded" + } + } + + return true, "" +} diff --git a/backend/internal/usage/types.go b/backend/internal/usage/types.go new file mode 100644 index 0000000..791aa7f --- /dev/null +++ b/backend/internal/usage/types.go @@ -0,0 +1,56 @@ +package usage + +import "time" + +type UsageMetric struct { + ID string `json:"id"` + UserID 
string `json:"userId"` + Date string `json:"date"` + Tier string `json:"tier"` + APIRequests int `json:"apiRequests"` + LLMRequests int `json:"llmRequests"` + LLMTokens int `json:"llmTokens"` + SearchReqs int `json:"searchRequests"` + StorageUsed int64 `json:"storageUsed"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` +} + +type TierLimits struct { + APIRequestsPerDay int `json:"apiRequestsPerDay"` + LLMRequestsPerDay int `json:"llmRequestsPerDay"` + LLMTokensPerDay int `json:"llmTokensPerDay"` + MaxTokensPerReq int `json:"maxTokensPerRequest"` + StorageLimitBytes int64 `json:"storageLimitBytes"` +} + +var DefaultTierLimits = map[string]TierLimits{ + "free": { + APIRequestsPerDay: 1000, + LLMRequestsPerDay: 50, + LLMTokensPerDay: 100000, + MaxTokensPerReq: 2000, + StorageLimitBytes: 100 * 1024 * 1024, + }, + "pro": { + APIRequestsPerDay: 10000, + LLMRequestsPerDay: 500, + LLMTokensPerDay: 2000000, + MaxTokensPerReq: 8000, + StorageLimitBytes: 1024 * 1024 * 1024, + }, + "business": { + APIRequestsPerDay: 100000, + LLMRequestsPerDay: 5000, + LLMTokensPerDay: 20000000, + MaxTokensPerReq: 32000, + StorageLimitBytes: 10 * 1024 * 1024 * 1024, + }, +} + +func GetLimits(tier string) TierLimits { + if limits, ok := DefaultTierLimits[tier]; ok { + return limits + } + return DefaultTierLimits["free"] +} diff --git a/backend/pkg/config/config.go b/backend/pkg/config/config.go index 2bda6d6..7a44148 100644 --- a/backend/pkg/config/config.go +++ b/backend/pkg/config/config.go @@ -20,6 +20,7 @@ type Config struct { SearchSvcPort int LLMSvcPort int ScraperSvcPort int + AdminSvcPort int // Service URLs ChatSvcURL string @@ -29,6 +30,7 @@ type Config struct { ScraperSvcURL string MemorySvcURL string LibrarySvcURL string + AdminSvcURL string // External services SearXNGURL string @@ -44,6 +46,13 @@ type Config struct { FinanceHeatmapURL string LearningSvcURL string + // MinIO / S3 storage + MinioEndpoint string + MinioAccessKey string + 
MinioSecretKey string + MinioBucket string + MinioUseSSL bool + // Auth JWTSecret string AuthSvcURL string @@ -88,6 +97,7 @@ func Load() (*Config, error) { SearchSvcPort: getEnvInt("SEARCH_SVC_PORT", 3001), LLMSvcPort: getEnvInt("LLM_SVC_PORT", 3020), ScraperSvcPort: getEnvInt("SCRAPER_SVC_PORT", 3021), + AdminSvcPort: getEnvInt("ADMIN_SVC_PORT", 3040), ChatSvcURL: getEnv("CHAT_SVC_URL", "http://localhost:3005"), AgentSvcURL: getEnv("MASTER_AGENTS_SVC_URL", "http://localhost:3018"), @@ -96,6 +106,7 @@ func Load() (*Config, error) { ScraperSvcURL: getEnv("SCRAPER_SVC_URL", "http://localhost:3021"), MemorySvcURL: getEnv("MEMORY_SVC_URL", ""), LibrarySvcURL: getEnv("LIBRARY_SVC_URL", "http://localhost:3009"), + AdminSvcURL: getEnv("ADMIN_SVC_URL", "http://localhost:3040"), SearXNGURL: getEnv("SEARXNG_URL", "http://searxng:8080"), SearXNGFallbackURL: strings.Split(getEnv("SEARXNG_FALLBACK_URL", ""), ","), @@ -110,6 +121,12 @@ func Load() (*Config, error) { FinanceHeatmapURL: getEnv("FINANCE_HEATMAP_SVC_URL", "http://localhost:3033"), LearningSvcURL: getEnv("LEARNING_SVC_URL", "http://localhost:3034"), + MinioEndpoint: getEnv("MINIO_ENDPOINT", "minio:9000"), + MinioAccessKey: getEnv("MINIO_ACCESS_KEY", "minioadmin"), + MinioSecretKey: getEnv("MINIO_SECRET_KEY", "minioadmin"), + MinioBucket: getEnv("MINIO_BUCKET", "gooseek"), + MinioUseSSL: getEnv("MINIO_USE_SSL", "false") == "true", + JWTSecret: getEnv("JWT_SECRET", ""), AuthSvcURL: getEnv("AUTH_SVC_URL", ""), @@ -158,6 +175,10 @@ func getEnvInt(key string, defaultValue int) int { return defaultValue } +func GetEnvInt(key string, defaultValue int) int { + return getEnvInt(key, defaultValue) +} + func parseOrigins(s string) []string { if s == "*" { return []string{"*"} diff --git a/backend/pkg/middleware/llm_limits.go b/backend/pkg/middleware/llm_limits.go new file mode 100644 index 0000000..88c79b1 --- /dev/null +++ b/backend/pkg/middleware/llm_limits.go @@ -0,0 +1,106 @@ +package middleware + +import ( + "context" + 
"database/sql" + + "github.com/gofiber/fiber/v2" + "github.com/gooseek/backend/internal/usage" +) + +type LLMLimitsConfig struct { + UsageRepo *usage.Repository +} + +func LLMLimits(config LLMLimitsConfig) fiber.Handler { + return func(c *fiber.Ctx) error { + userID := GetUserID(c) + if userID == "" { + return c.Status(401).JSON(fiber.Map{ + "error": "Authentication required", + }) + } + + tier := GetUserTier(c) + if tier == "" { + tier = "free" + } + + if config.UsageRepo != nil { + allowed, reason := config.UsageRepo.CheckLLMLimits(c.Context(), userID, tier) + if !allowed { + limits := usage.GetLimits(tier) + return c.Status(429).JSON(fiber.Map{ + "error": reason, + "tier": tier, + "dailyLimit": limits.LLMRequestsPerDay, + "tokenLimit": limits.LLMTokensPerDay, + "upgradeUrl": "/settings/billing", + }) + } + } + + return c.Next() + } +} + +type UsageTracker struct { + repo *usage.Repository +} + +func NewUsageTracker(db *sql.DB) *UsageTracker { + return &UsageTracker{ + repo: usage.NewRepository(db), + } +} + +func (t *UsageTracker) RunMigrations(ctx context.Context) error { + return t.repo.RunMigrations(ctx) +} + +func (t *UsageTracker) TrackAPIRequest(c *fiber.Ctx) { + userID := GetUserID(c) + if userID == "" { + return + } + tier := GetUserTier(c) + if tier == "" { + tier = "free" + } + go t.repo.IncrementAPIRequests(context.Background(), userID, tier) +} + +func (t *UsageTracker) TrackLLMRequest(ctx context.Context, userID, tier string, tokens int) { + if userID == "" { + return + } + if tier == "" { + tier = "free" + } + go t.repo.IncrementLLMUsage(ctx, userID, tier, tokens) +} + +func (t *UsageTracker) TrackSearchRequest(c *fiber.Ctx) { + userID := GetUserID(c) + if userID == "" { + return + } + tier := GetUserTier(c) + if tier == "" { + tier = "free" + } + go t.repo.IncrementSearchRequests(context.Background(), userID, tier) +} + +func (t *UsageTracker) GetRepository() *usage.Repository { + return t.repo +} + +func UsageTracking(tracker *UsageTracker) 
fiber.Handler { + return func(c *fiber.Ctx) error { + if tracker != nil { + tracker.TrackAPIRequest(c) + } + return c.Next() + } +} diff --git a/backend/pkg/middleware/ratelimit_tiered.go b/backend/pkg/middleware/ratelimit_tiered.go new file mode 100644 index 0000000..da008c7 --- /dev/null +++ b/backend/pkg/middleware/ratelimit_tiered.go @@ -0,0 +1,158 @@ +package middleware + +import ( + "sync" + "time" + + "github.com/gofiber/fiber/v2" +) + +type TierConfig struct { + Max int + Window time.Duration +} + +type TieredRateLimitConfig struct { + Tiers map[string]TierConfig + DefaultTier string + KeyFunc func(*fiber.Ctx) string + GetTierFunc func(*fiber.Ctx) string +} + +type tieredRateLimiter struct { + requests map[string][]time.Time + mu sync.RWMutex + tiers map[string]TierConfig +} + +func newTieredRateLimiter(tiers map[string]TierConfig) *tieredRateLimiter { + rl := &tieredRateLimiter{ + requests: make(map[string][]time.Time), + tiers: tiers, + } + go rl.cleanup() + return rl +} + +func (rl *tieredRateLimiter) cleanup() { + ticker := time.NewTicker(time.Minute) + for range ticker.C { + rl.mu.Lock() + now := time.Now() + for key, times := range rl.requests { + var valid []time.Time + for _, t := range times { + if now.Sub(t) < 5*time.Minute { + valid = append(valid, t) + } + } + if len(valid) == 0 { + delete(rl.requests, key) + } else { + rl.requests[key] = valid + } + } + rl.mu.Unlock() + } +} + +func (rl *tieredRateLimiter) allow(key string, tier string) (bool, int, int) { + rl.mu.Lock() + defer rl.mu.Unlock() + + cfg, ok := rl.tiers[tier] + if !ok { + cfg = rl.tiers["free"] + } + + now := time.Now() + windowStart := now.Add(-cfg.Window) + + times := rl.requests[key] + var valid []time.Time + for _, t := range times { + if t.After(windowStart) { + valid = append(valid, t) + } + } + + remaining := cfg.Max - len(valid) + if remaining <= 0 { + rl.requests[key] = valid + return false, 0, cfg.Max + } + + rl.requests[key] = append(valid, now) + return true, remaining 
- 1, cfg.Max +} + +func TieredRateLimit(config TieredRateLimitConfig) fiber.Handler { + if config.Tiers == nil { + config.Tiers = map[string]TierConfig{ + "free": {Max: 60, Window: time.Minute}, + "pro": {Max: 300, Window: time.Minute}, + "business": {Max: 1000, Window: time.Minute}, + } + } + if config.DefaultTier == "" { + config.DefaultTier = "free" + } + if config.KeyFunc == nil { + config.KeyFunc = func(c *fiber.Ctx) string { + userID := GetUserID(c) + if userID != "" { + return "user:" + userID + } + return "ip:" + c.IP() + } + } + if config.GetTierFunc == nil { + config.GetTierFunc = func(c *fiber.Ctx) string { + tier := GetUserTier(c) + if tier == "" { + return config.DefaultTier + } + return tier + } + } + + limiter := newTieredRateLimiter(config.Tiers) + + return func(c *fiber.Ctx) error { + key := config.KeyFunc(c) + tier := config.GetTierFunc(c) + + allowed, remaining, limit := limiter.allow(key, tier) + + c.Set("X-RateLimit-Limit", formatInt(limit)) + c.Set("X-RateLimit-Remaining", formatInt(remaining)) + c.Set("X-RateLimit-Tier", tier) + + if !allowed { + c.Set("Retry-After", "60") + return c.Status(429).JSON(fiber.Map{ + "error": "Rate limit exceeded", + "tier": tier, + "limit": limit, + "retryAfter": 60, + }) + } + + return c.Next() + } +} + +func formatInt(n int) string { + if n < 0 { + n = 0 + } + s := "" + if n == 0 { + return "0" + } + for n > 0 { + s = string(rune('0'+n%10)) + s + n /= 10 + } + return s +} diff --git a/backend/pkg/storage/minio.go b/backend/pkg/storage/minio.go new file mode 100644 index 0000000..bcbacb2 --- /dev/null +++ b/backend/pkg/storage/minio.go @@ -0,0 +1,228 @@ +package storage + +import ( + "context" + "fmt" + "io" + "net/url" + "path/filepath" + "strings" + "time" + + "github.com/google/uuid" + "github.com/minio/minio-go/v7" + "github.com/minio/minio-go/v7/pkg/credentials" +) + +type MinioConfig struct { + Endpoint string + AccessKey string + SecretKey string + Bucket string + UseSSL bool +} + +type MinioStorage 
struct { + client *minio.Client + bucket string +} + +type UploadResult struct { + Key string `json:"key"` + Bucket string `json:"bucket"` + Size int64 `json:"size"` + ETag string `json:"etag"` + PublicURL string `json:"publicUrl,omitempty"` +} + +func NewMinioStorage(cfg MinioConfig) (*MinioStorage, error) { + client, err := minio.New(cfg.Endpoint, &minio.Options{ + Creds: credentials.NewStaticV4(cfg.AccessKey, cfg.SecretKey, ""), + Secure: cfg.UseSSL, + }) + if err != nil { + return nil, fmt.Errorf("failed to create minio client: %w", err) + } + + ctx := context.Background() + exists, err := client.BucketExists(ctx, cfg.Bucket) + if err != nil { + return nil, fmt.Errorf("failed to check bucket: %w", err) + } + + if !exists { + if err := client.MakeBucket(ctx, cfg.Bucket, minio.MakeBucketOptions{}); err != nil { + return nil, fmt.Errorf("failed to create bucket: %w", err) + } + } + + return &MinioStorage{ + client: client, + bucket: cfg.Bucket, + }, nil +} + +func (s *MinioStorage) Upload(ctx context.Context, reader io.Reader, size int64, filename, contentType string) (*UploadResult, error) { + ext := filepath.Ext(filename) + key := generateStorageKey(ext) + + opts := minio.PutObjectOptions{ + ContentType: contentType, + } + + info, err := s.client.PutObject(ctx, s.bucket, key, reader, size, opts) + if err != nil { + return nil, fmt.Errorf("failed to upload file: %w", err) + } + + return &UploadResult{ + Key: key, + Bucket: s.bucket, + Size: info.Size, + ETag: info.ETag, + }, nil +} + +func (s *MinioStorage) UploadUserFile(ctx context.Context, userID string, reader io.Reader, size int64, filename, contentType string) (*UploadResult, error) { + ext := filepath.Ext(filename) + key := fmt.Sprintf("users/%s/%s%s", userID, uuid.New().String(), ext) + + opts := minio.PutObjectOptions{ + ContentType: contentType, + UserMetadata: map[string]string{ + "original-name": filename, + "user-id": userID, + }, + } + + info, err := s.client.PutObject(ctx, s.bucket, key, reader, 
size, opts) + if err != nil { + return nil, fmt.Errorf("failed to upload file: %w", err) + } + + return &UploadResult{ + Key: key, + Bucket: s.bucket, + Size: info.Size, + ETag: info.ETag, + }, nil +} + +func (s *MinioStorage) Download(ctx context.Context, key string) (io.ReadCloser, *minio.ObjectInfo, error) { + obj, err := s.client.GetObject(ctx, s.bucket, key, minio.GetObjectOptions{}) + if err != nil { + return nil, nil, fmt.Errorf("failed to get object: %w", err) + } + + info, err := obj.Stat() + if err != nil { + obj.Close() + return nil, nil, fmt.Errorf("failed to stat object: %w", err) + } + + return obj, &info, nil +} + +func (s *MinioStorage) Delete(ctx context.Context, key string) error { + return s.client.RemoveObject(ctx, s.bucket, key, minio.RemoveObjectOptions{}) +} + +func (s *MinioStorage) DeleteUserFiles(ctx context.Context, userID string) error { + prefix := fmt.Sprintf("users/%s/", userID) + + objectsCh := s.client.ListObjects(ctx, s.bucket, minio.ListObjectsOptions{ + Prefix: prefix, + Recursive: true, + }) + + for obj := range objectsCh { + if obj.Err != nil { + return obj.Err + } + if err := s.client.RemoveObject(ctx, s.bucket, obj.Key, minio.RemoveObjectOptions{}); err != nil { + return err + } + } + + return nil +} + +func (s *MinioStorage) GetPresignedURL(ctx context.Context, key string, expiry time.Duration) (string, error) { + presignedURL, err := s.client.PresignedGetObject(ctx, s.bucket, key, expiry, url.Values{}) + if err != nil { + return "", fmt.Errorf("failed to generate presigned URL: %w", err) + } + return presignedURL.String(), nil +} + +func (s *MinioStorage) GetPresignedUploadURL(ctx context.Context, key string, expiry time.Duration) (string, error) { + presignedURL, err := s.client.PresignedPutObject(ctx, s.bucket, key, expiry) + if err != nil { + return "", fmt.Errorf("failed to generate presigned upload URL: %w", err) + } + return presignedURL.String(), nil +} + +func (s *MinioStorage) ListUserFiles(ctx context.Context, 
userID string) ([]minio.ObjectInfo, error) { + prefix := fmt.Sprintf("users/%s/", userID) + + var files []minio.ObjectInfo + objectsCh := s.client.ListObjects(ctx, s.bucket, minio.ListObjectsOptions{ + Prefix: prefix, + Recursive: true, + }) + + for obj := range objectsCh { + if obj.Err != nil { + return nil, obj.Err + } + files = append(files, obj) + } + + return files, nil +} + +func (s *MinioStorage) GetUserStorageUsage(ctx context.Context, userID string) (int64, error) { + files, err := s.ListUserFiles(ctx, userID) + if err != nil { + return 0, err + } + + var total int64 + for _, f := range files { + total += f.Size + } + + return total, nil +} + +func (s *MinioStorage) CopyObject(ctx context.Context, srcKey, dstKey string) error { + _, err := s.client.CopyObject(ctx, + minio.CopyDestOptions{Bucket: s.bucket, Object: dstKey}, + minio.CopySrcOptions{Bucket: s.bucket, Object: srcKey}, + ) + return err +} + +func (s *MinioStorage) ObjectExists(ctx context.Context, key string) (bool, error) { + _, err := s.client.StatObject(ctx, s.bucket, key, minio.StatObjectOptions{}) + if err != nil { + errResp := minio.ToErrorResponse(err) + if errResp.Code == "NoSuchKey" { + return false, nil + } + return false, err + } + return true, nil +} + +func generateStorageKey(ext string) string { + now := time.Now() + return fmt.Sprintf("%d/%02d/%02d/%s%s", + now.Year(), + now.Month(), + now.Day(), + uuid.New().String(), + strings.ToLower(ext), + ) +} diff --git a/backend/webui/.env.example b/backend/webui/.env.example index 5342e98..00cf3c0 100644 --- a/backend/webui/.env.example +++ b/backend/webui/.env.example @@ -5,3 +5,12 @@ API_URL=http://api-gateway:3015 # Public API URL (for browser requests) NEXT_PUBLIC_API_URL= + +# ============================================ +# === MENU VISIBILITY === +# ============================================ + +# Отключённые маршруты (через запятую) +# Страницы в разработке можно скрыть из меню +# Пример: /travel,/medicine,/finance,/learning 
+NEXT_PUBLIC_DISABLED_ROUTES= diff --git a/backend/webui/Dockerfile b/backend/webui/Dockerfile index 701bf90..3d46ca7 100644 --- a/backend/webui/Dockerfile +++ b/backend/webui/Dockerfile @@ -15,6 +15,10 @@ COPY . . ENV NEXT_TELEMETRY_DISABLED=1 ENV NODE_ENV=production +# Build-time переменные для Next.js (NEXT_PUBLIC_* должны быть доступны во время сборки) +ARG NEXT_PUBLIC_DISABLED_ROUTES +ENV NEXT_PUBLIC_DISABLED_ROUTES=${NEXT_PUBLIC_DISABLED_ROUTES} + RUN npm run build FROM base AS runner diff --git a/backend/webui/src/app/(auth)/forgot-password/page.tsx b/backend/webui/src/app/(auth)/forgot-password/page.tsx new file mode 100644 index 0000000..5ed4e53 --- /dev/null +++ b/backend/webui/src/app/(auth)/forgot-password/page.tsx @@ -0,0 +1,33 @@ +'use client'; + +import { useRouter } from 'next/navigation'; +import Link from 'next/link'; +import { ForgotPasswordForm } from '@/components/auth'; + +export default function ForgotPasswordPage() { + const router = useRouter(); + + return ( +
+
+ +
+ G +
+ GooSeek + +
+ +
+
+
+ router.push('/login')} + /> +
+
+
+
+ ); +} diff --git a/backend/webui/src/app/(auth)/layout.tsx b/backend/webui/src/app/(auth)/layout.tsx new file mode 100644 index 0000000..e77dc86 --- /dev/null +++ b/backend/webui/src/app/(auth)/layout.tsx @@ -0,0 +1,5 @@ +import { ReactNode } from 'react'; + +export default function AuthLayout({ children }: { children: ReactNode }) { + return children; +} diff --git a/backend/webui/src/app/(auth)/login/page.tsx b/backend/webui/src/app/(auth)/login/page.tsx new file mode 100644 index 0000000..0a399ae --- /dev/null +++ b/backend/webui/src/app/(auth)/login/page.tsx @@ -0,0 +1,62 @@ +'use client'; + +import { useEffect } from 'react'; +import { useRouter } from 'next/navigation'; +import Link from 'next/link'; +import { LoginForm } from '@/components/auth'; +import { useAuth } from '@/lib/contexts/AuthContext'; + +export default function LoginPage() { + const router = useRouter(); + const { isAuthenticated, isLoading } = useAuth(); + + useEffect(() => { + if (!isLoading && isAuthenticated) { + router.push('/'); + } + }, [isAuthenticated, isLoading, router]); + + if (isLoading) { + return ( +
+
+
+ ); + } + + return ( +
+
+ +
+ G +
+ GooSeek + +
+ +
+
+
+ router.push('/')} + onSwitchToRegister={() => router.push('/register')} + /> +
+ +

+ Продолжая, вы соглашаетесь с{' '} + + условиями + {' '} + и{' '} + + политикой конфиденциальности + +

+
+
+
+ ); +} diff --git a/backend/webui/src/app/(auth)/register/page.tsx b/backend/webui/src/app/(auth)/register/page.tsx new file mode 100644 index 0000000..29b7604 --- /dev/null +++ b/backend/webui/src/app/(auth)/register/page.tsx @@ -0,0 +1,58 @@ +'use client'; + +import { useEffect } from 'react'; +import { useRouter } from 'next/navigation'; +import Link from 'next/link'; +import { RegisterForm } from '@/components/auth'; +import { useAuth } from '@/lib/contexts/AuthContext'; + +export default function RegisterPage() { + const router = useRouter(); + const { isAuthenticated, isLoading } = useAuth(); + + useEffect(() => { + if (!isLoading && isAuthenticated) { + router.push('/'); + } + }, [isAuthenticated, isLoading, router]); + + if (isLoading) { + return ( +
+
+
+ ); + } + + return ( +
+
+ +
+ G +
+ GooSeek + +
+ +
+
+
+ router.push('/')} + onSwitchToLogin={() => router.push('/login')} + /> +
+ +

+ Уже есть аккаунт?{' '} + + Войти + +

+
+
+
+ ); +} diff --git a/backend/webui/src/app/(auth)/reset-password/page.tsx b/backend/webui/src/app/(auth)/reset-password/page.tsx new file mode 100644 index 0000000..819bc41 --- /dev/null +++ b/backend/webui/src/app/(auth)/reset-password/page.tsx @@ -0,0 +1,208 @@ +'use client'; + +import { useState, FormEvent, Suspense } from 'react'; +import { useRouter, useSearchParams } from 'next/navigation'; +import Link from 'next/link'; +import { Lock, Loader2, Eye, EyeOff, CheckCircle, AlertCircle } from 'lucide-react'; +import { resetPassword } from '@/lib/auth'; + +function ResetPasswordContent() { + const router = useRouter(); + const searchParams = useSearchParams(); + const token = searchParams.get('token'); + + const [password, setPassword] = useState(''); + const [confirmPassword, setConfirmPassword] = useState(''); + const [showPassword, setShowPassword] = useState(false); + const [error, setError] = useState(''); + const [isLoading, setIsLoading] = useState(false); + const [isSuccess, setIsSuccess] = useState(false); + + const isPasswordValid = password.length >= 8; + const doPasswordsMatch = password === confirmPassword && confirmPassword.length > 0; + + const handleSubmit = async (e: FormEvent) => { + e.preventDefault(); + setError(''); + + if (!token) { + setError('Недействительная ссылка для сброса пароля'); + return; + } + + if (!isPasswordValid) { + setError('Пароль должен содержать минимум 8 символов'); + return; + } + + if (!doPasswordsMatch) { + setError('Пароли не совпадают'); + return; + } + + setIsLoading(true); + + try { + await resetPassword({ token, newPassword: password }); + setIsSuccess(true); + } catch (err) { + setError(err instanceof Error ? err.message : 'Ошибка сброса пароля'); + } finally { + setIsLoading(false); + } + }; + + if (!token) { + return ( +
+
+ +
+

Недействительная ссылка

+

+ Ссылка для сброса пароля недействительна или устарела. +

+ + Запросить новую ссылку + +
+ ); + } + + if (isSuccess) { + return ( +
+
+ +
+

Пароль изменён

+

+ Ваш пароль успешно изменён. Теперь вы можете войти с новым паролем. +

+ +
+ ); + } + + return ( +
+
+

Новый пароль

+

Введите новый пароль для вашего аккаунта

+
+ + {error && ( +
+ {error} +
+ )} + +
+
+ +
+ + setPassword(e.target.value)} + placeholder="••••••••" + required + autoComplete="new-password" + className="w-full pl-11 pr-11 py-3 bg-surface/50 border border-border rounded-xl + text-primary placeholder:text-muted + focus:outline-none focus:border-accent/50 focus:ring-1 focus:ring-accent/20 + transition-all duration-200" + /> + +
+
+ +
+ +
+ + setConfirmPassword(e.target.value)} + placeholder="••••••••" + required + autoComplete="new-password" + className="w-full pl-11 pr-4 py-3 bg-surface/50 border border-border rounded-xl + text-primary placeholder:text-muted + focus:outline-none focus:border-accent/50 focus:ring-1 focus:ring-accent/20 + transition-all duration-200" + /> +
+
+
+ + +
+ ); +} + +export default function ResetPasswordPage() { + return ( +
+
+ +
+ G +
+ GooSeek + +
+ +
+
+
+ + +
+ }> + + +
+
+ +
+ ); +} diff --git a/backend/webui/src/app/(main)/admin/audit/page.tsx b/backend/webui/src/app/(main)/admin/audit/page.tsx new file mode 100644 index 0000000..0311266 --- /dev/null +++ b/backend/webui/src/app/(main)/admin/audit/page.tsx @@ -0,0 +1,225 @@ +'use client'; + +import { useEffect, useState, useCallback } from 'react'; +import { + Shield, + User, + FileText, + Settings, + Compass, + Clock, +} from 'lucide-react'; +import { fetchAuditLogs } from '@/lib/api'; +import type { AuditLog } from '@/lib/types'; + +const actionIcons: Record = { + create: FileText, + update: Settings, + delete: Shield, + publish: Compass, + reorder: Settings, +}; + +const actionLabels: Record = { + create: 'Создание', + update: 'Изменение', + delete: 'Удаление', + publish: 'Публикация', + reorder: 'Сортировка', +}; + +const resourceLabels: Record = { + user: 'Пользователь', + post: 'Пост', + settings: 'Настройки', + discover_category: 'Категория Discover', + discover_categories: 'Категории Discover', + discover_source: 'Источник Discover', +}; + +export default function AdminAuditPage() { + const [logs, setLogs] = useState([]); + const [total, setTotal] = useState(0); + const [page, setPage] = useState(1); + const [actionFilter, setActionFilter] = useState(''); + const [resourceFilter, setResourceFilter] = useState(''); + const [loading, setLoading] = useState(true); + + const loadLogs = useCallback(async () => { + setLoading(true); + try { + const data = await fetchAuditLogs( + page, + 50, + actionFilter || undefined, + resourceFilter || undefined + ); + setLogs(data.logs); + setTotal(data.total); + } catch (err) { + console.error('Failed to load audit logs:', err); + } finally { + setLoading(false); + } + }, [page, actionFilter, resourceFilter]); + + useEffect(() => { + loadLogs(); + }, [loadLogs]); + + const totalPages = Math.ceil(total / 50); + + const formatDate = (dateStr: string) => { + const date = new Date(dateStr); + return date.toLocaleString('ru-RU', { + day: '2-digit', 
+ month: '2-digit', + year: 'numeric', + hour: '2-digit', + minute: '2-digit', + }); + }; + + const getActionColor = (action: string) => { + switch (action) { + case 'create': + return 'bg-green-500/10 text-green-400'; + case 'update': + return 'bg-blue-500/10 text-blue-400'; + case 'delete': + return 'bg-red-500/10 text-red-400'; + case 'publish': + return 'bg-purple-500/10 text-purple-400'; + default: + return 'bg-gray-500/10 text-gray-400'; + } + }; + + return ( +
+
+

Аудит

+

История действий администраторов

+
+ +
+
+ + +
+ + {loading ? ( +
+
+
+ ) : logs.length === 0 ? ( +
+ +

Записей аудита нет

+
+ ) : ( + <> +
+ {logs.map((log) => { + const ActionIcon = actionIcons[log.action] || Shield; + return ( +
+
+
+ +
+
+
+ + {actionLabels[log.action] || log.action} + + + {resourceLabels[log.resource] || log.resource} + + {log.resourceId && ( + + {log.resourceId.slice(0, 8)}... + + )} +
+
+ + + {log.userEmail} + + + + {formatDate(log.createdAt)} + + {log.ipAddress && ( + + IP: {log.ipAddress} + + )} +
+
+
+
+ ); + })} +
+ + {totalPages > 1 && ( +
+

+ Показано {logs.length} из {total} +

+
+ + + {page} / {totalPages} + + +
+
+ )} + + )} +
+
+ ); +} diff --git a/backend/webui/src/app/(main)/admin/discover/page.tsx b/backend/webui/src/app/(main)/admin/discover/page.tsx new file mode 100644 index 0000000..2fbc22c --- /dev/null +++ b/backend/webui/src/app/(main)/admin/discover/page.tsx @@ -0,0 +1,513 @@ +'use client'; + +import { useEffect, useState, useCallback } from 'react'; +import { + Plus, + Trash2, + GripVertical, + ExternalLink, + ToggleLeft, + ToggleRight, +} from 'lucide-react'; +import { + fetchDiscoverCategories, + createDiscoverCategory, + updateDiscoverCategory, + deleteDiscoverCategory, + reorderDiscoverCategories, + fetchDiscoverSources, + createDiscoverSource, + deleteDiscoverSource, +} from '@/lib/api'; +import type { DiscoverCategory, DiscoverSource } from '@/lib/types'; + +interface CategoryModalProps { + category?: DiscoverCategory; + onClose: () => void; + onSave: (data: { name: string; nameRu: string; icon: string; color: string; keywords: string[]; regions: string[] }) => void; +} + +function CategoryModal({ category, onClose, onSave }: CategoryModalProps) { + const [name, setName] = useState(category?.name || ''); + const [nameRu, setNameRu] = useState(category?.nameRu || ''); + const [icon, setIcon] = useState(category?.icon || '📰'); + const [color, setColor] = useState(category?.color || '#6B7280'); + const [keywords, setKeywords] = useState(category?.keywords?.join(', ') || ''); + const [regions, setRegions] = useState(category?.regions?.join(', ') || 'world, russia'); + + const handleSubmit = (e: React.FormEvent) => { + e.preventDefault(); + onSave({ + name, + nameRu, + icon, + color, + keywords: keywords.split(',').map(k => k.trim()).filter(Boolean), + regions: regions.split(',').map(r => r.trim()).filter(Boolean), + }); + }; + + return ( +
+
+

+ {category ? 'Редактировать категорию' : 'Новая категория'} +

+
+
+
+ + setName(e.target.value)} + placeholder="tech" + className="w-full px-3 py-2 bg-base border border-border/50 rounded-lg text-primary focus:outline-none focus:border-primary" + required + /> +
+
+ + setNameRu(e.target.value)} + placeholder="Технологии" + className="w-full px-3 py-2 bg-base border border-border/50 rounded-lg text-primary focus:outline-none focus:border-primary" + required + /> +
+
+
+
+ + setIcon(e.target.value)} + placeholder="💻" + className="w-full px-3 py-2 bg-base border border-border/50 rounded-lg text-primary focus:outline-none focus:border-primary text-center text-2xl" + required + /> +
+
+ +
+ setColor(e.target.value)} + className="w-12 h-10 bg-base border border-border/50 rounded-lg cursor-pointer" + /> + setColor(e.target.value)} + className="flex-1 px-3 py-2 bg-base border border-border/50 rounded-lg text-primary focus:outline-none focus:border-primary font-mono" + /> +
+
+
+
+ + setKeywords(e.target.value)} + placeholder="technology, AI, software" + className="w-full px-3 py-2 bg-base border border-border/50 rounded-lg text-primary focus:outline-none focus:border-primary" + /> +
+
+ + setRegions(e.target.value)} + placeholder="world, russia, eu" + className="w-full px-3 py-2 bg-base border border-border/50 rounded-lg text-primary focus:outline-none focus:border-primary" + /> +
+
+ + +
+
+
+
+ ); +} + +interface SourceModalProps { + onClose: () => void; + onSave: (data: { name: string; url: string; logoUrl?: string; categories: string[]; trustScore: number; description?: string }) => void; +} + +function SourceModal({ onClose, onSave }: SourceModalProps) { + const [name, setName] = useState(''); + const [url, setUrl] = useState(''); + const [logoUrl, setLogoUrl] = useState(''); + const [categories, setCategories] = useState(''); + const [trustScore, setTrustScore] = useState(0.5); + const [description, setDescription] = useState(''); + + const handleSubmit = (e: React.FormEvent) => { + e.preventDefault(); + onSave({ + name, + url, + logoUrl: logoUrl || undefined, + categories: categories.split(',').map(c => c.trim()).filter(Boolean), + trustScore, + description: description || undefined, + }); + }; + + return ( +
+
+

Новый источник

+
+
+ + setName(e.target.value)} + placeholder="Habr" + className="w-full px-3 py-2 bg-base border border-border/50 rounded-lg text-primary focus:outline-none focus:border-primary" + required + /> +
+
+ + setUrl(e.target.value)} + placeholder="https://habr.com" + className="w-full px-3 py-2 bg-base border border-border/50 rounded-lg text-primary focus:outline-none focus:border-primary" + required + /> +
+
+ + setLogoUrl(e.target.value)} + placeholder="https://..." + className="w-full px-3 py-2 bg-base border border-border/50 rounded-lg text-primary focus:outline-none focus:border-primary" + /> +
+
+ + setCategories(e.target.value)} + placeholder="tech, science" + className="w-full px-3 py-2 bg-base border border-border/50 rounded-lg text-primary focus:outline-none focus:border-primary" + /> +
+
+ + setTrustScore(parseFloat(e.target.value))} + className="w-full" + /> +
+
+ +