From ab48a0632b630f20348c05097cede93b8b7c9ae1 Mon Sep 17 00:00:00 2001 From: home Date: Mon, 2 Mar 2026 20:25:44 +0300 Subject: [PATCH] feat: CI/CD pipeline + Learning/Medicine/Travel services - Add Gitea Actions workflow for automated build & deploy - Add K8s manifests: webui, travel-svc, medicine-svc, sandbox-svc - Update kustomization for localhost:5000 registry - Add ingress for gooseek.ru and api.gooseek.ru - Learning cabinet with onboarding, courses, sandbox integration - Medicine service with symptom analysis and doctor matching - Travel service with itinerary planning - Server setup scripts (NVIDIA/CUDA, K3s, Gitea runner) Made-with: Cursor --- .dockerignore | 48 +- .gitea/workflows/deploy.yaml | 81 + .gitignore | 42 +- CONTINUE.md | 114 +- backend/cmd/agent-svc/main.go | 20 + backend/cmd/api-gateway/main.go | 6 + backend/cmd/learning-svc/main.go | 642 +++--- backend/cmd/medicine-svc/main.go | 120 ++ backend/cmd/sandbox-svc/main.go | 540 +++++ backend/cmd/travel-svc/main.go | 23 + backend/deploy/docker/Dockerfile.all | 4 +- backend/deploy/docker/docker-compose.yml | 122 +- backend/deploy/k8s/configmap.yaml | 22 + backend/deploy/k8s/deploy.sh | 82 +- backend/deploy/k8s/gitea-deployment.yaml | 44 + backend/deploy/k8s/ingress.yaml | 35 +- backend/deploy/k8s/kustomization.yaml | 10 + backend/deploy/k8s/learning-svc.yaml | 68 + backend/deploy/k8s/medicine-svc.yaml | 70 + backend/deploy/k8s/opensandbox.yaml | 165 ++ backend/deploy/k8s/registry-with-auth.yaml | 78 + backend/deploy/k8s/sandbox-svc.yaml | 70 + backend/deploy/k8s/travel-svc.yaml | 68 + backend/deploy/k8s/webui.yaml | 63 + backend/deploy/scripts/README-nvidia-cuda.md | 49 + backend/deploy/scripts/gitea-runner.service | 14 + backend/deploy/scripts/install-cicd-stack.sh | 55 + .../deploy/scripts/install-gitea-manifest.sh | 248 +++ .../deploy/scripts/install-gitea-registry.sh | 130 ++ backend/deploy/scripts/setup-gitea-runner.sh | 65 + backend/deploy/scripts/setup-k3s-cicd.sh | 60 + 
.../scripts/setup-nvidia-cuda-ubuntu24.sh | 73 + .../internal/agent/learning_orchestrator.go | 836 ++++++++ backend/internal/agent/orchestrator.go | 5 + .../agent/travel_context_collector.go | 130 +- backend/internal/agent/travel_data_client.go | 49 +- .../internal/agent/travel_events_collector.go | 70 +- .../internal/agent/travel_hotels_collector.go | 30 + backend/internal/agent/travel_orchestrator.go | 586 ++++-- backend/internal/agent/travel_photo_cache.go | 194 ++ .../internal/agent/travel_poi_collector.go | 77 +- backend/internal/agent/travel_types.go | 7 +- backend/internal/db/learning_repo.go | 642 ++++++ backend/internal/db/learning_repo_test.go | 43 + backend/internal/learning/course_autogen.go | 556 +++++ .../internal/learning/course_autogen_test.go | 79 + backend/internal/learning/mock_llm_test.go | 36 + backend/internal/learning/plan_builder.go | 118 ++ .../internal/learning/plan_builder_test.go | 66 + backend/internal/learning/profile_builder.go | 203 ++ .../internal/learning/profile_builder_test.go | 65 + backend/internal/learning/stepper.go | 8 +- backend/internal/medicine/service.go | 671 ++++++ backend/internal/travel/service.go | 135 ++ backend/pkg/config/config.go | 16 +- backend/pkg/storage/minio.go | 41 +- backend/webui/.env.example | 16 - backend/webui/Dockerfile | 8 +- backend/webui/next.config.mjs | 1 + backend/webui/package-lock.json | 217 +- backend/webui/package.json | 6 +- .../webui/src/app/(main)/admin/posts/page.tsx | 2 +- backend/webui/src/app/(main)/layout.tsx | 22 +- .../(main)/learning/courses/[slug]/page.tsx | 263 +++ .../src/app/(main)/learning/new/page.tsx | 201 -- .../webui/src/app/(main)/learning/page.tsx | 1862 +++++++++++++++-- .../webui/src/app/(main)/medicine/page.tsx | 529 +++-- backend/webui/src/app/(main)/spaces/page.tsx | 4 +- backend/webui/src/app/(main)/travel/page.tsx | 574 ++--- backend/webui/src/app/globals.css | 314 +-- backend/webui/src/app/layout.tsx | 19 +- backend/webui/src/app/providers.tsx | 15 +- 
backend/webui/src/app/robots.ts | 40 + backend/webui/src/app/sitemap.ts | 40 + .../src/components/EditableItinerary.tsx | 954 +++++++++ .../src/components/MedicineWidgetTabs.tsx | 668 ++++++ backend/webui/src/components/SandboxPanel.tsx | 643 ++++++ backend/webui/src/components/Sidebar.tsx | 58 +- backend/webui/src/components/ThemeToggle.tsx | 37 + backend/webui/src/components/TravelMap.tsx | 416 +++- .../webui/src/components/TravelWidgetTabs.tsx | 649 ++++++ .../webui/src/components/TravelWidgets.tsx | 669 ++++-- backend/webui/src/lib/api.ts | 312 ++- backend/webui/src/lib/config/menu.ts | 47 +- .../webui/src/lib/contexts/ThemeContext.tsx | 94 + .../src/lib/hooks/useEditableItinerary.ts | 375 ++++ .../webui/src/lib/hooks/useLearningChat.ts | 282 +++ .../webui/src/lib/hooks/useMedicineChat.ts | 140 ++ backend/webui/src/lib/hooks/useTravelChat.ts | 144 +- backend/webui/src/lib/types.ts | 192 +- backend/webui/src/middleware.ts | 49 + backend/webui/tailwind.config.ts | 4 + 92 files changed, 15562 insertions(+), 2198 deletions(-) create mode 100644 .gitea/workflows/deploy.yaml create mode 100644 backend/cmd/medicine-svc/main.go create mode 100644 backend/cmd/sandbox-svc/main.go create mode 100644 backend/deploy/k8s/gitea-deployment.yaml create mode 100644 backend/deploy/k8s/learning-svc.yaml create mode 100644 backend/deploy/k8s/medicine-svc.yaml create mode 100644 backend/deploy/k8s/opensandbox.yaml create mode 100644 backend/deploy/k8s/registry-with-auth.yaml create mode 100644 backend/deploy/k8s/sandbox-svc.yaml create mode 100644 backend/deploy/k8s/travel-svc.yaml create mode 100644 backend/deploy/k8s/webui.yaml create mode 100644 backend/deploy/scripts/README-nvidia-cuda.md create mode 100644 backend/deploy/scripts/gitea-runner.service create mode 100644 backend/deploy/scripts/install-cicd-stack.sh create mode 100644 backend/deploy/scripts/install-gitea-manifest.sh create mode 100644 backend/deploy/scripts/install-gitea-registry.sh create mode 100644 
backend/deploy/scripts/setup-gitea-runner.sh create mode 100644 backend/deploy/scripts/setup-k3s-cicd.sh create mode 100644 backend/deploy/scripts/setup-nvidia-cuda-ubuntu24.sh create mode 100644 backend/internal/agent/learning_orchestrator.go create mode 100644 backend/internal/agent/travel_photo_cache.go create mode 100644 backend/internal/db/learning_repo.go create mode 100644 backend/internal/db/learning_repo_test.go create mode 100644 backend/internal/learning/course_autogen.go create mode 100644 backend/internal/learning/course_autogen_test.go create mode 100644 backend/internal/learning/mock_llm_test.go create mode 100644 backend/internal/learning/plan_builder.go create mode 100644 backend/internal/learning/plan_builder_test.go create mode 100644 backend/internal/learning/profile_builder.go create mode 100644 backend/internal/learning/profile_builder_test.go create mode 100644 backend/internal/medicine/service.go delete mode 100644 backend/webui/.env.example create mode 100644 backend/webui/src/app/(main)/learning/courses/[slug]/page.tsx delete mode 100644 backend/webui/src/app/(main)/learning/new/page.tsx create mode 100644 backend/webui/src/app/robots.ts create mode 100644 backend/webui/src/app/sitemap.ts create mode 100644 backend/webui/src/components/EditableItinerary.tsx create mode 100644 backend/webui/src/components/MedicineWidgetTabs.tsx create mode 100644 backend/webui/src/components/SandboxPanel.tsx create mode 100644 backend/webui/src/components/ThemeToggle.tsx create mode 100644 backend/webui/src/components/TravelWidgetTabs.tsx create mode 100644 backend/webui/src/lib/contexts/ThemeContext.tsx create mode 100644 backend/webui/src/lib/hooks/useEditableItinerary.ts create mode 100644 backend/webui/src/lib/hooks/useLearningChat.ts create mode 100644 backend/webui/src/lib/hooks/useMedicineChat.ts create mode 100644 backend/webui/src/middleware.ts diff --git a/.dockerignore b/.dockerignore index 3e73251..0296c63 100644 --- a/.dockerignore +++ 
b/.dockerignore @@ -3,40 +3,74 @@ node_modules/ **/node_modules/ npm-debug.log yarn-error.log +package-lock.json -# Build output -.next/ -**/.next/ +# Build output (кроме webui .next для standalone) out/ dist/ **/dist/ +*.tsbuildinfo # Git .git/ .gitignore +.gitattributes +.github/ +.gitea/ # IDE .vscode/ .idea/ *.iml +*.swp +*.swo -# Env (не в образ) +# Env (NEVER in image) .env .env.* !.env.example -# Docs, deploy scripts (не нужны в образе) +# Docs (не нужны в образе) docs/ -deploy/ *.md -!README.md +LICENSE +CHANGELOG* +CONTINUE.md +AGENTS.md + +# Deploy scripts (не в образ, только код) +backend/deploy/scripts/ +backend/deploy/k8s/*.sh +*.sh # Data, certs db.sqlite +*.sqlite certificates/ searxng/ +*.pem +*.key +*.crt # Misc .DS_Store +Thumbs.db *.log coverage/ +**/coverage/ +*.bak +*.tmp + +# Test files +**/*_test.go +**/test/ +**/tests/ +**/__tests__/ + +# CI temp +.runner +*.runner + +# Cursor/IDE +.cursor/ +.cursorignore diff --git a/.gitea/workflows/deploy.yaml b/.gitea/workflows/deploy.yaml new file mode 100644 index 0000000..152ad24 --- /dev/null +++ b/.gitea/workflows/deploy.yaml @@ -0,0 +1,81 @@ +name: Build and Deploy GooSeek + +on: + push: + branches: + - main + - master + +env: + REGISTRY: localhost:5000 + +jobs: + build-backend: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Build backend image (all services) + working-directory: backend + run: | + docker build \ + -f deploy/docker/Dockerfile.all \ + -t $REGISTRY/gooseek/backend:${{ github.sha }} \ + -t $REGISTRY/gooseek/backend:latest \ + . + + - name: Push backend image + run: | + docker push $REGISTRY/gooseek/backend:${{ github.sha }} + docker push $REGISTRY/gooseek/backend:latest + + build-webui: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Build WebUI image + working-directory: backend/webui + run: | + docker build \ + -t $REGISTRY/gooseek/webui:${{ github.sha }} \ + -t $REGISTRY/gooseek/webui:latest \ + . 
+ + - name: Push WebUI image + run: | + docker push $REGISTRY/gooseek/webui:${{ github.sha }} + docker push $REGISTRY/gooseek/webui:latest + + deploy: + needs: [build-backend, build-webui] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Deploy to Kubernetes + run: | + export KUBECONFIG=/etc/rancher/k3s/k3s.yaml + cd backend/deploy/k8s + + # Apply kustomization + kubectl apply -k . + + # Restart deployments to pull new images + kubectl -n gooseek rollout restart deployment/api-gateway + kubectl -n gooseek rollout restart deployment/webui + kubectl -n gooseek rollout restart deployment/chat-svc + kubectl -n gooseek rollout restart deployment/agent-svc + kubectl -n gooseek rollout restart deployment/search-svc + kubectl -n gooseek rollout restart deployment/discover-svc + kubectl -n gooseek rollout restart deployment/learning-svc + kubectl -n gooseek rollout restart deployment/medicine-svc + kubectl -n gooseek rollout restart deployment/travel-svc + kubectl -n gooseek rollout restart deployment/sandbox-svc + + # Wait for critical deployments + kubectl -n gooseek rollout status deployment/api-gateway --timeout=180s + kubectl -n gooseek rollout status deployment/webui --timeout=180s + + echo "=== Deploy completed ===" + kubectl -n gooseek get pods diff --git a/.gitignore b/.gitignore index a05f212..8f0d389 100644 --- a/.gitignore +++ b/.gitignore @@ -1,24 +1,29 @@ # Node.js node_modules/ +**/node_modules/ npm-debug.log yarn-error.log +package-lock.json # Build output .next/ +**/.next/ out/ dist/ +**/dist/ +*.tsbuildinfo # IDE/Editor specific .vscode/ .idea/ *.iml +*.swp +*.swo -# Environment variables +# Environment variables (SECRETS - NEVER COMMIT!) 
.env -.env.local -.env.development.local -.env.test.local -.env.production.local +.env.* +!.env.example # Config files config.toml @@ -29,22 +34,41 @@ logs/ # Testing /coverage/ +**/coverage/ # Miscellaneous .DS_Store Thumbs.db +*.bak +*.tmp # Db & data db.sqlite +*.sqlite apps/frontend/data/* !apps/frontend/data/.gitignore /searxng -certificates +# Certificates and secrets +certificates/ +*.pem +*.key +*.crt -# SSL backup (приватные ключи не в репо) +# SSL backup deploy/k3s/ssl/backup/* !deploy/k3s/ssl/backup/.gitkeep -# Vendor cache (npm + Docker images для оффлайн-билда) -vendor/ \ No newline at end of file +# Vendor cache +vendor/ + +# CI/CD temp files +.runner +*.runner + +# K8s secrets (generated) +backend/deploy/k8s/*-secrets.yaml + +# Go +backend/cmd/*/main +backend/**/*.exe \ No newline at end of file diff --git a/CONTINUE.md b/CONTINUE.md index ade0c6c..94c6da8 100644 --- a/CONTINUE.md +++ b/CONTINUE.md @@ -1,87 +1,39 @@ # Недоделки — начать отсюда -## Последнее изменение (01.03.2026) +## CI/CD готов — осталось создать репозиторий в Gitea -### СДЕЛАНО: Маршруты по дорогам + стоимость проезда +### Сделано (CI/CD подготовка репозитория) +- [x] Обновлён `.gitignore`: игнорируются секреты, временные файлы, кэши +- [x] Обновлён `.dockerignore`: оптимизирован для сборки образов +- [x] Созданы K8s манифесты: + - `backend/deploy/k8s/webui.yaml` — новый + - `backend/deploy/k8s/travel-svc.yaml` — новый +- [x] Обновлён `backend/deploy/k8s/kustomization.yaml`: + - images используют `localhost:5000/gooseek/*` + - добавлены webui.yaml и travel-svc.yaml +- [x] Обновлён `backend/deploy/k8s/ingress.yaml`: + - gooseek.ru → webui:3000 + - api.gooseek.ru → api-gateway:3015 +- [x] Обновлён `backend/deploy/k8s/deploy.sh`: + - push в localhost:5000 registry + - rolling restart всех сервисов +- [x] Создан `.gitea/workflows/deploy.yaml`: + - CI/CD workflow для Gitea Actions + - Сборка backend + webui + - Автодеплой в K8s -#### Что сделано: +### Осталось сделать +1. 
[ ] Создать репозиторий `gooseek` в Gitea (https://git.gooseek.ru) +2. [ ] Пуш кода: `git remote add gitea https://git.gooseek.ru/admin/gooseek.git && git push -u gitea main` +3. [ ] Проверить что CI/CD workflow запустился и задеплоился -**1. `backend/internal/travel/twogis.go` — 2GIS Routing API клиент:** -- Метод `GetRoute(ctx, points, transport)` — POST `routing.api.2gis.com/routing/7.0.0/global` -- Поддержка transport: `driving`, `taxi`, `walking`, `bicycle` -- Парсинг WKT LINESTRING из `outcoming_path.geometry[].selection` -- Сборка `RouteDirection` с geometry, distance, duration, steps +### Ранее сделано +- Learning кабинет полностью готов +- Medicine сервис полностью готов +- Все K8s манифесты для всех сервисов -**2. `backend/internal/travel/service.go` — переключение на 2GIS:** -- `GetRoute()` сначала пробует 2GIS Routing, fallback на OpenRouteService -- `mapProfileToTwoGISTransport()` — маппинг профилей - -**3. `backend/internal/agent/travel_data_client.go` — обновлённый клиент:** -- `GetRoute(ctx, points, transport)` — полный `RouteDirectionResult` с geometry -- `GetRouteSegments()` — маршруты между каждой парой точек -- Новые типы: `RouteDirectionResult`, `RouteGeometryResult`, `RouteStepResult`, `RouteSegmentResult` - -**4. `backend/internal/agent/travel_orchestrator.go` — дорожные маршруты:** -- `emitTravelWidgets()` вызывает `buildRoadRoute()` вместо прямых линий -- `buildTransportSegments()` — маршруты между каждой парой точек -- `calculateTransportCosts()` — расчёт стоимости (машина ~8₽/км, автобус ~2.5₽/км, такси ~100₽+18₽/км) -- `routeDirection` и `segments` передаются в виджеты `travel_map` и `travel_itinerary` - -**5. 
Фронтенд — отображение дорожных маршрутов:** -- `types.ts` — новые типы `RouteSegment`, `TransportCostOption`, расширены `TravelMapWidgetParams` и `TravelItineraryWidgetParams` -- `useTravelChat.ts` — извлечение `routeDirection` и `segments` из виджетов, новые state -- `travel/page.tsx` — передача `routeDirection` в `TravelMap` -- `TravelWidgets.tsx` — `TransportSegmentCard` между элементами маршрута с иконками машина/автобус/такси и ценами - ---- - -### СДЕЛАНО ранее: Переработка POI коллектора — 2GIS как основной источник - -(см. предыдущую версию CONTINUE.md) - ---- - -## Осталось сделать - -### Высокий приоритет - -1. **Цены отелей из SearXNG** — LLM не всегда извлекает цены (0 RUB/night). Нужно: - - Добавить fallback: если цена 0, попробовать парсить из snippet - - Файл: `backend/internal/agent/travel_hotels_collector.go` - -2. **Авиабилеты для маршрутов** — "Золотое кольцо" не имеет IATA кода. Нужно: - - Если destination не IATA, искать билеты до первого конкретного города в маршруте - - Файл: `backend/internal/agent/travel_flights_collector.go` - -### Средний приоритет - -3. **Drag & drop в ItineraryWidget** — перетаскивание элементов между днями -4. **Кеш SearXNG результатов** — Redis кеш на 10-30 минут -5. **Сохранение draft в БД** — персистентность TripDraft через trip_drafts таблицу - -### Низкий приоритет - -6. **Экспорт маршрута** — PDF/Markdown -7. **Real-time обновления** — WebSocket для тредов - ---- - -## Контекст - -### Архитектура travel pipeline: - -``` -User -> /travel page -> streamTravelAgent() -> api-gateway -> chat-svc -> agent-svc - -> RunTravelOrchestrator: - 1. Planner Agent (LLM) -> TripBrief - 2. Geocode destinations -> travel-svc -> 2GIS Geocoder API - 3. Parallel collectors: - - Events: SearXNG -> Crawl4AI -> LLM extraction -> geocode (2GIS) - - POI: 2GIS Places API (primary) -> LLM enrichment -> SearXNG fallback - - Hotels: SearXNG -> Crawl4AI -> LLM extraction -> geocode (2GIS) - - Transport: TravelPayouts API - 4. 
Itinerary Builder (LLM) -> ItineraryDay[] - 5. Road routing: 2GIS Routing API -> RouteDirection (дорожная геометрия) - 6. Transport costs: calculateTransportCosts() -> машина/автобус/такси - 7. Widget emission -> NDJSON stream -> frontend (карта 2GIS MapGL) -``` +### Контекст для продолжения +- Сервер: 192.168.31.59 (внутренний IP), 5.187.77.89 (внешний) +- Gitea: https://git.gooseek.ru +- Registry: localhost:5000 (внутренний, без внешнего доступа) +- K3s + Nginx Ingress + Cert-Manager уже установлены diff --git a/backend/cmd/agent-svc/main.go b/backend/cmd/agent-svc/main.go index 3a991ca..98e82f0 100644 --- a/backend/cmd/agent-svc/main.go +++ b/backend/cmd/agent-svc/main.go @@ -18,6 +18,7 @@ import ( "github.com/gooseek/backend/pkg/config" "github.com/gooseek/backend/pkg/middleware" "github.com/gooseek/backend/pkg/ndjson" + "github.com/gooseek/backend/pkg/storage" ) type SearchRequest struct { @@ -55,6 +56,24 @@ func main() { searchClient := search.NewSearXNGClient(cfg) + var photoCache *agent.PhotoCacheService + if cfg.MinioEndpoint != "" { + minioStorage, err := storage.NewMinioStorage(storage.MinioConfig{ + Endpoint: cfg.MinioEndpoint, + AccessKey: cfg.MinioAccessKey, + SecretKey: cfg.MinioSecretKey, + Bucket: cfg.MinioBucket, + UseSSL: cfg.MinioUseSSL, + PublicURL: cfg.MinioPublicURL, + }) + if err != nil { + log.Printf("Warning: MinIO init failed (photo cache disabled): %v", err) + } else { + photoCache = agent.NewPhotoCacheService(minioStorage) + log.Printf("Photo cache enabled: MinIO at %s, bucket=%s, publicURL=%s", cfg.MinioEndpoint, cfg.MinioBucket, cfg.MinioPublicURL) + } + } + app := fiber.New(fiber.Config{ StreamRequestBody: true, BodyLimit: 10 * 1024 * 1024, @@ -166,6 +185,7 @@ func main() { TravelSvcURL: cfg.TravelSvcURL, TravelPayoutsToken: cfg.TravelPayoutsToken, TravelPayoutsMarker: cfg.TravelPayoutsMarker, + PhotoCache: photoCache, }, } diff --git a/backend/cmd/api-gateway/main.go b/backend/cmd/api-gateway/main.go index 987d239..a12ead3 100644 
--- a/backend/cmd/api-gateway/main.go +++ b/backend/cmd/api-gateway/main.go @@ -53,7 +53,9 @@ func main() { "discover": cfg.DiscoverSvcURL, "finance": cfg.FinanceHeatmapURL, "learning": cfg.LearningSvcURL, + "sandbox": cfg.SandboxSvcURL, "travel": cfg.TravelSvcURL, + "medicine": cfg.MedicineSvcURL, "admin": cfg.AdminSvcURL, } @@ -140,8 +142,12 @@ func getTarget(path string) (base, rewrite string) { return svcURLs["finance"], path case strings.HasPrefix(path, "/api/v1/learning"): return svcURLs["learning"], path + case strings.HasPrefix(path, "/api/v1/sandbox"): + return svcURLs["sandbox"], path case strings.HasPrefix(path, "/api/v1/travel"): return svcURLs["travel"], path + case strings.HasPrefix(path, "/api/v1/medicine"): + return svcURLs["medicine"], path case strings.HasPrefix(path, "/api/v1/admin"): return svcURLs["admin"], path default: diff --git a/backend/cmd/learning-svc/main.go b/backend/cmd/learning-svc/main.go index e07f59e..4fc230c 100644 --- a/backend/cmd/learning-svc/main.go +++ b/backend/cmd/learning-svc/main.go @@ -2,119 +2,75 @@ package main import ( "context" + "encoding/json" "fmt" "log" "os" + "strings" "time" "github.com/gofiber/fiber/v2" "github.com/gofiber/fiber/v2/middleware/cors" "github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/internal/db" "github.com/gooseek/backend/internal/learning" "github.com/gooseek/backend/internal/llm" + "github.com/gooseek/backend/internal/search" "github.com/gooseek/backend/pkg/config" + "github.com/gooseek/backend/pkg/middleware" ) -type LessonStore struct { - lessons map[string]*learning.StepByStepLesson -} - -func NewLessonStore() *LessonStore { - return &LessonStore{ - lessons: make(map[string]*learning.StepByStepLesson), - } -} - -func (s *LessonStore) Save(lesson *learning.StepByStepLesson) { - s.lessons[lesson.ID] = lesson -} - -func (s *LessonStore) Get(id string) *learning.StepByStepLesson { - return s.lessons[id] -} - -func (s *LessonStore) List(limit, offset int) 
[]*learning.StepByStepLesson { - result := make([]*learning.StepByStepLesson, 0) - i := 0 - for _, l := range s.lessons { - if i >= offset && len(result) < limit { - result = append(result, l) - } - i++ - } - return result -} - -func (s *LessonStore) Delete(id string) bool { - if _, ok := s.lessons[id]; ok { - delete(s.lessons, id) - return true - } - return false -} - func main() { cfg, err := config.Load() if err != nil { log.Fatal("Failed to load config:", err) } - var llmClient llm.Client + var database *db.PostgresDB + var repo *db.LearningRepository - // Priority 1: Timeweb Cloud AI (recommended for production) - if cfg.TimewebAgentAccessID != "" && cfg.TimewebAPIKey != "" { - client, err := llm.NewTimewebClient(llm.TimewebConfig{ - ProviderID: "timeweb", - BaseURL: cfg.TimewebAPIBaseURL, - AgentAccessID: cfg.TimewebAgentAccessID, - APIKey: cfg.TimewebAPIKey, - ModelKey: cfg.DefaultLLMModel, - ProxySource: cfg.TimewebProxySource, - }) - if err != nil { - log.Printf("Warning: Failed to create Timeweb client: %v", err) - } else { - llmClient = client - log.Println("Using Timeweb Cloud AI as LLM provider") - } - } - - // Priority 2: Anthropic - if llmClient == nil && cfg.AnthropicAPIKey != "" && !isJWT(cfg.AnthropicAPIKey) { - client, err := llm.NewAnthropicClient(llm.ProviderConfig{ - ProviderID: "anthropic", - APIKey: cfg.AnthropicAPIKey, - ModelKey: "claude-3-5-sonnet-20241022", - }) - if err != nil { - log.Printf("Warning: Failed to create Anthropic client: %v", err) - } else { - llmClient = client - log.Println("Using Anthropic as LLM provider") - } - } - - // Priority 3: OpenAI (only if it's a real OpenAI key, not Timeweb JWT) - if llmClient == nil && cfg.OpenAIAPIKey != "" && !isJWT(cfg.OpenAIAPIKey) { - client, err := llm.NewOpenAIClient(llm.ProviderConfig{ - ProviderID: "openai", - APIKey: cfg.OpenAIAPIKey, - ModelKey: "gpt-4o-mini", - }) - if err != nil { - log.Printf("Warning: Failed to create OpenAI client: %v", err) - } else { - llmClient = client - 
log.Println("Using OpenAI as LLM provider") + if cfg.DatabaseURL != "" { + maxRetries := 30 + for i := 0; i < maxRetries; i++ { + database, err = db.NewPostgresDB(cfg.DatabaseURL) + if err == nil { + break + } + log.Printf("Waiting for database (attempt %d/%d): %v", i+1, maxRetries, err) + time.Sleep(2 * time.Second) } + if err != nil { + log.Fatal("Database required for learning-svc:", err) + } + defer database.Close() + + ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) + defer cancel() + if err := database.RunMigrations(ctx); err != nil { + log.Printf("Base migrations warning: %v", err) + } + repo = db.NewLearningRepository(database) + if err := repo.RunMigrations(ctx); err != nil { + log.Printf("Learning migrations warning: %v", err) + } + log.Println("PostgreSQL connected, learning migrations complete") + } else { + log.Fatal("DATABASE_URL required for learning-svc") } + llmClient := createLLMClient(cfg) if llmClient == nil { - log.Fatal("No LLM provider configured. 
Please set TIMEWEB_AGENT_ACCESS_ID + TIMEWEB_API_KEY, or OPENAI_API_KEY, or ANTHROPIC_API_KEY") + log.Fatal("No LLM provider configured") } - generator := learning.NewLearningGenerator(llmClient) - store := NewLessonStore() + searchClient := search.NewSearXNGClient(cfg) + + courseGen := learning.NewCourseAutoGenerator(learning.CourseAutoGenConfig{ + LLM: llmClient, + Repo: repo, + SearchClient: searchClient, + }) + go courseGen.StartBackground(context.Background()) app := fiber.New(fiber.Config{ BodyLimit: 50 * 1024 * 1024, @@ -129,259 +85,296 @@ func main() { return c.JSON(fiber.Map{"status": "ok"}) }) - app.Post("/api/v1/learning/lesson", func(c *fiber.Ctx) error { - var req struct { - Topic string `json:"topic"` - Query string `json:"query"` - Difficulty string `json:"difficulty"` - Mode string `json:"mode"` - MaxSteps int `json:"maxSteps"` - Locale string `json:"locale"` - IncludeCode bool `json:"includeCode"` - IncludeQuiz bool `json:"includeQuiz"` - } + api := app.Group("/api/v1/learning", middleware.JWT(middleware.JWTConfig{ + Secret: cfg.JWTSecret, + AuthSvcURL: cfg.AuthSvcURL, + AllowGuest: true, + })) - if err := c.BodyParser(&req); err != nil { - return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) - } - - ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second) - defer cancel() - - difficulty := learning.DifficultyBeginner - switch req.Difficulty { - case "intermediate": - difficulty = learning.DifficultyIntermediate - case "advanced": - difficulty = learning.DifficultyAdvanced - case "expert": - difficulty = learning.DifficultyExpert - } - - mode := learning.ModeExplain - switch req.Mode { - case "guided": - mode = learning.ModeGuided - case "interactive": - mode = learning.ModeInteractive - case "practice": - mode = learning.ModePractice - case "quiz": - mode = learning.ModeQuiz - } - - lesson, err := generator.GenerateLesson(ctx, learning.GenerateLessonOptions{ - Topic: req.Topic, - Query: req.Query, - Difficulty: 
difficulty, - Mode: mode, - MaxSteps: req.MaxSteps, - Locale: req.Locale, - IncludeCode: req.IncludeCode, - IncludeQuiz: req.IncludeQuiz, - }) - if err != nil { - return c.Status(500).JSON(fiber.Map{"error": err.Error()}) - } - - store.Save(lesson) - - return c.JSON(lesson) - }) - - app.Post("/api/v1/learning/explain", func(c *fiber.Ctx) error { - var req struct { - Topic string `json:"topic"` - Difficulty string `json:"difficulty"` - Locale string `json:"locale"` - } - - if err := c.BodyParser(&req); err != nil { - return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) - } - - ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) - defer cancel() - - difficulty := learning.DifficultyBeginner - switch req.Difficulty { - case "intermediate": - difficulty = learning.DifficultyIntermediate - case "advanced": - difficulty = learning.DifficultyAdvanced - case "expert": - difficulty = learning.DifficultyExpert - } - - step, err := generator.GenerateExplanation(ctx, req.Topic, difficulty, req.Locale) - if err != nil { - return c.Status(500).JSON(fiber.Map{"error": err.Error()}) - } - - return c.JSON(step) - }) - - app.Post("/api/v1/learning/quiz", func(c *fiber.Ctx) error { - var req struct { - Topic string `json:"topic"` - NumQuestions int `json:"numQuestions"` - Difficulty string `json:"difficulty"` - Locale string `json:"locale"` - } - - if err := c.BodyParser(&req); err != nil { - return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) - } - - if req.NumQuestions == 0 { - req.NumQuestions = 5 - } - - ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) - defer cancel() - - difficulty := learning.DifficultyBeginner - switch req.Difficulty { - case "intermediate": - difficulty = learning.DifficultyIntermediate - case "advanced": - difficulty = learning.DifficultyAdvanced - case "expert": - difficulty = learning.DifficultyExpert - } - - questions, err := generator.GenerateQuiz(ctx, req.Topic, req.NumQuestions, 
difficulty, req.Locale) - if err != nil { - return c.Status(500).JSON(fiber.Map{"error": err.Error()}) - } - - return c.JSON(fiber.Map{"questions": questions}) - }) - - app.Post("/api/v1/learning/practice", func(c *fiber.Ctx) error { - var req struct { - Topic string `json:"topic"` - Language string `json:"language"` - Difficulty string `json:"difficulty"` - Locale string `json:"locale"` - } - - if err := c.BodyParser(&req); err != nil { - return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) - } - - ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) - defer cancel() - - difficulty := learning.DifficultyBeginner - switch req.Difficulty { - case "intermediate": - difficulty = learning.DifficultyIntermediate - case "advanced": - difficulty = learning.DifficultyAdvanced - case "expert": - difficulty = learning.DifficultyExpert - } - - exercise, err := generator.GeneratePracticeExercise(ctx, req.Topic, req.Language, difficulty, req.Locale) - if err != nil { - return c.Status(500).JSON(fiber.Map{"error": err.Error()}) - } - - return c.JSON(exercise) - }) - - app.Get("/api/v1/learning/lessons", func(c *fiber.Ctx) error { + api.Get("/courses", func(c *fiber.Ctx) error { + category := c.Query("category") + difficulty := c.Query("difficulty") + search := c.Query("search") limit := c.QueryInt("limit", 20) offset := c.QueryInt("offset", 0) - lessons := store.List(limit, offset) - - summaries := make([]map[string]interface{}, 0) - for _, l := range lessons { - summaries = append(summaries, map[string]interface{}{ - "id": l.ID, - "title": l.Title, - "topic": l.Topic, - "difficulty": l.Difficulty, - "mode": l.Mode, - "stepsCount": len(l.Steps), - "estimatedTime": l.EstimatedTime, - "progress": l.Progress, - "createdAt": l.CreatedAt, - }) + courses, total, err := repo.ListCourses(c.Context(), category, difficulty, search, limit, offset) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to list courses"}) } - - return 
c.JSON(fiber.Map{"lessons": summaries, "count": len(summaries)}) + return c.JSON(fiber.Map{"courses": courses, "total": total}) }) - app.Get("/api/v1/learning/lessons/:id", func(c *fiber.Ctx) error { - id := c.Params("id") - lesson := store.Get(id) - if lesson == nil { - return c.Status(404).JSON(fiber.Map{"error": "Lesson not found"}) + api.Get("/courses/:slug", func(c *fiber.Ctx) error { + slug := c.Params("slug") + course, err := repo.GetCourseBySlug(c.Context(), slug) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to get course"}) } - return c.JSON(lesson) + if course == nil { + return c.Status(404).JSON(fiber.Map{"error": "Course not found"}) + } + return c.JSON(course) }) - app.Post("/api/v1/learning/lessons/:id/complete-step", func(c *fiber.Ctx) error { - id := c.Params("id") - lesson := store.Get(id) - if lesson == nil { - return c.Status(404).JSON(fiber.Map{"error": "Lesson not found"}) + api.Get("/me/profile", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + if userID == "" { + return c.Status(401).JSON(fiber.Map{"error": "Authentication required"}) + } + profile, err := repo.GetProfile(c.Context(), userID) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to get profile"}) + } + if profile == nil { + return c.JSON(fiber.Map{"profile": nil, "exists": false}) + } + return c.JSON(fiber.Map{"profile": profile, "exists": true}) + }) + + api.Post("/me/profile", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + if userID == "" { + return c.Status(401).JSON(fiber.Map{"error": "Authentication required"}) } var req struct { - StepIndex int `json:"stepIndex"` + DisplayName string `json:"displayName"` + Profile json.RawMessage `json:"profile"` + OnboardingCompleted bool `json:"onboardingCompleted"` } - if err := c.BodyParser(&req); err != nil { return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) } - lesson.CompleteStep(req.StepIndex) + if req.Profile == nil { + req.Profile = 
json.RawMessage("{}") + } - return c.JSON(fiber.Map{ - "success": true, - "progress": lesson.Progress, - }) + profile := &db.LearningUserProfile{ + UserID: userID, + DisplayName: req.DisplayName, + Profile: req.Profile, + OnboardingCompleted: req.OnboardingCompleted, + } + if err := repo.UpsertProfile(c.Context(), profile); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to save profile"}) + } + return c.JSON(fiber.Map{"success": true}) }) - app.Post("/api/v1/learning/lessons/:id/submit-answer", func(c *fiber.Ctx) error { - id := c.Params("id") - lesson := store.Get(id) - if lesson == nil { - return c.Status(404).JSON(fiber.Map{"error": "Lesson not found"}) + api.Post("/me/onboarding", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + if userID == "" { + return c.Status(401).JSON(fiber.Map{"error": "Authentication required"}) } var req struct { - StepIndex int `json:"stepIndex"` - SelectedOptions []string `json:"selectedOptions"` + DisplayName string `json:"displayName"` + Answers map[string]string `json:"answers"` } - if err := c.BodyParser(&req); err != nil { return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) } - correct, explanation := lesson.SubmitQuizAnswer(req.StepIndex, req.SelectedOptions) - - if correct { - lesson.CompleteStep(req.StepIndex) + sanitizedAnswers := make(map[string]string, len(req.Answers)) + for k, v := range req.Answers { + key := strings.TrimSpace(k) + val := strings.TrimSpace(v) + if key == "" || val == "" { + continue + } + if len(val) > 600 { + val = val[:600] + } + sanitizedAnswers[key] = val + } + if len(sanitizedAnswers) < 3 { + return c.Status(400).JSON(fiber.Map{"error": "At least 3 onboarding answers are required"}) } - return c.JSON(fiber.Map{ - "correct": correct, - "explanation": explanation, - "progress": lesson.Progress, - }) + ctx, cancel := context.WithTimeout(c.Context(), 60*time.Second) + defer cancel() + + profileJSON, err := learning.BuildProfileFromOnboarding(ctx, 
llmClient, sanitizedAnswers) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to build onboarding profile"}) + } + + existingProfile, _ := repo.GetProfile(c.Context(), userID) + profile := &db.LearningUserProfile{ + UserID: userID, + DisplayName: strings.TrimSpace(req.DisplayName), + Profile: profileJSON, + OnboardingCompleted: true, + } + if existingProfile != nil { + if profile.DisplayName == "" { + profile.DisplayName = existingProfile.DisplayName + } + profile.ResumeFileID = existingProfile.ResumeFileID + profile.ResumeExtractedText = existingProfile.ResumeExtractedText + } + + if err := repo.UpsertProfile(c.Context(), profile); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to save onboarding profile"}) + } + + return c.JSON(fiber.Map{"success": true, "profile": profileJSON}) }) - app.Delete("/api/v1/learning/lessons/:id", func(c *fiber.Ctx) error { - id := c.Params("id") - if store.Delete(id) { - return c.JSON(fiber.Map{"success": true}) + api.Post("/me/resume", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + if userID == "" { + return c.Status(401).JSON(fiber.Map{"error": "Authentication required"}) } - return c.Status(404).JSON(fiber.Map{"error": "Lesson not found"}) + + var req struct { + FileID string `json:"fileId"` + ExtractedText string `json:"extractedText"` + } + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + if req.ExtractedText == "" { + return c.Status(400).JSON(fiber.Map{"error": "Extracted text required"}) + } + + ctx, cancel := context.WithTimeout(c.Context(), 60*time.Second) + defer cancel() + + profileJSON, err := learning.BuildProfileFromResume(ctx, llmClient, req.ExtractedText) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to build profile from resume"}) + } + + profile := &db.LearningUserProfile{ + UserID: userID, + Profile: profileJSON, + ResumeFileID: &req.FileID, + 
ResumeExtractedText: req.ExtractedText, + OnboardingCompleted: true, + } + if err := repo.UpsertProfile(c.Context(), profile); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to save profile"}) + } + return c.JSON(fiber.Map{"success": true, "profile": profileJSON}) + }) + + api.Post("/enroll", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + if userID == "" { + return c.Status(401).JSON(fiber.Map{"error": "Authentication required"}) + } + + var req struct { + CourseID string `json:"courseId"` + Slug string `json:"slug"` + } + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + var course *db.LearningCourse + var courseErr error + if req.CourseID != "" { + course, courseErr = repo.GetCourseByID(c.Context(), req.CourseID) + } else if req.Slug != "" { + course, courseErr = repo.GetCourseBySlug(c.Context(), req.Slug) + } else { + return c.Status(400).JSON(fiber.Map{"error": "courseId or slug required"}) + } + if courseErr != nil || course == nil { + return c.Status(404).JSON(fiber.Map{"error": "Course not found"}) + } + + ctx, cancel := context.WithTimeout(c.Context(), 90*time.Second) + defer cancel() + + profile, _ := repo.GetProfile(ctx, userID) + var profileText string + if profile != nil { + profileText = string(profile.Profile) + } + + plan, err := learning.BuildPersonalPlan(ctx, llmClient, course, profileText) + if err != nil { + plan = course.BaseOutline + } + + enrollment := &db.LearningEnrollment{ + UserID: userID, + CourseID: course.ID, + Status: "active", + Plan: plan, + Progress: json.RawMessage(`{"completed_modules":[],"current_module":0,"score":0}`), + } + if err := repo.CreateEnrollment(c.Context(), enrollment); err != nil { + if strings.Contains(err.Error(), "duplicate") || strings.Contains(err.Error(), "unique") { + return c.Status(409).JSON(fiber.Map{"error": "Already enrolled in this course"}) + } + return c.Status(500).JSON(fiber.Map{"error": 
"Failed to create enrollment"}) + } + return c.Status(201).JSON(enrollment) + }) + + api.Get("/enrollments", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + if userID == "" { + return c.Status(401).JSON(fiber.Map{"error": "Authentication required"}) + } + enrollments, err := repo.ListEnrollments(c.Context(), userID) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to list enrollments"}) + } + if enrollments == nil { + enrollments = []*db.LearningEnrollment{} + } + return c.JSON(fiber.Map{"enrollments": enrollments}) + }) + + api.Get("/enrollments/:id", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + if userID == "" { + return c.Status(401).JSON(fiber.Map{"error": "Authentication required"}) + } + enrollment, err := repo.GetEnrollment(c.Context(), c.Params("id")) + if err != nil || enrollment == nil { + return c.Status(404).JSON(fiber.Map{"error": "Enrollment not found"}) + } + if enrollment.UserID != userID { + return c.Status(403).JSON(fiber.Map{"error": "Access denied"}) + } + + course, _ := repo.GetCourseByID(c.Context(), enrollment.CourseID) + enrollment.Course = course + + tasks, _ := repo.ListTasksByEnrollment(c.Context(), enrollment.ID) + if tasks == nil { + tasks = []*db.LearningTask{} + } + + return c.JSON(fiber.Map{"enrollment": enrollment, "tasks": tasks}) + }) + + api.Get("/enrollments/:id/tasks", func(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + if userID == "" { + return c.Status(401).JSON(fiber.Map{"error": "Authentication required"}) + } + enrollment, err := repo.GetEnrollment(c.Context(), c.Params("id")) + if err != nil || enrollment == nil || enrollment.UserID != userID { + return c.Status(404).JSON(fiber.Map{"error": "Not found"}) + } + tasks, err := repo.ListTasksByEnrollment(c.Context(), enrollment.ID) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to list tasks"}) + } + if tasks == nil { + tasks = []*db.LearningTask{} + } + return 
c.JSON(fiber.Map{"tasks": tasks}) }) port := getEnvInt("LEARNING_SVC_PORT", 3034) @@ -389,6 +382,43 @@ func main() { log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) } +func createLLMClient(cfg *config.Config) llm.Client { + if cfg.TimewebAgentAccessID != "" && cfg.TimewebAPIKey != "" { + client, err := llm.NewTimewebClient(llm.TimewebConfig{ + ProviderID: "timeweb", + BaseURL: cfg.TimewebAPIBaseURL, + AgentAccessID: cfg.TimewebAgentAccessID, + APIKey: cfg.TimewebAPIKey, + ModelKey: cfg.DefaultLLMModel, + ProxySource: cfg.TimewebProxySource, + }) + if err == nil { + return client + } + } + if cfg.AnthropicAPIKey != "" && !isJWT(cfg.AnthropicAPIKey) { + client, err := llm.NewAnthropicClient(llm.ProviderConfig{ + ProviderID: "anthropic", + APIKey: cfg.AnthropicAPIKey, + ModelKey: "claude-3-5-sonnet-20241022", + }) + if err == nil { + return client + } + } + if cfg.OpenAIAPIKey != "" && !isJWT(cfg.OpenAIAPIKey) { + client, err := llm.NewOpenAIClient(llm.ProviderConfig{ + ProviderID: "openai", + APIKey: cfg.OpenAIAPIKey, + ModelKey: "gpt-4o-mini", + }) + if err == nil { + return client + } + } + return nil +} + func getEnvInt(key string, defaultValue int) int { if val := os.Getenv(key); val != "" { var result int diff --git a/backend/cmd/medicine-svc/main.go b/backend/cmd/medicine-svc/main.go new file mode 100644 index 0000000..5103dc4 --- /dev/null +++ b/backend/cmd/medicine-svc/main.go @@ -0,0 +1,120 @@ +package main + +import ( + "bufio" + "context" + "fmt" + "log" + "os" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/internal/llm" + "github.com/gooseek/backend/internal/medicine" + "github.com/gooseek/backend/pkg/middleware" +) + +func main() { + llmClient := createLLMClient() + if llmClient == nil { + log.Println("medicine-svc: no LLM configured, fallback mode enabled") + } + + svc := medicine.NewService(medicine.ServiceConfig{ + LLM: 
llmClient, + SearXNGURL: getEnv("SEARXNG_URL", "http://searxng:8080"), + Timeout: 20 * time.Second, + }) + + app := fiber.New(fiber.Config{ + ReadTimeout: 60 * time.Second, + WriteTimeout: 120 * time.Second, + }) + + app.Use(logger.New()) + app.Use(cors.New()) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok", "service": "medicine-svc"}) + }) + + jwtOptional := middleware.JWTConfig{ + Secret: os.Getenv("JWT_SECRET"), + AuthSvcURL: getEnv("AUTH_SVC_URL", "http://auth-svc:3050"), + AllowGuest: true, + } + + api := app.Group("/api/v1/medicine") + api.Post("/consult", middleware.JWT(jwtOptional), func(c *fiber.Ctx) error { + var req medicine.ConsultRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + if req.Symptoms == "" { + return c.Status(400).JSON(fiber.Map{"error": "symptoms is required"}) + } + + c.Set("Content-Type", "application/x-ndjson") + c.Set("Cache-Control", "no-cache") + c.Set("Transfer-Encoding", "chunked") + + c.Context().SetBodyStreamWriter(func(w *bufio.Writer) { + ctx, cancel := context.WithTimeout(context.Background(), 90*time.Second) + defer cancel() + if err := svc.StreamConsult(ctx, req, w); err != nil { + log.Printf("medicine consult error: %v", err) + } + }) + return nil + }) + + port := getEnvInt("PORT", 3037) + log.Printf("medicine-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func createLLMClient() llm.Client { + cfg := llm.ProviderConfig{ + ProviderID: getEnv("LLM_PROVIDER", "timeweb"), + ModelKey: getEnv("LLM_MODEL", "gpt-4o-mini"), + BaseURL: os.Getenv("TIMEWEB_API_BASE_URL"), + APIKey: os.Getenv("TIMEWEB_API_KEY"), + AgentAccessID: os.Getenv("TIMEWEB_AGENT_ACCESS_ID"), + } + client, err := llm.NewClient(cfg) + if err == nil { + return client + } + if os.Getenv("OPENAI_API_KEY") != "" { + openAIClient, openAIErr := llm.NewClient(llm.ProviderConfig{ + ProviderID: "openai", + 
ModelKey: "gpt-4o-mini", + APIKey: os.Getenv("OPENAI_API_KEY"), + }) + if openAIErr == nil { + return openAIClient + } + } + return nil +} + +func getEnv(key, fallback string) string { + if v := os.Getenv(key); v != "" { + return v + } + return fallback +} + +func getEnvInt(key string, fallback int) int { + v := os.Getenv(key) + if v == "" { + return fallback + } + var out int + if _, err := fmt.Sscanf(v, "%d", &out); err != nil { + return fallback + } + return out +} diff --git a/backend/cmd/sandbox-svc/main.go b/backend/cmd/sandbox-svc/main.go new file mode 100644 index 0000000..3526d5a --- /dev/null +++ b/backend/cmd/sandbox-svc/main.go @@ -0,0 +1,540 @@ +package main + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "log" + "net/http" + "net/url" + "os" + "time" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/cors" + "github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gooseek/backend/internal/db" + "github.com/gooseek/backend/pkg/config" + "github.com/gooseek/backend/pkg/middleware" +) + +var ( + openSandboxURL string + repo *db.LearningRepository +) + +func main() { + cfg, err := config.Load() + if err != nil { + log.Fatal("Failed to load config:", err) + } + + openSandboxURL = getEnv("OPENSANDBOX_URL", "http://opensandbox-server:8080") + + var database *db.PostgresDB + if cfg.DatabaseURL != "" { + maxRetries := 30 + for i := 0; i < maxRetries; i++ { + database, err = db.NewPostgresDB(cfg.DatabaseURL) + if err == nil { + break + } + log.Printf("Waiting for database (attempt %d/%d): %v", i+1, maxRetries, err) + time.Sleep(2 * time.Second) + } + if err != nil { + log.Fatal("Database required for sandbox-svc:", err) + } + defer database.Close() + + ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) + defer cancel() + if err := database.RunMigrations(ctx); err != nil { + log.Printf("Base migrations warning: %v", err) + } + repo = db.NewLearningRepository(database) + if err := 
repo.RunMigrations(ctx); err != nil { + log.Printf("Learning migrations warning: %v", err) + } + log.Println("PostgreSQL connected") + } else { + log.Fatal("DATABASE_URL required for sandbox-svc") + } + + app := fiber.New(fiber.Config{ + BodyLimit: 50 * 1024 * 1024, + ReadTimeout: 60 * time.Second, + WriteTimeout: 5 * time.Minute, + }) + + app.Use(logger.New()) + app.Use(cors.New()) + + app.Get("/health", func(c *fiber.Ctx) error { + return c.JSON(fiber.Map{"status": "ok"}) + }) + + api := app.Group("/api/v1/sandbox", middleware.JWT(middleware.JWTConfig{ + Secret: cfg.JWTSecret, + AuthSvcURL: cfg.AuthSvcURL, + AllowGuest: false, + })) + + api.Post("/sessions", handleCreateSession) + api.Get("/sessions/:id", handleGetSession) + api.Get("/sessions/:id/files", handleListFiles) + api.Get("/sessions/:id/file", handleReadFile) + api.Put("/sessions/:id/file", handleWriteFile) + api.Post("/sessions/:id/commands/run", handleRunCommand) + api.Post("/sessions/:id/verify", handleVerify) + + port := getEnvInt("SANDBOX_SVC_PORT", 3036) + log.Printf("sandbox-svc listening on :%d", port) + log.Fatal(app.Listen(fmt.Sprintf(":%d", port))) +} + +func handleCreateSession(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + if userID == "" { + return c.Status(401).JSON(fiber.Map{"error": "Authentication required"}) + } + + var req struct { + TaskID string `json:"taskId"` + Image string `json:"image"` + } + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + image := req.Image + if image == "" { + image = "opensandbox/code-interpreter:v1.0.1" + } + + sandboxResp, err := createOpenSandbox(image) + if err != nil { + log.Printf("OpenSandbox create error: %v", err) + return c.Status(503).JSON(fiber.Map{"error": "Sandbox creation failed: " + err.Error()}) + } + + session := &db.SandboxSession{ + UserID: userID, + OpenSandboxID: sandboxResp.ID, + Status: "ready", + Metadata: json.RawMessage(fmt.Sprintf(`{"image":"%s"}`, 
image)), + } + if req.TaskID != "" { + session.TaskID = &req.TaskID + } + + if err := repo.CreateSandboxSession(c.Context(), session); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to save session"}) + } + + logEvent(c.Context(), session.ID, "session_created", map[string]interface{}{"image": image}) + + return c.Status(201).JSON(session) +} + +func handleGetSession(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + session, err := repo.GetSandboxSession(c.Context(), c.Params("id")) + if err != nil || session == nil || session.UserID != userID { + return c.Status(404).JSON(fiber.Map{"error": "Session not found"}) + } + return c.JSON(session) +} + +func handleListFiles(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + session, err := repo.GetSandboxSession(c.Context(), c.Params("id")) + if err != nil || session == nil || session.UserID != userID { + return c.Status(404).JSON(fiber.Map{"error": "Session not found"}) + } + + path := c.Query("path", "/home/user") + result, err := sandboxFilesRequest(session.OpenSandboxID, path) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to list files"}) + } + return c.JSON(result) +} + +func handleReadFile(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + session, err := repo.GetSandboxSession(c.Context(), c.Params("id")) + if err != nil || session == nil || session.UserID != userID { + return c.Status(404).JSON(fiber.Map{"error": "Session not found"}) + } + + path := c.Query("path") + if path == "" { + return c.Status(400).JSON(fiber.Map{"error": "path query required"}) + } + + content, err := sandboxReadFile(session.OpenSandboxID, path) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to read file"}) + } + + logEvent(c.Context(), session.ID, "file_read", map[string]interface{}{"path": path}) + + return c.JSON(fiber.Map{"path": path, "content": content}) +} + +func handleWriteFile(c *fiber.Ctx) error { + userID := 
middleware.GetUserID(c) + session, err := repo.GetSandboxSession(c.Context(), c.Params("id")) + if err != nil || session == nil || session.UserID != userID { + return c.Status(404).JSON(fiber.Map{"error": "Session not found"}) + } + + var req struct { + Path string `json:"path"` + Content string `json:"content"` + } + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + if err := sandboxWriteFile(session.OpenSandboxID, req.Path, req.Content); err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to write file"}) + } + + logEvent(c.Context(), session.ID, "file_write", map[string]interface{}{ + "path": req.Path, + "size": len(req.Content), + }) + + return c.JSON(fiber.Map{"success": true}) +} + +func handleRunCommand(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + session, err := repo.GetSandboxSession(c.Context(), c.Params("id")) + if err != nil || session == nil || session.UserID != userID { + return c.Status(404).JSON(fiber.Map{"error": "Session not found"}) + } + + var req struct { + Command string `json:"command"` + Cwd string `json:"cwd"` + } + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + result, err := sandboxRunCommand(session.OpenSandboxID, req.Command, req.Cwd) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Failed to run command"}) + } + + logEvent(c.Context(), session.ID, "command_run", map[string]interface{}{ + "command": req.Command, + "exit_code": result["exit_code"], + }) + + return c.JSON(result) +} + +func handleVerify(c *fiber.Ctx) error { + userID := middleware.GetUserID(c) + session, err := repo.GetSandboxSession(c.Context(), c.Params("id")) + if err != nil || session == nil || session.UserID != userID { + return c.Status(404).JSON(fiber.Map{"error": "Session not found"}) + } + + var req struct { + Command string `json:"command"` + } + if err := c.BodyParser(&req); 
err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request"}) + } + + if req.Command == "" { + if session.TaskID != nil { + task, _ := repo.GetTask(c.Context(), *session.TaskID) + if task != nil && task.VerificationCmd != "" { + req.Command = task.VerificationCmd + } + } + if req.Command == "" { + req.Command = "echo 'No verification command configured'" + } + } + + result, err := sandboxRunCommand(session.OpenSandboxID, req.Command, "") + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": "Verification failed"}) + } + + logEvent(c.Context(), session.ID, "verify", map[string]interface{}{ + "command": req.Command, + "exit_code": result["exit_code"], + "stdout": result["stdout"], + }) + + passed := false + if exitCode, ok := result["exit_code"].(float64); ok && exitCode == 0 { + passed = true + } + + if session.TaskID != nil { + resultJSON, _ := json.Marshal(result) + submission := &db.LearningSubmission{ + TaskID: *session.TaskID, + SandboxSessionID: &session.ID, + Result: resultJSON, + Score: 0, + MaxScore: 100, + } + if passed { + submission.Score = 100 + } + repo.CreateSubmission(c.Context(), submission) + repo.UpdateTaskStatus(c.Context(), *session.TaskID, "verified") + } + + return c.JSON(fiber.Map{ + "passed": passed, + "result": result, + "sessionId": session.ID, + }) +} + +// --- OpenSandbox HTTP client --- + +type sandboxCreateResponse struct { + ID string `json:"id"` + SandboxID string `json:"sandbox_id"` + Data struct { + ID string `json:"id"` + } `json:"data"` +} + +func createOpenSandbox(image string) (*sandboxCreateResponse, error) { + payload, _ := json.Marshal(map[string]interface{}{ + "image": image, + "entrypoint": []string{"/opt/opensandbox/code-interpreter.sh"}, + "timeout": "30m", + }) + + ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) + defer cancel() + + req, err := http.NewRequestWithContext(ctx, "POST", openSandboxURL+"/api/v1/sandboxes", bytes.NewReader(payload)) + if err != nil { + 
return nil, err + } + req.Header.Set("Content-Type", "application/json") + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return nil, fmt.Errorf("opensandbox unreachable: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode >= 400 { + body, _ := io.ReadAll(resp.Body) + return nil, fmt.Errorf("opensandbox error %d: %s", resp.StatusCode, string(body)) + } + + var result sandboxCreateResponse + if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { + return nil, err + } + if result.ID == "" { + if result.SandboxID != "" { + result.ID = result.SandboxID + } else if result.Data.ID != "" { + result.ID = result.Data.ID + } + } + if result.ID == "" { + return nil, fmt.Errorf("opensandbox response missing sandbox id") + } + return &result, nil +} + +func sandboxFilesRequest(sandboxID, path string) (interface{}, error) { + reqURL := fmt.Sprintf("%s/api/v1/sandboxes/%s/files?path=%s", openSandboxURL, sandboxID, url.QueryEscape(path)) + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + req, err := http.NewRequestWithContext(ctx, "GET", reqURL, nil) + if err != nil { + return nil, err + } + resp, err := http.DefaultClient.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode >= 400 { + body, _ := io.ReadAll(resp.Body) + return nil, fmt.Errorf("files request failed: status %d: %s", resp.StatusCode, string(body)) + } + + var result interface{} + if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { + return nil, err + } + return result, nil +} + +func sandboxReadFile(sandboxID, path string) (string, error) { + reqURL := fmt.Sprintf("%s/api/v1/sandboxes/%s/files/read?path=%s", openSandboxURL, sandboxID, url.QueryEscape(path)) + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + req, err := http.NewRequestWithContext(ctx, "GET", reqURL, nil) + if err != nil { + return "", err + } + resp, err := 
http.DefaultClient.Do(req) + if err != nil { + return "", err + } + defer resp.Body.Close() + + if resp.StatusCode >= 400 { + body, _ := io.ReadAll(resp.Body) + return "", fmt.Errorf("read file failed: status %d: %s", resp.StatusCode, string(body)) + } + + body, err := io.ReadAll(resp.Body) + if err != nil { + return "", err + } + + var structured map[string]interface{} + if err := json.Unmarshal(body, &structured); err == nil { + if content, ok := structured["content"].(string); ok { + return content, nil + } + if data, ok := structured["data"].(map[string]interface{}); ok { + if content, ok := data["content"].(string); ok { + return content, nil + } + } + } + + return string(body), nil +} + +func sandboxWriteFile(sandboxID, path, content string) error { + payload, _ := json.Marshal(map[string]interface{}{ + "entries": []map[string]interface{}{ + {"path": path, "data": content, "mode": 644}, + }, + }) + url := fmt.Sprintf("%s/api/v1/sandboxes/%s/files/write", openSandboxURL, sandboxID) + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + req, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewReader(payload)) + if err != nil { + return err + } + req.Header.Set("Content-Type", "application/json") + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return err + } + defer resp.Body.Close() + if resp.StatusCode >= 400 { + body, _ := io.ReadAll(resp.Body) + return fmt.Errorf("write file failed: status %d: %s", resp.StatusCode, string(body)) + } + return nil +} + +func sandboxRunCommand(sandboxID, command, cwd string) (map[string]interface{}, error) { + if cwd == "" { + cwd = "/home/user" + } + payload, _ := json.Marshal(map[string]interface{}{ + "cmd": command, + "cwd": cwd, + }) + url := fmt.Sprintf("%s/api/v1/sandboxes/%s/commands/run", openSandboxURL, sandboxID) + + ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute) + defer cancel() + + req, err := http.NewRequestWithContext(ctx, 
"POST", url, bytes.NewReader(payload)) + if err != nil { + return nil, err + } + req.Header.Set("Content-Type", "application/json") + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode >= 400 { + body, _ := io.ReadAll(resp.Body) + return nil, fmt.Errorf("run command failed: status %d: %s", resp.StatusCode, string(body)) + } + + var result map[string]interface{} + if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { + return nil, err + } + normalizeCommandResult(result) + return result, nil +} + +func normalizeCommandResult(result map[string]interface{}) { + if result == nil { + return + } + if _, ok := result["exit_code"]; !ok { + if exitCode, exists := result["exitCode"]; exists { + result["exit_code"] = exitCode + } + } + if _, ok := result["stdout"]; !ok { + if output, exists := result["output"]; exists { + if s, ok := output.(string); ok { + result["stdout"] = s + } + } + } + if _, ok := result["stderr"]; !ok { + result["stderr"] = "" + } +} + +func logEvent(ctx context.Context, sessionID, eventType string, data map[string]interface{}) { + if repo == nil { + return + } + payload, _ := json.Marshal(data) + repo.CreateSandboxEvent(ctx, sessionID, eventType, payload) +} + +func getEnv(key, defaultValue string) string { + if val := os.Getenv(key); val != "" { + return val + } + return defaultValue +} + +func getEnvInt(key string, defaultValue int) int { + if val := os.Getenv(key); val != "" { + var result int + if _, err := fmt.Sscanf(val, "%d", &result); err == nil { + return result + } + } + return defaultValue +} + diff --git a/backend/cmd/travel-svc/main.go b/backend/cmd/travel-svc/main.go index 258d66d..c78392e 100644 --- a/backend/cmd/travel-svc/main.go +++ b/backend/cmd/travel-svc/main.go @@ -121,6 +121,7 @@ func main() { api.Get("/poi", middleware.JWT(jwtOptional), handleSearchPOI(svc)) api.Post("/poi", middleware.JWT(jwtOptional), handleSearchPOIPost(svc)) 
api.Post("/places", middleware.JWT(jwtOptional), handleSearchPlaces(svc)) + api.Post("/validate-itinerary", middleware.JWT(jwtOptional), handleValidateItinerary(svc)) port := getEnvInt("PORT", 3035) log.Printf("travel-svc listening on :%d", port) @@ -522,6 +523,28 @@ func handleSearchPlaces(svc *travel.Service) fiber.Handler { } } +func handleValidateItinerary(svc *travel.Service) fiber.Handler { + return func(c *fiber.Ctx) error { + var req travel.ValidateItineraryRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(400).JSON(fiber.Map{"error": "Invalid request body"}) + } + if len(req.Days) == 0 { + return c.Status(400).JSON(fiber.Map{"error": "days required"}) + } + + ctx, cancel := context.WithTimeout(c.Context(), 30*time.Second) + defer cancel() + + result, err := svc.ValidateItinerary(ctx, req) + if err != nil { + return c.Status(500).JSON(fiber.Map{"error": err.Error()}) + } + + return c.JSON(result) + } +} + func getEnv(key, defaultValue string) string { if val := os.Getenv(key); val != "" { return val diff --git a/backend/deploy/docker/Dockerfile.all b/backend/deploy/docker/Dockerfile.all index ac7d15e..c7d6f88 100644 --- a/backend/deploy/docker/Dockerfile.all +++ b/backend/deploy/docker/Dockerfile.all @@ -24,7 +24,9 @@ RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/file-svc ./cmd/fi RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/thread-svc ./cmd/thread-svc RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/finance-heatmap-svc ./cmd/finance-heatmap-svc RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/learning-svc ./cmd/learning-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/sandbox-svc ./cmd/sandbox-svc RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/travel-svc ./cmd/travel-svc +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/medicine-svc ./cmd/medicine-svc RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/labs-svc 
./cmd/labs-svc RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/podcast-svc ./cmd/podcast-svc RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o /bin/admin-svc ./cmd/admin-svc @@ -43,7 +45,7 @@ COPY --from=builder /bin/* /app/ ENV SERVICE=api-gateway ENV PORT=3015 -EXPOSE 3015 3018 3005 3001 3020 3021 3002 3025 3026 3027 3035 3040 +EXPOSE 3015 3018 3005 3001 3020 3021 3002 3025 3026 3027 3034 3035 3036 3037 3040 HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \ CMD wget --no-verbose --tries=1 --spider http://localhost:${PORT}/health || exit 1 diff --git a/backend/deploy/docker/docker-compose.yml b/backend/deploy/docker/docker-compose.yml index 08b687b..e639364 100644 --- a/backend/deploy/docker/docker-compose.yml +++ b/backend/deploy/docker/docker-compose.yml @@ -5,11 +5,11 @@ services: build: context: ../.. dockerfile: deploy/docker/Dockerfile.all + env_file: ../../../.env environment: - SERVICE=auth-svc - PORT=3050 - - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable - - JWT_SECRET=${JWT_SECRET} + - AUTH_SVC_URL=http://auth-svc:3050 ports: - "3050:3050" depends_on: @@ -26,6 +26,7 @@ services: build: context: ../.. dockerfile: deploy/docker/Dockerfile.all + env_file: ../../../.env environment: - SERVICE=api-gateway - PORT=3015 @@ -39,9 +40,10 @@ services: - DISCOVER_SVC_URL=http://discover-svc:3002 - FINANCE_HEATMAP_SVC_URL=http://finance-heatmap-svc:3033 - LEARNING_SVC_URL=http://learning-svc:3034 + - SANDBOX_SVC_URL=http://sandbox-svc:3036 - TRAVEL_SVC_URL=http://travel-svc:3035 + - MEDICINE_SVC_URL=http://medicine-svc:3037 - ADMIN_SVC_URL=http://admin-svc:3040 - - JWT_SECRET=${JWT_SECRET} - REDIS_URL=redis://redis:6379 ports: - "3015:3015" @@ -52,6 +54,7 @@ services: - thread-svc - admin-svc - travel-svc + - medicine-svc - redis networks: - gooseek @@ -60,10 +63,10 @@ services: build: context: ../.. 
dockerfile: deploy/docker/Dockerfile.chat-svc + env_file: ../../../.env environment: - SERVICE=chat-svc - PORT=3005 - - JWT_SECRET=${JWT_SECRET} - AUTH_SVC_URL=http://auth-svc:3050 - MASTER_AGENTS_SVC_URL=http://agent-svc:3018 - DISCOVER_SVC_URL=http://discover-svc:3002 @@ -79,23 +82,15 @@ services: build: context: ../.. dockerfile: deploy/docker/Dockerfile.agent-svc + env_file: ../../../.env environment: - SERVICE=agent-svc - PORT=3018 - - JWT_SECRET=${JWT_SECRET} - AUTH_SVC_URL=http://auth-svc:3050 - SEARXNG_URL=http://searxng:8080 - DISCOVER_SVC_URL=http://discover-svc:3002 - CRAWL4AI_URL=http://crawl4ai:11235 - TRAVEL_SVC_URL=http://travel-svc:3035 - - TRAVELPAYOUTS_TOKEN=${TRAVELPAYOUTS_TOKEN} - - TRAVELPAYOUTS_MARKER=${TRAVELPAYOUTS_MARKER} - - OPENAI_API_KEY=${OPENAI_API_KEY} - - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} - - GEMINI_API_KEY=${GEMINI_API_KEY} - - TIMEWEB_API_BASE_URL=${TIMEWEB_API_BASE_URL} - - TIMEWEB_AGENT_ACCESS_ID=${TIMEWEB_AGENT_ACCESS_ID} - - TIMEWEB_API_KEY=${TIMEWEB_API_KEY} ports: - "3018:3018" depends_on: @@ -111,6 +106,7 @@ services: build: context: ../.. dockerfile: deploy/docker/Dockerfile.search-svc + env_file: ../../../.env environment: - SERVICE=search-svc - PORT=3001 @@ -126,12 +122,10 @@ services: build: context: ../.. dockerfile: deploy/docker/Dockerfile.all + env_file: ../../../.env environment: - SERVICE=llm-svc - PORT=3020 - - OPENAI_API_KEY=${OPENAI_API_KEY} - - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} - - GEMINI_API_KEY=${GEMINI_API_KEY} ports: - "3020:3020" networks: @@ -141,6 +135,7 @@ services: build: context: ../.. dockerfile: deploy/docker/Dockerfile.all + env_file: ../../../.env environment: - SERVICE=scraper-svc - PORT=3021 @@ -154,6 +149,7 @@ services: build: context: ../.. dockerfile: deploy/docker/Dockerfile.discover-svc + env_file: ../../../.env environment: - SERVICE=discover-svc - PORT=3002 @@ -173,12 +169,12 @@ services: build: context: ../.. 
dockerfile: deploy/docker/Dockerfile.all + env_file: ../../../.env environment: - SERVICE=collection-svc - PORT=3025 - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable - - JWT_SECRET=${JWT_SECRET} - - AUTH_SVC_URL=${AUTH_SVC_URL} + - AUTH_SVC_URL=http://auth-svc:3050 ports: - "3025:3025" depends_on: @@ -190,13 +186,11 @@ services: build: context: ../.. dockerfile: deploy/docker/Dockerfile.all + env_file: ../../../.env environment: - SERVICE=file-svc - PORT=3026 - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable - - OPENAI_API_KEY=${OPENAI_API_KEY} - - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} - - JWT_SECRET=${JWT_SECRET} - FILE_STORAGE_PATH=/data/files ports: - "3026:3026" @@ -211,13 +205,12 @@ services: build: context: ../.. dockerfile: deploy/docker/Dockerfile.all + env_file: ../../../.env environment: - SERVICE=thread-svc - PORT=3027 - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable - - OPENAI_API_KEY=${OPENAI_API_KEY} - - JWT_SECRET=${JWT_SECRET} - - AUTH_SVC_URL=${AUTH_SVC_URL} + - AUTH_SVC_URL=http://auth-svc:3050 ports: - "3027:3027" depends_on: @@ -229,12 +222,11 @@ services: build: context: ../.. dockerfile: deploy/docker/Dockerfile.all + env_file: ../../../.env environment: - SERVICE=labs-svc - PORT=3031 - LABS_SVC_PORT=3031 - - OPENAI_API_KEY=${OPENAI_API_KEY} - - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} ports: - "3031:3031" networks: @@ -244,13 +236,11 @@ services: build: context: ../.. dockerfile: deploy/docker/Dockerfile.all + env_file: ../../../.env environment: - SERVICE=podcast-svc - PORT=3032 - PODCAST_SVC_PORT=3032 - - OPENAI_API_KEY=${OPENAI_API_KEY} - - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} - - ELEVENLABS_API_KEY=${ELEVENLABS_API_KEY} ports: - "3032:3032" volumes: @@ -262,12 +252,11 @@ services: build: context: ../.. 
dockerfile: deploy/docker/Dockerfile.all + env_file: ../../../.env environment: - SERVICE=finance-heatmap-svc - PORT=3033 - REDIS_URL=redis://redis:6379 - # MOEX, Крипто, Валюты работают без URL (встроенные провайдеры). Для своих рынков — URL сервиса, GET ?market=...&range=... - - FINANCE_DATA_PROVIDER_URL=${FINANCE_DATA_PROVIDER_URL:-} ports: - "3033:3033" depends_on: @@ -279,16 +268,12 @@ services: build: context: ../.. dockerfile: deploy/docker/Dockerfile.all + env_file: ../../../.env environment: - SERVICE=learning-svc - PORT=3034 - LEARNING_SVC_PORT=3034 - - TIMEWEB_API_BASE_URL=${TIMEWEB_API_BASE_URL} - - TIMEWEB_AGENT_ACCESS_ID=${TIMEWEB_AGENT_ACCESS_ID} - - TIMEWEB_API_KEY=${TIMEWEB_API_KEY} - - DEFAULT_LLM_MODEL=${DEFAULT_LLM_MODEL:-gpt-4o-mini} - - OPENAI_API_KEY=${OPENAI_API_KEY} - - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} + - AUTH_SVC_URL=http://auth-svc:3050 - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable ports: - "3034:3034" @@ -297,31 +282,37 @@ services: networks: - gooseek + sandbox-svc: + build: + context: ../.. + dockerfile: deploy/docker/Dockerfile.all + env_file: ../../../.env + environment: + - SERVICE=sandbox-svc + - PORT=3036 + - SANDBOX_SVC_PORT=3036 + - AUTH_SVC_URL=http://auth-svc:3050 + - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable + - OPENSANDBOX_URL=http://opensandbox-server:8080 + ports: + - "3036:3036" + depends_on: + - postgres + networks: + - gooseek + travel-svc: build: context: ../.. 
dockerfile: deploy/docker/Dockerfile.all + env_file: ../../../.env environment: - SERVICE=travel-svc - PORT=3035 - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable - - JWT_SECRET=${JWT_SECRET} - AUTH_SVC_URL=http://auth-svc:3050 - # Российские API (по умолчанию) - USE_RUSSIAN_APIS=true - - TRAVELPAYOUTS_TOKEN=${TRAVELPAYOUTS_TOKEN} - - TRAVELPAYOUTS_MARKER=${TRAVELPAYOUTS_MARKER} - - TWOGIS_API_KEY=${TWOGIS_API_KEY} - # Международные API (опционально) - - AMADEUS_API_KEY=${AMADEUS_API_KEY} - - AMADEUS_API_SECRET=${AMADEUS_API_SECRET} - - OPENROUTE_API_KEY=${OPENROUTE_API_KEY} - # LLM (TimeWeb) - LLM_PROVIDER=timeweb - - LLM_MODEL=${DEFAULT_LLM_MODEL:-gpt-4o-mini} - - TIMEWEB_API_BASE_URL=${TIMEWEB_API_BASE_URL} - - TIMEWEB_AGENT_ACCESS_ID=${TIMEWEB_AGENT_ACCESS_ID} - - TIMEWEB_API_KEY=${TIMEWEB_API_KEY} ports: - "3035:3035" depends_on: @@ -330,17 +321,36 @@ services: networks: - gooseek + medicine-svc: + build: + context: ../.. + dockerfile: deploy/docker/Dockerfile.all + env_file: ../../../.env + environment: + - SERVICE=medicine-svc + - PORT=3037 + - AUTH_SVC_URL=http://auth-svc:3050 + - SEARXNG_URL=http://searxng:8080 + - LLM_PROVIDER=timeweb + ports: + - "3037:3037" + depends_on: + - auth-svc + - searxng + networks: + - gooseek + admin-svc: build: context: ../.. 
dockerfile: deploy/docker/Dockerfile.all + env_file: ../../../.env environment: - SERVICE=admin-svc - PORT=3040 - ADMIN_SVC_PORT=3040 - DATABASE_URL=postgres://gooseek:gooseek@postgres:5432/gooseek?sslmode=disable - - JWT_SECRET=${JWT_SECRET} - - AUTH_SVC_URL=${AUTH_SVC_URL} + - AUTH_SVC_URL=http://auth-svc:3050 - MINIO_ENDPOINT=minio:9000 - MINIO_ACCESS_KEY=minioadmin - MINIO_SECRET_KEY=minioadmin @@ -378,12 +388,12 @@ services: context: ../../webui dockerfile: Dockerfile args: - - NEXT_PUBLIC_DISABLED_ROUTES=${NEXT_PUBLIC_DISABLED_ROUTES:-/medicine} + NEXT_PUBLIC_ENABLED_ROUTES: ${NEXT_PUBLIC_ENABLED_ROUTES:-} + NEXT_PUBLIC_TWOGIS_API_KEY: ${NEXT_PUBLIC_TWOGIS_API_KEY:-} + env_file: ../../../.env environment: - NODE_ENV=production - API_URL=http://api-gateway:3015 - - NEXT_PUBLIC_API_URL= - - NEXT_PUBLIC_DISABLED_ROUTES=${NEXT_PUBLIC_DISABLED_ROUTES:-/medicine} ports: - "3000:3000" depends_on: diff --git a/backend/deploy/k8s/configmap.yaml b/backend/deploy/k8s/configmap.yaml index da29e6d..59a983b 100644 --- a/backend/deploy/k8s/configmap.yaml +++ b/backend/deploy/k8s/configmap.yaml @@ -16,6 +16,27 @@ data: COLLECTION_SVC_URL: "http://collection-svc:3025" FILE_SVC_URL: "http://file-svc:3026" THREAD_SVC_URL: "http://thread-svc:3027" + LEARNING_SVC_URL: "http://learning-svc:3034" + MEDICINE_SVC_URL: "http://medicine-svc:3037" + SANDBOX_SVC_URL: "http://sandbox-svc:3036" + OPENSANDBOX_URL: "http://opensandbox-server:8080" + AUTH_SVC_URL: "http://auth-svc:3050" + TRAVEL_SVC_URL: "http://travel-svc:3035" + ADMIN_SVC_URL: "http://admin-svc:3040" + DEFAULT_LLM_MODEL: "${DEFAULT_LLM_MODEL}" + DEFAULT_LLM_PROVIDER: "${DEFAULT_LLM_PROVIDER}" + TIMEWEB_API_BASE_URL: "${TIMEWEB_API_BASE_URL}" + TIMEWEB_AGENT_ACCESS_ID: "${TIMEWEB_AGENT_ACCESS_ID}" + TRAVELPAYOUTS_TOKEN: "${TRAVELPAYOUTS_TOKEN}" + NEXT_PUBLIC_ENABLED_ROUTES: "${NEXT_PUBLIC_ENABLED_ROUTES}" + NEXT_PUBLIC_TWOGIS_API_KEY: "${NEXT_PUBLIC_TWOGIS_API_KEY}" + S3_ENDPOINT: "${S3_ENDPOINT}" + S3_ACCESS_KEY: 
"${S3_ACCESS_KEY}" + S3_SECRET_KEY: "${S3_SECRET_KEY}" + S3_BUCKET: "${S3_BUCKET}" + S3_USE_SSL: "${S3_USE_SSL}" + S3_REGION: "${S3_REGION}" + S3_PUBLIC_URL: "${S3_PUBLIC_URL}" --- apiVersion: v1 kind: Secret @@ -28,5 +49,6 @@ stringData: ANTHROPIC_API_KEY: "${ANTHROPIC_API_KEY}" GEMINI_API_KEY: "${GEMINI_API_KEY}" JWT_SECRET: "${JWT_SECRET}" + TIMEWEB_API_KEY: "${TIMEWEB_API_KEY}" POSTGRES_USER: "gooseek" POSTGRES_PASSWORD: "gooseek" diff --git a/backend/deploy/k8s/deploy.sh b/backend/deploy/k8s/deploy.sh index e41c781..678254e 100755 --- a/backend/deploy/k8s/deploy.sh +++ b/backend/deploy/k8s/deploy.sh @@ -3,9 +3,24 @@ set -e SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" BACKEND_DIR="$(cd "$SCRIPT_DIR/../.." && pwd)" +ROOT_DIR="$(cd "$BACKEND_DIR/.." && pwd)" +ENV_FILE="$ROOT_DIR/.env" -echo "=== GooSeek Go Backend K8s Deployment ===" +REGISTRY="localhost:5000" +IMAGE_TAG="${IMAGE_TAG:-latest}" + +echo "=== GooSeek K8s Deployment ===" echo "Backend dir: $BACKEND_DIR" +echo "Registry: $REGISTRY" +echo "Tag: $IMAGE_TAG" + +# Load .env +if [ -f "$ENV_FILE" ]; then + echo "Loading env from $ENV_FILE" + set -a + source "$ENV_FILE" + set +a +fi # Check kubectl if ! command -v kubectl &> /dev/null; then @@ -13,17 +28,40 @@ if ! command -v kubectl &> /dev/null; then exit 1 fi -# Build Docker image +# Build and push backend image echo "" -echo "=== Building Docker image ===" +echo "=== Building Go backend image ===" cd "$BACKEND_DIR" -docker build -f deploy/docker/Dockerfile.all -t gooseek/backend:latest . +docker build -f deploy/docker/Dockerfile.all \ + -t "$REGISTRY/gooseek/backend:$IMAGE_TAG" \ + -t "$REGISTRY/gooseek/backend:latest" \ + . 
-# Load to k3s (if using k3s) -if command -v k3s &> /dev/null; then - echo "" - echo "=== Loading image to k3s ===" - docker save gooseek/backend:latest | sudo k3s ctr images import - +echo "=== Pushing backend to registry ===" +docker push "$REGISTRY/gooseek/backend:$IMAGE_TAG" +docker push "$REGISTRY/gooseek/backend:latest" + +# Build and push webui image +echo "" +echo "=== Building webui image ===" +docker build \ + -f "$BACKEND_DIR/webui/Dockerfile" \ + --build-arg "NEXT_PUBLIC_ENABLED_ROUTES=${NEXT_PUBLIC_ENABLED_ROUTES:-}" \ + --build-arg "NEXT_PUBLIC_TWOGIS_API_KEY=${NEXT_PUBLIC_TWOGIS_API_KEY:-}" \ + -t "$REGISTRY/gooseek/webui:$IMAGE_TAG" \ + -t "$REGISTRY/gooseek/webui:latest" \ + "$BACKEND_DIR/webui" + +echo "=== Pushing webui to registry ===" +docker push "$REGISTRY/gooseek/webui:$IMAGE_TAG" +docker push "$REGISTRY/gooseek/webui:latest" + +# Generate configmap/secrets from .env via envsubst +echo "" +echo "=== Generating K8s manifests from .env ===" +if command -v envsubst &> /dev/null && [ -f "$ENV_FILE" ]; then + envsubst < "$SCRIPT_DIR/configmap.yaml" > "$SCRIPT_DIR/_generated_configmap.yaml" + kubectl apply -f "$SCRIPT_DIR/_generated_configmap.yaml" -n gooseek fi # Apply kustomization @@ -32,20 +70,31 @@ echo "=== Applying K8s manifests ===" cd "$SCRIPT_DIR" kubectl apply -k . 
+# Rolling restart to pull new images +echo "" +echo "=== Rolling restart deployments ===" +kubectl -n gooseek rollout restart deployment/api-gateway +kubectl -n gooseek rollout restart deployment/webui +kubectl -n gooseek rollout restart deployment/chat-svc +kubectl -n gooseek rollout restart deployment/agent-svc +kubectl -n gooseek rollout restart deployment/discover-svc +kubectl -n gooseek rollout restart deployment/search-svc +kubectl -n gooseek rollout restart deployment/learning-svc +kubectl -n gooseek rollout restart deployment/medicine-svc +kubectl -n gooseek rollout restart deployment/travel-svc +kubectl -n gooseek rollout restart deployment/sandbox-svc + # Wait for rollout echo "" -echo "=== Waiting for deployments ===" -kubectl -n gooseek rollout status deployment/api-gateway --timeout=120s || true +echo "=== Waiting for rollouts ===" +kubectl -n gooseek rollout status deployment/api-gateway --timeout=180s || true kubectl -n gooseek rollout status deployment/chat-svc --timeout=120s || true kubectl -n gooseek rollout status deployment/agent-svc --timeout=120s || true -kubectl -n gooseek rollout status deployment/discover-svc --timeout=120s || true -kubectl -n gooseek rollout status deployment/search-svc --timeout=120s || true -kubectl -n gooseek rollout status deployment/redis --timeout=60s || true # Show status echo "" echo "=== Deployment Status ===" -kubectl -n gooseek get pods +kubectl -n gooseek get pods -o wide echo "" kubectl -n gooseek get svc echo "" @@ -53,4 +102,5 @@ kubectl -n gooseek get ingress echo "" echo "=== Done ===" -echo "API Gateway: http://localhost:3015 (NodePort) or via Ingress" +echo "API: https://api.gooseek.ru" +echo "Web: https://gooseek.ru" diff --git a/backend/deploy/k8s/gitea-deployment.yaml b/backend/deploy/k8s/gitea-deployment.yaml new file mode 100644 index 0000000..57b9d8d --- /dev/null +++ b/backend/deploy/k8s/gitea-deployment.yaml @@ -0,0 +1,44 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: gitea + 
namespace: gitea +spec: + replicas: 1 + selector: + matchLabels: + app: gitea + template: + metadata: + labels: + app: gitea + spec: + containers: + - name: gitea + image: gitea/gitea:1.22 + ports: + - containerPort: 3000 + name: http + - containerPort: 22 + name: ssh + volumeMounts: + - name: data + mountPath: /data + env: + - name: GITEA__database__DB_TYPE + value: sqlite3 + - name: GITEA__server__DOMAIN + value: git.gooseek.ru + - name: GITEA__server__ROOT_URL + value: https://git.gooseek.ru/ + resources: + requests: + memory: "256Mi" + cpu: "100m" + limits: + memory: "512Mi" + cpu: "500m" + volumes: + - name: data + persistentVolumeClaim: + claimName: gitea-data diff --git a/backend/deploy/k8s/ingress.yaml b/backend/deploy/k8s/ingress.yaml index e0be725..993e851 100644 --- a/backend/deploy/k8s/ingress.yaml +++ b/backend/deploy/k8s/ingress.yaml @@ -4,7 +4,7 @@ metadata: name: gooseek-ingress namespace: gooseek annotations: - nginx.ingress.kubernetes.io/proxy-body-size: "50m" + nginx.ingress.kubernetes.io/proxy-body-size: "100m" nginx.ingress.kubernetes.io/proxy-read-timeout: "300" nginx.ingress.kubernetes.io/proxy-send-timeout: "300" nginx.ingress.kubernetes.io/proxy-buffering: "off" @@ -14,9 +14,20 @@ spec: ingressClassName: nginx tls: - hosts: + - gooseek.ru - api.gooseek.ru secretName: gooseek-tls rules: + - host: gooseek.ru + http: + paths: + - path: / + pathType: Prefix + backend: + service: + name: webui + port: + number: 3000 - host: api.gooseek.ru http: paths: @@ -27,25 +38,3 @@ spec: name: api-gateway port: number: 3015 ---- -apiVersion: networking.k8s.io/v1 -kind: Ingress -metadata: - name: gooseek-ingress-local - namespace: gooseek - annotations: - nginx.ingress.kubernetes.io/proxy-body-size: "50m" - nginx.ingress.kubernetes.io/proxy-read-timeout: "300" -spec: - ingressClassName: nginx - rules: - - host: localhost - http: - paths: - - path: /api - pathType: Prefix - backend: - service: - name: api-gateway - port: - number: 3015 diff --git 
a/backend/deploy/k8s/kustomization.yaml b/backend/deploy/k8s/kustomization.yaml index 14128db..b4afd29 100644 --- a/backend/deploy/k8s/kustomization.yaml +++ b/backend/deploy/k8s/kustomization.yaml @@ -9,6 +9,7 @@ resources: - postgres.yaml - redis.yaml - api-gateway.yaml + - webui.yaml - chat-svc.yaml - agent-svc.yaml - search-svc.yaml @@ -18,6 +19,11 @@ resources: - collection-svc.yaml - file-svc.yaml - thread-svc.yaml + - learning-svc.yaml + - medicine-svc.yaml + - travel-svc.yaml + - sandbox-svc.yaml + - opensandbox.yaml - ingress.yaml commonLabels: @@ -26,4 +32,8 @@ commonLabels: images: - name: gooseek/backend + newName: localhost:5000/gooseek/backend + newTag: latest + - name: gooseek/webui + newName: localhost:5000/gooseek/webui newTag: latest diff --git a/backend/deploy/k8s/learning-svc.yaml b/backend/deploy/k8s/learning-svc.yaml new file mode 100644 index 0000000..7d6aa63 --- /dev/null +++ b/backend/deploy/k8s/learning-svc.yaml @@ -0,0 +1,68 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: learning-svc + namespace: gooseek + labels: + app: learning-svc + app.kubernetes.io/name: learning-svc + app.kubernetes.io/part-of: gooseek +spec: + replicas: 2 + selector: + matchLabels: + app: learning-svc + template: + metadata: + labels: + app: learning-svc + spec: + containers: + - name: learning-svc + image: gooseek/backend:latest + env: + - name: SERVICE + value: "learning-svc" + - name: PORT + value: "3034" + envFrom: + - configMapRef: + name: gooseek-config + - secretRef: + name: gooseek-secrets + ports: + - containerPort: 3034 + name: http + livenessProbe: + httpGet: + path: /health + port: 3034 + initialDelaySeconds: 15 + periodSeconds: 20 + readinessProbe: + httpGet: + path: /health + port: 3034 + initialDelaySeconds: 10 + periodSeconds: 15 + resources: + requests: + cpu: 250m + memory: 256Mi + limits: + cpu: 1000m + memory: 512Mi +--- +apiVersion: v1 +kind: Service +metadata: + name: learning-svc + namespace: gooseek +spec: + type: ClusterIP + 
selector: + app: learning-svc + ports: + - port: 3034 + targetPort: 3034 + name: http diff --git a/backend/deploy/k8s/medicine-svc.yaml b/backend/deploy/k8s/medicine-svc.yaml new file mode 100644 index 0000000..579a29e --- /dev/null +++ b/backend/deploy/k8s/medicine-svc.yaml @@ -0,0 +1,70 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: medicine-svc + namespace: gooseek + labels: + app: medicine-svc + app.kubernetes.io/name: medicine-svc + app.kubernetes.io/part-of: gooseek +spec: + replicas: 2 + selector: + matchLabels: + app: medicine-svc + template: + metadata: + labels: + app: medicine-svc + spec: + containers: + - name: medicine-svc + image: gooseek/backend:latest + env: + - name: SERVICE + value: "medicine-svc" + - name: PORT + value: "3037" + - name: LLM_PROVIDER + value: "timeweb" + envFrom: + - configMapRef: + name: gooseek-config + - secretRef: + name: gooseek-secrets + ports: + - containerPort: 3037 + name: http + livenessProbe: + httpGet: + path: /health + port: 3037 + initialDelaySeconds: 10 + periodSeconds: 20 + readinessProbe: + httpGet: + path: /health + port: 3037 + initialDelaySeconds: 5 + periodSeconds: 15 + resources: + requests: + cpu: 150m + memory: 192Mi + limits: + cpu: 700m + memory: 512Mi +--- +apiVersion: v1 +kind: Service +metadata: + name: medicine-svc + namespace: gooseek +spec: + type: ClusterIP + selector: + app: medicine-svc + ports: + - port: 3037 + targetPort: 3037 + name: http diff --git a/backend/deploy/k8s/opensandbox.yaml b/backend/deploy/k8s/opensandbox.yaml new file mode 100644 index 0000000..7f41a26 --- /dev/null +++ b/backend/deploy/k8s/opensandbox.yaml @@ -0,0 +1,165 @@ +apiVersion: v1 +kind: Namespace +metadata: + name: gooseek-sandbox + labels: + app.kubernetes.io/part-of: gooseek + purpose: user-sandboxes +--- +apiVersion: v1 +kind: ServiceAccount +metadata: + name: opensandbox-sa + namespace: gooseek +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: opensandbox-role + namespace: 
gooseek-sandbox +rules: +- apiGroups: [""] + resources: ["pods", "pods/exec", "pods/log"] + verbs: ["create", "get", "list", "watch", "delete"] +- apiGroups: ["batch"] + resources: ["jobs"] + verbs: ["create", "get", "list", "watch", "delete"] +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: opensandbox-binding + namespace: gooseek-sandbox +subjects: +- kind: ServiceAccount + name: opensandbox-sa + namespace: gooseek +roleRef: + kind: Role + name: opensandbox-role + apiGroup: rbac.authorization.k8s.io +--- +apiVersion: v1 +kind: ResourceQuota +metadata: + name: sandbox-quota + namespace: gooseek-sandbox +spec: + hard: + requests.cpu: "8" + requests.memory: "16Gi" + limits.cpu: "16" + limits.memory: "32Gi" + pods: "50" +--- +apiVersion: v1 +kind: LimitRange +metadata: + name: sandbox-limits + namespace: gooseek-sandbox +spec: + limits: + - default: + cpu: "500m" + memory: "512Mi" + defaultRequest: + cpu: "100m" + memory: "128Mi" + max: + cpu: "2" + memory: "2Gi" + type: Container +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: opensandbox-server + namespace: gooseek + labels: + app: opensandbox-server + app.kubernetes.io/name: opensandbox-server + app.kubernetes.io/part-of: gooseek +spec: + replicas: 1 + selector: + matchLabels: + app: opensandbox-server + template: + metadata: + labels: + app: opensandbox-server + spec: + serviceAccountName: opensandbox-sa + containers: + - name: opensandbox + image: registry.cn-hangzhou.aliyuncs.com/open_sandbox/server:v1.0.1 + ports: + - containerPort: 8080 + name: http + env: + - name: SANDBOX_NAMESPACE + value: "gooseek-sandbox" + - name: SANDBOX_DEFAULT_TIMEOUT + value: "30m" + - name: SANDBOX_MAX_CONCURRENT + value: "20" + resources: + requests: + cpu: 200m + memory: 256Mi + limits: + cpu: 1000m + memory: 512Mi + livenessProbe: + httpGet: + path: /health + port: 8080 + initialDelaySeconds: 15 + periodSeconds: 30 + readinessProbe: + httpGet: + path: /health + port: 8080 + 
initialDelaySeconds: 10 + periodSeconds: 15 +--- +apiVersion: v1 +kind: Service +metadata: + name: opensandbox-server + namespace: gooseek +spec: + type: ClusterIP + selector: + app: opensandbox-server + ports: + - port: 8080 + targetPort: 8080 + name: http +--- +apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy +metadata: + name: sandbox-isolation + namespace: gooseek-sandbox +spec: + podSelector: {} + policyTypes: + - Ingress + - Egress + ingress: + - from: + - namespaceSelector: + matchLabels: + app.kubernetes.io/part-of: gooseek + egress: + - to: + - namespaceSelector: + matchLabels: + app.kubernetes.io/part-of: gooseek + - to: [] + ports: + - protocol: TCP + port: 443 + - protocol: TCP + port: 80 diff --git a/backend/deploy/k8s/registry-with-auth.yaml b/backend/deploy/k8s/registry-with-auth.yaml new file mode 100644 index 0000000..84c8884 --- /dev/null +++ b/backend/deploy/k8s/registry-with-auth.yaml @@ -0,0 +1,78 @@ +apiVersion: v1 +kind: Secret +metadata: + name: registry-auth + namespace: gooseek +type: Opaque +stringData: + htpasswd: | + admin:$2y$05$A6oxuQhSjFObdjDsbjiWee.FJ62XQrc6BhLfzCMofY.9A/qQ050v6 +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: registry-config + namespace: gooseek +data: + config.yml: | + version: 0.1 + log: + level: info + storage: + filesystem: + rootdirectory: /var/lib/registry + delete: + enabled: true + http: + addr: :5000 + headers: + X-Content-Type-Options: [nosniff] + auth: + htpasswd: + realm: GooSeek Registry + path: /auth/htpasswd +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: registry + namespace: gooseek +spec: + replicas: 1 + selector: + matchLabels: + app: registry + template: + metadata: + labels: + app: registry + spec: + containers: + - name: registry + image: registry:2 + ports: + - containerPort: 5000 + volumeMounts: + - name: registry-data + mountPath: /var/lib/registry + - name: registry-config + mountPath: /etc/docker/registry + - name: registry-auth + mountPath: /auth + resources: 
+ requests: + memory: "64Mi" + cpu: "50m" + limits: + memory: "256Mi" + cpu: "200m" + volumes: + - name: registry-data + persistentVolumeClaim: + claimName: registry-pvc + - name: registry-config + configMap: + name: registry-config + - name: registry-auth + secret: + secretName: registry-auth diff --git a/backend/deploy/k8s/sandbox-svc.yaml b/backend/deploy/k8s/sandbox-svc.yaml new file mode 100644 index 0000000..f899561 --- /dev/null +++ b/backend/deploy/k8s/sandbox-svc.yaml @@ -0,0 +1,70 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: sandbox-svc + namespace: gooseek + labels: + app: sandbox-svc + app.kubernetes.io/name: sandbox-svc + app.kubernetes.io/part-of: gooseek +spec: + replicas: 1 + selector: + matchLabels: + app: sandbox-svc + template: + metadata: + labels: + app: sandbox-svc + spec: + containers: + - name: sandbox-svc + image: gooseek/backend:latest + env: + - name: SERVICE + value: "sandbox-svc" + - name: PORT + value: "3036" + - name: OPENSANDBOX_URL + value: "http://opensandbox-server:8080" + envFrom: + - configMapRef: + name: gooseek-config + - secretRef: + name: gooseek-secrets + ports: + - containerPort: 3036 + name: http + livenessProbe: + httpGet: + path: /health + port: 3036 + initialDelaySeconds: 10 + periodSeconds: 20 + readinessProbe: + httpGet: + path: /health + port: 3036 + initialDelaySeconds: 5 + periodSeconds: 15 + resources: + requests: + cpu: 100m + memory: 128Mi + limits: + cpu: 500m + memory: 256Mi +--- +apiVersion: v1 +kind: Service +metadata: + name: sandbox-svc + namespace: gooseek +spec: + type: ClusterIP + selector: + app: sandbox-svc + ports: + - port: 3036 + targetPort: 3036 + name: http diff --git a/backend/deploy/k8s/travel-svc.yaml b/backend/deploy/k8s/travel-svc.yaml new file mode 100644 index 0000000..4e0ffc4 --- /dev/null +++ b/backend/deploy/k8s/travel-svc.yaml @@ -0,0 +1,68 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: travel-svc + namespace: gooseek + labels: + app: travel-svc + 
app.kubernetes.io/name: travel-svc + app.kubernetes.io/part-of: gooseek +spec: + replicas: 2 + selector: + matchLabels: + app: travel-svc + template: + metadata: + labels: + app: travel-svc + spec: + containers: + - name: travel-svc + image: gooseek/backend:latest + env: + - name: SERVICE + value: "travel-svc" + - name: PORT + value: "3035" + envFrom: + - configMapRef: + name: gooseek-config + - secretRef: + name: gooseek-secrets + ports: + - containerPort: 3035 + name: http + livenessProbe: + httpGet: + path: /health + port: 3035 + initialDelaySeconds: 10 + periodSeconds: 20 + readinessProbe: + httpGet: + path: /health + port: 3035 + initialDelaySeconds: 5 + periodSeconds: 15 + resources: + requests: + cpu: 150m + memory: 192Mi + limits: + cpu: 700m + memory: 512Mi +--- +apiVersion: v1 +kind: Service +metadata: + name: travel-svc + namespace: gooseek +spec: + type: ClusterIP + selector: + app: travel-svc + ports: + - port: 3035 + targetPort: 3035 + name: http diff --git a/backend/deploy/k8s/webui.yaml b/backend/deploy/k8s/webui.yaml new file mode 100644 index 0000000..a8555ac --- /dev/null +++ b/backend/deploy/k8s/webui.yaml @@ -0,0 +1,63 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: webui + namespace: gooseek + labels: + app: webui + app.kubernetes.io/name: webui + app.kubernetes.io/part-of: gooseek +spec: + replicas: 2 + selector: + matchLabels: + app: webui + template: + metadata: + labels: + app: webui + spec: + containers: + - name: webui + image: gooseek/webui:latest + ports: + - containerPort: 3000 + name: http + envFrom: + - configMapRef: + name: gooseek-config + - secretRef: + name: gooseek-secrets + livenessProbe: + httpGet: + path: / + port: 3000 + initialDelaySeconds: 15 + periodSeconds: 20 + readinessProbe: + httpGet: + path: / + port: 3000 + initialDelaySeconds: 5 + periodSeconds: 10 + resources: + requests: + cpu: 100m + memory: 256Mi + limits: + cpu: 1000m + memory: 512Mi +--- +apiVersion: v1 +kind: Service +metadata: + name: webui + 
namespace: gooseek +spec: + type: ClusterIP + selector: + app: webui + ports: + - port: 3000 + targetPort: 3000 + name: http diff --git a/backend/deploy/scripts/README-nvidia-cuda.md b/backend/deploy/scripts/README-nvidia-cuda.md new file mode 100644 index 0000000..88b1762 --- /dev/null +++ b/backend/deploy/scripts/README-nvidia-cuda.md @@ -0,0 +1,49 @@ +# Установка NVIDIA + CUDA на сервере (Ubuntu 24.04) + +Скрипт `setup-nvidia-cuda-ubuntu24.sh` ставит драйвер NVIDIA и CUDA Toolkit для работы с нейросетями (PyTorch, TensorFlow и т.д.). + +## Что уже сделано на 192.168.31.59 + +- **Драйвер:** nvidia-driver-570-server (Open kernel module) +- **CUDA:** 12.6 в `/usr/local/cuda-12.6` +- **Окружение:** `/etc/profile.d/cuda.sh` — подключать: `source /etc/profile.d/cuda.sh` + +## Обязательно после установки + +**Перезагрузка** (без неё драйвер не загрузится): + +```bash +sudo reboot +``` + +После перезагрузки проверка: + +```bash +nvidia-smi +source /etc/profile.d/cuda.sh && nvcc --version +``` + +## Проверка для нейросетей (PyTorch) + +```bash +source /etc/profile.d/cuda.sh +pip install torch --index-url https://download.pytorch.org/whl/cu124 +python3 -c "import torch; print('CUDA:', torch.cuda.is_available()); print('Device:', torch.cuda.get_device_name(0) if torch.cuda.is_available() else 'N/A')" +``` + +Для CUDA 12.6 подойдёт индекс `cu124` (PyTorch совместим с 12.4+). 
+
+## Запуск скрипта вручную
+
+Если нужно переустановить или поставить на другом сервере:
+
+```bash
+scp backend/deploy/scripts/setup-nvidia-cuda-ubuntu24.sh user@server:/tmp/
+ssh -t user@server "sudo bash /tmp/setup-nvidia-cuda-ubuntu24.sh"
+sudo reboot
+```
+
+## Железо на текущем сервере
+
+- **GPU:** NVIDIA GeForce RTX 4060 Ti 16GB
+- **ОС:** Ubuntu 24.04.4 LTS, ядро 6.8.0-101-generic
diff --git a/backend/deploy/scripts/gitea-runner.service b/backend/deploy/scripts/gitea-runner.service
new file mode 100644
index 0000000..e785814
--- /dev/null
+++ b/backend/deploy/scripts/gitea-runner.service
@@ -0,0 +1,14 @@
+[Unit]
+Description=Gitea Actions Runner
+After=network.target
+
+[Service]
+Type=simple
+User=root
+WorkingDirectory=/opt/gitea-runner
+ExecStart=/usr/local/bin/act_runner daemon --config /opt/gitea-runner/config.yaml
+Restart=always
+RestartSec=10
+
+[Install]
+WantedBy=multi-user.target
diff --git a/backend/deploy/scripts/install-cicd-stack.sh b/backend/deploy/scripts/install-cicd-stack.sh
new file mode 100644
index 0000000..60e571a
--- /dev/null
+++ b/backend/deploy/scripts/install-cicd-stack.sh
@@ -0,0 +1,55 @@
+#!/bin/bash
+set -e
+
+echo "=== Installing Helm ==="
+cd /tmp
+curl -fsSL https://get.helm.sh/helm-v3.17.0-linux-amd64.tar.gz -o helm.tar.gz
+tar -zxf helm.tar.gz
+mv linux-amd64/helm /usr/local/bin/helm
+rm -rf linux-amd64 helm.tar.gz
+helm version
+
+echo "=== Adding Helm repos ==="
+helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
+helm repo add jetstack https://charts.jetstack.io
+helm repo update
+
+echo "=== Installing Nginx Ingress Controller ==="
+helm upgrade --install ingress-nginx ingress-nginx/ingress-nginx \
+  --namespace ingress-nginx --create-namespace \
+  --set controller.hostNetwork=true \
+  --set controller.kind=DaemonSet \
+  --set controller.service.type=ClusterIP \
+  --wait --timeout 300s
+
+echo "=== Installing Cert-Manager ==="
+helm upgrade --install cert-manager 
jetstack/cert-manager \
+  --namespace cert-manager --create-namespace \
+  --set crds.enabled=true \
+  --wait --timeout 300s
+
+echo "=== Creating Let's Encrypt ClusterIssuer ==="
+cat <<'EOF' | kubectl apply -f -
+apiVersion: cert-manager.io/v1
+kind: ClusterIssuer
+metadata:
+  name: letsencrypt-prod
+spec:
+  acme:
+    server: https://acme-v02.api.letsencrypt.org/directory
+    email: admin@gooseek.ru
+    privateKeySecretRef:
+      name: letsencrypt-prod
+    solvers:
+    - http01:
+        ingress:
+          class: nginx
+EOF
+
+echo "=== Creating namespaces ==="
+kubectl create namespace gooseek --dry-run=client -o yaml | kubectl apply -f -
+kubectl create namespace gitea --dry-run=client -o yaml | kubectl apply -f -
+
+echo "=== Done! Checking status ==="
+kubectl get nodes
+kubectl get pods -A
diff --git a/backend/deploy/scripts/install-gitea-manifest.sh b/backend/deploy/scripts/install-gitea-manifest.sh
new file mode 100644
index 0000000..7ff7c38
--- /dev/null
+++ b/backend/deploy/scripts/install-gitea-manifest.sh
@@ -0,0 +1,248 @@
+#!/bin/bash
+set -e
+export KUBECONFIG=/etc/rancher/k3s/k3s.yaml
+
+echo "=== Installing Gitea via manifests ==="
+
+cat <<EOF | kubectl apply -f -
+apiVersion: v1
+kind: PersistentVolumeClaim
+metadata:
+  name: gitea-data
+  namespace: gitea
+spec:
+  accessModes:
+    - ReadWriteOnce
+  resources:
+    requests:
+      storage: 10Gi
+---
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: gitea-config
+  namespace: gitea
+data:
+  app.ini: |
+    APP_NAME = GooSeek Git
+    RUN_MODE = prod
+
+    [server]
+    DOMAIN = git.gooseek.ru
+    ROOT_URL = https://git.gooseek.ru/
+    HTTP_PORT = 3000
+    SSH_PORT = 22
+    SSH_DOMAIN = git.gooseek.ru
+
+    [database]
+    DB_TYPE = sqlite3
+    PATH = /data/gitea/gitea.db
+
+    [security]
+    INSTALL_LOCK = true
+    SECRET_KEY = $(openssl rand -hex 32)
+
+    [service]
+    DISABLE_REGISTRATION = false
+    REQUIRE_SIGNIN_VIEW = false
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: gitea
+  namespace: gitea
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      app: 
gitea
+  template:
+    metadata:
+      labels:
+        app: gitea
+    spec:
+      containers:
+      - name: gitea
+        image: gitea/gitea:1.22
+        ports:
+        - containerPort: 3000
+          name: http
+        - containerPort: 22
+          name: ssh
+        volumeMounts:
+        - name: data
+          mountPath: /data
+        - name: config
+          mountPath: /data/gitea/conf
+        env:
+        - name: GITEA__database__DB_TYPE
+          value: sqlite3
+        - name: GITEA__database__PATH
+          value: /data/gitea/gitea.db
+        - name: GITEA__server__DOMAIN
+          value: git.gooseek.ru
+        - name: GITEA__server__ROOT_URL
+          value: https://git.gooseek.ru/
+        - name: GITEA__security__INSTALL_LOCK
+          value: "true"
+        resources:
+          requests:
+            memory: "256Mi"
+            cpu: "100m"
+          limits:
+            memory: "512Mi"
+            cpu: "500m"
+      volumes:
+      - name: data
+        persistentVolumeClaim:
+          claimName: gitea-data
+      - name: config
+        configMap:
+          name: gitea-config
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: gitea
+  namespace: gitea
+spec:
+  selector:
+    app: gitea
+  ports:
+  - port: 3000
+    targetPort: 3000
+    name: http
+  - port: 22
+    targetPort: 22
+    name: ssh
+---
+apiVersion: networking.k8s.io/v1
+kind: Ingress
+metadata:
+  name: gitea-ingress
+  namespace: gitea
+  annotations:
+    cert-manager.io/cluster-issuer: letsencrypt-prod
+spec:
+  ingressClassName: nginx
+  tls:
+  - hosts:
+    - git.gooseek.ru
+    secretName: gitea-tls
+  rules:
+  - host: git.gooseek.ru
+    http:
+      paths:
+      - path: /
+        pathType: Prefix
+        backend:
+          service:
+            name: gitea
+            port:
+              number: 3000
+EOF
+
+echo "=== Installing Docker Registry ==="
+cat <<'EOF' | kubectl apply -f -
+apiVersion: v1
+kind: PersistentVolumeClaim
+metadata:
+  name: registry-pvc
+  namespace: gooseek
+spec:
+  accessModes:
+    - ReadWriteOnce
+  resources:
+    requests:
+      storage: 20Gi
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: registry
+  namespace: gooseek
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      app: registry
+  template:
+    metadata:
+      labels:
+        app: registry
+    spec:
+      containers:
+      - name: registry
+        image: registry:2
+        ports:
+        - containerPort: 
5000 + volumeMounts: + - name: registry-data + mountPath: /var/lib/registry + env: + - name: REGISTRY_STORAGE_DELETE_ENABLED + value: "true" + resources: + requests: + memory: "64Mi" + cpu: "50m" + limits: + memory: "256Mi" + cpu: "200m" + volumes: + - name: registry-data + persistentVolumeClaim: + claimName: registry-pvc +--- +apiVersion: v1 +kind: Service +metadata: + name: registry + namespace: gooseek +spec: + selector: + app: registry + ports: + - port: 5000 + targetPort: 5000 +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: registry-ingress + namespace: gooseek + annotations: + cert-manager.io/cluster-issuer: letsencrypt-prod + nginx.ingress.kubernetes.io/proxy-body-size: "0" +spec: + ingressClassName: nginx + tls: + - hosts: + - registry.gooseek.ru + secretName: registry-tls + rules: + - host: registry.gooseek.ru + http: + paths: + - path: / + pathType: Prefix + backend: + service: + name: registry + port: + number: 5000 +EOF + +echo "=== Waiting for deployments ===" +kubectl -n gitea rollout status deployment/gitea --timeout=180s || true +kubectl -n gooseek rollout status deployment/registry --timeout=120s || true + +echo "=== Status ===" +kubectl get pods -A +kubectl get ingress -A +kubectl get certificates -A + +echo "" +echo "=== DONE ===" +echo "Gitea: https://git.gooseek.ru (first user to register will be admin)" +echo "Registry: https://registry.gooseek.ru" diff --git a/backend/deploy/scripts/install-gitea-registry.sh b/backend/deploy/scripts/install-gitea-registry.sh new file mode 100644 index 0000000..caa841f --- /dev/null +++ b/backend/deploy/scripts/install-gitea-registry.sh @@ -0,0 +1,130 @@ +#!/bin/bash +set -e +export KUBECONFIG=/etc/rancher/k3s/k3s.yaml + +echo "=== Adding Gitea Helm repo ===" +helm repo add gitea-charts https://dl.gitea.com/charts/ +helm repo update + +echo "=== Installing Gitea ===" +helm upgrade --install gitea gitea-charts/gitea \ + --namespace gitea \ + --set gitea.admin.username=admin \ + --set 
gitea.admin.password=GooSeek2026! \ + --set gitea.admin.email=admin@gooseek.ru \ + --set persistence.enabled=true \ + --set persistence.size=10Gi \ + --set postgresql-ha.enabled=false \ + --set postgresql.enabled=false \ + --set redis-cluster.enabled=false \ + --set redis.enabled=false \ + --set gitea.config.database.DB_TYPE=sqlite3 \ + --set gitea.config.server.ROOT_URL=https://git.gooseek.ru \ + --set gitea.config.server.DOMAIN=git.gooseek.ru \ + --set ingress.enabled=true \ + --set ingress.className=nginx \ + --set ingress.hosts[0].host=git.gooseek.ru \ + --set ingress.hosts[0].paths[0].path=/ \ + --set ingress.hosts[0].paths[0].pathType=Prefix \ + --set ingress.tls[0].secretName=gitea-tls \ + --set ingress.tls[0].hosts[0]=git.gooseek.ru \ + --set ingress.annotations."cert-manager\.io/cluster-issuer"=letsencrypt-prod \ + --wait --timeout 300s + +echo "=== Installing Docker Registry ===" +cat <<'EOF' | kubectl apply -f - +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: registry-pvc + namespace: gooseek +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 20Gi +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: registry + namespace: gooseek +spec: + replicas: 1 + selector: + matchLabels: + app: registry + template: + metadata: + labels: + app: registry + spec: + containers: + - name: registry + image: registry:2 + ports: + - containerPort: 5000 + volumeMounts: + - name: registry-data + mountPath: /var/lib/registry + env: + - name: REGISTRY_STORAGE_DELETE_ENABLED + value: "true" + volumes: + - name: registry-data + persistentVolumeClaim: + claimName: registry-pvc +--- +apiVersion: v1 +kind: Service +metadata: + name: registry + namespace: gooseek +spec: + selector: + app: registry + ports: + - port: 5000 + targetPort: 5000 +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: registry-ingress + namespace: gooseek + annotations: + cert-manager.io/cluster-issuer: letsencrypt-prod + 
nginx.ingress.kubernetes.io/proxy-body-size: "0" +spec: + ingressClassName: nginx + tls: + - hosts: + - registry.gooseek.ru + secretName: registry-tls + rules: + - host: registry.gooseek.ru + http: + paths: + - path: / + pathType: Prefix + backend: + service: + name: registry + port: + number: 5000 +EOF + +echo "=== Waiting for pods ===" +kubectl -n gitea wait --for=condition=Ready pod -l app.kubernetes.io/name=gitea --timeout=300s || true +kubectl -n gooseek wait --for=condition=Ready pod -l app=registry --timeout=120s || true + +echo "=== Final status ===" +kubectl get pods -A +kubectl get ingress -A +kubectl get certificates -A + +echo "" +echo "=== DONE ===" +echo "Gitea: https://git.gooseek.ru (admin / GooSeek2026!)" +echo "Registry: https://registry.gooseek.ru" diff --git a/backend/deploy/scripts/setup-gitea-runner.sh b/backend/deploy/scripts/setup-gitea-runner.sh new file mode 100644 index 0000000..48e35c2 --- /dev/null +++ b/backend/deploy/scripts/setup-gitea-runner.sh @@ -0,0 +1,65 @@ +#!/bin/bash +set -e +export KUBECONFIG=/etc/rancher/k3s/k3s.yaml + +echo "=== Enabling Gitea Actions ===" +# Update Gitea config to enable actions +kubectl -n gitea exec deploy/gitea -- sh -c ' +cat >> /data/gitea/conf/app.ini << EOF + +[actions] +ENABLED = true +DEFAULT_ACTIONS_URL = https://github.com +EOF +' + +echo "=== Restarting Gitea ===" +kubectl -n gitea rollout restart deploy/gitea +kubectl -n gitea rollout status deploy/gitea --timeout=120s + +echo "=== Installing Act Runner ===" +cd /tmp +curl -sL https://gitea.com/gitea/act_runner/releases/download/v0.2.11/act_runner-0.2.11-linux-amd64 -o act_runner +chmod +x act_runner +mv act_runner /usr/local/bin/ + +echo "=== Creating runner service ===" +mkdir -p /opt/gitea-runner +cd /opt/gitea-runner + +# Create config +cat > config.yaml << 'EOF' +log: + level: info +runner: + file: .runner + capacity: 2 + timeout: 3h + insecure: false + fetch_timeout: 5s + fetch_interval: 2s + labels: + - 
ubuntu-latest:docker://node:20-bullseye + - ubuntu-22.04:docker://node:20-bullseye +cache: + enabled: true + dir: /opt/gitea-runner/cache +container: + network: host + privileged: true + options: + workdir_parent: + valid_volumes: + - /opt/gitea-runner/cache +host: + workdir_parent: /opt/gitea-runner/workspace +EOF + +echo "=== Runner installed ===" +echo "" +echo "NEXT STEPS:" +echo "1. Go to https://git.gooseek.ru and register/login as admin" +echo "2. Go to Site Administration -> Actions -> Runners" +echo "3. Click 'Create new Runner' and copy the registration token" +echo "4. Run: act_runner register --config /opt/gitea-runner/config.yaml --instance https://git.gooseek.ru --token YOUR_TOKEN" +echo "5. Run: act_runner daemon --config /opt/gitea-runner/config.yaml" diff --git a/backend/deploy/scripts/setup-k3s-cicd.sh b/backend/deploy/scripts/setup-k3s-cicd.sh new file mode 100644 index 0000000..580f9f8 --- /dev/null +++ b/backend/deploy/scripts/setup-k3s-cicd.sh @@ -0,0 +1,60 @@ +#!/bin/bash +set -e + +echo "=== Installing K3s ===" +curl -sfL https://get.k3s.io | INSTALL_K3S_EXEC="--disable traefik --disable servicelb --tls-san gooseek.ru --tls-san 5.187.77.89" sh - + +echo "=== Waiting for K3s to be ready ===" +sleep 10 +sudo k3s kubectl wait --for=condition=Ready node --all --timeout=120s + +echo "=== Setting up kubectl for user ===" +mkdir -p ~/.kube +sudo cp /etc/rancher/k3s/k3s.yaml ~/.kube/config +sudo chown $(id -u):$(id -g) ~/.kube/config +chmod 600 ~/.kube/config + +echo "=== Installing Helm ===" +curl -fsSL https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 | bash + +echo "=== Installing Nginx Ingress Controller ===" +helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx +helm repo update +helm install ingress-nginx ingress-nginx/ingress-nginx \ + --namespace ingress-nginx --create-namespace \ + --set controller.service.type=NodePort \ + --set controller.service.nodePorts.http=80 \ + --set 
controller.service.nodePorts.https=443 \ + --set controller.hostNetwork=true \ + --set controller.kind=DaemonSet + +echo "=== Installing Cert-Manager ===" +helm repo add jetstack https://charts.jetstack.io +helm install cert-manager jetstack/cert-manager \ + --namespace cert-manager --create-namespace \ + --set crds.enabled=true + +echo "=== Waiting for cert-manager ===" +kubectl -n cert-manager wait --for=condition=Available deployment --all --timeout=120s + +echo "=== Creating Let's Encrypt ClusterIssuer ===" +cat < /etc/modprobe.d/blacklist-nvidia-nouveau.conf << 'EOF' +blacklist nouveau +options nouveau modeset=0 +EOF +update-initramfs -u 2>/dev/null || true + +echo "=== 2/6 Обновление системы и установка зависимостей ===" +export DEBIAN_FRONTEND=noninteractive +apt-get update -qq +apt-get install -y build-essential wget + +echo "=== 3/6 Установка драйвера NVIDIA (рекомендуемый для железа) ===" +# Для RTX 4060 Ti подойдёт драйвер 550+; ubuntu-drivers выберет подходящий +apt-get install -y ubuntu-drivers-common +DRIVER=$(ubuntu-drivers list 2>/dev/null | grep -m1 "nvidia-driver-" || echo "nvidia-driver-560") +if apt-cache show "$DRIVER" &>/dev/null; then + apt-get install -y "$DRIVER" +else + apt-get install -y nvidia-driver-560 || apt-get install -y nvidia-driver-550 || ubuntu-drivers autoinstall +fi + +echo "=== 4/6 Добавление репозитория CUDA и установка CUDA Toolkit 12 ===" +KEYRING_DEB="/tmp/cuda-keyring.deb" +wget -q -O "$KEYRING_DEB" "https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2404/x86_64/cuda-keyring_1.1-1_all.deb" || { + echo "Ошибка загрузки cuda-keyring. Проверьте сеть." 
+ exit 1 +} +dpkg -i "$KEYRING_DEB" +rm -f "$KEYRING_DEB" +apt-get update -qq +apt-get install -y cuda-toolkit-12-6 || apt-get install -y cuda-toolkit-12-5 || apt-get install -y cuda + +echo "=== 5/6 Настройка окружения CUDA (PATH и библиотеки) ===" +# Ubuntu/NVIDIA repo ставит в /usr/local/cuda-12.6, симлинк /usr/local/cuda создаётся пакетом cuda +for CUDA_ROOT in /usr/local/cuda /usr/local/cuda-12.6 /usr/local/cuda-12.5; do + if [ -d "$CUDA_ROOT" ]; then + cat > /etc/profile.d/cuda.sh << EOF +# CUDA for neural networks +export PATH=$CUDA_ROOT/bin:\$PATH +export LD_LIBRARY_PATH=$CUDA_ROOT/lib64:\${LD_LIBRARY_PATH:+:\$LD_LIBRARY_PATH} +EOF + chmod 644 /etc/profile.d/cuda.sh + echo "Файл /etc/profile.d/cuda.sh создан (CUDA_ROOT=$CUDA_ROOT)." + break + fi +done +if [ ! -f /etc/profile.d/cuda.sh ]; then + echo "Предупреждение: каталог CUDA не найден. После установки cuda-toolkit выполните: sudo bash -c 'echo \"export PATH=/usr/local/cuda/bin:\\\$PATH\" > /etc/profile.d/cuda.sh'" +fi + +echo "=== 6/6 Готово ===" +echo "" +echo "Драйвер NVIDIA и CUDA Toolkit установлены." 
+echo "ОБЯЗАТЕЛЬНО перезагрузите сервер для загрузки драйвера:" +echo " sudo reboot" +echo "" +echo "После перезагрузки проверьте:" +echo " nvidia-smi" +echo " source /etc/profile.d/cuda.sh && nvcc --version" +echo " python3 -c 'import torch; print(torch.cuda.is_available())' # если ставите PyTorch" diff --git a/backend/internal/agent/learning_orchestrator.go b/backend/internal/agent/learning_orchestrator.go new file mode 100644 index 0000000..9a525a1 --- /dev/null +++ b/backend/internal/agent/learning_orchestrator.go @@ -0,0 +1,836 @@ +package agent + +import ( + "context" + "encoding/json" + "fmt" + "log" + "strings" + "sync" + "time" + + "github.com/gooseek/backend/internal/llm" + "github.com/gooseek/backend/internal/session" + "github.com/gooseek/backend/internal/types" + "github.com/google/uuid" + "golang.org/x/sync/errgroup" +) + +type LearningIntent string + +const ( + IntentTaskGenerate LearningIntent = "task_generate" + IntentVerify LearningIntent = "verify" + IntentPlan LearningIntent = "plan" + IntentQuiz LearningIntent = "quiz" + IntentExplain LearningIntent = "explain" + IntentQuestion LearningIntent = "question" + IntentOnboarding LearningIntent = "onboarding" + IntentProgress LearningIntent = "progress" +) + +type LearningBrief struct { + Intent LearningIntent `json:"intent"` + Topic string `json:"topic"` + Difficulty string `json:"difficulty"` + Language string `json:"language"` + TaskType string `json:"task_type"` + SpecificRequest string `json:"specific_request"` + CodeSubmitted string `json:"code_submitted"` + NeedsContext bool `json:"needs_context"` +} + +type LearningDraft struct { + Brief *LearningBrief + ProfileContext string + CourseContext string + PlanContext string + TaskContext string + GeneratedTask *GeneratedTask + GeneratedQuiz *GeneratedQuiz + GeneratedPlan *GeneratedPlan + Evaluation *TaskEvaluation + Explanation string + Phase string +} + +type GeneratedTask struct { + Title string `json:"title"` + Difficulty string 
`json:"difficulty"` + EstimatedMin int `json:"estimated_minutes"` + Description string `json:"description"` + Requirements []string `json:"requirements"` + Acceptance []string `json:"acceptance_criteria"` + Hints []string `json:"hints"` + SandboxSetup string `json:"sandbox_setup"` + VerifyCmd string `json:"verify_command"` + StarterCode string `json:"starter_code"` + TestCode string `json:"test_code"` + SkillsTrained []string `json:"skills_trained"` +} + +type GeneratedQuiz struct { + Title string `json:"title"` + Questions []QuizQuestion `json:"questions"` +} + +type QuizQuestion struct { + Question string `json:"question"` + Options []string `json:"options"` + Correct int `json:"correct_index"` + Explain string `json:"explanation"` +} + +type GeneratedPlan struct { + Modules []PlanModule `json:"modules"` + TotalHours int `json:"total_hours"` + DifficultyAdjusted string `json:"difficulty_adjusted"` + PersonalizationNote string `json:"personalization_notes"` +} + +type PlanModule struct { + Index int `json:"index"` + Title string `json:"title"` + Description string `json:"description"` + Skills []string `json:"skills"` + EstimatedHrs int `json:"estimated_hours"` + PracticeFocus string `json:"practice_focus"` + TaskCount int `json:"task_count"` +} + +type TaskEvaluation struct { + Score int `json:"score"` + MaxScore int `json:"max_score"` + Passed bool `json:"passed"` + Strengths []string `json:"strengths"` + Issues []string `json:"issues"` + Suggestions []string `json:"suggestions"` + CodeQuality string `json:"code_quality"` +} + +// RunLearningOrchestrator is the multi-agent pipeline for learning chat. +// Flow: Intent Classifier → Context Collector → Specialized Agent → Widget Emission. 
+func RunLearningOrchestrator(ctx context.Context, sess *session.Session, input OrchestratorInput) error { + researchBlockID := uuid.New().String() + sess.EmitBlock(types.NewResearchBlock(researchBlockID)) + + emitPhase(sess, researchBlockID, "reasoning", "Анализирую запрос...") + + // --- Phase 1: Intent Classification via LLM --- + brief, err := runLearningPlanner(ctx, input) + if err != nil { + log.Printf("[learning] planner error: %v, falling back to keyword", err) + brief = fallbackClassify(input.FollowUp) + } + + log.Printf("[learning] intent=%s topic=%q difficulty=%s", brief.Intent, brief.Topic, brief.Difficulty) + + draft := &LearningDraft{ + Brief: brief, + Phase: "collecting", + } + + // --- Phase 2: Parallel Context Collection --- + emitPhase(sess, researchBlockID, "searching", phaseSearchLabel(brief.Intent)) + + var mu sync.Mutex + g, gctx := errgroup.WithContext(ctx) + + g.Go(func() error { + profile := extractProfileContext(input) + mu.Lock() + draft.ProfileContext = profile + mu.Unlock() + return nil + }) + + g.Go(func() error { + course := extractCourseContext(input) + mu.Lock() + draft.CourseContext = course + mu.Unlock() + return nil + }) + + g.Go(func() error { + plan := extractPlanContext(input) + mu.Lock() + draft.PlanContext = plan + mu.Unlock() + return nil + }) + + _ = g.Wait() + + // --- Phase 3: Specialized Agent based on intent --- + emitPhase(sess, researchBlockID, "reasoning", phaseAgentLabel(brief.Intent)) + + switch brief.Intent { + case IntentTaskGenerate: + task, err := runTaskGeneratorAgent(gctx, input, draft) + if err != nil { + log.Printf("[learning] task generator error: %v", err) + } else { + draft.GeneratedTask = task + } + + case IntentVerify: + eval, err := runCodeReviewAgent(gctx, input, draft) + if err != nil { + log.Printf("[learning] code review error: %v", err) + } else { + draft.Evaluation = eval + } + + case IntentQuiz: + quiz, err := runQuizGeneratorAgent(gctx, input, draft) + if err != nil { + log.Printf("[learning] 
quiz generator error: %v", err) + } else { + draft.GeneratedQuiz = quiz + } + + case IntentPlan: + plan, err := runPlanBuilderAgent(gctx, input, draft) + if err != nil { + log.Printf("[learning] plan builder error: %v", err) + } else { + draft.GeneratedPlan = plan + } + } + + sess.EmitResearchComplete() + + // --- Phase 4: Generate response text + emit widgets --- + emitLearningResponse(ctx, sess, input, draft) + + sess.EmitEnd() + return nil +} + +// --- Phase 1: LLM-based Intent Classification --- + +func runLearningPlanner(ctx context.Context, input OrchestratorInput) (*LearningBrief, error) { + plannerCtx, cancel := context.WithTimeout(ctx, 20*time.Second) + defer cancel() + + prompt := fmt.Sprintf(`Ты — классификатор запросов в образовательной платформе. Определи намерение ученика. + +Сообщение ученика: "%s" + +Контекст (если есть): %s + +Определи intent и параметры. Ответь строго JSON: +{ + "intent": "task_generate|verify|plan|quiz|explain|question|onboarding|progress", + "topic": "тема если определена", + "difficulty": "beginner|intermediate|advanced|expert", + "language": "язык программирования если есть", + "task_type": "code|test|review|deploy|debug|refactor|design", + "specific_request": "что конкретно просит", + "code_submitted": "код если ученик прислал код для проверки", + "needs_context": true +} + +Правила: +- "задание/задачу/практику/упражнение" → task_generate +- "проверь/оцени/ревью/посмотри код" → verify +- "план/программу/roadmap/что учить" → plan +- "тест/квиз/экзамен" → quiz +- "объясни/расскажи/как работает" → explain +- "прогресс/результаты/статистика" → progress +- Если код в сообщении и просьба проверить → verify + code_submitted +- По умолчанию → question`, input.FollowUp, truncate(input.Config.SystemInstructions, 500)) + + result, err := input.Config.LLM.GenerateText(plannerCtx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + if err != nil { + return nil, err + } + + jsonStr := 
extractJSONFromLLM(result) + var brief LearningBrief + if err := json.Unmarshal([]byte(jsonStr), &brief); err != nil { + return nil, fmt.Errorf("parse brief: %w", err) + } + + if brief.Intent == "" { + brief.Intent = IntentQuestion + } + return &brief, nil +} + +func fallbackClassify(query string) *LearningBrief { + q := strings.ToLower(query) + brief := &LearningBrief{Intent: IntentQuestion, NeedsContext: true} + + switch { + case containsAny(q, "задание", "задачу", "практик", "упражнение", "тренир", "дай задач"): + brief.Intent = IntentTaskGenerate + case containsAny(q, "провер", "оцени", "ревью", "посмотри код", "правильно ли", "code review"): + brief.Intent = IntentVerify + case containsAny(q, "план", "програм", "roadmap", "чему учить", "маршрут обучения"): + brief.Intent = IntentPlan + case containsAny(q, "тест", "экзамен", "квиз", "проверочн"): + brief.Intent = IntentQuiz + case containsAny(q, "объясни", "расскажи", "как работает", "что такое", "зачем нужн"): + brief.Intent = IntentExplain + case containsAny(q, "прогресс", "результат", "статистик", "сколько сделал"): + brief.Intent = IntentProgress + } + + return brief +} + +// --- Phase 2: Context Extraction --- + +func extractProfileContext(input OrchestratorInput) string { + if input.Config.UserMemory != "" { + return input.Config.UserMemory + } + return "" +} + +func extractCourseContext(input OrchestratorInput) string { + si := input.Config.SystemInstructions + if idx := strings.Index(si, "Текущий курс:"); idx >= 0 { + end := strings.Index(si[idx:], "\n") + if end > 0 { + return si[idx : idx+end] + } + return si[idx:] + } + return "" +} + +func extractPlanContext(input OrchestratorInput) string { + si := input.Config.SystemInstructions + if idx := strings.Index(si, "План обучения:"); idx >= 0 { + return si[idx:] + } + return "" +} + +// --- Phase 3: Specialized Agents --- + +func runTaskGeneratorAgent(ctx context.Context, input OrchestratorInput, draft *LearningDraft) (*GeneratedTask, error) { + 
agentCtx, cancel := context.WithTimeout(ctx, 45*time.Second) + defer cancel() + + contextBlock := buildTaskGenContext(draft) + + prompt := fmt.Sprintf(`Ты — ведущий разработчик в крупной IT-компании в РФ. Генерируй боевое практическое задание. + +%s + +Тема: %s +Сложность: %s +Язык: %s +Тип: %s + +ТРЕБОВАНИЯ К ЗАДАНИЮ: +1. Задание должно быть РЕАЛЬНЫМ — как задача из production проекта в российской IT-компании +2. Чёткая постановка: что сделать, какие входные данные, что на выходе +3. Обязательно: тесты, обработка ошибок, edge cases +4. Код-стайл: линтеры, форматирование, документация +5. Если backend: REST API, middleware, валидация, логирование +6. Если frontend: компоненты, стейт-менеджмент, адаптивность +7. Starter code должен компилироваться/запускаться +8. Verify command должен реально проверять решение + +Ответь строго JSON: +{ + "title": "Название задания", + "difficulty": "beginner|intermediate|advanced", + "estimated_minutes": 30, + "description": "Подробное описание задачи в markdown", + "requirements": ["Требование 1", "Требование 2"], + "acceptance_criteria": ["Критерий приёмки 1", "Критерий приёмки 2"], + "hints": ["Подсказка 1 (скрытая)"], + "sandbox_setup": "команды для подготовки окружения (apt install, npm init, etc.)", + "verify_command": "команда для проверки решения (go test ./... 
или npm test)", + "starter_code": "начальный код проекта", + "test_code": "код тестов для автопроверки", + "skills_trained": ["навык1", "навык2"] +}`, contextBlock, orDefault(draft.Brief.Topic, "по текущему курсу"), + orDefault(draft.Brief.Difficulty, "intermediate"), + orDefault(draft.Brief.Language, "go"), + orDefault(draft.Brief.TaskType, "code")) + + result, err := input.Config.LLM.GenerateText(agentCtx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + if err != nil { + return nil, err + } + + jsonStr := extractJSONFromLLM(result) + var task GeneratedTask + if err := json.Unmarshal([]byte(jsonStr), &task); err != nil { + return nil, fmt.Errorf("parse task: %w", err) + } + return &task, nil +} + +func runCodeReviewAgent(ctx context.Context, input OrchestratorInput, draft *LearningDraft) (*TaskEvaluation, error) { + agentCtx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + + codeToReview := draft.Brief.CodeSubmitted + if codeToReview == "" { + codeToReview = input.Config.FileContext + } + if codeToReview == "" { + for _, m := range input.ChatHistory { + if m.Role == "user" && (strings.Contains(m.Content, "```") || strings.Contains(m.Content, "func ") || strings.Contains(m.Content, "function ")) { + codeToReview = m.Content + } + } + } + + if codeToReview == "" { + return &TaskEvaluation{ + Score: 0, + MaxScore: 100, + Issues: []string{"Код для проверки не найден. Пришлите код в сообщении или загрузите файл."}, + }, nil + } + + prompt := fmt.Sprintf(`Ты — senior code reviewer в крупной IT-компании. Проведи строгое ревью кода. + +Код для проверки: +%s + +Контекст задания: %s + +Оцени по критериям: +1. Корректность (работает ли код правильно) +2. Код-стайл (форматирование, именование, идиоматичность) +3. Обработка ошибок (edge cases, panic recovery, валидация) +4. Тесты (есть ли, покрытие, качество) +5. Безопасность (SQL injection, XSS, утечки данных) +6. 
Производительность (O-нотация, утечки памяти, N+1) + +Ответь строго JSON: +{ + "score": 75, + "max_score": 100, + "passed": true, + "strengths": ["Что хорошо"], + "issues": ["Что исправить"], + "suggestions": ["Рекомендации по улучшению"], + "code_quality": "good|acceptable|needs_work|poor" +}`, truncate(codeToReview, 6000), truncate(draft.TaskContext, 1000)) + + result, err := input.Config.LLM.GenerateText(agentCtx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + if err != nil { + return nil, err + } + + jsonStr := extractJSONFromLLM(result) + var eval TaskEvaluation + if err := json.Unmarshal([]byte(jsonStr), &eval); err != nil { + return nil, fmt.Errorf("parse eval: %w", err) + } + if eval.MaxScore == 0 { + eval.MaxScore = 100 + } + return &eval, nil +} + +func runQuizGeneratorAgent(ctx context.Context, input OrchestratorInput, draft *LearningDraft) (*GeneratedQuiz, error) { + agentCtx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + + prompt := fmt.Sprintf(`Ты — методолог обучения. Создай тест для проверки знаний. + +Тема: %s +Сложность: %s +Контекст курса: %s + +Создай 5 вопросов. Вопросы должны проверять ПОНИМАНИЕ, а не зубрёжку. +Включи: практические сценарии, код-сниппеты, архитектурные решения. 
+ +Ответь строго JSON: +{ + "title": "Название теста", + "questions": [ + { + "question": "Текст вопроса (может содержать код в markdown)", + "options": ["Вариант A", "Вариант B", "Вариант C", "Вариант D"], + "correct_index": 0, + "explanation": "Почему этот ответ правильный" + } + ] +}`, orDefault(draft.Brief.Topic, "текущий модуль"), + orDefault(draft.Brief.Difficulty, "intermediate"), + truncate(draft.CourseContext, 1000)) + + result, err := input.Config.LLM.GenerateText(agentCtx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + if err != nil { + return nil, err + } + + jsonStr := extractJSONFromLLM(result) + var quiz GeneratedQuiz + if err := json.Unmarshal([]byte(jsonStr), &quiz); err != nil { + return nil, fmt.Errorf("parse quiz: %w", err) + } + return &quiz, nil +} + +func runPlanBuilderAgent(ctx context.Context, input OrchestratorInput, draft *LearningDraft) (*GeneratedPlan, error) { + agentCtx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + + prompt := fmt.Sprintf(`Ты — ведущий методолог обучения в IT. Построй персональный план обучения. + +Профиль ученика: %s +Текущий курс: %s +Текущий прогресс: %s + +Требования: +1. Минимум теории, максимум боевой практики +2. Каждый модуль = практическое задание из реального проекта +3. Прогрессия сложности от текущего уровня ученика +4. Учитывай уже пройденные темы +5. 
Задания как в российских IT-компаниях + +Ответь строго JSON: +{ + "modules": [ + { + "index": 0, + "title": "Название модуля", + "description": "Что изучаем и делаем", + "skills": ["навык1"], + "estimated_hours": 4, + "practice_focus": "Конкретная практическая задача", + "task_count": 3 + } + ], + "total_hours": 40, + "difficulty_adjusted": "intermediate", + "personalization_notes": "Как план адаптирован под ученика" +}`, truncate(draft.ProfileContext, 1500), + truncate(draft.CourseContext, 1000), + truncate(draft.PlanContext, 1000)) + + result, err := input.Config.LLM.GenerateText(agentCtx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + if err != nil { + return nil, err + } + + jsonStr := extractJSONFromLLM(result) + var plan GeneratedPlan + if err := json.Unmarshal([]byte(jsonStr), &plan); err != nil { + return nil, fmt.Errorf("parse plan: %w", err) + } + return &plan, nil +} + +// --- Phase 4: Response Generation + Widget Emission --- + +func emitLearningResponse(ctx context.Context, sess *session.Session, input OrchestratorInput, draft *LearningDraft) { + // Emit structured widgets first + emitLearningWidgets(sess, draft) + + // Then stream the conversational response + textBlockID := uuid.New().String() + sess.EmitBlock(types.NewTextBlock(textBlockID, "")) + + systemPrompt := buildLearningResponsePrompt(input, draft) + + messages := make([]llm.Message, 0, len(input.ChatHistory)+3) + messages = append(messages, llm.Message{Role: "system", Content: systemPrompt}) + for _, m := range input.ChatHistory { + messages = append(messages, m) + } + messages = append(messages, llm.Message{Role: "user", Content: input.FollowUp}) + + streamCtx, cancel := context.WithTimeout(ctx, 3*time.Minute) + defer cancel() + + ch, err := input.Config.LLM.StreamText(streamCtx, llm.StreamRequest{Messages: messages}) + if err != nil { + sess.UpdateBlock(textBlockID, []session.Patch{ + {Op: "replace", Path: "/data", Value: fmt.Sprintf("Ошибка: %v", 
err)}, + }) + return + } + + var fullContent strings.Builder + for chunk := range ch { + if chunk.ContentChunk == "" { + continue + } + fullContent.WriteString(chunk.ContentChunk) + sess.UpdateBlock(textBlockID, []session.Patch{ + {Op: "replace", Path: "/data", Value: fullContent.String()}, + }) + } +} + +func emitLearningWidgets(sess *session.Session, draft *LearningDraft) { + if draft.GeneratedTask != nil { + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), "learning_task", map[string]interface{}{ + "status": "ready", + "title": draft.GeneratedTask.Title, + "difficulty": draft.GeneratedTask.Difficulty, + "estimated": draft.GeneratedTask.EstimatedMin, + "requirements": draft.GeneratedTask.Requirements, + "acceptance": draft.GeneratedTask.Acceptance, + "hints": draft.GeneratedTask.Hints, + "verify_cmd": draft.GeneratedTask.VerifyCmd, + "starter_code": draft.GeneratedTask.StarterCode, + "test_code": draft.GeneratedTask.TestCode, + "skills": draft.GeneratedTask.SkillsTrained, + })) + } + + if draft.Evaluation != nil { + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), "learning_evaluation", map[string]interface{}{ + "score": draft.Evaluation.Score, + "max_score": draft.Evaluation.MaxScore, + "passed": draft.Evaluation.Passed, + "strengths": draft.Evaluation.Strengths, + "issues": draft.Evaluation.Issues, + "suggestions": draft.Evaluation.Suggestions, + "quality": draft.Evaluation.CodeQuality, + })) + } + + if draft.GeneratedQuiz != nil { + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), "learning_quiz", map[string]interface{}{ + "title": draft.GeneratedQuiz.Title, + "questions": draft.GeneratedQuiz.Questions, + "count": len(draft.GeneratedQuiz.Questions), + })) + } + + if draft.GeneratedPlan != nil { + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), "learning_plan", map[string]interface{}{ + "modules": draft.GeneratedPlan.Modules, + "total_hours": draft.GeneratedPlan.TotalHours, + "difficulty": draft.GeneratedPlan.DifficultyAdjusted, 
+ "notes": draft.GeneratedPlan.PersonalizationNote, + })) + } + + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), "learning_progress", map[string]interface{}{ + "phase": "idle", + "intent": string(draft.Brief.Intent), + "timestamp": time.Now().Format(time.RFC3339), + })) +} + +func buildLearningResponsePrompt(input OrchestratorInput, draft *LearningDraft) string { + var sb strings.Builder + + sb.WriteString(`Ты — AI-наставник на платформе GooSeek Education. Ведёшь обучение через чат. + +СТИЛЬ: +- Минимум теории, максимум практики. Объясняй кратко, по делу. +- Задания «боевые» — как на реальных проектах в российских IT-компаниях. +- Конструктивная обратная связь: что хорошо + что улучшить. +- Адаптируй сложность под ученика. +- Русский язык. Markdown для форматирования. +- Будь строгим но справедливым ментором, не «добрым учителем». + +`) + + switch draft.Brief.Intent { + case IntentTaskGenerate: + if draft.GeneratedTask != nil { + taskJSON, _ := json.Marshal(draft.GeneratedTask) + sb.WriteString("СГЕНЕРИРОВАННОЕ ЗАДАНИЕ (уже отправлено как виджет, не дублируй полностью):\n") + sb.WriteString(string(taskJSON)) + sb.WriteString("\n\nПредставь задание ученику: кратко опиши суть, мотивируй, дай контекст зачем это нужно в реальной работе. НЕ копируй JSON — виджет уже показан.\n\n") + } else { + sb.WriteString("Задание не удалось сгенерировать. Предложи ученику уточнить тему или сложность.\n\n") + } + + case IntentVerify: + if draft.Evaluation != nil { + evalJSON, _ := json.Marshal(draft.Evaluation) + sb.WriteString("РЕЗУЛЬТАТ РЕВЬЮ (уже отправлен как виджет):\n") + sb.WriteString(string(evalJSON)) + sb.WriteString("\n\nДай развёрнутую обратную связь: разбери каждый issue, объясни почему это проблема, покажи как исправить с примерами кода. Похвали за strengths.\n\n") + } + + case IntentQuiz: + if draft.GeneratedQuiz != nil { + sb.WriteString("Тест сгенерирован и показан как виджет. 
Кратко представь тест, объясни что проверяется.\n\n") + } + + case IntentPlan: + if draft.GeneratedPlan != nil { + planJSON, _ := json.Marshal(draft.GeneratedPlan) + sb.WriteString("ПЛАН ОБУЧЕНИЯ (уже отправлен как виджет):\n") + sb.WriteString(string(planJSON)) + sb.WriteString("\n\nПредставь план: объясни логику прогрессии, почему именно такие модули, что ученик получит в итоге.\n\n") + } + + case IntentProgress: + sb.WriteString("Покажи прогресс ученика на основе контекста. Если данных нет — предложи начать с задания или теста.\n\n") + } + + if draft.ProfileContext != "" { + sb.WriteString("ПРОФИЛЬ УЧЕНИКА:\n") + sb.WriteString(truncate(draft.ProfileContext, 1500)) + sb.WriteString("\n\n") + } + + if draft.CourseContext != "" { + sb.WriteString("ТЕКУЩИЙ КУРС:\n") + sb.WriteString(draft.CourseContext) + sb.WriteString("\n\n") + } + + if draft.PlanContext != "" { + sb.WriteString("ПЛАН ОБУЧЕНИЯ:\n") + sb.WriteString(truncate(draft.PlanContext, 2000)) + sb.WriteString("\n\n") + } + + if input.Config.FileContext != "" { + sb.WriteString("КОД/ФАЙЛЫ УЧЕНИКА:\n") + sb.WriteString(truncate(input.Config.FileContext, 4000)) + sb.WriteString("\n\n") + } + + return sb.String() +} + +// --- Helpers --- + +func emitPhase(sess *session.Session, blockID, stepType, text string) { + step := types.ResearchSubStep{ID: uuid.New().String(), Type: stepType} + switch stepType { + case "reasoning": + step.Reasoning = text + case "searching": + step.Searching = []string{text} + } + sess.UpdateBlock(blockID, []session.Patch{ + {Op: "replace", Path: "/data/subSteps", Value: []types.ResearchSubStep{step}}, + }) +} + +func phaseSearchLabel(intent LearningIntent) string { + switch intent { + case IntentTaskGenerate: + return "Проектирую практическое задание..." + case IntentVerify: + return "Анализирую код..." + case IntentPlan: + return "Строю план обучения..." + case IntentQuiz: + return "Создаю тест..." + case IntentExplain: + return "Готовлю объяснение..." 
+ case IntentProgress: + return "Собираю статистику..." + default: + return "Готовлю ответ..." + } +} + +func phaseAgentLabel(intent LearningIntent) string { + switch intent { + case IntentTaskGenerate: + return "Генерирую боевое задание..." + case IntentVerify: + return "Провожу code review..." + case IntentPlan: + return "Адаптирую план под профиль..." + case IntentQuiz: + return "Составляю вопросы..." + default: + return "Формирую ответ..." + } +} + +func buildTaskGenContext(draft *LearningDraft) string { + var parts []string + if draft.ProfileContext != "" { + parts = append(parts, "Профиль ученика: "+truncate(draft.ProfileContext, 800)) + } + if draft.CourseContext != "" { + parts = append(parts, "Курс: "+draft.CourseContext) + } + if draft.PlanContext != "" { + parts = append(parts, "План: "+truncate(draft.PlanContext, 800)) + } + if len(parts) == 0 { + return "" + } + return strings.Join(parts, "\n\n") +} + +func extractJSONFromLLM(response string) string { + if strings.Contains(response, "```json") { + start := strings.Index(response, "```json") + 7 + end := strings.Index(response[start:], "```") + if end > 0 { + return strings.TrimSpace(response[start : start+end]) + } + } + if strings.Contains(response, "```") { + start := strings.Index(response, "```") + 3 + if nl := strings.Index(response[start:], "\n"); nl >= 0 { + start += nl + 1 + } + end := strings.Index(response[start:], "```") + if end > 0 { + candidate := strings.TrimSpace(response[start : start+end]) + if len(candidate) > 2 && candidate[0] == '{' { + return candidate + } + } + } + + depth := 0 + startIdx := -1 + for i, ch := range response { + if ch == '{' { + if depth == 0 { + startIdx = i + } + depth++ + } else if ch == '}' { + depth-- + if depth == 0 && startIdx >= 0 { + candidate := response[startIdx : i+1] + if len(candidate) > 10 { + return candidate + } + } + } + } + + return "{}" +} + +func containsAny(s string, substrs ...string) bool { + for _, sub := range substrs { + if 
strings.Contains(s, sub) { + return true + } + } + return false +} + +func orDefault(val, def string) string { + if val == "" { + return def + } + return val +} diff --git a/backend/internal/agent/orchestrator.go b/backend/internal/agent/orchestrator.go index fc51ce0..b459a7c 100644 --- a/backend/internal/agent/orchestrator.go +++ b/backend/internal/agent/orchestrator.go @@ -54,6 +54,7 @@ type OrchestratorConfig struct { TravelSvcURL string TravelPayoutsToken string TravelPayoutsMarker string + PhotoCache *PhotoCacheService } type DigestResponse struct { @@ -94,6 +95,10 @@ func RunOrchestrator(ctx context.Context, sess *session.Session, input Orchestra return RunTravelOrchestrator(ctx, sess, input) } + if input.Config.AnswerMode == "learning" || input.Config.LearningMode { + return RunLearningOrchestrator(ctx, sess, input) + } + detectedLang := detectLanguage(input.FollowUp) isArticleSummary := strings.HasPrefix(strings.TrimSpace(input.FollowUp), "Summary: ") diff --git a/backend/internal/agent/travel_context_collector.go b/backend/internal/agent/travel_context_collector.go index 379e174..c91c904 100644 --- a/backend/internal/agent/travel_context_collector.go +++ b/backend/internal/agent/travel_context_collector.go @@ -22,13 +22,25 @@ type TravelContext struct { BestTimeInfo string `json:"bestTimeInfo,omitempty"` } +type DailyForecast struct { + Date string `json:"date"` + TempMin float64 `json:"tempMin"` + TempMax float64 `json:"tempMax"` + Conditions string `json:"conditions"` + Icon string `json:"icon"` + RainChance string `json:"rainChance"` + Wind string `json:"wind,omitempty"` + Tip string `json:"tip,omitempty"` +} + type WeatherAssessment struct { - Summary string `json:"summary"` - TempMin float64 `json:"tempMin"` - TempMax float64 `json:"tempMax"` - Conditions string `json:"conditions"` - Clothing string `json:"clothing"` - RainChance string `json:"rainChance"` + Summary string `json:"summary"` + TempMin float64 `json:"tempMin"` + TempMax float64 
`json:"tempMax"` + Conditions string `json:"conditions"` + Clothing string `json:"clothing"` + RainChance string `json:"rainChance"` + DailyForecast []DailyForecast `json:"dailyForecast,omitempty"` } type SafetyAssessment struct { @@ -87,7 +99,6 @@ func searchForContext(ctx context.Context, client *search.SearXNGClient, brief * dest := strings.Join(brief.Destinations, ", ") currentYear := time.Now().Format("2006") - currentMonth := time.Now().Format("01") monthNames := map[string]string{ "01": "январь", "02": "февраль", "03": "март", @@ -95,10 +106,25 @@ func searchForContext(ctx context.Context, client *search.SearXNGClient, brief * "07": "июль", "08": "август", "09": "сентябрь", "10": "октябрь", "11": "ноябрь", "12": "декабрь", } - month := monthNames[currentMonth] + + tripMonth := time.Now().Format("01") + if brief.StartDate != "" { + if t, err := time.Parse("2006-01-02", brief.StartDate); err == nil { + tripMonth = t.Format("01") + } + } + month := monthNames[tripMonth] + + dateRange := "" + if brief.StartDate != "" && brief.EndDate != "" { + dateRange = fmt.Sprintf("%s — %s", brief.StartDate, brief.EndDate) + } else if brief.StartDate != "" { + dateRange = brief.StartDate + } queries := []string{ - fmt.Sprintf("погода %s %s %s прогноз", dest, month, currentYear), + fmt.Sprintf("погода %s %s %s прогноз по дням", dest, month, currentYear), + fmt.Sprintf("прогноз погоды %s %s на 14 дней", dest, dateRange), fmt.Sprintf("безопасность туристов %s %s", dest, currentYear), fmt.Sprintf("ограничения %s туризм %s", dest, currentYear), fmt.Sprintf("что нужно знать туристу %s %s", dest, currentYear), @@ -154,20 +180,40 @@ func extractContextWithLLM(ctx context.Context, llmClient llm.Client, brief *Tri dest := strings.Join(brief.Destinations, ", ") currentDate := time.Now().Format("2006-01-02") - prompt := fmt.Sprintf(`Ты — эксперт по путешествиям. Оцени текущую обстановку в %s для поездки %s — %s. -Сегодня: %s. 
+ tripDays := computeTripDays(brief.StartDate, brief.EndDate) + dailyForecastNote := "" + if tripDays > 0 { + dailyForecastNote = fmt.Sprintf(` +ВАЖНО: Поездка длится %d дней (%s — %s). Составь прогноз погоды НА КАЖДЫЙ ДЕНЬ поездки. +В "dailyForecast" должно быть ровно %d элементов — по одному на каждый день.`, tripDays, brief.StartDate, brief.EndDate, tripDays) + } + prompt := fmt.Sprintf(`Ты — эксперт по путешествиям. Оцени обстановку в %s для поездки %s — %s. +Сегодня: %s. +%s %s Верни ТОЛЬКО JSON (без текста): { "weather": { - "summary": "Краткое описание погоды на период поездки", - "tempMin": число_градусов_минимум, - "tempMax": число_градусов_максимум, - "conditions": "солнечно/облачно/дождливо/снежно", - "clothing": "Что надеть: конкретные рекомендации", - "rainChance": "низкая/средняя/высокая" + "summary": "Общее описание погоды на весь период поездки", + "tempMin": число_минимум_за_весь_период, + "tempMax": число_максимум_за_весь_период, + "conditions": "преобладающие условия: солнечно/облачно/переменная облачность/дождливо/снежно", + "clothing": "Что надеть: конкретные рекомендации по одежде", + "rainChance": "низкая/средняя/высокая", + "dailyForecast": [ + { + "date": "YYYY-MM-DD", + "tempMin": число, + "tempMax": число, + "conditions": "солнечно/облачно/дождь/гроза/снег/туман/переменная облачность", + "icon": "sun/cloud/cloud-sun/rain/storm/snow/fog/wind", + "rainChance": "низкая/средняя/высокая", + "wind": "слабый/умеренный/сильный", + "tip": "Краткий совет на этот день (необязательно, только если есть что сказать)" + } + ] }, "safety": { "level": "safe/caution/warning/danger", @@ -190,27 +236,35 @@ func extractContextWithLLM(ctx context.Context, llmClient llm.Client, brief *Tri } Правила: -- Используй ТОЛЬКО актуальные данные %s года -- weather: реальный прогноз на период поездки, не среднегодовые значения -- safety: объективная оценка, не преувеличивай опасности +- Используй актуальные данные %s года и данные из поиска +- dailyForecast: прогноз НА 
КАЖДЫЙ ДЕНЬ поездки с конкретными температурами и условиями +- Если точный прогноз недоступен — используй климатические данные для этого периода, но старайся варьировать по дням реалистично +- icon: одно из значений sun/cloud/cloud-sun/rain/storm/snow/fog/wind +- weather.summary: общее описание, упомяни если ожидаются дождливые дни +- safety: объективная оценка, не преувеличивай - restrictions: визовые требования, медицинские ограничения, локальные правила -- tips: 3-5 практичных советов для туриста -- Если данных нет — используй свои знания о регионе, но отмечай это +- tips: 3-5 практичных советов - Температуры в градусах Цельсия`, dest, brief.StartDate, brief.EndDate, currentDate, + dailyForecastNote, contextBuilder.String(), time.Now().Format("2006"), ) - llmCtx, cancel := context.WithTimeout(ctx, 25*time.Second) + llmCtx, cancel := context.WithTimeout(ctx, 35*time.Second) defer cancel() + maxTokens := 3000 + if tripDays > 5 { + maxTokens = 4000 + } + response, err := llmClient.GenerateText(llmCtx, llm.StreamRequest{ Messages: []llm.Message{{Role: llm.RoleUser, Content: prompt}}, - Options: llm.StreamOptions{MaxTokens: 2000, Temperature: 0.2}, + Options: llm.StreamOptions{MaxTokens: maxTokens, Temperature: 0.3}, }) if err != nil { log.Printf("[travel-context] LLM extraction failed: %v", err) @@ -233,9 +287,31 @@ func extractContextWithLLM(ctx context.Context, llmClient llm.Client, brief *Tri travelCtx.Safety.EmergencyNo = "112" } - log.Printf("[travel-context] extracted context: weather=%s, safety=%s, restrictions=%d, tips=%d", - travelCtx.Weather.Conditions, travelCtx.Safety.Level, - len(travelCtx.Restrictions), len(travelCtx.Tips)) + log.Printf("[travel-context] extracted context: weather=%s (%d daily), safety=%s, restrictions=%d, tips=%d", + travelCtx.Weather.Conditions, len(travelCtx.Weather.DailyForecast), + travelCtx.Safety.Level, len(travelCtx.Restrictions), len(travelCtx.Tips)) return &travelCtx } + +func computeTripDays(startDate, endDate string) int { 
+ if startDate == "" || endDate == "" { + return 0 + } + start, err := time.Parse("2006-01-02", startDate) + if err != nil { + return 0 + } + end, err := time.Parse("2006-01-02", endDate) + if err != nil { + return 0 + } + days := int(end.Sub(start).Hours()/24) + 1 + if days < 1 { + return 1 + } + if days > 30 { + return 30 + } + return days +} diff --git a/backend/internal/agent/travel_data_client.go b/backend/internal/agent/travel_data_client.go index 6666e73..f56f285 100644 --- a/backend/internal/agent/travel_data_client.go +++ b/backend/internal/agent/travel_data_client.go @@ -1,6 +1,7 @@ package agent import ( + "bytes" "context" "encoding/json" "fmt" @@ -40,6 +41,24 @@ func NewTravelDataClient(baseURL string) *TravelDataClient { } func (c *TravelDataClient) doWithRetry(ctx context.Context, req *http.Request) (*http.Response, error) { + // http.Request.Clone does NOT recreate the Body. If we retry a request with a Body, + // we must be able to recreate it; otherwise retries will send an empty body and may + // fail with ContentLength/body length mismatch. 
+ var bodyCopy []byte + if req.Body != nil && req.GetBody == nil { + b, err := io.ReadAll(req.Body) + if err != nil { + return nil, fmt.Errorf("read request body for retry: %w", err) + } + _ = req.Body.Close() + bodyCopy = b + req.GetBody = func() (io.ReadCloser, error) { + return io.NopCloser(bytes.NewReader(bodyCopy)), nil + } + req.Body = io.NopCloser(bytes.NewReader(bodyCopy)) + req.ContentLength = int64(len(bodyCopy)) + } + var lastErr error for attempt := 0; attempt <= c.maxRetries; attempt++ { if attempt > 0 { @@ -51,7 +70,18 @@ func (c *TravelDataClient) doWithRetry(ctx context.Context, req *http.Request) ( } } - resp, err := c.httpClient.Do(req.Clone(ctx)) + reqAttempt := req.Clone(ctx) + if req.GetBody != nil { + rc, err := req.GetBody() + if err != nil { + lastErr = err + continue + } + reqAttempt.Body = rc + reqAttempt.ContentLength = req.ContentLength + } + + resp, err := c.httpClient.Do(reqAttempt) if err != nil { lastErr = err continue @@ -306,13 +336,16 @@ func (c *TravelDataClient) SearchHotels(ctx context.Context, lat, lng float64, c // PlaceResult represents a place from 2GIS Places API. 
type PlaceResult struct { - ID string `json:"id"` - Name string `json:"name"` - Address string `json:"address"` - Lat float64 `json:"lat"` - Lng float64 `json:"lng"` - Type string `json:"type"` - Purpose string `json:"purpose"` + ID string `json:"id"` + Name string `json:"name"` + Address string `json:"address"` + Lat float64 `json:"lat"` + Lng float64 `json:"lng"` + Type string `json:"type"` + Purpose string `json:"purpose"` + Rating float64 `json:"rating"` + ReviewCount int `json:"reviewCount"` + Schedule map[string]string `json:"schedule,omitempty"` } func (c *TravelDataClient) SearchPlaces(ctx context.Context, query string, lat, lng float64, radius int) ([]PlaceResult, error) { diff --git a/backend/internal/agent/travel_events_collector.go b/backend/internal/agent/travel_events_collector.go index a7667b1..f8bc769 100644 --- a/backend/internal/agent/travel_events_collector.go +++ b/backend/internal/agent/travel_events_collector.go @@ -39,12 +39,16 @@ func CollectEventsEnriched(ctx context.Context, cfg TravelOrchestratorConfig, br events := extractEventsWithLLM(ctx, cfg.LLM, brief, rawResults, crawledContent) - events = geocodeEvents(ctx, cfg, events) + events = geocodeEvents(ctx, cfg, brief, events) events = deduplicateEvents(events) events = filterFreshEvents(events, brief.StartDate) + // Hard filter: drop events that ended up in another city/country due to ambiguous geocoding. 
+ destGeo := geocodeDestinations(ctx, cfg, brief) + events = filterEventsNearDestinations(events, destGeo, 250) + if len(events) > 15 { events = events[:15] } @@ -425,28 +429,74 @@ func tryPartialEventParse(jsonStr string) []EventCard { return events } -func geocodeEvents(ctx context.Context, cfg TravelOrchestratorConfig, events []EventCard) []EventCard { +func geocodeEvents(ctx context.Context, cfg TravelOrchestratorConfig, brief *TripBrief, events []EventCard) []EventCard { + destSuffix := strings.Join(brief.Destinations, ", ") for i := range events { if events[i].Address == "" || (events[i].Lat != 0 && events[i].Lng != 0) { continue } - geoCtx, cancel := context.WithTimeout(ctx, 5*time.Second) - geo, err := cfg.TravelData.Geocode(geoCtx, events[i].Address) - cancel() + queries := []string{events[i].Address} + if destSuffix != "" && !strings.Contains(strings.ToLower(events[i].Address), strings.ToLower(destSuffix)) { + queries = append(queries, fmt.Sprintf("%s, %s", events[i].Address, destSuffix)) + } + queries = append(queries, fmt.Sprintf("%s, %s", events[i].Title, destSuffix)) - if err != nil { - log.Printf("[travel-events] geocode failed for '%s': %v", events[i].Address, err) - continue + var lastErr error + for _, q := range queries { + geoCtx, cancel := context.WithTimeout(ctx, 5*time.Second) + geo, err := cfg.TravelData.Geocode(geoCtx, q) + cancel() + if err != nil { + lastErr = err + continue + } + events[i].Lat = geo.Lat + events[i].Lng = geo.Lng + break } - events[i].Lat = geo.Lat - events[i].Lng = geo.Lng + if events[i].Lat == 0 && events[i].Lng == 0 { + if lastErr != nil { + log.Printf("[travel-events] geocode failed for '%s': %v", events[i].Address, lastErr) + } else { + log.Printf("[travel-events] geocode failed for '%s'", events[i].Address) + } + continue + } } return events } +func filterEventsNearDestinations(events []EventCard, destinations []destGeoEntry, maxKm float64) []EventCard { + if len(destinations) == 0 { + return events + } + filtered 
:= make([]EventCard, 0, len(events)) + for _, e := range events { + if e.Lat == 0 && e.Lng == 0 { + continue + } + minD := 1e18 + for _, d := range destinations { + if d.Lat == 0 && d.Lng == 0 { + continue + } + dd := distanceKm(e.Lat, e.Lng, d.Lat, d.Lng) + if dd < minD { + minD = dd + } + } + if minD <= maxKm { + filtered = append(filtered, e) + } else { + log.Printf("[travel-events] dropped far event '%s' (%.0fkm from destinations)", e.Title, minD) + } + } + return filtered +} + func deduplicateEvents(events []EventCard) []EventCard { seen := make(map[string]bool) var unique []EventCard diff --git a/backend/internal/agent/travel_hotels_collector.go b/backend/internal/agent/travel_hotels_collector.go index afbdd03..6069b88 100644 --- a/backend/internal/agent/travel_hotels_collector.go +++ b/backend/internal/agent/travel_hotels_collector.go @@ -36,6 +36,8 @@ func CollectHotelsEnriched(ctx context.Context, cfg TravelOrchestratorConfig, br hotels = deduplicateHotels(hotels) + hotels = filterHotelsNearDestinations(hotels, destinations, 250) + if len(hotels) > 10 { hotels = hotels[:10] } @@ -400,6 +402,34 @@ func geocodeHotels(ctx context.Context, cfg TravelOrchestratorConfig, hotels []H return hotels } +func filterHotelsNearDestinations(hotels []HotelCard, destinations []destGeoEntry, maxKm float64) []HotelCard { + if len(destinations) == 0 { + return hotels + } + filtered := make([]HotelCard, 0, len(hotels)) + for _, h := range hotels { + if h.Lat == 0 && h.Lng == 0 { + continue + } + minD := 1e18 + for _, d := range destinations { + if d.Lat == 0 && d.Lng == 0 { + continue + } + dd := distanceKm(h.Lat, h.Lng, d.Lat, d.Lng) + if dd < minD { + minD = dd + } + } + if minD <= maxKm { + filtered = append(filtered, h) + } else { + log.Printf("[travel-hotels] dropped far hotel '%s' (%.0fkm from destinations)", h.Name, minD) + } + } + return filtered +} + func deduplicateHotels(hotels []HotelCard) []HotelCard { seen := make(map[string]bool) var unique []HotelCard diff 
--git a/backend/internal/agent/travel_orchestrator.go b/backend/internal/agent/travel_orchestrator.go index 8109950..6e31a64 100644 --- a/backend/internal/agent/travel_orchestrator.go +++ b/backend/internal/agent/travel_orchestrator.go @@ -5,8 +5,10 @@ import ( "encoding/json" "fmt" "log" + "math" "regexp" "strings" + "sync" "time" "github.com/gooseek/backend/internal/llm" @@ -22,6 +24,7 @@ type TravelOrchestratorConfig struct { LLM llm.Client SearchClient *search.SearXNGClient TravelData *TravelDataClient + PhotoCache *PhotoCacheService Crawl4AIURL string Locale string TravelPayoutsToken string @@ -35,6 +38,7 @@ func RunTravelOrchestrator(ctx context.Context, sess *session.Session, input Orc LLM: input.Config.LLM, SearchClient: input.Config.SearchClient, TravelData: NewTravelDataClient(input.Config.TravelSvcURL), + PhotoCache: input.Config.PhotoCache, Crawl4AIURL: input.Config.Crawl4AIURL, Locale: input.Config.Locale, TravelPayoutsToken: input.Config.TravelPayoutsToken, @@ -66,6 +70,7 @@ func RunTravelOrchestrator(ctx context.Context, sess *session.Session, input Orc } brief.ApplyDefaults() + enforceDefaultSingleDay(brief, input.FollowUp) // Geocode origin if we have a name but no coordinates if brief.Origin != "" && brief.OriginLat == 0 && brief.OriginLng == 0 { @@ -79,6 +84,7 @@ func RunTravelOrchestrator(ctx context.Context, sess *session.Session, input Orc // --- Phase 2: Geocode destinations --- destGeo := geocodeDestinations(ctx, travelCfg, brief) + destGeo = enforceOneDayFeasibility(ctx, &travelCfg, brief, destGeo) sess.UpdateBlock(researchBlockID, []session.Patch{{ Op: "replace", @@ -103,6 +109,62 @@ func RunTravelOrchestrator(ctx context.Context, sess *session.Session, input Orc CreatedAt: time.Now(), UpdatedAt: time.Now(), } + var draftMu sync.Mutex + var emitMu sync.Mutex + + emitCandidatesWidget := func(kind string) { + emitMu.Lock() + defer emitMu.Unlock() + + draftMu.Lock() + defer draftMu.Unlock() + + switch kind { + case "context": + if 
draft.Context == nil { + return + } + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), string(types.WidgetTravelContext), map[string]interface{}{ + "weather": draft.Context.Weather, + "safety": draft.Context.Safety, + "restrictions": draft.Context.Restrictions, + "tips": draft.Context.Tips, + "bestTimeInfo": draft.Context.BestTimeInfo, + })) + case "events": + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), string(types.WidgetTravelEvents), map[string]interface{}{ + "events": draft.Candidates.Events, + "count": len(draft.Candidates.Events), + })) + case "pois": + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), string(types.WidgetTravelPOI), map[string]interface{}{ + "pois": draft.Candidates.POIs, + "count": len(draft.Candidates.POIs), + })) + case "hotels": + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), string(types.WidgetTravelHotels), map[string]interface{}{ + "hotels": draft.Candidates.Hotels, + "count": len(draft.Candidates.Hotels), + })) + case "transport": + flights := make([]TransportOption, 0) + ground := make([]TransportOption, 0) + for _, t := range draft.Candidates.Transport { + if t.Mode == "flight" { + flights = append(flights, t) + } else { + ground = append(ground, t) + } + } + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), string(types.WidgetTravelTransport), map[string]interface{}{ + "flights": flights, + "ground": ground, + "passengers": draft.Brief.Travelers, + })) + default: + return + } + } collectCtx, collectCancel := context.WithTimeout(ctx, 90*time.Second) defer collectCancel() @@ -116,7 +178,10 @@ func RunTravelOrchestrator(ctx context.Context, sess *session.Session, input Orc log.Printf("[travel] events collection error: %v", err) return nil } + draftMu.Lock() draft.Candidates.Events = events + draftMu.Unlock() + emitCandidatesWidget("events") return nil }) @@ -127,7 +192,10 @@ func RunTravelOrchestrator(ctx context.Context, sess *session.Session, input Orc log.Printf("[travel] POI collection 
error: %v", err) return nil } + draftMu.Lock() draft.Candidates.POIs = pois + draftMu.Unlock() + emitCandidatesWidget("pois") return nil }) @@ -138,7 +206,10 @@ func RunTravelOrchestrator(ctx context.Context, sess *session.Session, input Orc log.Printf("[travel] hotels collection error: %v", err) return nil } + draftMu.Lock() draft.Candidates.Hotels = hotels + draftMu.Unlock() + emitCandidatesWidget("hotels") return nil }) @@ -149,7 +220,10 @@ func RunTravelOrchestrator(ctx context.Context, sess *session.Session, input Orc log.Printf("[travel] transport collection error: %v", err) return nil } + draftMu.Lock() draft.Candidates.Transport = transport + draftMu.Unlock() + emitCandidatesWidget("transport") return nil }) @@ -160,7 +234,10 @@ func RunTravelOrchestrator(ctx context.Context, sess *session.Session, input Orc log.Printf("[travel] context collection error: %v", err) return nil } + draftMu.Lock() draft.Context = travelCtx + draftMu.Unlock() + emitCandidatesWidget("context") return nil }) @@ -197,6 +274,55 @@ func RunTravelOrchestrator(ctx context.Context, sess *session.Session, input Orc return nil } +func userExplicitlyProvidedDateRange(text string) bool { + t := strings.ToLower(text) + + isoDate := regexp.MustCompile(`\b20\d{2}-\d{2}-\d{2}\b`) + if len(isoDate.FindAllString(t, -1)) >= 2 { + return true + } + + loose := regexp.MustCompile(`\b\d{1,2}[./-]\d{1,2}([./-]\d{2,4})?\b`) + if strings.Contains(t, "с ") && strings.Contains(t, " по ") && len(loose.FindAllString(t, -1)) >= 2 { + return true + } + + return false +} + +func enforceDefaultSingleDay(brief *TripBrief, userText string) { + // Product rule: default to ONE day unless user explicitly provided start+end dates. 
+ if !userExplicitlyProvidedDateRange(userText) { + brief.EndDate = brief.StartDate + } +} + +func enforceOneDayFeasibility(ctx context.Context, cfg *TravelOrchestratorConfig, brief *TripBrief, destGeo []destGeoEntry) []destGeoEntry { + // If it's a one-day request and origin+destination are far apart, + // plan locally around origin (user is already there). + if brief.StartDate == "" || brief.EndDate == "" || brief.StartDate != brief.EndDate { + return destGeo + } + if brief.Origin == "" { + return destGeo + } + if brief.OriginLat == 0 && brief.OriginLng == 0 { + return destGeo + } + if len(destGeo) == 0 || (destGeo[0].Lat == 0 && destGeo[0].Lng == 0) { + return destGeo + } + + d := distanceKm(brief.OriginLat, brief.OriginLng, destGeo[0].Lat, destGeo[0].Lng) + if d <= 250 { + return destGeo + } + + log.Printf("[travel] one-day request but destination is far (%.0fkm) — switching destination to origin %q", d, brief.Origin) + brief.Destinations = []string{brief.Origin} + return geocodeDestinations(ctx, *cfg, brief) +} + // --- Phase 1: Planner Agent --- func runPlannerAgent(ctx context.Context, cfg TravelOrchestratorConfig, input OrchestratorInput) (*TripBrief, error) { @@ -219,9 +345,13 @@ func runPlannerAgent(ctx context.Context, cfg TravelOrchestratorConfig, input Or } Правила: -- Если пользователь говорит "сегодня" — startDate = текущая дата (` + time.Now().Format("2006-01-02") + `) -- Для однодневных поездок endDate = startDate -- Если дата не указана, оставь пустую строку "" +- Сегодняшняя дата: ` + time.Now().Format("2006-01-02") + ` +- Если пользователь говорит "сегодня" — startDate = сегодняшняя дата +- Если пользователь говорит "завтра" — startDate = завтрашняя дата (` + time.Now().AddDate(0, 0, 1).Format("2006-01-02") + `) +- Если пользователь говорит "послезавтра" — startDate = послезавтрашняя дата (` + time.Now().AddDate(0, 0, 2).Format("2006-01-02") + `) +- ВАЖНО: По умолчанию планируем ОДИН день. 
Если пользователь не указал конечную дату явно — endDate оставь пустой строкой "" +- endDate заполняй ТОЛЬКО если пользователь явно указал диапазон дат (дата начала И дата конца) +- Если дата не указана вообще, оставь пустую строку "" - Если бюджет не указан, поставь 0 - Если количество путешественников не указано, поставь 0 - ВАЖНО: Если в сообщении есть координаты "Моё текущее местоположение: lat, lng", используй их: @@ -257,9 +387,18 @@ func runPlannerAgent(ctx context.Context, cfg TravelOrchestratorConfig, input Or var brief TripBrief if err := json.Unmarshal([]byte(jsonMatch), &brief); err != nil { - return &TripBrief{ - Destinations: extractDestinationsFromText(input.FollowUp), - }, nil + repaired := repairJSON(jsonMatch) + if repaired != "" { + if err2 := json.Unmarshal([]byte(repaired), &brief); err2 != nil { + return &TripBrief{ + Destinations: extractDestinationsFromText(input.FollowUp), + }, nil + } + } else { + return &TripBrief{ + Destinations: extractDestinationsFromText(input.FollowUp), + }, nil + } } if len(brief.Destinations) == 0 { @@ -362,19 +501,21 @@ func runItineraryBuilder(ctx context.Context, cfg TravelOrchestratorConfig, draf } type poiCompact struct { - ID string `json:"id"` - Name string `json:"name"` - Category string `json:"category"` - Lat float64 `json:"lat"` - Lng float64 `json:"lng"` - Duration int `json:"duration"` - Price float64 `json:"price"` - Address string `json:"address"` + ID string `json:"id"` + Name string `json:"name"` + Category string `json:"category"` + Lat float64 `json:"lat"` + Lng float64 `json:"lng"` + Duration int `json:"duration"` + Price float64 `json:"price"` + Address string `json:"address"` + Schedule map[string]string `json:"schedule,omitempty"` } type eventCompact struct { ID string `json:"id"` Title string `json:"title"` DateStart string `json:"dateStart"` + DateEnd string `json:"dateEnd,omitempty"` Lat float64 `json:"lat"` Lng float64 `json:"lng"` Price float64 `json:"price"` @@ -398,6 +539,7 @@ func 
runItineraryBuilder(ctx context.Context, cfg TravelOrchestratorConfig, draf ID: p.ID, Name: p.Name, Category: p.Category, Lat: p.Lat, Lng: p.Lng, Duration: dur, Price: p.Price, Address: p.Address, + Schedule: p.Schedule, }) } @@ -405,6 +547,7 @@ func runItineraryBuilder(ctx context.Context, cfg TravelOrchestratorConfig, draf for _, e := range draft.Candidates.Events { compactEvents = append(compactEvents, eventCompact{ ID: e.ID, Title: e.Title, DateStart: e.DateStart, + DateEnd: e.DateEnd, Lat: e.Lat, Lng: e.Lng, Price: e.Price, Address: e.Address, }) } @@ -428,10 +571,26 @@ func runItineraryBuilder(ctx context.Context, cfg TravelOrchestratorConfig, draf "hotels": compactHotels, } if draft.Context != nil { + weatherCtx := map[string]interface{}{ + "summary": draft.Context.Weather.Summary, + "tempRange": fmt.Sprintf("%.0f..%.0f°C", draft.Context.Weather.TempMin, draft.Context.Weather.TempMax), + "conditions": draft.Context.Weather.Conditions, + } + if len(draft.Context.Weather.DailyForecast) > 0 { + dailyWeather := make([]map[string]interface{}, 0, len(draft.Context.Weather.DailyForecast)) + for _, d := range draft.Context.Weather.DailyForecast { + dailyWeather = append(dailyWeather, map[string]interface{}{ + "date": d.Date, + "tempMin": d.TempMin, + "tempMax": d.TempMax, + "conditions": d.Conditions, + "rainChance": d.RainChance, + }) + } + weatherCtx["dailyForecast"] = dailyWeather + } candidateData["context"] = map[string]interface{}{ - "weather": draft.Context.Weather.Summary, - "tempRange": fmt.Sprintf("%.0f..%.0f°C", draft.Context.Weather.TempMin, draft.Context.Weather.TempMax), - "conditions": draft.Context.Weather.Conditions, + "weather": weatherCtx, "safetyLevel": draft.Context.Safety.Level, "restrictions": draft.Context.Restrictions, } @@ -442,6 +601,8 @@ func runItineraryBuilder(ctx context.Context, cfg TravelOrchestratorConfig, draf Данные (с координатами для расчёта расстояний): %s +ВАЖНО: Если startDate == endDate — это ОДНОДНЕВНЫЙ план. 
Верни РОВНО 1 день в массиве "days" и поставь date=startDate. + КРИТИЧЕСКИЕ ПРАВИЛА РАСЧЁТА ВРЕМЕНИ: 1. Используй координаты (lat, lng) для оценки расстояний между точками. 2. Средняя скорость передвижения по городу: 15-20 км/ч (пробки, пешком, общественный транспорт). @@ -453,6 +614,12 @@ func runItineraryBuilder(ctx context.Context, cfg TravelOrchestratorConfig, draf 8. Максимум 4-5 основных активностей в день (не считая еду и переезды). 9. День начинается в 09:00, заканчивается в 21:00. С детьми — до 19:00. +ПРАВИЛА ПОГОДЫ (если есть dailyForecast в context): +1. В дождливые дни (conditions: "дождь"/"гроза") — ставь крытые активности: музеи, торговые центры, рестораны, театры. +2. В солнечные дни — парки, смотровые площадки, прогулки, набережные. +3. В холодные дни (tempMax < 5°C) — больше крытых мест, меньше прогулок. +4. Если есть tip для дня — учитывай его при планировании. + ПРАВИЛА ЦЕН: 1. cost — цена НА ОДНОГО человека за эту активность. 2. Для бесплатных мест (парки, площади, улицы) — cost = 0. @@ -488,6 +655,8 @@ func runItineraryBuilder(ctx context.Context, cfg TravelOrchestratorConfig, draf `+"```"+` Дополнительные правила: +- Для refType="poi"|"event"|"hotel" ЗАПРЕЩЕНО выдумывать места. Используй ТОЛЬКО объекты из данных и ставь их "refId" из списка. +- Если подходящего POI/события/отеля в данных нет — используй refType="custom" (или "food" для еды) и ставь lat/lng = 0. 
- Между точками ОБЯЗАТЕЛЬНО вставляй элемент "transfer" с refType="transfer" если расстояние > 1 км - В note для transfer указывай расстояние и примерное время - Начинай день с отеля/завтрака @@ -506,14 +675,7 @@ func runItineraryBuilder(ctx context.Context, cfg TravelOrchestratorConfig, draf summaryText := extractTextBeforeJSON(response) - jsonMatch := regexp.MustCompile("```(?:json)?\\s*([\\s\\S]*?)```").FindStringSubmatch(response) - var jsonStr string - if len(jsonMatch) > 1 { - jsonStr = strings.TrimSpace(jsonMatch[1]) - } else { - jsonStr = regexp.MustCompile(`\{[\s\S]*"days"[\s\S]*\}`).FindString(response) - } - + jsonStr := extractJSONFromResponse(response) if jsonStr == "" { return nil, summaryText, nil } @@ -522,15 +684,119 @@ func runItineraryBuilder(ctx context.Context, cfg TravelOrchestratorConfig, draf Days []ItineraryDay `json:"days"` } if err := json.Unmarshal([]byte(jsonStr), &result); err != nil { - log.Printf("[travel] itinerary JSON parse error: %v", err) - return nil, summaryText, nil + repaired := repairJSON(jsonStr) + if repaired != "" { + if err2 := json.Unmarshal([]byte(repaired), &result); err2 != nil { + log.Printf("[travel] itinerary JSON parse error (after repair): %v", err2) + return nil, summaryText, nil + } + } else { + log.Printf("[travel] itinerary JSON parse error: %v", err) + return nil, summaryText, nil + } } result.Days = validateItineraryTimes(result.Days) + result.Days = postValidateItinerary(result.Days, draft) + if draft.Brief != nil && draft.Brief.StartDate != "" && draft.Brief.EndDate == draft.Brief.StartDate && len(result.Days) > 1 { + // Defensive clamp: for one-day plans keep only the first day. 
+ result.Days = result.Days[:1] + result.Days[0].Date = draft.Brief.StartDate + } return result.Days, summaryText, nil } +func postValidateItinerary(days []ItineraryDay, draft *TripDraft) []ItineraryDay { + poiByID := make(map[string]*POICard) + for i := range draft.Candidates.POIs { + poiByID[draft.Candidates.POIs[i].ID] = &draft.Candidates.POIs[i] + } + eventByID := make(map[string]*EventCard) + for i := range draft.Candidates.Events { + eventByID[draft.Candidates.Events[i].ID] = &draft.Candidates.Events[i] + } + hotelByID := make(map[string]*HotelCard) + for i := range draft.Candidates.Hotels { + hotelByID[draft.Candidates.Hotels[i].ID] = &draft.Candidates.Hotels[i] + } + + // Build a centroid of "known-good" coordinates to detect out-of-area hallucinations. + var sumLat, sumLng float64 + var cnt float64 + addPoint := func(lat, lng float64) { + if lat == 0 && lng == 0 { + return + } + sumLat += lat + sumLng += lng + cnt++ + } + for _, p := range draft.Candidates.POIs { + addPoint(p.Lat, p.Lng) + } + for _, e := range draft.Candidates.Events { + addPoint(e.Lat, e.Lng) + } + for _, h := range draft.Candidates.Hotels { + addPoint(h.Lat, h.Lng) + } + centLat, centLng := 0.0, 0.0 + if cnt > 0 { + centLat = sumLat / cnt + centLng = sumLng / cnt + } + + for d := range days { + for i := range days[d].Items { + item := &days[d].Items[i] + + // If refId exists, always trust coordinates from candidates (even if LLM provided something else). + if item.RefID != "" { + if poi, ok := poiByID[item.RefID]; ok { + item.Lat, item.Lng = poi.Lat, poi.Lng + } else if ev, ok := eventByID[item.RefID]; ok { + item.Lat, item.Lng = ev.Lat, ev.Lng + } else if h, ok := hotelByID[item.RefID]; ok { + item.Lat, item.Lng = h.Lat, h.Lng + } else if item.RefType == "poi" || item.RefType == "event" || item.RefType == "hotel" { + // Unknown refId for these types → convert to custom to avoid cross-city junk. 
+ item.RefType = "custom" + item.RefID = "" + item.Lat = 0 + item.Lng = 0 + if item.Note == "" { + item.Note = "Уточнить место: не найдено среди вариантов для города" + } + } + } + + // Clamp out-of-area coordinates (e.g., another country) if we have a centroid. + if centLat != 0 || centLng != 0 { + if item.Lat != 0 || item.Lng != 0 { + if distanceKm(item.Lat, item.Lng, centLat, centLng) > 250 { + item.Lat = 0 + item.Lng = 0 + if item.RefType == "poi" || item.RefType == "event" || item.RefType == "hotel" { + item.RefType = "custom" + item.RefID = "" + } + if item.Note == "" { + item.Note = "Уточнить место: координаты вне города/маршрута" + } + } + } + } + + if item.Currency == "" { + item.Currency = draft.Brief.Currency + } + } + } + + return days +} + func validateItineraryTimes(days []ItineraryDay) []ItineraryDay { for d := range days { items := days[d].Items @@ -577,6 +843,58 @@ func formatMinutesTime(minutes int) string { return fmt.Sprintf("%02d:%02d", minutes/60, minutes%60) } +func extractJSONFromResponse(response string) string { + codeBlockRe := regexp.MustCompile("```(?:json)?\\s*([\\s\\S]*?)```") + if m := codeBlockRe.FindStringSubmatch(response); len(m) > 1 { + return strings.TrimSpace(m[1]) + } + + if idx := strings.Index(response, `"days"`); idx >= 0 { + braceStart := strings.LastIndex(response[:idx], "{") + if braceStart >= 0 { + depth := 0 + for i := braceStart; i < len(response); i++ { + switch response[i] { + case '{': + depth++ + case '}': + depth-- + if depth == 0 { + return response[braceStart : i+1] + } + } + } + } + } + + return regexp.MustCompile(`\{[\s\S]*"days"[\s\S]*\}`).FindString(response) +} + +func repairJSON(s string) string { + s = strings.TrimSpace(s) + + s = regexp.MustCompile(`,\s*}`).ReplaceAllString(s, "}") + s = regexp.MustCompile(`,\s*]`).ReplaceAllString(s, "]") + + openBraces := strings.Count(s, "{") - strings.Count(s, "}") + for openBraces > 0 { + s += "}" + openBraces-- + } + + openBrackets := strings.Count(s, "[") - 
strings.Count(s, "]") + for openBrackets > 0 { + s += "]" + openBrackets-- + } + + var test json.RawMessage + if json.Unmarshal([]byte(s), &test) == nil { + return s + } + return "" +} + func extractTextBeforeJSON(response string) string { idx := strings.Index(response, "```") if idx > 0 { @@ -674,44 +992,39 @@ func emitTravelWidgets(ctx context.Context, sess *session.Session, cfg *TravelOr sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), string(types.WidgetTravelMap), widgetParams)) } - // Events widget - if len(draft.Candidates.Events) > 0 { - sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), string(types.WidgetTravelEvents), map[string]interface{}{ - "events": draft.Candidates.Events, - })) - } + // Events widget (always emit — UI shows empty state) + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), string(types.WidgetTravelEvents), map[string]interface{}{ + "events": draft.Candidates.Events, + "count": len(draft.Candidates.Events), + })) - // POI widget - if len(draft.Candidates.POIs) > 0 { - sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), string(types.WidgetTravelPOI), map[string]interface{}{ - "pois": draft.Candidates.POIs, - })) - } + // POI widget (always emit) + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), string(types.WidgetTravelPOI), map[string]interface{}{ + "pois": draft.Candidates.POIs, + "count": len(draft.Candidates.POIs), + })) - // Hotels widget - if len(draft.Candidates.Hotels) > 0 { - sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), string(types.WidgetTravelHotels), map[string]interface{}{ - "hotels": draft.Candidates.Hotels, - })) - } + // Hotels widget (always emit) + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), string(types.WidgetTravelHotels), map[string]interface{}{ + "hotels": draft.Candidates.Hotels, + "count": len(draft.Candidates.Hotels), + })) - // Transport widget - if len(draft.Candidates.Transport) > 0 { - flights := make([]TransportOption, 0) - ground := 
make([]TransportOption, 0) - for _, t := range draft.Candidates.Transport { - if t.Mode == "flight" { - flights = append(flights, t) - } else { - ground = append(ground, t) - } + // Transport widget (always emit) + flights := make([]TransportOption, 0) + ground := make([]TransportOption, 0) + for _, t := range draft.Candidates.Transport { + if t.Mode == "flight" { + flights = append(flights, t) + } else { + ground = append(ground, t) } - sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), string(types.WidgetTravelTransport), map[string]interface{}{ - "flights": flights, - "ground": ground, - "passengers": draft.Brief.Travelers, - })) } + sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), string(types.WidgetTravelTransport), map[string]interface{}{ + "flights": flights, + "ground": ground, + "passengers": draft.Brief.Travelers, + })) // Itinerary widget if len(draft.Selected.Itinerary) > 0 { @@ -723,6 +1036,9 @@ func emitTravelWidgets(ctx context.Context, sess *session.Session, cfg *TravelOr if len(segments) > 0 { itineraryParams["segments"] = segments } + if draft.Context != nil && len(draft.Context.Weather.DailyForecast) > 0 { + itineraryParams["dailyForecast"] = draft.Context.Weather.DailyForecast + } sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), string(types.WidgetTravelItinerary), itineraryParams)) } @@ -735,15 +1051,6 @@ func emitTravelWidgets(ctx context.Context, sess *session.Session, cfg *TravelOr "perPerson": budget.PerPerson, })) } - - // Actions widget - sess.EmitBlock(types.NewWidgetBlock(uuid.New().String(), string(types.WidgetTravelActions), map[string]interface{}{ - "actions": []map[string]interface{}{ - {"id": "save_trip", "label": "Сохранить поездку", "kind": "save", "payload": map[string]interface{}{}}, - {"id": "modify_route", "label": "Изменить маршрут", "kind": "modify", "payload": map[string]interface{}{}}, - {"id": "add_more", "label": "Найти ещё варианты", "kind": "search", "payload": map[string]interface{}{}}, - }, - 
})) } func buildMapPoints(draft *TripDraft, destGeo []destGeoEntry) []MapPoint { @@ -1020,90 +1327,135 @@ func buildRoadRoute(ctx context.Context, cfg *TravelOrchestratorConfig, points [ return nil, nil } - routeCtx, cancel := context.WithTimeout(ctx, 60*time.Second) + routeCtx, cancel := context.WithTimeout(ctx, 90*time.Second) defer cancel() - log.Printf("[travel] building road route segment-by-segment for %d points", len(points)) + segments := buildSegmentCosts(points) + + // 2GIS supports up to 10 waypoints per request; batch accordingly + const maxWaypoints = 10 + log.Printf("[travel] building batched multi-point route for %d points (batch size %d)", len(points), maxWaypoints) var allCoords [][2]float64 var allSteps []RouteStepResult var totalDistance, totalDuration float64 - segments := make([]routeSegmentWithCosts, 0, len(points)-1) + batchOK := true - for i := 0; i < len(points)-1; i++ { - if i > 0 { + for batchStart := 0; batchStart < len(points)-1; batchStart += maxWaypoints - 1 { + batchEnd := batchStart + maxWaypoints + if batchEnd > len(points) { + batchEnd = len(points) + } + batch := points[batchStart:batchEnd] + if len(batch) < 2 { + break + } + + if batchStart > 0 { select { case <-routeCtx.Done(): + batchOK = false + case <-time.After(1500 * time.Millisecond): + } + if !batchOK { break - case <-time.After(300 * time.Millisecond): } } - pair := []MapPoint{points[i], points[i+1]} - - var segDir *RouteDirectionResult + var batchRoute *RouteDirectionResult var err error for attempt := 0; attempt < 3; attempt++ { - segDir, err = cfg.TravelData.GetRoute(routeCtx, pair, "driving") - if err == nil || !strings.Contains(err.Error(), "429") { + batchRoute, err = cfg.TravelData.GetRoute(routeCtx, batch, "driving") + if err == nil { break } - log.Printf("[travel] segment %d->%d rate limited, retry %d", i, i+1, attempt+1) + if !strings.Contains(err.Error(), "429") { + break + } + log.Printf("[travel] batch %d-%d rate limited, retry %d", batchStart, batchEnd-1, 
attempt+1) select { case <-routeCtx.Done(): + batchOK = false + case <-time.After(time.Duration(2+attempt*2) * time.Second): + } + if !batchOK { break - case <-time.After(time.Duration(1+attempt) * time.Second): } } - var distanceM, durationS float64 if err != nil { - log.Printf("[travel] segment %d->%d routing failed: %v", i, i+1, err) - } else if segDir != nil { - distanceM = segDir.Distance - durationS = segDir.Duration - totalDistance += distanceM - totalDuration += durationS - - if len(segDir.Geometry.Coordinates) > 0 { - if len(allCoords) > 0 && len(segDir.Geometry.Coordinates) > 0 { - allCoords = append(allCoords, segDir.Geometry.Coordinates[1:]...) - } else { - allCoords = append(allCoords, segDir.Geometry.Coordinates...) - } - } - allSteps = append(allSteps, segDir.Steps...) + log.Printf("[travel] batch %d-%d routing failed: %v", batchStart, batchEnd-1, err) + batchOK = false + break } + if batchRoute == nil || len(batchRoute.Geometry.Coordinates) < 2 { + log.Printf("[travel] batch %d-%d returned empty geometry", batchStart, batchEnd-1) + batchOK = false + break + } + + totalDistance += batchRoute.Distance + totalDuration += batchRoute.Duration + if len(allCoords) > 0 { + allCoords = append(allCoords, batchRoute.Geometry.Coordinates[1:]...) + } else { + allCoords = append(allCoords, batchRoute.Geometry.Coordinates...) + } + allSteps = append(allSteps, batchRoute.Steps...) 
+ log.Printf("[travel] batch %d-%d OK: +%.0fm, +%d coords", batchStart, batchEnd-1, batchRoute.Distance, len(batchRoute.Geometry.Coordinates)) + } + + if batchOK && len(allCoords) > 1 { + fullRoute := &RouteDirectionResult{ + Geometry: RouteGeometryResult{ + Coordinates: allCoords, + Type: "LineString", + }, + Distance: totalDistance, + Duration: totalDuration, + Steps: allSteps, + } + log.Printf("[travel] road route OK: distance=%.0fm, coords=%d, segments=%d", totalDistance, len(allCoords), len(segments)) + return fullRoute, segments + } + + log.Printf("[travel] batched routing failed, no road coordinates collected") + return nil, segments +} + +func buildSegmentCosts(points []MapPoint) []routeSegmentWithCosts { + segments := make([]routeSegmentWithCosts, 0, len(points)-1) + for i := 0; i < len(points)-1; i++ { + distKm := haversineDistance(points[i].Lat, points[i].Lng, points[i+1].Lat, points[i+1].Lng) + distM := distKm * 1000 + durationS := distKm / 40.0 * 3600 // ~40 km/h average seg := routeSegmentWithCosts{ From: points[i].Label, To: points[i+1].Label, - Distance: distanceM, + Distance: distM, Duration: durationS, } - if distanceM > 0 { - seg.TransportOptions = calculateTransportCosts(distanceM, durationS) + if distM > 0 { + seg.TransportOptions = calculateTransportCosts(distM, durationS) } segments = append(segments, seg) } + return segments +} - if len(allCoords) == 0 { - log.Printf("[travel] no road coordinates collected") - return nil, segments - } +func haversineDistance(lat1, lng1, lat2, lng2 float64) float64 { + const R = 6371.0 + dLat := (lat2 - lat1) * math.Pi / 180 + dLng := (lng2 - lng1) * math.Pi / 180 + lat1Rad := lat1 * math.Pi / 180 + lat2Rad := lat2 * math.Pi / 180 - fullRoute := &RouteDirectionResult{ - Geometry: RouteGeometryResult{ - Coordinates: allCoords, - Type: "LineString", - }, - Distance: totalDistance, - Duration: totalDuration, - Steps: allSteps, - } - log.Printf("[travel] road route OK: distance=%.0fm, coords=%d, segments=%d", 
totalDistance, len(allCoords), len(segments)) - - return fullRoute, segments + a := math.Sin(dLat/2)*math.Sin(dLat/2) + + math.Cos(lat1Rad)*math.Cos(lat2Rad)* + math.Sin(dLng/2)*math.Sin(dLng/2) + c := 2 * math.Asin(math.Sqrt(a)) + return R * c } func calculateTransportCosts(distanceMeters float64, durationSeconds float64) []transportCostOption { diff --git a/backend/internal/agent/travel_photo_cache.go b/backend/internal/agent/travel_photo_cache.go new file mode 100644 index 0000000..9a1a5d3 --- /dev/null +++ b/backend/internal/agent/travel_photo_cache.go @@ -0,0 +1,194 @@ +package agent + +import ( + "context" + "crypto/sha256" + "fmt" + "io" + "log" + "net/http" + "strings" + "sync" + "time" + + "github.com/gooseek/backend/pkg/storage" +) + +const ( + photoCachePrefix = "poi-photos" + maxPhotoSize = 5 * 1024 * 1024 // 5MB + photoDownloadTimeout = 8 * time.Second +) + +type PhotoCacheService struct { + storage *storage.MinioStorage + client *http.Client + mu sync.RWMutex + memCache map[string]string // sourceURL -> publicURL (in-memory for current session) +} + +func NewPhotoCacheService(s *storage.MinioStorage) *PhotoCacheService { + return &PhotoCacheService{ + storage: s, + client: &http.Client{ + Timeout: photoDownloadTimeout, + CheckRedirect: func(req *http.Request, via []*http.Request) error { + if len(via) >= 3 { + return http.ErrUseLastResponse + } + return nil + }, + }, + memCache: make(map[string]string, 128), + } +} + +func (pc *PhotoCacheService) CachePhoto(ctx context.Context, citySlug, sourceURL string) (string, error) { + pc.mu.RLock() + if cached, ok := pc.memCache[sourceURL]; ok { + pc.mu.RUnlock() + return cached, nil + } + pc.mu.RUnlock() + + key := pc.buildKey(citySlug, sourceURL) + + exists, err := pc.storage.ObjectExists(ctx, key) + if err == nil && exists { + publicURL := pc.storage.GetPublicURL(key) + if publicURL != "" { + pc.mu.Lock() + pc.memCache[sourceURL] = publicURL + pc.mu.Unlock() + return publicURL, nil + } + } + + body, 
contentType, err := pc.downloadImage(ctx, sourceURL) + if err != nil { + return "", fmt.Errorf("download failed: %w", err) + } + defer body.Close() + + limitedReader := io.LimitReader(body, maxPhotoSize) + + result, err := pc.storage.UploadWithKey(ctx, key, limitedReader, -1, contentType) + if err != nil { + return "", fmt.Errorf("upload to minio failed: %w", err) + } + + publicURL := pc.storage.GetPublicURL(result.Key) + if publicURL == "" { + return "", fmt.Errorf("no public URL configured for storage") + } + + pc.mu.Lock() + pc.memCache[sourceURL] = publicURL + pc.mu.Unlock() + + return publicURL, nil +} + +func (pc *PhotoCacheService) CachePhotoBatch(ctx context.Context, citySlug string, sourceURLs []string) []string { + results := make([]string, len(sourceURLs)) + var wg sync.WaitGroup + + for i, url := range sourceURLs { + wg.Add(1) + go func(idx int, srcURL string) { + defer wg.Done() + + cacheCtx, cancel := context.WithTimeout(ctx, photoDownloadTimeout+2*time.Second) + defer cancel() + + cached, err := pc.CachePhoto(cacheCtx, citySlug, srcURL) + if err != nil { + log.Printf("[photo-cache] failed to cache %s: %v", truncateURL(srcURL), err) + results[idx] = srcURL + return + } + results[idx] = cached + }(i, url) + } + + wg.Wait() + return results +} + +func (pc *PhotoCacheService) buildKey(citySlug, sourceURL string) string { + hash := sha256.Sum256([]byte(sourceURL)) + hashStr := fmt.Sprintf("%x", hash[:12]) + + ext := ".jpg" + lower := strings.ToLower(sourceURL) + switch { + case strings.Contains(lower, ".png"): + ext = ".png" + case strings.Contains(lower, ".webp"): + ext = ".webp" + case strings.Contains(lower, ".gif"): + ext = ".gif" + } + + slug := sanitizeSlug(citySlug) + return fmt.Sprintf("%s/%s/%s%s", photoCachePrefix, slug, hashStr, ext) +} + +func (pc *PhotoCacheService) downloadImage(ctx context.Context, url string) (io.ReadCloser, string, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return 
nil, "", err + } + + req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; GooSeek/1.0)") + req.Header.Set("Accept", "image/*") + + resp, err := pc.client.Do(req) + if err != nil { + return nil, "", err + } + + if resp.StatusCode != http.StatusOK { + resp.Body.Close() + return nil, "", fmt.Errorf("HTTP %d", resp.StatusCode) + } + + contentType := resp.Header.Get("Content-Type") + if contentType == "" { + contentType = "image/jpeg" + } + + if !strings.HasPrefix(contentType, "image/") { + resp.Body.Close() + return nil, "", fmt.Errorf("not an image: %s", contentType) + } + + return resp.Body, contentType, nil +} + +func sanitizeSlug(s string) string { + s = strings.ToLower(strings.TrimSpace(s)) + s = strings.Map(func(r rune) rune { + if (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || r == '-' { + return r + } + if (r >= 0x0400 && r <= 0x04FF) || r == '_' { + return r + } + if r == ' ' { + return '-' + } + return -1 + }, s) + for strings.Contains(s, "--") { + s = strings.ReplaceAll(s, "--", "-") + } + return strings.Trim(s, "-") +} + +func truncateURL(u string) string { + if len(u) > 80 { + return u[:80] + "..." + } + return u +} diff --git a/backend/internal/agent/travel_poi_collector.go b/backend/internal/agent/travel_poi_collector.go index 90a1bb5..f8dc5ba 100644 --- a/backend/internal/agent/travel_poi_collector.go +++ b/backend/internal/agent/travel_poi_collector.go @@ -5,6 +5,7 @@ import ( "encoding/json" "fmt" "log" + "math" "regexp" "strings" "sync" @@ -103,7 +104,11 @@ func CollectPOIsEnriched(ctx context.Context, cfg TravelOrchestratorConfig, brie } // Phase 4: Fallback geocoding for POIs without coordinates - allPOIs = geocodePOIs(ctx, cfg, allPOIs) + allPOIs = geocodePOIs(ctx, cfg, brief, allPOIs) + + // Hard filter: drop POIs that are far away from any destination center. + // This prevents ambiguous geocoding from pulling in other cities/countries. 
+ allPOIs = filterPOIsNearDestinations(allPOIs, destinations, 250) allPOIs = deduplicatePOIs(allPOIs) @@ -453,6 +458,14 @@ func enrichPOIPhotos(ctx context.Context, cfg TravelOrchestratorConfig, brief *T } if len(photos) > 0 { + if cfg.PhotoCache != nil { + citySlug := dest + if citySlug == "" { + citySlug = "unknown" + } + photos = cfg.PhotoCache.CachePhotoBatch(ctx, citySlug, photos) + } + mu.Lock() pois[idx].Photos = photos mu.Unlock() @@ -463,12 +476,18 @@ func enrichPOIPhotos(ctx context.Context, cfg TravelOrchestratorConfig, brief *T wg.Wait() photosFound := 0 + cachedCount := 0 for _, p := range pois { if len(p.Photos) > 0 { photosFound++ + for _, ph := range p.Photos { + if strings.Contains(ph, "storage.gooseek") || strings.Contains(ph, "minio") { + cachedCount++ + } + } } } - log.Printf("[travel-poi] enriched %d/%d POIs with photos", photosFound, len(pois)) + log.Printf("[travel-poi] enriched %d/%d POIs with photos (%d cached in MinIO)", photosFound, len(pois), cachedCount) return pois } @@ -636,19 +655,27 @@ func extractPOIsWithLLM(ctx context.Context, llmClient llm.Client, brief *TripBr return pois } -func geocodePOIs(ctx context.Context, cfg TravelOrchestratorConfig, pois []POICard) []POICard { +func geocodePOIs(ctx context.Context, cfg TravelOrchestratorConfig, brief *TripBrief, pois []POICard) []POICard { + destSuffix := strings.Join(brief.Destinations, ", ") for i := range pois { if pois[i].Lat != 0 && pois[i].Lng != 0 { continue } - // Try geocoding by address first, then by name + city + // Try geocoding by address first, then by name+destination. 
queries := []string{} if pois[i].Address != "" { queries = append(queries, pois[i].Address) + if destSuffix != "" && !strings.Contains(strings.ToLower(pois[i].Address), strings.ToLower(destSuffix)) { + queries = append(queries, fmt.Sprintf("%s, %s", pois[i].Address, destSuffix)) + } } if pois[i].Name != "" { - queries = append(queries, pois[i].Name) + if destSuffix != "" { + queries = append(queries, fmt.Sprintf("%s, %s", pois[i].Name, destSuffix)) + } else { + queries = append(queries, pois[i].Name) + } } for _, query := range queries { @@ -674,6 +701,46 @@ func geocodePOIs(ctx context.Context, cfg TravelOrchestratorConfig, pois []POICa return pois } +func distanceKm(lat1, lng1, lat2, lng2 float64) float64 { + const earthRadiusKm = 6371.0 + toRad := func(d float64) float64 { return d * math.Pi / 180 } + lat1r := toRad(lat1) + lat2r := toRad(lat2) + dLat := toRad(lat2 - lat1) + dLng := toRad(lng2 - lng1) + a := math.Sin(dLat/2)*math.Sin(dLat/2) + math.Cos(lat1r)*math.Cos(lat2r)*math.Sin(dLng/2)*math.Sin(dLng/2) + c := 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a)) + return earthRadiusKm * c +} + +func filterPOIsNearDestinations(pois []POICard, destinations []destGeoEntry, maxKm float64) []POICard { + if len(destinations) == 0 { + return pois + } + filtered := make([]POICard, 0, len(pois)) + for _, p := range pois { + if p.Lat == 0 && p.Lng == 0 { + continue + } + minD := math.MaxFloat64 + for _, d := range destinations { + if d.Lat == 0 && d.Lng == 0 { + continue + } + dd := distanceKm(p.Lat, p.Lng, d.Lat, d.Lng) + if dd < minD { + minD = dd + } + } + if minD <= maxKm { + filtered = append(filtered, p) + } else { + log.Printf("[travel-poi] dropped far POI '%s' (%.0fkm from destinations)", p.Name, minD) + } + } + return filtered +} + func deduplicatePOIs(pois []POICard) []POICard { seen := make(map[string]bool) var unique []POICard diff --git a/backend/internal/agent/travel_types.go b/backend/internal/agent/travel_types.go index e41847c..2397521 100644 --- 
a/backend/internal/agent/travel_types.go +++ b/backend/internal/agent/travel_types.go @@ -30,12 +30,7 @@ func (b *TripBrief) ApplyDefaults() { b.StartDate = time.Now().Format("2006-01-02") } if b.EndDate == "" { - start, err := time.Parse("2006-01-02", b.StartDate) - if err == nil { - b.EndDate = start.AddDate(0, 0, 3).Format("2006-01-02") - } else { - b.EndDate = b.StartDate - } + b.EndDate = b.StartDate } if b.Travelers == 0 { b.Travelers = 2 diff --git a/backend/internal/db/learning_repo.go b/backend/internal/db/learning_repo.go new file mode 100644 index 0000000..ac04917 --- /dev/null +++ b/backend/internal/db/learning_repo.go @@ -0,0 +1,642 @@ +package db + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + "time" +) + +type LearningRepository struct { + db *PostgresDB +} + +func NewLearningRepository(db *PostgresDB) *LearningRepository { + return &LearningRepository{db: db} +} + +func (r *LearningRepository) RunMigrations(ctx context.Context) error { + migrations := []string{ + `CREATE TABLE IF NOT EXISTS learning_user_profiles ( + user_id UUID PRIMARY KEY, + display_name VARCHAR(255), + profile JSONB NOT NULL DEFAULT '{}', + resume_file_id UUID, + resume_extracted_text TEXT, + onboarding_completed BOOLEAN DEFAULT FALSE, + created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW() + )`, + + `CREATE TABLE IF NOT EXISTS learning_courses ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + slug VARCHAR(255) NOT NULL UNIQUE, + title VARCHAR(500) NOT NULL, + short_description TEXT, + category VARCHAR(100) NOT NULL DEFAULT 'general', + tags TEXT[] DEFAULT '{}', + difficulty VARCHAR(50) NOT NULL DEFAULT 'beginner', + duration_hours INT DEFAULT 0, + base_outline JSONB NOT NULL DEFAULT '{}', + landing JSONB NOT NULL DEFAULT '{}', + cover_image TEXT, + fingerprint VARCHAR(128) UNIQUE, + status VARCHAR(50) NOT NULL DEFAULT 'draft', + enrolled_count INT DEFAULT 0, + created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT 
NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_learning_courses_status ON learning_courses(status)`, + `CREATE INDEX IF NOT EXISTS idx_learning_courses_category ON learning_courses(category)`, + + `CREATE TABLE IF NOT EXISTS learning_enrollments ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL, + course_id UUID NOT NULL REFERENCES learning_courses(id) ON DELETE CASCADE, + status VARCHAR(50) NOT NULL DEFAULT 'active', + plan JSONB NOT NULL DEFAULT '{}', + progress JSONB NOT NULL DEFAULT '{"completed_modules": [], "current_module": 0, "score": 0}', + created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW(), + UNIQUE(user_id, course_id) + )`, + `CREATE INDEX IF NOT EXISTS idx_learning_enrollments_user ON learning_enrollments(user_id)`, + + `CREATE TABLE IF NOT EXISTS learning_tasks ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + enrollment_id UUID NOT NULL REFERENCES learning_enrollments(id) ON DELETE CASCADE, + module_index INT NOT NULL DEFAULT 0, + title VARCHAR(500) NOT NULL, + task_type VARCHAR(50) NOT NULL DEFAULT 'code', + instructions_md TEXT NOT NULL, + rubric JSONB NOT NULL DEFAULT '{}', + sandbox_template JSONB NOT NULL DEFAULT '{}', + verification_cmd TEXT, + status VARCHAR(50) NOT NULL DEFAULT 'pending', + created_at TIMESTAMPTZ DEFAULT NOW(), + updated_at TIMESTAMPTZ DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_learning_tasks_enrollment ON learning_tasks(enrollment_id)`, + + `CREATE TABLE IF NOT EXISTS learning_submissions ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + task_id UUID NOT NULL REFERENCES learning_tasks(id) ON DELETE CASCADE, + sandbox_session_id UUID, + result JSONB NOT NULL DEFAULT '{}', + score INT DEFAULT 0, + max_score INT DEFAULT 100, + feedback_md TEXT, + created_at TIMESTAMPTZ DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_learning_submissions_task ON learning_submissions(task_id)`, + + `CREATE TABLE IF NOT EXISTS learning_trend_candidates ( + id UUID PRIMARY KEY 
DEFAULT gen_random_uuid(), + topic VARCHAR(500) NOT NULL, + category VARCHAR(100) NOT NULL DEFAULT 'general', + signals JSONB NOT NULL DEFAULT '{}', + score FLOAT DEFAULT 0, + fingerprint VARCHAR(128) UNIQUE, + fail_count INT NOT NULL DEFAULT 0, + last_error TEXT, + last_failed_at TIMESTAMPTZ, + picked_at TIMESTAMPTZ, + created_at TIMESTAMPTZ DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_learning_trends_score ON learning_trend_candidates(score DESC)`, + `CREATE INDEX IF NOT EXISTS idx_learning_trends_fail ON learning_trend_candidates(fail_count, last_failed_at)`, + + // Backward-compatible schema upgrades (older DBs) + `ALTER TABLE learning_trend_candidates ADD COLUMN IF NOT EXISTS fail_count INT NOT NULL DEFAULT 0`, + `ALTER TABLE learning_trend_candidates ADD COLUMN IF NOT EXISTS last_error TEXT`, + `ALTER TABLE learning_trend_candidates ADD COLUMN IF NOT EXISTS last_failed_at TIMESTAMPTZ`, + + `CREATE TABLE IF NOT EXISTS sandbox_sessions ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL, + task_id UUID REFERENCES learning_tasks(id) ON DELETE SET NULL, + opensandbox_id VARCHAR(255), + status VARCHAR(50) NOT NULL DEFAULT 'creating', + last_active_at TIMESTAMPTZ DEFAULT NOW(), + metadata JSONB NOT NULL DEFAULT '{}', + created_at TIMESTAMPTZ DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_sandbox_sessions_user ON sandbox_sessions(user_id)`, + + `CREATE TABLE IF NOT EXISTS sandbox_events ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + session_id UUID NOT NULL REFERENCES sandbox_sessions(id) ON DELETE CASCADE, + event_type VARCHAR(50) NOT NULL, + payload JSONB NOT NULL DEFAULT '{}', + created_at TIMESTAMPTZ DEFAULT NOW() + )`, + `CREATE INDEX IF NOT EXISTS idx_sandbox_events_session ON sandbox_events(session_id)`, + } + + for _, m := range migrations { + if _, err := r.db.db.ExecContext(ctx, m); err != nil { + return fmt.Errorf("learning migration failed: %w", err) + } + } + return nil +} + +// --- Course types --- + +type 
LearningCourse struct { + ID string `json:"id"` + Slug string `json:"slug"` + Title string `json:"title"` + ShortDescription string `json:"shortDescription"` + Category string `json:"category"` + Tags []string `json:"tags"` + Difficulty string `json:"difficulty"` + DurationHours int `json:"durationHours"` + BaseOutline json.RawMessage `json:"baseOutline"` + Landing json.RawMessage `json:"landing"` + CoverImage string `json:"coverImage,omitempty"` + Fingerprint string `json:"fingerprint,omitempty"` + Status string `json:"status"` + EnrolledCount int `json:"enrolledCount"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` +} + +type LearningUserProfile struct { + UserID string `json:"userId"` + DisplayName string `json:"displayName"` + Profile json.RawMessage `json:"profile"` + ResumeFileID *string `json:"resumeFileId,omitempty"` + ResumeExtractedText string `json:"resumeExtractedText,omitempty"` + OnboardingCompleted bool `json:"onboardingCompleted"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` +} + +type LearningEnrollment struct { + ID string `json:"id"` + UserID string `json:"userId"` + CourseID string `json:"courseId"` + Status string `json:"status"` + Plan json.RawMessage `json:"plan"` + Progress json.RawMessage `json:"progress"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` + Course *LearningCourse `json:"course,omitempty"` +} + +type LearningTask struct { + ID string `json:"id"` + EnrollmentID string `json:"enrollmentId"` + ModuleIndex int `json:"moduleIndex"` + Title string `json:"title"` + TaskType string `json:"taskType"` + InstructionsMD string `json:"instructionsMd"` + Rubric json.RawMessage `json:"rubric"` + SandboxTemplate json.RawMessage `json:"sandboxTemplate"` + VerificationCmd string `json:"verificationCmd,omitempty"` + Status string `json:"status"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` +} + +type 
LearningSubmission struct { + ID string `json:"id"` + TaskID string `json:"taskId"` + SandboxSessionID *string `json:"sandboxSessionId,omitempty"` + Result json.RawMessage `json:"result"` + Score int `json:"score"` + MaxScore int `json:"maxScore"` + FeedbackMD string `json:"feedbackMd,omitempty"` + CreatedAt time.Time `json:"createdAt"` +} + +type LearningTrendCandidate struct { + ID string `json:"id"` + Topic string `json:"topic"` + Category string `json:"category"` + Signals json.RawMessage `json:"signals"` + Score float64 `json:"score"` + Fingerprint string `json:"fingerprint"` + FailCount int `json:"failCount,omitempty"` + LastError *string `json:"lastError,omitempty"` + LastFailedAt *time.Time `json:"lastFailedAt,omitempty"` + PickedAt *time.Time `json:"pickedAt,omitempty"` + CreatedAt time.Time `json:"createdAt"` +} + +type SandboxSession struct { + ID string `json:"id"` + UserID string `json:"userId"` + TaskID *string `json:"taskId,omitempty"` + OpenSandboxID string `json:"opensandboxId,omitempty"` + Status string `json:"status"` + LastActiveAt time.Time `json:"lastActiveAt"` + Metadata json.RawMessage `json:"metadata"` + CreatedAt time.Time `json:"createdAt"` +} + +// --- Courses --- + +func (r *LearningRepository) ListCourses(ctx context.Context, category, difficulty, search string, limit, offset int) ([]*LearningCourse, int, error) { + where := "status = 'published'" + args := make([]interface{}, 0) + argIdx := 1 + + if category != "" { + where += fmt.Sprintf(" AND category = $%d", argIdx) + args = append(args, category) + argIdx++ + } + if difficulty != "" { + where += fmt.Sprintf(" AND difficulty = $%d", argIdx) + args = append(args, difficulty) + argIdx++ + } + if search != "" { + where += fmt.Sprintf(" AND (title ILIKE $%d OR short_description ILIKE $%d)", argIdx, argIdx) + args = append(args, "%"+search+"%") + argIdx++ + } + + countQuery := fmt.Sprintf("SELECT COUNT(*) FROM learning_courses WHERE %s", where) + var total int + if err := 
r.db.db.QueryRowContext(ctx, countQuery, args...).Scan(&total); err != nil { + return nil, 0, err + } + + query := fmt.Sprintf(`SELECT id, slug, title, short_description, category, tags, difficulty, duration_hours, + base_outline, landing, cover_image, status, enrolled_count, created_at, updated_at + FROM learning_courses WHERE %s ORDER BY enrolled_count DESC, created_at DESC LIMIT $%d OFFSET $%d`, + where, argIdx, argIdx+1) + args = append(args, limit, offset) + + rows, err := r.db.db.QueryContext(ctx, query, args...) + if err != nil { + return nil, 0, err + } + defer rows.Close() + + var courses []*LearningCourse + for rows.Next() { + c := &LearningCourse{} + var tags []byte + var coverImg sql.NullString + if err := rows.Scan(&c.ID, &c.Slug, &c.Title, &c.ShortDescription, &c.Category, &tags, + &c.Difficulty, &c.DurationHours, &c.BaseOutline, &c.Landing, &coverImg, + &c.Status, &c.EnrolledCount, &c.CreatedAt, &c.UpdatedAt); err != nil { + return nil, 0, err + } + if coverImg.Valid { + c.CoverImage = coverImg.String + } + json.Unmarshal(tags, &c.Tags) + courses = append(courses, c) + } + return courses, total, nil +} + +func (r *LearningRepository) GetCourseBySlug(ctx context.Context, slug string) (*LearningCourse, error) { + c := &LearningCourse{} + var coverImg sql.NullString + var tags []byte + err := r.db.db.QueryRowContext(ctx, `SELECT id, slug, title, short_description, category, tags, difficulty, duration_hours, + base_outline, landing, cover_image, fingerprint, status, enrolled_count, created_at, updated_at + FROM learning_courses WHERE slug = $1`, slug).Scan( + &c.ID, &c.Slug, &c.Title, &c.ShortDescription, &c.Category, &tags, + &c.Difficulty, &c.DurationHours, &c.BaseOutline, &c.Landing, &coverImg, + &c.Fingerprint, &c.Status, &c.EnrolledCount, &c.CreatedAt, &c.UpdatedAt) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + if coverImg.Valid { + c.CoverImage = coverImg.String + } + json.Unmarshal(tags, &c.Tags) + 
return c, nil +} + +func (r *LearningRepository) GetCourseByID(ctx context.Context, id string) (*LearningCourse, error) { + c := &LearningCourse{} + var coverImg sql.NullString + var tags []byte + err := r.db.db.QueryRowContext(ctx, `SELECT id, slug, title, short_description, category, tags, difficulty, duration_hours, + base_outline, landing, cover_image, fingerprint, status, enrolled_count, created_at, updated_at + FROM learning_courses WHERE id = $1`, id).Scan( + &c.ID, &c.Slug, &c.Title, &c.ShortDescription, &c.Category, &tags, + &c.Difficulty, &c.DurationHours, &c.BaseOutline, &c.Landing, &coverImg, + &c.Fingerprint, &c.Status, &c.EnrolledCount, &c.CreatedAt, &c.UpdatedAt) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + if coverImg.Valid { + c.CoverImage = coverImg.String + } + json.Unmarshal(tags, &c.Tags) + return c, nil +} + +func (r *LearningRepository) CreateCourse(ctx context.Context, c *LearningCourse) error { + tagsJSON, _ := json.Marshal(c.Tags) + return r.db.db.QueryRowContext(ctx, `INSERT INTO learning_courses + (slug, title, short_description, category, tags, difficulty, duration_hours, base_outline, landing, cover_image, fingerprint, status) + VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12) RETURNING id, created_at, updated_at`, + c.Slug, c.Title, c.ShortDescription, c.Category, string(tagsJSON), c.Difficulty, c.DurationHours, + c.BaseOutline, c.Landing, sql.NullString{String: c.CoverImage, Valid: c.CoverImage != ""}, + sql.NullString{String: c.Fingerprint, Valid: c.Fingerprint != ""}, c.Status, + ).Scan(&c.ID, &c.CreatedAt, &c.UpdatedAt) +} + +func (r *LearningRepository) UpdateCourseStatus(ctx context.Context, id, status string) error { + _, err := r.db.db.ExecContext(ctx, "UPDATE learning_courses SET status=$2, updated_at=NOW() WHERE id=$1", id, status) + return err +} + +func (r *LearningRepository) FingerprintExists(ctx context.Context, fp string) (bool, error) { + var exists bool + err := 
r.db.db.QueryRowContext(ctx, "SELECT EXISTS(SELECT 1 FROM learning_courses WHERE fingerprint=$1)", fp).Scan(&exists) + return exists, err +} + +// --- User profiles --- + +func (r *LearningRepository) GetProfile(ctx context.Context, userID string) (*LearningUserProfile, error) { + p := &LearningUserProfile{} + var resumeFileID sql.NullString + var resumeText sql.NullString + err := r.db.db.QueryRowContext(ctx, `SELECT user_id, display_name, profile, resume_file_id, resume_extracted_text, + onboarding_completed, created_at, updated_at FROM learning_user_profiles WHERE user_id=$1`, userID).Scan( + &p.UserID, &p.DisplayName, &p.Profile, &resumeFileID, &resumeText, + &p.OnboardingCompleted, &p.CreatedAt, &p.UpdatedAt) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + if resumeFileID.Valid { + p.ResumeFileID = &resumeFileID.String + } + if resumeText.Valid { + p.ResumeExtractedText = resumeText.String + } + return p, nil +} + +func (r *LearningRepository) UpsertProfile(ctx context.Context, p *LearningUserProfile) error { + _, err := r.db.db.ExecContext(ctx, `INSERT INTO learning_user_profiles (user_id, display_name, profile, resume_file_id, resume_extracted_text, onboarding_completed) + VALUES ($1,$2,$3,$4,$5,$6) ON CONFLICT (user_id) DO UPDATE SET + display_name=EXCLUDED.display_name, profile=EXCLUDED.profile, resume_file_id=EXCLUDED.resume_file_id, + resume_extracted_text=EXCLUDED.resume_extracted_text, onboarding_completed=EXCLUDED.onboarding_completed, updated_at=NOW()`, + p.UserID, p.DisplayName, p.Profile, + sql.NullString{String: func() string { if p.ResumeFileID != nil { return *p.ResumeFileID }; return "" }(), Valid: p.ResumeFileID != nil}, + sql.NullString{String: p.ResumeExtractedText, Valid: p.ResumeExtractedText != ""}, + p.OnboardingCompleted) + return err +} + +// --- Enrollments --- + +func (r *LearningRepository) CreateEnrollment(ctx context.Context, e *LearningEnrollment) error { + err := 
r.db.db.QueryRowContext(ctx, `INSERT INTO learning_enrollments (user_id, course_id, status, plan, progress) + VALUES ($1,$2,$3,$4,$5) RETURNING id, created_at, updated_at`, + e.UserID, e.CourseID, e.Status, e.Plan, e.Progress).Scan(&e.ID, &e.CreatedAt, &e.UpdatedAt) + if err != nil { + return err + } + r.db.db.ExecContext(ctx, "UPDATE learning_courses SET enrolled_count = enrolled_count + 1 WHERE id=$1", e.CourseID) + return nil +} + +func (r *LearningRepository) GetEnrollment(ctx context.Context, id string) (*LearningEnrollment, error) { + e := &LearningEnrollment{} + err := r.db.db.QueryRowContext(ctx, `SELECT id, user_id, course_id, status, plan, progress, created_at, updated_at + FROM learning_enrollments WHERE id=$1`, id).Scan( + &e.ID, &e.UserID, &e.CourseID, &e.Status, &e.Plan, &e.Progress, &e.CreatedAt, &e.UpdatedAt) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + return e, nil +} + +func (r *LearningRepository) ListEnrollments(ctx context.Context, userID string) ([]*LearningEnrollment, error) { + rows, err := r.db.db.QueryContext(ctx, `SELECT e.id, e.user_id, e.course_id, e.status, e.plan, e.progress, e.created_at, e.updated_at, + c.id, c.slug, c.title, c.short_description, c.category, c.difficulty, c.duration_hours, c.cover_image, c.status + FROM learning_enrollments e JOIN learning_courses c ON e.course_id=c.id WHERE e.user_id=$1 ORDER BY e.updated_at DESC`, userID) + if err != nil { + return nil, err + } + defer rows.Close() + + var enrollments []*LearningEnrollment + for rows.Next() { + e := &LearningEnrollment{} + c := &LearningCourse{} + var coverImg sql.NullString + if err := rows.Scan(&e.ID, &e.UserID, &e.CourseID, &e.Status, &e.Plan, &e.Progress, &e.CreatedAt, &e.UpdatedAt, + &c.ID, &c.Slug, &c.Title, &c.ShortDescription, &c.Category, &c.Difficulty, &c.DurationHours, &coverImg, &c.Status); err != nil { + return nil, err + } + if coverImg.Valid { + c.CoverImage = coverImg.String + } + e.Course = c + 
enrollments = append(enrollments, e) + } + return enrollments, nil +} + +func (r *LearningRepository) UpdateEnrollmentProgress(ctx context.Context, id string, progress json.RawMessage) error { + _, err := r.db.db.ExecContext(ctx, "UPDATE learning_enrollments SET progress=$2, updated_at=NOW() WHERE id=$1", id, progress) + return err +} + +func (r *LearningRepository) UpdateEnrollmentPlan(ctx context.Context, id string, plan json.RawMessage) error { + _, err := r.db.db.ExecContext(ctx, "UPDATE learning_enrollments SET plan=$2, updated_at=NOW() WHERE id=$1", id, plan) + return err +} + +// --- Tasks --- + +func (r *LearningRepository) CreateTask(ctx context.Context, t *LearningTask) error { + return r.db.db.QueryRowContext(ctx, `INSERT INTO learning_tasks + (enrollment_id, module_index, title, task_type, instructions_md, rubric, sandbox_template, verification_cmd, status) + VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9) RETURNING id, created_at, updated_at`, + t.EnrollmentID, t.ModuleIndex, t.Title, t.TaskType, t.InstructionsMD, t.Rubric, + t.SandboxTemplate, t.VerificationCmd, t.Status).Scan(&t.ID, &t.CreatedAt, &t.UpdatedAt) +} + +func (r *LearningRepository) GetTask(ctx context.Context, id string) (*LearningTask, error) { + t := &LearningTask{} + err := r.db.db.QueryRowContext(ctx, `SELECT id, enrollment_id, module_index, title, task_type, instructions_md, + rubric, sandbox_template, verification_cmd, status, created_at, updated_at + FROM learning_tasks WHERE id=$1`, id).Scan( + &t.ID, &t.EnrollmentID, &t.ModuleIndex, &t.Title, &t.TaskType, &t.InstructionsMD, + &t.Rubric, &t.SandboxTemplate, &t.VerificationCmd, &t.Status, &t.CreatedAt, &t.UpdatedAt) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + return t, nil +} + +func (r *LearningRepository) ListTasksByEnrollment(ctx context.Context, enrollmentID string) ([]*LearningTask, error) { + rows, err := r.db.db.QueryContext(ctx, `SELECT id, enrollment_id, module_index, title, task_type, 
instructions_md, + rubric, sandbox_template, verification_cmd, status, created_at, updated_at + FROM learning_tasks WHERE enrollment_id=$1 ORDER BY module_index, created_at`, enrollmentID) + if err != nil { + return nil, err + } + defer rows.Close() + + var tasks []*LearningTask + for rows.Next() { + t := &LearningTask{} + if err := rows.Scan(&t.ID, &t.EnrollmentID, &t.ModuleIndex, &t.Title, &t.TaskType, &t.InstructionsMD, + &t.Rubric, &t.SandboxTemplate, &t.VerificationCmd, &t.Status, &t.CreatedAt, &t.UpdatedAt); err != nil { + return nil, err + } + tasks = append(tasks, t) + } + return tasks, nil +} + +func (r *LearningRepository) UpdateTaskStatus(ctx context.Context, id, status string) error { + _, err := r.db.db.ExecContext(ctx, "UPDATE learning_tasks SET status=$2, updated_at=NOW() WHERE id=$1", id, status) + return err +} + +// --- Submissions --- + +func (r *LearningRepository) CreateSubmission(ctx context.Context, s *LearningSubmission) error { + return r.db.db.QueryRowContext(ctx, `INSERT INTO learning_submissions + (task_id, sandbox_session_id, result, score, max_score, feedback_md) VALUES ($1,$2,$3,$4,$5,$6) RETURNING id, created_at`, + s.TaskID, sql.NullString{String: func() string { if s.SandboxSessionID != nil { return *s.SandboxSessionID }; return "" }(), Valid: s.SandboxSessionID != nil}, + s.Result, s.Score, s.MaxScore, s.FeedbackMD).Scan(&s.ID, &s.CreatedAt) +} + +func (r *LearningRepository) GetLatestSubmission(ctx context.Context, taskID string) (*LearningSubmission, error) { + s := &LearningSubmission{} + var sessID sql.NullString + err := r.db.db.QueryRowContext(ctx, `SELECT id, task_id, sandbox_session_id, result, score, max_score, feedback_md, created_at + FROM learning_submissions WHERE task_id=$1 ORDER BY created_at DESC LIMIT 1`, taskID).Scan( + &s.ID, &s.TaskID, &sessID, &s.Result, &s.Score, &s.MaxScore, &s.FeedbackMD, &s.CreatedAt) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + if 
sessID.Valid { + s.SandboxSessionID = &sessID.String + } + return s, nil +} + +// --- Trends --- + +func (r *LearningRepository) CreateTrend(ctx context.Context, t *LearningTrendCandidate) error { + err := r.db.db.QueryRowContext(ctx, `INSERT INTO learning_trend_candidates (topic, category, signals, score, fingerprint) + VALUES ($1,$2,$3,$4,$5) ON CONFLICT (fingerprint) DO NOTHING RETURNING id, created_at`, + t.Topic, t.Category, t.Signals, t.Score, t.Fingerprint).Scan(&t.ID, &t.CreatedAt) + if err == sql.ErrNoRows { + return nil + } + return err +} + +func (r *LearningRepository) PickTopTrend(ctx context.Context) (*LearningTrendCandidate, error) { + t := &LearningTrendCandidate{} + var lastErr sql.NullString + var lastFailed sql.NullTime + err := r.db.db.QueryRowContext(ctx, `UPDATE learning_trend_candidates SET picked_at=NOW() + WHERE id = ( + SELECT id FROM learning_trend_candidates + WHERE picked_at IS NULL + AND fail_count < 5 + AND (last_failed_at IS NULL OR last_failed_at < NOW() - INTERVAL '15 minutes') + ORDER BY score DESC, fail_count ASC, created_at ASC + LIMIT 1 + ) + RETURNING id, topic, category, signals, score, fingerprint, fail_count, last_error, last_failed_at, created_at`).Scan( + &t.ID, &t.Topic, &t.Category, &t.Signals, &t.Score, &t.Fingerprint, &t.FailCount, &lastErr, &lastFailed, &t.CreatedAt) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + if lastErr.Valid { + t.LastError = &lastErr.String + } + if lastFailed.Valid { + t.LastFailedAt = &lastFailed.Time + } + return t, nil +} + +func (r *LearningRepository) MarkTrendFailed(ctx context.Context, id, errMsg string) error { + _, err := r.db.db.ExecContext(ctx, `UPDATE learning_trend_candidates + SET fail_count = fail_count + 1, + last_error = $2, + last_failed_at = NOW(), + picked_at = NULL + WHERE id = $1`, id, errMsg) + return err +} + +func (r *LearningRepository) SlugExists(ctx context.Context, slug string) (bool, error) { + var exists bool + err := 
r.db.db.QueryRowContext(ctx, "SELECT EXISTS(SELECT 1 FROM learning_courses WHERE slug=$1)", slug).Scan(&exists) + return exists, err +} + +// --- Sandbox sessions --- + +func (r *LearningRepository) CreateSandboxSession(ctx context.Context, s *SandboxSession) error { + return r.db.db.QueryRowContext(ctx, `INSERT INTO sandbox_sessions (user_id, task_id, opensandbox_id, status, metadata) + VALUES ($1,$2,$3,$4,$5) RETURNING id, created_at`, + s.UserID, sql.NullString{String: func() string { if s.TaskID != nil { return *s.TaskID }; return "" }(), Valid: s.TaskID != nil}, + s.OpenSandboxID, s.Status, s.Metadata).Scan(&s.ID, &s.CreatedAt) +} + +func (r *LearningRepository) GetSandboxSession(ctx context.Context, id string) (*SandboxSession, error) { + s := &SandboxSession{} + var taskID sql.NullString + err := r.db.db.QueryRowContext(ctx, `SELECT id, user_id, task_id, opensandbox_id, status, last_active_at, metadata, created_at + FROM sandbox_sessions WHERE id=$1`, id).Scan( + &s.ID, &s.UserID, &taskID, &s.OpenSandboxID, &s.Status, &s.LastActiveAt, &s.Metadata, &s.CreatedAt) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, err + } + if taskID.Valid { + s.TaskID = &taskID.String + } + return s, nil +} + +func (r *LearningRepository) UpdateSandboxSessionStatus(ctx context.Context, id, status string) error { + _, err := r.db.db.ExecContext(ctx, "UPDATE sandbox_sessions SET status=$2, last_active_at=NOW() WHERE id=$1", id, status) + return err +} + +func (r *LearningRepository) CreateSandboxEvent(ctx context.Context, sessionID, eventType string, payload json.RawMessage) error { + _, err := r.db.db.ExecContext(ctx, `INSERT INTO sandbox_events (session_id, event_type, payload) VALUES ($1,$2,$3)`, + sessionID, eventType, payload) + return err +} diff --git a/backend/internal/db/learning_repo_test.go b/backend/internal/db/learning_repo_test.go new file mode 100644 index 0000000..db82767 --- /dev/null +++ b/backend/internal/db/learning_repo_test.go 
@@ -0,0 +1,43 @@ +package db + +import ( + "encoding/json" + "testing" +) + +func TestNewLearningRepository(t *testing.T) { + pg := &PostgresDB{} + repo := NewLearningRepository(pg) + if repo == nil { + t.Fatalf("expected repository instance") + } + if repo.db != pg { + t.Fatalf("repository must keep provided db pointer") + } +} + +func TestLearningUserProfileJSONContract(t *testing.T) { + profile := LearningUserProfile{ + UserID: "u-1", + DisplayName: "Alex", + Profile: json.RawMessage(`{"target_track":"backend"}`), + ResumeExtractedText: "resume text", + OnboardingCompleted: true, + } + + raw, err := json.Marshal(profile) + if err != nil { + t.Fatalf("marshal profile: %v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(raw, &decoded); err != nil { + t.Fatalf("unmarshal profile json: %v", err) + } + if decoded["userId"] != "u-1" { + t.Fatalf("unexpected userId: %v", decoded["userId"]) + } + if decoded["onboardingCompleted"] != true { + t.Fatalf("unexpected onboardingCompleted: %v", decoded["onboardingCompleted"]) + } +} diff --git a/backend/internal/learning/course_autogen.go b/backend/internal/learning/course_autogen.go new file mode 100644 index 0000000..c9b5214 --- /dev/null +++ b/backend/internal/learning/course_autogen.go @@ -0,0 +1,556 @@ +package learning + +import ( + "context" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "log" + "regexp" + "strings" + "time" + "unicode" + + "github.com/gooseek/backend/internal/db" + "github.com/gooseek/backend/internal/llm" + "github.com/gooseek/backend/internal/search" +) + +type CourseAutoGenConfig struct { + LLM llm.Client + Repo *db.LearningRepository + SearchClient *search.SearXNGClient +} + +type CourseAutoGenerator struct { + cfg CourseAutoGenConfig +} + +func NewCourseAutoGenerator(cfg CourseAutoGenConfig) *CourseAutoGenerator { + return &CourseAutoGenerator{cfg: cfg} +} + +func (g *CourseAutoGenerator) StartBackground(ctx context.Context) { + 
log.Println("[course-autogen] starting background course generation") + + time.Sleep(30 * time.Second) + + ticker := time.NewTicker(2 * time.Hour) + defer ticker.Stop() + + g.runCycle(ctx) + + for { + select { + case <-ctx.Done(): + return + case <-ticker.C: + g.runCycle(ctx) + } + } +} + +func (g *CourseAutoGenerator) runCycle(ctx context.Context) { + log.Println("[course-autogen] running generation cycle") + + cycleCtx, cancel := context.WithTimeout(ctx, 30*time.Minute) + defer cancel() + + if err := g.collectTrends(cycleCtx); err != nil { + log.Printf("[course-autogen] trend collection error: %v", err) + } + + for i := 0; i < 3; i++ { + trend, err := g.cfg.Repo.PickTopTrend(cycleCtx) + if err != nil || trend == nil { + log.Printf("[course-autogen] no more trends to process") + break + } + + if err := g.designAndPublishCourse(cycleCtx, trend); err != nil { + log.Printf("[course-autogen] course design error for '%s': %v", trend.Topic, err) + continue + } + + time.Sleep(5 * time.Second) + } +} + +func (g *CourseAutoGenerator) collectTrends(ctx context.Context) error { + var webContext string + + if g.cfg.SearchClient != nil { + webContext = g.searchTrendData(ctx) + } + + prompt := `Ты — аналитик трендов IT-индустрии и образования в России и мире.` + + if webContext != "" { + prompt += "\n\nРЕАЛЬНЫЕ ДАННЫЕ ИЗ ИНТЕРНЕТА:\n" + webContext + } + + prompt += ` + +На основе реальных данных выбери 5 уникальных тем для курсов: + +КРИТЕРИИ: +1. Актуальны на рынке РФ (вакансии hh.ru, habr, стеки) +2. НЕ банальные ("Основы Python", "HTML для начинающих" — НЕТ) +3. Практическая ценность для карьеры и зарплаты +4. Уникальность — чего нет на Stepik/Coursera/Skillbox +5. Тренды 2025-2026: AI/ML ops, platform engineering, Rust, WebAssembly, edge computing и т.д. 
+ +Категории: programming, devops, data, ai_ml, security, product, design, management, fintech, gamedev, mobile, blockchain, iot, other + +Ответь строго JSON: +{ + "trends": [ + { + "topic": "Конкретное название курса", + "category": "категория", + "why_unique": "Почему этот курс уникален и привлечёт пользователей", + "demand_signals": ["сигнал спроса 1", "сигнал спроса 2"], + "target_salary": "ожидаемая зарплата после курса", + "score": 0.85 + } + ] +}` + + result, err := generateTextWithRetry(ctx, g.cfg.LLM, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }, 2, 2*time.Second) + if err != nil { + return err + } + + jsonStr := extractJSONBlock(result) + var parsed struct { + Trends []struct { + Topic string `json:"topic"` + Category string `json:"category"` + WhyUnique string `json:"why_unique"` + DemandSignals []string `json:"demand_signals"` + TargetSalary string `json:"target_salary"` + Score float64 `json:"score"` + } `json:"trends"` + } + if err := json.Unmarshal([]byte(jsonStr), &parsed); err != nil || len(parsed.Trends) == 0 { + // Try a strict repair prompt once (common provider failure mode: extra prose / malformed JSON) + repairPrompt := "Верни ответ СТРОГО как JSON без текста. 
" + prompt + repaired, rerr := generateTextWithRetry(ctx, g.cfg.LLM, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: repairPrompt}}, + }, 1, 2*time.Second) + if rerr != nil { + return fmt.Errorf("failed to parse trends: %w", err) + } + jsonStr = extractJSONBlock(repaired) + if uerr := json.Unmarshal([]byte(jsonStr), &parsed); uerr != nil || len(parsed.Trends) == 0 { + if uerr != nil { + return fmt.Errorf("failed to parse trends: %w", uerr) + } + return fmt.Errorf("failed to parse trends: empty trends") + } + } + + saved := 0 + for _, t := range parsed.Trends { + fp := generateFingerprint(t.Topic) + + exists, _ := g.cfg.Repo.FingerprintExists(ctx, fp) + if exists { + continue + } + + signals, _ := json.Marshal(map[string]interface{}{ + "why_unique": t.WhyUnique, + "demand_signals": t.DemandSignals, + "target_salary": t.TargetSalary, + }) + + trend := &db.LearningTrendCandidate{ + Topic: t.Topic, + Category: t.Category, + Signals: signals, + Score: t.Score, + Fingerprint: fp, + } + if err := g.cfg.Repo.CreateTrend(ctx, trend); err == nil { + saved++ + } + } + + log.Printf("[course-autogen] saved %d new trend candidates", saved) + return nil +} + +func (g *CourseAutoGenerator) searchTrendData(ctx context.Context) string { + queries := []string{ + "IT тренды обучение 2025 2026 Россия", + "самые востребованные IT навыки вакансии hh.ru", + "новые технологии программирование курсы", + } + + var results []string + for _, q := range queries { + searchCtx, cancel := context.WithTimeout(ctx, 15*time.Second) + resp, err := g.cfg.SearchClient.Search(searchCtx, q, &search.SearchOptions{ + Categories: []string{"general"}, + PageNo: 1, + }) + cancel() + + if err != nil { + continue + } + for _, r := range resp.Results { + snippet := r.Title + ": " + r.Content + if len(snippet) > 300 { + snippet = snippet[:300] + } + results = append(results, snippet) + } + } + + if len(results) == 0 { + return "" + } + + combined := strings.Join(results, "\n---\n") + if 
len(combined) > 3000 { + combined = combined[:3000] + } + return combined +} + +func (g *CourseAutoGenerator) designAndPublishCourse(ctx context.Context, trend *db.LearningTrendCandidate) error { + log.Printf("[course-autogen] designing course: %s", trend.Topic) + + fp := generateFingerprint(trend.Topic) + exists, _ := g.cfg.Repo.FingerprintExists(ctx, fp) + if exists { + return nil + } + + var marketResearch string + if g.cfg.SearchClient != nil { + marketResearch = g.researchCourseTopic(ctx, trend.Topic) + } + + var lastErr error + for attempt := 0; attempt < 3; attempt++ { + prompt := fmt.Sprintf(`Ты — ведущий методолог обучения в IT. Спроектируй профессиональный курс. + +Тема: %s +Категория: %s`, trend.Topic, trend.Category) + + if marketResearch != "" { + prompt += "\n\nИССЛЕДОВАНИЕ РЫНКА:\n" + marketResearch + } + + prompt += ` + +ТРЕБОВАНИЯ: +1. Минимум теории, максимум боевой практики (как на реальных проектах в РФ) +2. Каждый модуль — практическое задание из реального проекта +3. Уровень: от базового до продвинутого +4. Курс должен быть уникальным — не копия Stepik/Coursera +5. Лендинг должен ПРОДАВАТЬ — конкретные выгоды, зарплаты, результаты +6. Outline должен быть детальным — 8-12 модулей + +Ответь строго JSON: +{ + "title": "Привлекательное название курса", + "slug": "slug-without-spaces", + "short_description": "Краткое описание 2-3 предложения. 
Конкретика, не вода.", + "difficulty": "beginner|intermediate|advanced", + "duration_hours": 40, + "tags": ["тег1", "тег2"], + "outline": { + "modules": [ + { + "index": 0, + "title": "Название модуля", + "description": "Описание + что делаем на практике", + "skills": ["навык"], + "estimated_hours": 4, + "practice_focus": "Конкретная практическая задача" + } + ] + }, + "landing": { + "hero_title": "Заголовок лендинга (продающий)", + "hero_subtitle": "Подзаголовок с конкретной выгодой", + "benefits": ["Конкретная выгода 1", "Выгода 2", "Выгода 3", "Выгода 4"], + "target_audience": "Для кого этот курс — конкретно", + "outcomes": ["Результат 1 с цифрами", "Результат 2"], + "salary_range": "Ожидаемая зарплата после курса", + "prerequisites": "Что нужно знать заранее", + "faq": [ + {"question": "Вопрос?", "answer": "Ответ"} + ] + } +}` + + result, err := generateTextWithRetry(ctx, g.cfg.LLM, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }, 2, 2*time.Second) + if err != nil { + lastErr = err + continue + } + + jsonStr := extractJSONBlock(result) + var parsed struct { + Title string `json:"title"` + Slug string `json:"slug"` + ShortDescription string `json:"short_description"` + Difficulty string `json:"difficulty"` + DurationHours int `json:"duration_hours"` + Tags []string `json:"tags"` + Outline json.RawMessage `json:"outline"` + Landing json.RawMessage `json:"landing"` + } + if err := json.Unmarshal([]byte(jsonStr), &parsed); err != nil { + lastErr = fmt.Errorf("failed to parse course design: %w", err) + continue + } + + outlineJSON := parsed.Outline + if outlineJSON == nil { + outlineJSON = json.RawMessage("{}") + } + landingJSON := parsed.Landing + if landingJSON == nil { + landingJSON = json.RawMessage("{}") + } + + if err := validateCourseArtifacts(parsed.Title, parsed.ShortDescription, outlineJSON, landingJSON); err != nil { + lastErr = err + continue + } + + slug := sanitizeSlug(parsed.Slug) + if slug == "" { + slug = 
sanitizeSlug(parsed.Title) + } + slug = g.ensureUniqueSlug(ctx, slug) + + if parsed.DurationHours == 0 { + parsed.DurationHours = 40 + } + parsed.Difficulty = normalizeDifficulty(parsed.Difficulty) + + course := &db.LearningCourse{ + Slug: slug, + Title: strings.TrimSpace(parsed.Title), + ShortDescription: strings.TrimSpace(parsed.ShortDescription), + Category: trend.Category, + Tags: parsed.Tags, + Difficulty: parsed.Difficulty, + DurationHours: parsed.DurationHours, + BaseOutline: outlineJSON, + Landing: landingJSON, + Fingerprint: fp, + Status: "published", + } + + if err := g.cfg.Repo.CreateCourse(ctx, course); err != nil { + lastErr = fmt.Errorf("failed to save course: %w", err) + continue + } + + log.Printf("[course-autogen] published course: %s (%s)", course.Title, course.Slug) + return nil + } + + if lastErr == nil { + lastErr = errors.New("unknown course design failure") + } + _ = g.cfg.Repo.MarkTrendFailed(ctx, trend.ID, truncateErr(lastErr.Error(), 800)) + return lastErr +} + +func (g *CourseAutoGenerator) researchCourseTopic(ctx context.Context, topic string) string { + queries := []string{ + topic + " курс программа обучение", + topic + " вакансии зарплата Россия", + } + + var results []string + for _, q := range queries { + searchCtx, cancel := context.WithTimeout(ctx, 10*time.Second) + resp, err := g.cfg.SearchClient.Search(searchCtx, q, &search.SearchOptions{ + Categories: []string{"general"}, + PageNo: 1, + }) + cancel() + + if err != nil { + continue + } + for _, r := range resp.Results { + snippet := r.Title + ": " + r.Content + if len(snippet) > 250 { + snippet = snippet[:250] + } + results = append(results, snippet) + } + } + + if len(results) == 0 { + return "" + } + + combined := strings.Join(results, "\n---\n") + if len(combined) > 2000 { + combined = combined[:2000] + } + return combined +} + +func generateFingerprint(topic string) string { + normalized := strings.ToLower(strings.TrimSpace(topic)) + hash := sha256.Sum256([]byte(normalized)) 
+ return hex.EncodeToString(hash[:16]) +} + +func sanitizeSlug(s string) string { + s = strings.ToLower(strings.TrimSpace(s)) + var result []rune + for _, r := range s { + if unicode.IsLetter(r) || unicode.IsDigit(r) { + result = append(result, r) + } else if r == ' ' || r == '-' || r == '_' { + result = append(result, '-') + } + } + slug := string(result) + re := regexp.MustCompile(`-+`) + slug = re.ReplaceAllString(slug, "-") + slug = strings.Trim(slug, "-") + if len(slug) > 100 { + slug = slug[:100] + } + return slug +} + +func (g *CourseAutoGenerator) ensureUniqueSlug(ctx context.Context, base string) string { + if base == "" { + base = "course" + } + slug := base + for i := 0; i < 20; i++ { + exists, err := g.cfg.Repo.SlugExists(ctx, slug) + if err == nil && !exists { + return slug + } + slug = fmt.Sprintf("%s-%d", base, i+2) + } + return fmt.Sprintf("%s-%d", base, time.Now().Unix()%10000) +} + +func normalizeDifficulty(d string) string { + switch strings.ToLower(strings.TrimSpace(d)) { + case "beginner", "intermediate", "advanced": + return strings.ToLower(strings.TrimSpace(d)) + default: + return "intermediate" + } +} + +func validateCourseArtifacts(title, short string, outlineJSON, landingJSON json.RawMessage) error { + if strings.TrimSpace(title) == "" { + return errors.New("course title is empty") + } + if len(strings.TrimSpace(short)) < 40 { + return errors.New("short_description слишком короткое (нужна конкретика)") + } + + // Outline validation + var outline struct { + Modules []struct { + Index int `json:"index"` + Title string `json:"title"` + Description string `json:"description"` + Skills []string `json:"skills"` + EstimatedHours int `json:"estimated_hours"` + PracticeFocus string `json:"practice_focus"` + } `json:"modules"` + } + if err := json.Unmarshal(outlineJSON, &outline); err != nil { + return fmt.Errorf("outline JSON invalid: %w", err) + } + if len(outline.Modules) < 8 || len(outline.Modules) > 12 { + return fmt.Errorf("outline modules 
count must be 8-12, got %d", len(outline.Modules)) + } + for i, m := range outline.Modules { + if strings.TrimSpace(m.Title) == "" || strings.TrimSpace(m.PracticeFocus) == "" { + return fmt.Errorf("outline module[%d] missing title/practice_focus", i) + } + } + + // Landing validation + var landing struct { + HeroTitle string `json:"hero_title"` + HeroSubtitle string `json:"hero_subtitle"` + Benefits []string `json:"benefits"` + Outcomes []string `json:"outcomes"` + SalaryRange string `json:"salary_range"` + FAQ []struct { + Question string `json:"question"` + Answer string `json:"answer"` + } `json:"faq"` + } + if err := json.Unmarshal(landingJSON, &landing); err != nil { + return fmt.Errorf("landing JSON invalid: %w", err) + } + if strings.TrimSpace(landing.HeroTitle) == "" || strings.TrimSpace(landing.HeroSubtitle) == "" { + return errors.New("landing missing hero_title/hero_subtitle") + } + if len(landing.Benefits) < 3 || len(landing.Outcomes) < 2 { + return errors.New("landing benefits/outcomes недостаточно конкретные") + } + if strings.TrimSpace(landing.SalaryRange) == "" { + return errors.New("landing missing salary_range") + } + if len(landing.FAQ) < 1 || strings.TrimSpace(landing.FAQ[0].Question) == "" { + return errors.New("landing FAQ missing") + } + return nil +} + +func generateTextWithRetry(ctx context.Context, client llm.Client, req llm.StreamRequest, retries int, baseDelay time.Duration) (string, error) { + var lastErr error + for attempt := 0; attempt <= retries; attempt++ { + if attempt > 0 { + delay := baseDelay * time.Duration(1< 4000 { + outlineStr = outlineStr[:4000] + } + + prompt := fmt.Sprintf(`Ты — ведущий методолог обучения в IT с 10-летним опытом. Адаптируй базовый план курса под конкретного ученика. + +Курс: %s +Описание: %s +Сложность: %s +Длительность: %d часов + +Базовый план: +%s + +%s + +ТРЕБОВАНИЯ: +1. Минимум теории, максимум боевой практики (как на реальных проектах в РФ) +2. 
Каждый модуль = конкретное практическое задание из реального проекта +3. Прогрессия: от простого к сложному, учитывая текущий уровень ученика +4. Каждый 3-й модуль — checkpoint (мини-проект для проверки навыков) +5. Финальный milestone project — полноценный проект для портфолио +6. Учитывай стек и опыт ученика — не повторяй то, что он уже знает + +Ответь строго JSON: +{ + "modules": [ + { + "index": 0, + "title": "Название модуля", + "description": "Что изучаем и делаем", + "skills": ["навык1"], + "estimated_hours": 4, + "practice_focus": "Конкретная практическая задача из реального проекта", + "task_count": 3, + "is_checkpoint": false + } + ], + "total_hours": 40, + "difficulty_adjusted": "intermediate", + "personalization_notes": "Как план адаптирован под ученика", + "milestone_project": "Описание финального проекта для портфолио" +}`, course.Title, course.ShortDescription, course.Difficulty, course.DurationHours, outlineStr, profileInfo) + + var plan PersonalPlan + if err := generateAndParse(ctx, llmClient, prompt, &plan, 2); err != nil { + return course.BaseOutline, fmt.Errorf("plan generation failed, using base outline: %w", err) + } + + if len(plan.Modules) == 0 { + return course.BaseOutline, nil + } + + for i := range plan.Modules { + plan.Modules[i].Index = i + if plan.Modules[i].TaskCount == 0 { + plan.Modules[i].TaskCount = 2 + } + } + + if plan.TotalHours == 0 { + total := 0 + for _, m := range plan.Modules { + total += m.EstimatedHrs + } + plan.TotalHours = total + } + + result, err := json.Marshal(plan) + if err != nil { + return course.BaseOutline, err + } + return result, nil +} + +func truncateStr(s string, maxLen int) string { + if len(s) <= maxLen { + return s + } + return s[:maxLen] + "..." 
+} diff --git a/backend/internal/learning/plan_builder_test.go b/backend/internal/learning/plan_builder_test.go new file mode 100644 index 0000000..771b489 --- /dev/null +++ b/backend/internal/learning/plan_builder_test.go @@ -0,0 +1,66 @@ +package learning + +import ( + "context" + "encoding/json" + "testing" + + "github.com/gooseek/backend/internal/db" + "github.com/gooseek/backend/internal/llm" +) + +func TestBuildPersonalPlanAppliesDefaults(t *testing.T) { + course := &db.LearningCourse{ + Title: "Go Backend", + ShortDescription: "Курс по backend-разработке на Go", + Difficulty: "intermediate", + DurationHours: 24, + BaseOutline: json.RawMessage(`{"modules":[{"index":0,"title":"base","description":"base","skills":["go"],"estimated_hours":4,"practice_focus":"api"}]}`), + } + + client := &mockLLMClient{ + generateFunc: func(ctx context.Context, req llm.StreamRequest) (string, error) { + return `{ + "modules": [ + {"index": 999, "title": "API design", "description": "design REST", "skills": ["http"], "estimated_hours": 6, "practice_focus": "build handlers", "task_count": 0}, + {"index": 999, "title": "DB layer", "description": "storage", "skills": ["sql"], "estimated_hours": 8, "practice_focus": "repository pattern", "task_count": 3} + ], + "total_hours": 0, + "difficulty_adjusted": "intermediate", + "personalization_notes": "adapted" + }`, nil + }, + } + + planJSON, err := BuildPersonalPlan(context.Background(), client, course, `{"level":"junior"}`) + if err != nil { + t.Fatalf("BuildPersonalPlan error: %v", err) + } + + var plan PersonalPlan + if err := json.Unmarshal(planJSON, &plan); err != nil { + t.Fatalf("unmarshal plan: %v", err) + } + + if len(plan.Modules) != 2 { + t.Fatalf("expected 2 modules, got %d", len(plan.Modules)) + } + if plan.Modules[0].Index != 0 || plan.Modules[1].Index != 1 { + t.Fatalf("module indexes were not normalized: %+v", plan.Modules) + } + if plan.Modules[0].TaskCount != 2 { + t.Fatalf("expected default task_count=2, got %d", 
plan.Modules[0].TaskCount) + } + if plan.TotalHours != 14 { + t.Fatalf("expected total_hours=14, got %d", plan.TotalHours) + } +} + +func TestTruncateStr(t *testing.T) { + if got := truncateStr("abc", 5); got != "abc" { + t.Fatalf("truncateStr should keep short string, got %q", got) + } + if got := truncateStr("abcdef", 3); got != "abc..." { + t.Fatalf("truncateStr should truncate with ellipsis, got %q", got) + } +} diff --git a/backend/internal/learning/profile_builder.go b/backend/internal/learning/profile_builder.go new file mode 100644 index 0000000..16423ef --- /dev/null +++ b/backend/internal/learning/profile_builder.go @@ -0,0 +1,203 @@ +package learning + +import ( + "context" + "encoding/json" + "fmt" + "strings" + "time" + + "github.com/gooseek/backend/internal/llm" +) + +type UserProfile struct { + Name string `json:"name"` + ExperienceYears float64 `json:"experience_years"` + CurrentRole string `json:"current_role"` + Skills []string `json:"skills"` + ProgrammingLangs []string `json:"programming_languages"` + Frameworks []string `json:"frameworks"` + Education string `json:"education"` + Industries []string `json:"industries"` + Strengths []string `json:"strengths"` + GrowthAreas []string `json:"growth_areas"` + CareerGoals string `json:"career_goals"` + RecommendedTracks []string `json:"recommended_tracks"` + Level string `json:"level"` + Summary string `json:"summary"` +} + +func BuildProfileFromResume(ctx context.Context, llmClient llm.Client, extractedText string) (json.RawMessage, error) { + if strings.TrimSpace(extractedText) == "" { + return json.RawMessage("{}"), fmt.Errorf("empty resume text") + } + + if len(extractedText) > 12000 { + extractedText = extractedText[:12000] + } + + prompt := `Ты — senior HR-аналитик с 15-летним опытом в IT-рекрутинге в РФ. Проанализируй резюме и создай детальный профиль. + +Резюме: +` + extractedText + ` + +ЗАДАЧА: Извлеки максимум информации. Определи реальный уровень кандидата (не завышай). 
+ +Ответь строго JSON (без markdown, без комментариев): +{ + "name": "Имя Фамилия", + "experience_years": 3.5, + "current_role": "текущая должность или последняя", + "skills": ["навык1", "навык2", "навык3"], + "programming_languages": ["Go", "Python"], + "frameworks": ["React", "Fiber"], + "education": "образование кратко", + "industries": ["fintech", "ecommerce"], + "strengths": ["сильная сторона 1", "сильная сторона 2"], + "growth_areas": ["зона роста 1", "зона роста 2"], + "career_goals": "предположительные цели на основе опыта", + "recommended_tracks": ["рекомендуемый трек 1", "трек 2"], + "level": "junior|middle|senior|lead|expert", + "summary": "Краткая характеристика кандидата в 2-3 предложения" +}` + + var profile UserProfile + err := generateAndParse(ctx, llmClient, prompt, &profile, 2) + if err != nil { + return json.RawMessage("{}"), fmt.Errorf("profile extraction failed: %w", err) + } + + if profile.Level == "" { + profile.Level = inferLevel(profile.ExperienceYears) + } + if profile.Summary == "" { + profile.Summary = fmt.Sprintf("%s, %s, опыт %.0f лет", profile.Name, profile.CurrentRole, profile.ExperienceYears) + } + + result, err := json.Marshal(profile) + if err != nil { + return json.RawMessage("{}"), err + } + return result, nil +} + +func BuildProfileFromOnboarding(ctx context.Context, llmClient llm.Client, answers map[string]string) (json.RawMessage, error) { + answersJSON, _ := json.Marshal(answers) + + prompt := `Ты — методолог обучения. На основе ответов пользователя на онбординг-вопросы, построй профиль. 
+ +Ответы пользователя: +` + string(answersJSON) + ` + +Ответь строго JSON: +{ + "name": "", + "experience_years": 0, + "current_role": "", + "skills": [], + "programming_languages": [], + "frameworks": [], + "education": "", + "industries": [], + "strengths": [], + "growth_areas": [], + "career_goals": "", + "recommended_tracks": [], + "level": "beginner|junior|middle|senior", + "summary": "Краткая характеристика" +}` + + var profile UserProfile + if err := generateAndParse(ctx, llmClient, prompt, &profile, 2); err != nil { + return json.RawMessage("{}"), err + } + + if profile.Level == "" { + profile.Level = "beginner" + } + + result, _ := json.Marshal(profile) + return result, nil +} + +func inferLevel(years float64) string { + switch { + case years < 1: + return "beginner" + case years < 3: + return "junior" + case years < 5: + return "middle" + case years < 8: + return "senior" + default: + return "lead" + } +} + +func generateAndParse(ctx context.Context, llmClient llm.Client, prompt string, target interface{}, maxRetries int) error { + var lastErr error + for attempt := 0; attempt <= maxRetries; attempt++ { + attemptCtx, cancel := context.WithTimeout(ctx, 30*time.Second) + + result, err := llmClient.GenerateText(attemptCtx, llm.StreamRequest{ + Messages: []llm.Message{{Role: "user", Content: prompt}}, + }) + cancel() + + if err != nil { + lastErr = err + continue + } + + jsonStr := extractJSONBlock(result) + if err := json.Unmarshal([]byte(jsonStr), target); err != nil { + lastErr = fmt.Errorf("attempt %d: JSON parse error: %w", attempt, err) + continue + } + + return nil + } + return fmt.Errorf("all %d attempts failed: %w", maxRetries+1, lastErr) +} + +func extractJSONBlock(text string) string { + if strings.Contains(text, "```json") { + start := strings.Index(text, "```json") + 7 + end := strings.Index(text[start:], "```") + if end > 0 { + return strings.TrimSpace(text[start : start+end]) + } + } + if strings.Contains(text, "```") { + start := 
strings.Index(text, "```") + 3 + if nl := strings.Index(text[start:], "\n"); nl >= 0 { + start += nl + 1 + } + end := strings.Index(text[start:], "```") + if end > 0 { + candidate := strings.TrimSpace(text[start : start+end]) + if len(candidate) > 2 && candidate[0] == '{' { + return candidate + } + } + } + + depth := 0 + startIdx := -1 + for i, ch := range text { + if ch == '{' { + if depth == 0 { + startIdx = i + } + depth++ + } else if ch == '}' { + depth-- + if depth == 0 && startIdx >= 0 { + return text[startIdx : i+1] + } + } + } + + return "{}" +} diff --git a/backend/internal/learning/profile_builder_test.go b/backend/internal/learning/profile_builder_test.go new file mode 100644 index 0000000..7647593 --- /dev/null +++ b/backend/internal/learning/profile_builder_test.go @@ -0,0 +1,65 @@ +package learning + +import ( + "context" + "encoding/json" + "testing" + + "github.com/gooseek/backend/internal/llm" +) + +func TestInferLevel(t *testing.T) { + tests := []struct { + years float64 + want string + }{ + {0, "beginner"}, + {1.5, "junior"}, + {3.2, "middle"}, + {6.5, "senior"}, + {10, "lead"}, + } + + for _, tc := range tests { + got := inferLevel(tc.years) + if got != tc.want { + t.Fatalf("inferLevel(%v) = %q, want %q", tc.years, got, tc.want) + } + } +} + +func TestExtractJSONBlockFromMarkdown(t *testing.T) { + input := "text before\n```json\n{\"name\":\"Alex\",\"level\":\"junior\"}\n```\ntext after" + got := extractJSONBlock(input) + if got != "{\"name\":\"Alex\",\"level\":\"junior\"}" { + t.Fatalf("unexpected json block: %q", got) + } +} + +func TestBuildProfileFromOnboarding(t *testing.T) { + llmClient := &mockLLMClient{ + generateFunc: func(ctx context.Context, req llm.StreamRequest) (string, error) { + return 
`{"name":"Иван","experience_years":1.5,"current_role":"qa","skills":["testing"],"programming_languages":["Go"],"frameworks":[],"education":"BS","industries":["it"],"strengths":["аналитика"],"growth_areas":["backend"],"career_goals":"backend","recommended_tracks":["backend go"],"level":"junior","summary":"Начинающий специалист"}`, nil + }, + } + + profileJSON, err := BuildProfileFromOnboarding(context.Background(), llmClient, map[string]string{ + "experience_level": "junior", + "target_track": "backend go", + "weekly_hours": "10", + }) + if err != nil { + t.Fatalf("BuildProfileFromOnboarding error: %v", err) + } + + var profile map[string]interface{} + if err := json.Unmarshal(profileJSON, &profile); err != nil { + t.Fatalf("profile json unmarshal: %v", err) + } + if profile["name"] != "Иван" { + t.Fatalf("unexpected name: %v", profile["name"]) + } + if profile["level"] != "junior" { + t.Fatalf("unexpected level: %v", profile["level"]) + } +} diff --git a/backend/internal/learning/stepper.go b/backend/internal/learning/stepper.go index 1122ecf..fc661f7 100644 --- a/backend/internal/learning/stepper.go +++ b/backend/internal/learning/stepper.go @@ -4,7 +4,6 @@ import ( "context" "encoding/json" "fmt" - "regexp" "time" "github.com/gooseek/backend/internal/llm" @@ -680,12 +679,7 @@ func (l *StepByStepLesson) SubmitQuizAnswer(stepIndex int, selectedOptions []str } func extractJSON(text string) string { - re := regexp.MustCompile(`(?s)\{.*\}`) - match := re.FindString(text) - if match != "" { - return match - } - return "{}" + return extractJSONBlock(text) } func (l *StepByStepLesson) ToJSON() ([]byte, error) { diff --git a/backend/internal/medicine/service.go b/backend/internal/medicine/service.go new file mode 100644 index 0000000..ef3e9fc --- /dev/null +++ b/backend/internal/medicine/service.go @@ -0,0 +1,671 @@ +package medicine + +import ( + "bufio" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "sort" + "strings" + "time" + + 
"github.com/gooseek/backend/internal/llm" + "github.com/google/uuid" +) + +type ServiceConfig struct { + LLM llm.Client + SearXNGURL string + Timeout time.Duration +} + +type Service struct { + llm llm.Client + searxngURL string + httpClient *http.Client +} + +type ConsultRequest struct { + Symptoms string `json:"symptoms"` + City string `json:"city,omitempty"` + History [][2]string `json:"history,omitempty"` + Age int `json:"age,omitempty"` + Gender string `json:"gender,omitempty"` + ChatID string `json:"chatId,omitempty"` + Meta map[string]any `json:"meta,omitempty"` +} + +type ConditionItem struct { + Name string `json:"name"` + Likelihood string `json:"likelihood"` + Why string `json:"why"` +} + +type SpecialtyItem struct { + Specialty string `json:"specialty"` + Reason string `json:"reason"` + Priority string `json:"priority"` +} + +type MedicationInfo struct { + Name string `json:"name"` + ForWhat string `json:"forWhat"` + Notes string `json:"notes"` +} + +type SupplementInfo struct { + Name string `json:"name"` + Purpose string `json:"purpose"` + Evidence string `json:"evidence"` + Notes string `json:"notes"` +} + +type ProcedureInfo struct { + Name string `json:"name"` + Purpose string `json:"purpose"` + WhenUseful string `json:"whenUseful"` +} + +type Assessment struct { + TriageLevel string `json:"triageLevel"` + UrgentSigns []string `json:"urgentSigns"` + PossibleConditions []ConditionItem `json:"possibleConditions"` + RecommendedSpecialists []SpecialtyItem `json:"recommendedSpecialists"` + QuestionsToClarify []string `json:"questionsToClarify"` + HomeCare []string `json:"homeCare"` + MedicationInfo []MedicationInfo `json:"medicationInfo"` + SupplementInfo []SupplementInfo `json:"supplementInfo"` + ProcedureInfo []ProcedureInfo `json:"procedureInfo"` + Disclaimer string `json:"disclaimer"` +} + +type DoctorOption struct { + ID string `json:"id"` + Name string `json:"name"` + Specialty string `json:"specialty"` + Clinic string `json:"clinic"` + City 
string `json:"city"` + Address string `json:"address,omitempty"` + SourceURL string `json:"sourceUrl"` + SourceName string `json:"sourceName"` + Snippet string `json:"snippet,omitempty"` +} + +type AppointmentOption struct { + ID string `json:"id"` + DoctorID string `json:"doctorId"` + Doctor string `json:"doctor"` + Specialty string `json:"specialty"` + StartsAt string `json:"startsAt"` + EndsAt string `json:"endsAt"` + Clinic string `json:"clinic"` + BookURL string `json:"bookUrl"` + Remote bool `json:"remote"` +} + +type searxResponse struct { + Results []struct { + Title string `json:"title"` + URL string `json:"url"` + Content string `json:"content"` + Engine string `json:"engine"` + } `json:"results"` +} + +func NewService(cfg ServiceConfig) *Service { + timeout := cfg.Timeout + if timeout <= 0 { + timeout = 20 * time.Second + } + return &Service{ + llm: cfg.LLM, + searxngURL: strings.TrimSuffix(cfg.SearXNGURL, "/"), + httpClient: &http.Client{Timeout: timeout}, + } +} + +func (s *Service) StreamConsult(ctx context.Context, req ConsultRequest, writer io.Writer) error { + writeEvent := func(eventType string, data any) { + payload := map[string]any{"type": eventType} + if data != nil { + payload["data"] = data + } + encoded, _ := json.Marshal(payload) + _, _ = writer.Write(encoded) + _, _ = writer.Write([]byte("\n")) + if bw, ok := writer.(*bufio.Writer); ok { + _ = bw.Flush() + } + } + + writeBlock := func(blockID, blockType string, data any) { + event := map[string]any{ + "type": "block", + "block": map[string]any{ + "id": blockID, + "type": blockType, + "data": data, + }, + } + encoded, _ := json.Marshal(event) + _, _ = writer.Write(encoded) + _, _ = writer.Write([]byte("\n")) + if bw, ok := writer.(*bufio.Writer); ok { + _ = bw.Flush() + } + } + + writeEvent("messageStart", nil) + + assessment, err := s.buildAssessment(ctx, req) + if err != nil { + return err + } + + city := strings.TrimSpace(req.City) + if city == "" { + city = "Москва" + } + doctors := 
s.searchDoctors(ctx, assessment.RecommendedSpecialists, city) + bookingLinks := buildBookingLinks(doctors) + + summary := buildSummaryText(req.Symptoms, city, assessment, doctors, bookingLinks) + streamText(summary, writeEvent) + + writeBlock(uuid.NewString(), "widget", map[string]any{ + "widgetType": "medicine_assessment", + "params": map[string]any{ + "triageLevel": assessment.TriageLevel, + "urgentSigns": assessment.UrgentSigns, + "possibleConditions": assessment.PossibleConditions, + "recommendedSpecialists": assessment.RecommendedSpecialists, + "questionsToClarify": assessment.QuestionsToClarify, + "homeCare": assessment.HomeCare, + "disclaimer": assessment.Disclaimer, + }, + }) + + writeBlock(uuid.NewString(), "widget", map[string]any{ + "widgetType": "medicine_doctors", + "params": map[string]any{ + "city": city, + "doctors": doctors, + "specialists": assessment.RecommendedSpecialists, + }, + }) + + writeBlock(uuid.NewString(), "widget", map[string]any{ + "widgetType": "medicine_appointments", + "params": map[string]any{ + "bookingLinks": bookingLinks, + }, + }) + + writeBlock(uuid.NewString(), "widget", map[string]any{ + "widgetType": "medicine_reference", + "params": map[string]any{ + "medicationInfo": assessment.MedicationInfo, + "supplementInfo": assessment.SupplementInfo, + "procedureInfo": assessment.ProcedureInfo, + "note": "Справочная информация. 
Назначения и схемы лечения определяет только врач после очного осмотра.", + }, + }) + + writeEvent("messageEnd", nil) + return nil +} + +func streamText(text string, writeEvent func(string, any)) { + chunks := splitTextByChunks(text, 120) + for _, chunk := range chunks { + writeEvent("textChunk", map[string]any{ + "chunk": chunk, + }) + } +} + +func splitTextByChunks(text string, size int) []string { + if len(text) <= size { + return []string{text} + } + parts := make([]string, 0, len(text)/size+1) + runes := []rune(text) + for i := 0; i < len(runes); i += size { + end := i + size + if end > len(runes) { + end = len(runes) + } + parts = append(parts, string(runes[i:end])) + } + return parts +} + +func (s *Service) buildAssessment(ctx context.Context, req ConsultRequest) (*Assessment, error) { + if s.llm == nil { + return buildFallbackAssessment(req.Symptoms), nil + } + + historyContext := "" + if len(req.History) > 0 { + var hb strings.Builder + hb.WriteString("\nИстория диалога:\n") + for _, pair := range req.History { + hb.WriteString(fmt.Sprintf("Пациент: %s\nВрач: %s\n", pair[0], pair[1])) + } + historyContext = hb.String() + } + + ageInfo := "не указан" + if req.Age > 0 { + ageInfo = fmt.Sprintf("%d", req.Age) + } + genderInfo := "не указан" + if req.Gender != "" { + genderInfo = req.Gender + } + + prompt := fmt.Sprintf(`Ты опытный врач-терапевт, работающий в системе GooSeek. Веди себя как настоящий доктор на приёме. + +ПРАВИЛА: +1. Дай ДИФФЕРЕНЦИАЛЬНУЮ оценку — перечисли вероятные состояния с обоснованием, от наиболее вероятного к менее. +2. Для каждого состояния укажи likelihood (low/medium/high) и подробное "why" — почему именно эти симптомы указывают на это. +3. Подбери конкретных специалистов с чёткой причиной направления. +4. НЕ назначай таблетки, дозировки, схемы лечения. Только справочная информация: "для чего применяется" и "при каких состояниях назначают". +5. Дай конкретные рекомендации по домашнему уходу до визита к врачу. +6. 
Укажи красные флаги — при каких симптомах вызывать скорую немедленно. +7. Задай уточняющие вопросы, которые помогут сузить диф-диагноз. + +Симптомы пациента: +%s +%s +Возраст: %s +Пол: %s + +Верни строго JSON (без markdown-обёрток): +{ + "triageLevel": "low|medium|high|emergency", + "urgentSigns": ["конкретный симптом при котором вызывать 103"], + "possibleConditions": [{"name":"Название", "likelihood":"low|medium|high", "why":"Подробное обоснование на основе симптомов"}], + "recommendedSpecialists": [{"specialty":"Название специальности", "reason":"Почему именно этот врач", "priority":"high|normal"}], + "questionsToClarify": ["Конкретный вопрос пациенту"], + "homeCare": ["Конкретная рекомендация что делать дома до визита"], + "medicationInfo": [{"name":"Название", "forWhat":"При каких состояниях применяется", "notes":"Важные особенности"}], + "supplementInfo": [{"name":"Название", "purpose":"Для чего", "evidence":"low|medium|high", "notes":"Примечания"}], + "procedureInfo": [{"name":"Название обследования/процедуры", "purpose":"Что покажет/зачем", "whenUseful":"В каких случаях назначают"}], + "disclaimer": "..." +}`, req.Symptoms, historyContext, ageInfo, genderInfo) + + resp, err := s.llm.GenerateText(ctx, llm.StreamRequest{ + Messages: []llm.Message{ + {Role: llm.RoleSystem, Content: "Ты опытный врач-диагност. Отвечай на русском. Только валидный JSON. 
Никаких назначений лекарств и дозировок — только справочная информация."}, + {Role: llm.RoleUser, Content: prompt}, + }, + Options: llm.StreamOptions{ + Temperature: 0.3, + MaxTokens: 2800, + }, + }) + if err != nil { + return buildFallbackAssessment(req.Symptoms), nil + } + + jsonBlock := extractJSONBlock(resp) + if jsonBlock == "" { + return buildFallbackAssessment(req.Symptoms), nil + } + + var result Assessment + if err := json.Unmarshal([]byte(jsonBlock), &result); err != nil { + return buildFallbackAssessment(req.Symptoms), nil + } + + normalizeAssessment(&result) + return &result, nil +} + +func normalizeAssessment(a *Assessment) { + if a.TriageLevel == "" { + a.TriageLevel = "medium" + } + if a.Disclaimer == "" { + a.Disclaimer = "Информация носит справочный характер и не заменяет очный осмотр врача." + } + if len(a.RecommendedSpecialists) == 0 { + a.RecommendedSpecialists = []SpecialtyItem{ + {Specialty: "Терапевт", Reason: "Первичный очный осмотр и маршрутизация", Priority: "high"}, + } + } +} + +func (s *Service) searchDoctors(ctx context.Context, specialists []SpecialtyItem, city string) []DoctorOption { + if s.searxngURL == "" { + return fallbackDoctors(specialists, city) + } + + unique := make(map[string]struct{}) + out := make([]DoctorOption, 0, 9) + + for _, sp := range specialists { + if strings.TrimSpace(sp.Specialty) == "" { + continue + } + query := fmt.Sprintf("%s %s запись на прием", sp.Specialty, city) + results, err := s.searchWeb(ctx, query) + if err != nil { + continue + } + for _, r := range results { + key := r.URL + "|" + sp.Specialty + if _, ok := unique[key]; ok { + continue + } + unique[key] = struct{}{} + clinic := extractClinicName(r.Title) + out = append(out, DoctorOption{ + ID: uuid.NewString(), + Name: fmt.Sprintf("%s (%s)", sp.Specialty, clinic), + Specialty: sp.Specialty, + Clinic: clinic, + City: city, + SourceURL: r.URL, + SourceName: sourceNameFromURL(r.URL), + Snippet: trimText(r.Content, 220), + }) + if len(out) >= 12 { + 
sortDoctors(out) + return out[:12] + } + } + } + + if len(out) == 0 { + return fallbackDoctors(specialists, city) + } + sortDoctors(out) + return out +} + +func (s *Service) searchWeb(ctx context.Context, query string) ([]struct { + Title string + URL string + Content string +}, error) { + values := url.Values{} + values.Set("q", query) + values.Set("format", "json") + values.Set("language", "ru-RU") + values.Set("safesearch", "1") + + reqURL := s.searxngURL + "/search?" + values.Encode() + req, err := http.NewRequestWithContext(ctx, http.MethodGet, reqURL, nil) + if err != nil { + return nil, err + } + + resp, err := s.httpClient.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + if resp.StatusCode >= 300 { + return nil, fmt.Errorf("search status %d", resp.StatusCode) + } + + var parsed searxResponse + if err := json.NewDecoder(resp.Body).Decode(&parsed); err != nil { + return nil, err + } + + items := make([]struct { + Title string + URL string + Content string + }, 0, len(parsed.Results)) + for _, r := range parsed.Results { + if r.URL == "" || r.Title == "" { + continue + } + items = append(items, struct { + Title string + URL string + Content string + }{ + Title: r.Title, + URL: r.URL, + Content: r.Content, + }) + if len(items) >= 5 { + break + } + } + return items, nil +} + +func buildFallbackAssessment(symptoms string) *Assessment { + base := &Assessment{ + TriageLevel: "medium", + UrgentSigns: []string{ + "резкая боль в груди", "затруднение дыхания", "потеря сознания", "кровотечение", + }, + PossibleConditions: []ConditionItem{ + {Name: "ОРВИ/вирусная инфекция", Likelihood: "medium", Why: "Часто проявляется общими симптомами и слабостью"}, + {Name: "Воспалительный процесс", Likelihood: "low", Why: "Требует очной диагностики и анализа"}, + }, + RecommendedSpecialists: []SpecialtyItem{ + {Specialty: "Терапевт", Reason: "Первичный осмотр и назначение базовой диагностики", Priority: "high"}, + }, + QuestionsToClarify: []string{ + 
"Когда начались симптомы?", + "Есть ли температура и как меняется в течение дня?", + "Есть ли хронические заболевания и аллергии?", + }, + HomeCare: []string{ + "Контролируйте температуру и самочувствие каждые 6-8 часов", + "Поддерживайте питьевой режим", + "При ухудшении состояния обращайтесь в неотложную помощь", + }, + MedicationInfo: []MedicationInfo{ + {Name: "Парацетамол", ForWhat: "Снижение температуры и облегчение боли", Notes: "Только общая справка, дозировку определяет врач"}, + }, + SupplementInfo: []SupplementInfo{ + {Name: "Витамин D", Purpose: "Поддержка общего метаболизма", Evidence: "medium", Notes: "Эффективность зависит от дефицита по анализам"}, + }, + ProcedureInfo: []ProcedureInfo{ + {Name: "Общий анализ крови", Purpose: "Оценка воспалительного ответа", WhenUseful: "При сохраняющихся симптомах более 2-3 дней"}, + }, + Disclaimer: "Информация носит справочный характер и не заменяет консультацию врача.", + } + + lowered := strings.ToLower(symptoms) + if strings.Contains(lowered, "груд") || strings.Contains(lowered, "дыш") || strings.Contains(lowered, "онем") { + base.TriageLevel = "high" + base.RecommendedSpecialists = append(base.RecommendedSpecialists, + SpecialtyItem{Specialty: "Кардиолог", Reason: "Исключение кардиологических причин", Priority: "high"}, + ) + } + if strings.Contains(lowered, "живот") || strings.Contains(lowered, "тошн") { + base.RecommendedSpecialists = append(base.RecommendedSpecialists, + SpecialtyItem{Specialty: "Гастроэнтеролог", Reason: "Оценка ЖКТ-симптомов", Priority: "normal"}, + ) + } + return base +} + +func fallbackDoctors(specialists []SpecialtyItem, city string) []DoctorOption { + if len(specialists) == 0 { + specialists = []SpecialtyItem{{Specialty: "Терапевт"}} + } + out := make([]DoctorOption, 0, len(specialists)) + for i, sp := range specialists { + out = append(out, DoctorOption{ + ID: uuid.NewString(), + Name: fmt.Sprintf("%s, приём онлайн/очно", sp.Specialty), + Specialty: sp.Specialty, + Clinic: 
"Проверенные клиники", + City: city, + SourceURL: fmt.Sprintf("https://yandex.ru/search/?text=%s+%s+запись", url.QueryEscape(sp.Specialty), url.QueryEscape(city)), + SourceName: "yandex", + Snippet: "Подбор по агрегаторам клиник и медицинских центров.", + }) + if i >= 5 { + break + } + } + return out +} + +func buildBookingLinks(doctors []DoctorOption) []AppointmentOption { + out := make([]AppointmentOption, 0, len(doctors)) + for _, d := range doctors { + if d.SourceURL == "" { + continue + } + out = append(out, AppointmentOption{ + ID: uuid.NewString(), + DoctorID: d.ID, + Doctor: d.Name, + Specialty: d.Specialty, + Clinic: d.Clinic, + BookURL: d.SourceURL, + Remote: strings.Contains(strings.ToLower(d.Snippet), "онлайн"), + }) + } + return out +} + +func buildSummaryText(symptoms, city string, assessment *Assessment, doctors []DoctorOption, bookings []AppointmentOption) string { + var b strings.Builder + b.WriteString("### Медицинская навигация\n\n") + + triageEmoji := map[string]string{"low": "🟢", "medium": "🟡", "high": "🟠", "emergency": "🔴"} + emoji := triageEmoji[assessment.TriageLevel] + if emoji == "" { + emoji = "🟡" + } + b.WriteString(fmt.Sprintf("%s **Приоритет: %s**\n\n", emoji, strings.ToUpper(assessment.TriageLevel))) + + if assessment.TriageLevel == "emergency" || assessment.TriageLevel == "high" { + b.WriteString("⚠️ **Рекомендуется срочное обращение к врачу.**\n\n") + } + + if len(assessment.PossibleConditions) > 0 { + b.WriteString("**Вероятные состояния:**\n") + for _, c := range assessment.PossibleConditions { + likelihood := map[string]string{"low": "маловероятно", "medium": "возможно", "high": "вероятно"} + lbl := likelihood[c.Likelihood] + if lbl == "" { + lbl = c.Likelihood + } + b.WriteString(fmt.Sprintf("- **%s** (%s) — %s\n", c.Name, lbl, c.Why)) + } + b.WriteString("\n") + } + + if len(assessment.RecommendedSpecialists) > 0 { + b.WriteString("**К кому обратиться:**\n") + for _, sp := range assessment.RecommendedSpecialists { + prio := "" 
+ if sp.Priority == "high" { + prio = " ⚡" + } + b.WriteString(fmt.Sprintf("- **%s**%s — %s\n", sp.Specialty, prio, sp.Reason)) + } + b.WriteString("\n") + } + + if len(assessment.QuestionsToClarify) > 0 { + b.WriteString("**Уточните для более точной оценки:**\n") + for _, q := range assessment.QuestionsToClarify { + b.WriteString(fmt.Sprintf("- %s\n", q)) + } + b.WriteString("\n") + } + + if len(doctors) > 0 { + b.WriteString(fmt.Sprintf("Найдено **%d** вариантов записи в городе **%s**. ", len(doctors), city)) + b.WriteString("Подробности — на панели справа.\n\n") + } + + if len(assessment.UrgentSigns) > 0 { + b.WriteString("🚨 **При появлении:** ") + b.WriteString(strings.Join(assessment.UrgentSigns[:min(3, len(assessment.UrgentSigns))], ", ")) + b.WriteString(" — **немедленно вызывайте 103/112.**\n\n") + } + + b.WriteString("---\n") + b.WriteString("*Информация носит справочный характер. Таблетки и схемы лечения не назначаются.*\n") + return b.String() +} + +func extractJSONBlock(text string) string { + if text == "" { + return "" + } + if start := strings.Index(text, "```json"); start >= 0 { + start += len("```json") + if end := strings.Index(text[start:], "```"); end >= 0 { + return strings.TrimSpace(text[start : start+end]) + } + } + if start := strings.Index(text, "{"); start >= 0 { + if end := strings.LastIndex(text, "}"); end > start { + return strings.TrimSpace(text[start : end+1]) + } + } + return "" +} + +func extractClinicName(title string) string { + trimmed := strings.TrimSpace(title) + if trimmed == "" { + return "Клиника" + } + for _, sep := range []string{" - ", " | ", " — "} { + if idx := strings.Index(trimmed, sep); idx > 0 { + return strings.TrimSpace(trimmed[:idx]) + } + } + return trimText(trimmed, 56) +} + +func sourceNameFromURL(raw string) string { + u, err := url.Parse(raw) + if err != nil { + return "web" + } + host := strings.TrimPrefix(u.Hostname(), "www.") + if host == "" { + return "web" + } + return host +} + +func trimText(v string, 
max int) string { + r := []rune(strings.TrimSpace(v)) + if len(r) <= max { + return string(r) + } + return string(r[:max]) + "..." +} + +func sortDoctors(items []DoctorOption) { + sort.SliceStable(items, func(i, j int) bool { + a := strings.ToLower(items[i].SourceName) + b := strings.ToLower(items[j].SourceName) + if a == b { + return strings.ToLower(items[i].Name) < strings.ToLower(items[j].Name) + } + return a < b + }) +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} diff --git a/backend/internal/travel/service.go b/backend/internal/travel/service.go index 8b31aea..ba72eca 100644 --- a/backend/internal/travel/service.go +++ b/backend/internal/travel/service.go @@ -628,6 +628,141 @@ func (s *Service) BuildRouteFromPoints(ctx context.Context, trip *Trip) (*RouteD return s.openRoute.GetDirections(ctx, points, "driving-car") } +// ValidateItineraryRequest is the input for itinerary validation. +type ValidateItineraryRequest struct { + Days []ValidateDay `json:"days"` + POIs []ValidatePOI `json:"pois,omitempty"` + Events []ValidateEvent `json:"events,omitempty"` +} + +type ValidateDay struct { + Date string `json:"date"` + Items []ValidateItem `json:"items"` +} + +type ValidateItem struct { + RefType string `json:"refType"` + RefID string `json:"refId"` + Title string `json:"title"` + StartTime string `json:"startTime,omitempty"` + EndTime string `json:"endTime,omitempty"` + Lat float64 `json:"lat"` + Lng float64 `json:"lng"` + Note string `json:"note,omitempty"` + Cost float64 `json:"cost,omitempty"` + Currency string `json:"currency,omitempty"` +} + +type ValidatePOI struct { + ID string `json:"id"` + Name string `json:"name"` + Category string `json:"category"` + Schedule map[string]string `json:"schedule,omitempty"` + Lat float64 `json:"lat"` + Lng float64 `json:"lng"` +} + +type ValidateEvent struct { + ID string `json:"id"` + Title string `json:"title"` + DateStart string `json:"dateStart,omitempty"` + DateEnd string 
`json:"dateEnd,omitempty"` + Lat float64 `json:"lat"` + Lng float64 `json:"lng"` +} + +type ValidationWarning struct { + DayIdx int `json:"dayIdx"` + ItemIdx int `json:"itemIdx,omitempty"` + Message string `json:"message"` +} + +type ValidateItineraryResponse struct { + Valid bool `json:"valid"` + Warnings []ValidationWarning `json:"warnings"` + Suggestions []ValidationWarning `json:"suggestions"` +} + +func (s *Service) ValidateItinerary(ctx context.Context, req ValidateItineraryRequest) (*ValidateItineraryResponse, error) { + if s.llmClient == nil { + return &ValidateItineraryResponse{Valid: true, Warnings: []ValidationWarning{}, Suggestions: []ValidationWarning{}}, nil + } + + daysJSON, _ := json.Marshal(req.Days) + poisJSON, _ := json.Marshal(req.POIs) + eventsJSON, _ := json.Marshal(req.Events) + + prompt := fmt.Sprintf(`Проверь маршрут путешествия на логистические ошибки и предложи улучшения. + +Маршрут по дням: %s + +Доступные POI (с расписанием): %s + +Доступные события (с датами): %s + +Проверь: +1. Логистику: нет ли точек в разных концах города подряд без достаточного времени на переезд +2. Расписание: если POI имеет schedule и стоит в день когда закрыт — это ошибка +3. Даты событий: если событие стоит в день вне его dateStart-dateEnd — это ошибка +4. Реалистичность: не слишком ли много активностей в день (>6 основных) +5. Время: нет ли пересечений по времени + +Верни ТОЛЬКО JSON: +{ + "valid": true/false, + "warnings": [{"dayIdx": 0, "itemIdx": 2, "message": "причина"}], + "suggestions": [{"dayIdx": 0, "message": "рекомендация"}] +} + +Если всё хорошо — warnings пустой массив, valid=true. 
Suggestions — необязательные рекомендации.`, string(daysJSON), string(poisJSON), string(eventsJSON)) + + var fullResponse strings.Builder + err := s.llmClient.StreamChat(ctx, []ChatMessage{ + {Role: "user", Content: prompt}, + }, func(chunk string) { + fullResponse.WriteString(chunk) + }) + if err != nil { + return nil, fmt.Errorf("LLM validation failed: %w", err) + } + + responseText := fullResponse.String() + + jsonStart := strings.Index(responseText, "{") + jsonEnd := strings.LastIndex(responseText, "}") + if jsonStart < 0 || jsonEnd < 0 || jsonEnd <= jsonStart { + return &ValidateItineraryResponse{ + Valid: false, + Warnings: []ValidationWarning{{ + DayIdx: 0, + Message: "Не удалось проверить маршрут — повторите попытку", + }}, + Suggestions: []ValidationWarning{}, + }, nil + } + + var result ValidateItineraryResponse + if err := json.Unmarshal([]byte(responseText[jsonStart:jsonEnd+1]), &result); err != nil { + return &ValidateItineraryResponse{ + Valid: false, + Warnings: []ValidationWarning{{ + DayIdx: 0, + Message: "Ошибка анализа маршрута — попробуйте ещё раз", + }}, + Suggestions: []ValidationWarning{}, + }, nil + } + + if result.Warnings == nil { + result.Warnings = []ValidationWarning{} + } + if result.Suggestions == nil { + result.Suggestions = []ValidationWarning{} + } + + return &result, nil +} + func (s *Service) EnrichTripWithAI(ctx context.Context, trip *Trip) error { if len(trip.Route) == 0 { return nil diff --git a/backend/pkg/config/config.go b/backend/pkg/config/config.go index e61bbe4..a525449 100644 --- a/backend/pkg/config/config.go +++ b/backend/pkg/config/config.go @@ -46,6 +46,8 @@ type Config struct { FinanceHeatmapURL string LearningSvcURL string TravelSvcURL string + SandboxSvcURL string + MedicineSvcURL string // TravelPayouts TravelPayoutsToken string @@ -57,6 +59,7 @@ type Config struct { MinioSecretKey string MinioBucket string MinioUseSSL bool + MinioPublicURL string // Auth JWTSecret string @@ -130,15 +133,18 @@ func Load() 
(*Config, error) { FinanceHeatmapURL: getEnv("FINANCE_HEATMAP_SVC_URL", "http://localhost:3033"), LearningSvcURL: getEnv("LEARNING_SVC_URL", "http://localhost:3034"), TravelSvcURL: getEnv("TRAVEL_SVC_URL", "http://localhost:3035"), + SandboxSvcURL: getEnv("SANDBOX_SVC_URL", "http://localhost:3036"), + MedicineSvcURL: getEnv("MEDICINE_SVC_URL", "http://localhost:3037"), TravelPayoutsToken: getEnv("TRAVELPAYOUTS_TOKEN", ""), TravelPayoutsMarker: getEnv("TRAVELPAYOUTS_MARKER", ""), - MinioEndpoint: getEnv("MINIO_ENDPOINT", "minio:9000"), - MinioAccessKey: getEnv("MINIO_ACCESS_KEY", "minioadmin"), - MinioSecretKey: getEnv("MINIO_SECRET_KEY", "minioadmin"), - MinioBucket: getEnv("MINIO_BUCKET", "gooseek"), - MinioUseSSL: getEnv("MINIO_USE_SSL", "false") == "true", + MinioEndpoint: getEnv("MINIO_ENDPOINT", getEnv("S3_ENDPOINT", "minio:9000")), + MinioAccessKey: getEnv("MINIO_ACCESS_KEY", getEnv("S3_ACCESS_KEY", "minioadmin")), + MinioSecretKey: getEnv("MINIO_SECRET_KEY", getEnv("S3_SECRET_KEY", "minioadmin")), + MinioBucket: getEnv("MINIO_BUCKET", getEnv("S3_BUCKET", "gooseek")), + MinioUseSSL: getEnv("MINIO_USE_SSL", getEnv("S3_USE_SSL", "false")) == "true", + MinioPublicURL: getEnv("MINIO_PUBLIC_URL", getEnv("S3_PUBLIC_URL", "")), JWTSecret: getEnv("JWT_SECRET", ""), AuthSvcURL: getEnv("AUTH_SVC_URL", ""), diff --git a/backend/pkg/storage/minio.go b/backend/pkg/storage/minio.go index bcbacb2..bc91e52 100644 --- a/backend/pkg/storage/minio.go +++ b/backend/pkg/storage/minio.go @@ -20,11 +20,13 @@ type MinioConfig struct { SecretKey string Bucket string UseSSL bool + PublicURL string } type MinioStorage struct { - client *minio.Client - bucket string + client *minio.Client + bucket string + publicURL string } type UploadResult struct { @@ -57,11 +59,23 @@ func NewMinioStorage(cfg MinioConfig) (*MinioStorage, error) { } return &MinioStorage{ - client: client, - bucket: cfg.Bucket, + client: client, + bucket: cfg.Bucket, + publicURL: strings.TrimRight(cfg.PublicURL, "/"), 
}, nil } +func (s *MinioStorage) GetPublicURL(key string) string { + if s.publicURL != "" { + return s.publicURL + "/" + s.bucket + "/" + key + } + return "" +} + +func (s *MinioStorage) Bucket() string { + return s.bucket +} + func (s *MinioStorage) Upload(ctx context.Context, reader io.Reader, size int64, filename, contentType string) (*UploadResult, error) { ext := filepath.Ext(filename) key := generateStorageKey(ext) @@ -83,6 +97,25 @@ func (s *MinioStorage) Upload(ctx context.Context, reader io.Reader, size int64, }, nil } +func (s *MinioStorage) UploadWithKey(ctx context.Context, key string, reader io.Reader, size int64, contentType string) (*UploadResult, error) { + opts := minio.PutObjectOptions{ + ContentType: contentType, + CacheControl: "public, max-age=2592000", + } + + info, err := s.client.PutObject(ctx, s.bucket, key, reader, size, opts) + if err != nil { + return nil, fmt.Errorf("failed to upload file: %w", err) + } + + return &UploadResult{ + Key: key, + Bucket: s.bucket, + Size: info.Size, + ETag: info.ETag, + }, nil +} + func (s *MinioStorage) UploadUserFile(ctx context.Context, userID string, reader io.Reader, size int64, filename, contentType string) (*UploadResult, error) { ext := filepath.Ext(filename) key := fmt.Sprintf("users/%s/%s%s", userID, uuid.New().String(), ext) diff --git a/backend/webui/.env.example b/backend/webui/.env.example deleted file mode 100644 index 00cf3c0..0000000 --- a/backend/webui/.env.example +++ /dev/null @@ -1,16 +0,0 @@ -# GooSeek WebUI Configuration - -# API Gateway URL (internal Docker network) -API_URL=http://api-gateway:3015 - -# Public API URL (for browser requests) -NEXT_PUBLIC_API_URL= - -# ============================================ -# === MENU VISIBILITY === -# ============================================ - -# Отключённые маршруты (через запятую) -# Страницы в разработке можно скрыть из меню -# Пример: /travel,/medicine,/finance,/learning -NEXT_PUBLIC_DISABLED_ROUTES= diff --git 
a/backend/webui/Dockerfile b/backend/webui/Dockerfile index 3d46ca7..32951ae 100644 --- a/backend/webui/Dockerfile +++ b/backend/webui/Dockerfile @@ -15,9 +15,11 @@ COPY . . ENV NEXT_TELEMETRY_DISABLED=1 ENV NODE_ENV=production -# Build-time переменные для Next.js (NEXT_PUBLIC_* должны быть доступны во время сборки) -ARG NEXT_PUBLIC_DISABLED_ROUTES -ENV NEXT_PUBLIC_DISABLED_ROUTES=${NEXT_PUBLIC_DISABLED_ROUTES} +ARG NEXT_PUBLIC_ENABLED_ROUTES +ENV NEXT_PUBLIC_ENABLED_ROUTES=${NEXT_PUBLIC_ENABLED_ROUTES} + +ARG NEXT_PUBLIC_TWOGIS_API_KEY +ENV NEXT_PUBLIC_TWOGIS_API_KEY=${NEXT_PUBLIC_TWOGIS_API_KEY} RUN npm run build diff --git a/backend/webui/next.config.mjs b/backend/webui/next.config.mjs index 9dda544..6fa4c9c 100644 --- a/backend/webui/next.config.mjs +++ b/backend/webui/next.config.mjs @@ -5,6 +5,7 @@ const nextConfig = { env: { API_URL: process.env.API_URL || 'http://localhost:3015', NEXT_PUBLIC_TWOGIS_API_KEY: process.env.NEXT_PUBLIC_TWOGIS_API_KEY || process.env.TWOGIS_API_KEY || '', + NEXT_PUBLIC_ENABLED_ROUTES: process.env.NEXT_PUBLIC_ENABLED_ROUTES || '', }, }; diff --git a/backend/webui/package-lock.json b/backend/webui/package-lock.json index dd73fc9..082dabd 100644 --- a/backend/webui/package-lock.json +++ b/backend/webui/package-lock.json @@ -9,6 +9,10 @@ "version": "1.0.0", "dependencies": { "@2gis/mapgl": "^1.70.1", + "@dnd-kit/core": "^6.3.1", + "@dnd-kit/sortable": "^10.0.0", + "@dnd-kit/utilities": "^3.2.2", + "@monaco-editor/react": "^4.7.0", "@radix-ui/react-avatar": "^1.1.11", "@radix-ui/react-dialog": "^1.1.15", "@radix-ui/react-dropdown-menu": "^2.1.16", @@ -21,7 +25,7 @@ "framer-motion": "^12.34.3", "leaflet": "^1.9.4", "lucide-react": "^0.454.0", - "next": "^14.2.26", + "next": "^14.2.35", "react": "^18.3.1", "react-dom": "^18.3.1", "react-leaflet": "^4.2.1", @@ -64,6 +68,59 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/@dnd-kit/accessibility": { + "version": "3.1.1", + "resolved": 
"https://registry.npmjs.org/@dnd-kit/accessibility/-/accessibility-3.1.1.tgz", + "integrity": "sha512-2P+YgaXF+gRsIihwwY1gCsQSYnu9Zyj2py8kY5fFvUM1qm2WA2u639R6YNVfU4GWr+ZM5mqEsfHZZLoRONbemw==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/core": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/@dnd-kit/core/-/core-6.3.1.tgz", + "integrity": "sha512-xkGBRQQab4RLwgXxoqETICr6S5JlogafbhNsidmrkVv2YRs5MLwpjoF2qpiGjQt8S9AoxtIV603s0GIUpY5eYQ==", + "license": "MIT", + "dependencies": { + "@dnd-kit/accessibility": "^3.1.1", + "@dnd-kit/utilities": "^3.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/sortable": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/@dnd-kit/sortable/-/sortable-10.0.0.tgz", + "integrity": "sha512-+xqhmIIzvAYMGfBYYnbKuNicfSsk4RksY2XdmJhT+HAC01nix6fHCztU68jooFiMUB01Ky3F0FyOvhG/BZrWkg==", + "license": "MIT", + "dependencies": { + "@dnd-kit/utilities": "^3.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "@dnd-kit/core": "^6.3.0", + "react": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/utilities": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@dnd-kit/utilities/-/utilities-3.2.2.tgz", + "integrity": "sha512-+MKAJEOfaBe5SmV6t34p80MMKhjvUz0vRrvVJbPT0WElzaOJ/1xs+D+KDv+tD/NE5ujfrChEcshd4fLn0wpiqg==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, "node_modules/@floating-ui/core": { "version": "1.7.4", "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.4.tgz", @@ -141,16 +198,39 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@monaco-editor/loader": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.7.0.tgz", + "integrity": 
"sha512-gIwR1HrJrrx+vfyOhYmCZ0/JcWqG5kbfG7+d3f/C1LXk2EvzAbHSg3MQ5lO2sMlo9izoAZ04shohfKLVT6crVA==", + "license": "MIT", + "dependencies": { + "state-local": "^1.0.6" + } + }, + "node_modules/@monaco-editor/react": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@monaco-editor/react/-/react-4.7.0.tgz", + "integrity": "sha512-cyzXQCtO47ydzxpQtCGSQGOC8Gk3ZUeBXFAxD+CWXYFo5OqZyZUonFl0DwUlTyAfRHntBfw2p3w4s9R6oe1eCA==", + "license": "MIT", + "dependencies": { + "@monaco-editor/loader": "^1.5.0" + }, + "peerDependencies": { + "monaco-editor": ">= 0.25.0 < 1", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, "node_modules/@next/env": { - "version": "14.2.26", - "resolved": "https://registry.npmjs.org/@next/env/-/env-14.2.26.tgz", - "integrity": "sha512-vO//GJ/YBco+H7xdQhzJxF7ub3SUwft76jwaeOyVVQFHCi5DCnkP16WHB+JBylo4vOKPoZBlR94Z8xBxNBdNJA==", + "version": "14.2.35", + "resolved": "https://registry.npmjs.org/@next/env/-/env-14.2.35.tgz", + "integrity": "sha512-DuhvCtj4t9Gwrx80dmz2F4t/zKQ4ktN8WrMwOuVzkJfBilwAwGr6v16M5eI8yCuZ63H9TTuEU09Iu2HqkzFPVQ==", "license": "MIT" }, "node_modules/@next/swc-darwin-arm64": { - "version": "14.2.26", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.2.26.tgz", - "integrity": "sha512-zDJY8gsKEseGAxG+C2hTMT0w9Nk9N1Sk1qV7vXYz9MEiyRoF5ogQX2+vplyUMIfygnjn9/A04I6yrUTRTuRiyQ==", + "version": "14.2.33", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.2.33.tgz", + "integrity": "sha512-HqYnb6pxlsshoSTubdXKu15g3iivcbsMXg4bYpjL2iS/V6aQot+iyF4BUc2qA/J/n55YtvE4PHMKWBKGCF/+wA==", "cpu": [ "arm64" ], @@ -164,9 +244,9 @@ } }, "node_modules/@next/swc-darwin-x64": { - "version": "14.2.26", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.2.26.tgz", - "integrity": "sha512-U0adH5ryLfmTDkahLwG9sUQG2L0a9rYux8crQeC92rPhi3jGQEY47nByQHrVrt3prZigadwj/2HZ1LUUimuSbg==", + 
"version": "14.2.33", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.2.33.tgz", + "integrity": "sha512-8HGBeAE5rX3jzKvF593XTTFg3gxeU4f+UWnswa6JPhzaR6+zblO5+fjltJWIZc4aUalqTclvN2QtTC37LxvZAA==", "cpu": [ "x64" ], @@ -180,9 +260,9 @@ } }, "node_modules/@next/swc-linux-arm64-gnu": { - "version": "14.2.26", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.2.26.tgz", - "integrity": "sha512-SINMl1I7UhfHGM7SoRiw0AbwnLEMUnJ/3XXVmhyptzriHbWvPPbbm0OEVG24uUKhuS1t0nvN/DBvm5kz6ZIqpg==", + "version": "14.2.33", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.2.33.tgz", + "integrity": "sha512-JXMBka6lNNmqbkvcTtaX8Gu5by9547bukHQvPoLe9VRBx1gHwzf5tdt4AaezW85HAB3pikcvyqBToRTDA4DeLw==", "cpu": [ "arm64" ], @@ -196,9 +276,9 @@ } }, "node_modules/@next/swc-linux-arm64-musl": { - "version": "14.2.26", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.2.26.tgz", - "integrity": "sha512-s6JaezoyJK2DxrwHWxLWtJKlqKqTdi/zaYigDXUJ/gmx/72CrzdVZfMvUc6VqnZ7YEvRijvYo+0o4Z9DencduA==", + "version": "14.2.33", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.2.33.tgz", + "integrity": "sha512-Bm+QulsAItD/x6Ih8wGIMfRJy4G73tu1HJsrccPW6AfqdZd0Sfm5Imhgkgq2+kly065rYMnCOxTBvmvFY1BKfg==", "cpu": [ "arm64" ], @@ -212,9 +292,9 @@ } }, "node_modules/@next/swc-linux-x64-gnu": { - "version": "14.2.26", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.2.26.tgz", - "integrity": "sha512-FEXeUQi8/pLr/XI0hKbe0tgbLmHFRhgXOUiPScz2hk0hSmbGiU8aUqVslj/6C6KA38RzXnWoJXo4FMo6aBxjzg==", + "version": "14.2.33", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.2.33.tgz", + "integrity": "sha512-FnFn+ZBgsVMbGDsTqo8zsnRzydvsGV8vfiWwUo1LD8FTmPTdV+otGSWKc4LJec0oSexFnCYVO4hX8P8qQKaSlg==", "cpu": [ "x64" ], @@ -228,9 +308,9 @@ } }, 
"node_modules/@next/swc-linux-x64-musl": { - "version": "14.2.26", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.2.26.tgz", - "integrity": "sha512-BUsomaO4d2DuXhXhgQCVt2jjX4B4/Thts8nDoIruEJkhE5ifeQFtvW5c9JkdOtYvE5p2G0hcwQ0UbRaQmQwaVg==", + "version": "14.2.33", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.2.33.tgz", + "integrity": "sha512-345tsIWMzoXaQndUTDv1qypDRiebFxGYx9pYkhwY4hBRaOLt8UGfiWKr9FSSHs25dFIf8ZqIFaPdy5MljdoawA==", "cpu": [ "x64" ], @@ -244,9 +324,9 @@ } }, "node_modules/@next/swc-win32-arm64-msvc": { - "version": "14.2.26", - "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.2.26.tgz", - "integrity": "sha512-5auwsMVzT7wbB2CZXQxDctpWbdEnEW/e66DyXO1DcgHxIyhP06awu+rHKshZE+lPLIGiwtjo7bsyeuubewwxMw==", + "version": "14.2.33", + "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.2.33.tgz", + "integrity": "sha512-nscpt0G6UCTkrT2ppnJnFsYbPDQwmum4GNXYTeoTIdsmMydSKFz9Iny2jpaRupTb+Wl298+Rh82WKzt9LCcqSQ==", "cpu": [ "arm64" ], @@ -260,9 +340,9 @@ } }, "node_modules/@next/swc-win32-ia32-msvc": { - "version": "14.2.26", - "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.2.26.tgz", - "integrity": "sha512-GQWg/Vbz9zUGi9X80lOeGsz1rMH/MtFO/XqigDznhhhTfDlDoynCM6982mPCbSlxJ/aveZcKtTlwfAjwhyxDpg==", + "version": "14.2.33", + "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.2.33.tgz", + "integrity": "sha512-pc9LpGNKhJ0dXQhZ5QMmYxtARwwmWLpeocFmVG5Z0DzWq5Uf0izcI8tLc+qOpqxO1PWqZ5A7J1blrUIKrIFc7Q==", "cpu": [ "ia32" ], @@ -276,9 +356,9 @@ } }, "node_modules/@next/swc-win32-x64-msvc": { - "version": "14.2.26", - "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.26.tgz", - "integrity": "sha512-2rdB3T1/Gp7bv1eQTTm9d1Y1sv9UuJ2LAwOE0Pe2prHKe32UNscj7YS13fRB37d0GAiGNR+Y7ZcW8YjDI8Ns0w==", 
+ "version": "14.2.33", + "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.33.tgz", + "integrity": "sha512-nOjfZMy8B94MdisuzZo9/57xuFVLHJaDj5e/xrduJp9CV2/HrfxTRH2fbyLe+K9QT41WBLUd4iXX3R7jBp0EUg==", "cpu": [ "x64" ], @@ -1774,6 +1854,14 @@ "@types/react": "^18.0.0" } }, + "node_modules/@types/trusted-types": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", + "license": "MIT", + "optional": true, + "peer": true + }, "node_modules/@types/unist": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", @@ -2201,6 +2289,16 @@ "dev": true, "license": "MIT" }, + "node_modules/dompurify": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.7.tgz", + "integrity": "sha512-WhL/YuveyGXJaerVlMYGWhvQswa7myDG17P7Vu65EWC05o8vfeNbvNf4d/BOvH99+ZW+LlQsc1GDKMa1vNK6dw==", + "license": "(MPL-2.0 OR Apache-2.0)", + "peer": true, + "optionalDependencies": { + "@types/trusted-types": "^2.0.7" + } + }, "node_modules/electron-to-chromium": { "version": "1.5.302", "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.302.tgz", @@ -2641,6 +2739,19 @@ "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0-rc" } }, + "node_modules/marked": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-14.0.0.tgz", + "integrity": "sha512-uIj4+faQ+MgHgwUW1l2PsPglZLOLOT1uErt06dAPtx2kjteLAkbsd/0FiYg/MGS+i7ZKLb7w2WClxHkzOOuryQ==", + "license": "MIT", + "peer": true, + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 18" + } + }, "node_modules/mdast-util-from-markdown": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.3.tgz", @@ -3260,6 +3371,17 @@ "node": ">=8.6" } }, 
+ "node_modules/monaco-editor": { + "version": "0.55.1", + "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.55.1.tgz", + "integrity": "sha512-jz4x+TJNFHwHtwuV9vA9rMujcZRb0CEilTEwG2rRSpe/A7Jdkuj8xPKttCgOh+v/lkHy7HsZ64oj+q3xoAFl9A==", + "license": "MIT", + "peer": true, + "dependencies": { + "dompurify": "3.2.7", + "marked": "14.0.0" + } + }, "node_modules/motion-dom": { "version": "12.34.3", "resolved": "https://registry.npmjs.org/motion-dom/-/motion-dom-12.34.3.tgz", @@ -3312,13 +3434,12 @@ } }, "node_modules/next": { - "version": "14.2.26", - "resolved": "https://registry.npmjs.org/next/-/next-14.2.26.tgz", - "integrity": "sha512-b81XSLihMwCfwiUVRRja3LphLo4uBBMZEzBBWMaISbKTwOmq3wPknIETy/8000tr7Gq4WmbuFYPS7jOYIf+ZJw==", - "deprecated": "This version has a security vulnerability. Please upgrade to a patched version. See https://nextjs.org/blog/security-update-2025-12-11 for more details.", + "version": "14.2.35", + "resolved": "https://registry.npmjs.org/next/-/next-14.2.35.tgz", + "integrity": "sha512-KhYd2Hjt/O1/1aZVX3dCwGXM1QmOV4eNM2UTacK5gipDdPN/oHHK/4oVGy7X8GMfPMsUTUEmGlsy0EY1YGAkig==", "license": "MIT", "dependencies": { - "@next/env": "14.2.26", + "@next/env": "14.2.35", "@swc/helpers": "0.5.5", "busboy": "1.6.0", "caniuse-lite": "^1.0.30001579", @@ -3333,15 +3454,15 @@ "node": ">=18.17.0" }, "optionalDependencies": { - "@next/swc-darwin-arm64": "14.2.26", - "@next/swc-darwin-x64": "14.2.26", - "@next/swc-linux-arm64-gnu": "14.2.26", - "@next/swc-linux-arm64-musl": "14.2.26", - "@next/swc-linux-x64-gnu": "14.2.26", - "@next/swc-linux-x64-musl": "14.2.26", - "@next/swc-win32-arm64-msvc": "14.2.26", - "@next/swc-win32-ia32-msvc": "14.2.26", - "@next/swc-win32-x64-msvc": "14.2.26" + "@next/swc-darwin-arm64": "14.2.33", + "@next/swc-darwin-x64": "14.2.33", + "@next/swc-linux-arm64-gnu": "14.2.33", + "@next/swc-linux-arm64-musl": "14.2.33", + "@next/swc-linux-x64-gnu": "14.2.33", + "@next/swc-linux-x64-musl": "14.2.33", + 
"@next/swc-win32-arm64-msvc": "14.2.33", + "@next/swc-win32-ia32-msvc": "14.2.33", + "@next/swc-win32-x64-msvc": "14.2.33" }, "peerDependencies": { "@opentelemetry/api": "^1.1.0", @@ -3967,6 +4088,12 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/state-local": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/state-local/-/state-local-1.0.7.tgz", + "integrity": "sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==", + "license": "MIT" + }, "node_modules/streamsearch": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz", diff --git a/backend/webui/package.json b/backend/webui/package.json index 0f99f9f..304deac 100644 --- a/backend/webui/package.json +++ b/backend/webui/package.json @@ -10,6 +10,10 @@ }, "dependencies": { "@2gis/mapgl": "^1.70.1", + "@dnd-kit/core": "^6.3.1", + "@dnd-kit/sortable": "^10.0.0", + "@dnd-kit/utilities": "^3.2.2", + "@monaco-editor/react": "^4.7.0", "@radix-ui/react-avatar": "^1.1.11", "@radix-ui/react-dialog": "^1.1.15", "@radix-ui/react-dropdown-menu": "^2.1.16", @@ -22,7 +26,7 @@ "framer-motion": "^12.34.3", "leaflet": "^1.9.4", "lucide-react": "^0.454.0", - "next": "^14.2.26", + "next": "^14.2.35", "react": "^18.3.1", "react-dom": "^18.3.1", "react-leaflet": "^4.2.1", diff --git a/backend/webui/src/app/(main)/admin/posts/page.tsx b/backend/webui/src/app/(main)/admin/posts/page.tsx index d2928d9..fdff436 100644 --- a/backend/webui/src/app/(main)/admin/posts/page.tsx +++ b/backend/webui/src/app/(main)/admin/posts/page.tsx @@ -100,7 +100,7 @@ function PostModal({ post, onClose, onSave }: PostModalProps) { setFormData((f) => ({ ...f, topic: e.target.value }))} - placeholder="Например: Основы Python" - className="w-full px-4 py-3 text-sm bg-elevated/60 border border-border rounded-xl text-primary placeholder:text-muted focus:outline-none input-gradient transition-colors" - autoFocus - /> - - -
- -
- {difficulties.map((d) => ( - - ))} -
-
- -
- - -
- -
- -
- - -
-
- - {/* Actions */} -
- - Отмена - - -
- - - - ); -} diff --git a/backend/webui/src/app/(main)/learning/page.tsx b/backend/webui/src/app/(main)/learning/page.tsx index 70d8c0c..af78096 100644 --- a/backend/webui/src/app/(main)/learning/page.tsx +++ b/backend/webui/src/app/(main)/learning/page.tsx @@ -1,218 +1,1704 @@ 'use client'; -import { useState, useEffect, useCallback, useMemo } from 'react'; +import { + useState, + useCallback, + useEffect, + useMemo, + useRef, + type ChangeEvent, + type DragEvent, + type KeyboardEvent, +} from 'react'; import Link from 'next/link'; -import { motion } from 'framer-motion'; -import { BookOpen, Play, CheckCircle2, Clock, Plus, Trash2, Loader2, RefreshCw, GraduationCap } from 'lucide-react'; -import { fetchLessons, deleteLesson } from '@/lib/api'; -import type { Lesson } from '@/lib/types'; +import { motion, AnimatePresence } from 'framer-motion'; +import { + GraduationCap, + Search, + Loader2, + ArrowUp, + Square, + Sparkles, + User, + CheckCircle2, + Clock, + BookOpen, + Target, + BarChart3, + Upload, + Play, + FileText, + RefreshCw, + AlertCircle, + MessageCircle, + ChevronRight, + Layers, + PanelRightOpen, + PanelRightClose, + FlaskConical, + Plus, + ListChecks, + Filter, +} from 'lucide-react'; +import ReactMarkdown from 'react-markdown'; +import { + fetchLearningCourses, + fetchLearningProfile, + fetchEnrollments, + enrollInCourse, + saveLearningProfile, + submitLearningOnboarding, + uploadUserFile, + fetchUploadedFileContent, + analyzeUploadedFile, + submitResume, + createSandboxSession, +} from '@/lib/api'; +import { useLearningChat, type LearningChatMessage } from '@/lib/hooks/useLearningChat'; +import type { + LearningCourse, + LearningEnrollment, + LearningPhase, + LearningUserProfile, +} from '@/lib/types'; +import SandboxPanel from '@/components/SandboxPanel'; -function formatTime(time: string): string { - return time || '~30 мин'; +type PanelTab = 'course' | 'widgets' | 'profile' | 'practice'; +type LearningWidgetPayload = { type: string; data: Record 
}; + +const quickPrompts = [ + { icon: '🧠', text: 'Построй план на 2 недели', prompt: 'Построй для меня учебный план на 2 недели под стажировку' }, + { icon: '💻', text: 'Дай практическое задание', prompt: 'Дай мне практическое задание уровня junior' }, + { icon: '📚', text: 'Объясни тему просто', prompt: 'Объясни тему простыми словами и приведи пример' }, + { icon: '✅', text: 'Проверь решение', prompt: 'Проверь мой подход и дай разбор как на собеседовании' }, +]; + +const onboardingQuestions = [ + { key: 'experience_level', label: 'Ваш уровень сейчас', placeholder: 'Например: junior, middle, начинающий' }, + { key: 'current_role', label: 'Чем занимаетесь сейчас', placeholder: 'Например: студент, QA, backend intern' }, + { key: 'target_track', label: 'Целевой трек', placeholder: 'Например: backend Go, frontend React, data' }, + { key: 'weekly_hours', label: 'Сколько часов в неделю готовы уделять', placeholder: 'Например: 8-10 часов' }, + { key: 'target_result', label: 'Какой результат хотите получить', placeholder: 'Например: стажировка через 3 месяца' }, +] as const; + +const difficultyLabels: Record = { + beginner: 'Начинающий', + intermediate: 'Средний', + advanced: 'Продвинутый', + expert: 'Эксперт', +}; + +const difficultyColors: Record = { + beginner: 'bg-success/10 text-success border-success/25', + intermediate: 'bg-blue-500/10 text-blue-400 border-blue-500/25', + advanced: 'bg-orange-500/10 text-orange-400 border-orange-500/25', + expert: 'bg-purple-500/10 text-purple-400 border-purple-500/25', +}; + +const categoryLabels: Record = { + programming: 'Программирование', + devops: 'DevOps', + data: 'Данные', + ai_ml: 'AI/ML', + security: 'Безопасность', + product: 'Продукт', + design: 'Дизайн', + management: 'Менеджмент', + fintech: 'Финтех', + gamedev: 'Геймдев', + mobile: 'Мобильная разработка', + blockchain: 'Блокчейн', + iot: 'IoT', + general: 'Общее', + other: 'Другое', +}; + +const phaseLabels: Record = { + idle: '', + onboarding: 'Проводим онбординг...', 
+ resumeParsing: 'Разбираем резюме...', + profiling: 'Уточняем профиль...', + planBuilding: 'Собираем персональный план...', + taskDesign: 'Готовим практическое задание...', + sandboxPreparing: 'Поднимаем среду практики...', + verifying: 'Проверяем решение...', +}; + +function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); } -function getProgressPercent(lesson: Lesson): number { - if (!lesson.progress) return 0; - const completed = lesson.progress.completedSteps?.length || 0; - return Math.round((completed / lesson.stepsCount) * 100); +function asString(value: unknown): string | null { + return typeof value === 'string' ? value : null; +} + +function asNumber(value: unknown): number | null { + return typeof value === 'number' && Number.isFinite(value) ? value : null; +} + +function asBoolean(value: unknown): boolean { + return value === true; +} + +function asStringArray(value: unknown): string[] { + if (!Array.isArray(value)) return []; + return value.filter((item): item is string => typeof item === 'string' && item.trim().length > 0); +} + +function asRecord(value: unknown): Record { + if (!value || typeof value !== 'object' || Array.isArray(value)) return {}; + return value as Record; +} + +function enrollmentProgress(enrollment: LearningEnrollment): { completed: number; total: number; percent: number } { + const completed = Array.isArray(enrollment.progress?.completed_modules) + ? enrollment.progress.completed_modules.length + : 0; + const total = Array.isArray(enrollment.plan?.modules) ? 
enrollment.plan.modules.length : 0; + const safeTotal = Math.max(total, 1); + const percent = Math.round((completed / safeTotal) * 100); + return { completed, total: safeTotal, percent }; +} + +function CourseCard({ + course, + isEnrolled, + isEnrolling, + onEnroll, +}: { + course: LearningCourse; + isEnrolled: boolean; + isEnrolling: boolean; + onEnroll: (courseId: string) => void; +}) { + const outcomePreview = (course.landing?.outcomes || []).slice(0, 2); + + return ( + +
+
+ +
+
+
+ + {difficultyLabels[course.difficulty] || course.difficulty} + + + {categoryLabels[course.category] || course.category} + + {course.durationHours > 0 && ( + + + {course.durationHours}ч + + )} +
+ +

{course.title}

+

{course.shortDescription}

+ + {outcomePreview.length > 0 && ( +
    + {outcomePreview.map((outcome, index) => ( +
  • + + {outcome} +
  • + ))} +
+ )} + +
+ {isEnrolled ? ( +
+
+ + Уже в ваших курсах +
+ + Подробнее + +
+ ) : ( +
+ + + Подробнее + +
+ )} +
+
+
+
+ ); +} + +function EnrollmentRow({ + enrollment, + isActive, + onSelect, +}: { + enrollment: LearningEnrollment; + isActive: boolean; + onSelect: (id: string) => void; +}) { + const progress = enrollmentProgress(enrollment); + const currentModule = Math.min(progress.completed + 1, progress.total); + + return ( + + ); +} + +function LoadingPhaseIndicator({ phase }: { phase: LearningPhase }) { + if (phase === 'idle') return null; + return ( +
+ + {phaseLabels[phase]} +
+ ); +} + +function AssistantMessage({ message }: { message: LearningChatMessage }) { + return ( +
+ {message.content && ( +
+ {message.content} +
+ )} + + {message.isStreaming && ( +
+ + {!message.content && Формирую ответ...} +
+ )} +
+ ); +} + +function LearningWidgetRenderer({ + widget, + onOpenPractice, + isCreatingPractice, + onUsePrompt, +}: { + widget: LearningWidgetPayload; + onOpenPractice: () => void; + isCreatingPractice: boolean; + onUsePrompt: (prompt: string) => void; +}) { + switch (widget.type) { + case 'learning_task': { + const title = asString(widget.data.title) || 'Практическое задание'; + const difficulty = asString(widget.data.difficulty) || ''; + const estimated = asNumber(widget.data.estimated); + const requirements = asStringArray(widget.data.requirements); + const acceptance = asStringArray(widget.data.acceptance); + const verifyCmd = asString(widget.data.verify_cmd); + + return ( +
+
+
+

Практика

+

{title}

+
+ {difficulty && ( + + {difficultyLabels[difficulty] || difficulty} + + )} + {typeof estimated === 'number' && estimated > 0 && ( + ~{estimated} мин + )} +
+
+ +
+ + {requirements.length > 0 && ( +
+

Что сделать

+
    + {requirements.map((item, idx) => ( +
  • + - + {item} +
  • + ))} +
+
+ )} + + {acceptance.length > 0 && ( +
+

Критерии

+
    + {acceptance.map((item, idx) => ( +
  • + - + {item} +
  • + ))} +
+
+ )} + +
+ {verifyCmd && ( + + )} + +
+
+ ); + } + + case 'learning_plan': { + const modulesRaw = widget.data.modules; + const modules = Array.isArray(modulesRaw) ? modulesRaw : []; + const totalHours = asNumber(widget.data.total_hours); + + return ( +
+
+

Персональный план

+ {typeof totalHours === 'number' && totalHours > 0 && ( + {totalHours} ч + )} +
+
+ {modules.slice(0, 6).map((moduleItem, index) => { + const moduleRecord = asRecord(moduleItem); + const title = asString(moduleRecord.title) || `Модуль ${index + 1}`; + const desc = asString(moduleRecord.description) || ''; + return ( +
+

{title}

+ {desc &&

{desc}

} +
+ ); + })} +
+
+ ); + } + + case 'learning_quiz': { + const title = asString(widget.data.title) || 'Квиз'; + const questions = Array.isArray(widget.data.questions) ? widget.data.questions : []; + + return ( +
+

Квиз

+

{title}

+

Вопросов: {questions.length}

+ +
+ ); + } + + case 'learning_evaluation': { + const score = asNumber(widget.data.score); + const max = asNumber(widget.data.max_score); + const passed = asBoolean(widget.data.passed); + const issues = asStringArray(widget.data.issues); + + return ( +
+
+

Проверка решения

+ + {passed ? 'PASS' : 'FAIL'} + +
+ {typeof score === 'number' && typeof max === 'number' && ( +

{score}/{max}

+ )} + {issues.length > 0 && ( +
    + {issues.map((item, idx) => ( +
  • - {item}
  • + ))} +
+ )} +
+ ); + } + + default: + return null; + } } export default function LearningPage() { - const [lessons, setLessons] = useState([]); - const [isLoading, setIsLoading] = useState(true); - const [deletingId, setDeletingId] = useState(null); + const [inputValue, setInputValue] = useState(''); + const [showSidePanel, setShowSidePanel] = useState(true); + const [panelTab, setPanelTab] = useState('course'); + + const [courses, setCourses] = useState([]); + const [enrollments, setEnrollments] = useState([]); + const [profile, setProfile] = useState(null); + const [isLoadingData, setIsLoadingData] = useState(true); + const [pageError, setPageError] = useState(null); + + const [activeEnrollmentId, setActiveEnrollmentId] = useState(null); + const [searchQuery, setSearchQuery] = useState(''); + const [enrollingId, setEnrollingId] = useState(null); + + const [showWelcomeOptions, setShowWelcomeOptions] = useState(false); + const [isSavingProfile, setIsSavingProfile] = useState(false); + const [isUploadingResume, setIsUploadingResume] = useState(false); + const [isResumeDragActive, setIsResumeDragActive] = useState(false); + const [resumeStatus, setResumeStatus] = useState(null); + const [isSubmittingOnboarding, setIsSubmittingOnboarding] = useState(false); + const [onboardingAnswers, setOnboardingAnswers] = useState>({ + experience_level: '', + current_role: '', + target_track: '', + weekly_hours: '8', + target_result: '', + }); + + const [sandboxSessionId, setSandboxSessionId] = useState(null); + const [isCreatingSandbox, setIsCreatingSandbox] = useState(false); + + const [profileForm, setProfileForm] = useState({ + name: '', + currentRole: '', + targetTrack: '', + weeklyHours: '8', + targetResult: '', + }); + + const messagesEndRef = useRef(null); + const textareaRef = useRef(null); + const resumeInputRef = useRef(null); + const autoSandboxSeedRef = useRef(null); + + const activeEnrollment = useMemo( + () => enrollments.find((enrollment) => enrollment.id === activeEnrollmentId) 
|| null, + [enrollments, activeEnrollmentId] + ); + + const enrolledCourseIds = useMemo(() => new Set(enrollments.map((item) => item.courseId)), [enrollments]); + + const profileRecord = useMemo(() => asRecord(profile?.profile), [profile]); + const profileSkills = useMemo(() => asStringArray(profileRecord.skills), [profileRecord]); + const profileTracks = useMemo(() => asStringArray(profileRecord.recommended_tracks), [profileRecord]); + const needsOnboarding = !profile?.onboardingCompleted; + + const filteredCourses = useMemo(() => { + const query = searchQuery.trim().toLowerCase(); + if (!query) return courses; + return courses.filter((course) => + course.title.toLowerCase().includes(query) + || course.shortDescription.toLowerCase().includes(query) + || course.category.toLowerCase().includes(query) + ); + }, [courses, searchQuery]); + + const { + messages, + isLoading: isChatLoading, + loadingPhase, + sendMessage, + stopGeneration, + clearChat, + } = useLearningChat({ + enrollmentId: activeEnrollment?.id, + courseTitle: activeEnrollment?.course?.title, + planContext: activeEnrollment?.plan ? JSON.stringify(activeEnrollment.plan) : undefined, + profileContext: profile ? JSON.stringify(profile.profile) : undefined, + }); + + const hasMessages = messages.length > 0; + + const allWidgets = useMemo((): LearningWidgetPayload[] => { + const map = new Map(); + for (const message of messages) { + if (message.role !== 'assistant') continue; + const widgets = Array.isArray(message.widgets) ? 
message.widgets : []; + for (const widget of widgets) { + map.set(widget.type, widget as LearningWidgetPayload); + } + } + return Array.from(map.values()); + }, [messages]); + + const loadData = useCallback(async () => { + setIsLoadingData(true); + setPageError(null); - const load = useCallback(async () => { - setIsLoading(true); try { - const data = await fetchLessons(50, 0); - setLessons(data.lessons || []); - } catch (err) { - console.error('Failed to load lessons:', err); - setLessons([]); + const [coursesData, enrollmentsData, profileData] = await Promise.all([ + fetchLearningCourses({ limit: 80 }), + fetchEnrollments(), + fetchLearningProfile(), + ]); + + setCourses(coursesData.courses || []); + setEnrollments(enrollmentsData.enrollments || []); + setProfile(profileData.exists ? profileData.profile : null); + } catch (error) { + const message = error instanceof Error ? error.message : 'Не удалось загрузить учебный кабинет'; + setPageError(message); } finally { - setIsLoading(false); + setIsLoadingData(false); } }, []); useEffect(() => { - load(); - }, [load]); + void loadData(); + }, [loadData]); - const handleDelete = async (id: string, e: React.MouseEvent) => { - e.stopPropagation(); - if (deletingId) return; - - setDeletingId(id); - try { - await deleteLesson(id); - setLessons((prev) => prev.filter((l) => l.id !== id)); - } catch (err) { - console.error('Failed to delete lesson:', err); - } finally { - setDeletingId(null); + useEffect(() => { + if (enrollments.length === 0) { + if (activeEnrollmentId !== null) { + setActiveEnrollmentId(null); + } + return; } - }; - const stats = useMemo(() => { - const completed = lessons.filter((l) => getProgressPercent(l) === 100).length; - const inProgress = lessons.filter((l) => { - const pct = getProgressPercent(l); - return pct > 0 && pct < 100; - }).length; - return { completed, inProgress, total: lessons.length }; - }, [lessons]); + if (activeEnrollmentId && enrollments.some((item) => item.id === 
activeEnrollmentId)) { + return; + } - return ( -
-
- {/* Header */} -
-
-

Обучение

-

Интерактивные уроки с AI-наставником

-
-
- - - - Новый урок - -
+ setActiveEnrollmentId(enrollments[0]?.id || null); + }, [enrollments, activeEnrollmentId]); + + useEffect(() => { + if (!profile) return; + const profileData = asRecord(profile.profile); + const role = asString(profileData.current_role) || ''; + const track = asString(profileData.target_track) || ''; + const weeklyHours = asString(profileData.weekly_hours) || '8'; + const targetResult = asString(profileData.target_result) || ''; + + setProfileForm((prev) => ({ + name: prev.name || profile.displayName || '', + currentRole: prev.currentRole || role, + targetTrack: prev.targetTrack || track, + weeklyHours: prev.weeklyHours || weeklyHours, + targetResult: prev.targetResult || targetResult, + })); + + setOnboardingAnswers((prev) => ({ + experience_level: prev.experience_level || asString(profileData.level) || '', + current_role: prev.current_role || role, + target_track: prev.target_track || track, + weekly_hours: prev.weekly_hours || weeklyHours, + target_result: prev.target_result || targetResult, + })); + }, [profile]); + + useEffect(() => { + messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' }); + }, [messages]); + + useEffect(() => { + setSandboxSessionId(null); + clearChat(); + }, [activeEnrollmentId, clearChat]); + + const handleSend = useCallback(() => { + if (!inputValue.trim()) return; + sendMessage(inputValue.trim()); + setInputValue(''); + if (textareaRef.current) { + textareaRef.current.style.height = 'auto'; + } + }, [inputValue, sendMessage]); + + const handleUsePrompt = useCallback((prompt: string) => { + sendMessage(prompt); + }, [sendMessage]); + + const handleQuickPrompt = useCallback((prompt: string) => { + sendMessage(prompt); + if (panelTab !== 'widgets') setPanelTab('widgets'); + }, [panelTab, sendMessage]); + + const handleKeyDown = useCallback((event: KeyboardEvent) => { + if (event.key === 'Enter' && !event.shiftKey) { + event.preventDefault(); + handleSend(); + } + }, [handleSend]); + + const handleEnroll = useCallback(async 
(courseId: string) => { + setEnrollingId(courseId); + setPageError(null); + + try { + const enrollment = await enrollInCourse(courseId); + setEnrollments((prev) => [enrollment, ...prev]); + setActiveEnrollmentId(enrollment.id); + setPanelTab('course'); + } catch (error) { + const message = error instanceof Error ? error.message : 'Не удалось записаться на курс'; + setPageError(message); + } finally { + setEnrollingId(null); + } + }, []); + + const waitForExtractedText = useCallback(async (fileId: string): Promise => { + for (let i = 0; i < 12; i += 1) { + const content = await fetchUploadedFileContent(fileId); + const extractedText = content.extractedText?.trim(); + if (extractedText && extractedText.length > 60) { + return extractedText; + } + await sleep(1500); + } + return null; + }, []); + + const handleResumeUpload = useCallback(async (file: File) => { + setIsUploadingResume(true); + setResumeStatus('Загружаем резюме...'); + setPageError(null); + + try { + const uploaded = await uploadUserFile(file); + setResumeStatus('Извлекаем текст...'); + + let extractedText = await waitForExtractedText(uploaded.id); + if (!extractedText) { + await analyzeUploadedFile(uploaded.id); + extractedText = await waitForExtractedText(uploaded.id); + } + + if (!extractedText) { + throw new Error('Не удалось извлечь текст. Попробуйте PDF/DOCX с текстовым слоем.'); + } + + setResumeStatus('Создаём профиль...'); + await submitResume(uploaded.id, extractedText); + await loadData(); + setResumeStatus('Резюме обработано.'); + setPanelTab('profile'); + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Ошибка загрузки резюме'; + setPageError(message); + setResumeStatus('Не удалось обработать резюме.'); + } finally { + setIsUploadingResume(false); + } + }, [loadData, waitForExtractedText]); + + const handleResumeDragOver = useCallback((event: DragEvent) => { + event.preventDefault(); + event.stopPropagation(); + if (!isUploadingResume) { + setIsResumeDragActive(true); + } + }, [isUploadingResume]); + + const handleResumeDragLeave = useCallback((event: DragEvent) => { + event.preventDefault(); + event.stopPropagation(); + setIsResumeDragActive(false); + }, []); + + const handleResumeDrop = useCallback((event: DragEvent) => { + event.preventDefault(); + event.stopPropagation(); + setIsResumeDragActive(false); + if (isUploadingResume) return; + const file = event.dataTransfer?.files?.[0]; + if (!file) return; + void handleResumeUpload(file); + }, [handleResumeUpload, isUploadingResume]); + + const onResumeInputChange = useCallback((event: ChangeEvent) => { + const file = event.target.files?.[0]; + event.target.value = ''; + if (!file) return; + void handleResumeUpload(file); + }, [handleResumeUpload]); + + const handleSaveProfile = useCallback(async () => { + setIsSavingProfile(true); + setPageError(null); + + try { + const currentProfile = asRecord(profile?.profile); + const mergedProfile: Record = { + ...currentProfile, + current_role: profileForm.currentRole || currentProfile.current_role || 'Абитуриент', + target_track: profileForm.targetTrack || currentProfile.target_track || '', + weekly_hours: profileForm.weeklyHours || currentProfile.weekly_hours || '8', + target_result: profileForm.targetResult || currentProfile.target_result || '', + }; + + await saveLearningProfile({ + displayName: profileForm.name || profile?.displayName || '', + onboardingCompleted: true, + profile: mergedProfile, + }); + await loadData(); + setResumeStatus('Профиль сохранён.'); + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Не удалось сохранить профиль'; + setPageError(message); + } finally { + setIsSavingProfile(false); + } + }, [profileForm, profile, loadData]); + + const handleSubmitOnboarding = useCallback(async () => { + setIsSubmittingOnboarding(true); + setPageError(null); + + try { + const nonEmptyAnswers = Object.values(onboardingAnswers).filter((value) => value.trim().length > 0); + if (nonEmptyAnswers.length < 3) { + throw new Error('Заполните минимум 3 ответа, чтобы построить профиль.'); + } + + await submitLearningOnboarding({ + displayName: profileForm.name || profile?.displayName || '', + answers: onboardingAnswers, + }); + + setProfileForm((prev) => ({ + ...prev, + currentRole: prev.currentRole || onboardingAnswers.current_role, + targetTrack: prev.targetTrack || onboardingAnswers.target_track, + weeklyHours: prev.weeklyHours || onboardingAnswers.weekly_hours, + targetResult: prev.targetResult || onboardingAnswers.target_result, + })); + + await loadData(); + setResumeStatus('Онбординг заполнен. Профиль обновлён.'); + } catch (error) { + const message = error instanceof Error ? error.message : 'Не удалось отправить онбординг'; + setPageError(message); + } finally { + setIsSubmittingOnboarding(false); + } + }, [loadData, onboardingAnswers, profile, profileForm.name]); + + const handleOpenPractice = useCallback(async () => { + if (sandboxSessionId) { + setPanelTab('practice'); + return; + } + + setIsCreatingSandbox(true); + setPageError(null); + + try { + const session = await createSandboxSession(); + setSandboxSessionId(session.id); + setPanelTab('practice'); + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Не удалось создать sandbox'; + setPageError(message); + } finally { + setIsCreatingSandbox(false); + } + }, [sandboxSessionId]); + + useEffect(() => { + const taskWidget = allWidgets.find((widget) => widget.type === 'learning_task'); + if (!taskWidget) return; + if (sandboxSessionId || isCreatingSandbox) return; + + const taskId = asString(taskWidget.data.task_id) || asString(taskWidget.data.taskId); + const title = asString(taskWidget.data.title) || 'task'; + const seed = `${title}:${taskId || 'none'}`; + if (autoSandboxSeedRef.current === seed) return; + autoSandboxSeedRef.current = seed; + + setIsCreatingSandbox(true); + createSandboxSession(taskId ? { taskId } : undefined) + .then((session) => { + setSandboxSessionId(session.id); + setPanelTab('practice'); + }) + .catch((error) => { + const message = error instanceof Error ? error.message : 'Не удалось подготовить sandbox'; + setPageError(message); + }) + .finally(() => { + setIsCreatingSandbox(false); + }); + }, [allWidgets, isCreatingSandbox, sandboxSessionId]); + + const currentProgress = activeEnrollment ? enrollmentProgress(activeEnrollment) : null; + + const renderCoursePanel = () => ( +
+ {activeEnrollment ? ( +
+

Текущий курс

+

{activeEnrollment.course?.title || 'Курс'}

+ {currentProgress && ( + <> +
+
+
+
+ {currentProgress.percent}% +
+

+ Модуль {Math.min(currentProgress.completed + 1, currentProgress.total)} из {currentProgress.total} +

+ + )}
- - {/* Stats */} -
-
-

{stats.completed}

-

Завершено

-
-
-

{stats.inProgress}

-

В процессе

-
-
-

{stats.total}

-

Всего

-
+ ) : ( +
+ Выберите курс, чтобы активировать персональный план.
+ )} - {/* Content */} - {isLoading ? ( -
- -

Загрузка уроков...

-
- ) : lessons.length > 0 ? ( -
- {lessons.map((lesson, i) => { - const pct = getProgressPercent(lesson); - const done = pct === 100; - const inProgress = pct > 0 && pct < 100; - - return ( - -
-
- {done ? ( - - ) : inProgress ? ( - - ) : ( - - )} -
-
-
-

- {lesson.title || lesson.topic} -

- -
-
- - {lesson.difficulty} - -
- - {formatTime(lesson.estimatedTime)} -
-
- - {lesson.stepsCount} шагов -
-
-
-
- -
- - {lesson.progress?.completedSteps?.length || 0}/{lesson.stepsCount} - -
-
-
-
- ); - })} -
+
+

Мои курсы

+ {enrollments.length > 0 ? ( + enrollments.map((enrollment) => ( + + )) ) : ( -
- -

Пока нет уроков

-

- Создайте первый урок, чтобы начать обучение -

- - - Создать первый урок - +
+ У вас пока нет курсов. Выберите курс в welcome-экране.
)}
+ + {activeEnrollment?.plan?.modules && activeEnrollment.plan.modules.length > 0 && ( +
+

План модулей

+ {activeEnrollment.plan.modules.map((moduleItem, index) => { + const done = index < (activeEnrollment.progress?.current_module || 0); + const current = index === (activeEnrollment.progress?.current_module || 0); + return ( +
+

Модуль {index + 1}: {moduleItem.title}

+ {moduleItem.practice_focus && ( +

Практика: {moduleItem.practice_focus}

+ )} +
+ ); + })} +
+ )} +
+ ); + + const renderWidgetsPanel = () => ( +
+ {allWidgets.length === 0 ? ( +
+ Виджеты появятся после запроса наставнику (план, задание, квиз, проверка). +
+ ) : ( + allWidgets.map((widget) => ( + + )) + )} +
+ ); + + const renderProfilePanel = () => ( +
+
+
+ +

Профиль абитуриента

+
+ +
+ setProfileForm((prev) => ({ ...prev, name: event.target.value }))} + placeholder={profile?.displayName || 'Имя'} + className="w-full px-3 py-2 text-xs bg-elevated/40 border border-border/50 rounded-lg text-primary placeholder:text-muted focus:outline-none" + /> + setProfileForm((prev) => ({ ...prev, currentRole: event.target.value }))} + placeholder={asString(profileRecord.current_role) || 'Текущая роль'} + className="w-full px-3 py-2 text-xs bg-elevated/40 border border-border/50 rounded-lg text-primary placeholder:text-muted focus:outline-none" + /> + setProfileForm((prev) => ({ ...prev, targetTrack: event.target.value }))} + placeholder={asString(profileRecord.target_track) || 'Целевой трек'} + className="w-full px-3 py-2 text-xs bg-elevated/40 border border-border/50 rounded-lg text-primary placeholder:text-muted focus:outline-none" + /> +
+ setProfileForm((prev) => ({ ...prev, weeklyHours: event.target.value }))} + placeholder={asString(profileRecord.weekly_hours) || 'Часы/нед'} + className="w-full px-3 py-2 text-xs bg-elevated/40 border border-border/50 rounded-lg text-primary placeholder:text-muted focus:outline-none" + /> + setProfileForm((prev) => ({ ...prev, targetResult: event.target.value }))} + placeholder={asString(profileRecord.target_result) || 'Цель'} + className="w-full px-3 py-2 text-xs bg-elevated/40 border border-border/50 rounded-lg text-primary placeholder:text-muted focus:outline-none" + /> +
+
+ + +
+ +
+
+ +

Резюме

+
+

Загрузите PDF или DOCX, чтобы система обновила профиль.

+ + + +
+ +

+ Или перетащите PDF/DOCX сюда +

+
+ + {resumeStatus && ( +

{resumeStatus}

+ )} +
+ + {(profileTracks.length > 0 || profileSkills.length > 0) && ( +
+ {profileTracks.length > 0 && ( + <> +

Рекомендованные треки

+
+ {profileTracks.map((track, index) => ( + + {track} + + ))} +
+ + )} + {profileSkills.length > 0 && ( + <> +

Ключевые навыки

+
+ {profileSkills.slice(0, 12).map((skill, index) => ( + + {skill} + + ))} +
+ + )} +
+ )} +
+ ); + + const renderPracticePanel = () => ( +
+ {!sandboxSessionId ? ( +
+
+ +
+

Среда практики не запущена

+

+ Запустите sandbox, чтобы решать задания и прогонять проверку. +

+ +
+ ) : ( + { + if (passed) { + sendMessage('Проверка в песочнице пройдена. Дай разбор результата и следующий шаг.'); + } + }} + /> + )} +
+ ); + + const renderSidePanelContent = () => { + switch (panelTab) { + case 'course': + return renderCoursePanel(); + case 'widgets': + return renderWidgetsPanel(); + case 'profile': + return renderProfilePanel(); + case 'practice': + return renderPracticePanel(); + default: + return null; + } + }; + + return ( +
+ + {pageError && ( + + + {pageError} + + )} + + + {isLoadingData ? ( +
+ +

Загрузка учебного кабинета...

+
+ ) : ( + + {!hasMessages ? ( + +
+ +
+ +
+

+ Учёба через чат-наставника +

+

+ Как в `/travel`: сначала запрос в чат, дальше план, задания и практика в одном рабочем потоке. +

+
+ + {needsOnboarding && ( + +
+ +

Быстрый онбординг (1-2 минуты)

+
+

+ Ответьте на 3-5 вопросов, и мы соберём персональный профиль обучения. +

+
+ {onboardingQuestions.map((question) => ( + { + const value = event.target.value; + setOnboardingAnswers((prev) => ({ ...prev, [question.key]: value })); + if (question.key === 'current_role') { + setProfileForm((prev) => ({ ...prev, currentRole: value })); + } + if (question.key === 'target_track') { + setProfileForm((prev) => ({ ...prev, targetTrack: value })); + } + if (question.key === 'weekly_hours') { + setProfileForm((prev) => ({ ...prev, weeklyHours: value })); + } + if (question.key === 'target_result') { + setProfileForm((prev) => ({ ...prev, targetResult: value })); + } + }} + placeholder={question.placeholder} + title={question.placeholder} + aria-label={question.label} + className="w-full px-3 py-2 text-xs bg-surface/40 border border-border/40 rounded-lg text-primary placeholder:text-muted focus:outline-none" + /> + ))} +
+
+ + Можно продолжить чат сразу после сохранения +
+
+ )} + + +
+
+