feat: default locale Russian, geo determines language for other countries

- localization-svc: defaultLocale ru, resolveLocale only by geo
- web-svc: DEFAULT_LOCALE ru, layout lang=ru, embeddedTranslations fallback ru
- countryToLocale: default ru when no country or unknown country

Co-authored-by: Cursor <cursoragent@cursor.com>
This commit is contained in:
home
2026-02-23 15:10:38 +03:00
parent 8fc82a3b90
commit cd6b7857ba
606 changed files with 26148 additions and 14297 deletions

View File

@@ -0,0 +1,26 @@
{
"name": "media-svc",
"version": "1.0.0",
"private": true,
"type": "module",
"scripts": {
"dev": "tsx watch src/index.ts",
"build": "tsc",
"start": "node dist/index.js",
"lint": "eslint src --ext .ts"
},
"dependencies": {
"@fastify/cors": "^9.0.1",
"@toolsycc/json-repair": "^0.1.22",
"fastify": "^4.28.1",
"ollama": "^0.6.3",
"openai": "^6.9.0",
"partial-json": "^0.1.7",
"zod": "^4.1.12"
},
"devDependencies": {
"@types/node": "^24.8.1",
"tsx": "^4.19.2",
"typescript": "^5.9.3"
}
}

View File

@@ -0,0 +1,85 @@
/**
* media-svc — Images and Videos search
* API: POST /api/v1/media/images, POST /api/v1/media/videos
*/
import Fastify from 'fastify';
import cors from '@fastify/cors';
import { z } from 'zod';
import { loadChatModel } from './lib/models/registry.js';
import { searchImages } from './lib/agents/image.js';
import { searchVideos } from './lib/agents/video.js';
const PORT = parseInt(process.env.PORT ?? '3016', 10);

const chatModelSchema = z.object({
  providerId: z.string(),
  key: z.string(),
});

// Both media endpoints accept the exact same payload shape.
const mediaBodySchema = z.object({
  query: z.string(),
  chatHistory: z.array(z.tuple([z.string(), z.string()])),
  chatModel: chatModelSchema,
});

const app = Fastify({ logger: true });
await app.register(cors, { origin: true });

app.get('/health', async () => ({ status: 'ok' }));

/**
 * Registers one POST media-search route. The image and video routes shared
 * identical validation, history normalization, and error handling; only the
 * search function, the response key, and the fallback error text differ.
 *
 * @param routePath  URL path of the route.
 * @param resultKey  Key under which results are returned ('images'|'videos').
 * @param run        Agent function executed with the normalized input.
 */
function registerMediaRoute(
  routePath: string,
  resultKey: 'images' | 'videos',
  run: (
    input: { chatHistory: { role: 'user' | 'assistant'; content: string }[]; query: string },
    llm: Awaited<ReturnType<typeof loadChatModel>>,
  ) => Promise<unknown>,
): void {
  app.post(routePath, async (req, reply) => {
    try {
      const parsed = mediaBodySchema.safeParse((req as { body?: unknown }).body);
      if (!parsed.success) {
        return reply.status(400).send({ error: 'Invalid body', details: parsed.error.flatten() });
      }
      const { query, chatHistory, chatModel } = parsed.data;
      const llm = await loadChatModel(chatModel.providerId, chatModel.key);
      // Upstream history arrives as [speaker, text] tuples where speaker is
      // 'human' or an assistant marker; normalize to role/content objects.
      const history = chatHistory.map(([role, content]) => ({
        role: (role === 'human' ? 'user' : 'assistant') as 'user' | 'assistant',
        content,
      }));
      const results = await run({ chatHistory: history, query }, llm);
      return reply.send({ [resultKey]: results });
    } catch (err) {
      req.log.error(err);
      const fallback = resultKey === 'images' ? 'Image search failed' : 'Video search failed';
      const msg = err instanceof Error ? err.message : fallback;
      return reply.status(500).send({ error: msg });
    }
  });
}

registerMediaRoute('/api/v1/media/images', 'images', (input, llm) => searchImages(input, llm));
registerMediaRoute('/api/v1/media/videos', 'videos', (input, llm) => searchVideos(input, llm));

try {
  await app.listen({ port: PORT, host: '0.0.0.0' });
  console.log(`media-svc listening on :${PORT}`);
} catch (err) {
  console.error(err);
  process.exit(1);
}

View File

@@ -0,0 +1,38 @@
import z from 'zod';
import { search } from '../search.js';
import { imageSearchPrompt, imageSearchFewShots } from '../prompts/image.js';
import formatChatHistoryAsString from '../utils/formatHistory.js';
import type { BaseLLM } from '../models/base/llm.js';
import type { ChatTurnMessage } from '../types.js';
type Input = { chatHistory: ChatTurnMessage[]; query: string };

export type ImageResult = { img_src: string; url: string; title: string };

/**
 * Rephrases the conversation's follow-up into a standalone image-search
 * query via the LLM, runs it against the image engines, and returns up to
 * ten results that carry every field the client needs.
 */
export async function searchImages(input: Input, llm: BaseLLM): Promise<ImageResult[]> {
  const querySchema = z.object({ query: z.string().describe('The image search query.') });
  const followUp = `<conversation>\n${formatChatHistoryAsString(input.chatHistory)}\n</conversation>\n<follow_up>\n${input.query}\n</follow_up>`;
  const rephrased = await llm.generateObject<{ query: string }>({
    schema: querySchema,
    messages: [
      { role: 'system', content: imageSearchPrompt },
      ...imageSearchFewShots,
      { role: 'user', content: followUp },
    ],
  });
  const { results } = await search(rephrased.query, {
    engines: ['bing images', 'google images'],
  });
  // Drop results missing any required field, then cap at ten.
  const images: ImageResult[] = results.flatMap((r) =>
    r.img_src && r.url && r.title ? [{ img_src: r.img_src, url: r.url, title: r.title }] : [],
  );
  return images.slice(0, 10);
}

View File

@@ -0,0 +1,47 @@
import z from 'zod';
import { search } from '../search.js';
import { videoSearchPrompt, videoSearchFewShots } from '../prompts/video.js';
import formatChatHistoryAsString from '../utils/formatHistory.js';
import type { BaseLLM } from '../models/base/llm.js';
import type { ChatTurnMessage } from '../types.js';
type Input = { chatHistory: ChatTurnMessage[]; query: string };

export type VideoResult = {
  img_src: string;
  url: string;
  title: string;
  iframe_src: string;
};

/**
 * Rephrases the conversation's follow-up into a standalone video-search
 * query via the LLM, searches YouTube through the search backend, and
 * returns up to ten embeddable results.
 */
export async function searchVideos(input: Input, llm: BaseLLM): Promise<VideoResult[]> {
  const querySchema = z.object({ query: z.string().describe('The video search query.') });
  const followUp = `<conversation>\n${formatChatHistoryAsString(input.chatHistory)}\n</conversation>\n<follow_up>\n${input.query}\n</follow_up>`;
  const rephrased = await llm.generateObject<{ query: string }>({
    schema: querySchema,
    messages: [
      { role: 'system', content: videoSearchPrompt },
      ...videoSearchFewShots,
      { role: 'user', content: followUp },
    ],
  });
  const { results } = await search(rephrased.query, { engines: ['youtube'] });
  // A result is usable only with a preview image, link, title, and embed URL.
  const videos: VideoResult[] = results.flatMap((r) => {
    const preview = r.thumbnail ?? r.img_src;
    return preview && r.url && r.title && r.iframe_src
      ? [{ img_src: preview, url: r.url, title: r.title, iframe_src: r.iframe_src }]
      : [];
  });
  return videos.slice(0, 10);
}

View File

@@ -0,0 +1,44 @@
import path from 'node:path';
import fs from 'node:fs';
export type ConfigModelProvider = {
  id: string;
  name: string;
  type: string;
  chatModels: { key: string; name: string }[];
  config: Record<string, unknown>;
};

export type Config = {
  modelProviders: ConfigModelProvider[];
};

// Config lives at <DATA_DIR|cwd>/data/config.json; resolved once at module load.
const configPath = path.join(
  process.env.DATA_DIR ? path.resolve(process.cwd(), process.env.DATA_DIR) : process.cwd(),
  'data',
  'config.json',
);

let cached: Config | null = null;

/**
 * Loads and caches the service configuration.
 *
 * A missing, unreadable, or malformed file degrades to an empty provider
 * list rather than throwing; that empty result is cached too, so the file
 * is only re-read after a process restart.
 */
export function loadConfig(): Config {
  if (cached) return cached;
  let providers: ConfigModelProvider[] = [];
  if (fs.existsSync(configPath)) {
    try {
      // JSON.parse yields untyped data; guard the shape before trusting it
      // (a non-array `modelProviders` must not be cached as-is).
      const raw: unknown = JSON.parse(fs.readFileSync(configPath, 'utf-8'));
      const list = (raw as { modelProviders?: unknown } | null)?.modelProviders;
      if (Array.isArray(list)) providers = list as ConfigModelProvider[];
    } catch {
      // Corrupt JSON — fall through to the empty default.
    }
  }
  cached = { modelProviders: providers };
  return cached;
}

/** Looks up a configured provider by id; undefined when absent. */
export function getProviderById(id: string): ConfigModelProvider | undefined {
  return loadConfig().modelProviders.find((p) => p.id === id);
}

View File

@@ -0,0 +1,11 @@
import type z from 'zod';
import type { Message } from '../../types.js';
/**
 * Input contract for structured generation: a zod schema the model output
 * must satisfy, plus the chat messages to send.
 */
export type GenerateObjectInput = {
  schema: z.ZodTypeAny;
  messages: Message[];
};

/**
 * Minimal LLM abstraction used by the media agents: implementations take a
 * chat transcript and produce an object conforming to the given schema.
 */
export abstract class BaseLLM {
  // T is the caller-asserted TypeScript type matching `input.schema`.
  abstract generateObject<T>(input: GenerateObjectInput): Promise<T>;
}

View File

@@ -0,0 +1,48 @@
import { Ollama } from 'ollama';
import z from 'zod';
import { repairJson } from '@toolsycc/json-repair';
import { BaseLLM } from './base/llm.js';
import type { Message } from '../types.js';
type OllamaConfig = {
  baseURL: string;
  model: string;
};

/**
 * BaseLLM implementation backed by an Ollama server. Structured output is
 * requested through Ollama's `format` option (a JSON schema derived from
 * the zod schema); the reply is JSON-repaired and schema-validated before
 * being returned.
 */
export class OllamaLLM extends BaseLLM {
  private client: Ollama;
  private model: string;

  constructor(config: OllamaConfig) {
    super();
    this.model = config.model;
    this.client = new Ollama({
      host: config.baseURL || 'http://localhost:11434',
    });
  }

  /** Maps our internal message shape onto Ollama's role/content pairs. */
  private toOllama(msg: Message): { role: 'user' | 'assistant' | 'system'; content: string } {
    let role: 'user' | 'assistant' | 'system';
    if (msg.role === 'system') role = 'system';
    else if (msg.role === 'user') role = 'user';
    else role = 'assistant';
    return { role, content: msg.content };
  }

  async generateObject<T>(input: {
    schema: z.ZodTypeAny;
    messages: Message[];
  }): Promise<T> {
    const outbound = input.messages.map((m) => this.toOllama(m));
    const res = await this.client.chat({
      model: this.model,
      messages: outbound,
      format: z.toJSONSchema(input.schema),
    });
    const content = res.message?.content;
    if (!content) throw new Error('No response from Ollama');
    // Models occasionally emit slightly malformed JSON; repair before parsing.
    const repaired = repairJson(content, { extractJson: true }) as string;
    return input.schema.parse(JSON.parse(repaired)) as T;
  }
}

View File

@@ -0,0 +1,49 @@
import OpenAI from 'openai';
import { zodResponseFormat } from 'openai/helpers/zod';
import { repairJson } from '@toolsycc/json-repair';
import { BaseLLM } from './base/llm.js';
import type { Message } from '../types.js';
type OpenAIConfig = {
  apiKey: string;
  model: string;
  baseURL?: string;
};

/**
 * BaseLLM implementation backed by the OpenAI chat-completions API (or any
 * compatible endpoint via `baseURL`). Structured output is requested with a
 * zod response format; the reply is JSON-repaired and schema-validated
 * before being returned.
 */
export class OpenAILLM extends BaseLLM {
  private client: OpenAI;
  private model: string;

  constructor(config: OpenAIConfig) {
    super();
    this.model = config.model;
    this.client = new OpenAI({
      apiKey: config.apiKey,
      baseURL: config.baseURL ?? 'https://api.openai.com/v1',
    });
  }

  /** Maps our internal message shape onto OpenAI's message params. */
  private toOpenAI(msg: Message): OpenAI.Chat.Completions.ChatCompletionMessageParam {
    switch (msg.role) {
      case 'system':
        return { role: 'system', content: msg.content };
      case 'assistant':
        return { role: 'assistant', content: msg.content };
      default:
        return { role: 'user', content: msg.content };
    }
  }

  async generateObject<T>(input: {
    schema: Parameters<typeof zodResponseFormat>[0];
    messages: Message[];
  }): Promise<T> {
    const outbound = input.messages.map((m) => this.toOpenAI(m));
    const res = await this.client.chat.completions.parse({
      model: this.model,
      messages: outbound,
      response_format: zodResponseFormat(input.schema, 'object'),
    });
    const content = res.choices?.[0]?.message?.content;
    if (!content) throw new Error('No response from OpenAI');
    // Repair any minor JSON defects before parsing and validating.
    const repaired = repairJson(content, { extractJson: true }) as string;
    return input.schema.parse(JSON.parse(repaired)) as T;
  }
}

View File

@@ -0,0 +1,65 @@
import { getProviderById } from '../config.js';
import { OpenAILLM } from './openai.js';
import { OllamaLLM } from './ollama.js';
import type { BaseLLM } from './base/llm.js';
/**
 * Derives a provider definition purely from environment variables; used as
 * a fallback when config.json has no matching provider. Returns null when
 * the environment selects nothing.
 */
function getEnvProvider(): { type: string; config: Record<string, unknown> } | null {
  const selected = (process.env.LLM_PROVIDER ?? '').toLowerCase();
  if (selected === 'ollama') {
    // Inside Docker the host's Ollama is reachable via host.docker.internal.
    const defaultHost = process.env.DOCKER
      ? 'http://host.docker.internal:11434'
      : 'http://localhost:11434';
    return {
      type: 'ollama',
      config: { baseURL: process.env.OLLAMA_BASE_URL ?? defaultHost },
    };
  }
  // Explicit openai selection, or an API key present implies openai.
  if (selected === 'openai' || process.env.OPENAI_API_KEY) {
    return {
      type: 'openai',
      config: {
        apiKey: process.env.OPENAI_API_KEY ?? '',
        baseURL: process.env.OPENAI_BASE_URL ?? 'https://api.openai.com/v1',
      },
    };
  }
  return null;
}
/**
 * Resolves a provider (from config.json, else from environment variables)
 * and instantiates the matching LLM client for the requested model key.
 *
 * @throws when the provider is unknown, the model key is not registered on
 *         a configured provider, required credentials are missing, or the
 *         provider type is unsupported.
 */
export async function loadChatModel(providerId: string, modelKey: string): Promise<BaseLLM> {
  let provider = getProviderById(providerId);

  // Fall back to an env-derived provider when config.json has no match.
  if (!provider) {
    const fromEnv = getEnvProvider();
    if (fromEnv) {
      provider = {
        id: 'env',
        name: fromEnv.type,
        type: fromEnv.type,
        chatModels: [{ key: modelKey, name: modelKey }],
        config: fromEnv.config,
      };
    }
  }
  if (!provider) throw new Error(`Provider not found: ${providerId}`);

  // Env providers accept any model key; configured providers must list it.
  const knowsModel = provider.chatModels?.some((m) => m.key === modelKey);
  if (!knowsModel && provider.id !== 'env') {
    throw new Error(`Model ${modelKey} not found in provider ${providerId}`);
  }

  const cfg = provider.config as Record<string, unknown>;
  switch (provider.type) {
    case 'openai': {
      const apiKey = (cfg.apiKey as string) || process.env.OPENAI_API_KEY;
      const baseURL = (cfg.baseURL as string) || 'https://api.openai.com/v1';
      if (!apiKey) throw new Error('OpenAI API key not configured');
      return new OpenAILLM({ apiKey, model: modelKey, baseURL });
    }
    case 'ollama': {
      const baseURL =
        (cfg.baseURL as string) ||
        process.env.OLLAMA_BASE_URL ||
        'http://localhost:11434';
      return new OllamaLLM({ baseURL, model: modelKey });
    }
    default:
      throw new Error(`Unsupported provider type: ${provider.type}`);
  }
}

View File

@@ -0,0 +1,29 @@
import type { ChatTurnMessage } from '../types.js';
/**
 * System prompt instructing the model to rewrite the follow-up question as
 * a standalone, specific image-search query, answered as `{"query": "..."}`.
 */
export const imageSearchPrompt = `
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search the web for images.
You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
Make sure to make the query standalone and not something very broad, use context from the answers in the conversation to make it specific so user can get best image search results.
Output only the rephrased query in query key JSON format. Do not include any explanation or additional text.
`;

/** Few-shot examples demonstrating the expected JSON-only answers. */
export const imageSearchFewShots: ChatTurnMessage[] = [
  {
    role: 'user',
    content:
      '<conversation>\n</conversation>\n<follow_up>\nWhat is a cat?\n</follow_up>',
  },
  { role: 'assistant', content: '{"query":"A cat"}' },
  {
    role: 'user',
    content:
      '<conversation>\n</conversation>\n<follow_up>\nWhat is a car? How does it work?\n</follow_up>',
  },
  { role: 'assistant', content: '{"query":"Car working"}' },
  {
    role: 'user',
    content:
      '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
  },
  { role: 'assistant', content: '{"query":"AC working"}' },
];

View File

@@ -0,0 +1,29 @@
import type { ChatTurnMessage } from '../types.js';
/**
 * System prompt instructing the model to rewrite the follow-up question as
 * a standalone, specific YouTube-search query, answered as `{"query": "..."}`.
 */
export const videoSearchPrompt = `
You will be given a conversation below and a follow up question. You need to rephrase the follow-up question so it is a standalone question that can be used by the LLM to search Youtube for videos.
You need to make sure the rephrased question agrees with the conversation and is relevant to the conversation.
Make sure to make the query standalone and not something very broad, use context from the answers in the conversation to make it specific so user can get best video search results.
Output only the rephrased query in query key JSON format. Do not include any explanation or additional text.
`;

/** Few-shot examples demonstrating the expected JSON-only answers. */
export const videoSearchFewShots: ChatTurnMessage[] = [
  {
    role: 'user',
    content:
      '<conversation>\n</conversation>\n<follow_up>\nHow does a car work?\n</follow_up>',
  },
  { role: 'assistant', content: '{"query":"How does a car work?"}' },
  {
    role: 'user',
    content:
      '<conversation>\n</conversation>\n<follow_up>\nWhat is the theory of relativity?\n</follow_up>',
  },
  { role: 'assistant', content: '{"query":"Theory of relativity"}' },
  {
    role: 'user',
    content:
      '<conversation>\n</conversation>\n<follow_up>\nHow does an AC work?\n</follow_up>',
  },
  { role: 'assistant', content: '{"query":"AC working"}' },
];

View File

@@ -0,0 +1,58 @@
// A dedicated search-svc endpoint takes priority over raw SearXNG instances.
const SEARCH_SVC_URL = process.env.SEARCH_SVC_URL?.trim() ?? '';
const SEARXNG_URL = process.env.SEARXNG_URL?.trim() ?? '';
// Public SearXNG instances used when no local instance is configured.
const FALLBACK_URLS = (
  process.env.SEARXNG_FALLBACK_URL?.split(',').map((u) => u.trim()) ?? [
    'https://searx.tiekoetter.com',
    'https://search.sapti.me',
  ]
).filter(Boolean);

export type SearchResult = {
  title: string;
  url: string;
  img_src?: string;
  thumbnail?: string;
  iframe_src?: string;
};

/**
 * Runs a web search, preferring the dedicated search-svc when configured and
 * otherwise trying the configured SearXNG instance followed by the public
 * fallbacks until one responds.
 *
 * @throws when no backend is configured or every candidate fails.
 */
export async function search(
  query: string,
  opts?: { engines?: string[] },
): Promise<{ results: SearchResult[] }> {
  const engines = opts?.engines?.join(',') ?? '';

  if (SEARCH_SVC_URL) {
    const params = new URLSearchParams();
    params.set('q', query);
    if (engines) params.set('engines', engines);
    const url = `${SEARCH_SVC_URL.replace(/\/$/, '')}/api/v1/search?${params}`;
    const res = await fetch(url, { signal: AbortSignal.timeout(15000) });
    if (!res.ok) {
      const err = (await res.json().catch(() => ({}))) as { error?: string };
      throw new Error(err.error ?? `Search HTTP ${res.status}`);
    }
    return res.json() as Promise<{ results: SearchResult[] }>;
  }

  // Entries are plain strings after filter(Boolean); just normalize the scheme.
  const candidates = [SEARXNG_URL, ...FALLBACK_URLS]
    .filter(Boolean)
    .map((u) => (u.startsWith('http') ? u : `http://${u}`));
  if (candidates.length === 0) throw new Error('SearXNG/SEARCH_SVC_URL not configured');

  let lastErr: Error | null = null;
  for (const base of candidates) {
    try {
      const params = new URLSearchParams();
      params.set('format', 'json');
      params.set('q', query);
      if (engines) params.set('engines', engines);
      const res = await fetch(`${base}/search?${params}`, { signal: AbortSignal.timeout(15000) });
      // A non-2xx instance is unusable — skip to the next candidate instead
      // of parsing an error body and returning empty results.
      if (!res.ok) throw new Error(`SearXNG HTTP ${res.status}`);
      const data = (await res.json()) as { results?: SearchResult[] };
      return { results: data.results ?? [] };
    } catch (e) {
      lastErr = e instanceof Error ? e : new Error(String(e));
    }
  }
  throw lastErr ?? new Error('Search failed');
}

View File

@@ -0,0 +1,6 @@
/** A message authored by the end user. */
export type UserMessage = { role: 'user'; content: string };
/** A message produced by the model. */
export type AssistantMessage = { role: 'assistant'; content: string };
/** A system/instruction message. */
export type SystemMessage = { role: 'system'; content: string };
/** Messages that appear in the user-visible conversation history (no system role). */
export type ChatTurnMessage = UserMessage | AssistantMessage;
/** Any message that can be sent to an LLM, including system instructions. */
export type Message = UserMessage | AssistantMessage | SystemMessage;

View File

@@ -0,0 +1,7 @@
import type { ChatTurnMessage } from '../types.js';
/**
 * Renders a chat history as plain text, one line per turn, prefixed with
 * "User:" or "AI:" depending on the speaker.
 */
export default function formatChatHistoryAsString(history: ChatTurnMessage[]): string {
  const lines: string[] = [];
  for (const turn of history) {
    const speaker = turn.role === 'assistant' ? 'AI' : 'User';
    lines.push(`${speaker}: ${turn.content}`);
  }
  return lines.join('\n');
}

View File

@@ -0,0 +1,15 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"outDir": "dist",
"rootDir": "src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"declaration": true,
"declarationMap": true
},
"include": ["src/**/*"]
}