Files
gooseek/services/master-agents-svc/src/lib/agent/classifier.ts
home 328d968f3f Deploy: migrate k3s → Docker; search logic → master-agents-svc
- deploy/k3s удалён, deploy/docker добавлен (Caddyfile, docker-compose, searxng)
- chat-svc: agents/models/prompts удалены, использует llm-svc (LLMClient, EmbeddingClient)
- master-agents-svc: SearchOrchestrator, classifier, researcher, actions, widgets
- web-svc: ChatModelSelector, Optimization, Sources удалены; InputBarPlus; UnregisterSW
- geo-device-svc, localization-svc: Dockerfiles
- docs: 02-k3s-services-spec.md, RUNBOOK/TELEMETRY/WORKING удалены

Co-authored-by: Cursor <cursoragent@cursor.com>
2026-02-23 22:14:00 +03:00

40 lines
1.2 KiB
TypeScript

import z from 'zod';
import type { LlmClient } from '../llm-client.js';
import { getClassifierPrompt } from '../prompts/classifier.js';
import formatChatHistoryAsString from '../utils/formatHistory.js';
// Boolean routing flags the classifier LLM must emit: which search modes
// to run (or skip) and which inline widgets to show for the query.
const classificationFlags = z.object({
  skipSearch: z.boolean(),
  personalSearch: z.boolean(),
  academicSearch: z.boolean(),
  discussionSearch: z.boolean(),
  showWeatherWidget: z.boolean(),
  showStockWidget: z.boolean(),
  showCalculationWidget: z.boolean(),
});

// Full structured-output contract for the classifier: the flag set plus a
// self-contained rewrite of the user's follow-up query.
const schema = z.object({
  classification: classificationFlags,
  standaloneFollowUp: z.string(),
});
/**
 * Everything {@link classify} needs to classify a single user query.
 */
export type ClassifierInput = {
  /** Prior conversation turns as plain role/content pairs. */
  chatHistory: Array<{ role: string; content: string }>;
  /** The latest user query to classify. */
  query: string;
  /** LLM client used to produce the structured classification output. */
  llm: LlmClient;
  /** Optional locale, forwarded to the classifier system prompt. */
  locale?: string;
  /**
   * Source families enabled for this request.
   * NOTE(review): not read by `classify` itself in this file — presumably
   * consumed by the caller or the prompt layer; confirm.
   */
  enabledSources: Array<'web' | 'discussions' | 'academic'>;
};
/**
 * Classifies a user query by asking the LLM for output matching `schema`.
 *
 * Sends the locale-aware classifier system prompt together with the
 * formatted conversation history and the raw query wrapped in XML-style
 * tags, and returns the parsed structured result.
 *
 * NOTE(review): `input.enabledSources` is never read here — presumably it
 * is used elsewhere (prompt construction or orchestration); confirm.
 *
 * @param input - Chat history, query, LLM client, and optional locale.
 * @returns The routing/widget flags plus a standalone follow-up query.
 */
export async function classify(input: ClassifierInput): Promise<z.infer<typeof schema>> {
  const { chatHistory, query, llm, locale } = input;

  // Assemble the user turn: history and query, each wrapped in its own tag.
  const userContent = [
    '<conversation_history>',
    formatChatHistoryAsString(chatHistory),
    '</conversation_history>',
    '<user_query>',
    query,
    '</user_query>',
  ].join('\n');

  return llm.generateObject<z.infer<typeof schema>>({
    messages: [
      { role: 'system', content: getClassifierPrompt(locale) },
      { role: 'user', content: userContent },
    ],
    schema,
  });
}