/*
 * Change summary (scraped commit metadata, converted to a comment so the file parses):
 * - deploy/k3s removed, deploy/docker added (Caddyfile, docker-compose, searxng)
 * - chat-svc: agents/models/prompts removed; now uses llm-svc (LLMClient, EmbeddingClient)
 * - master-agents-svc: SearchOrchestrator, classifier, researcher, actions, widgets
 * - web-svc: ChatModelSelector, Optimization, Sources removed; InputBarPlus; UnregisterSW
 * - geo-device-svc, localization-svc: Dockerfiles
 * - docs: 02-k3s-services-spec.md, RUNBOOK/TELEMETRY/WORKING removed
 * Co-authored-by: Cursor <cursoragent@cursor.com>
 */
import z from 'zod';
|
|
import type { LlmClient } from '../llm-client.js';
|
|
import { getClassifierPrompt } from '../prompts/classifier.js';
|
|
import formatChatHistoryAsString from '../utils/formatHistory.js';
|
|
|
|
const schema = z.object({
|
|
classification: z.object({
|
|
skipSearch: z.boolean(),
|
|
personalSearch: z.boolean(),
|
|
academicSearch: z.boolean(),
|
|
discussionSearch: z.boolean(),
|
|
showWeatherWidget: z.boolean(),
|
|
showStockWidget: z.boolean(),
|
|
showCalculationWidget: z.boolean(),
|
|
}),
|
|
standaloneFollowUp: z.string(),
|
|
});
|
|
|
|
// Input contract for `classify`.
export type ClassifierInput = {
  // Prior conversation turns; formatted into the user prompt via
  // formatChatHistoryAsString.
  chatHistory: { role: string; content: string }[];
  // The latest user query to classify.
  query: string;
  // LLM client whose generateObject call produces the schema-validated result.
  llm: LlmClient;
  // Optional locale, forwarded to getClassifierPrompt for the system message.
  locale?: string;
  // NOTE(review): not read by `classify` in this file — presumably consumed
  // by the caller or intended for the prompt; confirm before removing.
  enabledSources: ('web' | 'discussions' | 'academic')[];
};
|
|
|
|
export async function classify(input: ClassifierInput): Promise<z.infer<typeof schema>> {
|
|
const output = await input.llm.generateObject<z.infer<typeof schema>>({
|
|
messages: [
|
|
{ role: 'system', content: getClassifierPrompt(input.locale) },
|
|
{
|
|
role: 'user',
|
|
content: `<conversation_history>\n${formatChatHistoryAsString(input.chatHistory)}\n</conversation_history>\n<user_query>\n${input.query}\n</user_query>`,
|
|
},
|
|
],
|
|
schema,
|
|
});
|
|
return output;
|
|
}
|