// Package travel contains travel-domain services; this file adapts the
// shared internal llm client to the domain's chat-streaming needs.
package travel

import (
	"context"

	"github.com/gooseek/backend/internal/llm"
)

// LLMClientAdapter bridges the travel domain's ChatMessage type to the
// internal llm.Client streaming API.
type LLMClientAdapter struct {
	client llm.Client
}

// NewLLMClientAdapter returns an adapter wrapping the given llm client.
func NewLLMClientAdapter(client llm.Client) *LLMClientAdapter {
	return &LLMClientAdapter{client: client}
}

// StreamChat converts messages into llm.Message values, opens a text
// stream, and invokes onChunk for every non-empty content chunk received.
//
// Unknown role strings are coerced to llm.RoleUser so a malformed message
// still yields a valid request. The receive loop returns ctx.Err() as soon
// as the context is canceled instead of draining the rest of the stream.
func (a *LLMClientAdapter) StreamChat(ctx context.Context, messages []ChatMessage, onChunk func(string)) error {
	llmMessages := make([]llm.Message, len(messages))
	for i, m := range messages {
		var role llm.Role
		switch m.Role {
		case "system":
			role = llm.RoleSystem
		case "user":
			role = llm.RoleUser
		case "assistant":
			role = llm.RoleAssistant
		default:
			// Fall back to the user role rather than failing the request.
			role = llm.RoleUser
		}
		llmMessages[i] = llm.Message{
			Role:    role,
			Content: m.Content,
		}
	}

	// NOTE(review): MaxTokens/Temperature are hard-coded here — confirm
	// these defaults are intended for all travel-chat call sites.
	req := llm.StreamRequest{
		Messages: llmMessages,
		Options: llm.StreamOptions{
			MaxTokens:   4096,
			Temperature: 0.7,
		},
	}

	ch, err := a.client.StreamText(ctx, req)
	if err != nil {
		return err
	}

	for chunk := range ch {
		// Stop promptly on cancellation instead of draining the channel
		// and calling onChunk for data the caller no longer wants.
		if err := ctx.Err(); err != nil {
			return err
		}
		if chunk.ContentChunk != "" {
			onChunk(chunk.ContentChunk)
		}
	}
	return nil
}