// Package llm defines a provider-agnostic abstraction for streaming
// chat-completion style LLM APIs, plus a factory that dispatches to
// concrete provider clients by provider ID.
package llm

import (
	"context"
	"fmt"
	"io"
	"strings"
)

// Role identifies the author of a chat message.
type Role string

// Well-known message roles.
const (
	RoleSystem    Role = "system"
	RoleUser      Role = "user"
	RoleAssistant Role = "assistant"
	RoleTool      Role = "tool"
)

// Message is a single turn in a conversation. ToolCalls, ToolCallID,
// Name and Images are optional and omitted from JSON when empty.
type Message struct {
	Role       Role           `json:"role"`
	Content    string         `json:"content"`
	ToolCalls  []ToolCall     `json:"tool_calls,omitempty"`
	ToolCallID string         `json:"tool_call_id,omitempty"`
	Name       string         `json:"name,omitempty"`
	Images     []ImageContent `json:"images,omitempty"`
}

// ImageContent is an image attached to a message, either by URL or as
// inline data (optionally base64-encoded, per IsBase64).
type ImageContent struct {
	Type     string `json:"type"`
	URL      string `json:"url,omitempty"`
	Data     string `json:"data,omitempty"`
	IsBase64 bool   `json:"isBase64,omitempty"`
}

// ToolCall is a model-initiated invocation of a named tool with
// free-form JSON arguments.
type ToolCall struct {
	ID        string                 `json:"id"`
	Name      string                 `json:"name"`
	Arguments map[string]interface{} `json:"arguments"`
}

// Tool describes a callable tool advertised to the model. Schema is an
// arbitrary JSON-schema-like value; its shape is provider-defined.
type Tool struct {
	Name        string      `json:"name"`
	Description string      `json:"description"`
	Schema      interface{} `json:"schema"`
}

// StreamOptions carries optional sampling/limit parameters for a request.
type StreamOptions struct {
	MaxTokens   int      `json:"max_tokens,omitempty"`
	Temperature float64  `json:"temperature,omitempty"`
	TopP        float64  `json:"top_p,omitempty"`
	StopWords   []string `json:"stop,omitempty"`
}

// StreamChunk is one incremental piece of a streamed response: content
// text, partial tool calls, and/or a terminal finish reason.
type StreamChunk struct {
	ContentChunk  string     `json:"content_chunk,omitempty"`
	ToolCallChunk []ToolCall `json:"tool_call_chunk,omitempty"`
	FinishReason  string     `json:"finish_reason,omitempty"`
}

// StreamRequest is the provider-agnostic request payload for both
// streaming and one-shot generation.
type StreamRequest struct {
	Messages []Message     `json:"messages"`
	Tools    []Tool        `json:"tools,omitempty"`
	Options  StreamOptions `json:"options,omitempty"`
}

// Client is the interface every provider implementation satisfies.
// StreamText returns a channel of incremental chunks; GenerateText
// returns the full completion as a single string.
type Client interface {
	StreamText(ctx context.Context, req StreamRequest) (<-chan StreamChunk, error)
	GenerateText(ctx context.Context, req StreamRequest) (string, error)
	GetProviderID() string
	GetModelKey() string
}

// ProviderConfig selects and configures a concrete provider client.
// ProviderID chooses the implementation; the remaining fields are
// passed through to it (AgentAccessID is timeweb-specific).
type ProviderConfig struct {
	ProviderID    string `json:"providerId"`
	ModelKey      string `json:"key"`
	APIKey        string `json:"apiKey,omitempty"`
	BaseURL       string `json:"baseUrl,omitempty"`
	AgentAccessID string `json:"agentAccessId,omitempty"`
}

// NewClient constructs the provider client named by cfg.ProviderID.
// It returns an error for unknown provider IDs.
func NewClient(cfg ProviderConfig) (Client, error) {
	switch cfg.ProviderID {
	case "timeweb":
		return NewTimewebClient(TimewebConfig{
			BaseURL:       cfg.BaseURL,
			AgentAccessID: cfg.AgentAccessID,
			APIKey:        cfg.APIKey,
			ModelKey:      cfg.ModelKey,
			ProxySource:   "gooseek",
		})
	case "openai":
		return NewOpenAIClient(cfg)
	case "anthropic":
		return NewAnthropicClient(cfg)
	case "gemini", "google":
		return NewGeminiClient(cfg)
	default:
		return nil, fmt.Errorf("unknown provider: %s", cfg.ProviderID)
	}
}

// baseClient provides the identity getters shared by provider
// implementations; embed it to satisfy that part of Client.
type baseClient struct {
	providerID string
	modelKey   string
}

// GetProviderID returns the provider identifier (e.g. "openai").
func (c *baseClient) GetProviderID() string { return c.providerID }

// GetModelKey returns the configured model key.
func (c *baseClient) GetModelKey() string { return c.modelKey }

// readAllChunks drains ch and concatenates every ContentChunk into a
// single string. It blocks until the channel is closed.
func readAllChunks(ch <-chan StreamChunk) string {
	// strings.Builder avoids the quadratic cost of += in a loop.
	var b strings.Builder
	for chunk := range ch {
		b.WriteString(chunk.ContentChunk)
	}
	return b.String()
}

// streamReader adapts a StreamChunk channel to io.Reader. Bytes of a
// chunk that do not fit the caller's buffer are held in buffer and
// served on subsequent Reads.
type streamReader struct {
	ch     <-chan StreamChunk
	buffer []byte
}

// Read implements io.Reader: it serves any leftover bytes first, then
// receives the next chunk, returning io.EOF once the channel closes.
func (r *streamReader) Read(p []byte) (n int, err error) {
	// Serve bytes left over from a previous chunk before receiving.
	if len(r.buffer) > 0 {
		n = copy(p, r.buffer)
		r.buffer = r.buffer[n:]
		return n, nil
	}
	chunk, ok := <-r.ch
	if !ok {
		// Channel closed by the producer: end of stream.
		return 0, io.EOF
	}
	data := []byte(chunk.ContentChunk)
	n = copy(p, data)
	if n < len(data) {
		// Stash the remainder for the next Read call.
		r.buffer = data[n:]
	}
	return n, nil
}