// Package gemini provides a small HTTP client for the Groq
// chat-completions API (OpenAI-compatible).
//
// NOTE(review): the package name predates the switch from Gemini to
// Groq; renaming would break importers, so it is left as-is.
package gemini

import (
	"bytes"
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"time"
)

const (
	// Groq — OpenAI-compatible API, free tier, no billing required.
	groqAPIURL = "https://api.groq.com/openai/v1/chat/completions"
	groqModel  = "llama-3.3-70b-versatile"

	// maxRetries bounds the number of attempts made for transient
	// failures (network errors, HTTP 429, HTTP 5xx).
	maxRetries = 3
)

// Client is an HTTP client for the Groq LLM API (OpenAI-compatible).
type Client struct {
	apiKey     string
	httpClient *http.Client
}

// NewClient creates a new Client authenticating with the given API key.
func NewClient(apiKey string) *Client {
	return &Client{
		apiKey: apiKey,
		httpClient: &http.Client{
			Timeout: 60 * time.Second,
		},
	}
}

// generateContent sends the chat messages to Groq and returns the first
// choice's assistant text.
//
// Transient failures (network errors, HTTP 429, HTTP 5xx) are retried
// up to maxRetries times with linear backoff; all other errors return
// immediately. Context cancellation aborts both the in-flight request
// and any backoff wait.
func (c *Client) generateContent(ctx context.Context, messages []map[string]string) (string, error) {
	body := map[string]any{
		"model":       groqModel,
		"temperature": 0.7,
		"messages":    messages,
	}
	bodyBytes, err := json.Marshal(body)
	if err != nil {
		return "", fmt.Errorf("marshal request: %w", err)
	}

	var lastErr error
	for attempt := 0; attempt < maxRetries; attempt++ {
		if attempt > 0 {
			// Linear backoff between attempts; bail out early if the
			// caller's context is cancelled while we wait.
			select {
			case <-ctx.Done():
				return "", ctx.Err()
			case <-time.After(time.Duration(attempt) * time.Second):
			}
		}

		text, retryable, err := c.doAttempt(ctx, bodyBytes)
		if err == nil {
			return text, nil
		}
		lastErr = err
		if !retryable {
			return "", err
		}
	}
	return "", fmt.Errorf("groq request failed after %d attempts: %w", maxRetries, lastErr)
}

// doAttempt performs a single HTTP round trip against the Groq API.
// retryable reports whether the failure is transient and worth another
// attempt (network error, HTTP 429, or HTTP 5xx).
func (c *Client) doAttempt(ctx context.Context, bodyBytes []byte) (text string, retryable bool, err error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, groqAPIURL, bytes.NewReader(bodyBytes))
	if err != nil {
		return "", false, fmt.Errorf("create request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+c.apiKey)

	resp, err := c.httpClient.Do(req)
	if err != nil {
		// Network-level failures (timeouts, resets) are transient.
		return "", true, fmt.Errorf("send request: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		// Best-effort read of the error body for diagnostics.
		raw, _ := io.ReadAll(resp.Body)
		retry := resp.StatusCode == http.StatusTooManyRequests || resp.StatusCode >= 500
		return "", retry, fmt.Errorf("groq API error %d: %s", resp.StatusCode, string(raw))
	}

	var result struct {
		Choices []struct {
			Message struct {
				Content string `json:"content"`
			} `json:"message"`
		} `json:"choices"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		return "", false, fmt.Errorf("decode response: %w", err)
	}
	if len(result.Choices) == 0 {
		return "", false, errors.New("empty response from Groq")
	}
	return result.Choices[0].Message.Content, false, nil
}