feat: switch AI provider from Groq to OpenAI

Replace Groq/Llama with OpenAI API:
- Text model: gpt-4o-mini
- Vision model: gpt-4o
- Rename GEMINI_API_KEY → OPENAI_API_KEY env var (the old name was a leftover from an earlier Gemini integration even while Groq was in use; note the `gemini` package and `geminiClient` identifiers are NOT renamed in this commit — consider a follow-up)
- Rename callGroq → callOpenAI, update all related constants and comments

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
dbastrikin
2026-02-22 13:57:55 +02:00
parent 842bca9641
commit 1973f45b0a
6 changed files with 20 additions and 20 deletions

View File

@@ -14,5 +14,5 @@ PORT=8080
ALLOWED_ORIGINS=http://localhost:3000 ALLOWED_ORIGINS=http://localhost:3000
# External APIs # External APIs
GEMINI_API_KEY=your-gemini-key OPENAI_API_KEY=your-openai-key
PEXELS_API_KEY=your-pexels-key PEXELS_API_KEY=your-pexels-key

View File

@@ -93,7 +93,7 @@ func run() error {
authMW := middleware.Auth(&jwtAdapter{jm: jwtManager}) authMW := middleware.Auth(&jwtAdapter{jm: jwtManager})
// External API clients // External API clients
geminiClient := gemini.NewClient(cfg.GeminiAPIKey) geminiClient := gemini.NewClient(cfg.OpenAIAPIKey)
pexelsClient := pexels.NewClient(cfg.PexelsAPIKey) pexelsClient := pexels.NewClient(cfg.PexelsAPIKey)
// Ingredient domain // Ingredient domain

View File

@@ -22,7 +22,7 @@ type Config struct {
AllowedOrigins []string `envconfig:"ALLOWED_ORIGINS" default:"http://localhost:3000"` AllowedOrigins []string `envconfig:"ALLOWED_ORIGINS" default:"http://localhost:3000"`
// External APIs // External APIs
GeminiAPIKey string `envconfig:"GEMINI_API_KEY" required:"true"` OpenAIAPIKey string `envconfig:"OPENAI_API_KEY" required:"true"`
PexelsAPIKey string `envconfig:"PEXELS_API_KEY" required:"true"` PexelsAPIKey string `envconfig:"PEXELS_API_KEY" required:"true"`
} }

View File

@@ -11,19 +11,19 @@ import (
) )
const ( const (
// groqAPIURL is the Groq OpenAI-compatible endpoint (free tier, no billing required). // openaiAPIURL is the OpenAI chat completions endpoint.
groqAPIURL = "https://api.groq.com/openai/v1/chat/completions" openaiAPIURL = "https://api.openai.com/v1/chat/completions"
// groqModel is the default text generation model. // openaiModel is the default text generation model.
groqModel = "llama-3.3-70b-versatile" openaiModel = "gpt-4o-mini"
// groqVisionModel supports image inputs in OpenAI vision format. // openaiVisionModel supports image inputs.
groqVisionModel = "meta-llama/llama-4-scout-17b-16e-instruct" openaiVisionModel = "gpt-4o"
maxRetries = 3 maxRetries = 3
) )
// Client is an HTTP client for the Groq LLM API (OpenAI-compatible). // Client is an HTTP client for the OpenAI API.
type Client struct { type Client struct {
apiKey string apiKey string
httpClient *http.Client httpClient *http.Client
@@ -39,9 +39,9 @@ func NewClient(apiKey string) *Client {
} }
} }
// generateContent sends text messages to the text-only model. // generateContent sends text messages to the text model.
func (c *Client) generateContent(ctx context.Context, messages []map[string]string) (string, error) { func (c *Client) generateContent(ctx context.Context, messages []map[string]string) (string, error) {
return c.callGroq(ctx, groqModel, 0.7, messages) return c.callOpenAI(ctx, openaiModel, 0.7, messages)
} }
// generateVisionContent sends an image + text prompt to the vision model. // generateVisionContent sends an image + text prompt to the vision model.
@@ -68,12 +68,12 @@ func (c *Client) generateVisionContent(ctx context.Context, prompt, imageBase64,
}, },
}, },
} }
return c.callGroq(ctx, groqVisionModel, 0.1, messages) return c.callOpenAI(ctx, openaiVisionModel, 0.1, messages)
} }
// callGroq is the shared HTTP transport for all Groq requests. // callOpenAI is the shared HTTP transport for all OpenAI requests.
// messages can be []map[string]string (text) or []any (vision with image content). // messages can be []map[string]string (text) or []any (vision with image content).
func (c *Client) callGroq(ctx context.Context, model string, temperature float64, messages any) (string, error) { func (c *Client) callOpenAI(ctx context.Context, model string, temperature float64, messages any) (string, error) {
body := map[string]any{ body := map[string]any{
"model": model, "model": model,
"temperature": temperature, "temperature": temperature,
@@ -85,7 +85,7 @@ func (c *Client) callGroq(ctx context.Context, model string, temperature float64
return "", fmt.Errorf("marshal request: %w", err) return "", fmt.Errorf("marshal request: %w", err)
} }
req, err := http.NewRequestWithContext(ctx, http.MethodPost, groqAPIURL, bytes.NewReader(bodyBytes)) req, err := http.NewRequestWithContext(ctx, http.MethodPost, openaiAPIURL, bytes.NewReader(bodyBytes))
if err != nil { if err != nil {
return "", fmt.Errorf("create request: %w", err) return "", fmt.Errorf("create request: %w", err)
} }
@@ -100,7 +100,7 @@ func (c *Client) callGroq(ctx context.Context, model string, temperature float64
if resp.StatusCode != http.StatusOK { if resp.StatusCode != http.StatusOK {
raw, _ := io.ReadAll(resp.Body) raw, _ := io.ReadAll(resp.Body)
return "", fmt.Errorf("groq API error %d: %s", resp.StatusCode, string(raw)) return "", fmt.Errorf("openai API error %d: %s", resp.StatusCode, string(raw))
} }
var result struct { var result struct {
@@ -114,7 +114,7 @@ func (c *Client) callGroq(ctx context.Context, model string, temperature float64
return "", fmt.Errorf("decode response: %w", err) return "", fmt.Errorf("decode response: %w", err)
} }
if len(result.Choices) == 0 { if len(result.Choices) == 0 {
return "", fmt.Errorf("empty response from Groq") return "", fmt.Errorf("empty response from OpenAI")
} }
return result.Choices[0].Message.Content, nil return result.Choices[0].Message.Content, nil
} }

View File

@@ -68,7 +68,7 @@ type NutritionInfo struct {
func (c *Client) GenerateRecipes(ctx context.Context, req RecipeRequest) ([]Recipe, error) { func (c *Client) GenerateRecipes(ctx context.Context, req RecipeRequest) ([]Recipe, error) {
prompt := buildRecipePrompt(req) prompt := buildRecipePrompt(req)
// OpenAI-compatible messages format used by Groq. // OpenAI messages format.
messages := []map[string]string{ messages := []map[string]string{
{"role": "user", "content": prompt}, {"role": "user", "content": prompt},
} }

View File

@@ -135,7 +135,7 @@ func (h *Handler) GenerateMenu(w http.ResponseWriter, r *http.Request) {
menuReq.AvailableProducts = products menuReq.AvailableProducts = products
} }
// Generate 7-day plan via Groq. // Generate 7-day plan via OpenAI.
days, err := h.gemini.GenerateMenu(r.Context(), menuReq) days, err := h.gemini.GenerateMenu(r.Context(), menuReq)
if err != nil { if err != nil {
slog.Error("generate menu", "user_id", userID, "err", err) slog.Error("generate menu", "user_id", userID, "err", err)