feat: implement Iteration 1 — AI recipe recommendations

Backend:
- Add Groq LLM client (llama-3.3-70b) for recipe generation with JSON
  retry strategy (retries only on parse errors, not API errors)
- Add Pexels client for parallel photo search per recipe
- Add saved_recipes table (migration 004) with JSONB fields
- Add GET /recommendations endpoint (profile-aware prompt building)
- Add POST/GET/GET{id}/DELETE /saved-recipes CRUD endpoints
- Wire gemini (Groq-backed LLM client), pexels, recommendation, savedrecipe packages in main.go

Flutter:
- Add Recipe, SavedRecipe models with json_serializable
- Add RecipeService (getRecommendations, getSavedRecipes, save, delete)
- Add RecommendationsNotifier and SavedRecipesNotifier (Riverpod)
- Add RecommendationsScreen with skeleton loading and refresh FAB
- Add RecipeDetailScreen (SliverAppBar, nutrition tooltip, steps with timer)
- Add SavedRecipesScreen with Dismissible swipe-to-delete and empty state
- Update RecipesScreen to TabBar (Recommendations / Saved)
- Add /recipe-detail route outside ShellRoute (no bottom nav)
- Extend ApiClient with getList() and deleteVoid()

Project:
- Add CLAUDE.md with English-only rule for comments and commit messages

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
dbastrikin
2026-02-21 22:43:29 +02:00
parent 24219b611e
commit e57ff8e06c
41 changed files with 5994 additions and 353 deletions

View File

@@ -0,0 +1,81 @@
package gemini
import (
	"bytes"
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"time"
)
const (
// Groq — OpenAI-compatible API, free tier, no billing required.
groqAPIURL = "https://api.groq.com/openai/v1/chat/completions"
groqModel = "llama-3.3-70b-versatile"
maxRetries = 3
)
// Client is an HTTP client for the Groq LLM API (OpenAI-compatible).
type Client struct {
apiKey string
httpClient *http.Client
}
// NewClient creates a new Client.
func NewClient(apiKey string) *Client {
return &Client{
apiKey: apiKey,
httpClient: &http.Client{
Timeout: 60 * time.Second,
},
}
}
// generateContent sends a user prompt to Groq and returns the assistant text.
func (c *Client) generateContent(ctx context.Context, messages []map[string]string) (string, error) {
body := map[string]any{
"model": groqModel,
"temperature": 0.7,
"messages": messages,
}
bodyBytes, err := json.Marshal(body)
if err != nil {
return "", fmt.Errorf("marshal request: %w", err)
}
req, err := http.NewRequestWithContext(ctx, http.MethodPost, groqAPIURL, bytes.NewReader(bodyBytes))
if err != nil {
return "", fmt.Errorf("create request: %w", err)
}
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Authorization", "Bearer "+c.apiKey)
resp, err := c.httpClient.Do(req)
if err != nil {
return "", fmt.Errorf("send request: %w", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
raw, _ := io.ReadAll(resp.Body)
return "", fmt.Errorf("groq API error %d: %s", resp.StatusCode, string(raw))
}
var result struct {
Choices []struct {
Message struct {
Content string `json:"content"`
} `json:"message"`
} `json:"choices"`
}
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
return "", fmt.Errorf("decode response: %w", err)
}
if len(result.Choices) == 0 {
return "", fmt.Errorf("empty response from Groq")
}
return result.Choices[0].Message.Content, nil
}