Files
food-ai/backend/internal/domain/recognition/handler.go
dbastrikin c7317c4335 feat: async product/receipt recognition via Kafka
Backend:
- Migration 002: product_recognition_jobs table with JSONB images column
  and job_type CHECK ('receipt' | 'products')
- New Kafka topics: ai.products.paid / ai.products.free
- ProductJob model, ProductJobRepository (mirrors dish job pattern)
- itemEnricher extracted from Handler — shared by HTTP handler and worker
- ProductSSEBroker: PG LISTEN on product_job_update channel
- ProductWorkerPool: 5 workers, branches on job_type to call
  RecognizeReceipt or RecognizeProducts per image in parallel
- Handler: RecognizeReceipt and RecognizeProducts now return 202 Accepted
  instead of blocking; 4 new endpoints: GET /ai/product-jobs,
  /product-jobs/history, /product-jobs/{id}, /product-jobs/{id}/stream
- cmd/worker: extended to run ProductWorkerPool alongside dish WorkerPool
- cmd/server: wires productJobRepository + productSSEBroker; both SSE
  brokers started in App.Start()

Flutter client:
- ProductJobCreated, ProductJobResult, ProductJobSummary, ProductJobEvent
  models + submitReceiptRecognition/submitProductsRecognition/stream methods
- Shared _openSseStream helper eliminates duplicate SSE parsing loop
- ScanScreen: replace blocking AI calls with async submit + navigate to
  ProductJobWatchScreen
- ProductJobWatchScreen: watches SSE stream, navigates to /scan/confirm
  when done, shows error on failure
- ProductsScreen: prepends _RecentScansSection (hidden when empty); compact
  horizontal list of recent scans with "See all" → history
- ProductJobHistoryScreen: full list of all product recognition jobs
- New routes: /scan/product-job-watch, /products/job-history
- L10n: 7 new keys in all 12 ARB files

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-23 23:01:30 +02:00

440 lines
16 KiB
Go

package recognition
import (
"context"
"encoding/json"
"log/slog"
"net/http"
"strings"
"github.com/go-chi/chi/v5"
"github.com/food-ai/backend/internal/adapters/ai"
"github.com/food-ai/backend/internal/domain/dish"
"github.com/food-ai/backend/internal/domain/product"
"github.com/food-ai/backend/internal/infra/locale"
"github.com/food-ai/backend/internal/infra/middleware"
)
// DishRepository is the subset of dish.Repository used by workers and the handler.
type DishRepository interface {
    // FindOrCreate resolves a dish name to its ID, inserting a new dish when
    // absent; the bool reports whether a row was created.
    FindOrCreate(ctx context.Context, name string) (string, bool, error)
    // FindOrCreateRecipe resolves (or inserts) a recipe for the dish with the
    // given macro values; the bool reports whether a row was created.
    FindOrCreateRecipe(ctx context.Context, dishID string, calories, proteinG, fatG, carbsG float64) (string, bool, error)
    // UpsertTranslation stores a localized name for the dish in the given language.
    UpsertTranslation(ctx context.Context, dishID, lang, name string) error
    // GetTranslation returns the dish name in lang; the bool reports whether one exists.
    GetTranslation(ctx context.Context, dishID, lang string) (string, bool, error)
    // AddRecipe attaches a full recipe to the dish and returns the new recipe ID.
    AddRecipe(ctx context.Context, dishID string, req dish.CreateRequest) (string, error)
}
// ProductRepository is the subset of product.Repository used by this handler.
type ProductRepository interface {
    // FuzzyMatch finds the catalog product best matching name, or an error
    // when no acceptable match exists.
    FuzzyMatch(ctx context.Context, name string) (*product.Product, error)
    // Upsert inserts or updates a catalog product and returns the stored row.
    Upsert(ctx context.Context, catalogProduct *product.Product) (*product.Product, error)
}
// Recognizer is the AI provider interface for image-based food recognition.
// All image inputs are base64-encoded with an accompanying MIME type, and
// lang selects the language of the returned names/descriptions.
type Recognizer interface {
    // RecognizeReceipt extracts purchased food items from a receipt photo.
    RecognizeReceipt(ctx context.Context, imageBase64, mimeType, lang string) (*ai.ReceiptResult, error)
    // RecognizeProducts identifies individual food products in a photo.
    RecognizeProducts(ctx context.Context, imageBase64, mimeType, lang string) ([]ai.RecognizedItem, error)
    // RecognizeDish identifies a prepared dish and its nutritional estimate.
    RecognizeDish(ctx context.Context, imageBase64, mimeType, lang string) (*ai.DishResult, error)
    // ClassifyIngredient categorizes a free-text ingredient name.
    ClassifyIngredient(ctx context.Context, name string) (*ai.IngredientClassification, error)
    // GenerateRecipeForDish produces a recipe for the named dish.
    GenerateRecipeForDish(ctx context.Context, dishName string) (*ai.Recipe, error)
    // TranslateDishName returns translations of name keyed by language code.
    TranslateDishName(ctx context.Context, name string) (map[string]string, error)
}
// KafkaPublisher publishes job IDs to a Kafka topic.
type KafkaPublisher interface {
    // Publish sends message (a job ID in this package) to topic.
    Publish(ctx context.Context, topic, message string) error
}
// Handler handles POST /ai/* recognition endpoints.
// Dish and product recognition are both asynchronous: submit endpoints insert
// a job row, publish its ID to Kafka, and return 202; results are retrieved
// by polling GET endpoints or via the SSE brokers.
type Handler struct {
    enricher         *itemEnricher      // shared enrichment logic (also used by the worker)
    recognizer       Recognizer         // AI provider
    jobRepo          JobRepository      // dish recognition jobs
    productJobRepo   ProductJobRepository // receipt/product recognition jobs
    kafkaProducer    KafkaPublisher     // enqueues job IDs for the worker pools
    sseBroker        *SSEBroker         // streams dish job updates
    productSSEBroker *ProductSSEBroker  // streams product job updates
}
// NewHandler creates a new Handler with async dish and product recognition support.
// The shared itemEnricher is built here from the recognizer and product repository.
func NewHandler(
    recognizer Recognizer,
    productRepo ProductRepository,
    jobRepo JobRepository,
    productJobRepo ProductJobRepository,
    kafkaProducer KafkaPublisher,
    sseBroker *SSEBroker,
    productSSEBroker *ProductSSEBroker,
) *Handler {
    handler := &Handler{
        recognizer:       recognizer,
        jobRepo:          jobRepo,
        productJobRepo:   productJobRepo,
        kafkaProducer:    kafkaProducer,
        sseBroker:        sseBroker,
        productSSEBroker: productSSEBroker,
    }
    handler.enricher = newItemEnricher(recognizer, productRepo)
    return handler
}
// ---------------------------------------------------------------------------
// Request / Response types
// ---------------------------------------------------------------------------
// imageRequest is the common request body containing a single base64-encoded image.
type imageRequest struct {
    ImageBase64 string `json:"image_base64"` // required; raw base64 without a data: prefix
    MimeType    string `json:"mime_type"`    // e.g. "image/jpeg"
}
// recognizeDishRequest is the body for POST /ai/recognize-dish.
type recognizeDishRequest struct {
    ImageBase64    string  `json:"image_base64"` // required
    MimeType       string  `json:"mime_type"`
    TargetDate     *string `json:"target_date"`      // optional; day the meal should be logged to
    TargetMealType *string `json:"target_meal_type"` // optional; e.g. "lunch"
}
// imagesRequest is the request body for multi-image endpoints.
type imagesRequest struct {
    Images []imageRequest `json:"images"` // at least one required; handler caps at 3
}
// EnrichedItem is a recognized food item enriched with ingredient_mappings data.
type EnrichedItem struct {
    Name        string  `json:"name"`
    Quantity    float64 `json:"quantity"`
    Unit        string  `json:"unit"`
    Category    string  `json:"category"`
    Confidence  float64 `json:"confidence"`
    MappingID   *string `json:"mapping_id"`   // nil when no ingredient mapping matched
    StorageDays int     `json:"storage_days"`
}
// ReceiptResponse is the response for POST /ai/recognize-receipt.
type ReceiptResponse struct {
    Items        []EnrichedItem        `json:"items"`
    Unrecognized []ai.UnrecognizedItem `json:"unrecognized"`
}
// ---------------------------------------------------------------------------
// Handlers
// ---------------------------------------------------------------------------
// RecognizeReceipt handles POST /ai/recognize-receipt (async).
// Enqueues the receipt image for AI processing and returns 202 Accepted with a job_id.
// Body: {"image_base64": "...", "mime_type": "image/jpeg"}
func (handler *Handler) RecognizeReceipt(responseWriter http.ResponseWriter, request *http.Request) {
    var body imageRequest
    err := json.NewDecoder(request.Body).Decode(&body)
    if err != nil || body.ImageBase64 == "" {
        writeErrorJSON(responseWriter, request, http.StatusBadRequest, "image_base64 is required")
        return
    }
    // A receipt job always carries exactly one image.
    payload := []ProductImagePayload{{ImageBase64: body.ImageBase64, MimeType: body.MimeType}}
    handler.submitProductJob(responseWriter, request, "receipt", payload)
}
// RecognizeProducts handles POST /ai/recognize-products (async).
// Enqueues up to 3 product images for AI processing and returns 202 Accepted with a job_id.
// Body: {"images": [{"image_base64": "...", "mime_type": "image/jpeg"}, ...]}
func (handler *Handler) RecognizeProducts(responseWriter http.ResponseWriter, request *http.Request) {
    var body imagesRequest
    if err := json.NewDecoder(request.Body).Decode(&body); err != nil || len(body.Images) == 0 {
        writeErrorJSON(responseWriter, request, http.StatusBadRequest, "at least one image is required")
        return
    }
    // Silently cap the batch rather than rejecting oversized requests.
    const maxImages = 3
    if len(body.Images) > maxImages {
        body.Images = body.Images[:maxImages]
    }
    payloads := make([]ProductImagePayload, 0, len(body.Images))
    for _, image := range body.Images {
        payloads = append(payloads, ProductImagePayload{ImageBase64: image.ImageBase64, MimeType: image.MimeType})
    }
    handler.submitProductJob(responseWriter, request, "products", payloads)
}
// submitProductJob is shared by RecognizeReceipt and RecognizeProducts.
// It inserts a product job row, publishes its ID to the plan-appropriate
// Kafka topic, and writes a 202 Accepted response containing the job ID,
// queue position, and a rough completion estimate.
//
// jobType is "receipt" or "products" (must match the DB CHECK constraint).
func (handler *Handler) submitProductJob(
    responseWriter http.ResponseWriter,
    request *http.Request,
    jobType string,
    images []ProductImagePayload,
) {
    ctx := request.Context()
    userID := middleware.UserIDFromCtx(ctx)
    userPlan := middleware.UserPlanFromCtx(ctx)
    lang := locale.FromContext(ctx)
    job := &ProductJob{
        UserID:   userID,
        UserPlan: userPlan,
        JobType:  jobType,
        Images:   images,
        Lang:     lang,
    }
    if insertError := handler.productJobRepo.InsertProductJob(ctx, job); insertError != nil {
        slog.ErrorContext(ctx, "insert product recognition job", "err", insertError)
        writeErrorJSON(responseWriter, request, http.StatusInternalServerError, "failed to create job")
        return
    }
    // Queue position is best-effort: a lookup failure must not fail the
    // request, but it is worth surfacing in the logs instead of dropping.
    position, positionError := handler.productJobRepo.ProductQueuePosition(ctx, userPlan, job.CreatedAt)
    if positionError != nil {
        slog.WarnContext(ctx, "product queue position lookup", "job_id", job.ID, "err", positionError)
        position = 0
    }
    // Paid users publish to a dedicated topic so their jobs are not queued
    // behind free-tier traffic.
    topic := ProductTopicFree
    if userPlan == "paid" {
        topic = ProductTopicPaid
    }
    if publishError := handler.kafkaProducer.Publish(ctx, topic, job.ID); publishError != nil {
        // NOTE(review): the inserted job row remains in its initial state if
        // publish fails — confirm a reaper/retry exists for never-enqueued jobs.
        slog.ErrorContext(ctx, "publish product recognition job", "job_id", job.ID, "err", publishError)
        writeErrorJSON(responseWriter, request, http.StatusInternalServerError, "failed to enqueue job")
        return
    }
    // Rough heuristic: ~6 seconds of processing per queued job ahead of this one.
    estimatedSeconds := (position + 1) * 6
    writeJSON(responseWriter, http.StatusAccepted, map[string]any{
        "job_id":            job.ID,
        "queue_position":    position,
        "estimated_seconds": estimatedSeconds,
    })
}
// ListRecentProductJobs handles GET /ai/product-jobs — returns the last 7 days of product jobs.
func (handler *Handler) ListRecentProductJobs(responseWriter http.ResponseWriter, request *http.Request) {
    ctx := request.Context()
    requesterID := middleware.UserIDFromCtx(ctx)
    summaries, listError := handler.productJobRepo.ListRecentProductJobs(ctx, requesterID)
    if listError != nil {
        slog.ErrorContext(ctx, "list recent product jobs", "err", listError)
        writeErrorJSON(responseWriter, request, http.StatusInternalServerError, "failed to list jobs")
        return
    }
    // Serialize as [] rather than null when the user has no jobs.
    if summaries == nil {
        summaries = make([]*ProductJobSummary, 0)
    }
    writeJSON(responseWriter, http.StatusOK, summaries)
}
// ListAllProductJobs handles GET /ai/product-jobs/history — returns all product jobs for the user.
func (handler *Handler) ListAllProductJobs(responseWriter http.ResponseWriter, request *http.Request) {
    ctx := request.Context()
    requesterID := middleware.UserIDFromCtx(ctx)
    summaries, listError := handler.productJobRepo.ListAllProductJobs(ctx, requesterID)
    if listError != nil {
        slog.ErrorContext(ctx, "list all product jobs", "err", listError)
        writeErrorJSON(responseWriter, request, http.StatusInternalServerError, "failed to list jobs")
        return
    }
    // Serialize as [] rather than null when the user has no jobs.
    if summaries == nil {
        summaries = make([]*ProductJobSummary, 0)
    }
    writeJSON(responseWriter, http.StatusOK, summaries)
}
// GetProductJob handles GET /ai/product-jobs/{id}.
// Responds 404 when the job does not exist and 403 when it belongs to another user.
func (handler *Handler) GetProductJob(responseWriter http.ResponseWriter, request *http.Request) {
    ctx := request.Context()
    jobID := chi.URLParam(request, "id")
    job, fetchError := handler.productJobRepo.GetProductJobByID(ctx, jobID)
    if fetchError != nil {
        writeErrorJSON(responseWriter, request, http.StatusNotFound, "job not found")
        return
    }
    if requesterID := middleware.UserIDFromCtx(ctx); job.UserID != requesterID {
        writeErrorJSON(responseWriter, request, http.StatusForbidden, "forbidden")
        return
    }
    writeJSON(responseWriter, http.StatusOK, job)
}
// GetProductJobStream handles GET /ai/product-jobs/{id}/stream — SSE stream for product job updates.
// Delegates entirely to the product SSE broker, which owns the connection lifecycle.
func (handler *Handler) GetProductJobStream(responseWriter http.ResponseWriter, request *http.Request) {
    handler.productSSEBroker.ServeSSE(responseWriter, request)
}
// RecognizeDish handles POST /ai/recognize-dish (async).
// Enqueues the image for AI processing and returns 202 Accepted with a job_id.
// Body: {"image_base64": "...", "mime_type": "image/jpeg", "target_date": "2006-01-02", "target_meal_type": "lunch"}
func (handler *Handler) RecognizeDish(responseWriter http.ResponseWriter, request *http.Request) {
    var req recognizeDishRequest
    if decodeError := json.NewDecoder(request.Body).Decode(&req); decodeError != nil || req.ImageBase64 == "" {
        writeErrorJSON(responseWriter, request, http.StatusBadRequest, "image_base64 is required")
        return
    }
    ctx := request.Context()
    userID := middleware.UserIDFromCtx(ctx)
    userPlan := middleware.UserPlanFromCtx(ctx)
    lang := locale.FromContext(ctx)
    job := &Job{
        UserID:         userID,
        UserPlan:       userPlan,
        ImageBase64:    req.ImageBase64,
        MimeType:       req.MimeType,
        Lang:           lang,
        TargetDate:     req.TargetDate,
        TargetMealType: req.TargetMealType,
    }
    if insertError := handler.jobRepo.InsertJob(ctx, job); insertError != nil {
        slog.ErrorContext(ctx, "insert recognition job", "err", insertError)
        writeErrorJSON(responseWriter, request, http.StatusInternalServerError, "failed to create job")
        return
    }
    // Queue position is best-effort: log the failure but keep serving the request.
    position, positionError := handler.jobRepo.QueuePosition(ctx, userPlan, job.CreatedAt)
    if positionError != nil {
        slog.WarnContext(ctx, "queue position lookup", "job_id", job.ID, "err", positionError)
        position = 0
    }
    // Paid users publish to a dedicated topic so their jobs are not queued
    // behind free-tier traffic.
    topic := TopicFree
    if userPlan == "paid" {
        topic = TopicPaid
    }
    if publishError := handler.kafkaProducer.Publish(ctx, topic, job.ID); publishError != nil {
        slog.ErrorContext(ctx, "publish recognition job", "job_id", job.ID, "err", publishError)
        writeErrorJSON(responseWriter, request, http.StatusInternalServerError, "failed to enqueue job")
        return
    }
    // Rough heuristic: ~6 seconds of processing per queued job ahead of this one.
    estimatedSeconds := (position + 1) * 6
    writeJSON(responseWriter, http.StatusAccepted, map[string]any{
        "job_id":            job.ID,
        "queue_position":    position,
        "estimated_seconds": estimatedSeconds,
    })
}
// ListTodayJobs handles GET /ai/jobs — returns today's unlinked jobs for the current user.
func (handler *Handler) ListTodayJobs(responseWriter http.ResponseWriter, request *http.Request) {
    ctx := request.Context()
    requesterID := middleware.UserIDFromCtx(ctx)
    summaries, listError := handler.jobRepo.ListTodayUnlinked(ctx, requesterID)
    if listError != nil {
        slog.ErrorContext(ctx, "list today unlinked jobs", "err", listError)
        writeErrorJSON(responseWriter, request, http.StatusInternalServerError, "failed to list jobs")
        return
    }
    // Serialize as [] rather than null when there are no results.
    if summaries == nil {
        summaries = make([]*JobSummary, 0)
    }
    writeJSON(responseWriter, http.StatusOK, summaries)
}
// ListAllJobs handles GET /ai/jobs/history — returns all recognition jobs for the current user.
func (handler *Handler) ListAllJobs(responseWriter http.ResponseWriter, request *http.Request) {
    ctx := request.Context()
    requesterID := middleware.UserIDFromCtx(ctx)
    summaries, listError := handler.jobRepo.ListAll(ctx, requesterID)
    if listError != nil {
        slog.ErrorContext(ctx, "list all jobs", "err", listError)
        writeErrorJSON(responseWriter, request, http.StatusInternalServerError, "failed to list jobs")
        return
    }
    // Serialize as [] rather than null when there are no results.
    if summaries == nil {
        summaries = make([]*JobSummary, 0)
    }
    writeJSON(responseWriter, http.StatusOK, summaries)
}
// GetJobStream handles GET /ai/jobs/{id}/stream — SSE endpoint for job updates.
// Delegates entirely to the dish-job SSE broker, which owns the connection lifecycle.
func (handler *Handler) GetJobStream(responseWriter http.ResponseWriter, request *http.Request) {
    handler.sseBroker.ServeSSE(responseWriter, request)
}
// GetJob handles GET /ai/jobs/{id} — fetches a job result (for app re-open after backgrounding).
// Responds 404 when the job does not exist and 403 when it belongs to another user.
func (handler *Handler) GetJob(responseWriter http.ResponseWriter, request *http.Request) {
    ctx := request.Context()
    jobID := chi.URLParam(request, "id")
    job, fetchError := handler.jobRepo.GetJobByID(ctx, jobID)
    if fetchError != nil {
        writeErrorJSON(responseWriter, request, http.StatusNotFound, "job not found")
        return
    }
    if requesterID := middleware.UserIDFromCtx(ctx); job.UserID != requesterID {
        writeErrorJSON(responseWriter, request, http.StatusForbidden, "forbidden")
        return
    }
    writeJSON(responseWriter, http.StatusOK, job)
}
// MergeAndDeduplicate combines results from multiple images.
// Items sharing the same name (case-insensitive, whitespace-trimmed) have
// their quantities summed and keep the highest confidence seen; first-seen
// order is preserved.
//
// Unlike the previous implementation, the input batches are never modified:
// merging happens on copies, so callers may safely reuse their slices.
func MergeAndDeduplicate(batches [][]ai.RecognizedItem) []ai.RecognizedItem {
    indexByKey := make(map[string]int) // normalized name -> index into result
    result := make([]ai.RecognizedItem, 0)
    for _, batch := range batches {
        for _, item := range batch {
            key := normalizeName(item.Name)
            if idx, ok := indexByKey[key]; ok {
                result[idx].Quantity += item.Quantity
                if item.Confidence > result[idx].Confidence {
                    result[idx].Confidence = item.Confidence
                }
            } else {
                indexByKey[key] = len(result)
                result = append(result, item)
            }
        }
    }
    return result
}
// normalizeName lowercases a name and strips surrounding whitespace so that
// case/spacing variants of the same item compare equal.
func normalizeName(s string) string {
    trimmed := strings.TrimSpace(s)
    return strings.ToLower(trimmed)
}
// strPtr returns a pointer to s, or nil when s is empty (for optional JSON fields).
func strPtr(s string) *string {
    if s != "" {
        return &s
    }
    return nil
}
// intPtr returns a pointer to a fresh copy of n.
func intPtr(n int) *int {
    value := n
    return &value
}
// ---------------------------------------------------------------------------
// HTTP helpers
// ---------------------------------------------------------------------------
// errorResponse is the uniform JSON error body for all endpoints in this handler.
type errorResponse struct {
    Error     string `json:"error"`                // human-readable message
    RequestID string `json:"request_id,omitempty"` // correlation ID, omitted when absent
}
// writeErrorJSON writes a JSON error payload with the given status code,
// attaching the request ID from the context when one is present.
func writeErrorJSON(responseWriter http.ResponseWriter, request *http.Request, status int, msg string) {
    payload := errorResponse{
        Error:     msg,
        RequestID: middleware.RequestIDFromCtx(request.Context()),
    }
    responseWriter.Header().Set("Content-Type", "application/json")
    responseWriter.WriteHeader(status)
    // The status line is already flushed; an encode failure cannot be reported.
    _ = json.NewEncoder(responseWriter).Encode(payload)
}
func writeJSON(responseWriter http.ResponseWriter, status int, value any) {
responseWriter.Header().Set("Content-Type", "application/json")
responseWriter.WriteHeader(status)
_ = json.NewEncoder(responseWriter).Encode(value)
}