feat: async product/receipt recognition via Kafka

Backend:
- Migration 002: product_recognition_jobs table with JSONB images column
  and job_type CHECK ('receipt' | 'products')
- New Kafka topics: ai.products.paid / ai.products.free
- ProductJob model, ProductJobRepository (mirrors dish job pattern)
- itemEnricher extracted from Handler — shared by HTTP handler and worker
- ProductSSEBroker: PG LISTEN on product_job_update channel
- ProductWorkerPool: 5 workers, branches on job_type to call
  RecognizeReceipt or RecognizeProducts per image in parallel
- Handler: RecognizeReceipt and RecognizeProducts now return 202 Accepted
  instead of blocking; 4 new endpoints: GET /ai/product-jobs,
  /product-jobs/history, /product-jobs/{id}, /product-jobs/{id}/stream
- cmd/worker: extended to run ProductWorkerPool alongside dish WorkerPool
- cmd/server: wires productJobRepository + productSSEBroker; both SSE
  brokers started in App.Start()

Flutter client:
- ProductJobCreated, ProductJobResult, ProductJobSummary, ProductJobEvent
  models + submitReceiptRecognition/submitProductsRecognition/stream methods
- Shared _openSseStream helper eliminates duplicate SSE parsing loop
- ScanScreen: replace blocking AI calls with async submit + navigate to
  ProductJobWatchScreen
- ProductJobWatchScreen: watches SSE stream, navigates to /scan/confirm
  when done, shows error on failure
- ProductsScreen: prepends _RecentScansSection (hidden when empty); compact
  horizontal list of recent scans with "See all" → history
- ProductJobHistoryScreen: full list of all product recognition jobs
- New routes: /scan/product-job-watch, /products/job-history
- L10n: 7 new keys in all 12 ARB files

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
dbastrikin
2026-03-23 23:01:30 +02:00
parent bffeb05a43
commit c7317c4335
43 changed files with 2073 additions and 239 deletions

View File

@@ -6,7 +6,6 @@ import (
"log/slog"
"net/http"
"strings"
"sync"
"github.com/go-chi/chi/v5"
@@ -49,27 +48,33 @@ type KafkaPublisher interface {
// Handler handles POST /ai/* recognition endpoints.
//
// All recognition is asynchronous: requests are persisted as jobs, published
// to Kafka, and progress is streamed back to clients over SSE.
type Handler struct {
	recognizer       Recognizer           // AI backend (used by dish/product job workers)
	jobRepo          JobRepository        // dish recognition jobs
	productJobRepo   ProductJobRepository // receipt/product recognition jobs
	kafkaProducer    KafkaPublisher       // enqueues job IDs onto the paid/free topics
	sseBroker        *SSEBroker           // dish job updates
	productSSEBroker *ProductSSEBroker    // product job updates (PG LISTEN channel)
}
// NewHandler creates a new Handler with async dish and product recognition support.
func NewHandler(
	recognizer Recognizer,
	jobRepo JobRepository,
	productJobRepo ProductJobRepository,
	kafkaProducer KafkaPublisher,
	sseBroker *SSEBroker,
	productSSEBroker *ProductSSEBroker,
) *Handler {
	return &Handler{
		recognizer:       recognizer,
		jobRepo:          jobRepo,
		productJobRepo:   productJobRepo,
		kafkaProducer:    kafkaProducer,
		sseBroker:        sseBroker,
		productSSEBroker: productSSEBroker,
	}
}
@@ -117,34 +122,23 @@ type ReceiptResponse struct {
// Handlers
// ---------------------------------------------------------------------------
// RecognizeReceipt handles POST /ai/recognize-receipt.
// RecognizeReceipt handles POST /ai/recognize-receipt (async).
// Enqueues the receipt image for AI processing and returns 202 Accepted with a job_id.
// Body: {"image_base64": "...", "mime_type": "image/jpeg"}
func (handler *Handler) RecognizeReceipt(responseWriter http.ResponseWriter, request *http.Request) {
userID := middleware.UserIDFromCtx(request.Context())
_ = userID // logged for tracing
var req imageRequest
if decodeError := json.NewDecoder(request.Body).Decode(&req); decodeError != nil || req.ImageBase64 == "" {
writeErrorJSON(responseWriter, request, http.StatusBadRequest, "image_base64 is required")
return
}
lang := locale.FromContext(request.Context())
result, recognizeError := handler.recognizer.RecognizeReceipt(request.Context(), req.ImageBase64, req.MimeType, lang)
if recognizeError != nil {
slog.ErrorContext(request.Context(), "recognize receipt", "err", recognizeError)
writeErrorJSON(responseWriter, request, http.StatusServiceUnavailable, "recognition failed, please try again")
return
}
enriched := handler.enrichItems(request.Context(), result.Items)
writeJSON(responseWriter, http.StatusOK, ReceiptResponse{
Items: enriched,
Unrecognized: result.Unrecognized,
handler.submitProductJob(responseWriter, request, "receipt", []ProductImagePayload{
{ImageBase64: req.ImageBase64, MimeType: req.MimeType},
})
}
// RecognizeProducts handles POST /ai/recognize-products.
// RecognizeProducts handles POST /ai/recognize-products (async).
// Enqueues up to 3 product images for AI processing and returns 202 Accepted with a job_id.
// Body: {"images": [{"image_base64": "...", "mime_type": "image/jpeg"}, ...]}
func (handler *Handler) RecognizeProducts(responseWriter http.ResponseWriter, request *http.Request) {
var req imagesRequest
@@ -153,29 +147,118 @@ func (handler *Handler) RecognizeProducts(responseWriter http.ResponseWriter, re
return
}
if len(req.Images) > 3 {
req.Images = req.Images[:3] // cap at 3 photos as per spec
req.Images = req.Images[:3]
}
images := make([]ProductImagePayload, len(req.Images))
for index, img := range req.Images {
images[index] = ProductImagePayload{ImageBase64: img.ImageBase64, MimeType: img.MimeType}
}
handler.submitProductJob(responseWriter, request, "products", images)
}
// submitProductJob is shared by RecognizeReceipt and RecognizeProducts.
// It inserts a product job, publishes to Kafka, and writes the 202 response.
//
// jobType is either "receipt" or "products" (mirrors the DB CHECK constraint);
// images carries the base64 payloads the worker will process.
func (handler *Handler) submitProductJob(
	responseWriter http.ResponseWriter,
	request *http.Request,
	jobType string,
	images []ProductImagePayload,
) {
	userID := middleware.UserIDFromCtx(request.Context())
	userPlan := middleware.UserPlanFromCtx(request.Context())
	lang := locale.FromContext(request.Context())
	job := &ProductJob{
		UserID:   userID,
		UserPlan: userPlan,
		JobType:  jobType,
		Images:   images,
		Lang:     lang,
	}
	if insertError := handler.productJobRepo.InsertProductJob(request.Context(), job); insertError != nil {
		slog.ErrorContext(request.Context(), "insert product recognition job", "err", insertError)
		writeErrorJSON(responseWriter, request, http.StatusInternalServerError, "failed to create job")
		return
	}
	// Queue position is a best-effort UX hint only: on lookup failure we fall
	// back to 0 rather than failing the whole submission.
	position, positionError := handler.productJobRepo.ProductQueuePosition(request.Context(), userPlan, job.CreatedAt)
	if positionError != nil {
		position = 0
	}
	// Paid users get a dedicated topic so their jobs are drained independently.
	topic := ProductTopicFree
	if userPlan == "paid" {
		topic = ProductTopicPaid
	}
	if publishError := handler.kafkaProducer.Publish(request.Context(), topic, job.ID); publishError != nil {
		slog.ErrorContext(request.Context(), "publish product recognition job", "job_id", job.ID, "err", publishError)
		writeErrorJSON(responseWriter, request, http.StatusInternalServerError, "failed to enqueue job")
		return
	}
	// Rough estimate assuming ~6 seconds of processing per queued job.
	estimatedSeconds := (position + 1) * 6
	writeJSON(responseWriter, http.StatusAccepted, map[string]any{
		"job_id":            job.ID,
		"queue_position":    position,
		"estimated_seconds": estimatedSeconds,
	})
}
// ListRecentProductJobs handles GET /ai/product-jobs — returns the last 7 days of product jobs.
func (handler *Handler) ListRecentProductJobs(responseWriter http.ResponseWriter, request *http.Request) {
	userID := middleware.UserIDFromCtx(request.Context())
	summaries, listError := handler.productJobRepo.ListRecentProductJobs(request.Context(), userID)
	handler.respondProductJobSummaries(responseWriter, request, "list recent product jobs", summaries, listError)
}

// ListAllProductJobs handles GET /ai/product-jobs/history — returns all product jobs for the user.
func (handler *Handler) ListAllProductJobs(responseWriter http.ResponseWriter, request *http.Request) {
	userID := middleware.UserIDFromCtx(request.Context())
	summaries, listError := handler.productJobRepo.ListAllProductJobs(request.Context(), userID)
	handler.respondProductJobSummaries(responseWriter, request, "list all product jobs", summaries, listError)
}

// respondProductJobSummaries writes the outcome of a product-job list query.
// On error it logs under logMessage and responds 500; otherwise it normalizes
// a nil result to an empty slice (so the JSON body is [] rather than null) and
// writes the summaries with 200 OK.
func (handler *Handler) respondProductJobSummaries(
	responseWriter http.ResponseWriter,
	request *http.Request,
	logMessage string,
	summaries []*ProductJobSummary,
	listError error,
) {
	if listError != nil {
		slog.ErrorContext(request.Context(), logMessage, "err", listError)
		writeErrorJSON(responseWriter, request, http.StatusInternalServerError, "failed to list jobs")
		return
	}
	if summaries == nil {
		summaries = []*ProductJobSummary{}
	}
	writeJSON(responseWriter, http.StatusOK, summaries)
}
// GetProductJob handles GET /ai/product-jobs/{id}.
// Responds 404 when the job cannot be loaded and 403 when it belongs to a
// different user; otherwise the full job record is returned as JSON.
func (handler *Handler) GetProductJob(responseWriter http.ResponseWriter, request *http.Request) {
	requestedJobID := chi.URLParam(request, "id")
	currentUserID := middleware.UserIDFromCtx(request.Context())
	job, fetchError := handler.productJobRepo.GetProductJobByID(request.Context(), requestedJobID)
	switch {
	case fetchError != nil:
		writeErrorJSON(responseWriter, request, http.StatusNotFound, "job not found")
	case job.UserID != currentUserID:
		writeErrorJSON(responseWriter, request, http.StatusForbidden, "forbidden")
	default:
		writeJSON(responseWriter, http.StatusOK, job)
	}
}
// GetProductJobStream handles GET /ai/product-jobs/{id}/stream — SSE stream for product job updates.
//
// The handler delegates entirely to the product SSE broker, which owns the
// long-lived connection. NOTE(review): unlike GetProductJob there is no
// visible per-job ownership check here — presumably ServeSSE authorizes the
// job id itself; verify against ProductSSEBroker.
func (handler *Handler) GetProductJobStream(responseWriter http.ResponseWriter, request *http.Request) {
	handler.productSSEBroker.ServeSSE(responseWriter, request)
}
// RecognizeDish handles POST /ai/recognize-dish (async).
@@ -287,87 +370,6 @@ func (handler *Handler) GetJob(responseWriter http.ResponseWriter, request *http
writeJSON(responseWriter, http.StatusOK, job)
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
// enrichItems matches each recognized item against the product catalog.
// Items without a match trigger a classification call and upsert into the DB.
// Lookup and classification failures are logged and tolerated: the item is
// still returned, keeping whatever values the recognizer produced.
func (handler *Handler) enrichItems(ctx context.Context, items []ai.RecognizedItem) []EnrichedItem {
	enrichedItems := make([]EnrichedItem, 0, len(items))
	for _, recognized := range items {
		entry := EnrichedItem{
			Name:        recognized.Name,
			Quantity:    recognized.Quantity,
			Unit:        recognized.Unit,
			Category:    recognized.Category,
			Confidence:  recognized.Confidence,
			StorageDays: 7, // sensible default
		}
		match, matchError := handler.productRepo.FuzzyMatch(ctx, recognized.Name)
		if matchError != nil {
			slog.WarnContext(ctx, "fuzzy match product", "name", recognized.Name, "err", matchError)
		}
		switch {
		case match != nil:
			// Catalog hit: prefer the catalog's unit/category/shelf-life
			// over the recognizer's guesses where they are present.
			productID := match.ID
			entry.MappingID = &productID
			if match.DefaultUnit != nil {
				entry.Unit = *match.DefaultUnit
			}
			if match.StorageDays != nil {
				entry.StorageDays = *match.StorageDays
			}
			if match.Category != nil {
				entry.Category = *match.Category
			}
		default:
			// No catalog match: ask the AI to classify the ingredient and
			// persist the result so the next lookup hits the catalog.
			classification, classifyError := handler.recognizer.ClassifyIngredient(ctx, recognized.Name)
			if classifyError != nil {
				slog.WarnContext(ctx, "classify unknown product", "name", recognized.Name, "err", classifyError)
				break
			}
			if saved := handler.saveClassification(ctx, classification); saved != nil {
				savedID := saved.ID
				entry.MappingID = &savedID
			}
			entry.Category = classification.Category
			entry.Unit = classification.DefaultUnit
			entry.StorageDays = classification.StorageDays
		}
		enrichedItems = append(enrichedItems, entry)
	}
	return enrichedItems
}
// saveClassification upserts an AI-produced classification into the product catalog.
// Returns the stored product, or nil when the classification is unusable
// (nil / missing canonical name) or the upsert fails (logged, not fatal).
func (handler *Handler) saveClassification(ctx context.Context, classification *ai.IngredientClassification) *product.Product {
	if classification == nil || classification.CanonicalName == "" {
		return nil
	}
	candidate := product.Product{
		CanonicalName:   classification.CanonicalName,
		Category:        strPtr(classification.Category),
		DefaultUnit:     strPtr(classification.DefaultUnit),
		CaloriesPer100g: classification.CaloriesPer100g,
		ProteinPer100g:  classification.ProteinPer100g,
		FatPer100g:      classification.FatPer100g,
		CarbsPer100g:    classification.CarbsPer100g,
		StorageDays:     intPtr(classification.StorageDays),
	}
	saved, upsertError := handler.productRepo.Upsert(ctx, &candidate)
	if upsertError != nil {
		slog.WarnContext(ctx, "upsert classified product", "name", classification.CanonicalName, "err", upsertError)
		return nil
	}
	return saved
}
// MergeAndDeduplicate combines results from multiple images.
// Items sharing the same name (case-insensitive) have their quantities summed.
func MergeAndDeduplicate(batches [][]ai.RecognizedItem) []ai.RecognizedItem {