feat: async product/receipt recognition via Kafka

Backend:
- Migration 002: product_recognition_jobs table with JSONB images column
  and job_type CHECK ('receipt' | 'products')
- New Kafka topics: ai.products.paid / ai.products.free
- ProductJob model, ProductJobRepository (mirrors dish job pattern)
- itemEnricher extracted from Handler — shared by HTTP handler and worker
- ProductSSEBroker: PG LISTEN on product_job_update channel
- ProductWorkerPool: 5 workers, branches on job_type to call
  RecognizeReceipt or RecognizeProducts per image in parallel
- Handler: RecognizeReceipt and RecognizeProducts now return 202 Accepted
  instead of blocking; 4 new endpoints: GET /ai/product-jobs,
  /product-jobs/history, /product-jobs/{id}, /product-jobs/{id}/stream
- cmd/worker: extended to run ProductWorkerPool alongside dish WorkerPool
- cmd/server: wires productJobRepository + productSSEBroker; both SSE
  brokers started in App.Start()

Flutter client:
- ProductJobCreated, ProductJobResult, ProductJobSummary, ProductJobEvent
  models + submitReceiptRecognition/submitProductsRecognition/stream methods
- Shared _openSseStream helper eliminates duplicate SSE parsing loop
- ScanScreen: replace blocking AI calls with async submit + navigate to
  ProductJobWatchScreen
- ProductJobWatchScreen: watches SSE stream, navigates to /scan/confirm
  when done, shows error on failure
- ProductsScreen: prepends _RecentScansSection (hidden when empty); compact
  horizontal list of recent scans with "See all" → history
- ProductJobHistoryScreen: full list of all product recognition jobs
- New routes: /scan/product-job-watch, /products/job-history
- L10n: 7 new keys in all 12 ARB files

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
dbastrikin
2026-03-23 23:01:30 +02:00
parent bffeb05a43
commit c7317c4335
43 changed files with 2073 additions and 239 deletions

View File

@@ -9,8 +9,9 @@ import (
// App bundles the HTTP handler with background services that need lifecycle management.
// NOTE(review): the diff view had stale pre-change field lines interleaved here
// (duplicate handler/sseBroker declarations); this is the post-change struct.
type App struct {
	handler          http.Handler
	sseBroker        *recognition.SSEBroker
	productSSEBroker *recognition.ProductSSEBroker
}
// ServeHTTP implements http.Handler.
@@ -18,8 +19,9 @@ func (application *App) ServeHTTP(responseWriter http.ResponseWriter, request *h
application.handler.ServeHTTP(responseWriter, request)
}
// Start launches the SSE brokers' LISTEN loops (dish and product channels).
// Call this once before the HTTP server begins accepting connections.
func (application *App) Start(applicationContext context.Context) {
	application.sseBroker.Start(applicationContext)
	application.productSSEBroker.Start(applicationContext)
}

View File

@@ -56,7 +56,9 @@ func initApp(appConfig *config.Config, pool *pgxpool.Pool) (*App, error) {
// Recognition pipeline
jobRepository := recognition.NewJobRepository(pool)
sseBroker := recognition.NewSSEBroker(pool, jobRepository)
recognitionHandler := recognition.NewHandler(openaiClient, productRepository, jobRepository, kafkaProducer, sseBroker)
productJobRepository := recognition.NewProductJobRepository(pool)
productSSEBroker := recognition.NewProductSSEBroker(pool, productJobRepository)
recognitionHandler := recognition.NewHandler(openaiClient, productRepository, jobRepository, productJobRepository, kafkaProducer, sseBroker, productSSEBroker)
menuRepository := menu.NewRepository(pool)
menuHandler := menu.NewHandler(menuRepository, openaiClient, openaiClient, dishRepository, pexelsClient, userRepository, userProductRepository, dishRepository)
@@ -93,7 +95,8 @@ func initApp(appConfig *config.Config, pool *pgxpool.Pool) (*App, error) {
mainTagListHandler,
)
return &App{
handler: httpHandler,
sseBroker: sseBroker,
handler: httpHandler,
sseBroker: sseBroker,
productSSEBroker: productSSEBroker,
}, nil
}

View File

@@ -6,6 +6,7 @@ import (
"github.com/food-ai/backend/internal/adapters/kafka"
"github.com/food-ai/backend/internal/adapters/openai"
"github.com/food-ai/backend/internal/domain/dish"
"github.com/food-ai/backend/internal/domain/product"
"github.com/food-ai/backend/internal/domain/recognition"
"github.com/jackc/pgx/v5/pgxpool"
"github.com/kelseyhightower/envconfig"
@@ -28,31 +29,57 @@ func loadConfig() (*workerConfig, error) {
// WorkerApp bundles background services that need lifecycle management.
// NOTE(review): the diff view duplicated the workerPool field (old + new line
// both rendered); this is the post-change struct with both pools.
type WorkerApp struct {
	workerPool        *recognition.WorkerPool
	productWorkerPool *recognition.ProductWorkerPool
}
// Start launches the dish and product worker pool goroutines.
// Cancellation of applicationContext is the shutdown signal for both pools.
func (workerApp *WorkerApp) Start(applicationContext context.Context) {
	workerApp.workerPool.Start(applicationContext)
	workerApp.productWorkerPool.Start(applicationContext)
}
func initWorker(workerCfg *workerConfig, pool *pgxpool.Pool) (*WorkerApp, error) {
openaiClient := openai.NewClient(workerCfg.OpenAIAPIKey)
// Dish recognition worker.
dishRepository := dish.NewRepository(pool)
jobRepository := recognition.NewJobRepository(pool)
topic := recognition.TopicFree
groupID := "dish-recognition-free"
dishTopic := recognition.TopicFree
dishGroupID := "dish-recognition-free"
if workerCfg.WorkerPlan == "paid" {
topic = recognition.TopicPaid
groupID = "dish-recognition-paid"
dishTopic = recognition.TopicPaid
dishGroupID = "dish-recognition-paid"
}
consumer, consumerError := kafka.NewConsumer(workerCfg.KafkaBrokers, groupID, topic)
if consumerError != nil {
return nil, consumerError
dishConsumer, dishConsumerError := kafka.NewConsumer(workerCfg.KafkaBrokers, dishGroupID, dishTopic)
if dishConsumerError != nil {
return nil, dishConsumerError
}
workerPool := recognition.NewWorkerPool(jobRepository, openaiClient, dishRepository, consumer)
return &WorkerApp{workerPool: workerPool}, nil
workerPool := recognition.NewWorkerPool(jobRepository, openaiClient, dishRepository, dishConsumer)
// Product recognition worker.
productRepository := product.NewRepository(pool)
productJobRepository := recognition.NewProductJobRepository(pool)
productTopic := recognition.ProductTopicFree
productGroupID := "product-recognition-free"
if workerCfg.WorkerPlan == "paid" {
productTopic = recognition.ProductTopicPaid
productGroupID = "product-recognition-paid"
}
productConsumer, productConsumerError := kafka.NewConsumer(workerCfg.KafkaBrokers, productGroupID, productTopic)
if productConsumerError != nil {
return nil, productConsumerError
}
productWorkerPool := recognition.NewProductWorkerPool(productJobRepository, openaiClient, productRepository, productConsumer)
return &WorkerApp{
workerPool: workerPool,
productWorkerPool: productWorkerPool,
}, nil
}