feat: async product/receipt recognition via Kafka

Backend:
- Migration 002: product_recognition_jobs table with JSONB images column
  and job_type CHECK ('receipt' | 'products')
- New Kafka topics: ai.products.paid / ai.products.free
- ProductJob model, ProductJobRepository (mirrors dish job pattern)
- itemEnricher extracted from Handler — shared by HTTP handler and worker
- ProductSSEBroker: PG LISTEN on product_job_update channel
- ProductWorkerPool: 5 workers, branches on job_type to call
  RecognizeReceipt or RecognizeProducts per image in parallel
- Handler: RecognizeReceipt and RecognizeProducts now return 202 Accepted
  instead of blocking; 4 new endpoints: GET /ai/product-jobs,
  /product-jobs/history, /product-jobs/{id}, /product-jobs/{id}/stream
- cmd/worker: extended to run ProductWorkerPool alongside dish WorkerPool
- cmd/server: wires productJobRepository + productSSEBroker; both SSE
  brokers started in App.Start()

Flutter client:
- ProductJobCreated, ProductJobResult, ProductJobSummary, ProductJobEvent
  models + submitReceiptRecognition/submitProductsRecognition/stream methods
- Shared _openSseStream helper eliminates duplicate SSE parsing loop
- ScanScreen: replace blocking AI calls with async submit + navigate to
  ProductJobWatchScreen
- ProductJobWatchScreen: watches SSE stream, navigates to /scan/confirm
  when done, shows error on failure
- ProductsScreen: prepends _RecentScansSection (hidden when empty); compact
  horizontal list of recent scans with "See all" → history
- ProductJobHistoryScreen: full list of all product recognition jobs
- New routes: /scan/product-job-watch, /products/job-history
- L10n: 7 new keys in all 12 ARB files

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
dbastrikin
2026-03-23 23:01:30 +02:00
parent bffeb05a43
commit c7317c4335
43 changed files with 2073 additions and 239 deletions

View File

@@ -9,8 +9,9 @@ import (
// App bundles the HTTP handler with background services that need lifecycle management. // App bundles the HTTP handler with background services that need lifecycle management.
type App struct { type App struct {
handler http.Handler handler http.Handler
sseBroker *recognition.SSEBroker sseBroker *recognition.SSEBroker
productSSEBroker *recognition.ProductSSEBroker
} }
// ServeHTTP implements http.Handler. // ServeHTTP implements http.Handler.
@@ -18,8 +19,9 @@ func (application *App) ServeHTTP(responseWriter http.ResponseWriter, request *h
application.handler.ServeHTTP(responseWriter, request) application.handler.ServeHTTP(responseWriter, request)
} }
// Start launches the SSE broker's LISTEN loop. // Start launches the SSE brokers' LISTEN loops.
// Call this once before the HTTP server begins accepting connections. // Call this once before the HTTP server begins accepting connections.
func (application *App) Start(applicationContext context.Context) { func (application *App) Start(applicationContext context.Context) {
application.sseBroker.Start(applicationContext) application.sseBroker.Start(applicationContext)
application.productSSEBroker.Start(applicationContext)
} }

View File

@@ -56,7 +56,9 @@ func initApp(appConfig *config.Config, pool *pgxpool.Pool) (*App, error) {
// Recognition pipeline // Recognition pipeline
jobRepository := recognition.NewJobRepository(pool) jobRepository := recognition.NewJobRepository(pool)
sseBroker := recognition.NewSSEBroker(pool, jobRepository) sseBroker := recognition.NewSSEBroker(pool, jobRepository)
recognitionHandler := recognition.NewHandler(openaiClient, productRepository, jobRepository, kafkaProducer, sseBroker) productJobRepository := recognition.NewProductJobRepository(pool)
productSSEBroker := recognition.NewProductSSEBroker(pool, productJobRepository)
recognitionHandler := recognition.NewHandler(openaiClient, productRepository, jobRepository, productJobRepository, kafkaProducer, sseBroker, productSSEBroker)
menuRepository := menu.NewRepository(pool) menuRepository := menu.NewRepository(pool)
menuHandler := menu.NewHandler(menuRepository, openaiClient, openaiClient, dishRepository, pexelsClient, userRepository, userProductRepository, dishRepository) menuHandler := menu.NewHandler(menuRepository, openaiClient, openaiClient, dishRepository, pexelsClient, userRepository, userProductRepository, dishRepository)
@@ -93,7 +95,8 @@ func initApp(appConfig *config.Config, pool *pgxpool.Pool) (*App, error) {
mainTagListHandler, mainTagListHandler,
) )
return &App{ return &App{
handler: httpHandler, handler: httpHandler,
sseBroker: sseBroker, sseBroker: sseBroker,
productSSEBroker: productSSEBroker,
}, nil }, nil
} }

View File

@@ -6,6 +6,7 @@ import (
"github.com/food-ai/backend/internal/adapters/kafka" "github.com/food-ai/backend/internal/adapters/kafka"
"github.com/food-ai/backend/internal/adapters/openai" "github.com/food-ai/backend/internal/adapters/openai"
"github.com/food-ai/backend/internal/domain/dish" "github.com/food-ai/backend/internal/domain/dish"
"github.com/food-ai/backend/internal/domain/product"
"github.com/food-ai/backend/internal/domain/recognition" "github.com/food-ai/backend/internal/domain/recognition"
"github.com/jackc/pgx/v5/pgxpool" "github.com/jackc/pgx/v5/pgxpool"
"github.com/kelseyhightower/envconfig" "github.com/kelseyhightower/envconfig"
@@ -28,31 +29,57 @@ func loadConfig() (*workerConfig, error) {
// WorkerApp bundles background services that need lifecycle management. // WorkerApp bundles background services that need lifecycle management.
type WorkerApp struct { type WorkerApp struct {
workerPool *recognition.WorkerPool workerPool *recognition.WorkerPool
productWorkerPool *recognition.ProductWorkerPool
} }
// Start launches the worker pool goroutines. // Start launches the dish and product worker pool goroutines.
func (workerApp *WorkerApp) Start(applicationContext context.Context) { func (workerApp *WorkerApp) Start(applicationContext context.Context) {
workerApp.workerPool.Start(applicationContext) workerApp.workerPool.Start(applicationContext)
workerApp.productWorkerPool.Start(applicationContext)
} }
func initWorker(workerCfg *workerConfig, pool *pgxpool.Pool) (*WorkerApp, error) { func initWorker(workerCfg *workerConfig, pool *pgxpool.Pool) (*WorkerApp, error) {
openaiClient := openai.NewClient(workerCfg.OpenAIAPIKey) openaiClient := openai.NewClient(workerCfg.OpenAIAPIKey)
// Dish recognition worker.
dishRepository := dish.NewRepository(pool) dishRepository := dish.NewRepository(pool)
jobRepository := recognition.NewJobRepository(pool) jobRepository := recognition.NewJobRepository(pool)
topic := recognition.TopicFree dishTopic := recognition.TopicFree
groupID := "dish-recognition-free" dishGroupID := "dish-recognition-free"
if workerCfg.WorkerPlan == "paid" { if workerCfg.WorkerPlan == "paid" {
topic = recognition.TopicPaid dishTopic = recognition.TopicPaid
groupID = "dish-recognition-paid" dishGroupID = "dish-recognition-paid"
} }
consumer, consumerError := kafka.NewConsumer(workerCfg.KafkaBrokers, groupID, topic) dishConsumer, dishConsumerError := kafka.NewConsumer(workerCfg.KafkaBrokers, dishGroupID, dishTopic)
if consumerError != nil { if dishConsumerError != nil {
return nil, consumerError return nil, dishConsumerError
} }
workerPool := recognition.NewWorkerPool(jobRepository, openaiClient, dishRepository, consumer) workerPool := recognition.NewWorkerPool(jobRepository, openaiClient, dishRepository, dishConsumer)
return &WorkerApp{workerPool: workerPool}, nil
// Product recognition worker.
productRepository := product.NewRepository(pool)
productJobRepository := recognition.NewProductJobRepository(pool)
productTopic := recognition.ProductTopicFree
productGroupID := "product-recognition-free"
if workerCfg.WorkerPlan == "paid" {
productTopic = recognition.ProductTopicPaid
productGroupID = "product-recognition-paid"
}
productConsumer, productConsumerError := kafka.NewConsumer(workerCfg.KafkaBrokers, productGroupID, productTopic)
if productConsumerError != nil {
return nil, productConsumerError
}
productWorkerPool := recognition.NewProductWorkerPool(productJobRepository, openaiClient, productRepository, productConsumer)
return &WorkerApp{
workerPool: workerPool,
productWorkerPool: productWorkerPool,
}, nil
} }

View File

@@ -6,7 +6,6 @@ import (
"log/slog" "log/slog"
"net/http" "net/http"
"strings" "strings"
"sync"
"github.com/go-chi/chi/v5" "github.com/go-chi/chi/v5"
@@ -49,27 +48,33 @@ type KafkaPublisher interface {
// Handler handles POST /ai/* recognition endpoints. // Handler handles POST /ai/* recognition endpoints.
type Handler struct { type Handler struct {
recognizer Recognizer enricher *itemEnricher
productRepo ProductRepository recognizer Recognizer
jobRepo JobRepository jobRepo JobRepository
kafkaProducer KafkaPublisher productJobRepo ProductJobRepository
sseBroker *SSEBroker kafkaProducer KafkaPublisher
sseBroker *SSEBroker
productSSEBroker *ProductSSEBroker
} }
// NewHandler creates a new Handler with async dish recognition support. // NewHandler creates a new Handler with async dish and product recognition support.
func NewHandler( func NewHandler(
recognizer Recognizer, recognizer Recognizer,
productRepo ProductRepository, productRepo ProductRepository,
jobRepo JobRepository, jobRepo JobRepository,
productJobRepo ProductJobRepository,
kafkaProducer KafkaPublisher, kafkaProducer KafkaPublisher,
sseBroker *SSEBroker, sseBroker *SSEBroker,
productSSEBroker *ProductSSEBroker,
) *Handler { ) *Handler {
return &Handler{ return &Handler{
recognizer: recognizer, enricher: newItemEnricher(recognizer, productRepo),
productRepo: productRepo, recognizer: recognizer,
jobRepo: jobRepo, jobRepo: jobRepo,
kafkaProducer: kafkaProducer, productJobRepo: productJobRepo,
sseBroker: sseBroker, kafkaProducer: kafkaProducer,
sseBroker: sseBroker,
productSSEBroker: productSSEBroker,
} }
} }
@@ -117,34 +122,23 @@ type ReceiptResponse struct {
// Handlers // Handlers
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// RecognizeReceipt handles POST /ai/recognize-receipt. // RecognizeReceipt handles POST /ai/recognize-receipt (async).
// Enqueues the receipt image for AI processing and returns 202 Accepted with a job_id.
// Body: {"image_base64": "...", "mime_type": "image/jpeg"} // Body: {"image_base64": "...", "mime_type": "image/jpeg"}
func (handler *Handler) RecognizeReceipt(responseWriter http.ResponseWriter, request *http.Request) { func (handler *Handler) RecognizeReceipt(responseWriter http.ResponseWriter, request *http.Request) {
userID := middleware.UserIDFromCtx(request.Context())
_ = userID // logged for tracing
var req imageRequest var req imageRequest
if decodeError := json.NewDecoder(request.Body).Decode(&req); decodeError != nil || req.ImageBase64 == "" { if decodeError := json.NewDecoder(request.Body).Decode(&req); decodeError != nil || req.ImageBase64 == "" {
writeErrorJSON(responseWriter, request, http.StatusBadRequest, "image_base64 is required") writeErrorJSON(responseWriter, request, http.StatusBadRequest, "image_base64 is required")
return return
} }
lang := locale.FromContext(request.Context()) handler.submitProductJob(responseWriter, request, "receipt", []ProductImagePayload{
result, recognizeError := handler.recognizer.RecognizeReceipt(request.Context(), req.ImageBase64, req.MimeType, lang) {ImageBase64: req.ImageBase64, MimeType: req.MimeType},
if recognizeError != nil {
slog.ErrorContext(request.Context(), "recognize receipt", "err", recognizeError)
writeErrorJSON(responseWriter, request, http.StatusServiceUnavailable, "recognition failed, please try again")
return
}
enriched := handler.enrichItems(request.Context(), result.Items)
writeJSON(responseWriter, http.StatusOK, ReceiptResponse{
Items: enriched,
Unrecognized: result.Unrecognized,
}) })
} }
// RecognizeProducts handles POST /ai/recognize-products. // RecognizeProducts handles POST /ai/recognize-products (async).
// Enqueues up to 3 product images for AI processing and returns 202 Accepted with a job_id.
// Body: {"images": [{"image_base64": "...", "mime_type": "image/jpeg"}, ...]} // Body: {"images": [{"image_base64": "...", "mime_type": "image/jpeg"}, ...]}
func (handler *Handler) RecognizeProducts(responseWriter http.ResponseWriter, request *http.Request) { func (handler *Handler) RecognizeProducts(responseWriter http.ResponseWriter, request *http.Request) {
var req imagesRequest var req imagesRequest
@@ -153,29 +147,118 @@ func (handler *Handler) RecognizeProducts(responseWriter http.ResponseWriter, re
return return
} }
if len(req.Images) > 3 { if len(req.Images) > 3 {
req.Images = req.Images[:3] // cap at 3 photos as per spec req.Images = req.Images[:3]
} }
images := make([]ProductImagePayload, len(req.Images))
for index, img := range req.Images {
images[index] = ProductImagePayload{ImageBase64: img.ImageBase64, MimeType: img.MimeType}
}
handler.submitProductJob(responseWriter, request, "products", images)
}
// submitProductJob is shared by RecognizeReceipt and RecognizeProducts.
// It inserts a product job, publishes to Kafka, and writes the 202 response.
func (handler *Handler) submitProductJob(
responseWriter http.ResponseWriter,
request *http.Request,
jobType string,
images []ProductImagePayload,
) {
userID := middleware.UserIDFromCtx(request.Context())
userPlan := middleware.UserPlanFromCtx(request.Context())
lang := locale.FromContext(request.Context()) lang := locale.FromContext(request.Context())
allItems := make([][]ai.RecognizedItem, len(req.Images))
var wg sync.WaitGroup
for i, img := range req.Images {
wg.Add(1)
go func(index int, imageReq imageRequest) {
defer wg.Done()
items, recognizeError := handler.recognizer.RecognizeProducts(request.Context(), imageReq.ImageBase64, imageReq.MimeType, lang)
if recognizeError != nil {
slog.WarnContext(request.Context(), "recognize products from image", "index", index, "err", recognizeError)
return
}
allItems[index] = items
}(i, img)
}
wg.Wait()
merged := MergeAndDeduplicate(allItems) job := &ProductJob{
enriched := handler.enrichItems(request.Context(), merged) UserID: userID,
writeJSON(responseWriter, http.StatusOK, map[string]any{"items": enriched}) UserPlan: userPlan,
JobType: jobType,
Images: images,
Lang: lang,
}
if insertError := handler.productJobRepo.InsertProductJob(request.Context(), job); insertError != nil {
slog.ErrorContext(request.Context(), "insert product recognition job", "err", insertError)
writeErrorJSON(responseWriter, request, http.StatusInternalServerError, "failed to create job")
return
}
position, positionError := handler.productJobRepo.ProductQueuePosition(request.Context(), userPlan, job.CreatedAt)
if positionError != nil {
position = 0
}
topic := ProductTopicFree
if userPlan == "paid" {
topic = ProductTopicPaid
}
if publishError := handler.kafkaProducer.Publish(request.Context(), topic, job.ID); publishError != nil {
slog.ErrorContext(request.Context(), "publish product recognition job", "job_id", job.ID, "err", publishError)
writeErrorJSON(responseWriter, request, http.StatusInternalServerError, "failed to enqueue job")
return
}
estimatedSeconds := (position + 1) * 6
writeJSON(responseWriter, http.StatusAccepted, map[string]any{
"job_id": job.ID,
"queue_position": position,
"estimated_seconds": estimatedSeconds,
})
}
// ListRecentProductJobs handles GET /ai/product-jobs — returns the last 7 days of product jobs.
func (handler *Handler) ListRecentProductJobs(responseWriter http.ResponseWriter, request *http.Request) {
userID := middleware.UserIDFromCtx(request.Context())
summaries, listError := handler.productJobRepo.ListRecentProductJobs(request.Context(), userID)
if listError != nil {
slog.ErrorContext(request.Context(), "list recent product jobs", "err", listError)
writeErrorJSON(responseWriter, request, http.StatusInternalServerError, "failed to list jobs")
return
}
if summaries == nil {
summaries = []*ProductJobSummary{}
}
writeJSON(responseWriter, http.StatusOK, summaries)
}
// ListAllProductJobs handles GET /ai/product-jobs/history — returns all product jobs for the user.
func (handler *Handler) ListAllProductJobs(responseWriter http.ResponseWriter, request *http.Request) {
userID := middleware.UserIDFromCtx(request.Context())
summaries, listError := handler.productJobRepo.ListAllProductJobs(request.Context(), userID)
if listError != nil {
slog.ErrorContext(request.Context(), "list all product jobs", "err", listError)
writeErrorJSON(responseWriter, request, http.StatusInternalServerError, "failed to list jobs")
return
}
if summaries == nil {
summaries = []*ProductJobSummary{}
}
writeJSON(responseWriter, http.StatusOK, summaries)
}
// GetProductJob handles GET /ai/product-jobs/{id}.
func (handler *Handler) GetProductJob(responseWriter http.ResponseWriter, request *http.Request) {
jobID := chi.URLParam(request, "id")
userID := middleware.UserIDFromCtx(request.Context())
job, fetchError := handler.productJobRepo.GetProductJobByID(request.Context(), jobID)
if fetchError != nil {
writeErrorJSON(responseWriter, request, http.StatusNotFound, "job not found")
return
}
if job.UserID != userID {
writeErrorJSON(responseWriter, request, http.StatusForbidden, "forbidden")
return
}
writeJSON(responseWriter, http.StatusOK, job)
}
// GetProductJobStream handles GET /ai/product-jobs/{id}/stream — SSE stream for product job updates.
func (handler *Handler) GetProductJobStream(responseWriter http.ResponseWriter, request *http.Request) {
handler.productSSEBroker.ServeSSE(responseWriter, request)
} }
// RecognizeDish handles POST /ai/recognize-dish (async). // RecognizeDish handles POST /ai/recognize-dish (async).
@@ -287,87 +370,6 @@ func (handler *Handler) GetJob(responseWriter http.ResponseWriter, request *http
writeJSON(responseWriter, http.StatusOK, job) writeJSON(responseWriter, http.StatusOK, job)
} }
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
// enrichItems matches each recognized item against the product catalog.
// Items without a match trigger a classification call and upsert into the DB.
func (handler *Handler) enrichItems(ctx context.Context, items []ai.RecognizedItem) []EnrichedItem {
result := make([]EnrichedItem, 0, len(items))
for _, item := range items {
enriched := EnrichedItem{
Name: item.Name,
Quantity: item.Quantity,
Unit: item.Unit,
Category: item.Category,
Confidence: item.Confidence,
StorageDays: 7, // sensible default
}
catalogProduct, matchError := handler.productRepo.FuzzyMatch(ctx, item.Name)
if matchError != nil {
slog.WarnContext(ctx, "fuzzy match product", "name", item.Name, "err", matchError)
}
if catalogProduct != nil {
id := catalogProduct.ID
enriched.MappingID = &id
if catalogProduct.DefaultUnit != nil {
enriched.Unit = *catalogProduct.DefaultUnit
}
if catalogProduct.StorageDays != nil {
enriched.StorageDays = *catalogProduct.StorageDays
}
if catalogProduct.Category != nil {
enriched.Category = *catalogProduct.Category
}
} else {
classification, classifyError := handler.recognizer.ClassifyIngredient(ctx, item.Name)
if classifyError != nil {
slog.WarnContext(ctx, "classify unknown product", "name", item.Name, "err", classifyError)
} else {
saved := handler.saveClassification(ctx, classification)
if saved != nil {
id := saved.ID
enriched.MappingID = &id
}
enriched.Category = classification.Category
enriched.Unit = classification.DefaultUnit
enriched.StorageDays = classification.StorageDays
}
}
result = append(result, enriched)
}
return result
}
// saveClassification upserts an AI-produced classification into the product catalog.
func (handler *Handler) saveClassification(ctx context.Context, classification *ai.IngredientClassification) *product.Product {
if classification == nil || classification.CanonicalName == "" {
return nil
}
catalogProduct := &product.Product{
CanonicalName: classification.CanonicalName,
Category: strPtr(classification.Category),
DefaultUnit: strPtr(classification.DefaultUnit),
CaloriesPer100g: classification.CaloriesPer100g,
ProteinPer100g: classification.ProteinPer100g,
FatPer100g: classification.FatPer100g,
CarbsPer100g: classification.CarbsPer100g,
StorageDays: intPtr(classification.StorageDays),
}
saved, upsertError := handler.productRepo.Upsert(ctx, catalogProduct)
if upsertError != nil {
slog.WarnContext(ctx, "upsert classified product", "name", classification.CanonicalName, "err", upsertError)
return nil
}
return saved
}
// MergeAndDeduplicate combines results from multiple images. // MergeAndDeduplicate combines results from multiple images.
// Items sharing the same name (case-insensitive) have their quantities summed. // Items sharing the same name (case-insensitive) have their quantities summed.
func MergeAndDeduplicate(batches [][]ai.RecognizedItem) []ai.RecognizedItem { func MergeAndDeduplicate(batches [][]ai.RecognizedItem) []ai.RecognizedItem {

View File

@@ -0,0 +1,98 @@
package recognition
import (
"context"
"log/slog"
"github.com/food-ai/backend/internal/adapters/ai"
"github.com/food-ai/backend/internal/domain/product"
)
// itemEnricher matches recognized items against the product catalog,
// triggering AI classification for unknown items.
// Extracted from Handler so both the HTTP handler and the product worker pool can use it.
type itemEnricher struct {
	recognizer  Recognizer        // used only for ClassifyIngredient on catalog misses
	productRepo ProductRepository // catalog lookup (FuzzyMatch) and persistence (Upsert)
}
// newItemEnricher wires a Recognizer and ProductRepository into an itemEnricher.
func newItemEnricher(recognizer Recognizer, productRepo ProductRepository) *itemEnricher {
	enricher := itemEnricher{
		recognizer:  recognizer,
		productRepo: productRepo,
	}
	return &enricher
}
// enrich resolves each recognized item against the product catalog.
// A catalog hit copies the canonical unit/category/storage duration onto the
// item; a miss falls back to an AI classification call, upserting the new
// product so future scans can match directly. Lookup and classification
// failures are logged and the item is kept with its recognized values.
func (enricher *itemEnricher) enrich(ctx context.Context, items []ai.RecognizedItem) []EnrichedItem {
	enrichedItems := make([]EnrichedItem, 0, len(items))
	for _, recognized := range items {
		current := EnrichedItem{
			Name:        recognized.Name,
			Quantity:    recognized.Quantity,
			Unit:        recognized.Unit,
			Category:    recognized.Category,
			Confidence:  recognized.Confidence,
			StorageDays: 7, // sensible default
		}
		match, matchErr := enricher.productRepo.FuzzyMatch(ctx, recognized.Name)
		if matchErr != nil {
			slog.WarnContext(ctx, "fuzzy match product", "name", recognized.Name, "err", matchErr)
		}
		switch {
		case match != nil:
			// Catalog hit: prefer the canonical metadata where present.
			matchID := match.ID
			current.MappingID = &matchID
			if match.DefaultUnit != nil {
				current.Unit = *match.DefaultUnit
			}
			if match.StorageDays != nil {
				current.StorageDays = *match.StorageDays
			}
			if match.Category != nil {
				current.Category = *match.Category
			}
		default:
			// Catalog miss: ask the AI to classify, then persist the result.
			classification, classifyErr := enricher.recognizer.ClassifyIngredient(ctx, recognized.Name)
			if classifyErr != nil {
				slog.WarnContext(ctx, "classify unknown product", "name", recognized.Name, "err", classifyErr)
				break
			}
			if saved := enricher.saveClassification(ctx, classification); saved != nil {
				savedID := saved.ID
				current.MappingID = &savedID
			}
			current.Category = classification.Category
			current.Unit = classification.DefaultUnit
			current.StorageDays = classification.StorageDays
		}
		enrichedItems = append(enrichedItems, current)
	}
	return enrichedItems
}
// saveClassification upserts an AI-produced classification into the product catalog.
// Returns nil when the classification is unusable (nil or missing a canonical
// name) or when the upsert fails; upsert failures are logged, not propagated.
func (enricher *itemEnricher) saveClassification(ctx context.Context, classification *ai.IngredientClassification) *product.Product {
	if classification == nil || classification.CanonicalName == "" {
		return nil
	}
	candidate := product.Product{
		CanonicalName:   classification.CanonicalName,
		Category:        strPtr(classification.Category),
		DefaultUnit:     strPtr(classification.DefaultUnit),
		CaloriesPer100g: classification.CaloriesPer100g,
		ProteinPer100g:  classification.ProteinPer100g,
		FatPer100g:      classification.FatPer100g,
		CarbsPer100g:    classification.CarbsPer100g,
		StorageDays:     intPtr(classification.StorageDays),
	}
	saved, err := enricher.productRepo.Upsert(ctx, &candidate)
	if err != nil {
		slog.WarnContext(ctx, "upsert classified product", "name", classification.CanonicalName, "err", err)
		return nil
	}
	return saved
}

View File

@@ -0,0 +1,63 @@
package recognition
import (
"time"
"github.com/food-ai/backend/internal/adapters/ai"
)
// Kafka topic names for product recognition.
// Paid and free plans publish to separate topics so each tier is consumed
// by its own worker group.
const (
	ProductTopicPaid = "ai.products.paid"
	ProductTopicFree = "ai.products.free"
)
// ProductImagePayload is a single image stored in the product_recognition_jobs.images JSONB column.
type ProductImagePayload struct {
	ImageBase64 string `json:"image_base64"` // raw image bytes, base64-encoded
	MimeType    string `json:"mime_type"`    // e.g. "image/jpeg"
}
// ProductJobResultItem is an enriched product item stored in the result JSONB.
// It mirrors EnrichedItem but carries JSON tags suitable for persistence.
type ProductJobResultItem struct {
	Name        string  `json:"name"`
	Quantity    float64 `json:"quantity"`
	Unit        string  `json:"unit"`
	Category    string  `json:"category"`
	Confidence  float64 `json:"confidence"`
	MappingID   *string `json:"mapping_id,omitempty"` // catalog product ID when the item was matched or classified
	StorageDays int     `json:"storage_days"`
}
// ProductJobResult is the JSONB payload stored in product_recognition_jobs.result.
type ProductJobResult struct {
	JobType      string                 `json:"job_type"` // "receipt" | "products" — echoes the job's type
	Items        []ProductJobResultItem `json:"items"`
	Unrecognized []ai.UnrecognizedItem  `json:"unrecognized,omitempty"` // receipt lines the AI could not map to products
}
// ProductJob represents an async product/receipt recognition task.
type ProductJob struct {
	ID          string
	UserID      string
	UserPlan    string                // plan tier; "paid" routes the job to ProductTopicPaid
	JobType     string                // "receipt" | "products"
	Images      []ProductImagePayload // input images, persisted in the images JSONB column
	Lang        string                // locale tag recorded with the job
	Status      string                // lifecycle state, e.g. "pending"/"processing" — see the JobStatus* constants
	Result      *ProductJobResult     // set on successful completion
	Error       *string               // set on failure
	CreatedAt   time.Time
	StartedAt   *time.Time // set when a worker picks the job up
	CompletedAt *time.Time // set when the job reaches a terminal status
}
// ProductJobSummary is a lightweight record for list endpoints (omits image payloads).
type ProductJobSummary struct {
	ID        string            `json:"id"`
	JobType   string            `json:"job_type"`
	Status    string            `json:"status"`
	Result    *ProductJobResult `json:"result,omitempty"`
	Error     *string           `json:"error,omitempty"`
	CreatedAt time.Time         `json:"created_at"`
}

View File

@@ -0,0 +1,203 @@
package recognition
import (
"context"
"encoding/json"
"time"
"github.com/jackc/pgx/v5/pgxpool"
)
// ProductJobRepository provides all DB operations on product_recognition_jobs.
type ProductJobRepository interface {
	// InsertProductJob persists a new job and populates job.ID and job.CreatedAt.
	InsertProductJob(ctx context.Context, job *ProductJob) error
	// GetProductJobByID fetches a single job (including image payloads) by primary key.
	GetProductJobByID(ctx context.Context, jobID string) (*ProductJob, error)
	// UpdateProductJobStatus transitions a job and records its result or error message.
	UpdateProductJobStatus(ctx context.Context, jobID, status string, result *ProductJobResult, errMsg *string) error
	// ProductQueuePosition counts pending/processing jobs created before createdAt in the plan's queue.
	ProductQueuePosition(ctx context.Context, userPlan string, createdAt time.Time) (int, error)
	// NotifyProductJobUpdate emits a pg_notify on the product_job_update channel for the job.
	NotifyProductJobUpdate(ctx context.Context, jobID string) error
	// ListRecentProductJobs returns the user's jobs from the last 7 days, newest first.
	ListRecentProductJobs(ctx context.Context, userID string) ([]*ProductJobSummary, error)
	// ListAllProductJobs returns every job for the user, newest first.
	ListAllProductJobs(ctx context.Context, userID string) ([]*ProductJobSummary, error)
}
// PostgresProductJobRepository implements ProductJobRepository using a pgxpool.
type PostgresProductJobRepository struct {
	pool *pgxpool.Pool // shared connection pool; pgxpool is safe for concurrent use
}

// NewProductJobRepository creates a new PostgresProductJobRepository.
func NewProductJobRepository(pool *pgxpool.Pool) *PostgresProductJobRepository {
	return &PostgresProductJobRepository{pool: pool}
}
// InsertProductJob inserts a new product recognition job and populates the ID and CreatedAt fields.
// The image payloads are serialized into the images JSONB column.
func (repository *PostgresProductJobRepository) InsertProductJob(ctx context.Context, job *ProductJob) error {
	encodedImages, marshalErr := json.Marshal(job.Images)
	if marshalErr != nil {
		return marshalErr
	}
	row := repository.pool.QueryRow(ctx,
		`INSERT INTO product_recognition_jobs (user_id, user_plan, job_type, images, lang)
		 VALUES ($1, $2, $3, $4, $5)
		 RETURNING id, created_at`,
		job.UserID, job.UserPlan, job.JobType, encodedImages, job.Lang,
	)
	// RETURNING lets the DB assign id/created_at; write them back onto the job.
	return row.Scan(&job.ID, &job.CreatedAt)
}
// GetProductJobByID fetches a single product job by primary key.
// The images column must decode cleanly (it is our own serialized input);
// the result column is decoded best-effort — a malformed payload simply
// leaves job.Result nil rather than failing the fetch.
func (repository *PostgresProductJobRepository) GetProductJobByID(ctx context.Context, jobID string) (*ProductJob, error) {
	var (
		job       ProductJob
		rawImages []byte
		rawResult []byte
	)
	scanErr := repository.pool.QueryRow(ctx,
		`SELECT id, user_id, user_plan, job_type, images, lang,
		        status, result, error, created_at, started_at, completed_at
		 FROM product_recognition_jobs WHERE id = $1`,
		jobID,
	).Scan(
		&job.ID, &job.UserID, &job.UserPlan, &job.JobType, &rawImages, &job.Lang,
		&job.Status, &rawResult, &job.Error, &job.CreatedAt, &job.StartedAt, &job.CompletedAt,
	)
	if scanErr != nil {
		return nil, scanErr
	}
	if rawImages != nil {
		if unmarshalErr := json.Unmarshal(rawImages, &job.Images); unmarshalErr != nil {
			return nil, unmarshalErr
		}
	}
	if rawResult != nil {
		decoded := new(ProductJobResult)
		if json.Unmarshal(rawResult, decoded) == nil {
			job.Result = decoded
		}
	}
	return &job, nil
}
// UpdateProductJobStatus transitions a job to a new status and records the result or error.
// A move to "processing" only stamps started_at; every other status is treated
// as terminal and persists the result/error JSON along with completed_at.
func (repository *PostgresProductJobRepository) UpdateProductJobStatus(
	ctx context.Context,
	jobID, status string,
	result *ProductJobResult,
	errMsg *string,
) error {
	var encodedResult []byte
	if result != nil {
		encoded, marshalErr := json.Marshal(result)
		if marshalErr != nil {
			return marshalErr
		}
		encodedResult = encoded
	}
	if status == JobStatusProcessing {
		_, execErr := repository.pool.Exec(ctx,
			`UPDATE product_recognition_jobs SET status = $1, started_at = now() WHERE id = $2`,
			status, jobID,
		)
		return execErr
	}
	_, execErr := repository.pool.Exec(ctx,
		`UPDATE product_recognition_jobs
		 SET status = $1, result = $2, error = $3, completed_at = now()
		 WHERE id = $4`,
		status, encodedResult, errMsg, jobID,
	)
	return execErr
}
// ProductQueuePosition counts product jobs ahead of createdAt in the same plan's queue.
// Only pending/processing jobs count — completed or failed jobs no longer
// occupy the queue.
func (repository *PostgresProductJobRepository) ProductQueuePosition(
	ctx context.Context,
	userPlan string,
	createdAt time.Time,
) (int, error) {
	queuePosition := 0
	scanErr := repository.pool.QueryRow(ctx,
		`SELECT COUNT(*) FROM product_recognition_jobs
		 WHERE status IN ('pending', 'processing')
		 AND user_plan = $1
		 AND created_at < $2`,
		userPlan, createdAt,
	).Scan(&queuePosition)
	return queuePosition, scanErr
}
// NotifyProductJobUpdate sends a PostgreSQL NOTIFY on the product_job_update channel.
// The job ID travels as the notification payload so listeners know which job changed.
func (repository *PostgresProductJobRepository) NotifyProductJobUpdate(ctx context.Context, jobID string) error {
	_, execErr := repository.pool.Exec(ctx, `SELECT pg_notify('product_job_update', $1)`, jobID)
	return execErr
}
// ListRecentProductJobs returns product recognition jobs from the last 7 days for the given user.
// Rows come back newest first; image payloads are intentionally excluded.
func (repository *PostgresProductJobRepository) ListRecentProductJobs(ctx context.Context, userID string) ([]*ProductJobSummary, error) {
	rows, queryErr := repository.pool.Query(ctx,
		`SELECT id, job_type, status, result, error, created_at
		 FROM product_recognition_jobs
		 WHERE user_id = $1
		 AND created_at >= now() - interval '7 days'
		 ORDER BY created_at DESC`,
		userID,
	)
	if queryErr != nil {
		return nil, queryErr
	}
	defer rows.Close()
	return scanProductJobSummaries(rows)
}
// ListAllProductJobs returns all product recognition jobs for the given user, newest first.
// Like ListRecentProductJobs, it selects only summary columns (no image payloads).
func (repository *PostgresProductJobRepository) ListAllProductJobs(ctx context.Context, userID string) ([]*ProductJobSummary, error) {
	rows, queryErr := repository.pool.Query(ctx,
		`SELECT id, job_type, status, result, error, created_at
		 FROM product_recognition_jobs
		 WHERE user_id = $1
		 ORDER BY created_at DESC`,
		userID,
	)
	if queryErr != nil {
		return nil, queryErr
	}
	defer rows.Close()
	return scanProductJobSummaries(rows)
}
// productSummaryScanner is the minimal subset of pgx.Rows that
// scanProductJobSummaries needs, letting the scan logic be exercised
// without a live database connection.
type productSummaryScanner interface {
	Next() bool
	Scan(dest ...any) error
	Err() error
}
// scanProductJobSummaries drains rows into ProductJobSummary records.
// The result column is decoded best-effort: a malformed JSON payload leaves
// Result nil instead of aborting the whole listing.
func scanProductJobSummaries(rows productSummaryScanner) ([]*ProductJobSummary, error) {
	var summaries []*ProductJobSummary
	for rows.Next() {
		summary := new(ProductJobSummary)
		var rawResult []byte
		if scanErr := rows.Scan(
			&summary.ID, &summary.JobType, &summary.Status,
			&rawResult, &summary.Error, &summary.CreatedAt,
		); scanErr != nil {
			return nil, scanErr
		}
		if rawResult != nil {
			decoded := new(ProductJobResult)
			if json.Unmarshal(rawResult, decoded) == nil {
				summary.Result = decoded
			}
		}
		summaries = append(summaries, summary)
	}
	if iterErr := rows.Err(); iterErr != nil {
		return nil, iterErr
	}
	return summaries, nil
}

View File

@@ -0,0 +1,196 @@
package recognition
import (
	"context"
	"encoding/json"
	"fmt"
	"log/slog"
	"net/http"
	"sync"
	"time"

	"github.com/go-chi/chi/v5"
	"github.com/jackc/pgx/v5/pgxpool"

	"github.com/food-ai/backend/internal/infra/middleware"
)
// ProductSSEBroker manages Server-Sent Events for product recognition job status updates.
// It listens on the PostgreSQL "product_job_update" NOTIFY channel and fans out events
// to all HTTP clients currently streaming a given job.
type ProductSSEBroker struct {
	pool           *pgxpool.Pool        // source of the dedicated LISTEN connection
	productJobRepo ProductJobRepository // loads job state when a notification arrives
	mu             sync.RWMutex         // guards clients
	clients        map[string][]chan sseEvent // job ID -> subscriber channels
}
// NewProductSSEBroker creates a new ProductSSEBroker.
func NewProductSSEBroker(pool *pgxpool.Pool, productJobRepo ProductJobRepository) *ProductSSEBroker {
	broker := &ProductSSEBroker{
		pool:           pool,
		productJobRepo: productJobRepo,
	}
	broker.clients = map[string][]chan sseEvent{}
	return broker
}
// Start launches the PostgreSQL LISTEN loop in a background goroutine.
// The goroutine exits when brokerContext is cancelled.
func (broker *ProductSSEBroker) Start(brokerContext context.Context) {
	go broker.listenLoop(brokerContext)
}
// listenLoop maintains a dedicated LISTEN connection and dispatches every
// product_job_update notification to fanOut.
//
// Fix: the previous version returned on the first acquire/LISTEN/wait error,
// so a transient database hiccup permanently silenced all SSE updates for the
// process lifetime. The loop now re-establishes the connection after a short
// backoff and only exits when brokerContext is cancelled.
func (broker *ProductSSEBroker) listenLoop(brokerContext context.Context) {
	const reconnectDelay = 5 * time.Second
	for {
		if listenError := broker.listenOnce(brokerContext); listenError != nil {
			slog.Error("ProductSSEBroker: listen loop", "err", listenError)
		}
		select {
		case <-brokerContext.Done():
			return
		case <-time.After(reconnectDelay):
			// retry with a fresh connection
		}
	}
}

// listenOnce acquires a connection, issues LISTEN, and blocks delivering
// notifications until the connection or the context fails. A nil return means
// the context was cancelled (clean shutdown).
func (broker *ProductSSEBroker) listenOnce(brokerContext context.Context) error {
	conn, acquireError := broker.pool.Acquire(brokerContext)
	if acquireError != nil {
		return fmt.Errorf("acquire PG connection: %w", acquireError)
	}
	defer conn.Release()
	if _, listenError := conn.Exec(brokerContext, "LISTEN product_job_update"); listenError != nil {
		return fmt.Errorf("LISTEN product_job_update: %w", listenError)
	}
	for {
		notification, waitError := conn.Conn().WaitForNotification(brokerContext)
		if brokerContext.Err() != nil {
			return nil
		}
		if waitError != nil {
			return fmt.Errorf("wait for notification: %w", waitError)
		}
		// Payload is the job ID published via pg_notify.
		broker.fanOut(brokerContext, notification.Payload)
	}
}
// subscribe registers a new buffered event channel for jobID and returns it.
// The buffer (10) lets fanOut stay non-blocking for slow consumers.
func (broker *ProductSSEBroker) subscribe(jobID string) chan sseEvent {
	subscriber := make(chan sseEvent, 10)
	broker.mu.Lock()
	defer broker.mu.Unlock()
	broker.clients[jobID] = append(broker.clients[jobID], subscriber)
	return subscriber
}
// unsubscribe removes channel from jobID's subscriber list, dropping the map
// entry entirely once the last subscriber is gone so the map does not grow
// unboundedly.
func (broker *ProductSSEBroker) unsubscribe(jobID string, channel chan sseEvent) {
	broker.mu.Lock()
	defer broker.mu.Unlock()
	subscribers := broker.clients[jobID]
	for index := range subscribers {
		if subscribers[index] != channel {
			continue
		}
		broker.clients[jobID] = append(subscribers[:index], subscribers[index+1:]...)
		break
	}
	if len(broker.clients[jobID]) == 0 {
		delete(broker.clients, jobID)
	}
}
// fanOut loads the job identified by jobID, converts its current state to an
// SSE event, and delivers it (non-blocking) to every subscribed client.
func (broker *ProductSSEBroker) fanOut(fanContext context.Context, jobID string) {
	job, fetchError := broker.productJobRepo.GetProductJobByID(fanContext, jobID)
	if fetchError != nil {
		slog.Warn("ProductSSEBroker: get job for fan-out", "job_id", jobID, "err", fetchError)
		return
	}
	event, ok := productJobToSSEEvent(job)
	if !ok {
		// Status has no corresponding client event (e.g. still pending).
		return
	}
	// Snapshot the subscriber list under the read lock so sends happen outside it.
	broker.mu.RLock()
	subscribers := append([]chan sseEvent(nil), broker.clients[jobID]...)
	broker.mu.RUnlock()
	for _, subscriber := range subscribers {
		select {
		case subscriber <- event:
		default:
			// Subscriber buffer is full; drop this delivery rather than block.
		}
	}
}
// productJobToSSEEvent maps a job's status to the SSE event streamed to
// clients. The second return value is false for statuses that produce no
// event (e.g. pending) or when a done-result cannot be serialized.
func productJobToSSEEvent(job *ProductJob) (sseEvent, bool) {
	if job.Status == JobStatusProcessing {
		return sseEvent{name: "processing", data: "{}"}, true
	}
	if job.Status == JobStatusDone {
		resultJSON, marshalError := json.Marshal(job.Result)
		if marshalError != nil {
			return sseEvent{}, false
		}
		return sseEvent{name: "done", data: string(resultJSON)}, true
	}
	if job.Status == JobStatusFailed {
		// Fall back to a generic message when the job carries no error text.
		message := "recognition failed, please try again"
		if job.Error != nil {
			message = *job.Error
		}
		payload, _ := json.Marshal(map[string]string{"error": message})
		return sseEvent{name: "failed", data: string(payload)}, true
	}
	return sseEvent{}, false
}
// ServeSSE handles GET /ai/product-jobs/{id}/stream — streams SSE events until the job completes.
//
// Flow: authorize the caller, emit a terminal event immediately when the job
// is already done/failed, otherwise subscribe to broker fan-out, send an
// initial "queued" event with a queue-position estimate, and relay events
// until the job reaches a terminal state or the client disconnects.
//
// Fix: the job state is re-checked *after* subscribing. Previously a job that
// finished between the initial fetch and subscribe() produced no further
// NOTIFY, so the client hung until it disconnected on its own.
func (broker *ProductSSEBroker) ServeSSE(responseWriter http.ResponseWriter, request *http.Request) {
	jobID := chi.URLParam(request, "id")
	userID := middleware.UserIDFromCtx(request.Context())
	job, fetchError := broker.productJobRepo.GetProductJobByID(request.Context(), jobID)
	if fetchError != nil {
		writeErrorJSON(responseWriter, request, http.StatusNotFound, "job not found")
		return
	}
	if job.UserID != userID {
		writeErrorJSON(responseWriter, request, http.StatusForbidden, "forbidden")
		return
	}
	flusher, supported := responseWriter.(http.Flusher)
	if !supported {
		writeErrorJSON(responseWriter, request, http.StatusInternalServerError, "streaming not supported")
		return
	}
	responseWriter.Header().Set("Content-Type", "text/event-stream")
	responseWriter.Header().Set("Cache-Control", "no-cache")
	responseWriter.Header().Set("Connection", "keep-alive")
	responseWriter.Header().Set("X-Accel-Buffering", "no")
	writeEvent := func(event sseEvent) {
		fmt.Fprintf(responseWriter, "event: %s\ndata: %s\n\n", event.name, event.data)
		flusher.Flush()
	}
	// Job already finished: emit the terminal event and close the stream.
	if job.Status == JobStatusDone || job.Status == JobStatusFailed {
		if event, ok := productJobToSSEEvent(job); ok {
			writeEvent(event)
		}
		return
	}
	eventChannel := broker.subscribe(jobID)
	defer broker.unsubscribe(jobID, eventChannel)
	// Close the fetch-vs-subscribe race: if the job reached a terminal state
	// in the meantime, no NOTIFY will arrive for this subscriber.
	if current, recheckError := broker.productJobRepo.GetProductJobByID(request.Context(), jobID); recheckError == nil &&
		(current.Status == JobStatusDone || current.Status == JobStatusFailed) {
		if event, ok := productJobToSSEEvent(current); ok {
			writeEvent(event)
		}
		return
	}
	// Best-effort queue estimate; a lookup error degrades to position 0.
	position, _ := broker.productJobRepo.ProductQueuePosition(request.Context(), job.UserPlan, job.CreatedAt)
	estimatedSeconds := (position + 1) * 6
	queuedData, _ := json.Marshal(map[string]any{
		"position":          position,
		"estimated_seconds": estimatedSeconds,
	})
	fmt.Fprintf(responseWriter, "event: queued\ndata: %s\n\n", queuedData)
	flusher.Flush()
	for {
		select {
		case event := <-eventChannel:
			writeEvent(event)
			if event.name == "done" || event.name == "failed" {
				return
			}
		case <-request.Context().Done():
			return
		}
	}
}

View File

@@ -0,0 +1,152 @@
package recognition
import (
"context"
"log/slog"
"sync"
"github.com/food-ai/backend/internal/adapters/ai"
"github.com/food-ai/backend/internal/adapters/kafka"
)
// ProductWorkerPool processes product/receipt recognition jobs from a single Kafka topic.
type ProductWorkerPool struct {
	productJobRepo ProductJobRepository // job persistence + NOTIFY
	enricher       *itemEnricher        // post-recognition enrichment (shared with the HTTP handler)
	recognizer     Recognizer           // AI backend: RecognizeReceipt / RecognizeProducts
	consumer       *kafka.Consumer      // feeds job IDs into the jobs channel
	workerCount    int                  // number of concurrent worker goroutines
	jobs           chan string          // buffered hand-off between consumer and workers
}
// NewProductWorkerPool creates a ProductWorkerPool with five workers consuming from a single consumer.
func NewProductWorkerPool(
	productJobRepo ProductJobRepository,
	recognizer Recognizer,
	productRepo ProductRepository,
	consumer *kafka.Consumer,
) *ProductWorkerPool {
	pool := &ProductWorkerPool{
		productJobRepo: productJobRepo,
		recognizer:     recognizer,
		consumer:       consumer,
		workerCount:    defaultWorkerCount,
		jobs:           make(chan string, 100),
	}
	// The enricher wraps the same recognizer plus the product catalog.
	pool.enricher = newItemEnricher(recognizer, productRepo)
	return pool
}
// Start launches the Kafka feeder goroutine and all worker goroutines.
// All goroutines exit when workerContext is cancelled.
func (pool *ProductWorkerPool) Start(workerContext context.Context) {
	go pool.consumer.Run(workerContext, pool.jobs)
	for workerIndex := 0; workerIndex < pool.workerCount; workerIndex++ {
		go pool.runWorker(workerContext)
	}
}
// runWorker consumes job IDs from the shared channel until the context is cancelled.
func (pool *ProductWorkerPool) runWorker(workerContext context.Context) {
	for {
		select {
		case <-workerContext.Done():
			return
		case jobID := <-pool.jobs:
			pool.processJob(workerContext, jobID)
		}
	}
}
// processJob executes one recognition job end-to-end: mark it processing, run
// the recognizer according to job_type, enrich the recognized items, and
// persist the terminal status. Every status transition is followed by a
// NOTIFY so SSE clients observe it promptly.
//
// Fixes vs. the previous version:
//   - a "products" job whose images ALL fail recognition is now marked failed
//     instead of completing "done" with an empty item list (which silently
//     hid the errors from the user);
//   - a "products" job with zero images now fails like the receipt branch.
func (pool *ProductWorkerPool) processJob(workerContext context.Context, jobID string) {
	job, fetchError := pool.productJobRepo.GetProductJobByID(workerContext, jobID)
	if fetchError != nil {
		slog.Error("product worker: fetch job", "job_id", jobID, "err", fetchError)
		return
	}
	if updateError := pool.productJobRepo.UpdateProductJobStatus(workerContext, jobID, JobStatusProcessing, nil, nil); updateError != nil {
		slog.Error("product worker: set processing status", "job_id", jobID, "err", updateError)
	}
	if notifyError := pool.productJobRepo.NotifyProductJobUpdate(workerContext, jobID); notifyError != nil {
		slog.Warn("product worker: notify processing", "job_id", jobID, "err", notifyError)
	}
	var recognizedItems []ai.RecognizedItem
	var unrecognized []ai.UnrecognizedItem
	var recognizeError error
	switch job.JobType {
	case "receipt":
		if len(job.Images) == 0 {
			pool.failJob(workerContext, jobID, "no images in job")
			return
		}
		// A receipt job carries exactly one image; extras are ignored.
		imagePayload := job.Images[0]
		var receiptResult *ai.ReceiptResult
		receiptResult, recognizeError = pool.recognizer.RecognizeReceipt(workerContext, imagePayload.ImageBase64, imagePayload.MimeType, job.Lang)
		if recognizeError == nil && receiptResult != nil {
			recognizedItems = receiptResult.Items
			unrecognized = receiptResult.Unrecognized
		}
	case "products":
		if len(job.Images) == 0 {
			pool.failJob(workerContext, jobID, "no images in job")
			return
		}
		// Recognize each image in parallel; each goroutine owns one slot in
		// allItems/imageErrors, so no extra synchronization is needed.
		allItems := make([][]ai.RecognizedItem, len(job.Images))
		imageErrors := make([]error, len(job.Images))
		var wg sync.WaitGroup
		for index, imagePayload := range job.Images {
			wg.Add(1)
			go func(workerIndex int, payload ProductImagePayload) {
				defer wg.Done()
				items, itemsError := pool.recognizer.RecognizeProducts(workerContext, payload.ImageBase64, payload.MimeType, job.Lang)
				if itemsError != nil {
					slog.WarnContext(workerContext, "product worker: recognize products from image", "index", workerIndex, "err", itemsError)
					imageErrors[workerIndex] = itemsError
					return
				}
				allItems[workerIndex] = items
			}(index, imagePayload)
		}
		wg.Wait()
		failedCount := 0
		for _, imageError := range imageErrors {
			if imageError != nil {
				failedCount++
			}
		}
		if failedCount == len(job.Images) {
			// Every image failed: surface the failure instead of a bogus
			// empty success. Partial failures remain best-effort.
			recognizeError = imageErrors[0]
		} else {
			recognizedItems = MergeAndDeduplicate(allItems)
		}
	default:
		slog.Error("product worker: unknown job type", "job_id", jobID, "job_type", job.JobType)
		pool.failJob(workerContext, jobID, "unknown job type: "+job.JobType)
		return
	}
	if recognizeError != nil {
		slog.Error("product worker: recognize", "job_id", jobID, "err", recognizeError)
		pool.failJob(workerContext, jobID, "recognition failed, please try again")
		return
	}
	enriched := pool.enricher.enrich(workerContext, recognizedItems)
	resultItems := make([]ProductJobResultItem, len(enriched))
	for index, item := range enriched {
		resultItems[index] = ProductJobResultItem{
			Name:        item.Name,
			Quantity:    item.Quantity,
			Unit:        item.Unit,
			Category:    item.Category,
			Confidence:  item.Confidence,
			MappingID:   item.MappingID,
			StorageDays: item.StorageDays,
		}
	}
	jobResult := &ProductJobResult{
		JobType:      job.JobType,
		Items:        resultItems,
		Unrecognized: unrecognized,
	}
	if updateError := pool.productJobRepo.UpdateProductJobStatus(workerContext, jobID, JobStatusDone, jobResult, nil); updateError != nil {
		slog.Error("product worker: set done status", "job_id", jobID, "err", updateError)
	}
	if notifyError := pool.productJobRepo.NotifyProductJobUpdate(workerContext, jobID); notifyError != nil {
		slog.Warn("product worker: notify done", "job_id", jobID, "err", notifyError)
	}
}

// failJob marks the job failed with the given user-facing message and notifies
// listeners; persistence errors here are intentionally best-effort.
func (pool *ProductWorkerPool) failJob(workerContext context.Context, jobID, message string) {
	_ = pool.productJobRepo.UpdateProductJobStatus(workerContext, jobID, JobStatusFailed, nil, &message)
	_ = pool.productJobRepo.NotifyProductJobUpdate(workerContext, jobID)
}

View File

@@ -127,6 +127,10 @@ func NewRouter(
r.Get("/jobs/history", recognitionHandler.ListAllJobs) r.Get("/jobs/history", recognitionHandler.ListAllJobs)
r.Get("/jobs/{id}", recognitionHandler.GetJob) r.Get("/jobs/{id}", recognitionHandler.GetJob)
r.Get("/jobs/{id}/stream", recognitionHandler.GetJobStream) r.Get("/jobs/{id}/stream", recognitionHandler.GetJobStream)
r.Get("/product-jobs", recognitionHandler.ListRecentProductJobs)
r.Get("/product-jobs/history", recognitionHandler.ListAllProductJobs)
r.Get("/product-jobs/{id}", recognitionHandler.GetProductJob)
r.Get("/product-jobs/{id}/stream", recognitionHandler.GetProductJobStream)
r.Post("/generate-menu", menuHandler.GenerateMenu) r.Post("/generate-menu", menuHandler.GenerateMenu)
}) })
}) })

View File

@@ -0,0 +1,24 @@
-- +goose Up
-- Queue table for async product/receipt recognition jobs: one row per
-- submitted scan. Workers move rows through the status lifecycle and write
-- the recognition output into the JSONB `result` column.
CREATE TABLE product_recognition_jobs (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v7(),
    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    user_plan TEXT NOT NULL,
    job_type TEXT NOT NULL CHECK (job_type IN ('receipt', 'products')),
    images JSONB NOT NULL,
    lang TEXT NOT NULL DEFAULT 'en',
    -- Lifecycle: pending -> processing -> done | failed. Constrained (like
    -- job_type above) so a buggy writer cannot park a job in an
    -- unrepresentable state.
    status TEXT NOT NULL DEFAULT 'pending'
        CHECK (status IN ('pending', 'processing', 'done', 'failed')),
    result JSONB,
    error TEXT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    started_at TIMESTAMPTZ,
    completed_at TIMESTAMPTZ
);

-- Per-user listings (recent + full history endpoints), newest first.
CREATE INDEX idx_product_recognition_jobs_user
    ON product_recognition_jobs (user_id, created_at DESC);

-- Queue-position query: pending/processing jobs per plan, oldest first.
CREATE INDEX idx_product_recognition_jobs_status
    ON product_recognition_jobs (status, user_plan, created_at ASC);

-- +goose Down
DROP TABLE IF EXISTS product_recognition_jobs;

View File

@@ -13,6 +13,8 @@ import '../../features/profile/profile_provider.dart';
import '../../shared/models/user.dart'; import '../../shared/models/user.dart';
import '../../features/products/products_screen.dart'; import '../../features/products/products_screen.dart';
import '../../features/products/add_product_screen.dart'; import '../../features/products/add_product_screen.dart';
import '../../features/products/product_job_history_screen.dart';
import '../../features/scan/product_job_watch_screen.dart';
import '../../features/scan/scan_screen.dart'; import '../../features/scan/scan_screen.dart';
import '../../features/scan/recognition_confirm_screen.dart'; import '../../features/scan/recognition_confirm_screen.dart';
import '../../features/scan/recognition_service.dart'; import '../../features/scan/recognition_service.dart';
@@ -129,6 +131,17 @@ final routerProvider = Provider<GoRouter>((ref) {
path: '/scan/history', path: '/scan/history',
builder: (_, __) => const RecognitionHistoryScreen(), builder: (_, __) => const RecognitionHistoryScreen(),
), ),
GoRoute(
path: '/scan/product-job-watch',
builder: (context, state) {
final jobCreated = state.extra as ProductJobCreated;
return ProductJobWatchScreen(jobCreated: jobCreated);
},
),
GoRoute(
path: '/products/job-history',
builder: (_, __) => const ProductJobHistoryScreen(),
),
ShellRoute( ShellRoute(
builder: (context, state, child) => MainShell(child: child), builder: (context, state, child) => MainShell(child: child),
routes: [ routes: [

View File

@@ -0,0 +1,114 @@
import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:go_router/go_router.dart';
import 'package:intl/intl.dart';
import '../../l10n/app_localizations.dart';
import '../scan/recognition_service.dart';
import 'product_job_provider.dart';
/// Shows the complete history of product/receipt recognition scans.
class ProductJobHistoryScreen extends ConsumerWidget {
  const ProductJobHistoryScreen({super.key});

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    final l10n = AppLocalizations.of(context)!;
    final state = ref.watch(allProductJobsProvider);
    return Scaffold(
      appBar: AppBar(
        title: Text(l10n.productJobHistoryTitle),
        actions: [
          // Manual refresh in addition to pull-to-refresh on the list below.
          IconButton(
            icon: const Icon(Icons.refresh),
            onPressed: () =>
                ref.read(allProductJobsProvider.notifier).refresh(),
          ),
        ],
      ),
      body: state.when(
        loading: () => const Center(child: CircularProgressIndicator()),
        error: (err, _) => Center(
          child: Column(
            mainAxisSize: MainAxisSize.min,
            children: [
              const Icon(Icons.error_outline, size: 48, color: Colors.red),
              const SizedBox(height: 12),
              // NOTE(review): hardcoded 'Retry' — consider an l10n key for
              // consistency with the localized titles used in this screen.
              FilledButton(
                onPressed: () =>
                    ref.read(allProductJobsProvider.notifier).refresh(),
                child: const Text('Retry'),
              ),
            ],
          ),
        ),
        data: (jobs) => jobs.isEmpty
            ? Center(
                child: Text(
                  // NOTE(review): reuses the "recent scans" section title as
                  // the empty-state label — verify a dedicated key isn't
                  // intended here.
                  l10n.recentScans,
                  style: Theme.of(context).textTheme.bodyLarge,
                ),
              )
            : RefreshIndicator(
                onRefresh: () =>
                    ref.read(allProductJobsProvider.notifier).refresh(),
                child: ListView.builder(
                  itemCount: jobs.length,
                  itemBuilder: (context, index) =>
                      _JobTile(job: jobs[index]),
                ),
              ),
      ),
    );
  }
}
/// A single row in the product-job history list.
///
/// Done jobs show an item-count chip and tap through to the confirmation
/// screen; failed jobs show an error icon; any other status shows a spinner.
class _JobTile extends StatelessWidget {
  const _JobTile({required this.job});

  final ProductJobSummary job;

  @override
  Widget build(BuildContext context) {
    final l10n = AppLocalizations.of(context)!;
    final theme = Theme.of(context);
    final isReceipt = job.jobType == 'receipt';
    final dateLabel =
        DateFormat.yMd().add_jm().format(job.createdAt.toLocal());

    Widget trailing;
    if (job.status == 'done') {
      trailing = Chip(
        label: Text(
          '${job.result?.items.length ?? 0}',
          style: theme.textTheme.labelSmall,
        ),
        avatar: const Icon(Icons.check_circle_outline,
            size: 16, color: Colors.green),
      );
    } else if (job.status == 'failed') {
      trailing = const Icon(Icons.error_outline, color: Colors.red);
    } else {
      trailing = const SizedBox(
        width: 16,
        height: 16,
        child: CircularProgressIndicator(strokeWidth: 2),
      );
    }

    final canOpen = job.status == 'done' && job.result != null;
    return ListTile(
      leading: Icon(
        isReceipt ? Icons.receipt_long_outlined : Icons.camera_alt_outlined,
        color: theme.colorScheme.primary,
      ),
      title: Text(isReceipt ? l10n.jobTypeReceipt : l10n.jobTypeProducts),
      subtitle: Text(dateLabel, style: theme.textTheme.bodySmall),
      trailing: trailing,
      onTap: canOpen
          ? () => context.push('/scan/confirm', extra: job.result!.items)
          : null,
    );
  }
}

View File

@@ -0,0 +1,75 @@
import 'package:flutter_riverpod/flutter_riverpod.dart';
import '../scan/recognition_service.dart';
// ---------------------------------------------------------------------------
// Recent product jobs (last 7 days) — shown on the products screen
// ---------------------------------------------------------------------------
/// Loads product recognition jobs from the last 7 days and exposes them as an
/// [AsyncValue] for the products screen's recent-scans section.
class _RecentProductJobsNotifier
    extends StateNotifier<AsyncValue<List<ProductJobSummary>>> {
  _RecentProductJobsNotifier(this._service)
      : super(const AsyncValue.loading()) {
    load();
  }

  final RecognitionService _service;

  /// Fetches the job list, transitioning loading -> data/error.
  Future<void> load() async {
    state = const AsyncValue.loading();
    try {
      state = AsyncValue.data(await _service.listRecentProductJobs());
    } catch (error, stack) {
      state = AsyncValue.error(error, stack);
    }
  }

  /// Named alias for [load], for readable call sites.
  Future<void> refresh() => load();
}
/// Product jobs from the last 7 days; backs the recent-scans section on the
/// products screen.
final recentProductJobsProvider = StateNotifierProvider<
    _RecentProductJobsNotifier, AsyncValue<List<ProductJobSummary>>>((ref) {
  final service = ref.read(recognitionServiceProvider);
  return _RecentProductJobsNotifier(service);
});
// ---------------------------------------------------------------------------
// All product jobs — shown on the history screen
// ---------------------------------------------------------------------------
/// Loads the full product-recognition job history as an [AsyncValue] for the
/// history screen.
class _AllProductJobsNotifier
    extends StateNotifier<AsyncValue<List<ProductJobSummary>>> {
  _AllProductJobsNotifier(this._service)
      : super(const AsyncValue.loading()) {
    load();
  }

  final RecognitionService _service;

  /// Fetches the complete job list, transitioning loading -> data/error.
  Future<void> load() async {
    state = const AsyncValue.loading();
    try {
      state = AsyncValue.data(await _service.listAllProductJobs());
    } catch (error, stack) {
      state = AsyncValue.error(error, stack);
    }
  }

  /// Named alias for [load], for readable call sites.
  Future<void> refresh() => load();
}
/// Every product recognition job for the current user; backs the history screen.
final allProductJobsProvider = StateNotifierProvider<_AllProductJobsNotifier,
    AsyncValue<List<ProductJobSummary>>>((ref) {
  final service = ref.read(recognitionServiceProvider);
  return _AllProductJobsNotifier(service);
});
// ---------------------------------------------------------------------------
// SSE stream for a single product job
// ---------------------------------------------------------------------------
/// SSE event stream for one product job, keyed by job ID; used by the watch
/// screen to react to queued/processing/done/failed transitions.
final productJobStreamProvider =
    StreamProvider.family<ProductJobEvent, String>((ref, jobId) {
  final service = ref.read(recognitionServiceProvider);
  return service.streamProductJobEvents(jobId);
});

View File

@@ -3,7 +3,10 @@ import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:go_router/go_router.dart'; import 'package:go_router/go_router.dart';
import '../../core/locale/unit_provider.dart'; import '../../core/locale/unit_provider.dart';
import '../../l10n/app_localizations.dart';
import '../../shared/models/user_product.dart'; import '../../shared/models/user_product.dart';
import '../scan/recognition_service.dart';
import 'product_job_provider.dart';
import 'user_product_provider.dart'; import 'user_product_provider.dart';
void _showAddMenu(BuildContext context) { void _showAddMenu(BuildContext context) {
@@ -57,21 +60,181 @@ class ProductsScreen extends ConsumerWidget {
icon: const Icon(Icons.add), icon: const Icon(Icons.add),
label: const Text('Добавить'), label: const Text('Добавить'),
), ),
body: state.when( body: Column(
loading: () => const Center(child: CircularProgressIndicator()), children: [
error: (err, _) => _ErrorView( const _RecentScansSection(),
onRetry: () => ref.read(userProductsProvider.notifier).refresh(), Expanded(
), child: state.when(
data: (products) => products.isEmpty loading: () => const Center(child: CircularProgressIndicator()),
? _EmptyState( error: (err, _) => _ErrorView(
onAdd: () => _showAddMenu(context), onRetry: () => ref.read(userProductsProvider.notifier).refresh(),
) ),
: _ProductList(products: products), data: (products) => products.isEmpty
? _EmptyState(onAdd: () => _showAddMenu(context))
: _ProductList(products: products),
),
),
],
), ),
); );
} }
} }
// ---------------------------------------------------------------------------
// Recent product recognition scans section
// ---------------------------------------------------------------------------
/// Horizontal strip of the user's most recent recognition scans, shown at the
/// top of the products screen. Renders nothing while loading, on error, or
/// when there are no recent jobs.
class _RecentScansSection extends ConsumerWidget {
  const _RecentScansSection();

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    final jobsState = ref.watch(recentProductJobsProvider);
    // valueOrNull collapses loading/error to "hide the section".
    final jobs = jobsState.valueOrNull;
    if (jobs == null || jobs.isEmpty) return const SizedBox.shrink();
    final l10n = AppLocalizations.of(context)!;
    final theme = Theme.of(context);
    return Column(
      crossAxisAlignment: CrossAxisAlignment.start,
      children: [
        Padding(
          padding: const EdgeInsets.fromLTRB(16, 12, 8, 4),
          child: Row(
            children: [
              Icon(Icons.document_scanner_outlined,
                  size: 18, color: theme.colorScheme.primary),
              const SizedBox(width: 6),
              Text(
                l10n.recentScans,
                style: theme.textTheme.titleSmall?.copyWith(
                  color: theme.colorScheme.onSurfaceVariant,
                  fontWeight: FontWeight.w600,
                ),
              ),
              const Spacer(),
              // "See all" opens the full job-history screen.
              TextButton(
                onPressed: () => context.push('/products/job-history'),
                child: Text(l10n.seeAllScans),
              ),
            ],
          ),
        ),
        SizedBox(
          height: 72,
          child: ListView.builder(
            scrollDirection: Axis.horizontal,
            padding: const EdgeInsets.symmetric(horizontal: 12),
            // Cap the strip at the 5 newest jobs.
            itemCount: jobs.length > 5 ? 5 : jobs.length,
            itemBuilder: (context, index) =>
                _ScanJobChip(job: jobs[index]),
          ),
        ),
        const Divider(height: 1),
      ],
    );
  }
}
/// A compact tappable chip representing one recent scan job.
///
/// Done jobs open the confirmation screen with their items; active and failed
/// jobs are not tappable.
///
/// Fix: declared as [StatelessWidget] — the previous ConsumerWidget variant
/// never used its WidgetRef, adding riverpod overhead for nothing.
class _ScanJobChip extends StatelessWidget {
  const _ScanJobChip({required this.job});

  final ProductJobSummary job;

  @override
  Widget build(BuildContext context) {
    final theme = Theme.of(context);
    final isDone = job.status == 'done';
    final isFailed = job.status == 'failed';
    final isActive = !isDone && !isFailed;
    final l10n = AppLocalizations.of(context)!;
    return Padding(
      padding: const EdgeInsets.symmetric(horizontal: 4, vertical: 8),
      child: InkWell(
        borderRadius: BorderRadius.circular(12),
        onTap: isDone && job.result != null
            ? () => context.push('/scan/confirm', extra: job.result!.items)
            : null,
        child: Container(
          padding: const EdgeInsets.symmetric(horizontal: 12, vertical: 8),
          decoration: BoxDecoration(
            borderRadius: BorderRadius.circular(12),
            color: theme.colorScheme.surfaceContainerHighest,
          ),
          child: Row(
            mainAxisSize: MainAxisSize.min,
            children: [
              Icon(
                job.jobType == 'receipt'
                    ? Icons.receipt_long_outlined
                    : Icons.camera_alt_outlined,
                size: 20,
                color: theme.colorScheme.onSurfaceVariant,
              ),
              const SizedBox(width: 8),
              Column(
                mainAxisAlignment: MainAxisAlignment.center,
                crossAxisAlignment: CrossAxisAlignment.start,
                children: [
                  Text(
                    job.jobType == 'receipt'
                        ? l10n.jobTypeReceipt
                        : l10n.jobTypeProducts,
                    style: theme.textTheme.labelMedium,
                  ),
                  _StatusBadge(
                    status: job.status,
                    isFailed: isFailed,
                    isActive: isActive,
                  ),
                ],
              ),
            ],
          ),
        ),
      ),
    );
  }
}
/// Compact status indicator for a scan chip: spinner plus the raw status text
/// while the job is active, otherwise a green check or red error icon.
///
/// NOTE(review): the active branch shows the backend status string verbatim
/// (e.g. 'pending') — consider localizing it.
class _StatusBadge extends StatelessWidget {
  const _StatusBadge({
    required this.status,
    required this.isFailed,
    required this.isActive,
  });

  final String status;
  final bool isFailed;
  final bool isActive;

  @override
  Widget build(BuildContext context) {
    if (!isActive) {
      return Icon(
        isFailed ? Icons.error_outline : Icons.check_circle_outline,
        size: 14,
        color: isFailed ? Colors.red : Colors.green,
      );
    }
    final theme = Theme.of(context);
    return Row(
      mainAxisSize: MainAxisSize.min,
      children: [
        const SizedBox(
          width: 10,
          height: 10,
          child: CircularProgressIndicator(strokeWidth: 1.5),
        ),
        const SizedBox(width: 4),
        Text(status, style: theme.textTheme.labelSmall),
      ],
    );
  }
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Product list split into expiring / normal sections // Product list split into expiring / normal sections
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------

View File

@@ -0,0 +1,172 @@
import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:go_router/go_router.dart';
import '../../l10n/app_localizations.dart';
import '../products/product_job_provider.dart';
import 'recognition_service.dart';
/// Watches a product recognition job via SSE and navigates to the confirmation
/// screen when the job finishes.
///
/// [jobCreated] is the 202 submit response; its queue estimates seed the
/// initial progress view before the first SSE event arrives.
class ProductJobWatchScreen extends ConsumerStatefulWidget {
  const ProductJobWatchScreen({super.key, required this.jobCreated});

  final ProductJobCreated jobCreated;

  @override
  ConsumerState<ProductJobWatchScreen> createState() =>
      _ProductJobWatchScreenState();
}
class _ProductJobWatchScreenState
    extends ConsumerState<ProductJobWatchScreen> {
  // Last failure message from a ProductJobFailed event; non-null switches the
  // body to the error view.
  String? _errorMessage;
  // Guards against navigating twice if the stream re-emits after "done".
  bool _navigated = false;

  @override
  Widget build(BuildContext context) {
    final l10n = AppLocalizations.of(context)!;
    // ref.listen must be registered during build for a ConsumerState; it
    // reacts to terminal events by navigating (done) or surfacing the error
    // message (failed).
    ref.listen(
      productJobStreamProvider(widget.jobCreated.jobId),
      (previous, next) {
        next.whenData((event) {
          if (_navigated) return;
          if (event is ProductJobDone) {
            _navigated = true;
            // Refresh the recent-scans section so the finished job shows up.
            ref.invalidate(recentProductJobsProvider);
            context.pushReplacement('/scan/confirm', extra: event.result.items);
          } else if (event is ProductJobFailed) {
            setState(() => _errorMessage = event.error);
          }
        });
      },
    );
    final streamState =
        ref.watch(productJobStreamProvider(widget.jobCreated.jobId));
    return Scaffold(
      appBar: AppBar(title: Text(l10n.processingProducts)),
      body: _errorMessage != null
          ? _ErrorBody(message: _errorMessage!)
          : streamState.when(
              // No SSE event yet: show estimates from the submit response.
              loading: () => _ProgressBody(jobCreated: widget.jobCreated),
              data: (event) => switch (event) {
                ProductJobQueued(
                  position: final position,
                  estimatedSeconds: final estimated,
                ) =>
                  _QueuedBody(position: position, estimatedSeconds: estimated),
                ProductJobProcessing() =>
                  _ProcessingBody(label: l10n.processingProducts),
                // "done" keeps the spinner visible for the brief moment before
                // the listener above replaces this route.
                ProductJobDone() => _ProcessingBody(label: l10n.processingProducts),
                ProductJobFailed(error: final err) => _ErrorBody(message: err),
              },
              error: (err, _) => _ErrorBody(message: err.toString()),
            ),
    );
  }
}
/// Initial body shown before the first SSE event arrives: reuses the queue
/// view seeded with the estimates returned by the submit endpoint.
class _ProgressBody extends StatelessWidget {
  const _ProgressBody({required this.jobCreated});

  final ProductJobCreated jobCreated;

  @override
  Widget build(BuildContext context) => _QueuedBody(
        position: jobCreated.queuePosition,
        estimatedSeconds: jobCreated.estimatedSeconds,
      );
}
/// Spinner plus an optional "~Ns" estimate shown while the job waits in the queue.
class _QueuedBody extends StatelessWidget {
  const _QueuedBody({required this.position, required this.estimatedSeconds});

  final int position;
  final int estimatedSeconds;

  @override
  Widget build(BuildContext context) {
    final l10n = AppLocalizations.of(context)!;
    final textTheme = Theme.of(context).textTheme;
    final children = <Widget>[
      const CircularProgressIndicator(),
      const SizedBox(height: 24),
      Text(l10n.processingProducts, style: textTheme.titleMedium),
    ];
    // The estimate is only shown when other jobs are ahead of this one.
    if (position > 0) {
      children
        ..add(const SizedBox(height: 8))
        ..add(Text('~${estimatedSeconds}s', style: textTheme.bodySmall));
    }
    return Center(
      child: Padding(
        padding: const EdgeInsets.all(32),
        child: Column(mainAxisSize: MainAxisSize.min, children: children),
      ),
    );
  }
}
/// Centered spinner with a label while a worker is processing the job.
class _ProcessingBody extends StatelessWidget {
  const _ProcessingBody({required this.label});

  final String label;

  @override
  Widget build(BuildContext context) {
    final titleStyle = Theme.of(context).textTheme.titleMedium;
    return Center(
      child: Column(
        mainAxisSize: MainAxisSize.min,
        children: [
          const CircularProgressIndicator(),
          const SizedBox(height: 24),
          Text(label, style: titleStyle),
        ],
      ),
    );
  }
}
/// Error icon, message, and a back button shown when recognition fails.
class _ErrorBody extends StatelessWidget {
  const _ErrorBody({required this.message});

  final String message;

  @override
  Widget build(BuildContext context) {
    final bodyStyle = Theme.of(context).textTheme.bodyMedium;
    return Center(
      child: Padding(
        padding: const EdgeInsets.all(32),
        child: Column(
          mainAxisSize: MainAxisSize.min,
          children: [
            const Icon(Icons.error_outline, size: 48, color: Colors.red),
            const SizedBox(height: 16),
            Text(message, textAlign: TextAlign.center, style: bodyStyle),
            const SizedBox(height: 24),
            FilledButton(
              onPressed: () => context.pop(),
              child: const Text('Back'),
            ),
          ],
        ),
      ),
    );
  }
}

View File

@@ -1,7 +1,6 @@
import 'dart:async'; import 'dart:async';
import 'dart:convert'; import 'dart:convert';
import 'package:dio/dio.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart'; import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:http/http.dart' as http; import 'package:http/http.dart' as http;
import 'package:image_picker/image_picker.dart'; import 'package:image_picker/image_picker.dart';
@@ -142,7 +141,110 @@ class DishResult {
} }
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Async job models // Product job models
// ---------------------------------------------------------------------------
/// Result of a completed product or receipt recognition job.
class ProductJobResult {
  final String jobType;
  final List<RecognizedItem> items;
  final List<UnrecognizedItem> unrecognized;

  const ProductJobResult({
    required this.jobType,
    required this.items,
    required this.unrecognized,
  });

  /// Builds a result from the API payload; missing fields default to ''
  /// and empty lists so partial payloads never throw.
  factory ProductJobResult.fromJson(Map<String, dynamic> json) {
    final rawItems = json['items'] as List<dynamic>? ?? const [];
    final rawUnrecognized = json['unrecognized'] as List<dynamic>? ?? const [];
    return ProductJobResult(
      jobType: json['job_type'] as String? ?? '',
      items: [
        for (final element in rawItems)
          RecognizedItem.fromJson(element as Map<String, dynamic>),
      ],
      unrecognized: [
        for (final element in rawUnrecognized)
          UnrecognizedItem.fromJson(element as Map<String, dynamic>),
      ],
    );
  }
}
/// The 202 response from POST /ai/recognize-receipt or /ai/recognize-products.
class ProductJobCreated {
  final String jobId;
  final int queuePosition;
  final int estimatedSeconds;

  const ProductJobCreated({
    required this.jobId,
    required this.queuePosition,
    required this.estimatedSeconds,
  });

  /// Parses the submit response; queue fields fall back to 0 when absent.
  factory ProductJobCreated.fromJson(Map<String, dynamic> json) =>
      ProductJobCreated(
        jobId: json['job_id'] as String,
        queuePosition: (json['queue_position'] as int?) ?? 0,
        estimatedSeconds: (json['estimated_seconds'] as int?) ?? 0,
      );
}
/// A lightweight summary of a product recognition job for list endpoints.
class ProductJobSummary {
  final String id;
  final String jobType;
  final String status;
  final ProductJobResult? result;
  final String? error;
  final DateTime createdAt;

  const ProductJobSummary({
    required this.id,
    required this.jobType,
    required this.status,
    this.result,
    this.error,
    required this.createdAt,
  });

  /// Parses a summary row; [result] stays null until the job is done.
  factory ProductJobSummary.fromJson(Map<String, dynamic> json) {
    final rawResult = json['result'];
    return ProductJobSummary(
      id: json['id'] as String,
      jobType: (json['job_type'] as String?) ?? '',
      status: (json['status'] as String?) ?? '',
      result: rawResult == null
          ? null
          : ProductJobResult.fromJson(rawResult as Map<String, dynamic>),
      error: json['error'] as String?,
      createdAt: DateTime.parse(json['created_at'] as String),
    );
  }
}
/// Events emitted by the SSE stream for a product recognition job.
sealed class ProductJobEvent {}

/// Job is waiting in the queue; carries the server's position/ETA estimate.
class ProductJobQueued extends ProductJobEvent {
  final int position;
  final int estimatedSeconds;
  ProductJobQueued({required this.position, required this.estimatedSeconds});
}

/// A worker has picked the job up.
class ProductJobProcessing extends ProductJobEvent {}

/// Terminal event: recognition succeeded with [result].
class ProductJobDone extends ProductJobEvent {
  final ProductJobResult result;
  ProductJobDone(this.result);
}

/// Terminal event: recognition failed with a user-facing [error] message.
class ProductJobFailed extends ProductJobEvent {
  final String error;
  ProductJobFailed(this.error);
}
// ---------------------------------------------------------------------------
// Dish job models
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
/// A lightweight summary of a dish recognition job (no image payload). /// A lightweight summary of a dish recognition job (no image payload).
@@ -239,32 +341,71 @@ class RecognitionService {
final AppConfig _appConfig; final AppConfig _appConfig;
final String Function() _languageGetter; final String Function() _languageGetter;
/// Recognizes food items from a receipt photo. /// Submits a receipt image for async recognition.
Future<ReceiptResult> recognizeReceipt(XFile image) async { /// Returns immediately with a [ProductJobCreated] containing the job ID.
Future<ProductJobCreated> submitReceiptRecognition(XFile image) async {
final payload = await _buildImagePayload(image); final payload = await _buildImagePayload(image);
final data = await _client.post('/ai/recognize-receipt', data: payload); final data = await _client.post('/ai/recognize-receipt', data: payload);
return ReceiptResult( return ProductJobCreated.fromJson(data);
items: (data['items'] as List<dynamic>? ?? [])
.map((element) => RecognizedItem.fromJson(element as Map<String, dynamic>))
.toList(),
unrecognized: (data['unrecognized'] as List<dynamic>? ?? [])
.map((element) => UnrecognizedItem.fromJson(element as Map<String, dynamic>))
.toList(),
);
} }
/// Recognizes food items from 13 product photos. /// Submits 13 product images for async recognition.
Future<List<RecognizedItem>> recognizeProducts(List<XFile> images) async { /// Returns immediately with a [ProductJobCreated] containing the job ID.
Future<ProductJobCreated> submitProductsRecognition(List<XFile> images) async {
final imageList = await Future.wait(images.map(_buildImagePayload)); final imageList = await Future.wait(images.map(_buildImagePayload));
final data = await _client.post( final data = await _client.post(
'/ai/recognize-products', '/ai/recognize-products',
data: {'images': imageList}, data: {'images': imageList},
); );
return (data['items'] as List<dynamic>? ?? []) return ProductJobCreated.fromJson(data);
.map((element) => RecognizedItem.fromJson(element as Map<String, dynamic>)) }
/// Returns product recognition jobs from the last 7 days.
Future<List<ProductJobSummary>> listRecentProductJobs() async {
final data = await _client.getList('/ai/product-jobs');
return data
.map((element) =>
ProductJobSummary.fromJson(element as Map<String, dynamic>))
.toList(); .toList();
} }
/// Returns all product recognition jobs for the current user, newest first.
Future<List<ProductJobSummary>> listAllProductJobs() async {
  final rawJobs = await _client.getList('/ai/product-jobs/history');
  // Collection-for builds the same growable list as .map(...).toList().
  return [
    for (final entry in rawJobs)
      ProductJobSummary.fromJson(entry as Map<String, dynamic>),
  ];
}
/// Opens an SSE stream for product job [jobId] and emits [ProductJobEvent]s
/// until the job reaches a terminal state (done or failed) or the listener
/// cancels the subscription.
Stream<ProductJobEvent> streamProductJobEvents(String jobId) async* {
  final uri =
      Uri.parse('${_appConfig.apiBaseUrl}/ai/product-jobs/$jobId/stream');
  // Destructure each (eventName, jsonPayload) record from the shared SSE reader.
  await for (final (eventName, payload) in _openSseStream(uri)) {
    if (eventName == 'queued') {
      yield ProductJobQueued(
        position: payload['position'] as int? ?? 0,
        estimatedSeconds: payload['estimated_seconds'] as int? ?? 0,
      );
    } else if (eventName == 'processing') {
      yield ProductJobProcessing();
    } else if (eventName == 'done') {
      // Terminal state: emit the result, then close the stream.
      yield ProductJobDone(ProductJobResult.fromJson(payload));
      return;
    } else if (eventName == 'failed') {
      // Terminal state: surface the server-provided error message.
      yield ProductJobFailed(
          payload['error'] as String? ?? 'Recognition failed');
      return;
    }
    // Unknown event names are ignored, matching the dish-job stream.
  }
}
/// Submits a dish image for async recognition. /// Submits a dish image for async recognition.
/// Returns a [DishJobCreated] with the job ID and queue position. /// Returns a [DishJobCreated] with the job ID and queue position.
Future<DishJobCreated> submitDishRecognition( Future<DishJobCreated> submitDishRecognition(
@@ -298,21 +439,45 @@ class RecognitionService {
.toList(); .toList();
} }
/// Opens an SSE stream for job [jobId] and emits [DishJobEvent]s until the /// Opens an SSE stream for dish job [jobId] and emits [DishJobEvent]s until
/// job reaches a terminal state (done or failed) or the stream is cancelled. /// the job reaches a terminal state (done or failed) or the stream is cancelled.
Stream<DishJobEvent> streamJobEvents(String jobId) async* {
final streamUri = Uri.parse('${_appConfig.apiBaseUrl}/ai/jobs/$jobId/stream');
await for (final parsed in _openSseStream(streamUri)) {
final eventName = parsed.$1;
final json = parsed.$2;
DishJobEvent? event;
switch (eventName) {
case 'queued':
event = DishJobQueued(
position: json['position'] as int? ?? 0,
estimatedSeconds: json['estimated_seconds'] as int? ?? 0,
);
case 'processing':
event = DishJobProcessing();
case 'done':
event = DishJobDone(DishResult.fromJson(json));
case 'failed':
event = DishJobFailed(json['error'] as String? ?? 'Recognition failed');
}
if (event != null) {
yield event;
if (event is DishJobDone || event is DishJobFailed) return;
}
}
}
/// Opens a raw SSE connection and emits (eventName, jsonData) pairs.
/// ///
/// Uses [http.Client] instead of Dio because on Flutter Web Dio relies on /// Uses [http.Client] instead of Dio because on Flutter Web Dio relies on
/// XHR which does not support SSE streaming. [http.BrowserClient] reads the /// XHR which does not support SSE streaming.
/// response via XHR onProgress events and delivers chunks before the Stream<(String, Map<String, dynamic>)> _openSseStream(Uri streamUri) async* {
/// connection is closed.
Stream<DishJobEvent> streamJobEvents(String jobId) async* {
final token = await _storage.getAccessToken(); final token = await _storage.getAccessToken();
final language = _languageGetter(); final language = _languageGetter();
final uri = Uri.parse('${_appConfig.apiBaseUrl}/ai/jobs/$jobId/stream');
final httpClient = http.Client(); final httpClient = http.Client();
try { try {
final request = http.Request('GET', uri) final request = http.Request('GET', streamUri)
..headers['Authorization'] = token != null ? 'Bearer $token' : '' ..headers['Authorization'] = token != null ? 'Bearer $token' : ''
..headers['Accept'] = 'text/event-stream' ..headers['Accept'] = 'text/event-stream'
..headers['Accept-Language'] = language ..headers['Accept-Language'] = language
@@ -329,7 +494,6 @@ class RecognitionService {
buffer.write(chunk); buffer.write(chunk);
final text = buffer.toString(); final text = buffer.toString();
// Process complete SSE messages (terminated by \n\n).
int doubleNewlineIndex; int doubleNewlineIndex;
var remaining = text; var remaining = text;
while ((doubleNewlineIndex = remaining.indexOf('\n\n')) != -1) { while ((doubleNewlineIndex = remaining.indexOf('\n\n')) != -1) {
@@ -341,10 +505,13 @@ class RecognitionService {
currentEventName = line.substring(6).trim(); currentEventName = line.substring(6).trim();
} else if (line.startsWith('data:')) { } else if (line.startsWith('data:')) {
final dataPayload = line.substring(5).trim(); final dataPayload = line.substring(5).trim();
final event = _parseSseEvent(currentEventName, dataPayload); try {
if (event != null) { final jsonData = jsonDecode(dataPayload) as Map<String, dynamic>;
yield event; if (currentEventName != null) {
if (event is DishJobDone || event is DishJobFailed) return; yield (currentEventName, jsonData);
}
} catch (_) {
// Malformed JSON — skip this message.
} }
currentEventName = null; currentEventName = null;
} }
@@ -360,29 +527,6 @@ class RecognitionService {
} }
} }
DishJobEvent? _parseSseEvent(String? eventName, String dataPayload) {
try {
final json = jsonDecode(dataPayload) as Map<String, dynamic>;
switch (eventName) {
case 'queued':
return DishJobQueued(
position: json['position'] as int? ?? 0,
estimatedSeconds: json['estimated_seconds'] as int? ?? 0,
);
case 'processing':
return DishJobProcessing();
case 'done':
return DishJobDone(DishResult.fromJson(json));
case 'failed':
return DishJobFailed(json['error'] as String? ?? 'Recognition failed');
default:
return null;
}
} catch (_) {
return null;
}
}
Future<Map<String, String>> _buildImagePayload(XFile image) async { Future<Map<String, String>> _buildImagePayload(XFile image) async {
final bytes = await image.readAsBytes(); final bytes = await image.readAsBytes();
final base64Data = base64Encode(bytes); final base64Data = base64Encode(bytes);

View File

@@ -82,30 +82,26 @@ class _ScanScreenState extends ConsumerState<ScanScreen> {
final service = ref.read(recognitionServiceProvider); final service = ref.read(recognitionServiceProvider);
final l10n = AppLocalizations.of(context)!; final l10n = AppLocalizations.of(context)!;
// Show loading overlay while the AI processes.
showDialog( showDialog(
context: context, context: context,
barrierDismissible: false, barrierDismissible: false,
builder: (dialogContext) => _LoadingDialog(label: l10n.recognizing), builder: (dialogContext) => _LoadingDialog(label: l10n.scanSubmitting),
); );
try { try {
ProductJobCreated jobCreated;
switch (mode) { switch (mode) {
case _Mode.receipt: case _Mode.receipt:
final result = await service.recognizeReceipt(files.first); jobCreated = await service.submitReceiptRecognition(files.first);
if (context.mounted) {
Navigator.pop(context); // close loading
context.push('/scan/confirm', extra: result.items);
}
case _Mode.products: case _Mode.products:
final items = await service.recognizeProducts(files); jobCreated = await service.submitProductsRecognition(files);
if (context.mounted) { }
Navigator.pop(context); if (context.mounted) {
context.push('/scan/confirm', extra: items); Navigator.pop(context); // close loading dialog
} context.push('/scan/product-job-watch', extra: jobCreated);
} }
} catch (recognitionError) { } catch (recognitionError) {
debugPrint('Recognition error: $recognitionError'); debugPrint('Recognition submit error: $recognitionError');
if (context.mounted) { if (context.mounted) {
Navigator.pop(context); // close loading Navigator.pop(context); // close loading
ScaffoldMessenger.of(context).showSnackBar( ScaffoldMessenger.of(context).showSnackBar(

View File

@@ -165,5 +165,12 @@
"planProductsSkip": "تخطي اختيار المنتجات", "planProductsSkip": "تخطي اختيار المنتجات",
"planProductsSkipNoProducts": "التخطيط بدون منتجات", "planProductsSkipNoProducts": "التخطيط بدون منتجات",
"planProductsSelectAll": "تحديد الكل", "planProductsSelectAll": "تحديد الكل",
"planProductsDeselectAll": "إلغاء تحديد الكل" "planProductsDeselectAll": "إلغاء تحديد الكل",
"recentScans": "عمليات المسح الأخيرة",
"seeAllScans": "عرض الكل",
"productJobHistoryTitle": "سجل المسح",
"jobTypeReceipt": "إيصال",
"jobTypeProducts": "منتجات",
"scanSubmitting": "جارٍ الإرسال...",
"processingProducts": "جارٍ المعالجة..."
} }

View File

@@ -165,5 +165,12 @@
"planProductsSkip": "Produktauswahl überspringen", "planProductsSkip": "Produktauswahl überspringen",
"planProductsSkipNoProducts": "Ohne Produkte planen", "planProductsSkipNoProducts": "Ohne Produkte planen",
"planProductsSelectAll": "Alle auswählen", "planProductsSelectAll": "Alle auswählen",
"planProductsDeselectAll": "Alle abwählen" "planProductsDeselectAll": "Alle abwählen",
"recentScans": "Letzte Scans",
"seeAllScans": "Alle",
"productJobHistoryTitle": "Scan-Verlauf",
"jobTypeReceipt": "Kassenbon",
"jobTypeProducts": "Produkte",
"scanSubmitting": "Wird gesendet...",
"processingProducts": "Verarbeitung..."
} }

View File

@@ -28,7 +28,9 @@
"queuePosition": "Position {position}", "queuePosition": "Position {position}",
"@queuePosition": { "@queuePosition": {
"placeholders": { "placeholders": {
"position": { "type": "int" } "position": {
"type": "int"
}
} }
}, },
"processing": "Processing...", "processing": "Processing...",
@@ -116,7 +118,9 @@
"noResultsForQuery": "Nothing found for \"{query}\"", "noResultsForQuery": "Nothing found for \"{query}\"",
"@noResultsForQuery": { "@noResultsForQuery": {
"placeholders": { "placeholders": {
"query": { "type": "String" } "query": {
"type": "String"
}
} }
}, },
"servingsLabel": "Servings", "servingsLabel": "Servings",
@@ -125,7 +129,9 @@
"planningForDate": "Planning for {date}", "planningForDate": "Planning for {date}",
"@planningForDate": { "@planningForDate": {
"placeholders": { "placeholders": {
"date": { "type": "String" } "date": {
"type": "String"
}
} }
}, },
"markAsEaten": "Mark as eaten", "markAsEaten": "Mark as eaten",
@@ -134,7 +140,6 @@
"generateWeekSubtitle": "AI will create a menu with breakfast, lunch and dinner for the whole week", "generateWeekSubtitle": "AI will create a menu with breakfast, lunch and dinner for the whole week",
"generatingMenu": "Generating menu...", "generatingMenu": "Generating menu...",
"dayPlannedLabel": "Day planned", "dayPlannedLabel": "Day planned",
"planMenuButton": "Plan meals", "planMenuButton": "Plan meals",
"planMenuTitle": "What to plan?", "planMenuTitle": "What to plan?",
"planOptionSingleMeal": "Single meal", "planOptionSingleMeal": "Single meal",
@@ -149,16 +154,23 @@
"planSelectMealType": "Meal type", "planSelectMealType": "Meal type",
"planSelectRange": "Select period", "planSelectRange": "Select period",
"planGenerateButton": "Plan", "planGenerateButton": "Plan",
"planGenerating": "Generating plan\u2026", "planGenerating": "Generating plan",
"planSuccess": "Menu planned!", "planSuccess": "Menu planned!",
"planProductsTitle": "Products for the menu", "planProductsTitle": "Products for the menu",
"planProductsSubtitle": "AI will take the selected products into account when generating recipes", "planProductsSubtitle": "AI will take the selected products into account when generating recipes",
"planProductsEmpty": "No products added", "planProductsEmpty": "No products added",
"planProductsEmptyMessage": "Add products you have at home \u2014 AI will suggest recipes from what you already have", "planProductsEmptyMessage": "Add products you have at home AI will suggest recipes from what you already have",
"planProductsAddProducts": "Add products", "planProductsAddProducts": "Add products",
"planProductsContinue": "Continue", "planProductsContinue": "Continue",
"planProductsSkip": "Skip product selection", "planProductsSkip": "Skip product selection",
"planProductsSkipNoProducts": "Plan without products", "planProductsSkipNoProducts": "Plan without products",
"planProductsSelectAll": "Select all", "planProductsSelectAll": "Select all",
"planProductsDeselectAll": "Deselect all" "planProductsDeselectAll": "Deselect all",
"recentScans": "Recent scans",
"seeAllScans": "See all",
"productJobHistoryTitle": "Scan history",
"jobTypeReceipt": "Receipt",
"jobTypeProducts": "Products",
"scanSubmitting": "Submitting...",
"processingProducts": "Processing..."
} }

View File

@@ -165,5 +165,12 @@
"planProductsSkip": "Omitir selección de productos", "planProductsSkip": "Omitir selección de productos",
"planProductsSkipNoProducts": "Planificar sin productos", "planProductsSkipNoProducts": "Planificar sin productos",
"planProductsSelectAll": "Seleccionar todo", "planProductsSelectAll": "Seleccionar todo",
"planProductsDeselectAll": "Deseleccionar todo" "planProductsDeselectAll": "Deseleccionar todo",
"recentScans": "Escaneos recientes",
"seeAllScans": "Ver todos",
"productJobHistoryTitle": "Historial de escaneos",
"jobTypeReceipt": "Ticket",
"jobTypeProducts": "Productos",
"scanSubmitting": "Enviando...",
"processingProducts": "Procesando..."
} }

View File

@@ -165,5 +165,12 @@
"planProductsSkip": "Ignorer la sélection des produits", "planProductsSkip": "Ignorer la sélection des produits",
"planProductsSkipNoProducts": "Planifier sans produits", "planProductsSkipNoProducts": "Planifier sans produits",
"planProductsSelectAll": "Tout sélectionner", "planProductsSelectAll": "Tout sélectionner",
"planProductsDeselectAll": "Tout désélectionner" "planProductsDeselectAll": "Tout désélectionner",
"recentScans": "Scans récents",
"seeAllScans": "Tout voir",
"productJobHistoryTitle": "Historique des scans",
"jobTypeReceipt": "Reçu",
"jobTypeProducts": "Produits",
"scanSubmitting": "Envoi...",
"processingProducts": "Traitement..."
} }

View File

@@ -165,5 +165,12 @@
"planProductsSkip": "उत्पाद चयन छोड़ें", "planProductsSkip": "उत्पाद चयन छोड़ें",
"planProductsSkipNoProducts": "उत्पादों के बिना योजना बनाएं", "planProductsSkipNoProducts": "उत्पादों के बिना योजना बनाएं",
"planProductsSelectAll": "सभी चुनें", "planProductsSelectAll": "सभी चुनें",
"planProductsDeselectAll": "सभी हटाएं" "planProductsDeselectAll": "सभी हटाएं",
"recentScans": "हाल के स्कैन",
"seeAllScans": "सभी देखें",
"productJobHistoryTitle": "स्कैन इतिहास",
"jobTypeReceipt": "रसीद",
"jobTypeProducts": "उत्पाद",
"scanSubmitting": "सबमिट हो रहा है...",
"processingProducts": "प्रोसेस हो रहा है..."
} }

View File

@@ -165,5 +165,12 @@
"planProductsSkip": "Salta la selezione dei prodotti", "planProductsSkip": "Salta la selezione dei prodotti",
"planProductsSkipNoProducts": "Pianifica senza prodotti", "planProductsSkipNoProducts": "Pianifica senza prodotti",
"planProductsSelectAll": "Seleziona tutto", "planProductsSelectAll": "Seleziona tutto",
"planProductsDeselectAll": "Deseleziona tutto" "planProductsDeselectAll": "Deseleziona tutto",
"recentScans": "Scansioni recenti",
"seeAllScans": "Vedi tutto",
"productJobHistoryTitle": "Cronologia scansioni",
"jobTypeReceipt": "Scontrino",
"jobTypeProducts": "Prodotti",
"scanSubmitting": "Invio...",
"processingProducts": "Elaborazione..."
} }

View File

@@ -165,5 +165,12 @@
"planProductsSkip": "食材選択をスキップ", "planProductsSkip": "食材選択をスキップ",
"planProductsSkipNoProducts": "食材なしでプランニング", "planProductsSkipNoProducts": "食材なしでプランニング",
"planProductsSelectAll": "すべて選択", "planProductsSelectAll": "すべて選択",
"planProductsDeselectAll": "すべて解除" "planProductsDeselectAll": "すべて解除",
"recentScans": "最近のスキャン",
"seeAllScans": "すべて表示",
"productJobHistoryTitle": "スキャン履歴",
"jobTypeReceipt": "レシート",
"jobTypeProducts": "商品",
"scanSubmitting": "送信中...",
"processingProducts": "処理中..."
} }

View File

@@ -165,5 +165,12 @@
"planProductsSkip": "재료 선택 건너뛰기", "planProductsSkip": "재료 선택 건너뛰기",
"planProductsSkipNoProducts": "재료 없이 계획하기", "planProductsSkipNoProducts": "재료 없이 계획하기",
"planProductsSelectAll": "모두 선택", "planProductsSelectAll": "모두 선택",
"planProductsDeselectAll": "모두 해제" "planProductsDeselectAll": "모두 해제",
"recentScans": "최근 스캔",
"seeAllScans": "전체 보기",
"productJobHistoryTitle": "스캔 기록",
"jobTypeReceipt": "영수증",
"jobTypeProducts": "제품",
"scanSubmitting": "제출 중...",
"processingProducts": "처리 중..."
} }

View File

@@ -987,6 +987,48 @@ abstract class AppLocalizations {
/// In en, this message translates to: /// In en, this message translates to:
/// **'Deselect all'** /// **'Deselect all'**
String get planProductsDeselectAll; String get planProductsDeselectAll;
/// No description provided for @recentScans.
///
/// In en, this message translates to:
/// **'Recent scans'**
String get recentScans;
/// No description provided for @seeAllScans.
///
/// In en, this message translates to:
/// **'See all'**
String get seeAllScans;
/// No description provided for @productJobHistoryTitle.
///
/// In en, this message translates to:
/// **'Scan history'**
String get productJobHistoryTitle;
/// No description provided for @jobTypeReceipt.
///
/// In en, this message translates to:
/// **'Receipt'**
String get jobTypeReceipt;
/// No description provided for @jobTypeProducts.
///
/// In en, this message translates to:
/// **'Products'**
String get jobTypeProducts;
/// No description provided for @scanSubmitting.
///
/// In en, this message translates to:
/// **'Submitting...'**
String get scanSubmitting;
/// No description provided for @processingProducts.
///
/// In en, this message translates to:
/// **'Processing...'**
String get processingProducts;
} }
class _AppLocalizationsDelegate class _AppLocalizationsDelegate

View File

@@ -452,4 +452,25 @@ class AppLocalizationsAr extends AppLocalizations {
@override @override
String get planProductsDeselectAll => 'إلغاء تحديد الكل'; String get planProductsDeselectAll => 'إلغاء تحديد الكل';
@override
String get recentScans => 'عمليات المسح الأخيرة';
@override
String get seeAllScans => 'عرض الكل';
@override
String get productJobHistoryTitle => 'سجل المسح';
@override
String get jobTypeReceipt => 'إيصال';
@override
String get jobTypeProducts => 'منتجات';
@override
String get scanSubmitting => 'جارٍ الإرسال...';
@override
String get processingProducts => 'جارٍ المعالجة...';
} }

View File

@@ -454,4 +454,25 @@ class AppLocalizationsDe extends AppLocalizations {
@override @override
String get planProductsDeselectAll => 'Alle abwählen'; String get planProductsDeselectAll => 'Alle abwählen';
@override
String get recentScans => 'Letzte Scans';
@override
String get seeAllScans => 'Alle';
@override
String get productJobHistoryTitle => 'Scan-Verlauf';
@override
String get jobTypeReceipt => 'Kassenbon';
@override
String get jobTypeProducts => 'Produkte';
@override
String get scanSubmitting => 'Wird gesendet...';
@override
String get processingProducts => 'Verarbeitung...';
} }

View File

@@ -452,4 +452,25 @@ class AppLocalizationsEn extends AppLocalizations {
@override @override
String get planProductsDeselectAll => 'Deselect all'; String get planProductsDeselectAll => 'Deselect all';
@override
String get recentScans => 'Recent scans';
@override
String get seeAllScans => 'See all';
@override
String get productJobHistoryTitle => 'Scan history';
@override
String get jobTypeReceipt => 'Receipt';
@override
String get jobTypeProducts => 'Products';
@override
String get scanSubmitting => 'Submitting...';
@override
String get processingProducts => 'Processing...';
} }

View File

@@ -454,4 +454,25 @@ class AppLocalizationsEs extends AppLocalizations {
@override @override
String get planProductsDeselectAll => 'Deseleccionar todo'; String get planProductsDeselectAll => 'Deseleccionar todo';
@override
String get recentScans => 'Escaneos recientes';
@override
String get seeAllScans => 'Ver todos';
@override
String get productJobHistoryTitle => 'Historial de escaneos';
@override
String get jobTypeReceipt => 'Ticket';
@override
String get jobTypeProducts => 'Productos';
@override
String get scanSubmitting => 'Enviando...';
@override
String get processingProducts => 'Procesando...';
} }

View File

@@ -455,4 +455,25 @@ class AppLocalizationsFr extends AppLocalizations {
@override @override
String get planProductsDeselectAll => 'Tout désélectionner'; String get planProductsDeselectAll => 'Tout désélectionner';
@override
String get recentScans => 'Scans récents';
@override
String get seeAllScans => 'Tout voir';
@override
String get productJobHistoryTitle => 'Historique des scans';
@override
String get jobTypeReceipt => 'Reçu';
@override
String get jobTypeProducts => 'Produits';
@override
String get scanSubmitting => 'Envoi...';
@override
String get processingProducts => 'Traitement...';
} }

View File

@@ -453,4 +453,25 @@ class AppLocalizationsHi extends AppLocalizations {
@override @override
String get planProductsDeselectAll => 'सभी हटाएं'; String get planProductsDeselectAll => 'सभी हटाएं';
@override
String get recentScans => 'हाल के स्कैन';
@override
String get seeAllScans => 'सभी देखें';
@override
String get productJobHistoryTitle => 'स्कैन इतिहास';
@override
String get jobTypeReceipt => 'रसीद';
@override
String get jobTypeProducts => 'उत्पाद';
@override
String get scanSubmitting => 'सबमिट हो रहा है...';
@override
String get processingProducts => 'प्रोसेस हो रहा है...';
} }

View File

@@ -454,4 +454,25 @@ class AppLocalizationsIt extends AppLocalizations {
@override @override
String get planProductsDeselectAll => 'Deseleziona tutto'; String get planProductsDeselectAll => 'Deseleziona tutto';
@override
String get recentScans => 'Scansioni recenti';
@override
String get seeAllScans => 'Vedi tutto';
@override
String get productJobHistoryTitle => 'Cronologia scansioni';
@override
String get jobTypeReceipt => 'Scontrino';
@override
String get jobTypeProducts => 'Prodotti';
@override
String get scanSubmitting => 'Invio...';
@override
String get processingProducts => 'Elaborazione...';
} }

View File

@@ -449,4 +449,25 @@ class AppLocalizationsJa extends AppLocalizations {
@override @override
String get planProductsDeselectAll => 'すべて解除'; String get planProductsDeselectAll => 'すべて解除';
@override
String get recentScans => '最近のスキャン';
@override
String get seeAllScans => 'すべて表示';
@override
String get productJobHistoryTitle => 'スキャン履歴';
@override
String get jobTypeReceipt => 'レシート';
@override
String get jobTypeProducts => '商品';
@override
String get scanSubmitting => '送信中...';
@override
String get processingProducts => '処理中...';
} }

View File

@@ -449,4 +449,25 @@ class AppLocalizationsKo extends AppLocalizations {
@override @override
String get planProductsDeselectAll => '모두 해제'; String get planProductsDeselectAll => '모두 해제';
@override
String get recentScans => '최근 스캔';
@override
String get seeAllScans => '전체 보기';
@override
String get productJobHistoryTitle => '스캔 기록';
@override
String get jobTypeReceipt => '영수증';
@override
String get jobTypeProducts => '제품';
@override
String get scanSubmitting => '제출 중...';
@override
String get processingProducts => '처리 중...';
} }

View File

@@ -454,4 +454,25 @@ class AppLocalizationsPt extends AppLocalizations {
@override @override
String get planProductsDeselectAll => 'Desmarcar tudo'; String get planProductsDeselectAll => 'Desmarcar tudo';
@override
String get recentScans => 'Scans recentes';
@override
String get seeAllScans => 'Ver tudo';
@override
String get productJobHistoryTitle => 'Histórico de scans';
@override
String get jobTypeReceipt => 'Recibo';
@override
String get jobTypeProducts => 'Produtos';
@override
String get scanSubmitting => 'Enviando...';
@override
String get processingProducts => 'Processando...';
} }

View File

@@ -452,4 +452,25 @@ class AppLocalizationsRu extends AppLocalizations {
@override @override
String get planProductsDeselectAll => 'Снять всё'; String get planProductsDeselectAll => 'Снять всё';
@override
String get recentScans => 'Последние сканирования';
@override
String get seeAllScans => 'Все';
@override
String get productJobHistoryTitle => 'История сканирования';
@override
String get jobTypeReceipt => 'Чек';
@override
String get jobTypeProducts => 'Продукты';
@override
String get scanSubmitting => 'Отправка...';
@override
String get processingProducts => 'Обработка...';
} }

View File

@@ -448,4 +448,25 @@ class AppLocalizationsZh extends AppLocalizations {
@override @override
String get planProductsDeselectAll => '取消全选'; String get planProductsDeselectAll => '取消全选';
@override
String get recentScans => '最近扫描';
@override
String get seeAllScans => '全部';
@override
String get productJobHistoryTitle => '扫描历史';
@override
String get jobTypeReceipt => '收据';
@override
String get jobTypeProducts => '产品';
@override
String get scanSubmitting => '提交中...';
@override
String get processingProducts => '处理中...';
} }

View File

@@ -165,5 +165,12 @@
"planProductsSkip": "Pular seleção de produtos", "planProductsSkip": "Pular seleção de produtos",
"planProductsSkipNoProducts": "Planejar sem produtos", "planProductsSkipNoProducts": "Planejar sem produtos",
"planProductsSelectAll": "Selecionar tudo", "planProductsSelectAll": "Selecionar tudo",
"planProductsDeselectAll": "Desmarcar tudo" "planProductsDeselectAll": "Desmarcar tudo",
"recentScans": "Scans recentes",
"seeAllScans": "Ver tudo",
"productJobHistoryTitle": "Histórico de scans",
"jobTypeReceipt": "Recibo",
"jobTypeProducts": "Produtos",
"scanSubmitting": "Enviando...",
"processingProducts": "Processando..."
} }

View File

@@ -165,5 +165,12 @@
"planProductsSkip": "Пропустить выбор продуктов", "planProductsSkip": "Пропустить выбор продуктов",
"planProductsSkipNoProducts": "Планировать без продуктов", "planProductsSkipNoProducts": "Планировать без продуктов",
"planProductsSelectAll": "Выбрать все", "planProductsSelectAll": "Выбрать все",
"planProductsDeselectAll": "Снять всё" "planProductsDeselectAll": "Снять всё",
"recentScans": "Последние сканирования",
"seeAllScans": "Все",
"productJobHistoryTitle": "История сканирования",
"jobTypeReceipt": "Чек",
"jobTypeProducts": "Продукты",
"scanSubmitting": "Отправка...",
"processingProducts": "Обработка..."
} }

View File

@@ -165,5 +165,12 @@
"planProductsSkip": "跳过食材选择", "planProductsSkip": "跳过食材选择",
"planProductsSkipNoProducts": "不选食材直接规划", "planProductsSkipNoProducts": "不选食材直接规划",
"planProductsSelectAll": "全选", "planProductsSelectAll": "全选",
"planProductsDeselectAll": "取消全选" "planProductsDeselectAll": "取消全选",
"recentScans": "最近扫描",
"seeAllScans": "全部",
"productJobHistoryTitle": "扫描历史",
"jobTypeReceipt": "收据",
"jobTypeProducts": "产品",
"scanSubmitting": "提交中...",
"processingProducts": "处理中..."
} }