feat: async dish recognition (Kafka/Watermill/SSE) + remove Wire + consolidate migrations
Async recognition pipeline:
- POST /ai/recognize-dish → 202 {job_id, queue_position, estimated_seconds}
- GET /ai/jobs/{id}/stream — SSE stream: queued → processing → done/failed
- Kafka topics: ai.recognize.paid (3 partitions) + ai.recognize.free (1 partition)
- 5-worker WorkerPool with priority loop (paid consumers first)
- SSEBroker via PostgreSQL LISTEN/NOTIFY
- Kafka adapter migrated from franz-go to Watermill (watermill-kafka/v2)
- Docker Compose: added Kafka + Zookeeper + kafka-init service
- Flutter: recognition_service.dart uses SSE; home_screen shows live job status
Remove google/wire (archived):
- Deleted wire.go (wireinject spec) and wire_gen.go
- Added cmd/server/init.go — plain Go manual DI, same initApp() logic
- Removed github.com/google/wire from go.mod
Consolidate migrations:
- Merged 001_initial_schema + 002_seed_data + 003_recognition_jobs into single 001_initial_schema.sql
- Deleted 002_seed_data.sql and 003_recognition_jobs.sql
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
27
backend/cmd/server/app.go
Normal file
27
backend/cmd/server/app.go
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/food-ai/backend/internal/domain/recognition"
|
||||||
|
)
|
||||||
|
|
||||||
|
// App bundles the HTTP handler with background services that need lifecycle management.
|
||||||
|
type App struct {
|
||||||
|
handler http.Handler
|
||||||
|
workerPool *recognition.WorkerPool
|
||||||
|
sseBroker *recognition.SSEBroker
|
||||||
|
}
|
||||||
|
|
||||||
|
// ServeHTTP implements http.Handler.
|
||||||
|
func (application *App) ServeHTTP(responseWriter http.ResponseWriter, request *http.Request) {
|
||||||
|
application.handler.ServeHTTP(responseWriter, request)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start launches the SSE broker's LISTEN loop and the worker pool goroutines.
|
||||||
|
// Call this once before the HTTP server begins accepting connections.
|
||||||
|
func (application *App) Start(applicationContext context.Context) {
|
||||||
|
application.sseBroker.Start(applicationContext)
|
||||||
|
application.workerPool.Start(applicationContext)
|
||||||
|
}
|
||||||
@@ -1,15 +1,8 @@
|
|||||||
// Code generated by Wire. DO NOT EDIT.
|
|
||||||
|
|
||||||
//go:generate go run -mod=mod github.com/google/wire/cmd/wire
|
|
||||||
//go:build !wireinject
|
|
||||||
// +build !wireinject
|
|
||||||
|
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/food-ai/backend/internal/domain/auth"
|
|
||||||
"github.com/food-ai/backend/internal/adapters/firebase"
|
"github.com/food-ai/backend/internal/adapters/firebase"
|
||||||
"github.com/food-ai/backend/internal/infra/config"
|
"github.com/food-ai/backend/internal/domain/auth"
|
||||||
"github.com/food-ai/backend/internal/domain/diary"
|
"github.com/food-ai/backend/internal/domain/diary"
|
||||||
"github.com/food-ai/backend/internal/domain/dish"
|
"github.com/food-ai/backend/internal/domain/dish"
|
||||||
"github.com/food-ai/backend/internal/domain/home"
|
"github.com/food-ai/backend/internal/domain/home"
|
||||||
@@ -21,42 +14,60 @@ import (
|
|||||||
"github.com/food-ai/backend/internal/domain/recommendation"
|
"github.com/food-ai/backend/internal/domain/recommendation"
|
||||||
"github.com/food-ai/backend/internal/domain/savedrecipe"
|
"github.com/food-ai/backend/internal/domain/savedrecipe"
|
||||||
"github.com/food-ai/backend/internal/domain/user"
|
"github.com/food-ai/backend/internal/domain/user"
|
||||||
|
"github.com/food-ai/backend/internal/infra/config"
|
||||||
"github.com/jackc/pgx/v5/pgxpool"
|
"github.com/jackc/pgx/v5/pgxpool"
|
||||||
"net/http"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// Injectors from wire.go:
|
func initApp(appConfig *config.Config, pool *pgxpool.Pool) (*App, error) {
|
||||||
|
credentialsFile := newFirebaseCredentialsFile(appConfig)
|
||||||
func initRouter(appConfig *config.Config, pool *pgxpool.Pool) (http.Handler, error) {
|
tokenVerifier, firebaseError := firebase.NewAuthOrNoop(credentialsFile)
|
||||||
string2 := newFirebaseCredentialsFile(appConfig)
|
if firebaseError != nil {
|
||||||
tokenVerifier, err := firebase.NewAuthOrNoop(string2)
|
return nil, firebaseError
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
}
|
||||||
repository := user.NewRepository(pool)
|
userRepository := user.NewRepository(pool)
|
||||||
mainJwtSecret := newJWTSecret(appConfig)
|
mainJwtSecret := newJWTSecret(appConfig)
|
||||||
mainJwtAccessDuration := newJWTAccessDuration(appConfig)
|
mainJwtAccessDuration := newJWTAccessDuration(appConfig)
|
||||||
mainJwtRefreshDuration := newJWTRefreshDuration(appConfig)
|
mainJwtRefreshDuration := newJWTRefreshDuration(appConfig)
|
||||||
jwtManager := newJWTManager(mainJwtSecret, mainJwtAccessDuration, mainJwtRefreshDuration)
|
jwtManager := newJWTManager(mainJwtSecret, mainJwtAccessDuration, mainJwtRefreshDuration)
|
||||||
service := auth.NewService(tokenVerifier, repository, jwtManager)
|
authService := auth.NewService(tokenVerifier, userRepository, jwtManager)
|
||||||
handler := auth.NewHandler(service)
|
authHandler := auth.NewHandler(authService)
|
||||||
userService := user.NewService(repository)
|
userService := user.NewService(userRepository)
|
||||||
userHandler := user.NewHandler(userService)
|
userHandler := user.NewHandler(userService)
|
||||||
mainGeminiAPIKey := newOpenAIAPIKey(appConfig)
|
mainGeminiAPIKey := newOpenAIAPIKey(appConfig)
|
||||||
client := newOpenAIClient(mainGeminiAPIKey)
|
openaiClient := newOpenAIClient(mainGeminiAPIKey)
|
||||||
mainPexelsAPIKey := newPexelsAPIKey(appConfig)
|
mainPexelsAPIKey := newPexelsAPIKey(appConfig)
|
||||||
pexelsClient := newPexelsClient(mainPexelsAPIKey)
|
pexelsClient := newPexelsClient(mainPexelsAPIKey)
|
||||||
productRepository := product.NewRepository(pool)
|
productRepository := product.NewRepository(pool)
|
||||||
recommendationHandler := recommendation.NewHandler(client, pexelsClient, repository, productRepository)
|
recommendationHandler := recommendation.NewHandler(openaiClient, pexelsClient, userRepository, productRepository)
|
||||||
dishRepository := dish.NewRepository(pool)
|
dishRepository := dish.NewRepository(pool)
|
||||||
savedrecipeRepository := savedrecipe.NewRepository(pool, dishRepository)
|
savedrecipeRepository := savedrecipe.NewRepository(pool, dishRepository)
|
||||||
savedrecipeHandler := savedrecipe.NewHandler(savedrecipeRepository)
|
savedrecipeHandler := savedrecipe.NewHandler(savedrecipeRepository)
|
||||||
ingredientRepository := ingredient.NewRepository(pool)
|
ingredientRepository := ingredient.NewRepository(pool)
|
||||||
ingredientHandler := ingredient.NewHandler(ingredientRepository)
|
ingredientHandler := ingredient.NewHandler(ingredientRepository)
|
||||||
productHandler := product.NewHandler(productRepository)
|
productHandler := product.NewHandler(productRepository)
|
||||||
recognitionHandler := recognition.NewHandler(client, ingredientRepository, dishRepository)
|
|
||||||
|
// Kafka producer and consumers
|
||||||
|
kafkaProducer, kafkaProducerError := newKafkaProducer(appConfig)
|
||||||
|
if kafkaProducerError != nil {
|
||||||
|
return nil, kafkaProducerError
|
||||||
|
}
|
||||||
|
paidConsumer, paidConsumerError := newPaidKafkaConsumer(appConfig)
|
||||||
|
if paidConsumerError != nil {
|
||||||
|
return nil, paidConsumerError
|
||||||
|
}
|
||||||
|
freeConsumer, freeConsumerError := newFreeKafkaConsumer(appConfig)
|
||||||
|
if freeConsumerError != nil {
|
||||||
|
return nil, freeConsumerError
|
||||||
|
}
|
||||||
|
|
||||||
|
// Recognition pipeline
|
||||||
|
jobRepository := recognition.NewJobRepository(pool)
|
||||||
|
sseBroker := recognition.NewSSEBroker(pool, jobRepository)
|
||||||
|
workerPool := recognition.NewWorkerPool(jobRepository, openaiClient, dishRepository, paidConsumer, freeConsumer)
|
||||||
|
recognitionHandler := recognition.NewHandler(openaiClient, ingredientRepository, jobRepository, kafkaProducer, sseBroker)
|
||||||
|
|
||||||
menuRepository := menu.NewRepository(pool)
|
menuRepository := menu.NewRepository(pool)
|
||||||
menuHandler := menu.NewHandler(menuRepository, client, pexelsClient, repository, productRepository, dishRepository)
|
menuHandler := menu.NewHandler(menuRepository, openaiClient, pexelsClient, userRepository, productRepository, dishRepository)
|
||||||
diaryRepository := diary.NewRepository(pool)
|
diaryRepository := diary.NewRepository(pool)
|
||||||
diaryHandler := diary.NewHandler(diaryRepository, dishRepository, dishRepository)
|
diaryHandler := diary.NewHandler(diaryRepository, dishRepository, dishRepository)
|
||||||
homeHandler := home.NewHandler(pool)
|
homeHandler := home.NewHandler(pool)
|
||||||
@@ -64,11 +75,34 @@ func initRouter(appConfig *config.Config, pool *pgxpool.Pool) (http.Handler, err
|
|||||||
recipeRepository := recipe.NewRepository(pool)
|
recipeRepository := recipe.NewRepository(pool)
|
||||||
recipeHandler := recipe.NewHandler(recipeRepository)
|
recipeHandler := recipe.NewHandler(recipeRepository)
|
||||||
mainJwtAdapter := newJWTAdapter(jwtManager)
|
mainJwtAdapter := newJWTAdapter(jwtManager)
|
||||||
v := newAuthMiddleware(mainJwtAdapter)
|
authMiddlewareFn := newAuthMiddleware(mainJwtAdapter)
|
||||||
mainAllowedOrigins := newAllowedOrigins(appConfig)
|
mainAllowedOrigins := newAllowedOrigins(appConfig)
|
||||||
mainUnitsListHandler := newUnitsListHandler(pool)
|
mainUnitsListHandler := newUnitsListHandler(pool)
|
||||||
mainCuisineListHandler := newCuisineListHandler(pool)
|
mainCuisineListHandler := newCuisineListHandler(pool)
|
||||||
mainTagListHandler := newTagListHandler(pool)
|
mainTagListHandler := newTagListHandler(pool)
|
||||||
httpHandler := newRouter(pool, handler, userHandler, recommendationHandler, savedrecipeHandler, ingredientHandler, productHandler, recognitionHandler, menuHandler, diaryHandler, homeHandler, dishHandler, recipeHandler, v, mainAllowedOrigins, mainUnitsListHandler, mainCuisineListHandler, mainTagListHandler)
|
httpHandler := newRouter(
|
||||||
return httpHandler, nil
|
pool,
|
||||||
|
authHandler,
|
||||||
|
userHandler,
|
||||||
|
recommendationHandler,
|
||||||
|
savedrecipeHandler,
|
||||||
|
ingredientHandler,
|
||||||
|
productHandler,
|
||||||
|
recognitionHandler,
|
||||||
|
menuHandler,
|
||||||
|
diaryHandler,
|
||||||
|
homeHandler,
|
||||||
|
dishHandler,
|
||||||
|
recipeHandler,
|
||||||
|
authMiddlewareFn,
|
||||||
|
mainAllowedOrigins,
|
||||||
|
mainUnitsListHandler,
|
||||||
|
mainCuisineListHandler,
|
||||||
|
mainTagListHandler,
|
||||||
|
)
|
||||||
|
return &App{
|
||||||
|
handler: httpHandler,
|
||||||
|
workerPool: workerPool,
|
||||||
|
sseBroker: sseBroker,
|
||||||
|
}, nil
|
||||||
}
|
}
|
||||||
@@ -48,14 +48,16 @@ func run() error {
|
|||||||
}
|
}
|
||||||
slog.Info("languages loaded", "count", len(locale.Languages))
|
slog.Info("languages loaded", "count", len(locale.Languages))
|
||||||
|
|
||||||
router, initError := initRouter(appConfig, pool)
|
application, initError := initApp(appConfig, pool)
|
||||||
if initError != nil {
|
if initError != nil {
|
||||||
return fmt.Errorf("init router: %w", initError)
|
return fmt.Errorf("init app: %w", initError)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
application.Start(applicationContext)
|
||||||
|
|
||||||
httpServer := &http.Server{
|
httpServer := &http.Server{
|
||||||
Addr: fmt.Sprintf(":%d", appConfig.Port),
|
Addr: fmt.Sprintf(":%d", appConfig.Port),
|
||||||
Handler: router,
|
Handler: application,
|
||||||
ReadTimeout: 10 * time.Second,
|
ReadTimeout: 10 * time.Second,
|
||||||
WriteTimeout: 120 * time.Second, // menu generation can take ~60s
|
WriteTimeout: 120 * time.Second, // menu generation can take ~60s
|
||||||
IdleTimeout: 60 * time.Second,
|
IdleTimeout: 60 * time.Second,
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ import (
|
|||||||
"github.com/food-ai/backend/internal/infra/config"
|
"github.com/food-ai/backend/internal/infra/config"
|
||||||
"github.com/food-ai/backend/internal/domain/diary"
|
"github.com/food-ai/backend/internal/domain/diary"
|
||||||
"github.com/food-ai/backend/internal/domain/dish"
|
"github.com/food-ai/backend/internal/domain/dish"
|
||||||
|
"github.com/food-ai/backend/internal/adapters/kafka"
|
||||||
"github.com/food-ai/backend/internal/adapters/openai"
|
"github.com/food-ai/backend/internal/adapters/openai"
|
||||||
"github.com/food-ai/backend/internal/domain/home"
|
"github.com/food-ai/backend/internal/domain/home"
|
||||||
"github.com/food-ai/backend/internal/domain/ingredient"
|
"github.com/food-ai/backend/internal/domain/ingredient"
|
||||||
@@ -188,17 +189,35 @@ func newAuthMiddleware(validator middleware.AccessTokenValidator) func(http.Hand
|
|||||||
return middleware.Auth(validator)
|
return middleware.Auth(validator)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Kafka providers
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
func newKafkaProducer(appConfig *config.Config) (*kafka.Producer, error) {
|
||||||
|
return kafka.NewProducer(appConfig.KafkaBrokers)
|
||||||
|
}
|
||||||
|
|
||||||
|
func newPaidKafkaConsumer(appConfig *config.Config) (*kafka.Consumer, error) {
|
||||||
|
return kafka.NewConsumer(appConfig.KafkaBrokers, "dish-recognition-workers", recognition.TopicPaid)
|
||||||
|
}
|
||||||
|
|
||||||
|
func newFreeKafkaConsumer(appConfig *config.Config) (*kafka.Consumer, error) {
|
||||||
|
return kafka.NewConsumer(appConfig.KafkaBrokers, "dish-recognition-workers", recognition.TopicFree)
|
||||||
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// Interface assertions (compile-time checks)
|
// Interface assertions (compile-time checks)
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
var _ middleware.AccessTokenValidator = (*jwtAdapter)(nil)
|
var _ middleware.AccessTokenValidator = (*jwtAdapter)(nil)
|
||||||
var _ menu.PhotoSearcher = (*pexels.Client)(nil)
|
var _ menu.PhotoSearcher = (*pexels.Client)(nil)
|
||||||
var _ menu.UserLoader = (*user.Repository)(nil)
|
var _ menu.UserLoader = (*user.Repository)(nil)
|
||||||
var _ menu.ProductLister = (*product.Repository)(nil)
|
var _ menu.ProductLister = (*product.Repository)(nil)
|
||||||
var _ menu.RecipeSaver = (*dish.Repository)(nil)
|
var _ menu.RecipeSaver = (*dish.Repository)(nil)
|
||||||
var _ recommendation.PhotoSearcher = (*pexels.Client)(nil)
|
var _ recommendation.PhotoSearcher = (*pexels.Client)(nil)
|
||||||
var _ recommendation.UserLoader = (*user.Repository)(nil)
|
var _ recommendation.UserLoader = (*user.Repository)(nil)
|
||||||
var _ recommendation.ProductLister = (*product.Repository)(nil)
|
var _ recommendation.ProductLister = (*product.Repository)(nil)
|
||||||
var _ recognition.IngredientRepository = (*ingredient.Repository)(nil)
|
var _ recognition.IngredientRepository = (*ingredient.Repository)(nil)
|
||||||
var _ user.UserRepository = (*user.Repository)(nil)
|
var _ recognition.KafkaPublisher = (*kafka.Producer)(nil)
|
||||||
|
var _ recognition.JobRepository = (*recognition.PostgresJobRepository)(nil)
|
||||||
|
var _ user.UserRepository = (*user.Repository)(nil)
|
||||||
|
|||||||
@@ -1,116 +0,0 @@
|
|||||||
//go:build wireinject
|
|
||||||
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net/http"
|
|
||||||
|
|
||||||
"github.com/food-ai/backend/internal/domain/auth"
|
|
||||||
"github.com/food-ai/backend/internal/adapters/firebase"
|
|
||||||
"github.com/food-ai/backend/internal/infra/config"
|
|
||||||
"github.com/food-ai/backend/internal/domain/diary"
|
|
||||||
"github.com/food-ai/backend/internal/domain/dish"
|
|
||||||
"github.com/food-ai/backend/internal/domain/home"
|
|
||||||
"github.com/food-ai/backend/internal/domain/ingredient"
|
|
||||||
"github.com/food-ai/backend/internal/domain/menu"
|
|
||||||
"github.com/food-ai/backend/internal/infra/middleware"
|
|
||||||
"github.com/food-ai/backend/internal/adapters/openai"
|
|
||||||
"github.com/food-ai/backend/internal/adapters/pexels"
|
|
||||||
"github.com/food-ai/backend/internal/domain/product"
|
|
||||||
"github.com/food-ai/backend/internal/domain/recipe"
|
|
||||||
"github.com/food-ai/backend/internal/domain/recognition"
|
|
||||||
"github.com/food-ai/backend/internal/domain/recommendation"
|
|
||||||
"github.com/food-ai/backend/internal/domain/savedrecipe"
|
|
||||||
"github.com/food-ai/backend/internal/domain/user"
|
|
||||||
"github.com/google/wire"
|
|
||||||
"github.com/jackc/pgx/v5/pgxpool"
|
|
||||||
)
|
|
||||||
|
|
||||||
func initRouter(appConfig *config.Config, pool *pgxpool.Pool) (http.Handler, error) {
|
|
||||||
wire.Build(
|
|
||||||
// Config extractors
|
|
||||||
newOpenAIAPIKey,
|
|
||||||
newPexelsAPIKey,
|
|
||||||
newJWTSecret,
|
|
||||||
newJWTAccessDuration,
|
|
||||||
newJWTRefreshDuration,
|
|
||||||
newAllowedOrigins,
|
|
||||||
newFirebaseCredentialsFile,
|
|
||||||
|
|
||||||
// Auth
|
|
||||||
firebase.NewAuthOrNoop,
|
|
||||||
newJWTManager,
|
|
||||||
newJWTAdapter,
|
|
||||||
newAuthMiddleware,
|
|
||||||
auth.NewService,
|
|
||||||
auth.NewHandler,
|
|
||||||
|
|
||||||
// User
|
|
||||||
user.NewRepository,
|
|
||||||
user.NewService,
|
|
||||||
user.NewHandler,
|
|
||||||
|
|
||||||
// External clients
|
|
||||||
newOpenAIClient,
|
|
||||||
newPexelsClient,
|
|
||||||
|
|
||||||
// Ingredient
|
|
||||||
ingredient.NewRepository,
|
|
||||||
ingredient.NewHandler,
|
|
||||||
|
|
||||||
// Product
|
|
||||||
product.NewRepository,
|
|
||||||
product.NewHandler,
|
|
||||||
|
|
||||||
// Dish
|
|
||||||
dish.NewRepository,
|
|
||||||
dish.NewHandler,
|
|
||||||
|
|
||||||
// Recipe
|
|
||||||
recipe.NewRepository,
|
|
||||||
recipe.NewHandler,
|
|
||||||
|
|
||||||
// Saved recipes
|
|
||||||
savedrecipe.NewRepository,
|
|
||||||
savedrecipe.NewHandler,
|
|
||||||
|
|
||||||
// Menu
|
|
||||||
menu.NewRepository,
|
|
||||||
menu.NewHandler,
|
|
||||||
|
|
||||||
// Diary
|
|
||||||
diary.NewRepository,
|
|
||||||
diary.NewHandler,
|
|
||||||
|
|
||||||
// Home
|
|
||||||
home.NewHandler,
|
|
||||||
|
|
||||||
// Recognition & Recommendation
|
|
||||||
recognition.NewHandler,
|
|
||||||
recommendation.NewHandler,
|
|
||||||
|
|
||||||
// List handlers (DB-backed, injected into router)
|
|
||||||
newUnitsListHandler,
|
|
||||||
newCuisineListHandler,
|
|
||||||
newTagListHandler,
|
|
||||||
|
|
||||||
// Router
|
|
||||||
newRouter,
|
|
||||||
|
|
||||||
// Interface bindings
|
|
||||||
wire.Bind(new(user.UserRepository), new(*user.Repository)),
|
|
||||||
wire.Bind(new(menu.PhotoSearcher), new(*pexels.Client)),
|
|
||||||
wire.Bind(new(menu.UserLoader), new(*user.Repository)),
|
|
||||||
wire.Bind(new(menu.ProductLister), new(*product.Repository)),
|
|
||||||
wire.Bind(new(menu.RecipeSaver), new(*dish.Repository)),
|
|
||||||
wire.Bind(new(recommendation.PhotoSearcher), new(*pexels.Client)),
|
|
||||||
wire.Bind(new(recommendation.UserLoader), new(*user.Repository)),
|
|
||||||
wire.Bind(new(recommendation.ProductLister), new(*product.Repository)),
|
|
||||||
wire.Bind(new(recognition.IngredientRepository), new(*ingredient.Repository)),
|
|
||||||
wire.Bind(new(recognition.Recognizer), new(*openai.Client)),
|
|
||||||
wire.Bind(new(menu.MenuGenerator), new(*openai.Client)),
|
|
||||||
wire.Bind(new(recommendation.RecipeGenerator), new(*openai.Client)),
|
|
||||||
wire.Bind(new(middleware.AccessTokenValidator), new(*jwtAdapter)),
|
|
||||||
)
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
@@ -1,4 +1,36 @@
|
|||||||
services:
|
services:
|
||||||
|
kafka:
|
||||||
|
image: confluentinc/cp-kafka:7.6.0
|
||||||
|
environment:
|
||||||
|
KAFKA_NODE_ID: 1
|
||||||
|
KAFKA_PROCESS_ROLES: broker,controller
|
||||||
|
KAFKA_LISTENERS: PLAINTEXT://:9092,CONTROLLER://:9093
|
||||||
|
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092
|
||||||
|
KAFKA_CONTROLLER_QUORUM_VOTERS: 1@kafka:9093
|
||||||
|
KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
|
||||||
|
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONTROLLER:PLAINTEXT
|
||||||
|
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
|
||||||
|
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
|
||||||
|
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
|
||||||
|
CLUSTER_ID: "MkU3OEVBNTcwNTJENDM2Qg"
|
||||||
|
ports:
|
||||||
|
- "9092:9092"
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "kafka-topics --bootstrap-server localhost:9092 --list || exit 1"]
|
||||||
|
interval: 10s
|
||||||
|
timeout: 10s
|
||||||
|
retries: 10
|
||||||
|
|
||||||
|
kafka-init:
|
||||||
|
image: confluentinc/cp-kafka:7.6.0
|
||||||
|
depends_on:
|
||||||
|
kafka:
|
||||||
|
condition: service_healthy
|
||||||
|
entrypoint: ["/bin/sh", "-c"]
|
||||||
|
command: |
|
||||||
|
"kafka-topics --bootstrap-server kafka:9092 --create --if-not-exists --topic ai.recognize.paid --partitions 3 --replication-factor 1 &&
|
||||||
|
kafka-topics --bootstrap-server kafka:9092 --create --if-not-exists --topic ai.recognize.free --partitions 1 --replication-factor 1"
|
||||||
|
|
||||||
postgres:
|
postgres:
|
||||||
image: postgres:16-alpine
|
image: postgres:16-alpine
|
||||||
environment:
|
environment:
|
||||||
@@ -26,9 +58,12 @@ services:
|
|||||||
FIREBASE_CREDENTIALS_FILE: /app/firebase-credentials.json
|
FIREBASE_CREDENTIALS_FILE: /app/firebase-credentials.json
|
||||||
JWT_SECRET: local-dev-secret-change-in-prod
|
JWT_SECRET: local-dev-secret-change-in-prod
|
||||||
ALLOWED_ORIGINS: http://localhost:3000,http://localhost:9090
|
ALLOWED_ORIGINS: http://localhost:3000,http://localhost:9090
|
||||||
|
KAFKA_BROKERS: kafka:9092
|
||||||
depends_on:
|
depends_on:
|
||||||
postgres:
|
postgres:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
|
kafka:
|
||||||
|
condition: service_healthy
|
||||||
volumes:
|
volumes:
|
||||||
- ./firebase-credentials.json:/app/firebase-credentials.json:ro
|
- ./firebase-credentials.json:/app/firebase-credentials.json:ro
|
||||||
|
|
||||||
|
|||||||
@@ -4,6 +4,8 @@ go 1.25.5
|
|||||||
|
|
||||||
require (
|
require (
|
||||||
firebase.google.com/go/v4 v4.19.0
|
firebase.google.com/go/v4 v4.19.0
|
||||||
|
github.com/ThreeDotsLabs/watermill v1.5.1
|
||||||
|
github.com/ThreeDotsLabs/watermill-kafka/v2 v2.5.0
|
||||||
github.com/go-chi/chi/v5 v5.2.5
|
github.com/go-chi/chi/v5 v5.2.5
|
||||||
github.com/go-chi/cors v1.2.2
|
github.com/go-chi/cors v1.2.2
|
||||||
github.com/golang-jwt/jwt/v5 v5.3.1
|
github.com/golang-jwt/jwt/v5 v5.3.1
|
||||||
@@ -33,6 +35,7 @@ require (
|
|||||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0 // indirect
|
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0 // indirect
|
||||||
github.com/MicahParks/keyfunc v1.9.0 // indirect
|
github.com/MicahParks/keyfunc v1.9.0 // indirect
|
||||||
github.com/Microsoft/go-winio v0.6.2 // indirect
|
github.com/Microsoft/go-winio v0.6.2 // indirect
|
||||||
|
github.com/Shopify/sarama v1.38.0 // indirect
|
||||||
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
|
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
|
||||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||||
github.com/cncf/xds/go v0.0.0-20251022180443-0feb69152e9f // indirect
|
github.com/cncf/xds/go v0.0.0-20251022180443-0feb69152e9f // indirect
|
||||||
@@ -46,6 +49,9 @@ require (
|
|||||||
github.com/docker/docker v28.5.1+incompatible // indirect
|
github.com/docker/docker v28.5.1+incompatible // indirect
|
||||||
github.com/docker/go-connections v0.6.0 // indirect
|
github.com/docker/go-connections v0.6.0 // indirect
|
||||||
github.com/docker/go-units v0.5.0 // indirect
|
github.com/docker/go-units v0.5.0 // indirect
|
||||||
|
github.com/eapache/go-resiliency v1.3.0 // indirect
|
||||||
|
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 // indirect
|
||||||
|
github.com/eapache/queue v1.1.0 // indirect
|
||||||
github.com/ebitengine/purego v0.8.4 // indirect
|
github.com/ebitengine/purego v0.8.4 // indirect
|
||||||
github.com/envoyproxy/go-control-plane/envoy v1.35.0 // indirect
|
github.com/envoyproxy/go-control-plane/envoy v1.35.0 // indirect
|
||||||
github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect
|
github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect
|
||||||
@@ -56,15 +62,24 @@ require (
|
|||||||
github.com/go-ole/go-ole v1.2.6 // indirect
|
github.com/go-ole/go-ole v1.2.6 // indirect
|
||||||
github.com/golang-jwt/jwt/v4 v4.5.2 // indirect
|
github.com/golang-jwt/jwt/v4 v4.5.2 // indirect
|
||||||
github.com/golang/protobuf v1.5.4 // indirect
|
github.com/golang/protobuf v1.5.4 // indirect
|
||||||
|
github.com/golang/snappy v0.0.4 // indirect
|
||||||
github.com/google/s2a-go v0.1.9 // indirect
|
github.com/google/s2a-go v0.1.9 // indirect
|
||||||
github.com/google/wire v0.7.0 // indirect
|
|
||||||
github.com/googleapis/enterprise-certificate-proxy v0.3.11 // indirect
|
github.com/googleapis/enterprise-certificate-proxy v0.3.11 // indirect
|
||||||
github.com/googleapis/gax-go/v2 v2.17.0 // indirect
|
github.com/googleapis/gax-go/v2 v2.17.0 // indirect
|
||||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.8 // indirect
|
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.8 // indirect
|
||||||
|
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||||
|
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||||
|
github.com/hashicorp/go-uuid v1.0.3 // indirect
|
||||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
||||||
github.com/jackc/puddle/v2 v2.2.2 // indirect
|
github.com/jackc/puddle/v2 v2.2.2 // indirect
|
||||||
github.com/klauspost/compress v1.18.0 // indirect
|
github.com/jcmturner/aescts/v2 v2.0.0 // indirect
|
||||||
|
github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect
|
||||||
|
github.com/jcmturner/gofork v1.7.6 // indirect
|
||||||
|
github.com/jcmturner/gokrb5/v8 v8.4.3 // indirect
|
||||||
|
github.com/jcmturner/rpc/v2 v2.0.3 // indirect
|
||||||
|
github.com/klauspost/compress v1.18.4 // indirect
|
||||||
|
github.com/lithammer/shortuuid/v3 v3.0.7 // indirect
|
||||||
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
|
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
|
||||||
github.com/magiconair/properties v1.8.10 // indirect
|
github.com/magiconair/properties v1.8.10 // indirect
|
||||||
github.com/mfridman/interpolate v0.0.2 // indirect
|
github.com/mfridman/interpolate v0.0.2 // indirect
|
||||||
@@ -76,12 +91,15 @@ require (
|
|||||||
github.com/moby/sys/userns v0.1.0 // indirect
|
github.com/moby/sys/userns v0.1.0 // indirect
|
||||||
github.com/moby/term v0.5.0 // indirect
|
github.com/moby/term v0.5.0 // indirect
|
||||||
github.com/morikuni/aec v1.0.0 // indirect
|
github.com/morikuni/aec v1.0.0 // indirect
|
||||||
|
github.com/oklog/ulid v1.3.1 // indirect
|
||||||
github.com/opencontainers/go-digest v1.0.0 // indirect
|
github.com/opencontainers/go-digest v1.0.0 // indirect
|
||||||
github.com/opencontainers/image-spec v1.1.1 // indirect
|
github.com/opencontainers/image-spec v1.1.1 // indirect
|
||||||
|
github.com/pierrec/lz4/v4 v4.1.25 // indirect
|
||||||
github.com/pkg/errors v0.9.1 // indirect
|
github.com/pkg/errors v0.9.1 // indirect
|
||||||
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
|
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
|
||||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
||||||
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect
|
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect
|
||||||
|
github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect
|
||||||
github.com/sethvargo/go-retry v0.3.0 // indirect
|
github.com/sethvargo/go-retry v0.3.0 // indirect
|
||||||
github.com/shirou/gopsutil/v4 v4.25.6 // indirect
|
github.com/shirou/gopsutil/v4 v4.25.6 // indirect
|
||||||
github.com/sirupsen/logrus v1.9.3 // indirect
|
github.com/sirupsen/logrus v1.9.3 // indirect
|
||||||
@@ -92,6 +110,7 @@ require (
|
|||||||
github.com/yusufpapurcu/wmi v1.2.4 // indirect
|
github.com/yusufpapurcu/wmi v1.2.4 // indirect
|
||||||
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
|
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
|
||||||
go.opentelemetry.io/contrib/detectors/gcp v1.38.0 // indirect
|
go.opentelemetry.io/contrib/detectors/gcp v1.38.0 // indirect
|
||||||
|
go.opentelemetry.io/contrib/instrumentation/github.com/Shopify/sarama/otelsarama v0.31.0 // indirect
|
||||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 // indirect
|
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 // indirect
|
||||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
|
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
|
||||||
go.opentelemetry.io/otel v1.40.0 // indirect
|
go.opentelemetry.io/otel v1.40.0 // indirect
|
||||||
@@ -102,11 +121,11 @@ require (
|
|||||||
go.opentelemetry.io/otel/trace v1.40.0 // indirect
|
go.opentelemetry.io/otel/trace v1.40.0 // indirect
|
||||||
go.opentelemetry.io/proto/otlp v1.9.0 // indirect
|
go.opentelemetry.io/proto/otlp v1.9.0 // indirect
|
||||||
go.uber.org/multierr v1.11.0 // indirect
|
go.uber.org/multierr v1.11.0 // indirect
|
||||||
golang.org/x/crypto v0.47.0 // indirect
|
golang.org/x/crypto v0.48.0 // indirect
|
||||||
golang.org/x/net v0.49.0 // indirect
|
golang.org/x/net v0.49.0 // indirect
|
||||||
golang.org/x/oauth2 v0.35.0 // indirect
|
golang.org/x/oauth2 v0.35.0 // indirect
|
||||||
golang.org/x/sync v0.19.0 // indirect
|
golang.org/x/sync v0.19.0 // indirect
|
||||||
golang.org/x/sys v0.40.0 // indirect
|
golang.org/x/sys v0.41.0 // indirect
|
||||||
golang.org/x/text v0.34.0 // indirect
|
golang.org/x/text v0.34.0 // indirect
|
||||||
golang.org/x/time v0.14.0 // indirect
|
golang.org/x/time v0.14.0 // indirect
|
||||||
google.golang.org/appengine/v2 v2.0.6 // indirect
|
google.golang.org/appengine/v2 v2.0.6 // indirect
|
||||||
|
|||||||
125
backend/go.sum
125
backend/go.sum
@@ -30,6 +30,7 @@ github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6 h1:He8af
|
|||||||
github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8=
|
github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8=
|
||||||
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8=
|
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8=
|
||||||
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
|
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
|
||||||
|
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0 h1:sBEjpZlNHzK1voKq9695PJSX2o5NEXl7/OL3coiIY0c=
|
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0 h1:sBEjpZlNHzK1voKq9695PJSX2o5NEXl7/OL3coiIY0c=
|
||||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0/go.mod h1:P4WPRUkOhJC13W//jWpyfJNDAIpvRbAUIYLX/4jtlE0=
|
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0/go.mod h1:P4WPRUkOhJC13W//jWpyfJNDAIpvRbAUIYLX/4jtlE0=
|
||||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0 h1:owcC2UnmsZycprQ5RfRgjydWhuoxg71LUfyiQdijZuM=
|
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0 h1:owcC2UnmsZycprQ5RfRgjydWhuoxg71LUfyiQdijZuM=
|
||||||
@@ -42,6 +43,16 @@ github.com/MicahParks/keyfunc v1.9.0 h1:lhKd5xrFHLNOWrDc4Tyb/Q1AJ4LCzQ48GVJyVIID
|
|||||||
github.com/MicahParks/keyfunc v1.9.0/go.mod h1:IdnCilugA0O/99dW+/MkvlyrsX8+L8+x95xuVNtM5jw=
|
github.com/MicahParks/keyfunc v1.9.0/go.mod h1:IdnCilugA0O/99dW+/MkvlyrsX8+L8+x95xuVNtM5jw=
|
||||||
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
||||||
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
||||||
|
github.com/Shopify/sarama v1.32.0/go.mod h1:+EmJJKZWVT/faR9RcOxJerP+LId4iWdQPBGLy1Y1Njs=
|
||||||
|
github.com/Shopify/sarama v1.38.0 h1:Q81EWxDT2Xs7kCaaiDGV30GyNCWd6K1Xmd4k2qpTWE8=
|
||||||
|
github.com/Shopify/sarama v1.38.0/go.mod h1:djdek3V4gS0N9LZ+OhfuuM6rE1bEKeDffYY8UvsRNyM=
|
||||||
|
github.com/Shopify/toxiproxy/v2 v2.3.0/go.mod h1:KvQTtB6RjCJY4zqNJn7C7JDFgsG5uoHYDirfUfpIm0c=
|
||||||
|
github.com/Shopify/toxiproxy/v2 v2.5.0 h1:i4LPT+qrSlKNtQf5QliVjdP08GyAH8+BUIc9gT0eahc=
|
||||||
|
github.com/Shopify/toxiproxy/v2 v2.5.0/go.mod h1:yhM2epWtAmel9CB8r2+L+PCmhH6yH2pITaPAo7jxJl0=
|
||||||
|
github.com/ThreeDotsLabs/watermill v1.5.1 h1:t5xMivyf9tpmU3iozPqyrCZXHvoV1XQDfihas4sV0fY=
|
||||||
|
github.com/ThreeDotsLabs/watermill v1.5.1/go.mod h1:Uop10dA3VeJWsSvis9qO3vbVY892LARrKAdki6WtXS4=
|
||||||
|
github.com/ThreeDotsLabs/watermill-kafka/v2 v2.5.0 h1:/KYEjLlLx6nW3jn6AEcwAlWkPWP62zi/sUsEP4uKkZE=
|
||||||
|
github.com/ThreeDotsLabs/watermill-kafka/v2 v2.5.0/go.mod h1:w+9jhI7x5ZP67ceSUIIpkgLzjAakotfHX4sWyqsKVjs=
|
||||||
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
|
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
|
||||||
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
|
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
|
||||||
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
||||||
@@ -58,6 +69,8 @@ github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpS
|
|||||||
github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw=
|
github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw=
|
||||||
github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA=
|
github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA=
|
||||||
github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc=
|
github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc=
|
||||||
|
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
||||||
|
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||||
github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY=
|
github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY=
|
||||||
github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
|
github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
|
||||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
@@ -74,6 +87,13 @@ github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4
|
|||||||
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
|
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
|
||||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||||
|
github.com/eapache/go-resiliency v1.2.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
|
||||||
|
github.com/eapache/go-resiliency v1.3.0 h1:RRL0nge+cWGlxXbUzJ7yMcq6w2XBEr19dCN6HECGaT0=
|
||||||
|
github.com/eapache/go-resiliency v1.3.0/go.mod h1:5yPzW0MIvSe0JDsv0v+DvcjEv2FyD6iZYSs1ZI+iQho=
|
||||||
|
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 h1:YEetp8/yCZMuEPMUDHG0CW/brkkEp8mzqk2+ODEitlw=
|
||||||
|
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
|
||||||
|
github.com/eapache/queue v1.1.0 h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc=
|
||||||
|
github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
|
||||||
github.com/ebitengine/purego v0.8.4 h1:CF7LEKg5FFOsASUj0+QwaXf8Ht6TlFxg09+S9wz0omw=
|
github.com/ebitengine/purego v0.8.4 h1:CF7LEKg5FFOsASUj0+QwaXf8Ht6TlFxg09+S9wz0omw=
|
||||||
github.com/ebitengine/purego v0.8.4/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ=
|
github.com/ebitengine/purego v0.8.4/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ=
|
||||||
github.com/envoyproxy/go-control-plane v0.13.5-0.20251024222203-75eaa193e329 h1:K+fnvUM0VZ7ZFJf0n4L/BRlnsb9pL/GuDG6FqaH+PwM=
|
github.com/envoyproxy/go-control-plane v0.13.5-0.20251024222203-75eaa193e329 h1:K+fnvUM0VZ7ZFJf0n4L/BRlnsb9pL/GuDG6FqaH+PwM=
|
||||||
@@ -86,6 +106,9 @@ github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfU
|
|||||||
github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU=
|
github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU=
|
||||||
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
|
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
|
||||||
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||||
|
github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=
|
||||||
|
github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
|
||||||
|
github.com/frankban/quicktest v1.14.2/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps=
|
||||||
github.com/go-chi/chi/v5 v5.2.5 h1:Eg4myHZBjyvJmAFjFvWgrqDTXFyOzjj7YIm3L3mu6Ug=
|
github.com/go-chi/chi/v5 v5.2.5 h1:Eg4myHZBjyvJmAFjFvWgrqDTXFyOzjj7YIm3L3mu6Ug=
|
||||||
github.com/go-chi/chi/v5 v5.2.5/go.mod h1:X7Gx4mteadT3eDOMTsXzmI4/rwUpOwBHLpAfupzFJP0=
|
github.com/go-chi/chi/v5 v5.2.5/go.mod h1:X7Gx4mteadT3eDOMTsXzmI4/rwUpOwBHLpAfupzFJP0=
|
||||||
github.com/go-chi/cors v1.2.2 h1:Jmey33TE+b+rB7fT8MUy1u0I4L+NARQlK6LhzKPSyQE=
|
github.com/go-chi/cors v1.2.2 h1:Jmey33TE+b+rB7fT8MUy1u0I4L+NARQlK6LhzKPSyQE=
|
||||||
@@ -93,6 +116,7 @@ github.com/go-chi/cors v1.2.2/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vz
|
|||||||
github.com/go-jose/go-jose/v4 v4.1.3 h1:CVLmWDhDVRa6Mi/IgCgaopNosCaHz7zrMeF9MlZRkrs=
|
github.com/go-jose/go-jose/v4 v4.1.3 h1:CVLmWDhDVRa6Mi/IgCgaopNosCaHz7zrMeF9MlZRkrs=
|
||||||
github.com/go-jose/go-jose/v4 v4.1.3/go.mod h1:x4oUasVrzR7071A4TnHLGSPpNOm2a21K9Kf04k1rs08=
|
github.com/go-jose/go-jose/v4 v4.1.3/go.mod h1:x4oUasVrzR7071A4TnHLGSPpNOm2a21K9Kf04k1rs08=
|
||||||
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||||
|
github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||||
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
|
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
|
||||||
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||||
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||||
@@ -107,24 +131,37 @@ github.com/golang-jwt/jwt/v5 v5.3.1/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArs
|
|||||||
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||||
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
|
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
|
||||||
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
|
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
|
||||||
|
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
|
||||||
|
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||||
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||||
|
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
|
||||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||||
github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc=
|
github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc=
|
||||||
github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0=
|
github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0=
|
||||||
github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0=
|
github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0=
|
||||||
github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM=
|
github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM=
|
||||||
|
github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
github.com/google/wire v0.7.0 h1:JxUKI6+CVBgCO2WToKy/nQk0sS+amI9z9EjVmdaocj4=
|
|
||||||
github.com/google/wire v0.7.0/go.mod h1:n6YbUQD9cPKTnHXEBN2DXlOp/mVADhVErcMFb0v3J18=
|
|
||||||
github.com/googleapis/enterprise-certificate-proxy v0.3.11 h1:vAe81Msw+8tKUxi2Dqh/NZMz7475yUvmRIkXr4oN2ao=
|
github.com/googleapis/enterprise-certificate-proxy v0.3.11 h1:vAe81Msw+8tKUxi2Dqh/NZMz7475yUvmRIkXr4oN2ao=
|
||||||
github.com/googleapis/enterprise-certificate-proxy v0.3.11/go.mod h1:RFV7MUdlb7AgEq2v7FmMCfeSMCllAzWxFgRdusoGks8=
|
github.com/googleapis/enterprise-certificate-proxy v0.3.11/go.mod h1:RFV7MUdlb7AgEq2v7FmMCfeSMCllAzWxFgRdusoGks8=
|
||||||
github.com/googleapis/gax-go/v2 v2.17.0 h1:RksgfBpxqff0EZkDWYuz9q/uWsTVz+kf43LsZ1J6SMc=
|
github.com/googleapis/gax-go/v2 v2.17.0 h1:RksgfBpxqff0EZkDWYuz9q/uWsTVz+kf43LsZ1J6SMc=
|
||||||
github.com/googleapis/gax-go/v2 v2.17.0/go.mod h1:mzaqghpQp4JDh3HvADwrat+6M3MOIDp5YKHhb9PAgDY=
|
github.com/googleapis/gax-go/v2 v2.17.0/go.mod h1:mzaqghpQp4JDh3HvADwrat+6M3MOIDp5YKHhb9PAgDY=
|
||||||
|
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
|
||||||
|
github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
|
||||||
|
github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
|
||||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.8 h1:NpbJl/eVbvrGE0MJ6X16X9SAifesl6Fwxg/YmCvubRI=
|
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.8 h1:NpbJl/eVbvrGE0MJ6X16X9SAifesl6Fwxg/YmCvubRI=
|
||||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.8/go.mod h1:mi7YA+gCzVem12exXy46ZespvGtX/lZmD/RLnQhVW7U=
|
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.8/go.mod h1:mi7YA+gCzVem12exXy46ZespvGtX/lZmD/RLnQhVW7U=
|
||||||
|
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
||||||
|
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
|
||||||
|
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
||||||
|
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
|
||||||
|
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
|
||||||
|
github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
||||||
|
github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=
|
||||||
|
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
||||||
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
|
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
|
||||||
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
|
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
|
||||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
|
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
|
||||||
@@ -133,14 +170,36 @@ github.com/jackc/pgx/v5 v5.8.0 h1:TYPDoleBBme0xGSAX3/+NujXXtpZn9HBONkQC7IEZSo=
|
|||||||
github.com/jackc/pgx/v5 v5.8.0/go.mod h1:QVeDInX2m9VyzvNeiCJVjCkNFqzsNb43204HshNSZKw=
|
github.com/jackc/pgx/v5 v5.8.0/go.mod h1:QVeDInX2m9VyzvNeiCJVjCkNFqzsNb43204HshNSZKw=
|
||||||
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
|
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
|
||||||
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
|
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
|
||||||
|
github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8=
|
||||||
|
github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs=
|
||||||
|
github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo=
|
||||||
|
github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM=
|
||||||
|
github.com/jcmturner/gofork v1.0.0/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o=
|
||||||
|
github.com/jcmturner/gofork v1.7.6 h1:QH0l3hzAU1tfT3rZCnW5zXl+orbkNMMRGJfdJjHVETg=
|
||||||
|
github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo=
|
||||||
|
github.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o=
|
||||||
|
github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg=
|
||||||
|
github.com/jcmturner/gokrb5/v8 v8.4.2/go.mod h1:sb+Xq/fTY5yktf/VxLsE3wlfPqQjp0aWNYyvBVK62bc=
|
||||||
|
github.com/jcmturner/gokrb5/v8 v8.4.3 h1:iTonLeSJOn7MVUtyMT+arAn5AKAPrkilzhGw8wE/Tq8=
|
||||||
|
github.com/jcmturner/gokrb5/v8 v8.4.3/go.mod h1:dqRwJGXznQrzw6cWmyo6kH+E7jksEQG/CyVWsJEsJO0=
|
||||||
|
github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY=
|
||||||
|
github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
|
||||||
github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8=
|
github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8=
|
||||||
github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg=
|
github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg=
|
||||||
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
|
github.com/klauspost/compress v1.14.4/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||||
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
|
github.com/klauspost/compress v1.18.4 h1:RPhnKRAQ4Fh8zU2FY/6ZFDwTVTxgJ/EMydqSTzE9a2c=
|
||||||
|
github.com/klauspost/compress v1.18.4/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4=
|
||||||
|
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||||
|
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
|
||||||
|
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
|
||||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||||
|
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||||
|
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||||
|
github.com/lithammer/shortuuid/v3 v3.0.7 h1:trX0KTHy4Pbwo/6ia8fscyHoGA+mf1jWbPJVuvyJQQ8=
|
||||||
|
github.com/lithammer/shortuuid/v3 v3.0.7/go.mod h1:vMk8ke37EmiewwolSO1NLW8vP4ZaKlRuDIi8tWWmAts=
|
||||||
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4=
|
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4=
|
||||||
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
|
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
|
||||||
github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE=
|
github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE=
|
||||||
@@ -169,10 +228,15 @@ github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
|
|||||||
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
|
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
|
||||||
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
|
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
|
||||||
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||||
|
github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=
|
||||||
|
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||||
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
|
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
|
||||||
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
|
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
|
||||||
github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040=
|
github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040=
|
||||||
github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M=
|
github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M=
|
||||||
|
github.com/pierrec/lz4 v2.6.1+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
|
||||||
|
github.com/pierrec/lz4/v4 v4.1.25 h1:kocOqRffaIbU5djlIBr7Wh+cx82C0vtFb0fOurZHqD0=
|
||||||
|
github.com/pierrec/lz4/v4 v4.1.25/go.mod h1:EoQMVJgeeEOMsCqCzqFm2O0cJvljX2nGZjcRIPL34O4=
|
||||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo=
|
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo=
|
||||||
@@ -184,23 +248,35 @@ github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF
|
|||||||
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
|
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
|
||||||
github.com/pressly/goose/v3 v3.26.0 h1:KJakav68jdH0WDvoAcj8+n61WqOIaPGgH0bJWS6jpmM=
|
github.com/pressly/goose/v3 v3.26.0 h1:KJakav68jdH0WDvoAcj8+n61WqOIaPGgH0bJWS6jpmM=
|
||||||
github.com/pressly/goose/v3 v3.26.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
|
github.com/pressly/goose/v3 v3.26.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
|
||||||
|
github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5XpJzTSTfLsJV/mx9Q9g7kxmchpfZyxgzM=
|
||||||
|
github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||||
|
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
|
||||||
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
||||||
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
||||||
|
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||||
github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE=
|
github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE=
|
||||||
github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas=
|
github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas=
|
||||||
github.com/shirou/gopsutil/v4 v4.25.6 h1:kLysI2JsKorfaFPcYmcJqbzROzsBWEOAtw6A7dIfqXs=
|
github.com/shirou/gopsutil/v4 v4.25.6 h1:kLysI2JsKorfaFPcYmcJqbzROzsBWEOAtw6A7dIfqXs=
|
||||||
github.com/shirou/gopsutil/v4 v4.25.6/go.mod h1:PfybzyydfZcN+JMMjkF6Zb8Mq1A/VcogFFg7hj50W9c=
|
github.com/shirou/gopsutil/v4 v4.25.6/go.mod h1:PfybzyydfZcN+JMMjkF6Zb8Mq1A/VcogFFg7hj50W9c=
|
||||||
|
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
||||||
|
github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
|
||||||
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||||
github.com/spiffe/go-spiffe/v2 v2.6.0 h1:l+DolpxNWYgruGQVV0xsfeya3CsC7m8iBzDnMpsbLuo=
|
github.com/spiffe/go-spiffe/v2 v2.6.0 h1:l+DolpxNWYgruGQVV0xsfeya3CsC7m8iBzDnMpsbLuo=
|
||||||
github.com/spiffe/go-spiffe/v2 v2.6.0/go.mod h1:gm2SeUoMZEtpnzPNs2Csc0D/gX33k1xIx7lEzqblHEs=
|
github.com/spiffe/go-spiffe/v2 v2.6.0/go.mod h1:gm2SeUoMZEtpnzPNs2Csc0D/gX33k1xIx7lEzqblHEs=
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
|
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||||
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
|
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
|
||||||
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
|
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
|
||||||
|
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
|
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||||
|
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||||
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||||
github.com/testcontainers/testcontainers-go v0.40.0 h1:pSdJYLOVgLE8YdUY2FHQ1Fxu+aMnb6JfVz1mxk7OeMU=
|
github.com/testcontainers/testcontainers-go v0.40.0 h1:pSdJYLOVgLE8YdUY2FHQ1Fxu+aMnb6JfVz1mxk7OeMU=
|
||||||
@@ -209,6 +285,10 @@ github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFA
|
|||||||
github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
|
github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
|
||||||
github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk=
|
github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk=
|
||||||
github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
|
github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
|
||||||
|
github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI=
|
||||||
|
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
|
||||||
|
github.com/xdg-go/scram v1.1.0/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs=
|
||||||
|
github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM=
|
||||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||||
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
|
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
|
||||||
github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
|
github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
|
||||||
@@ -216,10 +296,13 @@ go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ
|
|||||||
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
|
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
|
||||||
go.opentelemetry.io/contrib/detectors/gcp v1.38.0 h1:ZoYbqX7OaA/TAikspPl3ozPI6iY6LiIY9I8cUfm+pJs=
|
go.opentelemetry.io/contrib/detectors/gcp v1.38.0 h1:ZoYbqX7OaA/TAikspPl3ozPI6iY6LiIY9I8cUfm+pJs=
|
||||||
go.opentelemetry.io/contrib/detectors/gcp v1.38.0/go.mod h1:SU+iU7nu5ud4oCb3LQOhIZ3nRLj6FNVrKgtflbaf2ts=
|
go.opentelemetry.io/contrib/detectors/gcp v1.38.0/go.mod h1:SU+iU7nu5ud4oCb3LQOhIZ3nRLj6FNVrKgtflbaf2ts=
|
||||||
|
go.opentelemetry.io/contrib/instrumentation/github.com/Shopify/sarama/otelsarama v0.31.0 h1:J8jI81RCB7U9a3qsTZXM/38XrvbLJCye6J32bfQctYY=
|
||||||
|
go.opentelemetry.io/contrib/instrumentation/github.com/Shopify/sarama/otelsarama v0.31.0/go.mod h1:72+cPzsW6geApbceSLMbZtYZeGMgtRDw5TcSEsdGlhc=
|
||||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 h1:q4XOmH/0opmeuJtPsbFNivyl7bCt7yRBbeEm2sC/XtQ=
|
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 h1:q4XOmH/0opmeuJtPsbFNivyl7bCt7yRBbeEm2sC/XtQ=
|
||||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo=
|
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo=
|
||||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus=
|
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus=
|
||||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q=
|
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q=
|
||||||
|
go.opentelemetry.io/otel v1.6.1/go.mod h1:blzUabWHkX6LJewxvadmzafgh/wnvBSDBdOuwkAtrWQ=
|
||||||
go.opentelemetry.io/otel v1.40.0 h1:oA5YeOcpRTXq6NN7frwmwFR0Cn3RhTVZvXsP4duvCms=
|
go.opentelemetry.io/otel v1.40.0 h1:oA5YeOcpRTXq6NN7frwmwFR0Cn3RhTVZvXsP4duvCms=
|
||||||
go.opentelemetry.io/otel v1.40.0/go.mod h1:IMb+uXZUKkMXdPddhwAHm6UfOwJyh4ct1ybIlV14J0g=
|
go.opentelemetry.io/otel v1.40.0/go.mod h1:IMb+uXZUKkMXdPddhwAHm6UfOwJyh4ct1ybIlV14J0g=
|
||||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.40.0 h1:QKdN8ly8zEMrByybbQgv8cWBcdAarwmIPZ6FThrWXJs=
|
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.40.0 h1:QKdN8ly8zEMrByybbQgv8cWBcdAarwmIPZ6FThrWXJs=
|
||||||
@@ -234,6 +317,7 @@ go.opentelemetry.io/otel/sdk v1.40.0 h1:KHW/jUzgo6wsPh9At46+h4upjtccTmuZCFAc9OJ7
|
|||||||
go.opentelemetry.io/otel/sdk v1.40.0/go.mod h1:Ph7EFdYvxq72Y8Li9q8KebuYUr2KoeyHx0DRMKrYBUE=
|
go.opentelemetry.io/otel/sdk v1.40.0/go.mod h1:Ph7EFdYvxq72Y8Li9q8KebuYUr2KoeyHx0DRMKrYBUE=
|
||||||
go.opentelemetry.io/otel/sdk/metric v1.40.0 h1:mtmdVqgQkeRxHgRv4qhyJduP3fYJRMX4AtAlbuWdCYw=
|
go.opentelemetry.io/otel/sdk/metric v1.40.0 h1:mtmdVqgQkeRxHgRv4qhyJduP3fYJRMX4AtAlbuWdCYw=
|
||||||
go.opentelemetry.io/otel/sdk/metric v1.40.0/go.mod h1:4Z2bGMf0KSK3uRjlczMOeMhKU2rhUqdWNoKcYrtcBPg=
|
go.opentelemetry.io/otel/sdk/metric v1.40.0/go.mod h1:4Z2bGMf0KSK3uRjlczMOeMhKU2rhUqdWNoKcYrtcBPg=
|
||||||
|
go.opentelemetry.io/otel/trace v1.6.1/go.mod h1:RkFRM1m0puWIq10oxImnGEduNBzxiN7TXluRBtE+5j0=
|
||||||
go.opentelemetry.io/otel/trace v1.40.0 h1:WA4etStDttCSYuhwvEa8OP8I5EWu24lkOzp+ZYblVjw=
|
go.opentelemetry.io/otel/trace v1.40.0 h1:WA4etStDttCSYuhwvEa8OP8I5EWu24lkOzp+ZYblVjw=
|
||||||
go.opentelemetry.io/otel/trace v1.40.0/go.mod h1:zeAhriXecNGP/s2SEG3+Y8X9ujcJOTqQ5RgdEJcawiA=
|
go.opentelemetry.io/otel/trace v1.40.0/go.mod h1:zeAhriXecNGP/s2SEG3+Y8X9ujcJOTqQ5RgdEJcawiA=
|
||||||
go.opentelemetry.io/proto/otlp v1.9.0 h1:l706jCMITVouPOqEnii2fIAuO3IVGBRPV5ICjceRb/A=
|
go.opentelemetry.io/proto/otlp v1.9.0 h1:l706jCMITVouPOqEnii2fIAuO3IVGBRPV5ICjceRb/A=
|
||||||
@@ -241,15 +325,23 @@ go.opentelemetry.io/proto/otlp v1.9.0/go.mod h1:xE+Cx5E/eEHw+ISFkwPLwCZefwVjY+pq
|
|||||||
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
|
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
|
||||||
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
|
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
|
golang.org/x/crypto v0.0.0-20201112155050-0c6587e931a9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||||
golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8=
|
golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||||
golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A=
|
golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||||
|
golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=
|
||||||
|
golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=
|
||||||
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o=
|
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o=
|
||||||
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8=
|
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8=
|
||||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||||
|
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
|
golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||||
|
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||||
|
golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
|
||||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||||
|
golang.org/x/net v0.0.0-20220725212005-46097bf591d3/go.mod h1:AaygXjzTFtRAg2ttMY5RMuhpJ3cNnI0XpyFJD1iQRSM=
|
||||||
golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o=
|
golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o=
|
||||||
golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8=
|
golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8=
|
||||||
golang.org/x/oauth2 v0.35.0 h1:Mv2mzuHuZuY2+bkyWXIHMfhNdJAdwW3FuWeCPYN5GVQ=
|
golang.org/x/oauth2 v0.35.0 h1:Mv2mzuHuZuY2+bkyWXIHMfhNdJAdwW3FuWeCPYN5GVQ=
|
||||||
@@ -259,24 +351,30 @@ golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJ
|
|||||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
|
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ=
|
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
|
||||||
golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||||
golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY=
|
golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg=
|
||||||
golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww=
|
golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM=
|
||||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
|
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
|
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||||
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
|
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
|
||||||
golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
|
golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
|
||||||
@@ -288,6 +386,7 @@ golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtn
|
|||||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
|
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
|
||||||
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
|
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
|
||||||
google.golang.org/api v0.266.0 h1:hco+oNCf9y7DmLeAtHJi/uBAY7n/7XC9mZPxu1ROiyk=
|
google.golang.org/api v0.266.0 h1:hco+oNCf9y7DmLeAtHJi/uBAY7n/7XC9mZPxu1ROiyk=
|
||||||
@@ -307,9 +406,15 @@ google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqw
|
|||||||
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
|
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
|
||||||
google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
|
google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||||
|
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||||
|
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
||||||
|
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q=
|
gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q=
|
||||||
|
|||||||
66
backend/internal/adapters/kafka/consumer.go
Normal file
66
backend/internal/adapters/kafka/consumer.go
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
package kafka
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"log/slog"
|
||||||
|
|
||||||
|
"github.com/ThreeDotsLabs/watermill"
|
||||||
|
wmkafka "github.com/ThreeDotsLabs/watermill-kafka/v2/pkg/kafka"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Consumer wraps a Watermill Kafka subscriber for consuming a single topic within a consumer group.
|
||||||
|
type Consumer struct {
|
||||||
|
subscriber *wmkafka.Subscriber
|
||||||
|
topic string
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewConsumer creates a Consumer subscribed to the given topic within a consumer group.
|
||||||
|
func NewConsumer(brokers []string, groupID, topic string) (*Consumer, error) {
|
||||||
|
subscriber, createError := wmkafka.NewSubscriber(
|
||||||
|
wmkafka.SubscriberConfig{
|
||||||
|
Brokers: brokers,
|
||||||
|
ConsumerGroup: groupID,
|
||||||
|
Unmarshaler: wmkafka.DefaultMarshaler{},
|
||||||
|
OverwriteSaramaConfig: wmkafka.DefaultSaramaSubscriberConfig(),
|
||||||
|
},
|
||||||
|
watermill.NopLogger{},
|
||||||
|
)
|
||||||
|
if createError != nil {
|
||||||
|
return nil, createError
|
||||||
|
}
|
||||||
|
return &Consumer{subscriber: subscriber, topic: topic}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run subscribes to the Kafka topic and writes job IDs to the out channel until runContext is cancelled.
|
||||||
|
// Call this in a dedicated goroutine — it blocks until the context is done.
|
||||||
|
// Each message is Ack'd after its job ID is successfully forwarded to the channel,
|
||||||
|
// or Nack'd when the context is cancelled before forwarding completes.
|
||||||
|
func (consumer *Consumer) Run(runContext context.Context, out chan<- string) {
|
||||||
|
messageChannel, subscribeError := consumer.subscriber.Subscribe(runContext, consumer.topic)
|
||||||
|
if subscribeError != nil {
|
||||||
|
slog.Error("kafka consumer subscribe", "topic", consumer.topic, "err", subscribeError)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case msg, ok := <-messageChannel:
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
select {
|
||||||
|
case out <- string(msg.Payload):
|
||||||
|
msg.Ack()
|
||||||
|
case <-runContext.Done():
|
||||||
|
msg.Nack()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
case <-runContext.Done():
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close shuts down the underlying Kafka subscriber.
|
||||||
|
func (consumer *Consumer) Close() {
|
||||||
|
_ = consumer.subscriber.Close()
|
||||||
|
}
|
||||||
42
backend/internal/adapters/kafka/producer.go
Normal file
42
backend/internal/adapters/kafka/producer.go
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
package kafka
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/ThreeDotsLabs/watermill"
|
||||||
|
"github.com/ThreeDotsLabs/watermill/message"
|
||||||
|
wmkafka "github.com/ThreeDotsLabs/watermill-kafka/v2/pkg/kafka"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Producer wraps a Watermill Kafka publisher for publishing messages to Kafka topics.
|
||||||
|
type Producer struct {
|
||||||
|
publisher message.Publisher
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewProducer creates a Producer connected to the given brokers.
|
||||||
|
func NewProducer(brokers []string) (*Producer, error) {
|
||||||
|
publisher, createError := wmkafka.NewPublisher(
|
||||||
|
wmkafka.PublisherConfig{
|
||||||
|
Brokers: brokers,
|
||||||
|
Marshaler: wmkafka.DefaultMarshaler{},
|
||||||
|
},
|
||||||
|
watermill.NopLogger{},
|
||||||
|
)
|
||||||
|
if createError != nil {
|
||||||
|
return nil, createError
|
||||||
|
}
|
||||||
|
return &Producer{publisher: publisher}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Publish writes a single message to the named topic.
|
||||||
|
// The context parameter is accepted for interface compatibility but is not forwarded
|
||||||
|
// to the Watermill publisher, which does not accept a context.
|
||||||
|
func (producer *Producer) Publish(_ context.Context, topic, jobID string) error {
|
||||||
|
msg := message.NewMessage(watermill.NewUUID(), []byte(jobID))
|
||||||
|
return producer.publisher.Publish(topic, msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close shuts down the underlying Kafka publisher.
|
||||||
|
func (producer *Producer) Close() {
|
||||||
|
_ = producer.publisher.Close()
|
||||||
|
}
|
||||||
@@ -8,6 +8,8 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
|
|
||||||
|
"github.com/go-chi/chi/v5"
|
||||||
|
|
||||||
"github.com/food-ai/backend/internal/adapters/ai"
|
"github.com/food-ai/backend/internal/adapters/ai"
|
||||||
"github.com/food-ai/backend/internal/domain/dish"
|
"github.com/food-ai/backend/internal/domain/dish"
|
||||||
"github.com/food-ai/backend/internal/domain/ingredient"
|
"github.com/food-ai/backend/internal/domain/ingredient"
|
||||||
@@ -15,7 +17,7 @@ import (
|
|||||||
"github.com/food-ai/backend/internal/infra/middleware"
|
"github.com/food-ai/backend/internal/infra/middleware"
|
||||||
)
|
)
|
||||||
|
|
||||||
// DishRepository is the subset of dish.Repository used by this handler.
|
// DishRepository is the subset of dish.Repository used by workers and the handler.
|
||||||
type DishRepository interface {
|
type DishRepository interface {
|
||||||
FindOrCreate(ctx context.Context, name string) (string, bool, error)
|
FindOrCreate(ctx context.Context, name string) (string, bool, error)
|
||||||
FindOrCreateRecipe(ctx context.Context, dishID string, calories, proteinG, fatG, carbsG float64) (string, bool, error)
|
FindOrCreateRecipe(ctx context.Context, dishID string, calories, proteinG, fatG, carbsG float64) (string, bool, error)
|
||||||
@@ -41,16 +43,35 @@ type Recognizer interface {
|
|||||||
TranslateDishName(ctx context.Context, name string) (map[string]string, error)
|
TranslateDishName(ctx context.Context, name string) (map[string]string, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// KafkaPublisher publishes job IDs to a Kafka topic.
|
||||||
|
type KafkaPublisher interface {
|
||||||
|
Publish(ctx context.Context, topic, message string) error
|
||||||
|
}
|
||||||
|
|
||||||
// Handler handles POST /ai/* recognition endpoints.
|
// Handler handles POST /ai/* recognition endpoints.
|
||||||
type Handler struct {
|
type Handler struct {
|
||||||
recognizer Recognizer
|
recognizer Recognizer
|
||||||
ingredientRepo IngredientRepository
|
ingredientRepo IngredientRepository
|
||||||
dishRepo DishRepository
|
jobRepo JobRepository
|
||||||
|
kafkaProducer KafkaPublisher
|
||||||
|
sseBroker *SSEBroker
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewHandler creates a new Handler.
|
// NewHandler creates a new Handler with async dish recognition support.
|
||||||
func NewHandler(recognizer Recognizer, repo IngredientRepository, dishRepo DishRepository) *Handler {
|
func NewHandler(
|
||||||
return &Handler{recognizer: recognizer, ingredientRepo: repo, dishRepo: dishRepo}
|
recognizer Recognizer,
|
||||||
|
ingredientRepo IngredientRepository,
|
||||||
|
jobRepo JobRepository,
|
||||||
|
kafkaProducer KafkaPublisher,
|
||||||
|
sseBroker *SSEBroker,
|
||||||
|
) *Handler {
|
||||||
|
return &Handler{
|
||||||
|
recognizer: recognizer,
|
||||||
|
ingredientRepo: ingredientRepo,
|
||||||
|
jobRepo: jobRepo,
|
||||||
|
kafkaProducer: kafkaProducer,
|
||||||
|
sseBroker: sseBroker,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
@@ -81,39 +102,36 @@ type EnrichedItem struct {
|
|||||||
|
|
||||||
// ReceiptResponse is the response for POST /ai/recognize-receipt.
|
// ReceiptResponse is the response for POST /ai/recognize-receipt.
|
||||||
type ReceiptResponse struct {
|
type ReceiptResponse struct {
|
||||||
Items []EnrichedItem `json:"items"`
|
Items []EnrichedItem `json:"items"`
|
||||||
Unrecognized []ai.UnrecognizedItem `json:"unrecognized"`
|
Unrecognized []ai.UnrecognizedItem `json:"unrecognized"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// DishResponse is the response for POST /ai/recognize-dish.
|
|
||||||
type DishResponse = ai.DishResult
|
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// Handlers
|
// Handlers
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
// RecognizeReceipt handles POST /ai/recognize-receipt.
|
// RecognizeReceipt handles POST /ai/recognize-receipt.
|
||||||
// Body: {"image_base64": "...", "mime_type": "image/jpeg"}
|
// Body: {"image_base64": "...", "mime_type": "image/jpeg"}
|
||||||
func (h *Handler) RecognizeReceipt(w http.ResponseWriter, r *http.Request) {
|
func (handler *Handler) RecognizeReceipt(responseWriter http.ResponseWriter, request *http.Request) {
|
||||||
userID := middleware.UserIDFromCtx(r.Context())
|
userID := middleware.UserIDFromCtx(request.Context())
|
||||||
_ = userID // logged for tracing
|
_ = userID // logged for tracing
|
||||||
|
|
||||||
var req imageRequest
|
var req imageRequest
|
||||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil || req.ImageBase64 == "" {
|
if decodeError := json.NewDecoder(request.Body).Decode(&req); decodeError != nil || req.ImageBase64 == "" {
|
||||||
writeErrorJSON(w, http.StatusBadRequest, "image_base64 is required")
|
writeErrorJSON(responseWriter, http.StatusBadRequest, "image_base64 is required")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
lang := locale.FromContext(r.Context())
|
lang := locale.FromContext(request.Context())
|
||||||
result, err := h.recognizer.RecognizeReceipt(r.Context(), req.ImageBase64, req.MimeType, lang)
|
result, recognizeError := handler.recognizer.RecognizeReceipt(request.Context(), req.ImageBase64, req.MimeType, lang)
|
||||||
if err != nil {
|
if recognizeError != nil {
|
||||||
slog.Error("recognize receipt", "err", err)
|
slog.Error("recognize receipt", "err", recognizeError)
|
||||||
writeErrorJSON(w, http.StatusServiceUnavailable, "recognition failed, please try again")
|
writeErrorJSON(responseWriter, http.StatusServiceUnavailable, "recognition failed, please try again")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
enriched := h.enrichItems(r.Context(), result.Items)
|
enriched := handler.enrichItems(request.Context(), result.Items)
|
||||||
writeJSON(w, http.StatusOK, ReceiptResponse{
|
writeJSON(responseWriter, http.StatusOK, ReceiptResponse{
|
||||||
Items: enriched,
|
Items: enriched,
|
||||||
Unrecognized: result.Unrecognized,
|
Unrecognized: result.Unrecognized,
|
||||||
})
|
})
|
||||||
@@ -121,92 +139,108 @@ func (h *Handler) RecognizeReceipt(w http.ResponseWriter, r *http.Request) {
|
|||||||
|
|
||||||
// RecognizeProducts handles POST /ai/recognize-products.
|
// RecognizeProducts handles POST /ai/recognize-products.
|
||||||
// Body: {"images": [{"image_base64": "...", "mime_type": "image/jpeg"}, ...]}
|
// Body: {"images": [{"image_base64": "...", "mime_type": "image/jpeg"}, ...]}
|
||||||
func (h *Handler) RecognizeProducts(w http.ResponseWriter, r *http.Request) {
|
func (handler *Handler) RecognizeProducts(responseWriter http.ResponseWriter, request *http.Request) {
|
||||||
var req imagesRequest
|
var req imagesRequest
|
||||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil || len(req.Images) == 0 {
|
if decodeError := json.NewDecoder(request.Body).Decode(&req); decodeError != nil || len(req.Images) == 0 {
|
||||||
writeErrorJSON(w, http.StatusBadRequest, "at least one image is required")
|
writeErrorJSON(responseWriter, http.StatusBadRequest, "at least one image is required")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if len(req.Images) > 3 {
|
if len(req.Images) > 3 {
|
||||||
req.Images = req.Images[:3] // cap at 3 photos as per spec
|
req.Images = req.Images[:3] // cap at 3 photos as per spec
|
||||||
}
|
}
|
||||||
|
|
||||||
// Process each image in parallel.
|
lang := locale.FromContext(request.Context())
|
||||||
lang := locale.FromContext(r.Context())
|
|
||||||
allItems := make([][]ai.RecognizedItem, len(req.Images))
|
allItems := make([][]ai.RecognizedItem, len(req.Images))
|
||||||
var wg sync.WaitGroup
|
var wg sync.WaitGroup
|
||||||
for i, img := range req.Images {
|
for i, img := range req.Images {
|
||||||
wg.Add(1)
|
wg.Add(1)
|
||||||
go func(i int, img imageRequest) {
|
go func(index int, imageReq imageRequest) {
|
||||||
defer wg.Done()
|
defer wg.Done()
|
||||||
items, err := h.recognizer.RecognizeProducts(r.Context(), img.ImageBase64, img.MimeType, lang)
|
items, recognizeError := handler.recognizer.RecognizeProducts(request.Context(), imageReq.ImageBase64, imageReq.MimeType, lang)
|
||||||
if err != nil {
|
if recognizeError != nil {
|
||||||
slog.Warn("recognize products from image", "index", i, "err", err)
|
slog.Warn("recognize products from image", "index", index, "err", recognizeError)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
allItems[i] = items
|
allItems[index] = items
|
||||||
}(i, img)
|
}(i, img)
|
||||||
}
|
}
|
||||||
wg.Wait()
|
wg.Wait()
|
||||||
|
|
||||||
merged := MergeAndDeduplicate(allItems)
|
merged := MergeAndDeduplicate(allItems)
|
||||||
enriched := h.enrichItems(r.Context(), merged)
|
enriched := handler.enrichItems(request.Context(), merged)
|
||||||
writeJSON(w, http.StatusOK, map[string]any{"items": enriched})
|
writeJSON(responseWriter, http.StatusOK, map[string]any{"items": enriched})
|
||||||
}
|
}
|
||||||
|
|
||||||
// RecognizeDish handles POST /ai/recognize-dish.
|
// RecognizeDish handles POST /ai/recognize-dish (async).
|
||||||
|
// Enqueues the image for AI processing and returns 202 Accepted with a job_id.
|
||||||
// Body: {"image_base64": "...", "mime_type": "image/jpeg"}
|
// Body: {"image_base64": "...", "mime_type": "image/jpeg"}
|
||||||
func (h *Handler) RecognizeDish(w http.ResponseWriter, r *http.Request) {
|
func (handler *Handler) RecognizeDish(responseWriter http.ResponseWriter, request *http.Request) {
|
||||||
var req imageRequest
|
var req imageRequest
|
||||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil || req.ImageBase64 == "" {
|
if decodeError := json.NewDecoder(request.Body).Decode(&req); decodeError != nil || req.ImageBase64 == "" {
|
||||||
writeErrorJSON(w, http.StatusBadRequest, "image_base64 is required")
|
writeErrorJSON(responseWriter, http.StatusBadRequest, "image_base64 is required")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
lang := locale.FromContext(r.Context())
|
userID := middleware.UserIDFromCtx(request.Context())
|
||||||
result, err := h.recognizer.RecognizeDish(r.Context(), req.ImageBase64, req.MimeType, lang)
|
userPlan := middleware.UserPlanFromCtx(request.Context())
|
||||||
if err != nil {
|
lang := locale.FromContext(request.Context())
|
||||||
slog.Error("recognize dish", "err", err)
|
|
||||||
writeErrorJSON(w, http.StatusServiceUnavailable, "recognition failed, please try again")
|
job := &Job{
|
||||||
|
UserID: userID,
|
||||||
|
UserPlan: userPlan,
|
||||||
|
ImageBase64: req.ImageBase64,
|
||||||
|
MimeType: req.MimeType,
|
||||||
|
Lang: lang,
|
||||||
|
}
|
||||||
|
if insertError := handler.jobRepo.InsertJob(request.Context(), job); insertError != nil {
|
||||||
|
slog.Error("insert recognition job", "err", insertError)
|
||||||
|
writeErrorJSON(responseWriter, http.StatusInternalServerError, "failed to create job")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Resolve dish_id and recipe_id for each candidate in parallel.
|
position, positionError := handler.jobRepo.QueuePosition(request.Context(), userPlan, job.CreatedAt)
|
||||||
var mu sync.Mutex
|
if positionError != nil {
|
||||||
var wg sync.WaitGroup
|
position = 0
|
||||||
for i := range result.Candidates {
|
|
||||||
wg.Add(1)
|
|
||||||
go func(i int) {
|
|
||||||
defer wg.Done()
|
|
||||||
candidate := result.Candidates[i]
|
|
||||||
dishID, created, findError := h.dishRepo.FindOrCreate(r.Context(), candidate.DishName)
|
|
||||||
if findError != nil {
|
|
||||||
slog.Warn("find or create dish", "name", candidate.DishName, "err", findError)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
mu.Lock()
|
|
||||||
result.Candidates[i].DishID = &dishID
|
|
||||||
mu.Unlock()
|
|
||||||
if created {
|
|
||||||
go h.enrichDishInBackground(dishID, candidate.DishName)
|
|
||||||
}
|
|
||||||
|
|
||||||
recipeID, _, recipeError := h.dishRepo.FindOrCreateRecipe(
|
|
||||||
r.Context(), dishID,
|
|
||||||
candidate.Calories, candidate.ProteinG, candidate.FatG, candidate.CarbsG,
|
|
||||||
)
|
|
||||||
if recipeError != nil {
|
|
||||||
slog.Warn("find or create recipe", "dish_id", dishID, "err", recipeError)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
mu.Lock()
|
|
||||||
result.Candidates[i].RecipeID = &recipeID
|
|
||||||
mu.Unlock()
|
|
||||||
}(i)
|
|
||||||
}
|
}
|
||||||
wg.Wait()
|
|
||||||
|
|
||||||
writeJSON(w, http.StatusOK, result)
|
topic := TopicFree
|
||||||
|
if userPlan == "paid" {
|
||||||
|
topic = TopicPaid
|
||||||
|
}
|
||||||
|
if publishError := handler.kafkaProducer.Publish(request.Context(), topic, job.ID); publishError != nil {
|
||||||
|
slog.Error("publish recognition job", "job_id", job.ID, "err", publishError)
|
||||||
|
writeErrorJSON(responseWriter, http.StatusInternalServerError, "failed to enqueue job")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
estimatedSeconds := (position + 1) * 6
|
||||||
|
writeJSON(responseWriter, http.StatusAccepted, map[string]any{
|
||||||
|
"job_id": job.ID,
|
||||||
|
"queue_position": position,
|
||||||
|
"estimated_seconds": estimatedSeconds,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetJobStream handles GET /ai/jobs/{id}/stream — SSE endpoint for job updates.
// All streaming concerns (ownership check, SSE headers, event loop) are
// delegated to the broker; this method only exists so routing stays on Handler.
func (handler *Handler) GetJobStream(responseWriter http.ResponseWriter, request *http.Request) {
	handler.sseBroker.ServeSSE(responseWriter, request)
}
|
||||||
|
|
||||||
|
// GetJob handles GET /ai/jobs/{id} — fetches a job result (for app re-open after backgrounding).
|
||||||
|
func (handler *Handler) GetJob(responseWriter http.ResponseWriter, request *http.Request) {
|
||||||
|
jobID := chi.URLParam(request, "id")
|
||||||
|
userID := middleware.UserIDFromCtx(request.Context())
|
||||||
|
|
||||||
|
job, fetchError := handler.jobRepo.GetJobByID(request.Context(), jobID)
|
||||||
|
if fetchError != nil {
|
||||||
|
writeErrorJSON(responseWriter, http.StatusNotFound, "job not found")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if job.UserID != userID {
|
||||||
|
writeErrorJSON(responseWriter, http.StatusForbidden, "forbidden")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
writeJSON(responseWriter, http.StatusOK, job)
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
@@ -214,8 +248,8 @@ func (h *Handler) RecognizeDish(w http.ResponseWriter, r *http.Request) {
|
|||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
// enrichItems matches each recognized item against ingredient_mappings.
|
// enrichItems matches each recognized item against ingredient_mappings.
|
||||||
// Items without a match trigger a Gemini classification call and upsert into the DB.
|
// Items without a match trigger a classification call and upsert into the DB.
|
||||||
func (h *Handler) enrichItems(ctx context.Context, items []ai.RecognizedItem) []EnrichedItem {
|
func (handler *Handler) enrichItems(ctx context.Context, items []ai.RecognizedItem) []EnrichedItem {
|
||||||
result := make([]EnrichedItem, 0, len(items))
|
result := make([]EnrichedItem, 0, len(items))
|
||||||
for _, item := range items {
|
for _, item := range items {
|
||||||
enriched := EnrichedItem{
|
enriched := EnrichedItem{
|
||||||
@@ -227,13 +261,12 @@ func (h *Handler) enrichItems(ctx context.Context, items []ai.RecognizedItem) []
|
|||||||
StorageDays: 7, // sensible default
|
StorageDays: 7, // sensible default
|
||||||
}
|
}
|
||||||
|
|
||||||
mapping, err := h.ingredientRepo.FuzzyMatch(ctx, item.Name)
|
mapping, matchError := handler.ingredientRepo.FuzzyMatch(ctx, item.Name)
|
||||||
if err != nil {
|
if matchError != nil {
|
||||||
slog.Warn("fuzzy match ingredient", "name", item.Name, "err", err)
|
slog.Warn("fuzzy match ingredient", "name", item.Name, "err", matchError)
|
||||||
}
|
}
|
||||||
|
|
||||||
if mapping != nil {
|
if mapping != nil {
|
||||||
// Found existing mapping — use its canonical data.
|
|
||||||
id := mapping.ID
|
id := mapping.ID
|
||||||
enriched.MappingID = &id
|
enriched.MappingID = &id
|
||||||
if mapping.DefaultUnit != nil {
|
if mapping.DefaultUnit != nil {
|
||||||
@@ -246,12 +279,11 @@ func (h *Handler) enrichItems(ctx context.Context, items []ai.RecognizedItem) []
|
|||||||
enriched.Category = *mapping.Category
|
enriched.Category = *mapping.Category
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// No mapping — ask AI to classify and save for future reuse.
|
classification, classifyError := handler.recognizer.ClassifyIngredient(ctx, item.Name)
|
||||||
classification, err := h.recognizer.ClassifyIngredient(ctx, item.Name)
|
if classifyError != nil {
|
||||||
if err != nil {
|
slog.Warn("classify unknown ingredient", "name", item.Name, "err", classifyError)
|
||||||
slog.Warn("classify unknown ingredient", "name", item.Name, "err", err)
|
|
||||||
} else {
|
} else {
|
||||||
saved := h.saveClassification(ctx, classification)
|
saved := handler.saveClassification(ctx, classification)
|
||||||
if saved != nil {
|
if saved != nil {
|
||||||
id := saved.ID
|
id := saved.ID
|
||||||
enriched.MappingID = &id
|
enriched.MappingID = &id
|
||||||
@@ -267,41 +299,41 @@ func (h *Handler) enrichItems(ctx context.Context, items []ai.RecognizedItem) []
|
|||||||
}
|
}
|
||||||
|
|
||||||
// saveClassification upserts an AI-produced ingredient classification into the DB.
|
// saveClassification upserts an AI-produced ingredient classification into the DB.
|
||||||
func (h *Handler) saveClassification(ctx context.Context, c *ai.IngredientClassification) *ingredient.IngredientMapping {
|
func (handler *Handler) saveClassification(ctx context.Context, classification *ai.IngredientClassification) *ingredient.IngredientMapping {
|
||||||
if c == nil || c.CanonicalName == "" {
|
if classification == nil || classification.CanonicalName == "" {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
m := &ingredient.IngredientMapping{
|
mapping := &ingredient.IngredientMapping{
|
||||||
CanonicalName: c.CanonicalName,
|
CanonicalName: classification.CanonicalName,
|
||||||
Category: strPtr(c.Category),
|
Category: strPtr(classification.Category),
|
||||||
DefaultUnit: strPtr(c.DefaultUnit),
|
DefaultUnit: strPtr(classification.DefaultUnit),
|
||||||
CaloriesPer100g: c.CaloriesPer100g,
|
CaloriesPer100g: classification.CaloriesPer100g,
|
||||||
ProteinPer100g: c.ProteinPer100g,
|
ProteinPer100g: classification.ProteinPer100g,
|
||||||
FatPer100g: c.FatPer100g,
|
FatPer100g: classification.FatPer100g,
|
||||||
CarbsPer100g: c.CarbsPer100g,
|
CarbsPer100g: classification.CarbsPer100g,
|
||||||
StorageDays: intPtr(c.StorageDays),
|
StorageDays: intPtr(classification.StorageDays),
|
||||||
}
|
}
|
||||||
|
|
||||||
saved, err := h.ingredientRepo.Upsert(ctx, m)
|
saved, upsertError := handler.ingredientRepo.Upsert(ctx, mapping)
|
||||||
if err != nil {
|
if upsertError != nil {
|
||||||
slog.Warn("upsert classified ingredient", "name", c.CanonicalName, "err", err)
|
slog.Warn("upsert classified ingredient", "name", classification.CanonicalName, "err", upsertError)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(c.Aliases) > 0 {
|
if len(classification.Aliases) > 0 {
|
||||||
if err := h.ingredientRepo.UpsertAliases(ctx, saved.ID, "en", c.Aliases); err != nil {
|
if aliasError := handler.ingredientRepo.UpsertAliases(ctx, saved.ID, "en", classification.Aliases); aliasError != nil {
|
||||||
slog.Warn("upsert ingredient aliases", "id", saved.ID, "err", err)
|
slog.Warn("upsert ingredient aliases", "id", saved.ID, "err", aliasError)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, t := range c.Translations {
|
for _, translation := range classification.Translations {
|
||||||
if err := h.ingredientRepo.UpsertTranslation(ctx, saved.ID, t.Lang, t.Name); err != nil {
|
if translationError := handler.ingredientRepo.UpsertTranslation(ctx, saved.ID, translation.Lang, translation.Name); translationError != nil {
|
||||||
slog.Warn("upsert ingredient translation", "id", saved.ID, "lang", t.Lang, "err", err)
|
slog.Warn("upsert ingredient translation", "id", saved.ID, "lang", translation.Lang, "err", translationError)
|
||||||
}
|
}
|
||||||
if len(t.Aliases) > 0 {
|
if len(translation.Aliases) > 0 {
|
||||||
if err := h.ingredientRepo.UpsertAliases(ctx, saved.ID, t.Lang, t.Aliases); err != nil {
|
if aliasError := handler.ingredientRepo.UpsertAliases(ctx, saved.ID, translation.Lang, translation.Aliases); aliasError != nil {
|
||||||
slog.Warn("upsert ingredient translation aliases", "id", saved.ID, "lang", t.Lang, "err", err)
|
slog.Warn("upsert ingredient translation aliases", "id", saved.ID, "lang", translation.Lang, "err", aliasError)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -309,58 +341,6 @@ func (h *Handler) saveClassification(ctx context.Context, c *ai.IngredientClassi
|
|||||||
return saved
|
return saved
|
||||||
}
|
}
|
||||||
|
|
||||||
// enrichDishInBackground generates name translations for a newly created dish stub.
|
|
||||||
// Recipe creation is handled synchronously in RecognizeDish.
|
|
||||||
// Runs as a fire-and-forget goroutine so it never blocks the HTTP response.
|
|
||||||
func (h *Handler) enrichDishInBackground(dishID, dishName string) {
|
|
||||||
enrichContext := context.Background()
|
|
||||||
|
|
||||||
translations, translateError := h.recognizer.TranslateDishName(enrichContext, dishName)
|
|
||||||
if translateError != nil {
|
|
||||||
slog.Warn("translate dish name", "name", dishName, "err", translateError)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
for lang, translatedName := range translations {
|
|
||||||
if upsertError := h.dishRepo.UpsertTranslation(enrichContext, dishID, lang, translatedName); upsertError != nil {
|
|
||||||
slog.Warn("upsert dish translation", "dish_id", dishID, "lang", lang, "err", upsertError)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// aiRecipeToCreateRequest converts an AI-generated recipe into a dish.CreateRequest.
|
|
||||||
func aiRecipeToCreateRequest(recipe *ai.Recipe) dish.CreateRequest {
|
|
||||||
ingredients := make([]dish.IngredientInput, len(recipe.Ingredients))
|
|
||||||
for i, ingredient := range recipe.Ingredients {
|
|
||||||
ingredients[i] = dish.IngredientInput{
|
|
||||||
Name: ingredient.Name, Amount: ingredient.Amount, Unit: ingredient.Unit,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
steps := make([]dish.StepInput, len(recipe.Steps))
|
|
||||||
for i, step := range recipe.Steps {
|
|
||||||
steps[i] = dish.StepInput{
|
|
||||||
Number: step.Number, Description: step.Description, TimerSeconds: step.TimerSeconds,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return dish.CreateRequest{
|
|
||||||
Name: recipe.Title,
|
|
||||||
Description: recipe.Description,
|
|
||||||
CuisineSlug: recipe.Cuisine,
|
|
||||||
ImageURL: recipe.ImageURL,
|
|
||||||
Tags: recipe.Tags,
|
|
||||||
Source: "ai",
|
|
||||||
Difficulty: recipe.Difficulty,
|
|
||||||
PrepTimeMin: recipe.PrepTimeMin,
|
|
||||||
CookTimeMin: recipe.CookTimeMin,
|
|
||||||
Servings: recipe.Servings,
|
|
||||||
Calories: recipe.Nutrition.Calories,
|
|
||||||
Protein: recipe.Nutrition.ProteinG,
|
|
||||||
Fat: recipe.Nutrition.FatG,
|
|
||||||
Carbs: recipe.Nutrition.CarbsG,
|
|
||||||
Ingredients: ingredients,
|
|
||||||
Steps: steps,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// MergeAndDeduplicate combines results from multiple images.
|
// MergeAndDeduplicate combines results from multiple images.
|
||||||
// Items sharing the same name (case-insensitive) have their quantities summed.
|
// Items sharing the same name (case-insensitive) have their quantities summed.
|
||||||
func MergeAndDeduplicate(batches [][]ai.RecognizedItem) []ai.RecognizedItem {
|
func MergeAndDeduplicate(batches [][]ai.RecognizedItem) []ai.RecognizedItem {
|
||||||
@@ -373,7 +353,6 @@ func MergeAndDeduplicate(batches [][]ai.RecognizedItem) []ai.RecognizedItem {
|
|||||||
key := normalizeName(item.Name)
|
key := normalizeName(item.Name)
|
||||||
if existing, ok := seen[key]; ok {
|
if existing, ok := seen[key]; ok {
|
||||||
existing.Quantity += item.Quantity
|
existing.Quantity += item.Quantity
|
||||||
// Keep the higher confidence estimate.
|
|
||||||
if item.Confidence > existing.Confidence {
|
if item.Confidence > existing.Confidence {
|
||||||
existing.Confidence = item.Confidence
|
existing.Confidence = item.Confidence
|
||||||
}
|
}
|
||||||
@@ -414,14 +393,14 @@ type errorResponse struct {
|
|||||||
Error string `json:"error"`
|
Error string `json:"error"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func writeErrorJSON(w http.ResponseWriter, status int, msg string) {
|
func writeErrorJSON(responseWriter http.ResponseWriter, status int, msg string) {
|
||||||
w.Header().Set("Content-Type", "application/json")
|
responseWriter.Header().Set("Content-Type", "application/json")
|
||||||
w.WriteHeader(status)
|
responseWriter.WriteHeader(status)
|
||||||
_ = json.NewEncoder(w).Encode(errorResponse{Error: msg})
|
_ = json.NewEncoder(responseWriter).Encode(errorResponse{Error: msg})
|
||||||
}
|
}
|
||||||
|
|
||||||
func writeJSON(w http.ResponseWriter, status int, v any) {
|
func writeJSON(responseWriter http.ResponseWriter, status int, value any) {
|
||||||
w.Header().Set("Content-Type", "application/json")
|
responseWriter.Header().Set("Content-Type", "application/json")
|
||||||
w.WriteHeader(status)
|
responseWriter.WriteHeader(status)
|
||||||
_ = json.NewEncoder(w).Encode(v)
|
_ = json.NewEncoder(responseWriter).Encode(value)
|
||||||
}
|
}
|
||||||
|
|||||||
37
backend/internal/domain/recognition/job.go
Normal file
37
backend/internal/domain/recognition/job.go
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
package recognition
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/food-ai/backend/internal/adapters/ai"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Job status constants. Observed transitions in this package are
// pending → processing → done|failed (see WorkerPool.processJob).
const (
	JobStatusPending    = "pending"
	JobStatusProcessing = "processing"
	JobStatusDone       = "done"
	JobStatusFailed     = "failed"
)

// Kafka topic names. Paid and free jobs are published to separate topics so
// the worker pool can drain paid work first.
const (
	TopicPaid = "ai.recognize.paid"
	TopicFree = "ai.recognize.free"
)

// Job represents an async dish recognition task stored in recognition_jobs.
type Job struct {
	ID          string         // primary key; assigned by the DB on insert (RETURNING id)
	UserID      string         // owner; enforced by the read/stream endpoints
	UserPlan    string         // "paid" or "free" — selects topic and queue priority
	ImageBase64 string         // image payload handed to the recognizer
	MimeType    string         // MIME type of the image payload
	Lang        string         // language requested for recognition output
	Status      string         // one of the JobStatus* constants
	Result      *ai.DishResult // recognition result; populated for done jobs
	Error       *string        // failure message; populated for failed jobs
	CreatedAt   time.Time      // set by the DB on insert (RETURNING created_at)
	StartedAt   *time.Time     // stamped when the job transitions to processing
	CompletedAt *time.Time     // stamped on the terminal (done/failed) transition
}
|
||||||
125
backend/internal/domain/recognition/job_repository.go
Normal file
125
backend/internal/domain/recognition/job_repository.go
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
package recognition
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/food-ai/backend/internal/adapters/ai"
|
||||||
|
"github.com/jackc/pgx/v5/pgxpool"
|
||||||
|
)
|
||||||
|
|
||||||
|
// JobRepository provides all DB operations on recognition_jobs. It is consumed
// by the HTTP handler, the worker pool, and the SSE broker.
type JobRepository interface {
	// InsertJob persists a new job and fills in its ID and CreatedAt.
	InsertJob(ctx context.Context, job *Job) error
	// GetJobByID fetches a job by primary key.
	GetJobByID(ctx context.Context, jobID string) (*Job, error)
	// UpdateJobStatus transitions a job and records its result or error message.
	UpdateJobStatus(ctx context.Context, jobID, status string, result *ai.DishResult, errMsg *string) error
	// QueuePosition counts unfinished jobs ahead of createdAt in userPlan's queue.
	QueuePosition(ctx context.Context, userPlan string, createdAt time.Time) (int, error)
	// NotifyJobUpdate emits a PostgreSQL NOTIFY so SSE brokers can fan out.
	NotifyJobUpdate(ctx context.Context, jobID string) error
}

// PostgresJobRepository implements JobRepository using a pgxpool.
type PostgresJobRepository struct {
	pool *pgxpool.Pool
}

// NewJobRepository creates a new PostgresJobRepository backed by pool.
func NewJobRepository(pool *pgxpool.Pool) *PostgresJobRepository {
	return &PostgresJobRepository{pool: pool}
}
|
||||||
|
|
||||||
|
// InsertJob inserts a new recognition job and populates the ID and CreatedAt
// fields on the passed-in job via RETURNING.
// NOTE(review): status is not set here — presumably defaulted to 'pending' by
// the table schema; verify against the migration.
func (repository *PostgresJobRepository) InsertJob(queryContext context.Context, job *Job) error {
	return repository.pool.QueryRow(queryContext,
		`INSERT INTO recognition_jobs (user_id, user_plan, image_base64, mime_type, lang)
		 VALUES ($1, $2, $3, $4, $5)
		 RETURNING id, created_at`,
		job.UserID, job.UserPlan, job.ImageBase64, job.MimeType, job.Lang,
	).Scan(&job.ID, &job.CreatedAt)
}
|
||||||
|
|
||||||
|
// GetJobByID fetches a single job by primary key.
// The result column holds JSON; rows with a NULL result leave job.Result nil.
func (repository *PostgresJobRepository) GetJobByID(queryContext context.Context, jobID string) (*Job, error) {
	var job Job
	var resultJSON []byte

	queryError := repository.pool.QueryRow(queryContext,
		`SELECT id, user_id, user_plan, image_base64, mime_type, lang, status,
		 result, error, created_at, started_at, completed_at
		 FROM recognition_jobs WHERE id = $1`,
		jobID,
	).Scan(
		&job.ID, &job.UserID, &job.UserPlan,
		&job.ImageBase64, &job.MimeType, &job.Lang, &job.Status,
		&resultJSON, &job.Error, &job.CreatedAt, &job.StartedAt, &job.CompletedAt,
	)
	if queryError != nil {
		return nil, queryError
	}

	if resultJSON != nil {
		var dishResult ai.DishResult
		// NOTE(review): an unmarshal failure is silently swallowed and Result
		// stays nil — confirm this is the intended behavior for corrupt rows.
		if unmarshalError := json.Unmarshal(resultJSON, &dishResult); unmarshalError == nil {
			job.Result = &dishResult
		}
	}

	return &job, nil
}
|
||||||
|
|
||||||
|
// UpdateJobStatus transitions a job to a new status and records the result or error.
|
||||||
|
func (repository *PostgresJobRepository) UpdateJobStatus(
|
||||||
|
queryContext context.Context,
|
||||||
|
jobID, status string,
|
||||||
|
result *ai.DishResult,
|
||||||
|
errMsg *string,
|
||||||
|
) error {
|
||||||
|
var resultJSON []byte
|
||||||
|
if result != nil {
|
||||||
|
marshalledBytes, marshalError := json.Marshal(result)
|
||||||
|
if marshalError != nil {
|
||||||
|
return marshalError
|
||||||
|
}
|
||||||
|
resultJSON = marshalledBytes
|
||||||
|
}
|
||||||
|
|
||||||
|
switch status {
|
||||||
|
case JobStatusProcessing:
|
||||||
|
_, updateError := repository.pool.Exec(queryContext,
|
||||||
|
`UPDATE recognition_jobs SET status = $1, started_at = now() WHERE id = $2`,
|
||||||
|
status, jobID,
|
||||||
|
)
|
||||||
|
return updateError
|
||||||
|
default:
|
||||||
|
_, updateError := repository.pool.Exec(queryContext,
|
||||||
|
`UPDATE recognition_jobs
|
||||||
|
SET status = $1, result = $2, error = $3, completed_at = now()
|
||||||
|
WHERE id = $4`,
|
||||||
|
status, resultJSON, errMsg, jobID,
|
||||||
|
)
|
||||||
|
return updateError
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// QueuePosition counts jobs ahead of createdAt in the same plan's queue.
// Both pending and processing jobs count as "ahead", so position 0 means the
// job is next in line (or already being worked on).
func (repository *PostgresJobRepository) QueuePosition(
	queryContext context.Context,
	userPlan string,
	createdAt time.Time,
) (int, error) {
	var position int
	queryError := repository.pool.QueryRow(queryContext,
		`SELECT COUNT(*) FROM recognition_jobs
		 WHERE status IN ('pending', 'processing')
		 AND user_plan = $1
		 AND created_at < $2`,
		userPlan, createdAt,
	).Scan(&position)
	return position, queryError
}
|
||||||
|
|
||||||
|
// NotifyJobUpdate sends a PostgreSQL NOTIFY on the job_update channel with the
// job ID as payload. SSEBroker.listenLoop receives these notifications and
// fans the job's current state out to streaming clients.
func (repository *PostgresJobRepository) NotifyJobUpdate(queryContext context.Context, jobID string) error {
	_, notifyError := repository.pool.Exec(queryContext, `SELECT pg_notify('job_update', $1)`, jobID)
	return notifyError
}
|
||||||
206
backend/internal/domain/recognition/sse.go
Normal file
206
backend/internal/domain/recognition/sse.go
Normal file
@@ -0,0 +1,206 @@
|
|||||||
|
package recognition
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"log/slog"
|
||||||
|
"net/http"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"github.com/go-chi/chi/v5"
|
||||||
|
"github.com/jackc/pgx/v5/pgxpool"
|
||||||
|
|
||||||
|
"github.com/food-ai/backend/internal/infra/middleware"
|
||||||
|
)
|
||||||
|
|
||||||
|
// sseEvent is a single server-sent event: an event name plus its JSON payload.
type sseEvent struct {
	name string
	data string
}

// SSEBroker manages Server-Sent Events for job status updates.
// It listens on the PostgreSQL "job_update" NOTIFY channel and fans out
// events to all HTTP clients currently streaming a given job.
type SSEBroker struct {
	pool    *pgxpool.Pool
	jobRepo JobRepository
	mu      sync.RWMutex               // guards clients
	clients map[string][]chan sseEvent // job ID → subscriber channels
}

// NewSSEBroker creates a new SSEBroker. Call Start to begin the LISTEN loop.
func NewSSEBroker(pool *pgxpool.Pool, jobRepo JobRepository) *SSEBroker {
	return &SSEBroker{
		pool:    pool,
		jobRepo: jobRepo,
		clients: make(map[string][]chan sseEvent),
	}
}
|
||||||
|
|
||||||
|
// Start launches the PostgreSQL LISTEN loop in a background goroutine.
// The loop runs until brokerContext is cancelled (or it hits an error; see
// listenLoop).
func (broker *SSEBroker) Start(brokerContext context.Context) {
	go broker.listenLoop(brokerContext)
}
|
||||||
|
|
||||||
|
// listenLoop pins one pooled connection on LISTEN job_update and fans each
// notification payload (a job ID, sent via pg_notify) out to subscribers.
// NOTE(review): any failure — acquire, LISTEN, or a dropped connection — exits
// the loop permanently, so live SSE updates stop until restart; consider a
// reconnect-with-backoff here.
func (broker *SSEBroker) listenLoop(brokerContext context.Context) {
	conn, acquireError := broker.pool.Acquire(brokerContext)
	if acquireError != nil {
		slog.Error("SSEBroker: acquire PG connection", "err", acquireError)
		return
	}
	// The connection is held for the broker's lifetime and released on exit.
	defer conn.Release()

	if _, listenError := conn.Exec(brokerContext, "LISTEN job_update"); listenError != nil {
		slog.Error("SSEBroker: LISTEN job_update", "err", listenError)
		return
	}

	for {
		notification, waitError := conn.Conn().WaitForNotification(brokerContext)
		// Context cancellation is normal shutdown, not an error worth logging.
		if brokerContext.Err() != nil {
			return
		}
		if waitError != nil {
			slog.Error("SSEBroker: wait for notification", "err", waitError)
			return
		}
		broker.fanOut(brokerContext, notification.Payload)
	}
}
|
||||||
|
|
||||||
|
func (broker *SSEBroker) subscribe(jobID string) chan sseEvent {
|
||||||
|
channel := make(chan sseEvent, 10)
|
||||||
|
broker.mu.Lock()
|
||||||
|
broker.clients[jobID] = append(broker.clients[jobID], channel)
|
||||||
|
broker.mu.Unlock()
|
||||||
|
return channel
|
||||||
|
}
|
||||||
|
|
||||||
|
func (broker *SSEBroker) unsubscribe(jobID string, channel chan sseEvent) {
|
||||||
|
broker.mu.Lock()
|
||||||
|
defer broker.mu.Unlock()
|
||||||
|
existing := broker.clients[jobID]
|
||||||
|
for index, existing := range existing {
|
||||||
|
if existing == channel {
|
||||||
|
broker.clients[jobID] = append(broker.clients[jobID][:index], broker.clients[jobID][index+1:]...)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(broker.clients[jobID]) == 0 {
|
||||||
|
delete(broker.clients, jobID)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// fanOut re-reads the job identified by jobID and delivers its current state
// as an SSE event to every subscriber. Delivery is non-blocking: a subscriber
// whose channel buffer is full simply misses this update.
func (broker *SSEBroker) fanOut(fanContext context.Context, jobID string) {
	job, fetchError := broker.jobRepo.GetJobByID(fanContext, jobID)
	if fetchError != nil {
		slog.Warn("SSEBroker: get job for fan-out", "job_id", jobID, "err", fetchError)
		return
	}

	// Pending (and unmarshalable done) states produce no event.
	event, ok := jobToSSEEvent(job)
	if !ok {
		return
	}

	// Snapshot the subscriber list under the read lock, then send outside it
	// so slow channel sends never happen while the lock is held.
	broker.mu.RLock()
	channels := make([]chan sseEvent, len(broker.clients[jobID]))
	copy(channels, broker.clients[jobID])
	broker.mu.RUnlock()

	for _, channel := range channels {
		select {
		case channel <- event:
		default:
			// channel full; skip this delivery
		}
	}
}
|
||||||
|
|
||||||
|
func jobToSSEEvent(job *Job) (sseEvent, bool) {
|
||||||
|
switch job.Status {
|
||||||
|
case JobStatusProcessing:
|
||||||
|
return sseEvent{name: "processing", data: "{}"}, true
|
||||||
|
case JobStatusDone:
|
||||||
|
resultJSON, marshalError := json.Marshal(job.Result)
|
||||||
|
if marshalError != nil {
|
||||||
|
return sseEvent{}, false
|
||||||
|
}
|
||||||
|
return sseEvent{name: "done", data: string(resultJSON)}, true
|
||||||
|
case JobStatusFailed:
|
||||||
|
errMsg := "recognition failed, please try again"
|
||||||
|
if job.Error != nil {
|
||||||
|
errMsg = *job.Error
|
||||||
|
}
|
||||||
|
errorData, _ := json.Marshal(map[string]string{"error": errMsg})
|
||||||
|
return sseEvent{name: "failed", data: string(errorData)}, true
|
||||||
|
default:
|
||||||
|
return sseEvent{}, false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ServeSSE handles GET /ai/jobs/{id}/stream — streams SSE events until the
// job reaches a terminal state or the client disconnects.
//
// Event sequence for a live job: "queued" (with position and estimate) first,
// then whatever the LISTEN/NOTIFY pipeline delivers ("processing", then
// "done" or "failed"). A job already done/failed gets its final event
// immediately and the stream closes.
func (broker *SSEBroker) ServeSSE(responseWriter http.ResponseWriter, request *http.Request) {
	jobID := chi.URLParam(request, "id")
	userID := middleware.UserIDFromCtx(request.Context())

	job, fetchError := broker.jobRepo.GetJobByID(request.Context(), jobID)
	if fetchError != nil {
		writeErrorJSON(responseWriter, http.StatusNotFound, "job not found")
		return
	}
	// Only the job's owner may stream its updates.
	if job.UserID != userID {
		writeErrorJSON(responseWriter, http.StatusForbidden, "forbidden")
		return
	}

	// SSE requires incremental flushing; bail out if the writer can't flush.
	flusher, supported := responseWriter.(http.Flusher)
	if !supported {
		writeErrorJSON(responseWriter, http.StatusInternalServerError, "streaming not supported")
		return
	}

	responseWriter.Header().Set("Content-Type", "text/event-stream")
	responseWriter.Header().Set("Cache-Control", "no-cache")
	responseWriter.Header().Set("Connection", "keep-alive")
	// Disables proxy response buffering so events reach the client promptly.
	responseWriter.Header().Set("X-Accel-Buffering", "no")

	// If the job is already in a terminal state, send the event immediately.
	if job.Status == JobStatusDone || job.Status == JobStatusFailed {
		if event, ok := jobToSSEEvent(job); ok {
			fmt.Fprintf(responseWriter, "event: %s\ndata: %s\n\n", event.name, event.data)
			flusher.Flush()
		}
		return
	}

	// Subscribe to future notifications before sending the queued event to
	// avoid a race where the job completes between reading the current state
	// and registering the subscriber.
	eventChannel := broker.subscribe(jobID)
	defer broker.unsubscribe(jobID, eventChannel)

	// Send initial queued event with estimated wait.
	// NOTE(review): QueuePosition/Marshal errors are deliberately ignored —
	// worst case the client sees position 0; confirm that is acceptable.
	position, _ := broker.jobRepo.QueuePosition(request.Context(), job.UserPlan, job.CreatedAt)
	estimatedSeconds := (position + 1) * 6
	queuedData, _ := json.Marshal(map[string]any{
		"position":          position,
		"estimated_seconds": estimatedSeconds,
	})
	fmt.Fprintf(responseWriter, "event: queued\ndata: %s\n\n", queuedData)
	flusher.Flush()

	for {
		select {
		case event := <-eventChannel:
			fmt.Fprintf(responseWriter, "event: %s\ndata: %s\n\n", event.name, event.data)
			flusher.Flush()
			// Terminal events end the stream.
			if event.name == "done" || event.name == "failed" {
				return
			}
		case <-request.Context().Done():
			// Client disconnected or server shutting down.
			return
		}
	}
}
|
||||||
165
backend/internal/domain/recognition/worker.go
Normal file
165
backend/internal/domain/recognition/worker.go
Normal file
@@ -0,0 +1,165 @@
|
|||||||
|
package recognition
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"log/slog"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/food-ai/backend/internal/adapters/kafka"
|
||||||
|
)
|
||||||
|
|
||||||
|
// defaultWorkerCount is the number of concurrent recognition worker goroutines.
const defaultWorkerCount = 5

// WorkerPool processes dish recognition jobs from Kafka with priority queuing.
// Paid jobs are processed before free jobs: each worker drains paidJobs before
// accepting from freeJobs (see runWorker).
type WorkerPool struct {
	jobRepo      JobRepository   // job persistence + NOTIFY for SSE fan-out
	recognizer   Recognizer      // AI backend performing the recognition
	dishRepo     DishRepository  // dish/recipe lookup and creation
	paidConsumer *kafka.Consumer // feeds paidJobs from the paid topic
	freeConsumer *kafka.Consumer // feeds freeJobs from the free topic
	workerCount  int             // number of worker goroutines to launch
	paidJobs     chan string     // job IDs from the paid topic (buffered)
	freeJobs     chan string     // job IDs from the free topic (buffered)
}
|
||||||
|
|
||||||
|
// NewWorkerPool creates a WorkerPool with five workers (defaultWorkerCount).
// Both job channels are buffered (100) so the Kafka feeders can run ahead of
// the workers without blocking on every message.
func NewWorkerPool(
	jobRepo JobRepository,
	recognizer Recognizer,
	dishRepo DishRepository,
	paidConsumer *kafka.Consumer,
	freeConsumer *kafka.Consumer,
) *WorkerPool {
	return &WorkerPool{
		jobRepo:      jobRepo,
		recognizer:   recognizer,
		dishRepo:     dishRepo,
		paidConsumer: paidConsumer,
		freeConsumer: freeConsumer,
		workerCount:  defaultWorkerCount,
		paidJobs:     make(chan string, 100),
		freeJobs:     make(chan string, 100),
	}
}
|
||||||
|
|
||||||
|
// Start launches the Kafka feeder goroutines and all worker goroutines.
// Workers exit when workerContext is cancelled (see runWorker); the consumers
// receive the same context. There is no join/wait — shutdown relies entirely
// on context cancellation.
func (pool *WorkerPool) Start(workerContext context.Context) {
	go pool.paidConsumer.Run(workerContext, pool.paidJobs)
	go pool.freeConsumer.Run(workerContext, pool.freeJobs)
	for i := 0; i < pool.workerCount; i++ {
		go pool.runWorker(workerContext)
	}
}
|
||||||
|
|
||||||
|
// runWorker is the loop body of one worker goroutine. Each iteration first
// drains the paid queue without blocking, then blocks on either queue with a
// 100ms cap so the loop re-runs the paid-first check at least every 100ms.
func (pool *WorkerPool) runWorker(workerContext context.Context) {
	for {
		// Priority step: drain paid queue without blocking.
		select {
		case jobID := <-pool.paidJobs:
			pool.processJob(workerContext, jobID)
			continue
		case <-workerContext.Done():
			return
		default:
		}

		// Fall back to either queue with a 100ms timeout.
		// NOTE(review): time.After allocates a timer per iteration; harmless
		// at this cadence, but a reused Timer would avoid it if this loop ever
		// gets hotter.
		select {
		case jobID := <-pool.paidJobs:
			pool.processJob(workerContext, jobID)
		case jobID := <-pool.freeJobs:
			pool.processJob(workerContext, jobID)
		case <-workerContext.Done():
			return
		case <-time.After(100 * time.Millisecond):
			// nothing available; loop again
		}
	}
}
|
||||||
|
|
||||||
|
// processJob runs one recognition job end to end: load → mark processing →
// recognize → resolve dish/recipe IDs in parallel → mark done (or failed),
// with a NOTIFY after each status change so SSE clients see progress.
func (pool *WorkerPool) processJob(workerContext context.Context, jobID string) {
	job, fetchError := pool.jobRepo.GetJobByID(workerContext, jobID)
	if fetchError != nil {
		slog.Error("worker: fetch job", "job_id", jobID, "err", fetchError)
		return
	}

	// Transition to processing. Both the update and the notify are logged but
	// non-fatal: recognition still proceeds if either fails.
	if updateError := pool.jobRepo.UpdateJobStatus(workerContext, jobID, JobStatusProcessing, nil, nil); updateError != nil {
		slog.Error("worker: set processing status", "job_id", jobID, "err", updateError)
	}
	if notifyError := pool.jobRepo.NotifyJobUpdate(workerContext, jobID); notifyError != nil {
		slog.Warn("worker: notify processing", "job_id", jobID, "err", notifyError)
	}

	// Run AI recognition. On failure the job is marked failed with a generic,
	// user-facing message (the real error stays in the logs only).
	result, recognizeError := pool.recognizer.RecognizeDish(workerContext, job.ImageBase64, job.MimeType, job.Lang)
	if recognizeError != nil {
		slog.Error("worker: recognize dish", "job_id", jobID, "err", recognizeError)
		errMsg := "recognition failed, please try again"
		_ = pool.jobRepo.UpdateJobStatus(workerContext, jobID, JobStatusFailed, nil, &errMsg)
		_ = pool.jobRepo.NotifyJobUpdate(workerContext, jobID)
		return
	}

	// Resolve dish_id and recipe_id for each candidate in parallel. One
	// goroutine per candidate (unbounded, but bounded in practice by the
	// recognizer's candidate count); mu guards writes into the shared slice.
	// Per-candidate failures are logged and leave that candidate's IDs nil —
	// they do not fail the job.
	var mu sync.Mutex
	var wg sync.WaitGroup
	for index := range result.Candidates {
		wg.Add(1)
		go func(candidateIndex int) {
			defer wg.Done()
			candidate := result.Candidates[candidateIndex]
			dishID, created, findError := pool.dishRepo.FindOrCreate(workerContext, candidate.DishName)
			if findError != nil {
				slog.Warn("worker: find or create dish", "name", candidate.DishName, "err", findError)
				return
			}
			mu.Lock()
			result.Candidates[candidateIndex].DishID = &dishID
			mu.Unlock()
			if created {
				// Fire-and-forget translation enrichment for brand-new dishes.
				go enrichDishInBackground(pool.recognizer, pool.dishRepo, dishID, candidate.DishName)
			}

			recipeID, _, recipeError := pool.dishRepo.FindOrCreateRecipe(
				workerContext, dishID,
				candidate.Calories, candidate.ProteinG, candidate.FatG, candidate.CarbsG,
			)
			if recipeError != nil {
				slog.Warn("worker: find or create recipe", "dish_id", dishID, "err", recipeError)
				return
			}
			mu.Lock()
			result.Candidates[candidateIndex].RecipeID = &recipeID
			mu.Unlock()
		}(index)
	}
	wg.Wait()

	// Transition to done; again logged but non-fatal, mirroring the processing
	// transition above.
	if updateError := pool.jobRepo.UpdateJobStatus(workerContext, jobID, JobStatusDone, result, nil); updateError != nil {
		slog.Error("worker: set done status", "job_id", jobID, "err", updateError)
	}
	if notifyError := pool.jobRepo.NotifyJobUpdate(workerContext, jobID); notifyError != nil {
		slog.Warn("worker: notify done", "job_id", jobID, "err", notifyError)
	}
}
|
||||||
|
|
||||||
|
// enrichDishInBackground translates a newly created dish name into all supported languages.
|
||||||
|
// Runs as a fire-and-forget goroutine so it never blocks recognition.
|
||||||
|
func enrichDishInBackground(recognizer Recognizer, dishRepo DishRepository, dishID, dishName string) {
|
||||||
|
enrichContext := context.Background()
|
||||||
|
translations, translateError := recognizer.TranslateDishName(enrichContext, dishName)
|
||||||
|
if translateError != nil {
|
||||||
|
slog.Warn("translate dish name", "name", dishName, "err", translateError)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
for lang, translatedName := range translations {
|
||||||
|
if upsertError := dishRepo.UpsertTranslation(enrichContext, dishID, lang, translatedName); upsertError != nil {
|
||||||
|
slog.Warn("upsert dish translation", "dish_id", dishID, "lang", lang, "err", upsertError)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -24,6 +24,9 @@ type Config struct {
|
|||||||
// External APIs
|
// External APIs
|
||||||
OpenAIAPIKey string `envconfig:"OPENAI_API_KEY" required:"true"`
|
OpenAIAPIKey string `envconfig:"OPENAI_API_KEY" required:"true"`
|
||||||
PexelsAPIKey string `envconfig:"PEXELS_API_KEY" required:"true"`
|
PexelsAPIKey string `envconfig:"PEXELS_API_KEY" required:"true"`
|
||||||
|
|
||||||
|
// Kafka
|
||||||
|
KafkaBrokers []string `envconfig:"KAFKA_BROKERS" default:"kafka:9092"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func Load() (*Config, error) {
|
func Load() (*Config, error) {
|
||||||
|
|||||||
@@ -120,6 +120,8 @@ func NewRouter(
|
|||||||
r.Post("/recognize-receipt", recognitionHandler.RecognizeReceipt)
|
r.Post("/recognize-receipt", recognitionHandler.RecognizeReceipt)
|
||||||
r.Post("/recognize-products", recognitionHandler.RecognizeProducts)
|
r.Post("/recognize-products", recognitionHandler.RecognizeProducts)
|
||||||
r.Post("/recognize-dish", recognitionHandler.RecognizeDish)
|
r.Post("/recognize-dish", recognitionHandler.RecognizeDish)
|
||||||
|
r.Get("/jobs/{id}", recognitionHandler.GetJob)
|
||||||
|
r.Get("/jobs/{id}/stream", recognitionHandler.GetJobStream)
|
||||||
r.Post("/generate-menu", menuHandler.GenerateMenu)
|
r.Post("/generate-menu", menuHandler.GenerateMenu)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -119,7 +119,7 @@ CREATE TABLE ingredient_category_translations (
|
|||||||
);
|
);
|
||||||
|
|
||||||
-- ---------------------------------------------------------------------------
|
-- ---------------------------------------------------------------------------
|
||||||
-- ingredients (canonical catalog — formerly ingredient_mappings)
|
-- ingredients (canonical catalog)
|
||||||
-- ---------------------------------------------------------------------------
|
-- ---------------------------------------------------------------------------
|
||||||
CREATE TABLE ingredients (
|
CREATE TABLE ingredients (
|
||||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v7(),
|
id UUID PRIMARY KEY DEFAULT uuid_generate_v7(),
|
||||||
@@ -151,7 +151,7 @@ CREATE TABLE ingredient_translations (
|
|||||||
CREATE INDEX idx_ingredient_translations_ingredient_id ON ingredient_translations (ingredient_id);
|
CREATE INDEX idx_ingredient_translations_ingredient_id ON ingredient_translations (ingredient_id);
|
||||||
|
|
||||||
-- ---------------------------------------------------------------------------
|
-- ---------------------------------------------------------------------------
|
||||||
-- ingredient_aliases (relational, replaces JSONB aliases column)
|
-- ingredient_aliases
|
||||||
-- ---------------------------------------------------------------------------
|
-- ---------------------------------------------------------------------------
|
||||||
CREATE TABLE ingredient_aliases (
|
CREATE TABLE ingredient_aliases (
|
||||||
ingredient_id UUID NOT NULL REFERENCES ingredients(id) ON DELETE CASCADE,
|
ingredient_id UUID NOT NULL REFERENCES ingredients(id) ON DELETE CASCADE,
|
||||||
@@ -269,7 +269,7 @@ CREATE INDEX idx_recipes_calories ON recipes (calories_per_serving);
|
|||||||
CREATE INDEX idx_recipes_source ON recipes (source);
|
CREATE INDEX idx_recipes_source ON recipes (source);
|
||||||
|
|
||||||
-- ---------------------------------------------------------------------------
|
-- ---------------------------------------------------------------------------
|
||||||
-- recipe_translations (per-language cooking notes only)
|
-- recipe_translations
|
||||||
-- ---------------------------------------------------------------------------
|
-- ---------------------------------------------------------------------------
|
||||||
CREATE TABLE recipe_translations (
|
CREATE TABLE recipe_translations (
|
||||||
recipe_id UUID NOT NULL REFERENCES recipes(id) ON DELETE CASCADE,
|
recipe_id UUID NOT NULL REFERENCES recipes(id) ON DELETE CASCADE,
|
||||||
@@ -348,7 +348,7 @@ CREATE TABLE product_ingredients (
|
|||||||
);
|
);
|
||||||
|
|
||||||
-- ---------------------------------------------------------------------------
|
-- ---------------------------------------------------------------------------
|
||||||
-- user_saved_recipes (thin bookmark — content lives in dishes + recipes)
|
-- user_saved_recipes
|
||||||
-- ---------------------------------------------------------------------------
|
-- ---------------------------------------------------------------------------
|
||||||
CREATE TABLE user_saved_recipes (
|
CREATE TABLE user_saved_recipes (
|
||||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v7(),
|
id UUID PRIMARY KEY DEFAULT uuid_generate_v7(),
|
||||||
@@ -407,7 +407,205 @@ CREATE TABLE meal_diary (
|
|||||||
);
|
);
|
||||||
CREATE INDEX idx_meal_diary_user_date ON meal_diary (user_id, date);
|
CREATE INDEX idx_meal_diary_user_date ON meal_diary (user_id, date);
|
||||||
|
|
||||||
|
-- ---------------------------------------------------------------------------
|
||||||
|
-- recognition_jobs
|
||||||
|
-- ---------------------------------------------------------------------------
|
||||||
|
CREATE TABLE recognition_jobs (
|
||||||
|
id UUID PRIMARY KEY DEFAULT uuid_generate_v7(),
|
||||||
|
user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||||
|
user_plan TEXT NOT NULL,
|
||||||
|
image_base64 TEXT NOT NULL,
|
||||||
|
mime_type TEXT NOT NULL DEFAULT 'image/jpeg',
|
||||||
|
lang TEXT NOT NULL DEFAULT 'en',
|
||||||
|
status TEXT NOT NULL DEFAULT 'pending',
|
||||||
|
-- pending | processing | done | failed
|
||||||
|
result JSONB,
|
||||||
|
error TEXT,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||||
|
started_at TIMESTAMPTZ,
|
||||||
|
completed_at TIMESTAMPTZ
|
||||||
|
);
|
||||||
|
CREATE INDEX idx_recognition_jobs_user ON recognition_jobs (user_id, created_at DESC);
|
||||||
|
CREATE INDEX idx_recognition_jobs_pending ON recognition_jobs (status, user_plan, created_at ASC);
|
||||||
|
|
||||||
|
-- ---------------------------------------------------------------------------
|
||||||
|
-- Seed data: languages
|
||||||
|
-- ---------------------------------------------------------------------------
|
||||||
|
INSERT INTO languages (code, native_name, english_name, sort_order) VALUES
|
||||||
|
('en', 'English', 'English', 1),
|
||||||
|
('ru', 'Русский', 'Russian', 2),
|
||||||
|
('es', 'Español', 'Spanish', 3),
|
||||||
|
('de', 'Deutsch', 'German', 4),
|
||||||
|
('fr', 'Français', 'French', 5),
|
||||||
|
('it', 'Italiano', 'Italian', 6),
|
||||||
|
('pt', 'Português', 'Portuguese', 7),
|
||||||
|
('zh', '中文', 'Chinese (Simplified)', 8),
|
||||||
|
('ja', '日本語', 'Japanese', 9),
|
||||||
|
('ko', '한국어', 'Korean', 10),
|
||||||
|
('ar', 'العربية', 'Arabic', 11),
|
||||||
|
('hi', 'हिन्दी', 'Hindi', 12);
|
||||||
|
|
||||||
|
-- ---------------------------------------------------------------------------
|
||||||
|
-- Seed data: units + unit_translations
|
||||||
|
-- ---------------------------------------------------------------------------
|
||||||
|
INSERT INTO units (code, sort_order) VALUES
|
||||||
|
('g', 1),
|
||||||
|
('kg', 2),
|
||||||
|
('ml', 3),
|
||||||
|
('l', 4),
|
||||||
|
('pcs', 5),
|
||||||
|
('pack', 6);
|
||||||
|
|
||||||
|
INSERT INTO unit_translations (unit_code, lang, name) VALUES
|
||||||
|
('g', 'ru', 'г'),
|
||||||
|
('kg', 'ru', 'кг'),
|
||||||
|
('ml', 'ru', 'мл'),
|
||||||
|
('l', 'ru', 'л'),
|
||||||
|
('pcs', 'ru', 'шт'),
|
||||||
|
('pack', 'ru', 'уп');
|
||||||
|
|
||||||
|
-- ---------------------------------------------------------------------------
|
||||||
|
-- Seed data: ingredient_categories + ingredient_category_translations
|
||||||
|
-- ---------------------------------------------------------------------------
|
||||||
|
INSERT INTO ingredient_categories (slug, sort_order) VALUES
|
||||||
|
('dairy', 1),
|
||||||
|
('meat', 2),
|
||||||
|
('produce', 3),
|
||||||
|
('bakery', 4),
|
||||||
|
('frozen', 5),
|
||||||
|
('beverages', 6),
|
||||||
|
('other', 7);
|
||||||
|
|
||||||
|
INSERT INTO ingredient_category_translations (category_slug, lang, name) VALUES
|
||||||
|
('dairy', 'ru', 'Молочные продукты'),
|
||||||
|
('meat', 'ru', 'Мясо и птица'),
|
||||||
|
('produce', 'ru', 'Овощи и фрукты'),
|
||||||
|
('bakery', 'ru', 'Выпечка и хлеб'),
|
||||||
|
('frozen', 'ru', 'Замороженные'),
|
||||||
|
('beverages', 'ru', 'Напитки'),
|
||||||
|
('other', 'ru', 'Прочее');
|
||||||
|
|
||||||
|
-- ---------------------------------------------------------------------------
|
||||||
|
-- Seed data: cuisines + cuisine_translations
|
||||||
|
-- ---------------------------------------------------------------------------
|
||||||
|
INSERT INTO cuisines (slug, name, sort_order) VALUES
|
||||||
|
('italian', 'Italian', 1),
|
||||||
|
('french', 'French', 2),
|
||||||
|
('russian', 'Russian', 3),
|
||||||
|
('chinese', 'Chinese', 4),
|
||||||
|
('japanese', 'Japanese', 5),
|
||||||
|
('korean', 'Korean', 6),
|
||||||
|
('mexican', 'Mexican', 7),
|
||||||
|
('mediterranean', 'Mediterranean', 8),
|
||||||
|
('indian', 'Indian', 9),
|
||||||
|
('thai', 'Thai', 10),
|
||||||
|
('american', 'American', 11),
|
||||||
|
('georgian', 'Georgian', 12),
|
||||||
|
('spanish', 'Spanish', 13),
|
||||||
|
('german', 'German', 14),
|
||||||
|
('middle_eastern', 'Middle Eastern', 15),
|
||||||
|
('turkish', 'Turkish', 16),
|
||||||
|
('greek', 'Greek', 17),
|
||||||
|
('vietnamese', 'Vietnamese', 18),
|
||||||
|
('other', 'Other', 19);
|
||||||
|
|
||||||
|
INSERT INTO cuisine_translations (cuisine_slug, lang, name) VALUES
|
||||||
|
('italian', 'ru', 'Итальянская'),
|
||||||
|
('french', 'ru', 'Французская'),
|
||||||
|
('russian', 'ru', 'Русская'),
|
||||||
|
('chinese', 'ru', 'Китайская'),
|
||||||
|
('japanese', 'ru', 'Японская'),
|
||||||
|
('korean', 'ru', 'Корейская'),
|
||||||
|
('mexican', 'ru', 'Мексиканская'),
|
||||||
|
('mediterranean', 'ru', 'Средиземноморская'),
|
||||||
|
('indian', 'ru', 'Индийская'),
|
||||||
|
('thai', 'ru', 'Тайская'),
|
||||||
|
('american', 'ru', 'Американская'),
|
||||||
|
('georgian', 'ru', 'Грузинская'),
|
||||||
|
('spanish', 'ru', 'Испанская'),
|
||||||
|
('german', 'ru', 'Немецкая'),
|
||||||
|
('middle_eastern', 'ru', 'Ближневосточная'),
|
||||||
|
('turkish', 'ru', 'Турецкая'),
|
||||||
|
('greek', 'ru', 'Греческая'),
|
||||||
|
('vietnamese', 'ru', 'Вьетнамская'),
|
||||||
|
('other', 'ru', 'Другая');
|
||||||
|
|
||||||
|
-- ---------------------------------------------------------------------------
|
||||||
|
-- Seed data: tags + tag_translations
|
||||||
|
-- ---------------------------------------------------------------------------
|
||||||
|
INSERT INTO tags (slug, name, sort_order) VALUES
|
||||||
|
('vegan', 'Vegan', 1),
|
||||||
|
('vegetarian', 'Vegetarian', 2),
|
||||||
|
('gluten_free', 'Gluten-Free', 3),
|
||||||
|
('dairy_free', 'Dairy-Free', 4),
|
||||||
|
('healthy', 'Healthy', 5),
|
||||||
|
('quick', 'Quick', 6),
|
||||||
|
('spicy', 'Spicy', 7),
|
||||||
|
('sweet', 'Sweet', 8),
|
||||||
|
('soup', 'Soup', 9),
|
||||||
|
('salad', 'Salad', 10),
|
||||||
|
('main_course', 'Main Course', 11),
|
||||||
|
('appetizer', 'Appetizer', 12),
|
||||||
|
('breakfast', 'Breakfast', 13),
|
||||||
|
('dessert', 'Dessert', 14),
|
||||||
|
('grilled', 'Grilled', 15),
|
||||||
|
('baked', 'Baked', 16),
|
||||||
|
('fried', 'Fried', 17),
|
||||||
|
('raw', 'Raw', 18),
|
||||||
|
('fermented', 'Fermented', 19);
|
||||||
|
|
||||||
|
INSERT INTO tag_translations (tag_slug, lang, name) VALUES
|
||||||
|
('vegan', 'ru', 'Веганское'),
|
||||||
|
('vegetarian', 'ru', 'Вегетарианское'),
|
||||||
|
('gluten_free', 'ru', 'Без глютена'),
|
||||||
|
('dairy_free', 'ru', 'Без молока'),
|
||||||
|
('healthy', 'ru', 'Здоровое'),
|
||||||
|
('quick', 'ru', 'Быстрое'),
|
||||||
|
('spicy', 'ru', 'Острое'),
|
||||||
|
('sweet', 'ru', 'Сладкое'),
|
||||||
|
('soup', 'ru', 'Суп'),
|
||||||
|
('salad', 'ru', 'Салат'),
|
||||||
|
('main_course', 'ru', 'Основное блюдо'),
|
||||||
|
('appetizer', 'ru', 'Закуска'),
|
||||||
|
('breakfast', 'ru', 'Завтрак'),
|
||||||
|
('dessert', 'ru', 'Десерт'),
|
||||||
|
('grilled', 'ru', 'Жареное на гриле'),
|
||||||
|
('baked', 'ru', 'Запечённое'),
|
||||||
|
('fried', 'ru', 'Жареное'),
|
||||||
|
('raw', 'ru', 'Сырое'),
|
||||||
|
('fermented', 'ru', 'Ферментированное');
|
||||||
|
|
||||||
|
-- ---------------------------------------------------------------------------
|
||||||
|
-- Seed data: dish_categories + dish_category_translations
|
||||||
|
-- ---------------------------------------------------------------------------
|
||||||
|
INSERT INTO dish_categories (slug, name, sort_order) VALUES
|
||||||
|
('soup', 'Soup', 1),
|
||||||
|
('salad', 'Salad', 2),
|
||||||
|
('main_course', 'Main Course', 3),
|
||||||
|
('side_dish', 'Side Dish', 4),
|
||||||
|
('appetizer', 'Appetizer', 5),
|
||||||
|
('dessert', 'Dessert', 6),
|
||||||
|
('breakfast', 'Breakfast', 7),
|
||||||
|
('drink', 'Drink', 8),
|
||||||
|
('bread', 'Bread', 9),
|
||||||
|
('sauce', 'Sauce', 10),
|
||||||
|
('snack', 'Snack', 11);
|
||||||
|
|
||||||
|
INSERT INTO dish_category_translations (category_slug, lang, name) VALUES
|
||||||
|
('soup', 'ru', 'Суп'),
|
||||||
|
('salad', 'ru', 'Салат'),
|
||||||
|
('main_course', 'ru', 'Основное блюдо'),
|
||||||
|
('side_dish', 'ru', 'Гарнир'),
|
||||||
|
('appetizer', 'ru', 'Закуска'),
|
||||||
|
('dessert', 'ru', 'Десерт'),
|
||||||
|
('breakfast', 'ru', 'Завтрак'),
|
||||||
|
('drink', 'ru', 'Напиток'),
|
||||||
|
('bread', 'ru', 'Выпечка'),
|
||||||
|
('sauce', 'ru', 'Соус'),
|
||||||
|
('snack', 'ru', 'Снэк');
|
||||||
|
|
||||||
-- +goose Down
|
-- +goose Down
|
||||||
|
DROP TABLE IF EXISTS recognition_jobs;
|
||||||
DROP TABLE IF EXISTS meal_diary;
|
DROP TABLE IF EXISTS meal_diary;
|
||||||
DROP TABLE IF EXISTS shopping_lists;
|
DROP TABLE IF EXISTS shopping_lists;
|
||||||
DROP TABLE IF EXISTS menu_items;
|
DROP TABLE IF EXISTS menu_items;
|
||||||
@@ -447,3 +645,4 @@ DROP TYPE IF EXISTS user_gender;
|
|||||||
DROP TYPE IF EXISTS user_plan;
|
DROP TYPE IF EXISTS user_plan;
|
||||||
DROP FUNCTION IF EXISTS uuid_generate_v7();
|
DROP FUNCTION IF EXISTS uuid_generate_v7();
|
||||||
DROP EXTENSION IF EXISTS pg_trgm;
|
DROP EXTENSION IF EXISTS pg_trgm;
|
||||||
|
DROP EXTENSION IF EXISTS pgcrypto;
|
||||||
|
|||||||
@@ -1,190 +0,0 @@
|
|||||||
-- +goose Up
|
|
||||||
|
|
||||||
-- ---------------------------------------------------------------------------
|
|
||||||
-- languages
|
|
||||||
-- ---------------------------------------------------------------------------
|
|
||||||
INSERT INTO languages (code, native_name, english_name, sort_order) VALUES
|
|
||||||
('en', 'English', 'English', 1),
|
|
||||||
('ru', 'Русский', 'Russian', 2),
|
|
||||||
('es', 'Español', 'Spanish', 3),
|
|
||||||
('de', 'Deutsch', 'German', 4),
|
|
||||||
('fr', 'Français', 'French', 5),
|
|
||||||
('it', 'Italiano', 'Italian', 6),
|
|
||||||
('pt', 'Português', 'Portuguese', 7),
|
|
||||||
('zh', '中文', 'Chinese (Simplified)', 8),
|
|
||||||
('ja', '日本語', 'Japanese', 9),
|
|
||||||
('ko', '한국어', 'Korean', 10),
|
|
||||||
('ar', 'العربية', 'Arabic', 11),
|
|
||||||
('hi', 'हिन्दी', 'Hindi', 12);
|
|
||||||
|
|
||||||
-- ---------------------------------------------------------------------------
|
|
||||||
-- units + unit_translations
|
|
||||||
-- ---------------------------------------------------------------------------
|
|
||||||
INSERT INTO units (code, sort_order) VALUES
|
|
||||||
('g', 1),
|
|
||||||
('kg', 2),
|
|
||||||
('ml', 3),
|
|
||||||
('l', 4),
|
|
||||||
('pcs', 5),
|
|
||||||
('pack', 6);
|
|
||||||
|
|
||||||
INSERT INTO unit_translations (unit_code, lang, name) VALUES
|
|
||||||
('g', 'ru', 'г'),
|
|
||||||
('kg', 'ru', 'кг'),
|
|
||||||
('ml', 'ru', 'мл'),
|
|
||||||
('l', 'ru', 'л'),
|
|
||||||
('pcs', 'ru', 'шт'),
|
|
||||||
('pack', 'ru', 'уп');
|
|
||||||
|
|
||||||
-- ---------------------------------------------------------------------------
|
|
||||||
-- ingredient_categories + ingredient_category_translations
|
|
||||||
-- ---------------------------------------------------------------------------
|
|
||||||
INSERT INTO ingredient_categories (slug, sort_order) VALUES
|
|
||||||
('dairy', 1),
|
|
||||||
('meat', 2),
|
|
||||||
('produce', 3),
|
|
||||||
('bakery', 4),
|
|
||||||
('frozen', 5),
|
|
||||||
('beverages', 6),
|
|
||||||
('other', 7);
|
|
||||||
|
|
||||||
INSERT INTO ingredient_category_translations (category_slug, lang, name) VALUES
|
|
||||||
('dairy', 'ru', 'Молочные продукты'),
|
|
||||||
('meat', 'ru', 'Мясо и птица'),
|
|
||||||
('produce', 'ru', 'Овощи и фрукты'),
|
|
||||||
('bakery', 'ru', 'Выпечка и хлеб'),
|
|
||||||
('frozen', 'ru', 'Замороженные'),
|
|
||||||
('beverages', 'ru', 'Напитки'),
|
|
||||||
('other', 'ru', 'Прочее');
|
|
||||||
|
|
||||||
-- ---------------------------------------------------------------------------
|
|
||||||
-- cuisines + cuisine_translations
|
|
||||||
-- ---------------------------------------------------------------------------
|
|
||||||
INSERT INTO cuisines (slug, name, sort_order) VALUES
|
|
||||||
('italian', 'Italian', 1),
|
|
||||||
('french', 'French', 2),
|
|
||||||
('russian', 'Russian', 3),
|
|
||||||
('chinese', 'Chinese', 4),
|
|
||||||
('japanese', 'Japanese', 5),
|
|
||||||
('korean', 'Korean', 6),
|
|
||||||
('mexican', 'Mexican', 7),
|
|
||||||
('mediterranean', 'Mediterranean', 8),
|
|
||||||
('indian', 'Indian', 9),
|
|
||||||
('thai', 'Thai', 10),
|
|
||||||
('american', 'American', 11),
|
|
||||||
('georgian', 'Georgian', 12),
|
|
||||||
('spanish', 'Spanish', 13),
|
|
||||||
('german', 'German', 14),
|
|
||||||
('middle_eastern', 'Middle Eastern', 15),
|
|
||||||
('turkish', 'Turkish', 16),
|
|
||||||
('greek', 'Greek', 17),
|
|
||||||
('vietnamese', 'Vietnamese', 18),
|
|
||||||
('other', 'Other', 19);
|
|
||||||
|
|
||||||
INSERT INTO cuisine_translations (cuisine_slug, lang, name) VALUES
|
|
||||||
('italian', 'ru', 'Итальянская'),
|
|
||||||
('french', 'ru', 'Французская'),
|
|
||||||
('russian', 'ru', 'Русская'),
|
|
||||||
('chinese', 'ru', 'Китайская'),
|
|
||||||
('japanese', 'ru', 'Японская'),
|
|
||||||
('korean', 'ru', 'Корейская'),
|
|
||||||
('mexican', 'ru', 'Мексиканская'),
|
|
||||||
('mediterranean', 'ru', 'Средиземноморская'),
|
|
||||||
('indian', 'ru', 'Индийская'),
|
|
||||||
('thai', 'ru', 'Тайская'),
|
|
||||||
('american', 'ru', 'Американская'),
|
|
||||||
('georgian', 'ru', 'Грузинская'),
|
|
||||||
('spanish', 'ru', 'Испанская'),
|
|
||||||
('german', 'ru', 'Немецкая'),
|
|
||||||
('middle_eastern', 'ru', 'Ближневосточная'),
|
|
||||||
('turkish', 'ru', 'Турецкая'),
|
|
||||||
('greek', 'ru', 'Греческая'),
|
|
||||||
('vietnamese', 'ru', 'Вьетнамская'),
|
|
||||||
('other', 'ru', 'Другая');
|
|
||||||
|
|
||||||
-- ---------------------------------------------------------------------------
|
|
||||||
-- tags + tag_translations
|
|
||||||
-- ---------------------------------------------------------------------------
|
|
||||||
INSERT INTO tags (slug, name, sort_order) VALUES
|
|
||||||
('vegan', 'Vegan', 1),
|
|
||||||
('vegetarian', 'Vegetarian', 2),
|
|
||||||
('gluten_free', 'Gluten-Free', 3),
|
|
||||||
('dairy_free', 'Dairy-Free', 4),
|
|
||||||
('healthy', 'Healthy', 5),
|
|
||||||
('quick', 'Quick', 6),
|
|
||||||
('spicy', 'Spicy', 7),
|
|
||||||
('sweet', 'Sweet', 8),
|
|
||||||
('soup', 'Soup', 9),
|
|
||||||
('salad', 'Salad', 10),
|
|
||||||
('main_course', 'Main Course', 11),
|
|
||||||
('appetizer', 'Appetizer', 12),
|
|
||||||
('breakfast', 'Breakfast', 13),
|
|
||||||
('dessert', 'Dessert', 14),
|
|
||||||
('grilled', 'Grilled', 15),
|
|
||||||
('baked', 'Baked', 16),
|
|
||||||
('fried', 'Fried', 17),
|
|
||||||
('raw', 'Raw', 18),
|
|
||||||
('fermented', 'Fermented', 19);
|
|
||||||
|
|
||||||
INSERT INTO tag_translations (tag_slug, lang, name) VALUES
|
|
||||||
('vegan', 'ru', 'Веганское'),
|
|
||||||
('vegetarian', 'ru', 'Вегетарианское'),
|
|
||||||
('gluten_free', 'ru', 'Без глютена'),
|
|
||||||
('dairy_free', 'ru', 'Без молока'),
|
|
||||||
('healthy', 'ru', 'Здоровое'),
|
|
||||||
('quick', 'ru', 'Быстрое'),
|
|
||||||
('spicy', 'ru', 'Острое'),
|
|
||||||
('sweet', 'ru', 'Сладкое'),
|
|
||||||
('soup', 'ru', 'Суп'),
|
|
||||||
('salad', 'ru', 'Салат'),
|
|
||||||
('main_course', 'ru', 'Основное блюдо'),
|
|
||||||
('appetizer', 'ru', 'Закуска'),
|
|
||||||
('breakfast', 'ru', 'Завтрак'),
|
|
||||||
('dessert', 'ru', 'Десерт'),
|
|
||||||
('grilled', 'ru', 'Жареное на гриле'),
|
|
||||||
('baked', 'ru', 'Запечённое'),
|
|
||||||
('fried', 'ru', 'Жареное'),
|
|
||||||
('raw', 'ru', 'Сырое'),
|
|
||||||
('fermented', 'ru', 'Ферментированное');
|
|
||||||
|
|
||||||
-- ---------------------------------------------------------------------------
|
|
||||||
-- dish_categories + dish_category_translations
|
|
||||||
-- ---------------------------------------------------------------------------
|
|
||||||
INSERT INTO dish_categories (slug, name, sort_order) VALUES
|
|
||||||
('soup', 'Soup', 1),
|
|
||||||
('salad', 'Salad', 2),
|
|
||||||
('main_course', 'Main Course', 3),
|
|
||||||
('side_dish', 'Side Dish', 4),
|
|
||||||
('appetizer', 'Appetizer', 5),
|
|
||||||
('dessert', 'Dessert', 6),
|
|
||||||
('breakfast', 'Breakfast', 7),
|
|
||||||
('drink', 'Drink', 8),
|
|
||||||
('bread', 'Bread', 9),
|
|
||||||
('sauce', 'Sauce', 10),
|
|
||||||
('snack', 'Snack', 11);
|
|
||||||
|
|
||||||
INSERT INTO dish_category_translations (category_slug, lang, name) VALUES
|
|
||||||
('soup', 'ru', 'Суп'),
|
|
||||||
('salad', 'ru', 'Салат'),
|
|
||||||
('main_course', 'ru', 'Основное блюдо'),
|
|
||||||
('side_dish', 'ru', 'Гарнир'),
|
|
||||||
('appetizer', 'ru', 'Закуска'),
|
|
||||||
('dessert', 'ru', 'Десерт'),
|
|
||||||
('breakfast', 'ru', 'Завтрак'),
|
|
||||||
('drink', 'ru', 'Напиток'),
|
|
||||||
('bread', 'ru', 'Выпечка'),
|
|
||||||
('sauce', 'ru', 'Соус'),
|
|
||||||
('snack', 'ru', 'Снэк');
|
|
||||||
|
|
||||||
-- +goose Down
|
|
||||||
DELETE FROM dish_category_translations;
|
|
||||||
DELETE FROM dish_categories;
|
|
||||||
DELETE FROM tag_translations;
|
|
||||||
DELETE FROM tags;
|
|
||||||
DELETE FROM cuisine_translations;
|
|
||||||
DELETE FROM cuisines;
|
|
||||||
DELETE FROM ingredient_category_translations;
|
|
||||||
DELETE FROM ingredient_categories;
|
|
||||||
DELETE FROM unit_translations;
|
|
||||||
DELETE FROM units;
|
|
||||||
DELETE FROM languages;
|
|
||||||
@@ -735,7 +735,7 @@ Future<void> _pickAndShowDishResult(
|
|||||||
WidgetRef ref,
|
WidgetRef ref,
|
||||||
String mealTypeId,
|
String mealTypeId,
|
||||||
) async {
|
) async {
|
||||||
// 1. Choose image source
|
// 1. Choose image source.
|
||||||
final source = await showModalBottomSheet<ImageSource>(
|
final source = await showModalBottomSheet<ImageSource>(
|
||||||
context: context,
|
context: context,
|
||||||
builder: (_) => SafeArea(
|
builder: (_) => SafeArea(
|
||||||
@@ -758,7 +758,7 @@ Future<void> _pickAndShowDishResult(
|
|||||||
);
|
);
|
||||||
if (source == null || !context.mounted) return;
|
if (source == null || !context.mounted) return;
|
||||||
|
|
||||||
// 2. Pick image
|
// 2. Pick image.
|
||||||
final image = await ImagePicker().pickImage(
|
final image = await ImagePicker().pickImage(
|
||||||
source: source,
|
source: source,
|
||||||
imageQuality: 70,
|
imageQuality: 70,
|
||||||
@@ -767,47 +767,66 @@ Future<void> _pickAndShowDishResult(
|
|||||||
);
|
);
|
||||||
if (image == null || !context.mounted) return;
|
if (image == null || !context.mounted) return;
|
||||||
|
|
||||||
// 3. Show loading
|
// 3. Show progress dialog.
|
||||||
// Capture root navigator now (before await) to avoid using the wrong one later.
|
// Capture root navigator before await to avoid GoRouter inner-navigator issues.
|
||||||
// showDialog defaults to useRootNavigator: true; Navigator.pop(context) would resolve
|
|
||||||
// to GoRouter's inner navigator instead, which only has /home and would crash.
|
|
||||||
final rootNavigator = Navigator.of(context, rootNavigator: true);
|
final rootNavigator = Navigator.of(context, rootNavigator: true);
|
||||||
|
final progressNotifier = _DishProgressNotifier();
|
||||||
showDialog(
|
showDialog(
|
||||||
context: context,
|
context: context,
|
||||||
barrierDismissible: false,
|
barrierDismissible: false,
|
||||||
builder: (_) => const AlertDialog(
|
builder: (_) => _DishProgressDialog(notifier: progressNotifier),
|
||||||
content: Column(
|
|
||||||
mainAxisSize: MainAxisSize.min,
|
|
||||||
children: [
|
|
||||||
CircularProgressIndicator(),
|
|
||||||
SizedBox(height: 16),
|
|
||||||
Text('Распознаём...'),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
),
|
|
||||||
);
|
);
|
||||||
|
|
||||||
// 4. Call API
|
// 4. Submit image and listen to SSE stream.
|
||||||
|
final service = ref.read(recognitionServiceProvider);
|
||||||
try {
|
try {
|
||||||
final dish = await ref.read(recognitionServiceProvider).recognizeDish(image);
|
final jobCreated = await service.submitDishRecognition(image);
|
||||||
if (!context.mounted) return;
|
if (!context.mounted) return;
|
||||||
rootNavigator.pop(); // close loading
|
|
||||||
|
|
||||||
// 5. Show result as bottom sheet
|
await for (final event in service.streamJobEvents(jobCreated.jobId)) {
|
||||||
showModalBottomSheet(
|
if (!context.mounted) break;
|
||||||
context: context,
|
|
||||||
isScrollControlled: true,
|
switch (event) {
|
||||||
useSafeArea: true,
|
case DishJobQueued():
|
||||||
builder: (sheetContext) => DishResultSheet(
|
progressNotifier.update(
|
||||||
dish: dish,
|
message: 'Вы в очереди #${event.position + 1} · ~${event.estimatedSeconds} сек',
|
||||||
preselectedMealType: mealTypeId,
|
showUpgrade: event.position > 0,
|
||||||
onAdded: () => Navigator.pop(sheetContext),
|
);
|
||||||
),
|
case DishJobProcessing():
|
||||||
);
|
progressNotifier.update(message: 'Обрабатываем...');
|
||||||
|
case DishJobDone():
|
||||||
|
rootNavigator.pop(); // close dialog
|
||||||
|
if (!context.mounted) return;
|
||||||
|
showModalBottomSheet(
|
||||||
|
context: context,
|
||||||
|
isScrollControlled: true,
|
||||||
|
useSafeArea: true,
|
||||||
|
builder: (sheetContext) => DishResultSheet(
|
||||||
|
dish: event.result,
|
||||||
|
preselectedMealType: mealTypeId,
|
||||||
|
onAdded: () => Navigator.pop(sheetContext),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
case DishJobFailed():
|
||||||
|
rootNavigator.pop(); // close dialog
|
||||||
|
if (!context.mounted) return;
|
||||||
|
ScaffoldMessenger.of(context).showSnackBar(
|
||||||
|
SnackBar(
|
||||||
|
content: Text(event.error),
|
||||||
|
action: SnackBarAction(
|
||||||
|
label: 'Повторить',
|
||||||
|
onPressed: () => _pickAndShowDishResult(context, ref, mealTypeId),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
} catch (recognitionError) {
|
} catch (recognitionError) {
|
||||||
debugPrint('Dish recognition error: $recognitionError');
|
debugPrint('Dish recognition error: $recognitionError');
|
||||||
if (context.mounted) {
|
if (context.mounted) {
|
||||||
rootNavigator.pop(); // close loading
|
rootNavigator.pop(); // close dialog
|
||||||
ScaffoldMessenger.of(context).showSnackBar(
|
ScaffoldMessenger.of(context).showSnackBar(
|
||||||
const SnackBar(
|
const SnackBar(
|
||||||
content: Text('Не удалось распознать. Попробуйте ещё раз.'),
|
content: Text('Не удалось распознать. Попробуйте ещё раз.'),
|
||||||
@@ -817,6 +836,67 @@ Future<void> _pickAndShowDishResult(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Async recognition progress dialog
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class _DishProgressState {
|
||||||
|
final String message;
|
||||||
|
final bool showUpgrade;
|
||||||
|
|
||||||
|
const _DishProgressState({
|
||||||
|
required this.message,
|
||||||
|
this.showUpgrade = false,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
class _DishProgressNotifier extends ChangeNotifier {
|
||||||
|
_DishProgressState _state = const _DishProgressState(message: 'Анализируем фото...');
|
||||||
|
|
||||||
|
_DishProgressState get state => _state;
|
||||||
|
|
||||||
|
void update({required String message, bool showUpgrade = false}) {
|
||||||
|
_state = _DishProgressState(message: message, showUpgrade: showUpgrade);
|
||||||
|
notifyListeners();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class _DishProgressDialog extends StatelessWidget {
|
||||||
|
final _DishProgressNotifier notifier;
|
||||||
|
|
||||||
|
const _DishProgressDialog({required this.notifier});
|
||||||
|
|
||||||
|
@override
|
||||||
|
Widget build(BuildContext context) {
|
||||||
|
return ListenableBuilder(
|
||||||
|
listenable: notifier,
|
||||||
|
builder: (context, _) {
|
||||||
|
final state = notifier.state;
|
||||||
|
return AlertDialog(
|
||||||
|
content: Column(
|
||||||
|
mainAxisSize: MainAxisSize.min,
|
||||||
|
children: [
|
||||||
|
const CircularProgressIndicator(),
|
||||||
|
const SizedBox(height: 16),
|
||||||
|
Text(state.message, textAlign: TextAlign.center),
|
||||||
|
if (state.showUpgrade) ...[
|
||||||
|
const SizedBox(height: 12),
|
||||||
|
Text(
|
||||||
|
'Хотите без очереди? Upgrade →',
|
||||||
|
style: Theme.of(context).textTheme.bodySmall?.copyWith(
|
||||||
|
color: Theme.of(context).colorScheme.primary,
|
||||||
|
),
|
||||||
|
textAlign: TextAlign.center,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
],
|
||||||
|
),
|
||||||
|
);
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
class _MealCard extends ConsumerWidget {
|
class _MealCard extends ConsumerWidget {
|
||||||
final MealTypeOption mealTypeOption;
|
final MealTypeOption mealTypeOption;
|
||||||
final List<DiaryEntry> entries;
|
final List<DiaryEntry> entries;
|
||||||
|
|||||||
@@ -1,10 +1,15 @@
|
|||||||
|
import 'dart:async';
|
||||||
import 'dart:convert';
|
import 'dart:convert';
|
||||||
|
|
||||||
|
import 'package:dio/dio.dart';
|
||||||
import 'package:flutter_riverpod/flutter_riverpod.dart';
|
import 'package:flutter_riverpod/flutter_riverpod.dart';
|
||||||
import 'package:image_picker/image_picker.dart';
|
import 'package:image_picker/image_picker.dart';
|
||||||
|
|
||||||
import '../../core/api/api_client.dart';
|
import '../../core/api/api_client.dart';
|
||||||
import '../../core/auth/auth_provider.dart';
|
import '../../core/auth/auth_provider.dart';
|
||||||
|
import '../../core/auth/secure_storage.dart';
|
||||||
|
import '../../core/config/app_config.dart';
|
||||||
|
import '../../core/locale/language_provider.dart';
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// Models
|
// Models
|
||||||
@@ -135,14 +140,68 @@ class DishResult {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Async job models
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// The 202 response from POST /ai/recognize-dish.
|
||||||
|
class DishJobCreated {
|
||||||
|
final String jobId;
|
||||||
|
final int queuePosition;
|
||||||
|
final int estimatedSeconds;
|
||||||
|
|
||||||
|
const DishJobCreated({
|
||||||
|
required this.jobId,
|
||||||
|
required this.queuePosition,
|
||||||
|
required this.estimatedSeconds,
|
||||||
|
});
|
||||||
|
|
||||||
|
factory DishJobCreated.fromJson(Map<String, dynamic> json) {
|
||||||
|
return DishJobCreated(
|
||||||
|
jobId: json['job_id'] as String,
|
||||||
|
queuePosition: json['queue_position'] as int? ?? 0,
|
||||||
|
estimatedSeconds: json['estimated_seconds'] as int? ?? 0,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Events emitted by the SSE stream for a dish recognition job.
|
||||||
|
sealed class DishJobEvent {}
|
||||||
|
|
||||||
|
class DishJobQueued extends DishJobEvent {
|
||||||
|
final int position;
|
||||||
|
final int estimatedSeconds;
|
||||||
|
DishJobQueued({required this.position, required this.estimatedSeconds});
|
||||||
|
}
|
||||||
|
|
||||||
|
class DishJobProcessing extends DishJobEvent {}
|
||||||
|
|
||||||
|
class DishJobDone extends DishJobEvent {
|
||||||
|
final DishResult result;
|
||||||
|
DishJobDone(this.result);
|
||||||
|
}
|
||||||
|
|
||||||
|
class DishJobFailed extends DishJobEvent {
|
||||||
|
final String error;
|
||||||
|
DishJobFailed(this.error);
|
||||||
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// Service
|
// Service
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
class RecognitionService {
|
class RecognitionService {
|
||||||
const RecognitionService(this._client);
|
const RecognitionService(
|
||||||
|
this._client,
|
||||||
|
this._storage,
|
||||||
|
this._appConfig,
|
||||||
|
this._languageGetter,
|
||||||
|
);
|
||||||
|
|
||||||
final ApiClient _client;
|
final ApiClient _client;
|
||||||
|
final SecureStorageService _storage;
|
||||||
|
final AppConfig _appConfig;
|
||||||
|
final String Function() _languageGetter;
|
||||||
|
|
||||||
/// Recognizes food items from a receipt photo.
|
/// Recognizes food items from a receipt photo.
|
||||||
Future<ReceiptResult> recognizeReceipt(XFile image) async {
|
Future<ReceiptResult> recognizeReceipt(XFile image) async {
|
||||||
@@ -150,10 +209,10 @@ class RecognitionService {
|
|||||||
final data = await _client.post('/ai/recognize-receipt', data: payload);
|
final data = await _client.post('/ai/recognize-receipt', data: payload);
|
||||||
return ReceiptResult(
|
return ReceiptResult(
|
||||||
items: (data['items'] as List<dynamic>? ?? [])
|
items: (data['items'] as List<dynamic>? ?? [])
|
||||||
.map((e) => RecognizedItem.fromJson(e as Map<String, dynamic>))
|
.map((element) => RecognizedItem.fromJson(element as Map<String, dynamic>))
|
||||||
.toList(),
|
.toList(),
|
||||||
unrecognized: (data['unrecognized'] as List<dynamic>? ?? [])
|
unrecognized: (data['unrecognized'] as List<dynamic>? ?? [])
|
||||||
.map((e) => UnrecognizedItem.fromJson(e as Map<String, dynamic>))
|
.map((element) => UnrecognizedItem.fromJson(element as Map<String, dynamic>))
|
||||||
.toList(),
|
.toList(),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -166,15 +225,102 @@ class RecognitionService {
|
|||||||
data: {'images': imageList},
|
data: {'images': imageList},
|
||||||
);
|
);
|
||||||
return (data['items'] as List<dynamic>? ?? [])
|
return (data['items'] as List<dynamic>? ?? [])
|
||||||
.map((e) => RecognizedItem.fromJson(e as Map<String, dynamic>))
|
.map((element) => RecognizedItem.fromJson(element as Map<String, dynamic>))
|
||||||
.toList();
|
.toList();
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Recognizes a dish and estimates its nutritional content.
|
/// Submits a dish image for async recognition.
|
||||||
Future<DishResult> recognizeDish(XFile image) async {
|
/// Returns a [DishJobCreated] with the job ID and queue position.
|
||||||
|
Future<DishJobCreated> submitDishRecognition(XFile image) async {
|
||||||
final payload = await _buildImagePayload(image);
|
final payload = await _buildImagePayload(image);
|
||||||
final data = await _client.post('/ai/recognize-dish', data: payload);
|
final data = await _client.post('/ai/recognize-dish', data: payload);
|
||||||
return DishResult.fromJson(data);
|
return DishJobCreated.fromJson(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Opens an SSE stream for job [jobId] and emits [DishJobEvent]s until the
|
||||||
|
/// job reaches a terminal state (done or failed) or the stream is cancelled.
|
||||||
|
Stream<DishJobEvent> streamJobEvents(String jobId) async* {
|
||||||
|
final token = await _storage.getAccessToken();
|
||||||
|
final language = _languageGetter();
|
||||||
|
final url = '${_appConfig.apiBaseUrl}/ai/jobs/$jobId/stream';
|
||||||
|
|
||||||
|
final dio = Dio(BaseOptions(
|
||||||
|
connectTimeout: const Duration(seconds: 30),
|
||||||
|
receiveTimeout: const Duration(minutes: 5),
|
||||||
|
));
|
||||||
|
|
||||||
|
final response = await dio.get<ResponseBody>(
|
||||||
|
url,
|
||||||
|
options: Options(
|
||||||
|
responseType: ResponseType.stream,
|
||||||
|
headers: {
|
||||||
|
'Authorization': token != null ? 'Bearer $token' : '',
|
||||||
|
'Accept': 'text/event-stream',
|
||||||
|
'Accept-Language': language,
|
||||||
|
'Cache-Control': 'no-cache',
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
final stream = response.data!.stream;
|
||||||
|
final buffer = StringBuffer();
|
||||||
|
String? currentEventName;
|
||||||
|
|
||||||
|
await for (final chunk in stream.map(utf8.decode)) {
|
||||||
|
buffer.write(chunk);
|
||||||
|
final text = buffer.toString();
|
||||||
|
|
||||||
|
// Process complete SSE messages (terminated by \n\n).
|
||||||
|
int doubleNewlineIndex;
|
||||||
|
var remaining = text;
|
||||||
|
while ((doubleNewlineIndex = remaining.indexOf('\n\n')) != -1) {
|
||||||
|
final message = remaining.substring(0, doubleNewlineIndex);
|
||||||
|
remaining = remaining.substring(doubleNewlineIndex + 2);
|
||||||
|
|
||||||
|
for (final line in message.split('\n')) {
|
||||||
|
if (line.startsWith('event:')) {
|
||||||
|
currentEventName = line.substring(6).trim();
|
||||||
|
} else if (line.startsWith('data:')) {
|
||||||
|
final dataPayload = line.substring(5).trim();
|
||||||
|
final event = _parseSseEvent(currentEventName, dataPayload);
|
||||||
|
if (event != null) {
|
||||||
|
yield event;
|
||||||
|
if (event is DishJobDone || event is DishJobFailed) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
currentEventName = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
buffer
|
||||||
|
..clear()
|
||||||
|
..write(remaining);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
DishJobEvent? _parseSseEvent(String? eventName, String dataPayload) {
|
||||||
|
try {
|
||||||
|
final json = jsonDecode(dataPayload) as Map<String, dynamic>;
|
||||||
|
switch (eventName) {
|
||||||
|
case 'queued':
|
||||||
|
return DishJobQueued(
|
||||||
|
position: json['position'] as int? ?? 0,
|
||||||
|
estimatedSeconds: json['estimated_seconds'] as int? ?? 0,
|
||||||
|
);
|
||||||
|
case 'processing':
|
||||||
|
return DishJobProcessing();
|
||||||
|
case 'done':
|
||||||
|
return DishJobDone(DishResult.fromJson(json));
|
||||||
|
case 'failed':
|
||||||
|
return DishJobFailed(json['error'] as String? ?? 'Recognition failed');
|
||||||
|
default:
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
} catch (_) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Future<Map<String, String>> _buildImagePayload(XFile image) async {
|
Future<Map<String, String>> _buildImagePayload(XFile image) async {
|
||||||
@@ -188,5 +334,12 @@ class RecognitionService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
final recognitionServiceProvider = Provider<RecognitionService>((ref) {
|
final recognitionServiceProvider = Provider<RecognitionService>((ref) {
|
||||||
return RecognitionService(ref.read(apiClientProvider));
|
final config = ref.read(appConfigProvider);
|
||||||
|
final storage = ref.read(secureStorageProvider);
|
||||||
|
return RecognitionService(
|
||||||
|
ref.read(apiClientProvider),
|
||||||
|
storage,
|
||||||
|
config,
|
||||||
|
() => ref.read(languageProvider),
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -125,10 +125,10 @@ packages:
|
|||||||
dependency: transitive
|
dependency: transitive
|
||||||
description:
|
description:
|
||||||
name: characters
|
name: characters
|
||||||
sha256: faf38497bda5ead2a8c7615f4f7939df04333478bf32e4173fcb06d428b5716b
|
sha256: f71061c654a3380576a52b451dd5532377954cf9dbd272a78fc8479606670803
|
||||||
url: "https://pub.dev"
|
url: "https://pub.dev"
|
||||||
source: hosted
|
source: hosted
|
||||||
version: "1.4.1"
|
version: "1.4.0"
|
||||||
checked_yaml:
|
checked_yaml:
|
||||||
dependency: transitive
|
dependency: transitive
|
||||||
description:
|
description:
|
||||||
@@ -668,26 +668,26 @@ packages:
|
|||||||
dependency: transitive
|
dependency: transitive
|
||||||
description:
|
description:
|
||||||
name: matcher
|
name: matcher
|
||||||
sha256: dc0b7dc7651697ea4ff3e69ef44b0407ea32c487a39fff6a4004fa585e901861
|
sha256: dc58c723c3c24bf8d3e2d3ad3f2f9d7bd9cf43ec6feaa64181775e60190153f2
|
||||||
url: "https://pub.dev"
|
url: "https://pub.dev"
|
||||||
source: hosted
|
source: hosted
|
||||||
version: "0.12.19"
|
version: "0.12.17"
|
||||||
material_color_utilities:
|
material_color_utilities:
|
||||||
dependency: transitive
|
dependency: transitive
|
||||||
description:
|
description:
|
||||||
name: material_color_utilities
|
name: material_color_utilities
|
||||||
sha256: "9c337007e82b1889149c82ed242ed1cb24a66044e30979c44912381e9be4c48b"
|
sha256: f7142bb1154231d7ea5f96bc7bde4bda2a0945d2806bb11670e30b850d56bdec
|
||||||
url: "https://pub.dev"
|
url: "https://pub.dev"
|
||||||
source: hosted
|
source: hosted
|
||||||
version: "0.13.0"
|
version: "0.11.1"
|
||||||
meta:
|
meta:
|
||||||
dependency: transitive
|
dependency: transitive
|
||||||
description:
|
description:
|
||||||
name: meta
|
name: meta
|
||||||
sha256: "23f08335362185a5ea2ad3a4e597f1375e78bce8a040df5c600c8d3552ef2394"
|
sha256: e3641ec5d63ebf0d9b41bd43201a66e3fc79a65db5f61fc181f04cd27aab950c
|
||||||
url: "https://pub.dev"
|
url: "https://pub.dev"
|
||||||
source: hosted
|
source: hosted
|
||||||
version: "1.17.0"
|
version: "1.16.0"
|
||||||
mime:
|
mime:
|
||||||
dependency: transitive
|
dependency: transitive
|
||||||
description:
|
description:
|
||||||
@@ -993,10 +993,10 @@ packages:
|
|||||||
dependency: transitive
|
dependency: transitive
|
||||||
description:
|
description:
|
||||||
name: test_api
|
name: test_api
|
||||||
sha256: "8161c84903fd860b26bfdefb7963b3f0b68fee7adea0f59ef805ecca346f0c7a"
|
sha256: "522f00f556e73044315fa4585ec3270f1808a4b186c936e612cab0b565ff1e00"
|
||||||
url: "https://pub.dev"
|
url: "https://pub.dev"
|
||||||
source: hosted
|
source: hosted
|
||||||
version: "0.7.10"
|
version: "0.7.6"
|
||||||
typed_data:
|
typed_data:
|
||||||
dependency: transitive
|
dependency: transitive
|
||||||
description:
|
description:
|
||||||
|
|||||||
Reference in New Issue
Block a user