Files
food-ai/backend/internal/adapters/kafka/consumer.go
dbastrikin 39193ec13c feat: async dish recognition (Kafka/Watermill/SSE) + remove Wire + consolidate migrations
Async recognition pipeline:
- POST /ai/recognize-dish → 202 {job_id, queue_position, estimated_seconds}
- GET /ai/jobs/{id}/stream — SSE stream: queued → processing → done/failed
- Kafka topics: ai.recognize.paid (3 partitions) + ai.recognize.free (1 partition)
- 5-worker WorkerPool with priority loop (paid consumers first)
- SSEBroker via PostgreSQL LISTEN/NOTIFY
- Kafka adapter migrated from franz-go to Watermill (watermill-kafka/v2)
- Docker Compose: added Kafka + Zookeeper + kafka-init service
- Flutter: recognition_service.dart uses SSE; home_screen shows live job status

Remove google/wire (archived):
- Deleted wire.go (wireinject spec) and wire_gen.go
- Added cmd/server/init.go — plain Go manual DI, same initApp() logic
- Removed github.com/google/wire from go.mod

Consolidate migrations:
- Merged 001_initial_schema + 002_seed_data + 003_recognition_jobs into single 001_initial_schema.sql
- Deleted 002_seed_data.sql and 003_recognition_jobs.sql

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-18 16:32:06 +02:00

67 lines
1.9 KiB
Go

package kafka
import (
	"context"
	"fmt"
	"log/slog"

	"github.com/ThreeDotsLabs/watermill"
	wmkafka "github.com/ThreeDotsLabs/watermill-kafka/v2/pkg/kafka"
)
// Consumer wraps a Watermill Kafka subscriber for consuming a single topic within a consumer group.
type Consumer struct {
// subscriber is the underlying Watermill Kafka subscriber; created in NewConsumer and shut down in Close.
subscriber *wmkafka.Subscriber
// topic is the single Kafka topic this consumer reads from in Run.
topic string
}
// NewConsumer creates a Consumer subscribed to the given topic within a consumer group.
//
// brokers lists the Kafka bootstrap addresses, groupID identifies the consumer
// group (offsets are committed per group), and topic is the single topic this
// consumer will read from. The returned Consumer must be started with Run and
// released with Close.
func NewConsumer(brokers []string, groupID, topic string) (*Consumer, error) {
	subscriber, createError := wmkafka.NewSubscriber(
		wmkafka.SubscriberConfig{
			Brokers:               brokers,
			ConsumerGroup:         groupID,
			Unmarshaler:           wmkafka.DefaultMarshaler{},
			OverwriteSaramaConfig: wmkafka.DefaultSaramaSubscriberConfig(),
		},
		watermill.NopLogger{},
	)
	if createError != nil {
		// Wrap with group/topic context so the failure is traceable at the
		// call site instead of surfacing as a bare sarama/watermill error.
		return nil, fmt.Errorf("creating kafka subscriber (group %q, topic %q): %w", groupID, topic, createError)
	}
	return &Consumer{subscriber: subscriber, topic: topic}, nil
}
// Run subscribes to the Kafka topic and writes job IDs to the out channel until runContext is cancelled.
// Call this in a dedicated goroutine — it blocks until the context is done.
// Each message is Ack'd after its job ID is successfully forwarded to the channel,
// or Nack'd when the context is cancelled before forwarding completes.
func (consumer *Consumer) Run(runContext context.Context, out chan<- string) {
	messages, subscribeError := consumer.subscriber.Subscribe(runContext, consumer.topic)
	if subscribeError != nil {
		slog.Error("kafka consumer subscribe", "topic", consumer.topic, "err", subscribeError)
		return
	}
	for {
		select {
		case <-runContext.Done():
			return
		case message, open := <-messages:
			if !open {
				// Watermill closes the channel when the subscriber shuts down.
				return
			}
			// Forward the payload (a job ID) and Ack only once the receiver
			// has it; on cancellation mid-forward, Nack so it is redelivered.
			select {
			case out <- string(message.Payload):
				message.Ack()
			case <-runContext.Done():
				message.Nack()
				return
			}
		}
	}
}
// Close shuts down the underlying Kafka subscriber, releasing its broker
// connections and consumer-group membership.
//
// A close failure is logged rather than returned: callers invoke Close during
// teardown and have no meaningful way to act on the error, but silently
// dropping it would hide broker/network problems.
func (consumer *Consumer) Close() {
	if closeError := consumer.subscriber.Close(); closeError != nil {
		slog.Error("kafka consumer close", "topic", consumer.topic, "err", closeError)
	}
}