feat: async dish recognition (Kafka/Watermill/SSE) + remove Wire + consolidate migrations
Async recognition pipeline:
- POST /ai/recognize-dish → 202 {job_id, queue_position, estimated_seconds}
- GET /ai/jobs/{id}/stream — SSE stream: queued → processing → done/failed
- Kafka topics: ai.recognize.paid (3 partitions) + ai.recognize.free (1 partition)
- 5-worker WorkerPool with priority loop (paid consumers first)
- SSEBroker via PostgreSQL LISTEN/NOTIFY
- Kafka adapter migrated from franz-go to Watermill (watermill-kafka/v2)
- Docker Compose: added Kafka + Zookeeper + kafka-init service
- Flutter: recognition_service.dart uses SSE; home_screen shows live job status
Remove google/wire (archived):
- Deleted wire.go (wireinject spec) and wire_gen.go
- Added cmd/server/init.go — plain Go manual DI, same initApp() logic
- Removed github.com/google/wire from go.mod
Consolidate migrations:
- Merged 001_initial_schema + 002_seed_data + 003_recognition_jobs into single 001_initial_schema.sql
- Deleted 002_seed_data.sql and 003_recognition_jobs.sql
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
66
backend/internal/adapters/kafka/consumer.go
Normal file
66
backend/internal/adapters/kafka/consumer.go
Normal file
@@ -0,0 +1,66 @@
|
||||
package kafka
|
||||
|
||||
import (
	"context"
	"fmt"
	"log/slog"

	"github.com/ThreeDotsLabs/watermill"
	wmkafka "github.com/ThreeDotsLabs/watermill-kafka/v2/pkg/kafka"
)
|
||||
|
||||
// Consumer wraps a Watermill Kafka subscriber for consuming a single topic within a consumer group.
//
// Construct it with NewConsumer, start it with Run in a dedicated goroutine,
// and release broker connections with Close.
type Consumer struct {
	subscriber *wmkafka.Subscriber // underlying Watermill Kafka subscriber; set by NewConsumer
	topic string // the single Kafka topic this consumer reads from
}
|
||||
|
||||
// NewConsumer creates a Consumer subscribed to the given topic within a consumer group.
|
||||
func NewConsumer(brokers []string, groupID, topic string) (*Consumer, error) {
|
||||
subscriber, createError := wmkafka.NewSubscriber(
|
||||
wmkafka.SubscriberConfig{
|
||||
Brokers: brokers,
|
||||
ConsumerGroup: groupID,
|
||||
Unmarshaler: wmkafka.DefaultMarshaler{},
|
||||
OverwriteSaramaConfig: wmkafka.DefaultSaramaSubscriberConfig(),
|
||||
},
|
||||
watermill.NopLogger{},
|
||||
)
|
||||
if createError != nil {
|
||||
return nil, createError
|
||||
}
|
||||
return &Consumer{subscriber: subscriber, topic: topic}, nil
|
||||
}
|
||||
|
||||
// Run subscribes to the Kafka topic and writes job IDs to the out channel until runContext is cancelled.
|
||||
// Call this in a dedicated goroutine — it blocks until the context is done.
|
||||
// Each message is Ack'd after its job ID is successfully forwarded to the channel,
|
||||
// or Nack'd when the context is cancelled before forwarding completes.
|
||||
func (consumer *Consumer) Run(runContext context.Context, out chan<- string) {
|
||||
messageChannel, subscribeError := consumer.subscriber.Subscribe(runContext, consumer.topic)
|
||||
if subscribeError != nil {
|
||||
slog.Error("kafka consumer subscribe", "topic", consumer.topic, "err", subscribeError)
|
||||
return
|
||||
}
|
||||
for {
|
||||
select {
|
||||
case msg, ok := <-messageChannel:
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
select {
|
||||
case out <- string(msg.Payload):
|
||||
msg.Ack()
|
||||
case <-runContext.Done():
|
||||
msg.Nack()
|
||||
return
|
||||
}
|
||||
case <-runContext.Done():
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Close shuts down the underlying Kafka subscriber.
|
||||
func (consumer *Consumer) Close() {
|
||||
_ = consumer.subscriber.Close()
|
||||
}
|
||||
42
backend/internal/adapters/kafka/producer.go
Normal file
42
backend/internal/adapters/kafka/producer.go
Normal file
@@ -0,0 +1,42 @@
|
||||
package kafka
|
||||
|
||||
import (
	"context"
	"fmt"
	"log/slog"

	"github.com/ThreeDotsLabs/watermill"
	"github.com/ThreeDotsLabs/watermill/message"
	wmkafka "github.com/ThreeDotsLabs/watermill-kafka/v2/pkg/kafka"
)
|
||||
|
||||
// Producer wraps a Watermill Kafka publisher for publishing messages to Kafka topics.
//
// Construct it with NewProducer and release broker connections with Close.
type Producer struct {
	publisher message.Publisher // underlying Watermill publisher; set by NewProducer
}
|
||||
|
||||
// NewProducer creates a Producer connected to the given brokers.
|
||||
func NewProducer(brokers []string) (*Producer, error) {
|
||||
publisher, createError := wmkafka.NewPublisher(
|
||||
wmkafka.PublisherConfig{
|
||||
Brokers: brokers,
|
||||
Marshaler: wmkafka.DefaultMarshaler{},
|
||||
},
|
||||
watermill.NopLogger{},
|
||||
)
|
||||
if createError != nil {
|
||||
return nil, createError
|
||||
}
|
||||
return &Producer{publisher: publisher}, nil
|
||||
}
|
||||
|
||||
// Publish writes a single message to the named topic.
|
||||
// The context parameter is accepted for interface compatibility but is not forwarded
|
||||
// to the Watermill publisher, which does not accept a context.
|
||||
func (producer *Producer) Publish(_ context.Context, topic, jobID string) error {
|
||||
msg := message.NewMessage(watermill.NewUUID(), []byte(jobID))
|
||||
return producer.publisher.Publish(topic, msg)
|
||||
}
|
||||
|
||||
// Close shuts down the underlying Kafka publisher.
|
||||
func (producer *Producer) Close() {
|
||||
_ = producer.publisher.Close()
|
||||
}
|
||||
Reference in New Issue
Block a user