diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..ed1abc1
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+# macOS
+.DS_Store
+
diff --git a/module_1/go/.gitignore b/module_1/go/.gitignore
new file mode 100644
index 0000000..6915aaf
--- /dev/null
+++ b/module_1/go/.gitignore
@@ -0,0 +1,26 @@
+# If you prefer the allow list template instead of the deny list, see community template:
+# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore
+#
+# Binaries for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+
+# Test binary, built with `go test -c`
+*.test
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
+# Dependency directories (remove the comment below to include it)
+# vendor/
+
+# Go workspace file
+go.work
+go.work.sum
+
+# env file
+.env
+.idea
diff --git a/module_1/go/README.md b/module_1/go/README.md
new file mode 100644
index 0000000..5a7f98a
--- /dev/null
+++ b/module_1/go/README.md
@@ -0,0 +1,77 @@
+## Kafka Avro project in Go
+
+This project demonstrates working with Kafka and Avro in Go.
+
+### Description
+
+The project code can be compiled into two independently runnable applications:
+
+* Producer: writes Avro-encoded data to a Kafka topic.
+* Consumer: reads data from the Kafka topic and prints it to the console.
+
+### Installation
+
+1. Install Go from the official site: https://go.dev/doc/install
+
+2. Install the required dependencies:
+```bash
+go mod tidy
+```
+
+3. Adjust the configuration files if needed (the defaults can be left as-is); an example is shown below:
+ * `config/<name>.yaml`: configuration file with the Kafka connection settings and other parameters.
+
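+For reference, `config/producer.yaml` from this repository looks like this (the consumer configs additionally set `groupId` and, for the push consumer, `timeout`):
+```yaml
+env: "local"
+kafka:
+  kafkaUrl: "localhost:29092,localhost:39092,localhost:49092"
+  schemaRegistryUrl: "http://localhost:8081"
+  topic: "users"
+  type: "producer"
+```
+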
+### Running
+1. Start the infrastructure:
+```bash
+cd ./infra && docker compose up -d
+```
+
+2. Create the topic and check its configuration:
+
+```bash
+docker exec -it kafka-1 /usr/bin/kafka-topics --create --topic users --bootstrap-server kafka-1:9092 --partitions 3 --replication-factor 2
+```
+Here `kafka-1:9092` is the Kafka broker address, `users` is the topic name, `3` is the number of partitions, and `2` is the replication factor.
+
+```bash
+docker exec -it kafka-1 /usr/bin/kafka-topics --describe --topic users --bootstrap-server kafka-1:9092
+```
+Expected output:
+```
+Topic: users TopicId: YEE8ywCDR2mWWht0TNlZsw PartitionCount: 3 ReplicationFactor: 2 Configs:
+ Topic: users Partition: 0 Leader: 2 Replicas: 2,1 Isr: 2,1
+ Topic: users Partition: 1 Leader: 1 Replicas: 1,2 Isr: 1,2
+ Topic: users Partition: 2 Leader: 2 Replicas: 2,1 Isr: 2,1
+```
+
+3. Start the producer:
+```bash
+go run ./avro-example/cmd/main.go -c ./avro-example/config/producer.yaml
+```
+   a. The program asks for an action (Command):
+      enter `send` to send a message to the broker,
+      enter `exit` to quit.
+
+   b. The program then asks for the data to send: a name (Enter name:),
+      a favorite number (Enter favorite number:) and a favorite color (Enter favorite color:).
+      Please enter valid values: the point of this example is to show Avro-encoded data
+      being passed through Kafka, so no input validation is implemented.
+
+   Example:
+   ```
+   Command:send
+   Enter name: alex
+   Enter favorite number: 55
+   Enter favorite color:black
+   ```
+
+
+4. Start consumer-push:
+```bash
+go run ./avro-example/cmd/main.go -c ./avro-example/config/consumer1.yaml
+```
+5. Start consumer-pull:
+```bash
+go run ./avro-example/cmd/main.go -c ./avro-example/config/consumer2.yaml
+```
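+
+6. When you are done, stop the producer and consumers with `Ctrl+C` (the applications handle SIGINT/SIGTERM via `signal.NotifyContext`) and tear down the infrastructure. A minimal cleanup, assuming the compose file from step 1:
+```bash
+cd ./infra && docker compose down
+```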
diff --git a/module_1/go/avro-example/.golangci.yml b/module_1/go/avro-example/.golangci.yml
new file mode 100644
index 0000000..c0cff9b
--- /dev/null
+++ b/module_1/go/avro-example/.golangci.yml
@@ -0,0 +1,168 @@
+run:
+ tests: true
+
+issues:
+ max-same-issues: 0
+
+linters-settings:
+ exhaustive:
+ default-signifies-exhaustive: true
+
+ depguard:
+ rules:
+ main:
+ deny:
+ - pkg: "github.com/pkg/errors"
+ desc: Should be replaced by standard lib errors package
+
+ godox:
+ keywords:
+ - FIXME
+ - TODO
+
+ govet:
+ enable-all: true
+ disable:
+ - fieldalignment
+ - shadow
+
+ lll:
+ line-length: 120
+
+ nolintlint:
+ allow-no-explanation: [ "lll" ]
+ require-explanation: true
+
+ revive:
+ severity: error
+ rules:
+ - name: argument-limit
+ - name: atomic
+ - name: bare-return
+ - name: blank-imports
+ - name: bool-literal-in-expr
+ - name: comment-spacings
+ - name: confusing-results
+ - name: context-as-argument
+ arguments:
+ - allowTypesBefore: "*testing.T" # https://go-review.googlesource.com/c/lint/+/145237
+ - name: context-keys-type
+ - name: datarace
+ - name: deep-exit
+ - name: defer
+ - name: dot-imports
+ - name: duplicated-imports
+ - name: early-return
+ - name: empty-lines
+ - name: empty-block
+ - name: error-naming
+ - name: error-return
+ - name: error-strings
+ - name: errorf
+ - name: exported
+ - name: identical-branches
+ - name: if-return
+ - name: increment-decrement
+ - name: indent-error-flow
+ - name: package-comments
+ - name: range
+ - name: range-val-address
+ - name: range-val-in-closure
+ - name: receiver-naming
+ - name: redefines-builtin-id
+ - name: string-of-int
+ - name: superfluous-else
+ - name: time-equal
+ - name: time-naming
+ - name: unexported-return
+ - name: unhandled-error
+ arguments: [ "fmt.Fprint", "fmt.Printf", "fmt.Println" ]
+ - name: unreachable-code
+ - name: use-any
+ - name: unused-parameter
+ - name: var-declaration
+ - name: var-naming
+ - name: waitgroup-by-value
+
+ tagliatelle:
+    # Check the struct tag name case.
+ case:
+ # Use the struct field name to check the name of the struct tag.
+ # Default: false
+ use-field-name: false
+ rules:
+ # Any struct tag type can be used.
+ # Support string case: `camel`, `pascal`, `kebab`, `snake`, `upperSnake`, `goCamel`, `goPascal`, `goKebab`, `goSnake`, `upper`, `lower`, `header`.
+ json: camel
+ toml: snake
+
+linters:
+ disable-all: true
+ enable:
+ - asasalint
+ - asciicheck
+ - bidichk
+ - bodyclose
+ - contextcheck
+ - depguard
+ - durationcheck
+ - errcheck
+ - errchkjson
+ - errname
+ - exhaustive
+ - exportloopref
+ - ginkgolinter
+ - goconst
+ - gocritic
+ - gocyclo
+ - godot
+ - godox
+ - gofmt
+ - gofumpt
+ - goheader
+ - goimports
+ - gomoddirectives
+ - gomodguard
+ - goprintffuncname
+ - gosec
+ - gosimple
+ - govet
+ - importas
+ - ineffassign
+ - inamedparam
+ - lll
+ - makezero
+ - misspell
+ - musttag
+ - nakedret
+ - nestif
+ - nilerr
+ - nilnil
+ - noctx
+ - nolintlint
+ - nosprintfhostport
+ - perfsprint
+ - prealloc
+ - predeclared
+ - promlinter
+ - reassign
+ - revive
+ - rowserrcheck
+ - sloglint
+ - staticcheck
+ - stylecheck
+ - sqlclosecheck
+ - tagliatelle
+ - tenv
+ - testableexamples
+ - testifylint
+ - testpackage
+ - thelper
+ - tparallel
+ - typecheck
+ - unconvert
+ - unparam
+ - unused
+ - usestdlibvars
+ - wastedassign
+ - whitespace
\ No newline at end of file
diff --git a/module_1/go/avro-example/app/app.go b/module_1/go/avro-example/app/app.go
new file mode 100644
index 0000000..a596628
--- /dev/null
+++ b/module_1/go/avro-example/app/app.go
@@ -0,0 +1,37 @@
+package app
+
+import (
+ "context"
+ "errors"
+
+ "github.com/apache_kafka_course/module1/go/avro-example/app/consumer"
+ "github.com/apache_kafka_course/module1/go/avro-example/app/producer"
+ "github.com/apache_kafka_course/module1/go/avro-example/internal/config"
+ "github.com/apache_kafka_course/module1/go/avro-example/internal/logger"
+)
+
+var ErrWrongType = errors.New("wrong type")
+
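+// StartGetConfigStopper is the common interface implemented by both the producer and the consumer applications.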
+type StartGetConfigStopper interface {
+ Start(ctx context.Context)
+ GetConfig() string
+ Stop()
+}
+
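+// Fabric is a small factory: it loads the configuration and builds the producer or
+// consumer application selected by cfg.Kafka.Type.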
+func Fabric() (StartGetConfigStopper, error) {
+ cfg, err := config.New()
+ if err != nil {
+ return nil, err
+ }
+ log := logger.New(cfg.Env)
+ switch cfg.Kafka.Type {
+ case "producer":
+ return producer.New(cfg, log)
+	case "consumer-push", "consumer-pull":
+		return consumer.New(cfg, log)
+ default:
+ return nil, ErrWrongType
+ }
+}
diff --git a/module_1/go/avro-example/app/consumer/app.go b/module_1/go/avro-example/app/consumer/app.go
new file mode 100644
index 0000000..d639d1f
--- /dev/null
+++ b/module_1/go/avro-example/app/consumer/app.go
@@ -0,0 +1,73 @@
+package consumer
+
+import (
+ "context"
+ "log/slog"
+ "time"
+
+ consumerpull "github.com/apache_kafka_course/module1/go/avro-example/internal/broker/consumer/pull"
+ consumerpush "github.com/apache_kafka_course/module1/go/avro-example/internal/broker/consumer/push"
+ "github.com/apache_kafka_course/module1/go/avro-example/internal/config"
+)
+
+type consumeCloser interface {
+ Consume() error
+ Close() error
+}
+
+type App struct {
+ ServerConsumer consumeCloser
+ log *slog.Logger
+ Cfg *config.Config
+}
+
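+// New builds the consumer application: "consumer-push" gets the channel-based push consumer,
+// anything else gets the polling pull consumer.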
+func New(cfg *config.Config, log *slog.Logger) (*App, error) {
+ if cfg.Kafka.Type == "consumer-push" {
+ cons, err := consumerpush.New(cfg, log)
+ if err != nil {
+ return nil, err
+ }
+ return &App{
+ ServerConsumer: cons,
+ log: log,
+ Cfg: cfg,
+ }, nil
+ }
+ cons, err := consumerpull.New(cfg, log)
+ if err != nil {
+ return nil, err
+ }
+ return &App{
+ ServerConsumer: cons,
+ log: log,
+ Cfg: cfg,
+ }, nil
+}
+
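+// Start polls the broker in a loop until the context is cancelled.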
+func (a *App) Start(ctx context.Context) {
+	a.log.Info("consumer starts")
+ for {
+ select {
+ case <-ctx.Done():
+ return
+ default:
+ err := a.ServerConsumer.Consume()
+ if err != nil {
+ a.log.Error(err.Error())
+ }
+ time.Sleep(time.Second)
+ }
+ }
+}
+
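+// Stop closes the underlying Kafka consumer.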
+func (a *App) Stop() {
+ a.log.Info("close kafka client")
+ err := a.ServerConsumer.Close()
+ if err != nil {
+ a.log.Error(err.Error())
+ }
+}
+
+func (a *App) GetConfig() string {
+ return a.Cfg.String()
+}
diff --git a/module_1/go/avro-example/app/producer/app.go b/module_1/go/avro-example/app/producer/app.go
new file mode 100644
index 0000000..a61d84e
--- /dev/null
+++ b/module_1/go/avro-example/app/producer/app.go
@@ -0,0 +1,119 @@
+package producer
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "log"
+ "log/slog"
+
+ "github.com/apache_kafka_course/module1/go/avro-example/internal/broker/producer"
+ "github.com/apache_kafka_course/module1/go/avro-example/internal/config"
+ "github.com/apache_kafka_course/module1/go/avro-example/internal/dto"
+)
+
+type sendCloser interface {
+ Send(msg dto.User, topic string, key string) error
+ Close()
+}
+
+type App struct {
+ ServerProducer sendCloser
+ log *slog.Logger
+ Cfg *config.Config
+}
+
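+// New builds the producer application on top of the Kafka producer broker.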
+func New(cfg *config.Config, log *slog.Logger) (*App, error) {
+ prod, err := producer.New(cfg, log)
+ if err != nil {
+ return nil, err
+ }
+
+ return &App{
+ ServerProducer: prod,
+ log: log,
+ Cfg: cfg,
+ }, nil
+}
+
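+// Start reads user input in a loop and sends each entry to Kafka until the context is cancelled
+// or reading input fails (including the `exit` command).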
+func (a *App) Start(ctx context.Context) {
+ a.log.Info("producer starts")
+ for {
+ select {
+ case <-ctx.Done():
+ return
+ default:
+ value, err := a.readInput()
+			if err != nil {
+				// readInput also returns an error when the user types `exit`;
+				// in both cases there is nothing to send, so stop the loop.
+				a.log.Error(err.Error())
+				return
+			}
+ err = a.ServerProducer.Send(*value, a.Cfg.Kafka.Topic, "53")
+ if err != nil {
+ a.log.Error(err.Error())
+ }
+ }
+ }
+}
+
+func (a *App) Stop() {
+ a.log.Info("close kafka client")
+ a.ServerProducer.Close()
+}
+
+func (a *App) GetConfig() string {
+ return a.Cfg.String()
+}
+
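+// readInput interactively reads a command and the user fields from stdin and builds a dto.User.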
+func (a *App) readInput() (*dto.User, error) {
+ // it would be better to have data validation here
+ var (
+ name string
+ favoriteNumber int64
+ favoriteColor string
+ command string
+ )
+ for {
+ log.Print("Command: ")
+ _, err := fmt.Scanln(&command)
+ if err != nil {
+ a.log.Error(err.Error())
+ return nil, errors.New("reading command failed")
+ }
+
+ if command == "exit" {
+ return nil, errors.New("terminate")
+ }
+ if command == "send" {
+ break
+ }
+		a.log.Info("type `exit` to quit or `send` to send a message")
+ }
+
+ log.Print("Enter name: ")
+ _, err := fmt.Scanln(&name)
+ if err != nil {
+ a.log.Error(err.Error())
+ return nil, errors.New("reading name failed")
+ }
+
+ log.Print("Enter favorite number: ")
+ _, err = fmt.Scanln(&favoriteNumber)
+ if err != nil {
+ a.log.Error(err.Error())
+ return nil, errors.New("reading favorite number failed")
+ }
+
+ log.Print("Enter favorite color: ")
+ _, err = fmt.Scanln(&favoriteColor)
+ if err != nil {
+ return nil, errors.New("reading favorite color failed")
+ }
+
+ value := &dto.User{
+ Name: name,
+ Favorite_number: favoriteNumber,
+ Favorite_color: favoriteColor,
+ }
+
+ return value, nil
+}
diff --git a/module_1/go/avro-example/cmd/main.go b/module_1/go/avro-example/cmd/main.go
new file mode 100644
index 0000000..c4f7f34
--- /dev/null
+++ b/module_1/go/avro-example/cmd/main.go
@@ -0,0 +1,21 @@
+package main
+
+import (
+ "context"
+ "log"
+ "os/signal"
+ "syscall"
+
+ "github.com/apache_kafka_course/module1/go/avro-example/app"
+)
+
+func main() {
+ application, err := app.Fabric()
+ if err != nil {
+ log.Fatal(err)
+ }
+ log.Printf("application starts with cfg -> %s \n", application.GetConfig())
+ ctx, cancel := signal.NotifyContext(context.Background(), syscall.SIGTERM, syscall.SIGINT)
+ defer cancel()
+	application.Start(ctx)
+	// release broker resources (flush/close) once Start returns
+	application.Stop()
+}
diff --git a/module_1/go/avro-example/config/consumer1.yaml b/module_1/go/avro-example/config/consumer1.yaml
new file mode 100644
index 0000000..eaf76fb
--- /dev/null
+++ b/module_1/go/avro-example/config/consumer1.yaml
@@ -0,0 +1,8 @@
+env: "local"
+kafka:
+ kafkaUrl: "localhost:29092,localhost:39092,localhost:49092"
+ schemaRegistryUrl: "http://localhost:8081"
+ topic: "users"
+ type: "consumer-push"
+ groupId: "group_1"
+ timeout: 100
diff --git a/module_1/go/avro-example/config/consumer2.yaml b/module_1/go/avro-example/config/consumer2.yaml
new file mode 100644
index 0000000..c1ff991
--- /dev/null
+++ b/module_1/go/avro-example/config/consumer2.yaml
@@ -0,0 +1,7 @@
+env: "local"
+kafka:
+ kafkaUrl: "localhost:29092,localhost:39092,localhost:49092"
+ schemaRegistryUrl: "http://localhost:8081"
+ topic: "users"
+ type: "consumer-pull"
+ groupId: "group_2"
\ No newline at end of file
diff --git a/module_1/go/avro-example/config/producer.yaml b/module_1/go/avro-example/config/producer.yaml
new file mode 100644
index 0000000..8af6574
--- /dev/null
+++ b/module_1/go/avro-example/config/producer.yaml
@@ -0,0 +1,6 @@
+env: "local"
+kafka:
+ kafkaUrl: "localhost:29092,localhost:39092,localhost:49092"
+ schemaRegistryUrl: "http://localhost:8081"
+ topic: "users"
+ type: "producer"
\ No newline at end of file
diff --git a/module_1/go/avro-example/internal/broker/consumer/pull/consumer.go b/module_1/go/avro-example/internal/broker/consumer/pull/consumer.go
new file mode 100644
index 0000000..a9647a9
--- /dev/null
+++ b/module_1/go/avro-example/internal/broker/consumer/pull/consumer.go
@@ -0,0 +1,129 @@
+package pull
+
+import (
+ "context"
+ "log/slog"
+ "time"
+
+ "github.com/apache_kafka_course/module1/go/avro-example/internal/config"
+ "github.com/apache_kafka_course/module1/go/avro-example/internal/dto"
+
+ "github.com/confluentinc/confluent-kafka-go/v2/kafka"
+ "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry"
+ "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde"
+ "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avro"
+)
+
+type Message struct {
+ UUID string
+ Balance int
+ Type string
+ Comment string
+}
+
+type MessageReceived struct {
+ Msg Message
+ Ctx context.Context
+ Err error
+}
+
+type Broker struct {
+ consumer *kafka.Consumer
+ deserializer serde.Deserializer
+ log *slog.Logger
+ cfg *config.Config
+}
+
+// New returns kafka consumer with schema registry.
+func New(cfg *config.Config, log *slog.Logger) (*Broker, error) {
+ confluentConsumer, err := kafka.NewConsumer(&kafka.ConfigMap{
+ "bootstrap.servers": cfg.Kafka.KafkaURL,
+ "group.id": cfg.Kafka.GroupID,
+ "enable.auto.commit": false,
+ "auto.offset.reset": "earliest",
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ client, err := schemaregistry.NewClient(schemaregistry.NewConfig(cfg.Kafka.SchemaRegistryURL))
+ if err != nil {
+ return nil, err
+ }
+
+ deser, err := avro.NewSpecificDeserializer(client, serde.ValueSerde, avro.NewDeserializerConfig())
+ if err != nil {
+ return nil, err
+ }
+
+	err = confluentConsumer.Subscribe(cfg.Kafka.Topic, nil)
+ if err != nil {
+ return nil, err
+ }
+ broker := &Broker{
+ consumer: confluentConsumer,
+ deserializer: deser,
+ log: log,
+ cfg: cfg,
+ }
+ return broker, nil
+}
+
+// Close closes the deserialization agent and the Kafka consumer.
+// WARNING: the Consume method must have finished before Close is called.
+// https://github.com/confluentinc/confluent-kafka-go/issues/136#issuecomment-586166364
+func (b *Broker) Close() error {
+ b.deserializer.Close()
+ // https://docs.confluent.io/platform/current/clients/confluent-kafka-go/index.html#hdr-High_level_Consumer
+ err := b.consumer.Close()
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
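+// Consume polls a single event; Avro messages are deserialized, "processed" and the offset is then committed manually.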
+func (b *Broker) Consume() error {
+ ev := b.consumer.Poll(b.cfg.Kafka.Timeout)
+ if ev == nil {
+ return nil
+ }
+
+ switch e := ev.(type) {
+ case *kafka.Message:
+ var msg dto.User
+
+ err := b.deserializer.DeserializeInto(*e.TopicPartition.Topic, e.Value, &msg)
+ if err != nil {
+ b.log.Error(
+ "Failed to deserialize payload",
+ "err", err.Error(),
+ )
+ return err
+ } else {
+ b.log.Info(
+ "Message received",
+ "topic", e.TopicPartition, "message", msg,
+ )
+			// simulate message processing
+ time.Sleep(5 * time.Second)
+ b.log.Info(
+ "Message processed",
+ "topic", e.TopicPartition, "message", msg,
+ )
+ _, err = b.consumer.Commit()
+ if err != nil {
+ b.log.Error("Failed to commit message", "err", err.Error())
+ return err
+ }
+ }
+
+ case kafka.Error:
+ // Errors should generally be considered
+ // informational, the client will try to
+ // automatically recover.
+ b.log.Error("kafka.Error", "code", e.Code(), "err", e.Error())
+ default:
+ b.log.Warn("Event:", "msg", e.String())
+ }
+ return nil
+}
diff --git a/module_1/go/avro-example/internal/broker/consumer/push/consumer.go b/module_1/go/avro-example/internal/broker/consumer/push/consumer.go
new file mode 100644
index 0000000..3c401cb
--- /dev/null
+++ b/module_1/go/avro-example/internal/broker/consumer/push/consumer.go
@@ -0,0 +1,136 @@
+package push
+
+import (
+ "context"
+ "log/slog"
+ "time"
+
+ "github.com/apache_kafka_course/module1/go/avro-example/internal/config"
+ "github.com/apache_kafka_course/module1/go/avro-example/internal/dto"
+
+ "github.com/confluentinc/confluent-kafka-go/v2/kafka"
+ "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry"
+ "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde"
+ "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avro"
+)
+
+type Message struct {
+ UUID string
+ Balance int
+ Type string
+ Comment string
+}
+
+type MessageReceived struct {
+ Msg Message
+ Ctx context.Context
+ Err error
+}
+
+type Broker struct {
+ consumer *kafka.Consumer
+ deserializer serde.Deserializer
+ log *slog.Logger
+ cfg *config.Config
+ dataChan chan *dto.User
+}
+
+// New returns kafka consumer with schema registry.
+func New(cfg *config.Config, log *slog.Logger) (*Broker, error) {
+ confluentConsumer, err := kafka.NewConsumer(&kafka.ConfigMap{
+ "bootstrap.servers": cfg.Kafka.KafkaURL,
+ "group.id": cfg.Kafka.GroupID,
+ "auto.offset.reset": "earliest",
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ client, err := schemaregistry.NewClient(schemaregistry.NewConfig(cfg.Kafka.SchemaRegistryURL))
+ if err != nil {
+ return nil, err
+ }
+
+ deser, err := avro.NewSpecificDeserializer(client, serde.ValueSerde, avro.NewDeserializerConfig())
+ if err != nil {
+ return nil, err
+ }
+
+	err = confluentConsumer.Subscribe(cfg.Kafka.Topic, nil)
+ if err != nil {
+ return nil, err
+ }
+ dataChan := make(chan *dto.User, 10)
+
+ go func() {
+ for {
+ data := <-dataChan
+			// process the received message
+ log.Info(
+ "Message processed",
+ "message", data,
+ )
+			// simulate processing latency
+ time.Sleep(10 * time.Second)
+ }
+ }()
+
+ broker := &Broker{
+ consumer: confluentConsumer,
+ deserializer: deser,
+ log: log,
+ cfg: cfg,
+ dataChan: dataChan,
+ }
+ return broker, nil
+}
+
+// Close closes the deserialization agent and the Kafka consumer.
+// WARNING: the Consume method must have finished before Close is called.
+// https://github.com/confluentinc/confluent-kafka-go/issues/136#issuecomment-586166364
+func (b *Broker) Close() error {
+ b.deserializer.Close()
+ // https://docs.confluent.io/platform/current/clients/confluent-kafka-go/index.html#hdr-High_level_Consumer
+ err := b.consumer.Close()
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
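+// Consume polls a single event; Avro messages are deserialized and handed to the processing goroutine via dataChan.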
+func (b *Broker) Consume() error {
+ ev := b.consumer.Poll(b.cfg.Kafka.Timeout)
+ if ev == nil {
+ return nil
+ }
+
+ switch e := ev.(type) {
+ case *kafka.Message:
+ var msg dto.User
+
+ err := b.deserializer.DeserializeInto(*e.TopicPartition.Topic, e.Value, &msg)
+ if err != nil {
+ b.log.Error(
+ "Failed to deserialize payload",
+ "err", err.Error(),
+ )
+ return err
+ } else {
+ b.log.Info(
+ "Message received",
+ "topic", e.TopicPartition, "message", msg,
+ )
+			// hand the message over to the processing goroutine
+ b.dataChan <- &msg
+ }
+
+ case kafka.Error:
+ // Errors should generally be considered
+ // informational, the client will try to
+ // automatically recover.
+ b.log.Error("kafka.Error", "code", e.Code(), "err", e.Error())
+ default:
+ b.log.Warn("Event:", "msg", e.String())
+ }
+ return nil
+}
diff --git a/module_1/go/avro-example/internal/broker/producer/producer.go b/module_1/go/avro-example/internal/broker/producer/producer.go
new file mode 100644
index 0000000..02f4f02
--- /dev/null
+++ b/module_1/go/avro-example/internal/broker/producer/producer.go
@@ -0,0 +1,114 @@
+package producer
+
+import (
+ "log/slog"
+
+ "github.com/apache_kafka_course/module1/go/avro-example/internal/config"
+ "github.com/apache_kafka_course/module1/go/avro-example/internal/dto"
+
+ "github.com/confluentinc/confluent-kafka-go/v2/kafka"
+ "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry"
+ "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde"
+ "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avro"
+)
+
+type Broker struct {
+ producer *kafka.Producer
+ serializer serde.Serializer
+ log *slog.Logger
+}
+
+type Response struct {
+ UserUUID string
+ Err error
+}
+
+var FlushBrokerTimeMs = 100
+
+// New returns kafka producer with schema registry.
+func New(cfg *config.Config, log *slog.Logger) (*Broker, error) {
+ p, err := kafka.NewProducer(&kafka.ConfigMap{"bootstrap.servers": cfg.Kafka.KafkaURL})
+ if err != nil {
+ return nil, err
+ }
+
+ client, err := schemaregistry.NewClient(schemaregistry.NewConfig(cfg.Kafka.SchemaRegistryURL))
+ if err != nil {
+ return nil, err
+ }
+ ser, err := avro.NewSpecificSerializer(client, serde.ValueSerde, avro.NewSerializerConfig())
+ if err != nil {
+ return nil, err
+ }
+
+ // Delivery report handler for produced messages
+ go func() {
+ for {
+ for e := range p.Events() {
+ switch e := e.(type) {
+ // https://github.com/confluentinc/confluent-kafka-go/blob/master/examples/producer_example/producer_example.go
+ case *kafka.Message:
+ // The message delivery report, indicating success or
+ // permanent failure after retries have been exhausted.
+ // Application level retries won't help since the client
+ // is already configured to do that.
+ if e.TopicPartition.Error != nil {
+ log.Error("sending message finished with failure", "err", e.TopicPartition.Error, "key", string(e.Key))
+ continue
+ }
+ log.Debug("sending message finished with success ", "key", string(e.Key))
+ case kafka.Error:
+ // Generic client instance-level errors, such as
+ // broker connection failures, authentication issues, etc.
+ //
+ // These errors should generally be considered informational
+ // as the underlying client will automatically try to
+ // recover from any errors encountered, the application
+ // does not need to take action on them.
+ log.Error("kafka general error", "err", e.Error())
+ }
+ }
+ }
+ }()
+
+ return &Broker{
+ producer: p,
+ serializer: ser,
+ log: log,
+ },
+ nil
+}
+
+// Close closes serialization agent and kafka producer.
+func (b *Broker) Close() {
+ b.log.Info("kafka stops")
+ b.serializer.Close()
+ // https://docs.confluent.io/platform/current/clients/confluent-kafka-go/index.html#hdr-Producer
+ // * When done producing messages it's necessary to make sure all messages are
+ // indeed delivered to the broker (or failed),
+ // because this is an asynchronous client so some messages may be
+ // lingering in internal channels or transmission queues.
+	//   Calling the convenience function `.Flush()` blocks until all
+	//   message deliveries are done or the provided timeout elapses.
+ b.producer.Flush(FlushBrokerTimeMs)
+ b.producer.Close()
+}
+
+// Send sends serialized message to kafka using schema registry.
+func (b *Broker) Send(msg dto.User, topic string, key string) error {
+ b.log.Info("sending message", "msg", msg)
+ payload, err := b.serializer.Serialize(topic, &msg)
+ if err != nil {
+ return err
+ }
+ err = b.producer.Produce(&kafka.Message{
+ Key: []byte(key),
+ TopicPartition: kafka.TopicPartition{Topic: &topic, Partition: kafka.PartitionAny},
+ Value: payload,
+ Headers: []kafka.Header{{Key: "Course", Value: []byte("Kafka")}},
+ }, nil)
+ if err != nil {
+ return err
+ }
+ return nil
+}
diff --git a/module_1/go/avro-example/internal/config/config.go b/module_1/go/avro-example/internal/config/config.go
new file mode 100644
index 0000000..6d90e6a
--- /dev/null
+++ b/module_1/go/avro-example/internal/config/config.go
@@ -0,0 +1,74 @@
+package config
+
+import (
+ "errors"
+ "flag"
+ "fmt"
+ "os"
+
+ "github.com/ilyakaznacheev/cleanenv"
+)
+
+var (
+	ErrAbsentConfigFile = errors.New("config file does not exist")
+ ErrReadConfigFailed = errors.New("reading config file failed")
+)
+
+type KafkaConfig struct {
+ KafkaURL string `yaml:"kafkaUrl" env-required:"true"`
+ SchemaRegistryURL string `yaml:"schemaRegistryUrl" env-required:"true"`
+ Type string `yaml:"type" env-required:"true"`
+ GroupID string `yaml:"groupId"`
+ Topic string `yaml:"topic" env-required:"true"`
+ Timeout int `yaml:"timeout"`
+}
+
+type Config struct {
+	// if this field is omitted, "local" is used as the default value
+ Env string `yaml:"env" env-default:"local"`
+ Kafka KafkaConfig `yaml:"kafka"`
+}
+
+func (c *Config) String() string {
+ return fmt.Sprintf(
+ "type: %s, env: %s, kafka url %s, schema registry url %s",
+ c.Kafka.Type, c.Env, c.Kafka.KafkaURL, c.Kafka.SchemaRegistryURL,
+ )
+}
+
+// New loads config.
+func New() (*Config, error) {
+ cfg := &Config{}
+ var err error
+ var configPath string
+ // path to config yaml file
+ flag.StringVar(&configPath, "c", "", "path to config file")
+ flag.Parse()
+ if configPath == "" {
+ configPath = os.Getenv("CONFIG_PATH")
+ }
+ if configPath != "" {
+ cfg, err = LoadByPath(configPath)
+ if err != nil {
+ return nil, err
+ }
+ return cfg, nil
+ }
+ return cfg, nil
+}
+
+// LoadByPath loads config by path.
+func LoadByPath(configPath string) (*Config, error) {
+ _, err := os.Stat(configPath)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil, ErrAbsentConfigFile
+ }
+ return nil, fmt.Errorf("LoadByPath stat error: %w", err)
+ }
+ var cfg Config
+ if err := cleanenv.ReadConfig(configPath, &cfg); err != nil {
+ return nil, ErrReadConfigFailed
+ }
+ return &cfg, nil
+}
diff --git a/module_1/go/avro-example/internal/dto/user.avsc b/module_1/go/avro-example/internal/dto/user.avsc
new file mode 100644
index 0000000..255183c
--- /dev/null
+++ b/module_1/go/avro-example/internal/dto/user.avsc
@@ -0,0 +1,10 @@
+{
+ "namespace": "kafkapracticum",
+ "name": "User",
+ "type": "record",
+ "fields": [
+ {"name": "name", "type": "string"},
+ {"name": "favorite_number", "type": "long"},
+ {"name": "favorite_color", "type": "string"}
+ ]
+}
\ No newline at end of file
diff --git a/module_1/go/avro-example/internal/dto/user.go b/module_1/go/avro-example/internal/dto/user.go
new file mode 100644
index 0000000..58a5762
--- /dev/null
+++ b/module_1/go/avro-example/internal/dto/user.go
@@ -0,0 +1,202 @@
+// Code generated by github.com/actgardner/gogen-avro/v10. DO NOT EDIT.
+package dto
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+
+ "github.com/actgardner/gogen-avro/v10/compiler"
+ "github.com/actgardner/gogen-avro/v10/vm"
+ "github.com/actgardner/gogen-avro/v10/vm/types"
+)
+
+var _ = fmt.Printf
+
+type User struct {
+ Name string `json:"name"`
+
+ Favorite_number int64 `json:"favorite_number"`
+
+ Favorite_color string `json:"favorite_color"`
+}
+
+const UserAvroCRC64Fingerprint = "\x8d\t\x1dg\r\xa8\xf9r"
+
+func NewUser() User {
+ r := User{}
+ return r
+}
+
+func DeserializeUser(r io.Reader) (User, error) {
+ t := NewUser()
+ deser, err := compiler.CompileSchemaBytes([]byte(t.Schema()), []byte(t.Schema()))
+ if err != nil {
+ return t, err
+ }
+
+ err = vm.Eval(r, deser, &t)
+ return t, err
+}
+
+func DeserializeUserFromSchema(r io.Reader, schema string) (User, error) {
+ t := NewUser()
+
+ deser, err := compiler.CompileSchemaBytes([]byte(schema), []byte(t.Schema()))
+ if err != nil {
+ return t, err
+ }
+
+ err = vm.Eval(r, deser, &t)
+ return t, err
+}
+
+func writeUser(r User, w io.Writer) error {
+ var err error
+ err = vm.WriteString(r.Name, w)
+ if err != nil {
+ return err
+ }
+ err = vm.WriteLong(r.Favorite_number, w)
+ if err != nil {
+ return err
+ }
+ err = vm.WriteString(r.Favorite_color, w)
+ if err != nil {
+ return err
+ }
+ return err
+}
+
+func (r User) Serialize(w io.Writer) error {
+ return writeUser(r, w)
+}
+
+func (r User) Schema() string {
+ return "{\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"favorite_number\",\"type\":\"long\"},{\"name\":\"favorite_color\",\"type\":\"string\"}],\"name\":\"kafkapracticum.User\",\"type\":\"record\"}"
+}
+
+func (r User) SchemaName() string {
+ return "kafkapracticum.User"
+}
+
+func (_ User) SetBoolean(v bool) { panic("Unsupported operation") }
+func (_ User) SetInt(v int32) { panic("Unsupported operation") }
+func (_ User) SetLong(v int64) { panic("Unsupported operation") }
+func (_ User) SetFloat(v float32) { panic("Unsupported operation") }
+func (_ User) SetDouble(v float64) { panic("Unsupported operation") }
+func (_ User) SetBytes(v []byte) { panic("Unsupported operation") }
+func (_ User) SetString(v string) { panic("Unsupported operation") }
+func (_ User) SetUnionElem(v int64) { panic("Unsupported operation") }
+
+func (r *User) Get(i int) types.Field {
+ switch i {
+ case 0:
+ w := types.String{Target: &r.Name}
+
+ return w
+
+ case 1:
+ w := types.Long{Target: &r.Favorite_number}
+
+ return w
+
+ case 2:
+ w := types.String{Target: &r.Favorite_color}
+
+ return w
+
+ }
+ panic("Unknown field index")
+}
+
+func (r *User) SetDefault(i int) {
+ switch i {
+ }
+ panic("Unknown field index")
+}
+
+func (r *User) NullField(i int) {
+ switch i {
+ }
+ panic("Not a nullable field index")
+}
+
+func (_ User) AppendMap(key string) types.Field { panic("Unsupported operation") }
+func (_ User) AppendArray() types.Field { panic("Unsupported operation") }
+func (_ User) HintSize(int) { panic("Unsupported operation") }
+func (_ User) Finalize() {}
+
+func (_ User) AvroCRC64Fingerprint() []byte {
+ return []byte(UserAvroCRC64Fingerprint)
+}
+
+func (r User) MarshalJSON() ([]byte, error) {
+ var err error
+ output := make(map[string]json.RawMessage)
+ output["name"], err = json.Marshal(r.Name)
+ if err != nil {
+ return nil, err
+ }
+ output["favorite_number"], err = json.Marshal(r.Favorite_number)
+ if err != nil {
+ return nil, err
+ }
+ output["favorite_color"], err = json.Marshal(r.Favorite_color)
+ if err != nil {
+ return nil, err
+ }
+ return json.Marshal(output)
+}
+
+func (r *User) UnmarshalJSON(data []byte) error {
+ var fields map[string]json.RawMessage
+ if err := json.Unmarshal(data, &fields); err != nil {
+ return err
+ }
+
+ var val json.RawMessage
+ val = func() json.RawMessage {
+ if v, ok := fields["name"]; ok {
+ return v
+ }
+ return nil
+ }()
+
+ if val != nil {
+ if err := json.Unmarshal([]byte(val), &r.Name); err != nil {
+ return err
+ }
+ } else {
+ return fmt.Errorf("no value specified for name")
+ }
+ val = func() json.RawMessage {
+ if v, ok := fields["favorite_number"]; ok {
+ return v
+ }
+ return nil
+ }()
+
+ if val != nil {
+ if err := json.Unmarshal([]byte(val), &r.Favorite_number); err != nil {
+ return err
+ }
+ } else {
+ return fmt.Errorf("no value specified for favorite_number")
+ }
+ val = func() json.RawMessage {
+ if v, ok := fields["favorite_color"]; ok {
+ return v
+ }
+ return nil
+ }()
+
+ if val != nil {
+ if err := json.Unmarshal([]byte(val), &r.Favorite_color); err != nil {
+ return err
+ }
+ } else {
+ return fmt.Errorf("no value specified for favorite_color")
+ }
+ return nil
+}
diff --git a/module_1/go/avro-example/internal/logger/logger.go b/module_1/go/avro-example/internal/logger/logger.go
new file mode 100644
index 0000000..7d4bf4e
--- /dev/null
+++ b/module_1/go/avro-example/internal/logger/logger.go
@@ -0,0 +1,48 @@
+package logger
+
+import (
+ "log/slog"
+ "os"
+)
+
+const (
+ envLocal = "local"
+ envDemo = "demo"
+ envProd = "prod"
+)
+
+// New creates a logger with predefined settings (depending on the environment).
+func New(env string) *slog.Logger {
+ var log *slog.Logger
+
+ switch env {
+ case envLocal:
+ log = slog.New(
+ slog.NewTextHandler(
+ os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelInfo,
+ AddSource: true,
+ },
+ ),
+ )
+ case envDemo:
+ log = slog.New(
+ slog.NewJSONHandler(
+ os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelDebug,
+ AddSource: true,
+ },
+ ),
+ )
+ case envProd:
+ log = slog.New(
+ slog.NewJSONHandler(
+ os.Stdout, &slog.HandlerOptions{
+ Level: slog.LevelInfo,
+ AddSource: true,
+ },
+ ),
+ )
+	default:
+		// unknown environment: fall back to the local text handler so callers never get a nil logger
+		log = slog.New(
+			slog.NewTextHandler(
+				os.Stdout, &slog.HandlerOptions{
+					Level: slog.LevelInfo,
+					AddSource: true,
+				},
+			),
+		)
+	}
+ return log
+}
diff --git a/module_1/go/go.mod b/module_1/go/go.mod
new file mode 100644
index 0000000..1b022cb
--- /dev/null
+++ b/module_1/go/go.mod
@@ -0,0 +1,18 @@
+module github.com/apache_kafka_course/module1/go
+
+go 1.23.3
+
+require (
+ github.com/actgardner/gogen-avro/v10 v10.2.1
+ github.com/confluentinc/confluent-kafka-go/v2 v2.6.1
+ github.com/ilyakaznacheev/cleanenv v1.5.0
+)
+
+require (
+ github.com/BurntSushi/toml v1.2.1 // indirect
+ github.com/google/uuid v1.6.0 // indirect
+ github.com/heetch/avro v0.4.5 // indirect
+ github.com/joho/godotenv v1.5.1 // indirect
+ gopkg.in/yaml.v3 v3.0.1 // indirect
+ olympos.io/encoding/edn v0.0.0-20201019073823-d3554ca0b0a3 // indirect
+)
diff --git a/module_1/go/go.sum b/module_1/go/go.sum
new file mode 100644
index 0000000..7eba5bd
--- /dev/null
+++ b/module_1/go/go.sum
@@ -0,0 +1,404 @@
+dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
+dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
+github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU=
+github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8=
+github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkkhIiSjQ=
+github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo=
+github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8=
+github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
+github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak=
+github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
+github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0=
+github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
+github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
+github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
+github.com/Microsoft/hcsshim v0.11.5 h1:haEcLNpj9Ka1gd3B3tAEs9CpE0c+1IhoL59w/exYU38=
+github.com/Microsoft/hcsshim v0.11.5/go.mod h1:MV8xMfmECjl5HdO7U/3/hFVnkmSBjAjmA09d4bExKcU=
+github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8=
+github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo=
+github.com/actgardner/gogen-avro/v10 v10.2.1 h1:z3pOGblRjAJCYpkIJ8CmbMJdksi4rAhaygw0dyXZ930=
+github.com/actgardner/gogen-avro/v10 v10.2.1/go.mod h1:QUhjeHPchheYmMDni/Nx7VB0RsT/ee8YIgGY/xpEQgQ=
+github.com/aws/aws-sdk-go-v2 v1.26.1 h1:5554eUqIYVWpU0YmeeYZ0wU64H2VLBs8TlhRB2L+EkA=
+github.com/aws/aws-sdk-go-v2 v1.26.1/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM=
+github.com/aws/aws-sdk-go-v2/config v1.27.10 h1:PS+65jThT0T/snC5WjyfHHyUgG+eBoupSDV+f838cro=
+github.com/aws/aws-sdk-go-v2/config v1.27.10/go.mod h1:BePM7Vo4OBpHreKRUMuDXX+/+JWP38FLkzl5m27/Jjs=
+github.com/aws/aws-sdk-go-v2/credentials v1.17.10 h1:qDZ3EA2lv1KangvQB6y258OssCHD0xvaGiEDkG4X/10=
+github.com/aws/aws-sdk-go-v2/credentials v1.17.10/go.mod h1:6t3sucOaYDwDssHQa0ojH1RpmVmF5/jArkye1b2FKMI=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1 h1:FVJ0r5XTHSmIHJV6KuDmdYhEpvlHpiSd38RQWhut5J4=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1/go.mod h1:zusuAeqezXzAB24LGuzuekqMAEgWkVYukBec3kr3jUg=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 h1:aw39xVGeRWlWx9EzGVnhOR4yOjQDHPQ6o6NmBlscyQg=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5/go.mod h1:FSaRudD0dXiMPK2UjknVwwTYyZMRsHv3TtkabsZih5I=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 h1:PG1F3OD1szkuQPzDw3CIQsRIrtTlUC3lP84taWzHlq0=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5/go.mod h1:jU1li6RFryMz+so64PpKtudI+QzbKoIEivqdf6LNpOc=
+github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU=
+github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY=
+github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1xUsUr3I8cHps0G+XM3WWU16lP6yG8qu1GAZAs=
+github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7 h1:ogRAwT1/gxJBcSWDMZlgyFUM962F51A5CRhDLbxLdmo=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7/go.mod h1:YCsIZhXfRPLFFCl5xxY+1T9RKzOKjCut+28JSX2DnAk=
+github.com/aws/aws-sdk-go-v2/service/sso v1.20.4 h1:WzFol5Cd+yDxPAdnzTA5LmpHYSWinhmSj4rQChV0ee8=
+github.com/aws/aws-sdk-go-v2/service/sso v1.20.4/go.mod h1:qGzynb/msuZIE8I75DVRCUXw3o3ZyBmUvMwQ2t/BrGM=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4 h1:Jux+gDDyi1Lruk+KHF91tK2KCuY61kzoCpvtvJJBtOE=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4/go.mod h1:mUYPBhaF2lGiukDEjJX2BLRRKTmoUSitGDUgM4tRxak=
+github.com/aws/aws-sdk-go-v2/service/sts v1.28.6 h1:cwIxeBttqPN3qkaAjcEcsh8NYr8n2HZPkcKgPAi1phU=
+github.com/aws/aws-sdk-go-v2/service/sts v1.28.6/go.mod h1:FZf1/nKNEkHdGGJP/cI2MoIMquumuRK6ol3QQJNDxmw=
+github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q=
+github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E=
+github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
+github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
+github.com/buger/goterm v1.0.4 h1:Z9YvGmOih81P0FbVtEYTFF6YsSgxSUKEhf/f9bTMXbY=
+github.com/buger/goterm v1.0.4/go.mod h1:HiFWV3xnkolgrBV3mY8m0X0Pumt4zg4QhbdOzQtB8tE=
+github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
+github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
+github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
+github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
+github.com/compose-spec/compose-go/v2 v2.1.3 h1:bD67uqLuL/XgkAK6ir3xZvNLFPxPScEi1KW7R5esrLE=
+github.com/compose-spec/compose-go/v2 v2.1.3/go.mod h1:lFN0DrMxIncJGYAXTfWuajfwj5haBJqrBkarHcnjJKc=
+github.com/confluentinc/confluent-kafka-go/v2 v2.6.1 h1:XFkytnGvk/ZcY2qU0ql4E4h+ftBaGqkLO7tlZ4kRbr4=
+github.com/confluentinc/confluent-kafka-go/v2 v2.6.1/go.mod h1:hScqtFIGUI1wqHIgM3mjoqEou4VweGGGX7dMpcUKves=
+github.com/containerd/console v1.0.4 h1:F2g4+oChYvBTsASRTz8NP6iIAi97J3TtSAsLbIFn4ro=
+github.com/containerd/console v1.0.4/go.mod h1:YynlIjWYF8myEu6sdkwKIvGQq+cOckRm6So2avqoYAk=
+github.com/containerd/containerd v1.7.18 h1:jqjZTQNfXGoEaZdW1WwPU0RqSn1Bm2Ay/KJPUuO8nao=
+github.com/containerd/containerd v1.7.18/go.mod h1:IYEk9/IO6wAPUz2bCMVUbsfXjzw5UNP5fLz4PsUygQ4=
+github.com/containerd/continuity v0.4.3 h1:6HVkalIp+2u1ZLH1J/pYX2oBVXlJZvh1X1A7bEZ9Su8=
+github.com/containerd/continuity v0.4.3/go.mod h1:F6PTNCKepoxEaXLQp3wDAjygEnImnZ/7o4JzpodfroQ=
+github.com/containerd/errdefs v0.1.0 h1:m0wCRBiu1WJT/Fr+iOoQHMQS/eP5myQ8lCv4Dz5ZURM=
+github.com/containerd/errdefs v0.1.0/go.mod h1:YgWiiHtLmSeBrvpw+UfPijzbLaB77mEG1WwJTDETIV0=
+github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
+github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
+github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A=
+github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw=
+github.com/containerd/ttrpc v1.2.5 h1:IFckT1EFQoFBMG4c3sMdT8EP3/aKfumK1msY+Ze4oLU=
+github.com/containerd/ttrpc v1.2.5/go.mod h1:YCXHsb32f+Sq5/72xHubdiJRQY9inL4a4ZQrAbN1q9o=
+github.com/containerd/typeurl/v2 v2.1.1 h1:3Q4Pt7i8nYwy2KmQWIw2+1hTvwTE/6w9FqcttATPO/4=
+github.com/containerd/typeurl/v2 v2.1.1/go.mod h1:IDp2JFvbwZ31H8dQbEIY7sDl2L3o3HZj1hsSQlywkQ0=
+github.com/cpuguy83/dockercfg v0.3.1 h1:/FpZ+JaygUR/lZP2NlFI2DVfrOEMAIKP5wWEJdoYe9E=
+github.com/cpuguy83/dockercfg v0.3.1/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
+github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
+github.com/docker/buildx v0.15.1 h1:1cO6JIc0rOoC8tlxfXoh1HH1uxaNvYH1q7J7kv5enhw=
+github.com/docker/buildx v0.15.1/go.mod h1:16DQgJqoggmadc1UhLaUTPqKtR+PlByN/kyXFdkhFCo=
+github.com/docker/cli v27.0.3+incompatible h1:usGs0/BoBW8MWxGeEtqPMkzOY56jZ6kYlSN5BLDioCQ=
+github.com/docker/cli v27.0.3+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
+github.com/docker/compose/v2 v2.28.1 h1:ORPfiVHrpnRQBDoC3F8JJyWAY8N5gWuo3FgwyivxFdM=
+github.com/docker/compose/v2 v2.28.1/go.mod h1:wDtGQFHe99sPLCHXeVbCkc+Wsl4Y/2ZxiAJa/nga6rA=
+github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk=
+github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
+github.com/docker/docker v27.1.1+incompatible h1:hO/M4MtV36kzKldqnA37IWhebRA+LnqqcqDja6kVaKY=
+github.com/docker/docker v27.1.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
+github.com/docker/docker-credential-helpers v0.8.0 h1:YQFtbBQb4VrpoPxhFuzEBPQ9E16qz5SpHLS+uswaCp8=
+github.com/docker/docker-credential-helpers v0.8.0/go.mod h1:UGFXcuoQ5TxPiB54nHOZ32AWRqQdECoh/Mg0AlEYb40=
+github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c h1:lzqkGL9b3znc+ZUgi7FlLnqjQhcXxkNM/quxIjBVMD0=
+github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c/go.mod h1:CADgU4DSXK5QUlFslkQu2yW2TKzFZcXq/leZfM0UH5Q=
+github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c=
+github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc=
+github.com/docker/go-metrics v0.0.1 h1:AgB/0SvBxihN0X8OR4SjsblXkbMvalQ8cjmtKQ2rQV8=
+github.com/docker/go-metrics v0.0.1/go.mod h1:cG1hvH2utMXtqgqqYE9plW6lDxS3/5ayHzueweSI3Vw=
+github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
+github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
+github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203 h1:XBBHcIb256gUJtLmY22n99HaZTz+r2Z51xUPi01m3wg=
+github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203/go.mod h1:E1jcSv8FaEny+OP/5k9UxZVw9YFWGj7eI4KR/iOBqCg=
+github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxERmMY4rD+g=
+github.com/emicklei/go-restful/v3 v3.11.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
+github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
+github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
+github.com/frankban/quicktest v1.14.0 h1:+cqqvzZV87b4adx/5ayVOaYZ2CrvM4ejQvUdBzPPUss=
+github.com/frankban/quicktest v1.14.0/go.mod h1:NeW+ay9A/U67EYXNFA1nPE8e/tnQv/09mUdL/ijj8og=
+github.com/fsnotify/fsevents v0.2.0 h1:BRlvlqjvNTfogHfeBOFvSC9N0Ddy+wzQCQukyoD7o/c=
+github.com/fsnotify/fsevents v0.2.0/go.mod h1:B3eEk39i4hz8y1zaWS/wPrAP4O6wkIl7HQwKBr1qH/w=
+github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM45eo=
+github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
+github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
+github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
+github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
+github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
+github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
+github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
+github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE=
+github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs=
+github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2KvnJRumpMGbE=
+github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k=
+github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g=
+github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
+github.com/go-viper/mapstructure/v2 v2.0.0 h1:dhn8MZ1gZ0mzeodTG3jt5Vj/o87xZKuNAprG2mQfMfc=
+github.com/go-viper/mapstructure/v2 v2.0.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
+github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw=
+github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU=
+github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0=
+github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4=
+github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
+github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
+github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
+github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
+github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I=
+github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U=
+github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
+github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
+github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ=
+github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
+github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
+github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
+github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
+github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg=
+github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
+github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
+github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
+github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
+github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
+github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
+github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY=
+github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
+github.com/heetch/avro v0.4.5 h1:BSnj4wEeUG1IjMTm9/tBwQnV3euuIVa1mRWHnm1t8VU=
+github.com/heetch/avro v0.4.5/go.mod h1:gxf9GnbjTXmWmqxhdNbAMcZCjpye7RV5r9t3Q0dL6ws=
+github.com/ilyakaznacheev/cleanenv v1.5.0 h1:0VNZXggJE2OYdXE87bfSSwGxeiGt9moSR2lOrsHHvr4=
+github.com/ilyakaznacheev/cleanenv v1.5.0/go.mod h1:a5aDzaJrLCQZsazHol1w8InnDcOX0OColm64SlIi6gk=
+github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4=
+github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
+github.com/in-toto/in-toto-golang v0.5.0 h1:hb8bgwr0M2hGdDsLjkJ3ZqJ8JFLL/tgYdAxF/XEFBbY=
+github.com/in-toto/in-toto-golang v0.5.0/go.mod h1:/Rq0IZHLV7Ku5gielPT4wPHJfH1GdHMCq8+WPxw8/BE=
+github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
+github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
+github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
+github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
+github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4=
+github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc=
+github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
+github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
+github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
+github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
+github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
+github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
+github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA=
+github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
+github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
+github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4=
+github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
+github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
+github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
+github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
+github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
+github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
+github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
+github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
+github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
+github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
+github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk=
+github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y=
+github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo=
+github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
+github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4=
+github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
+github.com/miekg/pkcs11 v1.1.1 h1:Ugu9pdy6vAYku5DEpVWVFPYnzV+bxB+iRdbuFSu7TvU=
+github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs=
+github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
+github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
+github.com/moby/buildkit v0.14.1 h1:2epLCZTkn4CikdImtsLtIa++7DzCimrrZCT1sway+oI=
+github.com/moby/buildkit v0.14.1/go.mod h1:1XssG7cAqv5Bz1xcGMxJL123iCv5TYN4Z/qf647gfuk=
+github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
+github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
+github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg=
+github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc=
+github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk=
+github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc=
+github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8=
+github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c=
+github.com/moby/sys/mountinfo v0.7.1 h1:/tTvQaSJRr2FshkhXiIpux6fQ2Zvc4j7tAhMTStAG2g=
+github.com/moby/sys/mountinfo v0.7.1/go.mod h1:IJb6JQeOklcdMU9F5xQ8ZALD+CUr5VlGpwtX+VE0rpI=
+github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc=
+github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo=
+github.com/moby/sys/signal v0.7.0 h1:25RW3d5TnQEoKvRbEKUGay6DCQ46IxAVTT9CUMgmsSI=
+github.com/moby/sys/signal v0.7.0/go.mod h1:GQ6ObYZfqacOwTtlXvcmh9A26dVRul/hbOZn88Kg8Tg=
+github.com/moby/sys/symlink v0.2.0 h1:tk1rOM+Ljp0nFmfOIBtlV3rTDlWOwFRhjEeAhZB0nZc=
+github.com/moby/sys/symlink v0.2.0/go.mod h1:7uZVF2dqJjG/NsClqul95CqKOBRQyYSNnJ6BMgR/gFs=
+github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg=
+github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU=
+github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
+github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
+github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
+github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
+github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
+github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
+github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
+github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus=
+github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw=
+github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
+github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
+github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
+github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
+github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
+github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw=
+github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
+github.com/prometheus/client_golang v1.17.0 h1:rl2sfwZMtSthVU752MqfjQozy7blglC+1SOtjMAMh+Q=
+github.com/prometheus/client_golang v1.17.0/go.mod h1:VeL+gMmOAxkS2IqfCq0ZmHSL+LjWfWDUmp1mBz9JgUY=
+github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw=
+github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI=
+github.com/prometheus/common v0.44.0 h1:+5BrQJwiBB9xsMygAB3TNvpQKOwlkc25LbISbrdOOfY=
+github.com/prometheus/common v0.44.0/go.mod h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY=
+github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo=
+github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo=
+github.com/r3labs/sse v0.0.0-20210224172625-26fe804710bc h1:zAsgcP8MhzAbhMnB1QQ2O7ZhWYVGYSR2iVcjzQuPV+o=
+github.com/r3labs/sse v0.0.0-20210224172625-26fe804710bc/go.mod h1:S8xSOnV3CgpNrWd0GQ/OoQfMtlg2uPRSuTzcSGrzwK8=
+github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
+github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
+github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
+github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
+github.com/secure-systems-lab/go-securesystemslib v0.4.0 h1:b23VGrQhTA8cN2CbBw7/FulN9fTtqYUdS5+Oxzt+DUE=
+github.com/secure-systems-lab/go-securesystemslib v0.4.0/go.mod h1:FGBZgq2tXWICsxWQW1msNf49F0Pf2Op5Htayx335Qbs=
+github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b h1:h+3JX2VoWTFuyQEo87pStk/a99dzIO1mM9KxIyLPGTU=
+github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b/go.mod h1:/yeG0My1xr/u+HZrFQ1tOQQQQrOawfyMUH13ai5brBc=
+github.com/shibumi/go-pathspec v1.3.0 h1:QUyMZhFo0Md5B8zV8x2tesohbb5kfbpTi9rBnKh5dkI=
+github.com/shibumi/go-pathspec v1.3.0/go.mod h1:Xutfslp817l2I1cZvgcfeMQJG5QnU2lh5tVaaMCl3jE=
+github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11DgpE4=
+github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM=
+github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM=
+github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ=
+github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
+github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
+github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA=
+github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog=
+github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
+github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
+github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
+github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
+github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/testcontainers/testcontainers-go v0.33.0 h1:zJS9PfXYT5O0ZFXM2xxXfk4J5UMw/kRiISng037Gxdw=
+github.com/testcontainers/testcontainers-go v0.33.0/go.mod h1:W80YpTa8D5C3Yy16icheD01UTDu+LmXIA2Keo+jWtT8=
+github.com/testcontainers/testcontainers-go/modules/compose v0.33.0 h1:PyrUOF+zG+xrS3p+FesyVxMI+9U+7pwhZhyFozH3jKY=
+github.com/testcontainers/testcontainers-go/modules/compose v0.33.0/go.mod h1:oqZaUnFEskdZriO51YBquku/jhgzoXHPot6xe1DqKV4=
+github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4DzbAiAiEL3c=
+github.com/theupdateframework/notary v0.7.0/go.mod h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw=
+github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375 h1:QB54BJwA6x8QU9nHY3xJSZR2kX9bgpZekRKGkLTmEXA=
+github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375/go.mod h1:xRroudyp5iVtxKqZCrA6n2TLFRBf8bmnjr1UD4x+z7g=
+github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU=
+github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
+github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk=
+github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
+github.com/tonistiigi/fsutil v0.0.0-20240424095704-91a3fc46842c h1:+6wg/4ORAbnSoGDzg2Q1i3CeMcT/jjhye/ZfnBHy7/M=
+github.com/tonistiigi/fsutil v0.0.0-20240424095704-91a3fc46842c/go.mod h1:vbbYqJlnswsbJqWUcJN8fKtBhnEgldDrcagTgnBVKKM=
+github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/v/cCndK0AMpt1wiVFb/YYmqB3/QG0=
+github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk=
+github.com/tonistiigi/vt100 v0.0.0-20240514184818-90bafcd6abab h1:H6aJ0yKQ0gF49Qb2z5hI1UHxSQt4JMyxebFR15KnApw=
+github.com/tonistiigi/vt100 v0.0.0-20240514184818-90bafcd6abab/go.mod h1:ulncasL3N9uLrVann0m+CDlJKWsIAP34MPcOJF6VRvc=
+github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
+github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
+github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
+github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
+github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
+github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
+github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw=
+github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
+go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg=
+go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0=
+go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.46.1 h1:gbhw/u49SS3gkPWiYweQNJGm/uJN5GkI/FrosxSHT7A=
+go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.46.1/go.mod h1:GnOaBaFQ2we3b9AGWJpsBa7v1S5RlQzlC3O7dRMxZhM=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw=
+go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo=
+go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0 h1:ZtfnDL+tUrs1F0Pzfwbg2d59Gru9NCH3bgSHBM6LDwU=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0/go.mod h1:hG4Fj/y8TR/tlEDREo8tWstl9fO9gcFkn4xrx0Io8xU=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0 h1:NmnYCiR0qNufkldjVvyQfZTHSdzeHoZ41zggMsdMcLM=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0/go.mod h1:UVAO61+umUsHLtYb8KXXRoHtxUkdOPkYidzW3gipRLQ=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.42.0 h1:wNMDy/LVGLj2h3p6zg4d0gypKfWKSWI14E1C4smOgl8=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.42.0/go.mod h1:YfbDdXAAkemWJK3H/DshvlrxqFB2rtW4rY6ky/3x/H0=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.21.0 h1:cl5P5/GIfFh4t6xyruOgJP5QiA1pw4fYYdv6nc6CBWw=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.21.0/go.mod h1:zgBdWWAu7oEEMC06MMKc5NLbA/1YDXV1sMpSqEeLQLg=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.21.0 h1:tIqheXEFWAZ7O8A7m+J0aPTmpJN3YQ7qetUAdkkkKpk=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.21.0/go.mod h1:nUeKExfxAQVbiVFn32YXpXZZHZ61Cc3s3Rn1pDBGAb0=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0 h1:digkEZCJWobwBqMwC0cwCq8/wkkRy/OowZg5OArWZrM=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0/go.mod h1:/OpE/y70qVkndM0TrxT4KBoN3RsFZP0QaofcfYrj76I=
+go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI=
+go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco=
+go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw=
+go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg=
+go.opentelemetry.io/otel/sdk/metric v1.21.0 h1:smhI5oD714d6jHE6Tie36fPx4WDFIg+Y6RfAY4ICcR0=
+go.opentelemetry.io/otel/sdk/metric v1.21.0/go.mod h1:FJ8RAsoPGv/wYMgBdUJXOm+6pzFY3YdljnXtv1SBE8Q=
+go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI=
+go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
+go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I=
+go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM=
+go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU=
+go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc=
+golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A=
+golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70=
+golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3 h1:hNQpMuAJe5CtcUqCXaWga3FHu+kQvCqcsoVaQgSV60o=
+golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08=
+golang.org/x/net v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo=
+golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0=
+golang.org/x/oauth2 v0.18.0 h1:09qnuIAgzdx1XplqJvW6CQqMCtGZykZWcXzPMPUusvI=
+golang.org/x/oauth2 v0.18.0/go.mod h1:Wf7knwG0MPoWIMMBgFlEaSUDaKskp0dCfrlJRJXbBi8=
+golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
+golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
+golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/term v0.24.0 h1:Mh5cbb+Zk2hqqXNO7S1iTjEphVL+jb8ZWaqh/g+JWkM=
+golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8=
+golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224=
+golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
+golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U=
+golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
+google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
+google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
+google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa h1:ePqxpG3LVx+feAUOx8YmR5T7rc0rdzK8DyxM8cQ9zq0=
+google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa/go.mod h1:CnZenrTdRJb7jc+jOm0Rkywq+9wh0QC4U8tyiRbEPPM=
+google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237 h1:RFiFrvy37/mpSpdySBDrUdipW/dHwsRwh3J3+A9VgT4=
+google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237/go.mod h1:Z5Iiy3jtmioajWHDGFk7CeugTyHtPvMHA4UTmUkyalE=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237 h1:NnYq6UN9ReLM9/Y01KWNOWyI5xQ9kbIms5GGJVwS/Yc=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY=
+google.golang.org/grpc v1.64.1 h1:LKtvyfbX3UGVPFcGqJ9ItpVWW6oN/2XqTxfAnwRRXiA=
+google.golang.org/grpc v1.64.1/go.mod h1:hiQF4LFZelK2WKaP6W0L92zGHtiQdZxk8CrSdvyjeP0=
+google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
+google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
+gopkg.in/cenkalti/backoff.v1 v1.1.0 h1:Arh75ttbsvlpVA7WtVpH4u9h6Zl46xuptxqLxPiSo4Y=
+gopkg.in/cenkalti/backoff.v1 v1.1.0/go.mod h1:J6Vskwqd+OMVJl8C33mmtxTBs2gyzfv7UDAkHu8BrjI=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=
+gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
+gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
+gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+k8s.io/api v0.29.2 h1:hBC7B9+MU+ptchxEqTNW2DkUosJpp1P+Wn6YncZ474A=
+k8s.io/api v0.29.2/go.mod h1:sdIaaKuU7P44aoyyLlikSLayT6Vb7bvJNCX105xZXY0=
+k8s.io/apimachinery v0.29.2 h1:EWGpfJ856oj11C52NRCHuU7rFDwxev48z+6DSlGNsV8=
+k8s.io/apimachinery v0.29.2/go.mod h1:6HVkd1FwxIagpYrHSwJlQqZI3G9LfYWRPAkUvLnXTKU=
+k8s.io/client-go v0.29.2 h1:FEg85el1TeZp+/vYJM7hkDlSTFZ+c5nnK44DJ4FyoRg=
+k8s.io/client-go v0.29.2/go.mod h1:knlvFZE58VpqbQpJNbCbctTVXcd35mMyAAwBdpt4jrA=
+k8s.io/klog/v2 v2.110.1 h1:U/Af64HJf7FcwMcXyKm2RPM22WZzyR7OSpYj5tg3cL0=
+k8s.io/klog/v2 v2.110.1/go.mod h1:YGtd1984u+GgbuZ7e08/yBuAfKLSO0+uR1Fhi6ExXjo=
+k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00 h1:aVUu9fTY98ivBPKR9Y5w/AuzbMm96cd3YHRTU83I780=
+k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00/go.mod h1:AsvuZPBlUDVuCdzJ87iajxtXuR9oktsTctW/R9wwouA=
+k8s.io/utils v0.0.0-20230726121419-3b25d923346b h1:sgn3ZU783SCgtaSJjpcVVlRqd6GSnlTLKgpAAttJvpI=
+k8s.io/utils v0.0.0-20230726121419-3b25d923346b/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0=
+olympos.io/encoding/edn v0.0.0-20201019073823-d3554ca0b0a3 h1:slmdOY3vp8a7KQbHkL+FLbvbkgMqmXojpFUO/jENuqQ=
+olympos.io/encoding/edn v0.0.0-20201019073823-d3554ca0b0a3/go.mod h1:oVgVk4OWVDi43qWBEyGhXgYxt7+ED4iYNpTngSLX2Iw=
+sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo=
+sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0=
+sigs.k8s.io/structured-merge-diff/v4 v4.4.1 h1:150L+0vs/8DA78h1u02ooW1/fFq/Lwr+sGiqlzvrtq4=
+sigs.k8s.io/structured-merge-diff/v4 v4.4.1/go.mod h1:N8hJocpFajUSSeSJ9bOZ77VzejKZaXsTtZo4/u7Io08=
+sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo=
+sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8=
+tags.cncf.io/container-device-interface v0.7.2 h1:MLqGnWfOr1wB7m08ieI4YJ3IoLKKozEnnNYBtacDPQU=
+tags.cncf.io/container-device-interface v0.7.2/go.mod h1:Xb1PvXv2BhfNb3tla4r9JL129ck1Lxv9KuU6eVOfKto=
diff --git a/module_1/go/infra/bash-commands.txt b/module_1/go/infra/bash-commands.txt
new file mode 100644
index 0000000..5927c0a
--- /dev/null
+++ b/module_1/go/infra/bash-commands.txt
@@ -0,0 +1,2 @@
+docker exec -it kafka-1 ../../usr/bin/kafka-topics --create --topic test-topic --bootstrap-server localhost:9092 --partitions 3 --replication-factor 2
+docker exec -it kafka-1 ../../usr/bin/kafka-topics --describe --topic test-topic --bootstrap-server localhost:9092
diff --git a/module_1/go/infra/docker-compose.yaml b/module_1/go/infra/docker-compose.yaml
new file mode 100644
index 0000000..6acd25b
--- /dev/null
+++ b/module_1/go/infra/docker-compose.yaml
@@ -0,0 +1,79 @@
+ version: '2'
+ services:
+ zookeeper:
+ ports:
+ - 22181:2181
+ container_name: zookeeper
+ image: confluentinc/cp-zookeeper:7.4.4
+ environment:
+ ZOOKEEPER_CLIENT_PORT: 2181
+ networks:
+ - kafka-network
+
+ schema-registry:
+ image: bitnami/schema-registry:7.6
+ ports:
+ - '127.0.0.1:8081:8081'
+ depends_on:
+ - kafka-1
+ - kafka-2
+ environment:
+ SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081
+ SCHEMA_REGISTRY_KAFKA_BROKERS: PLAINTEXT://kafka-1:9092,PLAINTEXT://kafka-2:9092
+ networks:
+ - kafka-network
+
+ kafka-1:
+ image: confluentinc/cp-kafka:7.4.4
+ container_name: kafka-1
+ ports:
+ - 29092:29092
+ depends_on:
+ - zookeeper
+ environment:
+ KAFKA_BROKER_ID: 1
+ KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-1:9092,PLAINTEXT_HOST://localhost:29092
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ networks:
+ - kafka-network
+
+ kafka-2:
+ image: confluentinc/cp-kafka:7.4.4
+ container_name: kafka-2
+ ports:
+ - 39092:39092
+ depends_on:
+ - zookeeper
+ environment:
+ KAFKA_BROKER_ID: 2
+ KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-2:9092,PLAINTEXT_HOST://localhost:39092
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ networks:
+ - kafka-network
+
+ kafka-3:
+ image: confluentinc/cp-kafka:7.4.4
+ container_name: kafka-3
+ ports:
+ - 49092:49092
+ depends_on:
+ - zookeeper
+ environment:
+ KAFKA_BROKER_ID: 3
+ KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-3:9092,PLAINTEXT_HOST://localhost:49092
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ networks:
+ - kafka-network
+
+ networks:
+ kafka-network:
+ driver: bridge
\ No newline at end of file
diff --git a/module_1/java/.gitignore b/module_1/java/.gitignore
new file mode 100644
index 0000000..5ff6309
--- /dev/null
+++ b/module_1/java/.gitignore
@@ -0,0 +1,38 @@
+target/
+!.mvn/wrapper/maven-wrapper.jar
+!**/src/main/**/target/
+!**/src/test/**/target/
+
+### IntelliJ IDEA ###
+.idea/modules.xml
+.idea/jarRepositories.xml
+.idea/compiler.xml
+.idea/libraries/
+*.iws
+*.iml
+*.ipr
+
+### Eclipse ###
+.apt_generated
+.classpath
+.factorypath
+.project
+.settings
+.springBeans
+.sts4-cache
+
+### NetBeans ###
+/nbproject/private/
+/nbbuild/
+/dist/
+/nbdist/
+/.nb-gradle/
+build/
+!**/src/main/**/build/
+!**/src/test/**/build/
+
+### VS Code ###
+.vscode/
+
+### Mac OS ###
+.DS_Store
\ No newline at end of file
diff --git a/module_1/java/README.md b/module_1/java/README.md
new file mode 100644
index 0000000..7142f65
--- /dev/null
+++ b/module_1/java/README.md
@@ -0,0 +1,21 @@
+### Running the producer (ProducerApp)
+The producer sends 10 messages to the Kafka topic, one every 600 milliseconds.
+
+### Running the push consumer (PushConsumerApp)
+The push consumer listens for new messages and processes them in near real time (implemented as a very frequent call to poll with a low timeout).
+
+### Pull Consumer (PullConsumerApp)
+The pull consumer periodically checks the Kafka topic for new messages and processes them. It polls the topic at fixed intervals (10 seconds in this case).
+
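+The classes can be started from an IDE; one possible way to run them from a terminal instead (a sketch, assuming the `exec-maven-plugin` is resolved through Maven's default plugin groups) is shown below:
+```bash
+mvn compile exec:java -Dexec.mainClass="org.example.ProducerApp"
+mvn compile exec:java -Dexec.mainClass="org.example.PushConsumerApp"
+mvn compile exec:java -Dexec.mainClass="org.example.PullConsumerApp"
+```
+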
+### Verifying the application
+1. Start the Kafka cluster: ```docker-compose up -d```
+2. Create the topic: ```docker exec -it kafka-1 ../../usr/bin/kafka-topics --create --topic test-topic --bootstrap-server localhost:9092 --partitions 3 --replication-factor 2```
+3. Start PullConsumerApp and PushConsumerApp in separate terminal windows
+4. Start ProducerApp in a third terminal window
+5. The producer sends messages: its terminal window prints a confirmation and the metadata for each message sent
+6. In the PushConsumerApp terminal, check that the consumer receives messages in real time and logs them
+7. In the PullConsumerApp terminal, check that the consumer receives messages periodically and in batches
+8. Check the Kafka logs: you can inspect them for debugging or to confirm that messages are written and read successfully (see the example commands below)
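+
+One way to check this from the command line (a sketch; it assumes the Confluent image has the Kafka CLI tools on its PATH and that the broker listens on port 9092 inside the container):
+```bash
+# Broker logs of the first broker
+docker logs kafka-1
+
+# Read back everything that has been written to the topic
+docker exec -it kafka-1 kafka-console-consumer --bootstrap-server localhost:9092 --topic test-topic --from-beginning
+```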
\ No newline at end of file
diff --git a/module_1/java/docker-compose.yaml b/module_1/java/docker-compose.yaml
new file mode 100644
index 0000000..0ca21d6
--- /dev/null
+++ b/module_1/java/docker-compose.yaml
@@ -0,0 +1,63 @@
+version: '2'
+services:
+ zookeeper:
+ ports:
+ - 22181:2181
+ container_name: zookeeper
+ image: confluentinc/cp-zookeeper:7.4.4
+ environment:
+ ZOOKEEPER_CLIENT_PORT: 2181
+ networks:
+ - kafka-network
+ kafka-1:
+ image: confluentinc/cp-kafka:7.4.4
+ container_name: kafka-1
+ ports:
+ - 29092:29092
+ depends_on:
+ - zookeeper
+ environment:
+ KAFKA_BROKER_ID: 1
+ KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-1:9092,PLAINTEXT_HOST://localhost:29092
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ networks:
+ - kafka-network
+ kafka-2:
+ image: confluentinc/cp-kafka:7.4.4
+ container_name: kafka-2
+ ports:
+ - 39092:39092
+ depends_on:
+ - zookeeper
+ environment:
+ KAFKA_BROKER_ID: 2
+ KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-2:9092,PLAINTEXT_HOST://localhost:39092
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ networks:
+ - kafka-network
+ kafka-3:
+ image: confluentinc/cp-kafka:7.4.4
+ container_name: kafka-3
+ ports:
+ - 49092:49092
+ depends_on:
+ - zookeeper
+ environment:
+ KAFKA_BROKER_ID: 3
+ KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-3:9092,PLAINTEXT_HOST://localhost:49092
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ networks:
+ - kafka-network
+
+networks:
+ kafka-network:
+ driver: bridge
\ No newline at end of file
diff --git a/module_1/java/pom.xml b/module_1/java/pom.xml
new file mode 100644
index 0000000..a0f8518
--- /dev/null
+++ b/module_1/java/pom.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <groupId>org.example</groupId>
+    <artifactId>Kafka_course_module_1</artifactId>
+    <version>1.0-SNAPSHOT</version>
+
+    <properties>
+        <maven.compiler.source>17</maven.compiler.source>
+        <maven.compiler.target>17</maven.compiler.target>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka-clients</artifactId>
+            <version>3.4.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+            <version>2.18.1</version>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-core</artifactId>
+            <version>2.18.1</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+            <version>2.0.9</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>connect-json</artifactId>
+            <version>3.9.0</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-classic</artifactId>
+            <version>1.5.12</version>
+        </dependency>
+    </dependencies>
+
+</project>
\ No newline at end of file
diff --git a/module_1/java/src/main/java/org/example/Message.java b/module_1/java/src/main/java/org/example/Message.java
new file mode 100644
index 0000000..d9b1a9b
--- /dev/null
+++ b/module_1/java/src/main/java/org/example/Message.java
@@ -0,0 +1,47 @@
+package org.example;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public class Message {
+ private String userId;
+ private String content;
+
+
+ public Message() {
+ }
+
+ public Message(String userId, String content) {
+ this.userId = userId;
+ this.content = content;
+ }
+
+ public String getUserId() {
+ return userId;
+ }
+
+ public void setUserId(String userId) {
+ this.userId = userId;
+ }
+
+ public String getContent() {
+ return content;
+ }
+
+ public void setContent(String content) {
+ this.content = content;
+ }
+
+ // Serializes a Message object to a JSON string
+ public static String serialize(Message message) throws JsonProcessingException {
+ ObjectMapper objectMapper = new ObjectMapper();
+ return objectMapper.writeValueAsString(message);
+ }
+
+ // Deserializes a JSON string back into a Message object
+ public static Message deserialize(String data) throws Exception {
+ ObjectMapper objectMapper = new ObjectMapper();
+ return objectMapper.readValue(data, Message.class);
+ }
+
+}
diff --git a/module_1/java/src/main/java/org/example/ProducerApp.java b/module_1/java/src/main/java/org/example/ProducerApp.java
new file mode 100644
index 0000000..31957bb
--- /dev/null
+++ b/module_1/java/src/main/java/org/example/ProducerApp.java
@@ -0,0 +1,50 @@
+package org.example;
+
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Properties;
+import java.util.Random;
+
+/**
+ * This producer sends messages to Kafka with "at-least-once" delivery guarantee
+ */
+public class ProducerApp {
+ private static final Logger logger = LoggerFactory.getLogger(ProducerApp.class);
+
+ public static void main(String[] args) {
+ Properties props = new Properties();
+ props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:29092,localhost:39092,localhost:49092");
+ props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
+ props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
+
+ // Delivery acknowledgement settings
+ props.put(ProducerConfig.ACKS_CONFIG, "all"); // wait for all in-sync replicas: at-least-once delivery
+ props.put(ProducerConfig.RETRIES_CONFIG, 3); // retry on transient failures
+
+ for (int i = 1; i < 11; i++) {
+ logger.info(String.format("Preparing to send message number %s", i));
+ try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
+
+ Message message = new Message("user123" + new Random().nextInt(1, 100), "Hello Kafka!");
+ ProducerRecord<String, String> record = new ProducerRecord<>("test-topic", message.getUserId(),
+ Message.serialize(message));
+
+ producer.send(record, (metadata, exception) -> {
+ if (exception == null) {
+ logger.info(String.format("Message sent successfully: %s", metadata.toString()));
+ } else {
+ logger.error("Error while producing: ", exception);
+ }
+ });
+ Thread.sleep(600);
+ } catch (Exception e) {
+ logger.error("Error while producing: ", e);
+ }
+ }
+ }
+}
diff --git a/module_1/java/src/main/java/org/example/PullConsumerApp.java b/module_1/java/src/main/java/org/example/PullConsumerApp.java
new file mode 100644
index 0000000..117eed7
--- /dev/null
+++ b/module_1/java/src/main/java/org/example/PullConsumerApp.java
@@ -0,0 +1,49 @@
+package org.example;
+
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.time.Duration;
+import java.util.List;
+import java.util.Properties;
+
+/**
+ * This consumer manually polls Kafka for new messages, implementing a pull model
+ */
+public class PullConsumerApp {
+ private static final Logger logger = LoggerFactory.getLogger(PullConsumerApp.class);
+
+ public static void main(String[] args) {
+ Properties props = new Properties();
+ props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:29092,localhost:39092,localhost:49092");
+ props.put(ConsumerConfig.GROUP_ID_CONFIG, "consumer-group-1");
+ props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+ props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+ props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); // disable automatic offset commits
+ props.put(ConsumerConfig.FETCH_MIN_BYTES_CONFIG, 10 * 1024 * 1024); // a large minimum fetch size makes the batching easier to observe
+
+ KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
+ consumer.subscribe(List.of("test-topic"));
+
+ try {
+ while (true) {
+ ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(10000));
+ for (ConsumerRecord<String, String> record : records) {
+ Message message = Message.deserialize(record.value());
+ logger.info("Pulled Message: " + message.getContent());
+ consumer.commitSync();
+ }
+ }
+ } catch (Exception e) {
+ logger.error("Failure", e);
+ } finally {
+ consumer.close();
+ }
+ }
+}
diff --git a/module_1/java/src/main/java/org/example/PushConsumerApp.java b/module_1/java/src/main/java/org/example/PushConsumerApp.java
new file mode 100644
index 0000000..42a6b7d
--- /dev/null
+++ b/module_1/java/src/main/java/org/example/PushConsumerApp.java
@@ -0,0 +1,45 @@
+package org.example;
+
+import org.apache.kafka.clients.consumer.Consumer;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.time.Duration;
+import java.util.List;
+import java.util.Properties;
+
+/**
+ * This consumer reacts to messages immediately using a listener
+ */
+public class PushConsumerApp {
+ private static final Logger logger = LoggerFactory.getLogger(PushConsumerApp.class);
+
+ public static void main(String[] args) {
+ Properties props = new Properties();
+ props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:29092,localhost:39092,localhost:49092");
+ props.put(ConsumerConfig.GROUP_ID_CONFIG, "consumer-group-2");
+ props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+ props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+ props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
+
+ Consumer<String, String> consumer = new KafkaConsumer<>(props);
+ consumer.subscribe(List.of("test-topic"));
+
+ while (true) {
+ ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(10));
+ records.forEach(record -> {
+ try {
+ Message message = Message.deserialize(record.value());
+ logger.info("Pushed Message: " + message.getContent());
+ } catch (Exception e) {
+ logger.error("Failure", e);
+ }
+ });
+ }
+ }
+}
diff --git a/module_1/java/src/main/resources/logback.xml b/module_1/java/src/main/resources/logback.xml
new file mode 100644
index 0000000..5fd54f2
--- /dev/null
+++ b/module_1/java/src/main/resources/logback.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<configuration>
+    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
+        </encoder>
+    </appender>
+
+    <appender name="FILE" class="ch.qos.logback.core.FileAppender">
+        <file>logs/application.log</file>
+        <append>true</append>
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
+        </encoder>
+    </appender>
+
+    <appender name="ROLLING" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <file>logs/rolling-application.log</file>
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <fileNamePattern>logs/rolling-application.%d{yyyy-MM-dd}.log</fileNamePattern>
+            <maxHistory>30</maxHistory>
+        </rollingPolicy>
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
+        </encoder>
+    </appender>
+
+    <root level="INFO">
+        <appender-ref ref="STDOUT"/>
+        <appender-ref ref="FILE"/>
+        <appender-ref ref="ROLLING"/>
+    </root>
+</configuration>
+
diff --git a/module_1/java/topic.txt b/module_1/java/topic.txt
new file mode 100644
index 0000000..9f4a6bd
--- /dev/null
+++ b/module_1/java/topic.txt
@@ -0,0 +1,9 @@
+Command: docker exec -it kafka-1 ../../usr/bin/kafka-topics --create --topic test-topic --bootstrap-server localhost:9092 --partitions 3 --replication-factor 2
+
+Topic description output:
+PS C:\Users\-\IdeaProjects\Kafka_course_module_1> docker exec -it kafka_course_module_1_broker-1_1 ../../usr/bin/kafka-topics --describe --topic test-topic --bootstrap-server localhost:9092
+Topic: test-topic TopicId: WHH4P3r1Q2OT_N8H4Kdzsw PartitionCount: 3 ReplicationFactor: 2 Configs:
+ Topic: test-topic Partition: 0 Leader: 1 Replicas: 1,2 Isr: 1,2
+ Topic: test-topic Partition: 1 Leader: 2 Replicas: 2,1 Isr: 2,1
+ Topic: test-topic Partition: 2 Leader: 1 Replicas: 1,2 Isr: 1,2
diff --git a/module_1/python/.gitignore b/module_1/python/.gitignore
new file mode 100644
index 0000000..b4db92e
--- /dev/null
+++ b/module_1/python/.gitignore
@@ -0,0 +1,165 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
+.pdm.toml
+.pdm-python
+.pdm-build/
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+.idea/
+
+# macOS
+.DS_Store
\ No newline at end of file
diff --git a/module_1/python/README.md b/module_1/python/README.md
new file mode 100644
index 0000000..7f56773
--- /dev/null
+++ b/module_1/python/README.md
@@ -0,0 +1,28 @@
+### Running the producer (producer_app.py)
+The producer sends 10 messages to the Kafka topic, one every 600 milliseconds.
+
+### Running the push consumer (push_consumer_app.py)
+The push consumer listens for new messages and processes them in near real time
+(implemented as a very frequent call to consume with a low timeout).
+
+### Pull Consumer (pull_consumer_app.py)
+The pull consumer periodically checks the Kafka topic for new messages and
+processes them. It polls the topic at fixed intervals (10 seconds in this case).
+
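+One possible way to set up the environment and run the scripts (a sketch; it assumes Python 3 and a POSIX shell):
+```bash
+python -m venv .venv && source .venv/bin/activate
+pip install -r src/requirements.txt
+cd src
+python producer_app.py   # likewise push_consumer_app.py / pull_consumer_app.py
+```
+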
+### Verifying the application
+1. Start the Kafka cluster: ```docker-compose up -d``` (on macOS: ```docker compose up -d```)
+2. Create the topic: ```docker exec -it kafka-1 ../../usr/bin/kafka-topics --create --topic test-topic --bootstrap-server localhost:9092 --partitions 3 --replication-factor 2```
+3. Start `push_consumer_app.py` and `pull_consumer_app.py` in separate terminal windows
+4. Start `producer_app.py` in a third terminal window
+5. The producer sends messages: its terminal window prints a confirmation and the delivery details for each message sent
+6. In the `push_consumer_app` terminal, check that the consumer receives messages in real time and logs them
+7. In the `pull_consumer_app` terminal, check that the consumer receives messages periodically and in batches
+8. Check the Kafka logs: you can inspect them for debugging or to confirm that messages are written and read successfully (see the example commands below)
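+
+One way to check this from the broker side (a sketch; it assumes the Confluent image has the Kafka CLI tools on its PATH):
+```bash
+# Broker logs
+docker logs kafka-1
+
+# Committed offsets and lag of the pull consumer's group
+docker exec -it kafka-1 kafka-consumer-groups --bootstrap-server localhost:9092 --describe --group consumer-group-1
+```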
diff --git a/module_1/python/docker-compose.yaml b/module_1/python/docker-compose.yaml
new file mode 100644
index 0000000..0ca21d6
--- /dev/null
+++ b/module_1/python/docker-compose.yaml
@@ -0,0 +1,63 @@
+version: '2'
+services:
+ zookeeper:
+ ports:
+ - 22181:2181
+ container_name: zookeeper
+ image: confluentinc/cp-zookeeper:7.4.4
+ environment:
+ ZOOKEEPER_CLIENT_PORT: 2181
+ networks:
+ - kafka-network
+ kafka-1:
+ image: confluentinc/cp-kafka:7.4.4
+ container_name: kafka-1
+ ports:
+ - 29092:29092
+ depends_on:
+ - zookeeper
+ environment:
+ KAFKA_BROKER_ID: 1
+ KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-1:9092,PLAINTEXT_HOST://localhost:29092
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ networks:
+ - kafka-network
+ kafka-2:
+ image: confluentinc/cp-kafka:7.4.4
+ container_name: kafka-2
+ ports:
+ - 39092:39092
+ depends_on:
+ - zookeeper
+ environment:
+ KAFKA_BROKER_ID: 2
+ KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-2:9092,PLAINTEXT_HOST://localhost:39092
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ networks:
+ - kafka-network
+ kafka-3:
+ image: confluentinc/cp-kafka:7.4.4
+ container_name: kafka-3
+ ports:
+ - 49092:49092
+ depends_on:
+ - zookeeper
+ environment:
+ KAFKA_BROKER_ID: 3
+ KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-3:9092,PLAINTEXT_HOST://localhost:49092
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ networks:
+ - kafka-network
+
+networks:
+ kafka-network:
+ driver: bridge
\ No newline at end of file
diff --git a/module_1/python/src/models/__init__.py b/module_1/python/src/models/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/module_1/python/src/models/message.py b/module_1/python/src/models/message.py
new file mode 100644
index 0000000..51f7f69
--- /dev/null
+++ b/module_1/python/src/models/message.py
@@ -0,0 +1,37 @@
+import json
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class Message:
+ def __init__(self, user_id: str = None, content: str = None):
+ self.user_id = user_id
+ self.content = content
+
+ def serialize(self) -> str:
+ """
+ Serialize the message to JSON.
+
+ :return: the JSON string.
+ """
+ try:
+ return json.dumps({"user_id": self.user_id, "content": self.content})
+ except Exception as e:
+ logger.error(f"Serialization error: {e}")
+ raise
+
+ @staticmethod
+ def deserialize(data: str) -> 'Message':
+ """
+ Deserialize a JSON string into a Message object.
+
+ :param data: the JSON string.
+ :return: the parsed Message instance.
+ """
+ try:
+ json_data = json.loads(data)
+ return Message(json_data["user_id"], json_data["content"])
+ except Exception as e:
+ logger.error(f"Deserialization error: {e}")
+ raise
diff --git a/module_1/python/src/producer_app.py b/module_1/python/src/producer_app.py
new file mode 100644
index 0000000..356a9b3
--- /dev/null
+++ b/module_1/python/src/producer_app.py
@@ -0,0 +1,57 @@
+import logging.config
+import random
+import time
+
+from confluent_kafka import Producer
+
+from models.message import Message
+from settings.logging import LOGGING_CONFIG
+
+logging.config.dictConfig(LOGGING_CONFIG)
+logger = logging.getLogger("producer_app")
+
+
+def delivery_report(err, msg):
+ if err:
+ logger.error(f"Error while producing: {err}")
+ else:
+ logger.info(
+ f"Message sent successfully: topic='{msg.topic()}', partition={msg.partition()}, offset={msg.offset()}"
+ )
+
+
+def produce():
+ """
+ This producer sends messages to Kafka with "at-least-once" delivery guarantee.
+ """
+ producer_conf = {
+ "bootstrap.servers": "localhost:29092,localhost:39092,localhost:49092",
+ # Delivery acknowledgement settings:
+ "acks": "all",  # wait for all in-sync replicas: at-least-once delivery
+ "retries": 3,  # retry on transient failures
+ }
+ producer = Producer(producer_conf)
+
+ try:
+ for i in range(1, 11):
+ logger.info(f"Preparing to send message number {i}")
+ message = Message(f"user123{random.randint(1, 100)}", "Hello Kafka!")
+
+ try:
+ serialized_message = message.serialize()
+ producer.produce(
+ topic="test-topic",
+ key=message.user_id.encode("utf-8"),
+ value=serialized_message.encode("utf-8"),
+ callback=delivery_report,
+ )
+ producer.poll(0)
+ time.sleep(0.6)
+ except Exception as e:
+ logger.error(f"Error while producing: {e}")
+ finally:
+ producer.flush()
+
+
+if __name__ == "__main__":
+ produce()
diff --git a/module_1/python/src/pull_consumer_app.py b/module_1/python/src/pull_consumer_app.py
new file mode 100644
index 0000000..c3eb8e9
--- /dev/null
+++ b/module_1/python/src/pull_consumer_app.py
@@ -0,0 +1,42 @@
+import logging.config
+
+from confluent_kafka import Consumer
+
+from models.message import Message
+from settings.logging import LOGGING_CONFIG
+
+logging.config.dictConfig(LOGGING_CONFIG)
+logger = logging.getLogger("pull_consumer_app")
+
+
+def pull_consume():
+ """
+ This consumer manually polls Kafka for new messages, implementing a pull model.
+ """
+ consumer_conf = {
+ "bootstrap.servers": "localhost:29092,localhost:39092,localhost:49092",
+ "group.id": "consumer-group-1",
+ "auto.offset.reset": "earliest",
+ "enable.auto.commit": False, # Отключает автоматическое сохранение смещения
+ "fetch.min.bytes": 10 * 1024 * 1024, # Для большей наглядности ограничен минимальный размер пакета сообщений
+ "fetch.wait.max.ms": 10_000,
+ }
+ consumer = Consumer(consumer_conf)
+ consumer.subscribe(["test-topic"])
+
+ try:
+ while True:
+ records = consumer.consume(timeout=10)
+
+ for record in records:
+ message = Message.deserialize(record.value().decode("utf-8"))
+ logger.info(f"Pulled Message: {message.content}")
+ consumer.commit()
+ except Exception as e:
+ logger.exception(f"Failure: {e}")
+ finally:
+ consumer.close()
+
+
+if __name__ == "__main__":
+ pull_consume()
diff --git a/module_1/python/src/push_consumer_app.py b/module_1/python/src/push_consumer_app.py
new file mode 100644
index 0000000..590e1ad
--- /dev/null
+++ b/module_1/python/src/push_consumer_app.py
@@ -0,0 +1,39 @@
+import logging.config
+
+from confluent_kafka import Consumer
+
+from models.message import Message
+from settings.logging import LOGGING_CONFIG
+
+logging.config.dictConfig(LOGGING_CONFIG)
+logger = logging.getLogger("push_consumer_app")
+
+
+def push_consume():
+ """
+ This consumer reacts to messages immediately using a listener.
+ """
+ consumer_conf = {
+ "bootstrap.servers": "localhost:29092,localhost:39092,localhost:49092",
+ "group.id": "consumer-group-2",
+ "auto.offset.reset": "earliest",
+ "enable.auto.commit": True,
+ }
+ consumer = Consumer(consumer_conf)
+ consumer.subscribe(["test-topic"])
+
+ try:
+ while True:
+ records = consumer.consume(timeout=0.0001)
+
+ for record in records:
+ message = Message.deserialize(record.value().decode("utf-8"))
+ logger.info(f"Pushed Message: {message.content}")
+ except Exception as e:
+ logger.exception(f"Failure: {e}")
+ finally:
+ consumer.close()
+
+
+if __name__ == "__main__":
+ push_consume()
diff --git a/module_1/python/src/requirements.txt b/module_1/python/src/requirements.txt
new file mode 100644
index 0000000..433a97c
--- /dev/null
+++ b/module_1/python/src/requirements.txt
@@ -0,0 +1 @@
+confluent-kafka==2.6.1
diff --git a/module_1/python/src/settings/__init__.py b/module_1/python/src/settings/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/module_1/python/src/settings/__init__.py
@@ -0,0 +1 @@
+
diff --git a/module_1/python/src/settings/logging.py b/module_1/python/src/settings/logging.py
new file mode 100644
index 0000000..50ba141
--- /dev/null
+++ b/module_1/python/src/settings/logging.py
@@ -0,0 +1,24 @@
+LOGGING_CONFIG = {
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "standard": {
+ "format": "%(asctime)s [%(threadName)s] %(levelname)-5s %(name)s - %(message)s",
+ "datefmt": "%Y-%m-%d %H:%M:%S",
+ },
+ },
+ "handlers": {
+ "console": {
+ "class": "logging.StreamHandler",
+ "formatter": "standard",
+ "level": "INFO",
+ "stream": "ext://sys.stdout",
+ },
+ },
+ "root": {
+ "handlers": ["console"],
+ "level": "INFO",
+ },
+}
+
+