Commit message:

* created SchedulePublisher class and added in relevant dependencies
* added relevant dependency files and their corresponding unit tests - WIP
* fixed compilation error
* removed json-related files
* removed SchedulePublisher changes
* reintroduced json files
* added EventSubscriber
* updated application.conf with avro and json topics
* added eventSubscriber to Scheduler
* updated from PR comments
* fmt

Co-authored-by: susanabrahamtharakan <[email protected]>
Co-authored-by: tba32 <[email protected]>
Commit 162b9af, parent 1ecf4d1. Showing 6 changed files with 165 additions and 7 deletions.
scheduler-3/src/main/scala/uk/sky/scheduler/EventSubscriber.scala (136 additions, 1 deletion)
@@ -1,10 +1,145 @@ (the single deletion is the old `import fs2.Stream`, subsumed by `import fs2.*`):

```scala
package uk.sky.scheduler

import cats.effect.Resource.ExitCase
import cats.effect.{Async, Deferred, Ref, Resource}
import cats.syntax.all.*
import cats.{Monad, Parallel, Show}
import fs2.*
import fs2.kafka.*
import mouse.all.*
import org.typelevel.log4cats.LoggerFactory
import org.typelevel.otel4s.metrics.Meter
import org.typelevel.otel4s.{Attribute, Attributes}
import uk.sky.fs2.kafka.topicloader.TopicLoader
import uk.sky.scheduler.circe.jsonScheduleDecoder
import uk.sky.scheduler.config.KafkaConfig
import uk.sky.scheduler.converters.all.*
import uk.sky.scheduler.domain.ScheduleEvent
import uk.sky.scheduler.error.ScheduleError
import uk.sky.scheduler.kafka.avro.{avroBinaryDeserializer, avroScheduleCodec, AvroSchedule}
import uk.sky.scheduler.kafka.json.{jsonDeserializer, JsonSchedule}
import uk.sky.scheduler.message.Message

trait EventSubscriber[F[_]] {
  def messages: Stream[F, Message[Either[ScheduleError, Option[ScheduleEvent]]]]
}

object EventSubscriber {
  private type Output = Either[ScheduleError, Option[ScheduleEvent]]

  def kafka[F[_] : Async : Parallel : LoggerFactory](
      config: KafkaConfig,
      loaded: Deferred[F, Unit]
  ): F[EventSubscriber[F]] = {

    val avroConsumerSettings: ConsumerSettings[F, String, Either[ScheduleError, Option[AvroSchedule]]] = {
      given Resource[F, Deserializer[F, Either[ScheduleError, Option[AvroSchedule]]]] =
        avroBinaryDeserializer[F, AvroSchedule].map(_.option.map(_.sequence))

      config.consumerSettings[F, String, Either[ScheduleError, Option[AvroSchedule]]]
    }

    val jsonConsumerSettings: ConsumerSettings[F, String, Either[ScheduleError, Option[JsonSchedule]]] = {
      given Deserializer[F, Either[ScheduleError, Option[JsonSchedule]]] =
        jsonDeserializer[F, JsonSchedule].option.map(_.sequence)

      config.consumerSettings[F, String, Either[ScheduleError, Option[JsonSchedule]]]
    }

    for {
      avroLoadedRef <- Ref.of[F, Boolean](false)
      jsonLoadedRef <- Ref.of[F, Boolean](false)
    } yield new EventSubscriber[F] {

      /** If both topics have finished loading, complete the Deferred to allow queueing schedules.
        */
      private def onLoadCompare(exitCase: ExitCase): F[Unit] =
        exitCase match {
          case ExitCase.Succeeded =>
            for {
              avroLoaded <- avroLoadedRef.get
              jsonLoaded <- jsonLoadedRef.get
              _          <- Async[F].whenA(avroLoaded && jsonLoaded)(loaded.complete(()))
            } yield ()
          case ExitCase.Errored(_) | ExitCase.Canceled => Async[F].unit
        }

      private val avroStream: Stream[F, ConsumerRecord[String, Either[ScheduleError, Option[AvroSchedule]]]] =
        config.topics.avro.toNel
          .fold(Stream.exec(avroLoadedRef.set(true) *> onLoadCompare(ExitCase.Succeeded)))(
            TopicLoader.loadAndRun(_, avroConsumerSettings) { exitCase =>
              avroLoadedRef.set(true) *> onLoadCompare(exitCase)
            }
          )

      private val jsonStream: Stream[F, ConsumerRecord[String, Either[ScheduleError, Option[JsonSchedule]]]] =
        config.topics.json.toNel
          .fold(Stream.exec(jsonLoadedRef.set(true) *> onLoadCompare(ExitCase.Succeeded)))(
            TopicLoader.loadAndRun(_, jsonConsumerSettings) { exitCase =>
              jsonLoadedRef.set(true) *> onLoadCompare(exitCase)
            }
          )

      override def messages: Stream[F, Message[Output]] =
        avroStream.merge(jsonStream).map(_.toMessage)
    }
  }

  def observed[F[_] : Monad : Parallel : LoggerFactory : Meter](delegate: EventSubscriber[F]): F[EventSubscriber[F]] = {
    given Show[ScheduleError] = {
      case _: ScheduleError.InvalidAvroError    => "invalid-avro"
      case _: ScheduleError.NotJsonError        => "not-json"
      case _: ScheduleError.InvalidJsonError    => "invalid-json"
      case _: ScheduleError.DecodeError         => "decode"
      case _: ScheduleError.TransformationError => "transformation"
    }

    def updateAttributes(source: String) = Attributes(
      Attribute("message.type", "update"),
      Attribute("message.source", source)
    )

    def deleteAttributes(source: String, deleteType: String) = Attributes(
      Attribute("message.type", "delete"),
      Attribute("message.source", source),
      Attribute("message.delete.type", deleteType)
    )

    def errorAttributes(source: String, error: ScheduleError) = Attributes(
      Attribute("message.type", "error"),
      Attribute("message.source", source),
      Attribute("message.error.type", error.show)
    )

    for {
      counter <- Meter[F].counter[Long]("event-subscriber").create
      logger  <- LoggerFactory[F].create
    } yield new EventSubscriber[F] {
      override def messages: Stream[F, Message[Output]] =
        delegate.messages.evalTapChunk { case Message(key, source, value, metadata) =>
          val logCtx = Map("key" -> key, "source" -> source)

          value match {
            case Right(Some(_)) =>
              logger.info(logCtx)(show"Decoded UPDATE for [$key] from $source") &>
                counter.inc(updateAttributes(source))

            case Right(None) =>
              lazy val deleteType = metadata.isExpired.fold("expired", "canceled")
              logger.info(logCtx)(show"Decoded DELETE type=[$deleteType] for [$key] from $source") &>
                counter.inc(deleteAttributes(source, deleteType))

            case Left(error) =>
              logger.error(logCtx, error)(show"Error decoding [$key] from $source") &>
                counter.inc(errorAttributes(source, error))
          }
        }
    }
  }

  def live[F[_] : Async : Parallel : LoggerFactory : Meter](
      config: KafkaConfig,
      loaded: Deferred[F, Unit]
  ): F[EventSubscriber[F]] =
    kafka[F](config, loaded).flatMap(observed)
}
```
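One detail worth noting in the deserializer setup: both `avroBinaryDeserializer[F, AvroSchedule].map(_.option.map(_.sequence))` and `jsonDeserializer[F, JsonSchedule].option.map(_.sequence)` compose fs2-kafka's `.option` (which decodes a null payload, i.e. a Kafka tombstone, as `None`) with cats' `sequence`, flipping `Option[Either[E, A]]` into `Either[E, Option[A]]`. The net effect is that tombstones arrive as `Right(None)` and are later logged as DELETEs in `observed`. A minimal, self-contained illustration of the `sequence` step (plain cats, no Kafka involved):

```scala
import cats.syntax.all.*

// A null payload decoded via `.option` gives None; `sequence` flips the
// Option[Either[E, A]] so a tombstone becomes Right(None), not an error.
val tombstone: Option[Either[String, Int]] = None
val decoded: Option[Either[String, Int]]   = Some(Right(42))
val failed: Option[Either[String, Int]]    = Some(Left("boom"))

val a: Either[String, Option[Int]] = tombstone.sequence // Right(None)
val b: Either[String, Option[Int]] = decoded.sequence   // Right(Some(42))
val c: Either[String, Option[Int]] = failed.sequence    // Left("boom")
```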
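For orientation, here is a rough sketch of how `EventSubscriber.live` might be wired in, based on the commit message's "added eventSubscriber to Scheduler". The `runSubscriber` name and the `evalMap` body are illustrative assumptions, not the repository's actual code:

```scala
import cats.Parallel
import cats.effect.{Async, Deferred}
import cats.syntax.all.*
import org.typelevel.log4cats.LoggerFactory
import org.typelevel.otel4s.metrics.Meter
import uk.sky.scheduler.EventSubscriber
import uk.sky.scheduler.config.KafkaConfig

// Illustrative wiring only: create the Deferred that gates queueing, build the
// observed subscriber, and drain its message stream.
def runSubscriber[F[_] : Async : Parallel : LoggerFactory : Meter](config: KafkaConfig): F[Unit] =
  for {
    loaded     <- Deferred[F, Unit] // completed once both avro and json topics finish loading
    subscriber <- EventSubscriber.live[F](config, loaded)
    _          <- subscriber.messages
                    .evalMap(message => Async[F].unit) // replace with real handling of each Message
                    .compile
                    .drain
  } yield ()
```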
ScheduleEventConverterSpec, @@ -58,5 +58,4 @@ (a trailing blank line removed before the final closing brace):

```scala
      scheduleEvent.toTombstone should equalProducerRecord(tombstone)
    }
  }
}
```
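For context on the assertion above: a Kafka tombstone is a producer record carrying the original key and a null value, which log compaction interprets as a deletion marker. A sketch of what the expected `tombstone` fixture plausibly looks like; the topic name and value type here are assumptions:

```scala
import fs2.kafka.ProducerRecord

// Hypothetical fixture: same key as the schedule, null value (the tombstone).
val tombstone: ProducerRecord[String, Array[Byte]] =
  ProducerRecord("schedules", "schedule-key", null)
```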