
Monitoring Reactive Streams

Reactive Streams are the key to building asynchronous, data-intensive applications with no predetermined data volumes. By enabling non-blocking backpressure, they boost the resiliency of your systems by design. But how do you tune and debug such applications? When productionizing Reactive Streams, the same backpressure that preserves the safety of your pipeline can get in the way of effectively monitoring its status. In this talk we’ll present a line of action to:

- measure the throughput of your pipeline
- identify its bottlenecks and look at possible tuning countermeasures
- diagnose liveness issues.

Examples will be in Scala and Akka Streams; however, these patterns are generic and applicable to any Reactive Streams implementation out there.
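
The slide code below covers the first two points. For the liveness point, one hedged sketch (not something shown in the deck) is Akka Streams' built-in idleTimeout operator, which fails the stream if no element passes through the stage within a given interval, so a silently stalled pipeline surfaces as an error instead of hanging. The probe name and the 30-second threshold are illustrative assumptions:

    import scala.concurrent.duration._
    import akka.NotUsed
    import akka.stream.scaladsl.Flow

    // Sketch only: fail the stream when no element has passed through for
    // 30 seconds (threshold is an assumption), turning a stall into a visible
    // TimeoutException rather than a silent backpressure deadlock.
    def livenessProbe[T]: Flow[T, T, NotUsed] =
      Flow[T].idleTimeout(30.seconds)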

Stefano Bonetti

November 15, 2018

Transcript

  1. AKKA STREAMS - THROUGHPUT

     val randomInt = Source.tick(10.millis, 10.millis, NotUsed)
       .map(_ ⇒ Random.nextInt())
     val evenOnly = Flow[Int].filter(_ % 2 == 0)
     val print = Sink.foreach(println)

     randomInt
       .via(evenOnly)
       .runWith(print)

  2. AKKA STREAMS - THROUGHPUT

     val randomInt = Source.tick(10.millis, 10.millis, NotUsed)
       .map(_ ⇒ Random.nextInt())
     val evenOnly = Flow[Int].filter(_ % 2 == 0)
     val print = Sink.foreach(println)

     def meter[T](name: String): Flow[T, T, NotUsed] = {
       val msgCounter = Kamon.metrics.counter(name)
       Flow[T].map { x ⇒ msgCounter.increment(); x }
     }

     randomInt
       .via(meter("produced"))
       .via(evenOnly)
       .via(meter("processed"))
       .runWith(print)

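Reading the two counters side by side is the point of placing a probe before and after the section under test: the filter keeps only even numbers, so the processed counter should tick at roughly half the rate of produced, and a drop in rate between any pair of probes localizes where throughput is being lost.
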
  3. AKKA STREAMS - GRAPHSTAGE API

     final case class Map[In, Out](f: In ⇒ Out) extends GraphStage[FlowShape[In, Out]] {
       val in = Inlet[In]("Map.in")
       val out = Outlet[Out]("Map.out")
       override val shape = FlowShape(in, out)

       override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
         new GraphStageLogic(shape) with InHandler with OutHandler {
           override def onPush(): Unit = push(out, f(grab(in)))
           override def onPull(): Unit = pull(in)
           setHandlers(in, out, this)
         }
     }

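As a minimal sketch (not from the deck, and assuming the usual akka.stream.scaladsl imports and a materializer are in scope), a custom stage like the Map above is plugged into a stream by wrapping it with Flow.fromGraph; the doubling function and the input values are purely illustrative:

    // Sketch: wiring the hand-rolled Map stage from the slide into a stream
    val doubled = Flow.fromGraph(Map[Int, Int](_ * 2))
    Source(1 to 3).via(doubled).runWith(Sink.foreach(println)) // prints 2, 4, 6
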
  4. AKKA STREAMS - BACKPRESSURE

     var lastPulled: Long = System.nanoTime()
     var lastPushed: Long = lastPulled

     private val backpressure = Kamon.histogram(label + "_backpressure")

     override def onPush(): Unit = {
       push(out, grab(in))
       val now = System.nanoTime()
       backpressure.record((lastPulled - lastPushed) * 100 / (now - lastPushed))
       lastPushed = now
     }

     override def onPull(): Unit = {
       pull(in)
       lastPulled = System.nanoTime()
     }

     setHandlers(in, out, this)

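In the handlers above, lastPulled - lastPushed is how long the stage sat waiting for downstream demand after its previous push, and now - lastPushed is the full interval between two consecutive pushes, so the recorded value is the percentage of each push-to-push cycle spent backpressured by downstream. For example, if the next pull arrived 8 ms after a push and the next push happened 10 ms after the previous one, the histogram records 80.
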
  5. AKKA STREAMS - BACKPRESSURE

     val randomInt = Source.tick(10.millis, 10.millis, NotUsed)
       .map(_ ⇒ Random.nextInt())
     val evenOnly = Flow[Int].filter(_ % 2 == 0)
     val print = Sink.foreach(println)

     randomInt
       .via(backpressureMeter("produced"))
       .via(evenOnly)
       .via(backpressureMeter("processed"))
       .runWith(print)

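With only a cheap filter and a println sink between the probes, demand returns almost immediately after every push compared to the 10 ms tick interval, so both backpressure histograms should sit close to zero. The next slide deliberately slows the middle stage down to make the contrast visible.
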
  6. AKKA STREAMS - BACKPRESSURE

     val randomInt = Source.tick(10.millis, 10.millis, NotUsed)
       .map(_ ⇒ Random.nextInt())
     val heavyEvenOnly = Flow[Int].mapAsync(parallelism = 1) { n =>
       after(250.millis, system.scheduler)(Future.successful(n))
     }.filter(_ % 2 == 0)
     val print = Sink.foreach(println)

     randomInt
       .via(backpressureMeter("produced"))
       .via(heavyEvenOnly)
       .via(backpressureMeter("processed"))
       .runWith(print)

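The backpressureMeter(label) helper used in the last two slides is not spelled out in the transcript. Below is a hedged sketch of how the handlers from the earlier backpressure slide could be packaged into such a reusable Flow; the class and method names are assumptions, and the Kamon histogram call simply mirrors the one on the slide. With the 250 ms mapAsync delay in place, the produced probe should report that most of each cycle is spent waiting for demand from the slow stage, while the processed probe, which only waits on the println sink, should stay near zero.

    import akka.NotUsed
    import akka.stream.{Attributes, FlowShape, Inlet, Outlet}
    import akka.stream.scaladsl.Flow
    import akka.stream.stage.{GraphStage, GraphStageLogic, InHandler, OutHandler}
    import kamon.Kamon

    // Sketch only: packaging the timing handlers as a reusable probe stage.
    final class BackpressureMeter[T](label: String) extends GraphStage[FlowShape[T, T]] {
      val in = Inlet[T](s"$label.in")
      val out = Outlet[T](s"$label.out")
      override val shape = FlowShape(in, out)

      override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
        new GraphStageLogic(shape) with InHandler with OutHandler {
          private var lastPulled: Long = System.nanoTime()
          private var lastPushed: Long = lastPulled
          private val backpressure = Kamon.histogram(label + "_backpressure")

          override def onPush(): Unit = {
            push(out, grab(in))
            val now = System.nanoTime()
            // percentage of the push-to-push interval spent waiting for a pull
            backpressure.record((lastPulled - lastPushed) * 100 / (now - lastPushed))
            lastPushed = now
          }

          override def onPull(): Unit = {
            pull(in)
            lastPulled = System.nanoTime()
          }

          setHandlers(in, out, this)
        }
    }

    def backpressureMeter[T](label: String): Flow[T, T, NotUsed] =
      Flow.fromGraph(new BackpressureMeter[T](label))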