From aca8440817db881a48eaffc8220d637cf7d5a133 Mon Sep 17 00:00:00 2001 From: Stas Date: Thu, 10 Oct 2019 11:09:34 +0200 Subject: [PATCH 01/35] Introduce scalaLogging library for lazy logging features. --- .../com/ing/baker/types/Converters.scala | 9 +- build.sbt | 12 +- .../com/ing/baker/il/RecipeVisualStyle.scala | 27 ++- .../com/ing/baker/il/RecipeVisualizer.scala | 40 ++-- project/Dependencies.scala | 1 + .../ing/baker/runtime/akka/AkkaBaker.scala | 221 +++++++++--------- .../actor/ClusterBakerActorProvider.scala | 58 +++-- .../runtime/akka/actor/GracefulShutdown.scala | 21 +- .../actor/GracefulShutdownShardRegions.scala | 33 ++- .../SensoryEventResponseHandler.scala | 30 ++- .../ProcessInstanceRuntime.scala | 109 +++++---- .../BakerTypedProtobufSerializer.scala | 32 ++- .../baker/runtime/akka/BakerEventsSpec.scala | 45 ++-- .../runtime/akka/BakerExecutionSpec.scala | 21 +- .../baker/runtime/akka/BakerInquireSpec.scala | 9 +- .../recipe_manager/RecipeManagerSpec.scala | 12 +- .../akka/actor/downing/MajorityStrategy.scala | 8 +- 17 files changed, 325 insertions(+), 363 deletions(-) diff --git a/bakertypes/src/main/scala/com/ing/baker/types/Converters.scala b/bakertypes/src/main/scala/com/ing/baker/types/Converters.scala index ace84609..a2ec2db2 100644 --- a/bakertypes/src/main/scala/com/ing/baker/types/Converters.scala +++ b/bakertypes/src/main/scala/com/ing/baker/types/Converters.scala @@ -1,15 +1,12 @@ package com.ing.baker.types import com.typesafe.config.ConfigFactory -import org.slf4j.LoggerFactory - +import com.typesafe.scalalogging.LazyLogging import scala.collection.JavaConverters._ import scala.reflect.runtime.universe import scala.reflect.runtime.universe.TypeTag -object Converters { - - private val log = LoggerFactory.getLogger("com.ing.baker.types") +object Converters extends LazyLogging{ def loadDefaultModulesFromConfig(): Map[Class[_], TypeModule] = { val defaultConfig = ConfigFactory.load() @@ -32,7 +29,7 @@ object Converters { } catch { case e: 
Exception => - log.error("Failed to load type module: ", e) + logger.error("Failed to load type module: ", e) None } }.collect { diff --git a/build.sbt b/build.sbt index a07964c8..47ea11ed 100644 --- a/build.sbt +++ b/build.sbt @@ -60,7 +60,8 @@ lazy val bakertypes = project.in(file("bakertypes")) objenisis, scalapbRuntime, jodaTime, - scalaReflect(scalaVersion.value) + scalaReflect(scalaVersion.value), + scalaLogging ) ++ testDeps(scalaTest, scalaCheck, logback, scalaCheck) ) @@ -73,7 +74,8 @@ lazy val intermediateLanguage = project.in(file("intermediate-language")) slf4jApi, scalaGraphDot, objenisis, - typeSafeConfig + typeSafeConfig, + scalaLogging ) ++ testDeps(scalaTest, scalaCheck, logback) ).dependsOn(bakertypes) @@ -105,7 +107,8 @@ lazy val runtime = project.in(file("runtime")) protobufJava, kryo, kryoSerializers, - slf4jApi + slf4jApi, + scalaLogging ) ++ testDeps( akkaTestKit, akkaMultiNodeTestkit, @@ -138,7 +141,8 @@ lazy val splitBrainResolver = project.in(file("split-brain-resolver")) akkaActor, akkaCluster, akkaSlf4j, - ficusConfig + ficusConfig, + scalaLogging ) ++ testDeps( akkaTestKit, akkaMultiNodeTestkit, diff --git a/intermediate-language/src/main/scala/com/ing/baker/il/RecipeVisualStyle.scala b/intermediate-language/src/main/scala/com/ing/baker/il/RecipeVisualStyle.scala index 754501dc..306fbf93 100644 --- a/intermediate-language/src/main/scala/com/ing/baker/il/RecipeVisualStyle.scala +++ b/intermediate-language/src/main/scala/com/ing/baker/il/RecipeVisualStyle.scala @@ -1,13 +1,12 @@ package com.ing.baker.il -import com.ing.baker.il.RecipeVisualizer.log import com.typesafe.config.Config -import scalax.collection.io.dot.{DotAttr, DotAttrStmt, Elem} - +import com.typesafe.scalalogging.LazyLogging import scala.collection.JavaConverters._ import scalax.collection.io.dot.implicits._ +import scalax.collection.io.dot.{ DotAttr, DotAttrStmt, Elem } -object RecipeVisualStyle { +object RecipeVisualStyle extends LazyLogging { def default: 
RecipeVisualStyle = RecipeVisualStyle() @@ -16,7 +15,7 @@ object RecipeVisualStyle { val visualizationConfig = config.getConfig("baker.visualization") val configuredStyle = visualizationConfig.getString("style") val pickedStyle = if (!visualizationConfig.hasPath(s"styles.$configuredStyle")) { - log.warn(s"no configuration for recipe style '$configuredStyle' found, falling back to 'default' style") + logger.warn(s"no configuration for recipe style '$configuredStyle' found, falling back to 'default' style") "default" } else configuredStyle @@ -33,15 +32,15 @@ object RecipeVisualStyle { values .-("shape") // shape is not allowed to be overriden .map { - case (key, s: String) => Some(DotAttr(key, s)) - case (key, n: java.lang.Integer) => Some(DotAttr(key, n.intValue())) - case (key, n: java.lang.Long) => Some(DotAttr(key, n.longValue())) - case (key, n: java.lang.Float) => Some(DotAttr(key, n.floatValue())) - case (key, n: java.lang.Double) => Some(DotAttr(key, n.doubleValue())) - case (key, other) => - RecipeVisualizer.log.warn(s"unusable configuration: $key = $other"); - None - }.toList.flatten + case (key, s: String) => Some(DotAttr(key, s)) + case (key, n: java.lang.Integer) => Some(DotAttr(key, n.intValue())) + case (key, n: java.lang.Long) => Some(DotAttr(key, n.longValue())) + case (key, n: java.lang.Float) => Some(DotAttr(key, n.floatValue())) + case (key, n: java.lang.Double) => Some(DotAttr(key, n.doubleValue())) + case (key, other) => + RecipeVisualizer.logger.warn(s"unusable configuration: $key = $other") + None + }.toList.flatten } RecipeVisualStyle( diff --git a/intermediate-language/src/main/scala/com/ing/baker/il/RecipeVisualizer.scala b/intermediate-language/src/main/scala/com/ing/baker/il/RecipeVisualizer.scala index 8ee450a6..d5b5bd42 100644 --- a/intermediate-language/src/main/scala/com/ing/baker/il/RecipeVisualizer.scala +++ b/intermediate-language/src/main/scala/com/ing/baker/il/RecipeVisualizer.scala @@ -3,18 +3,18 @@ package com.ing.baker.il 
import com.ing.baker.il.petrinet.Place._ import com.ing.baker.il.petrinet._ import com.ing.baker.petrinet.api._ -import com.typesafe.config.{Config, ConfigFactory} +import com.typesafe.scalalogging.{ LazyLogging, Logger } import org.slf4j.LoggerFactory +import scala.language.higherKinds import scalax.collection.Graph import scalax.collection.edge.WLDiEdge import scalax.collection.io.dot.implicits._ -import scalax.collection.io.dot.{DotAttr, _} - -import scala.language.higherKinds +import scalax.collection.io.dot.{ DotAttr, _ } object RecipeVisualizer { - val log = LoggerFactory.getLogger("com.ing.baker.il.RecipeVisualizer") + @transient + lazy val logger: Logger = Logger(LoggerFactory.getLogger(getClass.getName)) type RecipePetriNetGraph = Graph[Either[Place, Transition], WLDiEdge] @@ -23,7 +23,7 @@ object RecipeVisualizer { def compactNode(node: RecipePetriNetGraph#NodeT): RecipePetriNetGraph = { // create direct edges from all incoming to outgoing nodes - val newEdges = node.incomingNodes.flatMap { incomingNode => + val newEdges = node.incomingNodes.flatMap {incomingNode => node.outgoingNodes.map(n => WLDiEdge[Node, String](incomingNode, n)(0, "")) } @@ -34,13 +34,13 @@ object RecipeVisualizer { def compactAllNodes(fn: RecipePetriNetGraph#NodeT => Boolean): RecipePetriNetGraph = graph.nodes.foldLeft(graph) { case (acc, node) if fn(node) => acc.compactNode(node) - case (acc, _) => acc + case (acc, _) => acc } } /** - * Returns the label for a node. - */ + * Returns the label for a node. + */ private def nodeLabelFn: Either[Place, Transition] ⇒ String = { case Left(Place(label, EmptyEventIngredientPlace)) ⇒ s"empty:${label}" case Left(place) ⇒ place.label @@ -49,8 +49,8 @@ object RecipeVisualizer { } /** - * Returns the style attributes for a node. - */ + * Returns the style attributes for a node. 
+ */ private def nodeDotAttrFn(style: RecipeVisualStyle): (RecipePetriNetGraph#NodeT, Set[String], Set[String]) => List[DotAttr] = (node: RecipePetriNetGraph#NodeT, eventNames: Set[String], ingredientNames: Set[String]) ⇒ node.value match { @@ -90,10 +90,10 @@ object RecipeVisualizer { // specifies which places to compact (remove) val placesToCompact = (node: RecipePetriNetGraph#NodeT) => node.value match { - case Left(Place(_, IngredientPlace)) => false + case Left(Place(_, IngredientPlace)) => false case Left(Place(_, EmptyEventIngredientPlace)) => false - case Left(Place(_, EventOrPreconditionPlace)) => false - case Left(Place(_, _)) => true + case Left(Place(_, EventOrPreconditionPlace)) => false + case Left(Place(_, _)) => true case _ => false } @@ -118,22 +118,22 @@ object RecipeVisualizer { } def visualizeRecipe(recipe: CompiledRecipe, - style: RecipeVisualStyle, - filter: String => Boolean = _ => true, - eventNames: Set[String] = Set.empty, - ingredientNames: Set[String] = Set.empty): String = + style: RecipeVisualStyle, + filter: String => Boolean = _ => true, + eventNames: Set[String] = Set.empty, + ingredientNames: Set[String] = Set.empty): String = generateDot(recipe.petriNet.innerGraph, style, filter, eventNames, ingredientNames) def visualizePetriNet[P, T](graph: PetriNetGraph[P, T]): String = { val nodeLabelFn: Either[P, T] ⇒ String = node ⇒ node match { - case Left(p) ⇒ p.toString + case Left(p) ⇒ p.toString case Right(t) ⇒ t.toString } val nodeDotAttrFn: Either[P, T] => List[DotAttr] = node ⇒ node match { - case Left(_) ⇒ List(DotAttr("shape", "circle")) + case Left(_) ⇒ List(DotAttr("shape", "circle")) case Right(_) ⇒ List(DotAttr("shape", "square")) } diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 18521c92..fdbac90c 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -82,6 +82,7 @@ object Dependencies { val slf4jApi = "org.slf4j" % "slf4j-api" % "1.7.25" val slf4jSimple = "org.slf4j" % 
"slf4j-simple" % "1.7.5" val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.13.4" + val scalaLogging = "com.typesafe.scala-logging" %% "scala-logging" % "3.9.2" def scopeDeps(scope: String, modules: Seq[ModuleID]) = modules.map(m => m % scope) def compileDeps(modules: ModuleID*) = modules.toSeq diff --git a/runtime/src/main/scala/com/ing/baker/runtime/akka/AkkaBaker.scala b/runtime/src/main/scala/com/ing/baker/runtime/akka/AkkaBaker.scala index 27a19635..b8975961 100644 --- a/runtime/src/main/scala/com/ing/baker/runtime/akka/AkkaBaker.scala +++ b/runtime/src/main/scala/com/ing/baker/runtime/akka/AkkaBaker.scala @@ -1,34 +1,31 @@ package com.ing.baker.runtime.akka -import akka.actor.{Actor, ActorRef, Props} -import akka.pattern.{FutureRef, ask} +import akka.actor.{ Actor, ActorRef, Props } +import akka.pattern.{ FutureRef, ask } import akka.util.Timeout import com.ing.baker.il._ import com.ing.baker.il.failurestrategy.ExceptionStrategyOutcome import com.ing.baker.runtime.akka.actor._ import com.ing.baker.runtime.akka.actor.process_index.ProcessIndexProtocol._ -import com.ing.baker.runtime.akka.actor.process_instance.ProcessInstanceProtocol.{Initialized, InstanceState, Uninitialized} +import com.ing.baker.runtime.akka.actor.process_instance.ProcessInstanceProtocol.{ Initialized, InstanceState, Uninitialized } import com.ing.baker.runtime.akka.actor.recipe_manager.RecipeManagerProtocol import com.ing.baker.runtime.common.BakerException._ import com.ing.baker.runtime.common.SensoryEventStatus import com.ing.baker.runtime.scaladsl._ import com.ing.baker.types.Value -import org.slf4j.{Logger, LoggerFactory} - +import com.typesafe.scalalogging.LazyLogging import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future import scala.language.postfixOps import scala.util.Try /** - * The Baker is the component of the Baker library that runs one or multiples recipes. 
- * For each recipe a new instance can be baked, sensory events can be send and state can be inquired upon - */ -class AkkaBaker private[runtime](config: AkkaBakerConfig) extends Baker { + * The Baker is the component of the Baker library that runs one or multiples recipes. + * For each recipe a new instance can be baked, sensory events can be send and state can be inquired upon + */ +class AkkaBaker private[runtime](config: AkkaBakerConfig) extends Baker with LazyLogging { - import config.{materializer, system} - - private val log: Logger = LoggerFactory.getLogger(classOf[AkkaBaker]) + import config.{ materializer, system } val recipeManager: ActorRef = config.bakerActorProvider.createRecipeManagerActor() @@ -37,13 +34,13 @@ class AkkaBaker private[runtime](config: AkkaBakerConfig) extends Baker { config.bakerActorProvider.createProcessIndexActor(config.interactionManager, recipeManager) /** - * Adds a recipe to baker and returns a recipeId for the recipe. - * - * This function is idempotent, if the same (equal) recipe was added earlier this will return the same recipeId - * - * @param compiledRecipe The compiled recipe. - * @return A recipeId - */ + * Adds a recipe to baker and returns a recipeId for the recipe. + * + * This function is idempotent, if the same (equal) recipe was added earlier this will return the same recipeId + * + * @param compiledRecipe The compiled recipe. 
+ * @return A recipeId + */ override def addRecipe(compiledRecipe: CompiledRecipe): Future[String] = { // check if every interaction has an implementation @@ -66,11 +63,11 @@ class AkkaBaker private[runtime](config: AkkaBakerConfig) extends Baker { } /** - * Returns the recipe information for the given RecipeId - * - * @param recipeId - * @return - */ + * Returns the recipe information for the given RecipeId + * + * @param recipeId + * @return + */ override def getRecipe(recipeId: String): Future[RecipeInformation] = { // here we ask the RecipeManager actor to return us the recipe for the given id recipeManager.ask(RecipeManagerProtocol.GetRecipe(recipeId))(config.defaultInquireTimeout).flatMap { @@ -82,24 +79,24 @@ class AkkaBaker private[runtime](config: AkkaBakerConfig) extends Baker { } /** - * Returns all recipes added to this baker instance. - * - * @return All recipes in the form of map of recipeId -> CompiledRecipe - */ + * Returns all recipes added to this baker instance. + * + * @return All recipes in the form of map of recipeId -> CompiledRecipe + */ override def getAllRecipes: Future[Map[String, RecipeInformation]] = recipeManager.ask(RecipeManagerProtocol.GetAllRecipes)(config.defaultInquireTimeout) .mapTo[RecipeManagerProtocol.AllRecipes] - .map(_.recipes.map { ri => + .map(_.recipes.map {ri => ri.compiledRecipe.recipeId -> RecipeInformation(ri.compiledRecipe, ri.timestamp, getImplementationErrors(ri.compiledRecipe)) }.toMap) /** - * Creates a process instance for the given recipeId with the given RecipeInstanceId as identifier - * - * @param recipeId The recipeId for the recipe to bake - * @param recipeInstanceId The identifier for the newly baked process - * @return - */ + * Creates a process instance for the given recipeId with the given RecipeInstanceId as identifier + * + * @param recipeId The recipeId for the recipe to bake + * @param recipeInstanceId The identifier for the newly baked process + * @return + */ override def bake(recipeId: String, 
recipeInstanceId: String): Future[Unit] = { processIndexActor.ask(CreateProcess(recipeId, recipeInstanceId))(config.defaultBakeTimeout).flatMap { case _: Initialized => @@ -232,44 +229,44 @@ class AkkaBaker private[runtime](config: AkkaBakerConfig) extends Baker { } /** - * Retries a blocked interaction. - * - * @return - */ + * Retries a blocked interaction. + * + * @return + */ override def retryInteraction(recipeInstanceId: String, interactionName: String): Future[Unit] = { processIndexActor.ask(RetryBlockedInteraction(recipeInstanceId, interactionName))(config.defaultProcessEventTimeout).map(_ => ()) } /** - * Resolves a blocked interaction by specifying it's output. - * - * !!! You should provide an event of the original interaction. Event / ingredient renames are done by Baker. - * - * @return - */ + * Resolves a blocked interaction by specifying it's output. + * + * !!! You should provide an event of the original interaction. Event / ingredient renames are done by Baker. + * + * @return + */ override def resolveInteraction(recipeInstanceId: String, interactionName: String, event: EventInstance): Future[Unit] = { processIndexActor.ask(ResolveBlockedInteraction(recipeInstanceId, interactionName, event))(config.defaultProcessEventTimeout).map(_ => ()) } /** - * Stops the retrying of an interaction. - * - * @return - */ + * Stops the retrying of an interaction. + * + * @return + */ override def stopRetryingInteraction(recipeInstanceId: String, interactionName: String): Future[Unit] = { processIndexActor.ask(StopRetryingInteraction(recipeInstanceId, interactionName))(config.defaultProcessEventTimeout).map(_ => ()) } /** - * Returns an index of all processes. - * - * Can potentially return a partial index when baker runs in cluster mode - * and not all shards can be reached within the given timeout. - * - * Does not include deleted processes. - * - * @return An index of all processes - */ + * Returns an index of all processes. 
+ * + * Can potentially return a partial index when baker runs in cluster mode + * and not all shards can be reached within the given timeout. + * + * Does not include deleted processes. + * + * @return An index of all processes + */ override def getAllRecipeInstancesMetadata: Future[Set[RecipeInstanceMetadata]] = { Future.successful(config.bakerActorProvider .getAllProcessesMetadata(processIndexActor)(system, config.defaultInquireTimeout) @@ -277,11 +274,11 @@ class AkkaBaker private[runtime](config: AkkaBakerConfig) extends Baker { } /** - * Returns the process state. - * - * @param recipeInstanceId The process identifier - * @return The process state. - */ + * Returns the process state. + * + * @param recipeInstanceId The process identifier + * @return The process state. + */ override def getRecipeInstanceState(recipeInstanceId: String): Future[RecipeInstanceState] = processIndexActor .ask(GetProcessState(recipeInstanceId))(Timeout.durationToTimeout(config.defaultInquireTimeout)) @@ -292,38 +289,38 @@ class AkkaBaker private[runtime](config: AkkaBakerConfig) extends Baker { } /** - * Returns all provided ingredients for a given process id. - * - * @param recipeInstanceId The process id. - * @return The provided ingredients. - */ + * Returns all provided ingredients for a given process id. + * + * @param recipeInstanceId The process id. + * @return The provided ingredients. + */ override def getIngredients(recipeInstanceId: String): Future[Map[String, Value]] = getRecipeInstanceState(recipeInstanceId).map(_.ingredients) /** - * Returns all fired events for a given RecipeInstance id. - * - * @param recipeInstanceId The process id. - * @return The events - */ + * Returns all fired events for a given RecipeInstance id. + * + * @param recipeInstanceId The process id. 
+ * @return The events + */ override def getEvents(recipeInstanceId: String): Future[Seq[EventMoment]] = getRecipeInstanceState(recipeInstanceId).map(_.events) /** - * Returns all names of fired events for a given RecipeInstance id. - * - * @param recipeInstanceId The process id. - * @return The event names - */ + * Returns all names of fired events for a given RecipeInstance id. + * + * @param recipeInstanceId The process id. + * @return The event names + */ override def getEventNames(recipeInstanceId: String): Future[Seq[String]] = getRecipeInstanceState(recipeInstanceId).map(_.eventNames) /** - * Returns the visual state (.dot) for a given process. - * - * @param recipeInstanceId The process identifier. - * @return A visual (.dot) representation of the process state. - */ + * Returns the visual state (.dot) for a given process. + * + * @param recipeInstanceId The process identifier. + * @return A visual (.dot) representation of the process state. + */ @throws[ProcessDeletedException]("If the process is already deleted") @throws[NoSuchProcessException]("If the process is not found") override def getVisualState(recipeInstanceId: String, style: RecipeVisualStyle = RecipeVisualStyle.default): Future[String] = { @@ -358,38 +355,38 @@ class AkkaBaker private[runtime](config: AkkaBakerConfig) extends Baker { } /** - * Registers a listener to all runtime events for recipes with the given name run in this baker instance. - * - * Note that the delivery guarantee is *AT MOST ONCE*. Do not use it for critical functionality - */ + * Registers a listener to all runtime events for recipes with the given name run in this baker instance. + * + * Note that the delivery guarantee is *AT MOST ONCE*. 
Do not use it for critical functionality + */ override def registerEventListener(recipeName: String, listenerFunction: (String, EventInstance) => Unit): Future[Unit] = doRegisterEventListener(listenerFunction, _ == recipeName) /** - * Registers a listener to all runtime events for all recipes that run in this Baker instance. - * - * Note that the delivery guarantee is *AT MOST ONCE*. Do not use it for critical functionality - */ + * Registers a listener to all runtime events for all recipes that run in this Baker instance. + * + * Note that the delivery guarantee is *AT MOST ONCE*. Do not use it for critical functionality + */ // @deprecated("Use event bus instead", "1.4.0") override def registerEventListener(listenerFunction: (String, EventInstance) => Unit): Future[Unit] = doRegisterEventListener(listenerFunction, _ => true) /** - * Registers a listener function that listens to all BakerEvents - * - * Note that the delivery guarantee is *AT MOST ONCE*. Do not use it for critical functionality - * - * @param listenerFunction - * @return - */ + * Registers a listener function that listens to all BakerEvents + * + * Note that the delivery guarantee is *AT MOST ONCE*. Do not use it for critical functionality + * + * @param listenerFunction + * @return + */ override def registerBakerEventListener(listenerFunction: BakerEvent => Unit): Future[Unit] = { Future.successful { val listenerActor = system.actorOf(Props(new Actor() { override def receive: Receive = { case event: BakerEvent => Try { listenerFunction.apply(event) - }.failed.foreach { e => - log.warn(s"Listener function threw exception for event: $event", e) + }.failed.foreach {e => + logger.warn(s"Listener function threw exception for event: $event", e) } } })) @@ -398,26 +395,26 @@ class AkkaBaker private[runtime](config: AkkaBakerConfig) extends Baker { } /** - * Adds an interaction implementation to baker. - * - * This is assumed to be a an object with a method named 'apply' defined on it. 
- * - * @param implementation The implementation object - */ + * Adds an interaction implementation to baker. + * + * This is assumed to be a an object with a method named 'apply' defined on it. + * + * @param implementation The implementation object + */ override def addInteractionInstance(implementation: InteractionInstance): Future[Unit] = Future.successful(config.interactionManager.addImplementation(implementation)) /** - * Adds a sequence of interaction implementation to baker. - * - * @param implementations The implementation object - */ + * Adds a sequence of interaction implementation to baker. + * + * @param implementations The implementation object + */ override def addInteractionInstances(implementations: Seq[InteractionInstance]): Future[Unit] = Future.successful(implementations.foreach(addInteractionInstance)) /** - * Attempts to gracefully shutdown the baker system. - */ + * Attempts to gracefully shutdown the baker system. + */ override def gracefulShutdown: Future[Unit] = Future.successful(GracefulShutdown.gracefulShutdownActorSystem(system, config.defaultShutdownTimeout)) } diff --git a/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/ClusterBakerActorProvider.scala b/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/ClusterBakerActorProvider.scala index bf9881be..ab6c482e 100644 --- a/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/ClusterBakerActorProvider.scala +++ b/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/ClusterBakerActorProvider.scala @@ -1,10 +1,10 @@ package com.ing.baker.runtime.akka.actor -import akka.actor.{ActorRef, ActorSystem, Address, PoisonPill} +import akka.actor.{ ActorRef, ActorSystem, Address, PoisonPill } import akka.cluster.Cluster import akka.cluster.sharding.ShardRegion._ -import akka.cluster.sharding.{ClusterSharding, ClusterShardingSettings, ShardRegion} -import akka.cluster.singleton.{ClusterSingletonManager, ClusterSingletonManagerSettings, ClusterSingletonProxy, 
ClusterSingletonProxySettings} +import akka.cluster.sharding.{ ClusterSharding, ClusterShardingSettings, ShardRegion } +import akka.cluster.singleton.{ ClusterSingletonManager, ClusterSingletonManagerSettings, ClusterSingletonProxy, ClusterSingletonProxySettings } import akka.stream.Materializer import akka.util.Timeout import cats.data.NonEmptyList @@ -14,38 +14,36 @@ import com.ing.baker.runtime.akka.actor.process_index.ProcessIndex.ActorMetadata import com.ing.baker.runtime.akka.actor.process_index.ProcessIndexProtocol._ import com.ing.baker.runtime.akka.actor.process_index._ import com.ing.baker.runtime.akka.actor.recipe_manager.RecipeManager -import com.ing.baker.runtime.akka.actor.serialization.Encryption -import com.ing.baker.runtime.akka.actor.serialization.BakerSerializable +import com.ing.baker.runtime.akka.actor.serialization.{ BakerSerializable, Encryption } import com.ing.baker.runtime.akka.internal.InteractionManager -import org.slf4j.LoggerFactory - -import scala.concurrent.{Await, TimeoutException} +import com.typesafe.scalalogging.LazyLogging import scala.concurrent.duration._ +import scala.concurrent.{ Await, TimeoutException } object ClusterBakerActorProvider { case class GetShardIndex(entityId: String) extends BakerSerializable /** - * This function calculates the names of the ActorIndex actors - * gets the least significant bits of the UUID, and returns the MOD 10 - * So we have at most 10 manager actors created, all the petrinet actors will fall under these 10 actors - * Note, the nrOfShards used here has to be aligned with the nrOfShards used in the shardIdExtractor - */ + * This function calculates the names of the ActorIndex actors + * gets the least significant bits of the UUID, and returns the MOD 10 + * So we have at most 10 manager actors created, all the petrinet actors will fall under these 10 actors + * Note, the nrOfShards used here has to be aligned with the nrOfShards used in the shardIdExtractor + */ def 
entityId(recipeInstanceId: String, nrOfShards: Int): String = s"index-${Math.abs(sha256HashCode(recipeInstanceId) % nrOfShards)}" // extracts the actor id -> message from the incoming message // Entity id is the first character of the UUID def entityIdExtractor(nrOfShards: Int): ExtractEntityId = { - case msg:ProcessIndexMessage => (entityId(msg.recipeInstanceId, nrOfShards), msg) + case msg: ProcessIndexMessage => (entityId(msg.recipeInstanceId, nrOfShards), msg) case GetShardIndex(entityId) => (entityId, GetIndex) case msg => throw new IllegalArgumentException(s"Message of type ${msg.getClass} not recognized") } // extracts the shard id from the incoming message def shardIdExtractor(nrOfShards: Int): ExtractShardId = { - case msg:ProcessIndexMessage => Math.abs(sha256HashCode(msg.recipeInstanceId) % nrOfShards).toString + case msg: ProcessIndexMessage => Math.abs(sha256HashCode(msg.recipeInstanceId) % nrOfShards).toString case GetShardIndex(entityId) => entityId.split(s"index-").last case ShardRegion.StartEntity(entityId) => entityId.split(s"index-").last case msg => throw new IllegalArgumentException(s"Message of type ${msg.getClass} not recognized") @@ -55,28 +53,26 @@ object ClusterBakerActorProvider { } class ClusterBakerActorProvider( - nrOfShards: Int, - retentionCheckInterval: FiniteDuration, - actorIdleTimeout: Option[FiniteDuration], - journalInitializeTimeout: FiniteDuration, - seedNodes: NonEmptyList[Address], - ingredientsFilter: List[String], - configuredEncryption: Encryption - ) extends BakerActorProvider { + nrOfShards: Int, + retentionCheckInterval: FiniteDuration, + actorIdleTimeout: Option[FiniteDuration], + journalInitializeTimeout: FiniteDuration, + seedNodes: NonEmptyList[Address], + ingredientsFilter: List[String], + configuredEncryption: Encryption +) extends BakerActorProvider with LazyLogging { - private val log = LoggerFactory.getLogger(classOf[ClusterBakerActorProvider]) - - private def initializeCluster()(implicit actorSystem: 
ActorSystem) = { + private def initializeCluster()(implicit actorSystem: ActorSystem): Unit = { /** - * Join cluster after waiting for the persistenceInit actor, otherwise terminate here. - */ + * Join cluster after waiting for the persistenceInit actor, otherwise terminate here. + */ try { Await.result(Util.persistenceInit(journalInitializeTimeout), journalInitializeTimeout) } catch { case _: TimeoutException => throw new IllegalStateException(s"Timeout when trying to initialize the akka journal, waited $journalInitializeTimeout") } // join the cluster - log.info("PersistenceInit actor started successfully, joining cluster seed nodes {}", seedNodes) + logger.info("PersistenceInit actor started successfully, joining cluster seed nodes {}", seedNodes) Cluster.get(actorSystem).joinSeedNodes(seedNodes.toList) } @@ -109,13 +105,13 @@ class ClusterBakerActorProvider( actorSystem.actorOf(props = singletonProxyProps, name = "RecipeManagerProxy") } - def getAllProcessesMetadata(actor: ActorRef)(implicit system: ActorSystem, timeout: FiniteDuration) = { + def getAllProcessesMetadata(actor: ActorRef)(implicit system: ActorSystem, timeout: FiniteDuration): Seq[ActorMetadata] = { import akka.pattern.ask import system.dispatcher implicit val akkaTimeout: Timeout = timeout - val futures = (0 to nrOfShards).map { shard => actor.ask(GetShardIndex(s"index-$shard")).mapTo[Index].map(_.entries) } + val futures = (0 to nrOfShards).map {shard => actor.ask(GetShardIndex(s"index-$shard")).mapTo[Index].map(_.entries)} val collected: Seq[ActorMetadata] = Util.collectFuturesWithin(futures, timeout, system.scheduler).flatten collected diff --git a/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/GracefulShutdown.scala b/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/GracefulShutdown.scala index 014cf821..23aa99cd 100644 --- a/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/GracefulShutdown.scala +++ 
b/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/GracefulShutdown.scala @@ -5,17 +5,18 @@ import akka.cluster.Cluster import akka.pattern.ask import akka.util.Timeout import com.ing.baker.runtime.akka.actor.GracefulShutdownShardRegions.InitiateGracefulShutdown +import com.typesafe.scalalogging.Logger import org.slf4j.LoggerFactory - import scala.concurrent.duration.FiniteDuration -import scala.concurrent.{Await, Promise, TimeoutException} -import scala.util.{Failure, Success, Try} +import scala.concurrent.{ Await, Promise, TimeoutException } +import scala.util.{ Failure, Success, Try } object GracefulShutdown { - val log = LoggerFactory.getLogger("com.ing.baker.runtime.core.actor.GracefulShutdown") + @transient + lazy val logger: Logger = Logger(LoggerFactory.getLogger(getClass.getName)) - def gracefulShutdownActorSystem(actorSystem: ActorSystem, timeout: FiniteDuration) = { + def gracefulShutdownActorSystem(actorSystem: ActorSystem, timeout: FiniteDuration): Any = { Try { Cluster.get(actorSystem) @@ -26,12 +27,12 @@ object GracefulShutdown { gracefulShutdownShards(Seq("ProcessIndexActor"))(Timeout(timeout), actorSystem) // then leave the cluster - log.warn("Leaving the akka cluster") + logger.warn("Leaving the akka cluster") val promise: Promise[Boolean] = Promise() cluster.registerOnMemberRemoved { - log.warn("Successfully left the akka cluster, terminating the actor system") + logger.warn("Successfully left the akka cluster, terminating the actor system") promise.success(true) actorSystem.terminate() } @@ -41,10 +42,10 @@ object GracefulShutdown { Await.result(promise.future, timeout) case Success(_) => - log.warn("Not a member of a cluster, terminating the actor system") + logger.warn("Not a member of a cluster, terminating the actor system") actorSystem.terminate() case Failure(exception) => - log.warn("Cluster not available for actor system", exception) + logger.warn("Cluster not available for actor system", exception) actorSystem.terminate() } } @@ 
-57,7 +58,7 @@ object GracefulShutdown { Await.result(actor.ask(InitiateGracefulShutdown), timeout.duration) } catch { case _: TimeoutException => - log.warn(s"Graceful shutdown of shards timed out after $timeout") + logger.warn(s"Graceful shutdown of shards timed out after $timeout") } } } diff --git a/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/GracefulShutdownShardRegions.scala b/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/GracefulShutdownShardRegions.scala index cbe477b0..c812e16a 100644 --- a/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/GracefulShutdownShardRegions.scala +++ b/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/GracefulShutdownShardRegions.scala @@ -1,10 +1,8 @@ package com.ing.baker.runtime.akka.actor import akka.actor._ -import akka.cluster.sharding.{ClusterSharding, ShardRegion} -import com.ing.baker.runtime.akka.actor.GracefulShutdownShardRegions.{GracefulShutdownSuccessful, GracefulShutdownTimedOut, InitiateGracefulShutdown} -import com.typesafe.config.Config - +import akka.cluster.sharding.{ ClusterSharding, ShardRegion } +import com.ing.baker.runtime.akka.actor.GracefulShutdownShardRegions.{ GracefulShutdownSuccessful, GracefulShutdownTimedOut, InitiateGracefulShutdown } import scala.collection._ import scala.concurrent.ExecutionContext import scala.concurrent.duration._ @@ -13,7 +11,9 @@ import scala.language.postfixOps object GracefulShutdownShardRegions { case object InitiateGracefulShutdown + case object GracefulShutdownTimedOut + case object GracefulShutdownSuccessful def props(shardHandOverTimeout: FiniteDuration, typeNames: Seq[String]): Props = @@ -21,23 +21,21 @@ object GracefulShutdownShardRegions { } class GracefulShutdownShardRegions(shardHandOverTimeout: FiniteDuration, typeNames: Seq[String]) extends Actor - with ActorLogging { + with ActorLogging { - val system = context.system + private val system = context.system // all the shard region actor refs - val shardRegionsRefs = 
typeNames.map(name => ClusterSharding(system).shardRegion(name)).toSet + private val shardRegionsRefs = typeNames.map(name => ClusterSharding(system).shardRegion(name)).toSet - implicit val ec: ExecutionContext = system.dispatcher + private implicit val ec: ExecutionContext = system.dispatcher - val config: Config = system.settings.config - - override def receive = waitForLeaveCommand(shardRegionsRefs) + override def receive: Receive = waitForLeaveCommand(shardRegionsRefs) def waitForLeaveCommand(regions: Set[ActorRef]): Receive = { case InitiateGracefulShutdown => - GracefulShutdown.log.warn(s"Initiating graceful shut down of shard regions: ${typeNames.mkString(",")}") - regions.foreach { region => + GracefulShutdown.logger.warn(s"Initiating graceful shut down of shard regions: ${typeNames.mkString(",")}") + regions.foreach {region => context watch region region ! ShardRegion.GracefulShutdown } @@ -46,16 +44,17 @@ class GracefulShutdownShardRegions(shardHandOverTimeout: FiniteDuration, typeNam } def waitingForTermination(regions: Set[ActorRef], initiator: ActorRef): Receive = { - case GracefulShutdownTimedOut => - GracefulShutdown.log.warn(s"Graceful shutdown of shard regions timed out after $shardHandOverTimeout") + case GracefulShutdownTimedOut => + GracefulShutdown.logger.warn(s"Graceful shutdown of shard regions timed out after $shardHandOverTimeout") context.stop(self) case Terminated(region) => val newRegions = regions - region if (newRegions.isEmpty) { - GracefulShutdown.log.warn("Graceful shutdown of shard regions successful") + GracefulShutdown.logger.warn("Graceful shutdown of shard regions successful") initiator ! 
GracefulShutdownSuccessful context.stop(self) - } else + } else { context.become(waitingForTermination(newRegions, initiator)) + } } } diff --git a/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/process_index/SensoryEventResponseHandler.scala b/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/process_index/SensoryEventResponseHandler.scala index 9ca78d79..0b6e5dcf 100644 --- a/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/process_index/SensoryEventResponseHandler.scala +++ b/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/process_index/SensoryEventResponseHandler.scala @@ -1,14 +1,13 @@ package com.ing.baker.runtime.akka.actor.process_index -import akka.actor.{Actor, ActorRef, Props, ReceiveTimeout} +import akka.actor.{ Actor, ActorLogging, ActorRef, Props, ReceiveTimeout } import com.ing.baker.il.CompiledRecipe -import com.ing.baker.runtime.scaladsl.{EventInstance, EventReceived, EventRejected, SensoryEventResult, RecipeInstanceState} -import com.ing.baker.runtime.akka.actor.process_index.ProcessIndexProtocol.{FireSensoryEventReaction, FireSensoryEventRejection, ProcessEvent, ProcessEventCompletedResponse, ProcessEventReceivedResponse} +import com.ing.baker.runtime.akka.actor.process_index.ProcessIndexProtocol._ import com.ing.baker.runtime.akka.actor.process_instance.ProcessInstanceProtocol import com.ing.baker.runtime.akka.actor.process_instance.ProcessInstanceProtocol._ import com.ing.baker.runtime.common.SensoryEventStatus -import com.ing.baker.types.{PrimitiveValue, Value} -import org.slf4j.{Logger, LoggerFactory} +import com.ing.baker.runtime.scaladsl.{ EventInstance, EventReceived, EventRejected, SensoryEventResult } +import com.ing.baker.types.{ PrimitiveValue, Value } object SensoryEventResponseHandler { @@ -17,17 +16,16 @@ object SensoryEventResponseHandler { } /** - * An actor which builds the response to fireSensoryEvent* requests - * - Obtains the data from the process instance (by accumulating transition firing 
outcomes) - * - Publishes events to the system event stream - * - Does involving logging - */ -class SensoryEventResponseHandler(receiver: ActorRef, command: ProcessEvent, ingredientsFilter: Seq[String]) extends Actor { + * An actor which builds the response to fireSensoryEvent* requests + * - Obtains the data from the process instance (by accumulating transition firing outcomes) + * - Publishes events to the system event stream + * - Does involving logging + */ +class SensoryEventResponseHandler(receiver: ActorRef, command: ProcessEvent, ingredientsFilter: Seq[String]) + extends Actor with ActorLogging { context.setReceiveTimeout(command.timeout) - val log: Logger = LoggerFactory.getLogger(classOf[SensoryEventResponseHandler]) - val waitForRetries: Boolean = command.reaction match { case FireSensoryEventReaction.NotifyWhenReceived => false case FireSensoryEventReaction.NotifyWhenCompleted(waitForRetries0) => waitForRetries0 @@ -154,15 +152,15 @@ class SensoryEventResponseHandler(receiver: ActorRef, command: ProcessEvent, ing case FireSensoryEventReaction.NotifyOnEvent(_, onEvent) if Option(cache.head.asInstanceOf[TransitionFired].output.asInstanceOf[EventInstance]).exists(_.name == onEvent) => notifyComplete(cache.reverse) - PartialFunction { _ => () } + PartialFunction {_ => ()} case FireSensoryEventReaction.NotifyWhenCompleted(_) if runningJobs.isEmpty => notifyComplete(cache.reverse) - PartialFunction { _ => () } + PartialFunction {_ => ()} case FireSensoryEventReaction.NotifyBoth(_, _) if runningJobs.isEmpty => notifyComplete(cache.reverse) - PartialFunction { _ => () } + PartialFunction {_ => ()} case _ => PartialFunction { diff --git a/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/process_instance/ProcessInstanceRuntime.scala b/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/process_instance/ProcessInstanceRuntime.scala index 99f639a2..7abf4241 100644 --- 
a/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/process_instance/ProcessInstanceRuntime.scala +++ b/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/process_instance/ProcessInstanceRuntime.scala @@ -1,15 +1,14 @@ package com.ing.baker.runtime.akka.actor.process_instance -import java.io.{PrintWriter, StringWriter} - import cats.data.State import cats.effect.IO import com.ing.baker.petrinet.api._ import com.ing.baker.runtime.akka._ import com.ing.baker.runtime.akka.actor.process_instance.ProcessInstanceEventSourcing._ import com.ing.baker.runtime.akka.actor.process_instance.internal.ExceptionStrategy.BlockTransition -import com.ing.baker.runtime.akka.actor.process_instance.internal.{ExceptionStrategy, Instance, Job} -import org.slf4j.LoggerFactory +import com.ing.baker.runtime.akka.actor.process_instance.internal.{ ExceptionStrategy, Instance, Job } +import com.typesafe.scalalogging.LazyLogging +import java.io.{ PrintWriter, StringWriter } /** * Encapsulates all components required to 'run' a petri net instance @@ -19,56 +18,54 @@ import org.slf4j.LoggerFactory * @tparam S The state type * @tparam E The event type */ -trait ProcessInstanceRuntime[P, T, S, E] { - - val log = LoggerFactory.getLogger("com.ing.baker.runtime.core.actor.process_instance.ProcessInstanceRuntime") +trait ProcessInstanceRuntime[P, T, S, E] extends LazyLogging { /** - * The event source function for the state associated with a process instance. - * - * By default the identity function is used. - */ - val eventSource: T ⇒ (S ⇒ E ⇒ S) = _ ⇒ (s ⇒ _ ⇒ s) + * The event source function for the state associated with a process instance. + * + * By default the identity function is used. + */ + val eventSource: T ⇒ S ⇒ E ⇒ S = _ ⇒ s ⇒ _ ⇒ s /** - * This function is called when a transition throws an exception. - * - * By default the transition is blocked. - */ + * This function is called when a transition throws an exception. + * + * By default the transition is blocked. 
+ */ def handleException(job: Job[P, T, S])(throwable: Throwable, failureCount: Int, startTime: Long, outMarking: MultiSet[P]): ExceptionStrategy = BlockTransition /** - * Returns the task that should be executed for a transition. - */ + * Returns the task that should be executed for a transition. + */ def transitionTask(petriNet: PetriNet[P, T], t: T)(marking: Marking[P], state: S, input: Any): IO[(Marking[P], E)] /** - * Checks if a transition is automatically 'fireable' by the runtime (not triggered by some outside input). - * - * By default, cold transitions (without in adjacent places) are not auto fireable. - */ - def isAutoFireable(instance: Instance[P, T, S], t: T): Boolean = !instance.petriNet.incomingPlaces(t).isEmpty + * Checks if a transition is automatically 'fireable' by the runtime (not triggered by some outside input). + * + * By default, cold transitions (without in adjacent places) are not auto fireable. + */ + def isAutoFireable(instance: Instance[P, T, S], t: T): Boolean = instance.petriNet.incomingPlaces(t).nonEmpty /** - * Defines which tokens from a marking for a particular place are consumable by a transition. - * - * By default ALL tokens from that place are consumable. - * - * You can override this for example in case you use a colored (data) petri net model with filter rules on the edges. - */ + * Defines which tokens from a marking for a particular place are consumable by a transition. + * + * By default ALL tokens from that place are consumable. + * + * You can override this for example in case you use a colored (data) petri net model with filter rules on the edges. + */ def consumableTokens(petriNet: PetriNet[P, T])(marking: Marking[P], p: P, t: T): MultiSet[Any] = marking.getOrElse(p, MultiSet.empty) /** - * Takes a Job specification, executes it and returns a TransitionEvent (asychronously using cats.effect.IO) - * - * TODO - * - * The use of cats.effect.IO is not really necessary at this point. 
It was mainly chosen to support cancellation in - * the future: https://typelevel.org/cats-effect/datatypes/io.html#cancelable-processes - * - * However, since that is not used this can be refactored to a simple function: Job -> TransitionEvent - * - */ + * Takes a Job specification, executes it and returns a TransitionEvent (asychronously using cats.effect.IO) + * + * TODO + * + * The use of cats.effect.IO is not really necessary at this point. It was mainly chosen to support cancellation in + * the future: https://typelevel.org/cats-effect/datatypes/io.html#cancelable-processes + * + * However, since that is not used this can be refactored to a simple function: Job -> TransitionEvent + * + */ def jobExecutor(topology: PetriNet[P, T])(implicit transitionIdentifier: Identifiable[T], placeIdentifier: Identifiable[P]): Job[P, T, S] ⇒ IO[TransitionEvent] = { def exceptionStackTrace(e: Throwable): String = { @@ -83,7 +80,7 @@ trait ProcessInstanceRuntime[P, T, S, E] { val transition = job.transition val consumed: Marking[Id] = job.consume.marshall - IO.unit.flatMap { _ => + IO.unit.flatMap {_ => // calling transitionTask(...) 
could potentially throw an exception // TODO I don't believe the last statement is true transitionTask(topology, transition)(job.consume, job.processState, job.input) @@ -99,7 +96,7 @@ trait ProcessInstanceRuntime[P, T, S, E] { }.handleException { // If an exception was thrown while computing the failure strategy we block the interaction from firing case e: Throwable => - log.error(s"Exception while handling transition failure", e) + logger.error(s"Exception while handling transition failure", e) TransitionFailedEvent(job.id, transition.getId, job.correlationId, startTime, System.currentTimeMillis(), consumed, job.input, exceptionStackTrace(e), ExceptionStrategy.BlockTransition) } } @@ -115,7 +112,7 @@ trait ProcessInstanceRuntime[P, T, S, E] { } // check if any any places have an insufficient number of tokens - if (consumable.exists { case (_, count, tokens) ⇒ tokens.multisetSize < count }) + if (consumable.exists {case (_, count, tokens) ⇒ tokens.multisetSize < count}) Seq.empty else { val consume = consumable.map { @@ -128,21 +125,21 @@ trait ProcessInstanceRuntime[P, T, S, E] { } /** - * Checks whether a transition is 'enabled' in a marking. - */ + * Checks whether a transition is 'enabled' in a marking. + */ def isEnabled(petriNet: PetriNet[P, T])(marking: Marking[P], t: T): Boolean = consumableMarkings(petriNet)(marking, t).nonEmpty /** - * Returns all enabled transitions for a marking. - */ + * Returns all enabled transitions for a marking. 
+ */ def enabledTransitions(petriNet: PetriNet[P, T])(marking: Marking[P]): Iterable[T] = petriNet.transitions.filter(t ⇒ consumableMarkings(petriNet)(marking, t).nonEmpty) /** - * Creates a job for a specific transition with input, computes the marking it should consume - */ + * Creates a job for a specific transition with input, computes the marking it should consume + */ def createJob(transition: T, input: Any, correlationId: Option[String] = None): State[Instance[P, T, S], Either[String, Job[P, T, S]]] = - State { instance ⇒ + State {instance ⇒ if (instance.isBlocked(transition)) (instance, Left("Transition is blocked by a previous failure")) else @@ -157,11 +154,11 @@ trait ProcessInstanceRuntime[P, T, S, E] { } /** - * Finds the (optional) first transition that is enabled & automatically fireable - */ - def firstEnabledJob: State[Instance[P, T, S], Option[Job[P, T, S]]] = State { instance ⇒ + * Finds the (optional) first transition that is enabled & automatically fireable + */ + def firstEnabledJob: State[Instance[P, T, S], Option[Job[P, T, S]]] = State {instance ⇒ enabledParameters(instance.petriNet)(instance.availableMarking).find { - case (t, markings) ⇒ !instance.isBlocked(t) && isAutoFireable(instance, t) + case (t, _) ⇒ !instance.isBlocked(t) && isAutoFireable(instance, t) }.map { case (t, markings) ⇒ val job = Job[P, T, S](instance.nextJobId(), None, instance.state, t, markings.head, null) @@ -170,11 +167,11 @@ trait ProcessInstanceRuntime[P, T, S, E] { } /** - * Finds all automated enabled transitions. - */ + * Finds all automated enabled transitions. 
+ */ def allEnabledJobs: State[Instance[P, T, S], Set[Job[P, T, S]]] = firstEnabledJob.flatMap { - case None ⇒ State.pure(Set.empty) + case None ⇒ State.pure(Set.empty) case Some(job) ⇒ allEnabledJobs.map(_ + job) } } \ No newline at end of file diff --git a/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/serialization/BakerTypedProtobufSerializer.scala b/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/serialization/BakerTypedProtobufSerializer.scala index 47a17f74..ebbc293b 100644 --- a/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/serialization/BakerTypedProtobufSerializer.scala +++ b/runtime/src/main/scala/com/ing/baker/runtime/akka/actor/serialization/BakerTypedProtobufSerializer.scala @@ -2,27 +2,24 @@ package com.ing.baker.runtime.akka.actor.serialization import akka.actor.ExtendedActorSystem import akka.serialization.SerializerWithStringManifest -import com.ing.baker.{il, runtime} import com.ing.baker.runtime.akka.actor.ClusterBakerActorProvider import com.ing.baker.runtime.akka.actor.process_index.ProcessIndexProto._ -import com.ing.baker.runtime.akka.actor.process_index.{ProcessIndex, ProcessIndexProtocol} +import com.ing.baker.runtime.akka.actor.process_index.{ ProcessIndex, ProcessIndexProtocol } import com.ing.baker.runtime.akka.actor.process_instance.ProcessInstanceProto._ import com.ing.baker.runtime.akka.actor.process_instance.ProcessInstanceProtocol import com.ing.baker.runtime.akka.actor.recipe_manager.RecipeManagerProto._ -import com.ing.baker.runtime.akka.actor.recipe_manager.{RecipeManager, RecipeManagerProtocol} +import com.ing.baker.runtime.akka.actor.recipe_manager.{ RecipeManager, RecipeManagerProtocol } import com.ing.baker.runtime.akka.actor.serialization.BakerTypedProtobufSerializer.BinarySerializable -import org.slf4j.LoggerFactory - +import com.ing.baker.{ il, runtime } +import com.typesafe.scalalogging.LazyLogging import scala.reflect.ClassTag import scala.util.Try object BakerTypedProtobufSerializer { - 
private val log = LoggerFactory.getLogger(classOf[BakerTypedProtobufSerializer]) - /** Hardcoded serializerId for this serializer. This should not conflict with other serializers. - * Values from 0 to 40 are reserved for Akka internal usage. - */ + * Values from 0 to 40 are reserved for Akka internal usage. + */ val identifier = 101 def entries(implicit ev0: SerializersProvider): List[BinarySerializable] = @@ -43,7 +40,7 @@ object BakerTypedProtobufSerializer { ) def processIndexEntries(implicit ev0: SerializersProvider): List[BinarySerializable] = - List ( + List( forType[ClusterBakerActorProvider.GetShardIndex] .register("ProcessIndex.GetShardIndex"), forType[ProcessIndex.ActorCreated] @@ -100,8 +97,8 @@ object BakerTypedProtobufSerializer { .register("ProcessIndexProtocol.FireSensoryEventRejection.FiringLimitMet") ) - def processInstanceEntries(implicit ev0: SerializersProvider): List[BinarySerializable] = - List( + def processInstanceEntries(implicit ev0: SerializersProvider): List[BinarySerializable] = + List( forType[ProcessInstanceProtocol.Stop] .register("ProcessInstanceProtocol.Stop"), forType[ProcessInstanceProtocol.GetState.type] @@ -195,7 +192,7 @@ object BakerTypedProtobufSerializer { def toBinary(a: Type): Array[Byte] // The actor resolver is commented for future Akka Typed implementation - def fromBinary(binary: Array[Byte]/*, resolver: ActorRefResolver*/): Try[Type] + def fromBinary(binary: Array[Byte] /*, resolver: ActorRefResolver*/): Try[Type] def isInstance(o: AnyRef): Boolean = tag.isInstance(o) @@ -204,13 +201,14 @@ object BakerTypedProtobufSerializer { toBinary(a.asInstanceOf[Type]) // The actor resolver is commented for future Akka Typed implementation - def fromBinaryAnyRef(binary: Array[Byte]/*, resolver: ActorRefResolver*/): Try[AnyRef] = + def fromBinaryAnyRef(binary: Array[Byte] /*, resolver: ActorRefResolver*/): Try[AnyRef] = fromBinary(binary) } + } -class BakerTypedProtobufSerializer(system: ExtendedActorSystem) extends 
SerializerWithStringManifest { +class BakerTypedProtobufSerializer(system: ExtendedActorSystem) extends SerializerWithStringManifest with LazyLogging { implicit def serializersProvider: SerializersProvider = SerializersProvider(system, system.provider) @@ -238,8 +236,8 @@ class BakerTypedProtobufSerializer(system: ExtendedActorSystem) extends Serializ .map(_.fromBinaryAnyRef(bytes)) .getOrElse(throw new IllegalStateException(s"Unsupported object with manifest $manifest")) .fold( - { e => BakerTypedProtobufSerializer.log.error(s"Failed to deserialize bytes with manifest $manifest", e); throw e }, - identity + {e => logger.error(s"Failed to deserialize bytes with manifest $manifest", e); throw e}, + identity ) } diff --git a/runtime/src/test/scala/com/ing/baker/runtime/akka/BakerEventsSpec.scala b/runtime/src/test/scala/com/ing/baker/runtime/akka/BakerEventsSpec.scala index f520fc7d..85852d5d 100644 --- a/runtime/src/test/scala/com/ing/baker/runtime/akka/BakerEventsSpec.scala +++ b/runtime/src/test/scala/com/ing/baker/runtime/akka/BakerEventsSpec.scala @@ -1,33 +1,28 @@ package com.ing.baker.runtime.akka -import java.util.UUID - import akka.actor.ActorRef -import akka.persistence.inmemory.extension.{InMemoryJournalStorage, StorageExtension} +import akka.persistence.inmemory.extension.{ InMemoryJournalStorage, StorageExtension } import akka.testkit.TestProbe import com.ing.baker._ import com.ing.baker.recipe.TestRecipe._ import com.ing.baker.recipe.common.InteractionFailureStrategy import com.ing.baker.recipe.scaladsl.Recipe import com.ing.baker.runtime.common.RejectReason._ -import com.ing.baker.runtime.scaladsl._ -import com.ing.baker.runtime.scaladsl.EventInstance +import com.ing.baker.runtime.scaladsl.{ EventInstance, _ } import com.ing.baker.types.PrimitiveValue -import org.slf4j.LoggerFactory - +import com.typesafe.scalalogging.LazyLogging +import java.util.UUID import scala.concurrent.Future import scala.concurrent.duration._ import scala.language.postfixOps 
-object BakerEventsSpec { - - val log = LoggerFactory.getLogger(classOf[BakerEventsSpec]) +object BakerEventsSpec extends LazyLogging { def listenerFunction(probe: ActorRef, logEvents: Boolean = false): PartialFunction[BakerEvent, Unit] = { case event: BakerEvent => if (logEvents) { - log.info("Listener consumed event {}", event) + logger.info("Listener consumed event {}", event) } probe ! event } @@ -76,9 +71,7 @@ class BakerEventsSpec extends BakerRuntimeTestBase { override def actorSystemName = "BakerEventsSpec" - val log = LoggerFactory.getLogger(classOf[BakerEventsSpec]) - - val eventReceiveTimeout = 1 seconds + private val eventReceiveTimeout = 1 seconds before { resetMocks @@ -104,18 +97,18 @@ class BakerEventsSpec extends BakerRuntimeTestBase { _ <- baker.fireEventAndResolveWhenCompleted(recipeInstanceId, EventInstance.unsafeFrom(InitialEvent(initialIngredientValue)), "someId") // TODO check the order of the timestamps later _ = expectMsgInAnyOrderPF(listenerProbe, - { case msg@RecipeInstanceCreated(_, `recipeId`, `recipeName`, `recipeInstanceId`) => msg }, - { case msg@EventReceived(_, _, _, `recipeInstanceId`, Some("someId"), EventInstance("InitialEvent", ingredients)) if ingredients == Map("initialIngredient" -> PrimitiveValue(`initialIngredientValue`)) => msg }, - { case msg@InteractionStarted(_, _, _, `recipeInstanceId`, "SieveInteraction") => msg }, - { case msg@InteractionStarted(_, _, _, `recipeInstanceId`, "InteractionOne") => msg }, - { case msg@InteractionStarted(_, _, _, `recipeInstanceId`, "InteractionTwo") => msg }, - { case msg@InteractionStarted(_, _, _, `recipeInstanceId`, "InteractionThree") => msg }, - { case msg@InteractionStarted(_, _, _, `recipeInstanceId`, "ProvidesNothingInteraction") => msg }, - { case msg@InteractionCompleted(_, _, _, _, `recipeInstanceId`, "InteractionOne", Some(EventInstance("InteractionOneSuccessful", ingredients))) if ingredients == Map("interactionOneIngredient" -> PrimitiveValue("interactionOneIngredient")) 
=> msg }, - { case msg@InteractionCompleted(_, _, _, _, `recipeInstanceId`, "InteractionTwo", Some(EventInstance("EventFromInteractionTwo", ingredients))) if ingredients == Map("interactionTwoIngredient" -> PrimitiveValue("interactionTwoIngredient")) => msg }, - { case msg@InteractionCompleted(_, _, _, _, `recipeInstanceId`, "InteractionThree", Some(EventInstance("InteractionThreeSuccessful", ingredients))) if ingredients == Map("interactionThreeIngredient" -> PrimitiveValue("interactionThreeIngredient")) => msg }, - { case msg@InteractionCompleted(_, _, _, _, `recipeInstanceId`, "ProvidesNothingInteraction", None) => msg }, - { case msg@InteractionCompleted(_, _, _, _, `recipeInstanceId`, "SieveInteraction", Some(EventInstance("SieveInteractionSuccessful", ingredients))) if ingredients == Map("sievedIngredient" -> PrimitiveValue("sievedIngredient")) => msg } + {case msg@RecipeInstanceCreated(_, `recipeId`, `recipeName`, `recipeInstanceId`) => msg}, + {case msg@EventReceived(_, _, _, `recipeInstanceId`, Some("someId"), EventInstance("InitialEvent", ingredients)) if ingredients == Map("initialIngredient" -> PrimitiveValue(`initialIngredientValue`)) => msg}, + {case msg@InteractionStarted(_, _, _, `recipeInstanceId`, "SieveInteraction") => msg}, + {case msg@InteractionStarted(_, _, _, `recipeInstanceId`, "InteractionOne") => msg}, + {case msg@InteractionStarted(_, _, _, `recipeInstanceId`, "InteractionTwo") => msg}, + {case msg@InteractionStarted(_, _, _, `recipeInstanceId`, "InteractionThree") => msg}, + {case msg@InteractionStarted(_, _, _, `recipeInstanceId`, "ProvidesNothingInteraction") => msg}, + {case msg@InteractionCompleted(_, _, _, _, `recipeInstanceId`, "InteractionOne", Some(EventInstance("InteractionOneSuccessful", ingredients))) if ingredients == Map("interactionOneIngredient" -> PrimitiveValue("interactionOneIngredient")) => msg}, + {case msg@InteractionCompleted(_, _, _, _, `recipeInstanceId`, "InteractionTwo", 
Some(EventInstance("EventFromInteractionTwo", ingredients))) if ingredients == Map("interactionTwoIngredient" -> PrimitiveValue("interactionTwoIngredient")) => msg}, + {case msg@InteractionCompleted(_, _, _, _, `recipeInstanceId`, "InteractionThree", Some(EventInstance("InteractionThreeSuccessful", ingredients))) if ingredients == Map("interactionThreeIngredient" -> PrimitiveValue("interactionThreeIngredient")) => msg}, + {case msg@InteractionCompleted(_, _, _, _, `recipeInstanceId`, "ProvidesNothingInteraction", None) => msg}, + {case msg@InteractionCompleted(_, _, _, _, `recipeInstanceId`, "SieveInteraction", Some(EventInstance("SieveInteractionSuccessful", ingredients))) if ingredients == Map("sievedIngredient" -> PrimitiveValue("sievedIngredient")) => msg} ) _ = listenerProbe.expectNoMessage(eventReceiveTimeout) } yield succeed diff --git a/runtime/src/test/scala/com/ing/baker/runtime/akka/BakerExecutionSpec.scala b/runtime/src/test/scala/com/ing/baker/runtime/akka/BakerExecutionSpec.scala index 07988532..d34572b3 100644 --- a/runtime/src/test/scala/com/ing/baker/runtime/akka/BakerExecutionSpec.scala +++ b/runtime/src/test/scala/com/ing/baker/runtime/akka/BakerExecutionSpec.scala @@ -1,29 +1,26 @@ package com.ing.baker.runtime.akka -import java.util.concurrent.TimeUnit -import java.util.{Optional, UUID} - import akka.actor.ActorSystem -import akka.persistence.inmemory.extension.{InMemoryJournalStorage, StorageExtension} +import akka.persistence.inmemory.extension.{ InMemoryJournalStorage, StorageExtension } import akka.stream.ActorMaterializer -import akka.testkit.{TestDuration, TestKit, TestProbe} +import akka.testkit.{ TestDuration, TestKit, TestProbe } import com.ing.baker._ import com.ing.baker.compiler.RecipeCompiler import com.ing.baker.recipe.TestRecipe._ import com.ing.baker.recipe.common.InteractionFailureStrategy import com.ing.baker.recipe.common.InteractionFailureStrategy.FireEventAfterFailure -import com.ing.baker.recipe.scaladsl.{Event, 
Ingredient, Interaction, Recipe} +import com.ing.baker.recipe.scaladsl.{ Event, Ingredient, Interaction, Recipe } import com.ing.baker.runtime.common.BakerException._ import com.ing.baker.runtime.common._ -import com.ing.baker.runtime.scaladsl.{Baker, EventInstance, InteractionInstance} -import com.ing.baker.types.{CharArray, Int32, PrimitiveValue} +import com.ing.baker.runtime.scaladsl.{ Baker, EventInstance, InteractionInstance } +import com.ing.baker.types.{ CharArray, Int32, PrimitiveValue } import com.typesafe.config.ConfigFactory -import org.mockito.Matchers.{eq => mockitoEq, _} +import java.util.concurrent.TimeUnit +import java.util.{ Optional, UUID } +import org.mockito.Matchers.{ eq => mockitoEq, _ } import org.mockito.Mockito._ import org.mockito.invocation.InvocationOnMock import org.mockito.stubbing.Answer -import org.slf4j.LoggerFactory - import scala.concurrent.Future import scala.concurrent.duration._ import scala.language.postfixOps @@ -34,8 +31,6 @@ class BakerExecutionSpec extends BakerRuntimeTestBase { override def actorSystemName = "BakerExecutionSpec" - val log = LoggerFactory.getLogger(classOf[BakerExecutionSpec]) - before { resetMocks setupMockResponse() diff --git a/runtime/src/test/scala/com/ing/baker/runtime/akka/BakerInquireSpec.scala b/runtime/src/test/scala/com/ing/baker/runtime/akka/BakerInquireSpec.scala index 02b27315..5d1f4be4 100644 --- a/runtime/src/test/scala/com/ing/baker/runtime/akka/BakerInquireSpec.scala +++ b/runtime/src/test/scala/com/ing/baker/runtime/akka/BakerInquireSpec.scala @@ -1,23 +1,16 @@ package com.ing.baker.runtime.akka -import akka.persistence.inmemory.extension.{InMemoryJournalStorage, StorageExtension} +import akka.persistence.inmemory.extension.{ InMemoryJournalStorage, StorageExtension } import akka.testkit.TestProbe import com.ing.baker._ import com.ing.baker.compiler.RecipeCompiler -import com.ing.baker.il.CompiledRecipe import com.ing.baker.recipe.TestRecipe.getRecipe -import 
com.ing.baker.runtime.common.RecipeInformation -import org.slf4j.LoggerFactory - import scala.language.postfixOps class BakerInquireSpec extends BakerRuntimeTestBase { override def actorSystemName = "BakerInquireSpec" - val log = LoggerFactory.getLogger(classOf[BakerInquireSpec]) - - before { resetMocks setupMockResponse() diff --git a/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/recipe_manager/RecipeManagerSpec.scala b/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/recipe_manager/RecipeManagerSpec.scala index b5f1f716..cc3d8806 100644 --- a/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/recipe_manager/RecipeManagerSpec.scala +++ b/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/recipe_manager/RecipeManagerSpec.scala @@ -1,15 +1,13 @@ package com.ing.baker.runtime.akka.actor.recipe_manager -import java.util.UUID - import akka.actor.ActorRef import akka.pattern.ask import com.ing.baker.BakerRuntimeTestBase import com.ing.baker.compiler.RecipeCompiler import com.ing.baker.recipe.TestRecipe import com.ing.baker.runtime.akka.actor.recipe_manager.RecipeManagerProtocol._ -import com.typesafe.config.{Config, ConfigFactory} -import org.slf4j.LoggerFactory +import com.typesafe.config.{ Config, ConfigFactory } +import java.util.UUID object RecipeManagerSpec { val config: Config = ConfigFactory.parseString( @@ -20,16 +18,14 @@ object RecipeManagerSpec { """.stripMargin) } -class RecipeManagerSpec extends BakerRuntimeTestBase { +class RecipeManagerSpec extends BakerRuntimeTestBase { override def actorSystemName = "RecipeManagerSpec" - val log = LoggerFactory.getLogger(classOf[RecipeManagerSpec]) - "The RecipeManagerSpec" should { "Add a recipe to the list when a AddRecipe message is received" in { val compiledRecipe = RecipeCompiler.compileRecipe(TestRecipe.getRecipe("AddRecipeRecipe")) - val recipeManager: ActorRef = defaultActorSystem.actorOf(RecipeManager.props(), s"recipeManager-${UUID.randomUUID().toString}") + val recipeManager: 
ActorRef = defaultActorSystem.actorOf(RecipeManager.props(), s"recipeManager-${UUID.randomUUID().toString}") for { futureAddResult <- recipeManager.ask(AddRecipe(compiledRecipe))(timeout) diff --git a/split-brain-resolver/src/main/scala/com/ing/baker/runtime/akka/actor/downing/MajorityStrategy.scala b/split-brain-resolver/src/main/scala/com/ing/baker/runtime/akka/actor/downing/MajorityStrategy.scala index 81011e0c..2f2fc98b 100644 --- a/split-brain-resolver/src/main/scala/com/ing/baker/runtime/akka/actor/downing/MajorityStrategy.scala +++ b/split-brain-resolver/src/main/scala/com/ing/baker/runtime/akka/actor/downing/MajorityStrategy.scala @@ -1,17 +1,15 @@ package com.ing.baker.runtime.akka.actor.downing import akka.cluster.UniqueAddress -import org.slf4j.LoggerFactory +import com.typesafe.scalalogging.LazyLogging -class MajorityStrategy extends Strategy { - - private val log = LoggerFactory.getLogger(classOf[MajorityStrategy]) +class MajorityStrategy extends Strategy with LazyLogging { override def sbrDecision(clusterHelper: ClusterHelper): Unit = { if (clusterHelper.amIMember && clusterHelper.amILeader && clusterHelper.unreachables.nonEmpty) { val nodesToDown = this.nodesToDown(clusterHelper) - log.info(s"SplitBrainResolver: ${clusterHelper.myUniqueAddress} downing these nodes $nodesToDown") + logger.info(s"SplitBrainResolver: ${clusterHelper.myUniqueAddress} downing these nodes $nodesToDown") if (nodesToDown contains clusterHelper.myUniqueAddress) { // leader going down clusterHelper.downSelf() From a22a667b32ecee47f570adfe9cae9c5f5486ce71 Mon Sep 17 00:00:00 2001 From: Sander Dijkhuis Date: Tue, 15 Oct 2019 18:17:02 +0200 Subject: [PATCH 02/35] docs: document RuntimeEvent access removal in v3 Based on an explanation by @Tim-Linschoten: https://github.com/ing-bank/baker/issues/287#issuecomment-542261760 --- docs/sections/versions/baker-3-release-notes.md | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git 
a/docs/sections/versions/baker-3-release-notes.md b/docs/sections/versions/baker-3-release-notes.md index 7c597bac..13d93a7e 100644 --- a/docs/sections/versions/baker-3-release-notes.md +++ b/docs/sections/versions/baker-3-release-notes.md @@ -221,4 +221,12 @@ In this release we have removed sieves completely. In JBaker we supported using a UUID as a processId. In this release this has been removed completely and we accept Strings. This is again in line with being clearer what Baker does. -Internally we were just transforming this to a String. \ No newline at end of file +Internally we were just transforming this to a String. + +### Access to RuntimeEvents +Version 2 provided undocumented public interfaces returning RuntimeEvent instances. +In version 3, the possibility to get the Ingredients provided by a specific Event is gone. +Users should not care where ingredients are provided from. +This could be from a SensoryEvent or as the output of an Event from an Interaction. +This should not matter; only whether the ingredient is available should matter. +This allows users to flexibly change recipes without impacting client code. 
From 2bb0d6119dfe5a07eec82dd208be24c46f275681 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 05:55:18 +0100 Subject: [PATCH 03/35] Update sbt-assembly to 0.14.10 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index d93b9ec7..7e49d1c8 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -2,7 +2,7 @@ addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1") addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0") -addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.6") +addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.10") addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.9.0") From b19ea8ae3318abf9562a2fe2fc94ec069592c7cf Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 05:55:30 +0100 Subject: [PATCH 04/35] Update akka-persistence-inmemory to 2.5.15.2 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 18521c92..968866c0 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -12,7 +12,7 @@ object Dependencies { val typeSafeConfig = "com.typesafe" % "config" % "1.3.1" - val akkaInmemoryJournal = ("com.github.dnvriend" %% "akka-persistence-inmemory" % "2.5.1.1") + val akkaInmemoryJournal = ("com.github.dnvriend" %% "akka-persistence-inmemory" % "2.5.15.2") .exclude("com.typesafe.akka", "akka-actor") .exclude("com.typesafe.akka", "akka-persistence") .exclude("com.typesafe.akka", "akka-persistence-query") From 08c393af24e3c89415c919fa06072dceb4b0579d Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 05:55:42 +0100 Subject: [PATCH 05/35] Update sbt-release to 1.0.12 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index d93b9ec7..79353586 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -6,7 
+6,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.6") addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.9.0") -addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.7") +addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.12") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.1") From 03042da9b052d67bb5b7910b1b6f89a05c32ba73 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 05:55:48 +0100 Subject: [PATCH 06/35] Update better-files to 3.8.0 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 18521c92..1eb3f280 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -73,7 +73,7 @@ object Dependencies { val protobufJava = "com.google.protobuf" % "protobuf-java" % "3.5.1" - val betterFiles = "com.github.pathikrit" %% "better-files" % "3.6.0" + val betterFiles = "com.github.pathikrit" %% "better-files" % "3.8.0" val kryoSerializers = "de.javakaffee" % "kryo-serializers" % "0.41" val objenisis = "org.objenesis" % "objenesis" % "2.5.1" From 54217f0240214f821ef5a7a88b86b011db1070da Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 05:56:31 +0100 Subject: [PATCH 07/35] Update sbt-pgp to 1.1.2 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index d93b9ec7..f8d11ca5 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -14,7 +14,7 @@ addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.3.4") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.3") -addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.1") +addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.2") addSbtPlugin("com.typesafe.sbt" % "sbt-multi-jvm" % "0.4.0") From c5819afa107232bad09aa7a860279fe14702455f Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 05:56:37 +0100 Subject: [PATCH 08/35] Update sbt-protoc to 0.99.26 --- 
project/scalapb.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/scalapb.sbt b/project/scalapb.sbt index 2a20c389..0b302d5e 100644 --- a/project/scalapb.sbt +++ b/project/scalapb.sbt @@ -1,3 +1,3 @@ -addSbtPlugin("com.thesamet" % "sbt-protoc" % "0.99.18") +addSbtPlugin("com.thesamet" % "sbt-protoc" % "0.99.26") libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "0.7.4" \ No newline at end of file From 0d2f4ef0e742eb9817f4c84b7826f0d7985229a0 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 05:57:08 +0100 Subject: [PATCH 09/35] Update akka-actor, akka-cluster, ... to 2.5.26 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 18521c92..15dff63e 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -3,7 +3,7 @@ import sbt._ //noinspection TypeAnnotation object Dependencies { - val akkaVersion = "2.5.22" + val akkaVersion = "2.5.26" val http4sVersion = "0.20.0" val circeVersion = "0.11.1" From 5744f604506e44aee817216e81a036592d0c7f50 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 05:57:25 +0100 Subject: [PATCH 10/35] Update akka-http to 10.0.15 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 18521c92..4a7e421b 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -34,7 +34,7 @@ object Dependencies { val akkaTestKit = "com.typesafe.akka" %% "akka-testkit" % akkaVersion val akkaStreamTestKit = "com.typesafe.akka" %% "akka-stream-testkit" % akkaVersion val akkaMultiNodeTestkit = "com.typesafe.akka" %% "akka-multi-node-testkit" % akkaVersion - val akkaHttp = "com.typesafe.akka" %% "akka-http" % "10.0.10" + val akkaHttp = "com.typesafe.akka" %% "akka-http" % "10.0.15" val levelDB = "org.iq80.leveldb" % "leveldb" % "0.7" val 
levelDBJni = "org.fusesource.leveldbjni" % "leveldbjni-all" % "1.8" From e0d1c5a3bab1aa844f69502c9b0936318f71f793 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 05:57:43 +0100 Subject: [PATCH 11/35] Update akka-persistence-cassandra to 0.100 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 18521c92..429ea445 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -27,7 +27,7 @@ object Dependencies { val akkaStream = "com.typesafe.akka" %% "akka-stream" % akkaVersion val akkaPersistence = "com.typesafe.akka" %% "akka-persistence" % akkaVersion val akkaPersistenceQuery = "com.typesafe.akka" %% "akka-persistence-query" % akkaVersion - val akkaPersistenceCassandra = "com.typesafe.akka" %% "akka-persistence-cassandra" % "0.54" + val akkaPersistenceCassandra = "com.typesafe.akka" %% "akka-persistence-cassandra" % "0.100" val akkaCluster = "com.typesafe.akka" %% "akka-cluster" % akkaVersion val akkaClusterSharding = "com.typesafe.akka" %% "akka-cluster-sharding" % akkaVersion val akkaSlf4j = "com.typesafe.akka" %% "akka-slf4j" % akkaVersion From 2dca0a224300afe13b14a457e7b02555f5f414a2 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 05:58:00 +0100 Subject: [PATCH 12/35] Update graphviz-java to 0.8.10 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 18521c92..13f34bdf 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -43,7 +43,7 @@ object Dependencies { val scalaGraph = "org.scala-graph" %% "graph-core" % "1.11.5" val scalaGraphDot = "org.scala-graph" %% "graph-dot" % "1.11.5" - val graphvizJava = "guru.nidi" % "graphviz-java" % "0.8.0" + val graphvizJava = "guru.nidi" % "graphviz-java" % "0.8.10" val kamon = "io.kamon" %% "kamon-bundle" % "2.0.0" val kamonAkka = 
"io.kamon" %% "kamon-akka" % "2.0.0" From 0d752afa23b6f8ec5c64e1c6915e05bd68c64aa6 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 05:58:13 +0100 Subject: [PATCH 13/35] Update joda-time to 2.10.5 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 18521c92..68e1190a 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -78,7 +78,7 @@ object Dependencies { val kryoSerializers = "de.javakaffee" % "kryo-serializers" % "0.41" val objenisis = "org.objenesis" % "objenesis" % "2.5.1" - val jodaTime = "joda-time" % "joda-time" % "2.9.9" + val jodaTime = "joda-time" % "joda-time" % "2.10.5" val slf4jApi = "org.slf4j" % "slf4j-api" % "1.7.25" val slf4jSimple = "org.slf4j" % "slf4j-simple" % "1.7.5" val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.13.4" From c578197c079be4988f839aa58f3e98dc8dd5e562 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 05:58:37 +0100 Subject: [PATCH 14/35] Update leveldb to 0.12 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 18521c92..90d000d4 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -35,7 +35,7 @@ object Dependencies { val akkaStreamTestKit = "com.typesafe.akka" %% "akka-stream-testkit" % akkaVersion val akkaMultiNodeTestkit = "com.typesafe.akka" %% "akka-multi-node-testkit" % akkaVersion val akkaHttp = "com.typesafe.akka" %% "akka-http" % "10.0.10" - val levelDB = "org.iq80.leveldb" % "leveldb" % "0.7" + val levelDB = "org.iq80.leveldb" % "leveldb" % "0.12" val levelDBJni = "org.fusesource.leveldbjni" % "leveldbjni-all" % "1.8" val logback = "ch.qos.logback" % "logback-classic" % "1.2.2" From d8675100f33721c848c0f8d26ae881a86dd860ca Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 05:58:43 +0100 Subject: [PATCH 
15/35] Update junit-jupiter-engine to 5.0.3 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 18521c92..8f41f3fc 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -21,7 +21,7 @@ object Dependencies { val scalaTest = "org.scalatest" %% "scalatest" % "3.0.5" val mockito = "org.mockito" % "mockito-all" % "1.10.19" val junitInterface = "com.novocode" % "junit-interface" % "0.11" - val junitJupiter = "org.junit.jupiter" % "junit-jupiter-engine" % "5.0.0" + val junitJupiter = "org.junit.jupiter" % "junit-jupiter-engine" % "5.0.3" val akkaActor = "com.typesafe.akka" %% "akka-actor" % akkaVersion val akkaStream = "com.typesafe.akka" %% "akka-stream" % akkaVersion From b9cac4ba3c722341ef85ce4de1e0c1a115b01900 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 05:58:55 +0100 Subject: [PATCH 16/35] Update objenesis to 2.6 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 18521c92..0b6bd6dd 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -76,7 +76,7 @@ object Dependencies { val betterFiles = "com.github.pathikrit" %% "better-files" % "3.6.0" val kryoSerializers = "de.javakaffee" % "kryo-serializers" % "0.41" - val objenisis = "org.objenesis" % "objenesis" % "2.5.1" + val objenisis = "org.objenesis" % "objenesis" % "2.6" val jodaTime = "joda-time" % "joda-time" % "2.9.9" val slf4jApi = "org.slf4j" % "slf4j-api" % "1.7.25" From 6b6063fd6c74a96cf01928806f540eecde878da5 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 05:59:42 +0100 Subject: [PATCH 17/35] Update scalacheck to 1.13.5 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 18521c92..d3ecf808 100644 --- 
a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -81,7 +81,7 @@ object Dependencies { val jodaTime = "joda-time" % "joda-time" % "2.9.9" val slf4jApi = "org.slf4j" % "slf4j-api" % "1.7.25" val slf4jSimple = "org.slf4j" % "slf4j-simple" % "1.7.5" - val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.13.4" + val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.13.5" def scopeDeps(scope: String, modules: Seq[ModuleID]) = modules.map(m => m % scope) def compileDeps(modules: ModuleID*) = modules.toSeq From 5a1eb8c9782047ae435d47a5a69ff5ee6b4b7735 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 06:00:16 +0100 Subject: [PATCH 18/35] Update cats-core to 1.6.1 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 18521c92..3bb952f5 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -56,7 +56,7 @@ object Dependencies { val circe = "io.circe" %% "circe-core" % circeVersion val circeGeneric = "io.circe" %% "circe-generic" % circeVersion val catsEffect = "org.typelevel" %% "cats-effect" % "1.2.0" - val catsCore = "org.typelevel" %% "cats-core" % "1.5.0" + val catsCore = "org.typelevel" %% "cats-core" % "1.6.1" def scalaReflect(scalaV: String): ModuleID = "org.scala-lang"% "scala-reflect" % scalaV val javaxInject = "javax.inject" % "javax.inject" % "1" From 230dc0cf144f75000f66148a84229131426759c8 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 06:00:30 +0100 Subject: [PATCH 19/35] Update cats-effect to 1.4.0 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 18521c92..31bafb33 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -55,7 +55,7 @@ object Dependencies { val http4sCirce = "org.http4s" %% "http4s-circe" % http4sVersion val circe = "io.circe" %% 
"circe-core" % circeVersion val circeGeneric = "io.circe" %% "circe-generic" % circeVersion - val catsEffect = "org.typelevel" %% "cats-effect" % "1.2.0" + val catsEffect = "org.typelevel" %% "cats-effect" % "1.4.0" val catsCore = "org.typelevel" %% "cats-core" % "1.5.0" def scalaReflect(scalaV: String): ModuleID = "org.scala-lang"% "scala-reflect" % scalaV From c61f36a4d42755c343b07f6c057e7ac21f13283c Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 29 Oct 2019 06:00:44 +0100 Subject: [PATCH 20/35] Update sbt-sonatype to 2.6 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index d93b9ec7..74da8cf0 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,7 +12,7 @@ addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.1") addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.3.4") -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.3") +addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.6") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.1") From 125c0b061ca5af3189b6695cb974005237db7bb5 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 1 Nov 2019 00:08:01 +0100 Subject: [PATCH 21/35] Update slf4j-api, slf4j-nop to 1.7.29 --- project/Dependencies.scala | 2 +- project/plugins.sbt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 18521c92..1693e458 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -79,7 +79,7 @@ object Dependencies { val objenisis = "org.objenesis" % "objenesis" % "2.5.1" val jodaTime = "joda-time" % "joda-time" % "2.9.9" - val slf4jApi = "org.slf4j" % "slf4j-api" % "1.7.25" + val slf4jApi = "org.slf4j" % "slf4j-api" % "1.7.29" val slf4jSimple = "org.slf4j" % "slf4j-simple" % "1.7.5" val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.13.4" diff --git a/project/plugins.sbt b/project/plugins.sbt index d93b9ec7..720eaac9 100644 --- 
a/project/plugins.sbt +++ b/project/plugins.sbt @@ -20,7 +20,7 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-multi-jvm" % "0.4.0") addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.3.25") -libraryDependencies += "org.slf4j" % "slf4j-nop" % "1.7.25" +libraryDependencies += "org.slf4j" % "slf4j-nop" % "1.7.29" // For the example application addSbtPlugin("io.kamon" % "sbt-kanela-runner" % "2.0.1") \ No newline at end of file From 52d897974cd3821d0b23f9b2fe5825daeffc2104 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sat, 9 Nov 2019 23:26:34 +0100 Subject: [PATCH 22/35] Update sbt-protoc to 0.99.27 --- project/scalapb.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/scalapb.sbt b/project/scalapb.sbt index 2a20c389..2dd43cc4 100644 --- a/project/scalapb.sbt +++ b/project/scalapb.sbt @@ -1,3 +1,3 @@ -addSbtPlugin("com.thesamet" % "sbt-protoc" % "0.99.18") +addSbtPlugin("com.thesamet" % "sbt-protoc" % "0.99.27") libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "0.7.4" \ No newline at end of file From cf9eabf7bbccf1b0852200cc709cb48755df1a80 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 25 Nov 2019 20:36:37 +0100 Subject: [PATCH 23/35] Update sbt-native-packager to 1.5.1 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index e17de5f7..a4b9b88f 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -18,7 +18,7 @@ addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.1") addSbtPlugin("com.typesafe.sbt" % "sbt-multi-jvm" % "0.4.0") -addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.3.25") +addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.5.1") libraryDependencies += "org.slf4j" % "slf4j-nop" % "1.7.25" From 47a60d5103f80aba9da358e303edbb250aef8ad5 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 26 Nov 2019 13:37:53 +0100 Subject: [PATCH 24/35] Update protobuf-java to 3.11.0 --- 
project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 080ec878..4f266181 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -72,7 +72,7 @@ object Dependencies { val kryo = "com.esotericsoftware" % "kryo" % "4.0.0" - val protobufJava = "com.google.protobuf" % "protobuf-java" % "3.5.1" + val protobufJava = "com.google.protobuf" % "protobuf-java" % "3.11.0" val betterFiles = "com.github.pathikrit" %% "better-files" % "3.6.0" From eb9d109ff5e467dc198a39504e836ca61ae5d498 Mon Sep 17 00:00:00 2001 From: yk24na Date: Thu, 28 Nov 2019 13:37:54 +0100 Subject: [PATCH 25/35] Remove declarations for unused dependencies. --- build.sbt | 23 ++++++----------------- project/Dependencies.scala | 8 ++------ project/build.properties | 2 +- 3 files changed, 9 insertions(+), 24 deletions(-) diff --git a/build.sbt b/build.sbt index 968a647a..aefe75ae 100644 --- a/build.sbt +++ b/build.sbt @@ -1,7 +1,7 @@ import Dependencies.{scalaGraph, _} import sbt.Keys._ -def testScope(project: ProjectReference) = project % "test->test;test->compile" +def testScope(project: ProjectReference): ClasspathDep[ProjectReference] = project % "test->test;test->compile" val commonSettings = Defaults.coreDefaultSettings ++ Seq( organization := "com.ing.baker", @@ -55,10 +55,10 @@ lazy val bakertypes = project.in(file("bakertypes")) moduleName := "baker-types", libraryDependencies ++= compileDeps( slf4jApi, - ficusConfig, objenisis, scalapbRuntime, jodaTime, + typeSafeConfig, scalaReflect(scalaVersion.value) ) ++ testDeps(scalaTest, scalaCheck, logback, scalaCheck) ) @@ -71,7 +71,6 @@ lazy val intermediateLanguage = project.in(file("intermediate-language")) scalaGraph, slf4jApi, scalaGraphDot, - objenisis, typeSafeConfig ) ++ testDeps(scalaTest, scalaCheck, logback) ).dependsOn(bakertypes) @@ -89,21 +88,14 @@ lazy val runtime = project.in(file("runtime")) akkaActor, akkaPersistence, 
akkaPersistenceQuery, - akkaCluster, akkaClusterSharding, akkaBoostrap, - akkaInmemoryJournal, akkaSlf4j, ficusConfig, catsCore, catsEffect, - guava, - chill, - objenisis, scalapbRuntime, protobufJava, - kryo, - kryoSerializers, slf4jApi ) ++ testDeps( akkaStream, @@ -137,13 +129,13 @@ lazy val splitBrainResolver = project.in(file("split-brain-resolver")) compileDeps( akkaActor, akkaCluster, - akkaSlf4j, - ficusConfig + ficusConfig, + slf4jApi ) ++ testDeps( akkaTestKit, akkaMultiNodeTestkit, scalaTest - ) ++ providedDeps(findbugs) + ) ) .enablePlugins(MultiJvmPlugin) .configs(MultiJvm) @@ -177,7 +169,7 @@ lazy val recipeCompiler = project.in(file("compiler")) .settings( moduleName := "baker-compiler", libraryDependencies ++= - compileDeps(slf4jApi) ++ testDeps(scalaTest, scalaCheck, logback, junitJupiter) + testDeps(scalaTest, scalaCheck, logback, junitJupiter) ) .dependsOn(recipeDsl, intermediateLanguage, testScope(recipeDsl)) @@ -187,9 +179,6 @@ lazy val baas = project.in(file("baas")) .settings( moduleName := "baker-baas", libraryDependencies ++= - compileDeps( - akkaHttp, - akkaPersistenceCassandra) ++ testDeps( akkaSlf4j, akkaTestKit, diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 080ec878..0a49e146 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -10,8 +10,6 @@ object Dependencies { val jvmV = "1.8" val scalapbVersion = scalapb.compiler.Version.scalapbVersion - val typeSafeConfig = "com.typesafe" % "config" % "1.3.1" - val akkaInmemoryJournal = ("com.github.dnvriend" %% "akka-persistence-inmemory" % "2.5.1.1") .exclude("com.typesafe.akka", "akka-actor") .exclude("com.typesafe.akka", "akka-persistence") @@ -63,20 +61,18 @@ object Dependencies { val javaxInject = "javax.inject" % "javax.inject" % "1" val paranamer = "com.thoughtworks.paranamer" % "paranamer" % "2.8" - val guava = "com.google.guava" % "guava" % "19.0" val findbugs = "com.google.code.findbugs" % "jsr305" % "1.3.9" val scalapbRuntime = 
"com.thesamet.scalapb" %% "scalapb-runtime" % scalapbVersion % "protobuf" val chill = ("com.twitter" %% "chill-akka" % "0.9.4") .exclude("com.typesafe.akka", "akka-actor") - val kryo = "com.esotericsoftware" % "kryo" % "4.0.0" - val protobufJava = "com.google.protobuf" % "protobuf-java" % "3.5.1" val betterFiles = "com.github.pathikrit" %% "better-files" % "3.6.0" - val kryoSerializers = "de.javakaffee" % "kryo-serializers" % "0.41" + val typeSafeConfig = "com.typesafe" % "config" % "1.4.0" + val objenisis = "org.objenesis" % "objenesis" % "2.5.1" val jodaTime = "joda-time" % "joda-time" % "2.9.9" diff --git a/project/build.properties b/project/build.properties index 64cf32f7..c0bab049 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.1.4 +sbt.version=1.2.8 From 3d96dac156ea0f6b82f250cb5d9eba8d14fee40a Mon Sep 17 00:00:00 2001 From: yk24na Date: Thu, 28 Nov 2019 13:50:26 +0100 Subject: [PATCH 26/35] Remove chill akka from the dependencies. --- project/Dependencies.scala | 2 -- 1 file changed, 2 deletions(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 0a49e146..0c89a612 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -64,8 +64,6 @@ object Dependencies { val findbugs = "com.google.code.findbugs" % "jsr305" % "1.3.9" val scalapbRuntime = "com.thesamet.scalapb" %% "scalapb-runtime" % scalapbVersion % "protobuf" - val chill = ("com.twitter" %% "chill-akka" % "0.9.4") - .exclude("com.typesafe.akka", "akka-actor") val protobufJava = "com.google.protobuf" % "protobuf-java" % "3.5.1" From 65f62e7a31d70d15815d79ae9a515c11b11d647f Mon Sep 17 00:00:00 2001 From: yk24na Date: Thu, 28 Nov 2019 13:55:22 +0100 Subject: [PATCH 27/35] Remove unused plugins. 
--- build.sbt | 2 +- project/plugins.sbt | 6 ------ 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/build.sbt b/build.sbt index aefe75ae..01d336f2 100644 --- a/build.sbt +++ b/build.sbt @@ -45,7 +45,7 @@ lazy val noPublishSettings = Seq( publishArtifact := false ) -lazy val defaultModuleSettings = commonSettings ++ dependencyOverrideSettings ++ Revolver.settings ++ SonatypePublish.settings +lazy val defaultModuleSettings = commonSettings ++ dependencyOverrideSettings ++ SonatypePublish.settings lazy val scalaPBSettings = Seq(PB.targets in Compile := Seq(scalapb.gen() -> (sourceManaged in Compile).value)) diff --git a/project/plugins.sbt b/project/plugins.sbt index e17de5f7..c262848e 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,17 +1,11 @@ -addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1") - addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0") addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.6") -addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.9.0") - addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.7") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") -addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.3.4") - addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.3") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.1") From 2d140e0785d68884c89b0225821da9bdf93890ff Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 28 Nov 2019 19:50:54 +0100 Subject: [PATCH 28/35] Update sbt-pgp to 2.0.0 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index e8534049..f975a693 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -8,7 +8,7 @@ addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.3") -addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.2") +addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.0") addSbtPlugin("com.typesafe.sbt" % "sbt-multi-jvm" % "0.4.0") From 
0e8a1eaa2bbdafb8552d446580edd647d0f0214d Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sat, 30 Nov 2019 19:08:57 +0100 Subject: [PATCH 29/35] Update junit-jupiter-engine to 5.5.2 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 85509690..c4326b3d 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -19,7 +19,7 @@ object Dependencies { val scalaTest = "org.scalatest" %% "scalatest" % "3.0.5" val mockito = "org.mockito" % "mockito-all" % "1.10.19" val junitInterface = "com.novocode" % "junit-interface" % "0.11" - val junitJupiter = "org.junit.jupiter" % "junit-jupiter-engine" % "5.0.3" + val junitJupiter = "org.junit.jupiter" % "junit-jupiter-engine" % "5.5.2" val akkaActor = "com.typesafe.akka" %% "akka-actor" % akkaVersion val akkaStream = "com.typesafe.akka" %% "akka-stream" % akkaVersion From 88cd1a47b587ded549cca405f29b6f27558a3be2 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sat, 30 Nov 2019 19:09:12 +0100 Subject: [PATCH 30/35] Update objenesis to 3.1 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 85509690..48a76613 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -75,7 +75,7 @@ object Dependencies { val typeSafeConfig = "com.typesafe" % "config" % "1.4.0" - val objenisis = "org.objenesis" % "objenesis" % "2.6" + val objenisis = "org.objenesis" % "objenesis" % "3.1" val jodaTime = "joda-time" % "joda-time" % "2.10.5" val slf4jApi = "org.slf4j" % "slf4j-api" % "1.7.29" From 98d0d34739b4ee6b0b5f579539c510c16081ca04 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sat, 30 Nov 2019 19:09:20 +0100 Subject: [PATCH 31/35] Update cats-core to 2.0.0 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala 
b/project/Dependencies.scala index 85509690..702fdf88 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -59,7 +59,7 @@ object Dependencies { val circeGeneric = "io.circe" %% "circe-generic" % circeVersion val catsEffect = "org.typelevel" %% "cats-effect" % "1.4.0" - val catsCore = "org.typelevel" %% "cats-core" % "1.6.1" + val catsCore = "org.typelevel" %% "cats-core" % "2.0.0" def scalaReflect(scalaV: String): ModuleID = "org.scala-lang"% "scala-reflect" % scalaV val javaxInject = "javax.inject" % "javax.inject" % "1" From 44d19ed5b0041439f48d5e106fd951447cc57103 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sat, 30 Nov 2019 19:09:52 +0100 Subject: [PATCH 32/35] Update sbt-sonatype to 3.8.1 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 210c2791..d44dc3ec 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -6,7 +6,7 @@ addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.12") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.6") +addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.8.1") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.0") From d3c671470f74b9a0509f4cc48f7fd484f260a82d Mon Sep 17 00:00:00 2001 From: yk24na Date: Tue, 3 Dec 2019 14:37:21 +0100 Subject: [PATCH 33/35] Update cats-effect to 2.0.0 Updates org.typelevel:cats-effect from 1.4.0 to 2.0.0. 
Release Notes/Changelog --- project/Dependencies.scala | 2 +- .../com/ing/baker/runtime/akka/internal/RecipeRuntime.scala | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index cd43cbe5..cd1b40e4 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -58,7 +58,7 @@ object Dependencies { val circe = "io.circe" %% "circe-core" % circeVersion val circeGeneric = "io.circe" %% "circe-generic" % circeVersion - val catsEffect = "org.typelevel" %% "cats-effect" % "1.4.0" + val catsEffect = "org.typelevel" %% "cats-effect" % "2.0.0" val catsCore = "org.typelevel" %% "cats-core" % "2.0.0" def scalaReflect(scalaV: String): ModuleID = "org.scala-lang"% "scala-reflect" % scalaV diff --git a/runtime/src/main/scala/com/ing/baker/runtime/akka/internal/RecipeRuntime.scala b/runtime/src/main/scala/com/ing/baker/runtime/akka/internal/RecipeRuntime.scala index 81b5b1e9..454f5c2c 100644 --- a/runtime/src/main/scala/com/ing/baker/runtime/akka/internal/RecipeRuntime.scala +++ b/runtime/src/main/scala/com/ing/baker/runtime/akka/internal/RecipeRuntime.scala @@ -3,7 +3,7 @@ package com.ing.baker.runtime.akka.internal import java.lang.reflect.InvocationTargetException import akka.event.EventStream -import cats.effect.IO +import cats.effect.{ContextShift, IO} import com.ing.baker.il import com.ing.baker.il.failurestrategy.ExceptionStrategyOutcome import com.ing.baker.il.petrinet._ @@ -120,6 +120,7 @@ object RecipeRuntime { class RecipeRuntime(recipe: CompiledRecipe, interactionManager: InteractionManager, eventStream: EventStream)(implicit ec: ExecutionContext) extends ProcessInstanceRuntime[Place, Transition, RecipeInstanceState, EventInstance] { + protected implicit lazy val contextShift: ContextShift[IO] = IO.contextShift(ec) /** * All transitions except sensory event interactions are auto-fireable by the runtime */ From ef89e0825bad15b9ecf5a22243e3ef89e175e555 Mon Sep 17 00:00:00 2001 From: 
Scala Steward Date: Tue, 3 Dec 2019 15:36:32 +0100 Subject: [PATCH 34/35] Update protobuf-java to 3.11.1 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index cd43cbe5..228c645b 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -69,7 +69,7 @@ object Dependencies { val scalapbRuntime = "com.thesamet.scalapb" %% "scalapb-runtime" % scalapbVersion % "protobuf" - val protobufJava = "com.google.protobuf" % "protobuf-java" % "3.11.0" + val protobufJava = "com.google.protobuf" % "protobuf-java" % "3.11.1" val betterFiles = "com.github.pathikrit" %% "better-files" % "3.8.0" From 9c8a0f87541d1231327021e8259c470970cf0bfc Mon Sep 17 00:00:00 2001 From: yk24na Date: Tue, 3 Dec 2019 16:28:23 +0100 Subject: [PATCH 35/35] Update scalatest to 3.0.8 Updates org.scalatest:scalatest from 3.0.5 to 3.0.8. --- .../types/modules/JodaTimeModuleSpec.scala | 10 ++--- .../types/modules/PrimitiveModuleSpec.scala | 12 +++--- .../types/modules/ScalaModulesSpec.scala | 22 +++------- .../ing/baker/il/HashcodeGenerationSpec.scala | 10 +---- project/Dependencies.scala | 2 +- .../ing/baker/recipe/common/HashingSpec.scala | 7 ++-- .../com/ing/baker/BakerRuntimeTestBase.scala | 42 ++++++++++--------- .../ing/baker/pbt/RecipePropertiesSpec.scala | 2 +- .../process_index/ProcessIndexSpec.scala | 26 ++++++------ .../ProcessInstanceSpec.scala | 2 +- .../EncryptionPropertiesSpec.scala | 2 +- .../serialization/SerializationSpec.scala | 16 +++---- .../internal/InteractionManagerSpec.scala | 2 +- .../akka/internal/RecipeRuntimeSpec.scala | 2 +- 14 files changed, 72 insertions(+), 85 deletions(-) diff --git a/bakertypes/src/test/scala/com/ing/baker/types/modules/JodaTimeModuleSpec.scala b/bakertypes/src/test/scala/com/ing/baker/types/modules/JodaTimeModuleSpec.scala index 6a534262..ce81f41f 100644 --- a/bakertypes/src/test/scala/com/ing/baker/types/modules/JodaTimeModuleSpec.scala 
+++ b/bakertypes/src/test/scala/com/ing/baker/types/modules/JodaTimeModuleSpec.scala @@ -1,24 +1,24 @@ package com.ing.baker.types.modules import com.ing.baker.types -import com.ing.baker.types.{Converters, Int64} +import com.ing.baker.types.Converters import org.joda.time.{DateTime, LocalDate, LocalDateTime} import org.scalacheck.Gen import org.scalacheck.Test.Parameters.defaultVerbose -import org.scalatest.prop.Checkers import org.scalatest.{Matchers, WordSpecLike} +import org.scalatestplus.scalacheck.Checkers class JodaTimeModuleSpec extends WordSpecLike with Matchers with Checkers { - val minSuccessfulTests = 100 + private val minSuccessfulTests = 100 // Long.MaxValue is not supported by joda time for local dates, resulting in a integer overflow // This shifts the long max value 1 bit to the right (divides by 2) // This translates to the date: Fri Apr 24 17:36:27 CEST 146140482 - val maxMillis = Long.MaxValue >> 1 + private val maxMillis = Long.MaxValue >> 1 - val numGen: Gen[Long] = Gen.chooseNum[Long]( + private val numGen: Gen[Long] = Gen.chooseNum[Long]( 0L, maxMillis, 0, maxMillis ) diff --git a/bakertypes/src/test/scala/com/ing/baker/types/modules/PrimitiveModuleSpec.scala b/bakertypes/src/test/scala/com/ing/baker/types/modules/PrimitiveModuleSpec.scala index cf53ec1f..efb1a896 100644 --- a/bakertypes/src/test/scala/com/ing/baker/types/modules/PrimitiveModuleSpec.scala +++ b/bakertypes/src/test/scala/com/ing/baker/types/modules/PrimitiveModuleSpec.scala @@ -9,7 +9,7 @@ import com.ing.baker.types.modules.PrimitiveModuleSpec._ import org.scalacheck.Gen import org.scalacheck.Prop.{BooleanOperators, forAll} import org.scalacheck.Test.Parameters.defaultVerbose -import org.scalatest.prop.Checkers +import org.scalatestplus.scalacheck.Checkers import org.scalatest.{Matchers, WordSpecLike} import scala.reflect.runtime.universe.TypeTag @@ -17,15 +17,15 @@ import scala.reflect.runtime.universe.TypeTag object PrimitiveModuleSpec { val intGen: Gen[Int] = 
Gen.chooseNum[Int](Integer.MIN_VALUE, Integer.MAX_VALUE) - val langIntegerGen: Gen[lang.Integer] = intGen.map(Int.box(_)) + val langIntegerGen: Gen[lang.Integer] = intGen.map(Int.box) val longGen: Gen[Long] = Gen.chooseNum[Long](Long.MinValue, Long.MaxValue) - val langLongGen: Gen[lang.Long] = Gen.chooseNum[Long](Long.MinValue, Long.MaxValue).map(Long.box(_)) + val langLongGen: Gen[lang.Long] = Gen.chooseNum[Long](Long.MinValue, Long.MaxValue).map(Long.box) val shortGen: Gen[Short] = Gen.chooseNum[Short](Short.MinValue, Short.MaxValue) - val langShortGen: Gen[lang.Short] = shortGen.map(Short.box(_)) + val langShortGen: Gen[lang.Short] = shortGen.map(Short.box) val floatGen: Gen[Float] = Gen.chooseNum(Float.MinValue, Float.MaxValue) - val langFloatGen: Gen[lang.Float] = floatGen.map(Float.box(_)) + val langFloatGen: Gen[lang.Float] = floatGen.map(Float.box) val doubleGen: Gen[Double] = Gen.chooseNum[Double](Double.MinValue, Double.MaxValue) - val langDoubleGen: Gen[lang.Double] = doubleGen.map(Double.box(_)) + val langDoubleGen: Gen[lang.Double] = doubleGen.map(Double.box) val stringGen: Gen[String] = Gen.alphaStr val bigIntGen: Gen[BigInt] = longGen.map(BigInt(_)) val javaBigIntGen: Gen[java.math.BigInteger] = bigIntGen.map(_.bigInteger) diff --git a/bakertypes/src/test/scala/com/ing/baker/types/modules/ScalaModulesSpec.scala b/bakertypes/src/test/scala/com/ing/baker/types/modules/ScalaModulesSpec.scala index 27910ced..deaedd27 100644 --- a/bakertypes/src/test/scala/com/ing/baker/types/modules/ScalaModulesSpec.scala +++ b/bakertypes/src/test/scala/com/ing/baker/types/modules/ScalaModulesSpec.scala @@ -3,22 +3,22 @@ package com.ing.baker.types.modules import com.ing.baker.types import com.ing.baker.types.Converters.{readJavaType, toJava, toValue} import com.ing.baker.types._ -import org.scalatest.prop.Checkers import org.scalatest.{Matchers, WordSpecLike} +import org.scalatestplus.scalacheck.Checkers class ScalaModulesSpec extends WordSpecLike with Matchers with 
Checkers { - val listValue123 = ListValue(List(PrimitiveValue(1), PrimitiveValue(2), PrimitiveValue(3))) + private val listValue123 = ListValue(List(PrimitiveValue(1), PrimitiveValue(2), PrimitiveValue(3))) - val recordPerson = RecordValue(Map("name" -> PrimitiveValue("john"), "age" -> PrimitiveValue(42))) + private val recordPerson = RecordValue(Map("name" -> PrimitiveValue("john"), "age" -> PrimitiveValue(42))) - val valueMap = RecordValue(Map( + private val valueMap = RecordValue(Map( "a" -> PrimitiveValue(1), "b" -> PrimitiveValue(2), "c" -> PrimitiveValue(3) )) - val scalaMap = Map( + private val scalaMap = Map( "a" -> 1, "b" -> 2, "c" -> 3 @@ -35,24 +35,20 @@ class ScalaModulesSpec extends WordSpecLike with Matchers with Checkers { } "correctly parse set types" in { - readJavaType[Set[String]] shouldBe ListType(types.CharArray) readJavaType[java.util.Set[String]] shouldBe ListType(types.CharArray) } "correctly parse map types" in { - readJavaType[Map[String, Int]] shouldBe MapType(types.Int32) readJavaType[java.util.Map[String, Int]] shouldBe MapType(types.Int32) } "be able to autobox null values to scala Options" in { - toJava[Option[Int]](NullValue) shouldBe None } "be able to autobox primitive values to scala Options" in { - toJava[Option[Int]](PrimitiveValue(42)) shouldBe Some(42) } @@ -61,42 +57,34 @@ class ScalaModulesSpec extends WordSpecLike with Matchers with Checkers { } "be able to parse scala.collection.immutable.List objects" in { - toValue(List(1, 2, 3)) shouldBe listValue123 } "be able to create scala.collection.immutable.List objects" in { - toJava[List[Int]](listValue123) shouldBe List(1, 2, 3) } "be able to parse scala.collection.immutable.Set objects" in { - toValue(Set(1, 2, 3)) shouldBe listValue123 } "be able to create scala.collection.immutable.Set objects" in { - toJava[Set[Int]](listValue123) shouldBe Set(1, 2, 3) } "be able to parse case class objects" in { - toValue(PersonCaseClass("john", 42)) shouldBe recordPerson } "be able to 
create case class objects" in { - toJava[PersonCaseClass](recordPerson) shouldBe PersonCaseClass("john", 42) } "be able to parse scala.collection.immutable.Map objects" in { - toValue(scalaMap) shouldBe valueMap } "be able to create scala.collection.immutable.Map objects" in { - toJava[Map[String, Int]](valueMap) shouldBe scalaMap } } diff --git a/intermediate-language/src/test/scala/com/ing/baker/il/HashcodeGenerationSpec.scala b/intermediate-language/src/test/scala/com/ing/baker/il/HashcodeGenerationSpec.scala index f51faba5..59e482c9 100644 --- a/intermediate-language/src/test/scala/com/ing/baker/il/HashcodeGenerationSpec.scala +++ b/intermediate-language/src/test/scala/com/ing/baker/il/HashcodeGenerationSpec.scala @@ -2,13 +2,9 @@ package com.ing.baker.il import org.scalacheck.{Gen, Prop, Test} import org.scalatest.FunSuite -import org.scalatest.prop.Checkers +import org.scalatestplus.scalacheck.Checkers class HashcodeGenerationSpec extends FunSuite with Checkers { - -// def hash(str: String): Long = str.hashCode // Test fails with this hash function - def hash(str: String): Long = sha256HashCode(str) - test("sha256 hash function") { val prop = Prop.forAll(Gen.alphaNumStr, Gen.alphaNumStr) { (s1: String, s2: String) => { @@ -20,7 +16,5 @@ class HashcodeGenerationSpec extends FunSuite with Checkers { check(prop, Test.Parameters.defaultVerbose.withMinSuccessfulTests(100 * 1000)) } -// test("test2") { -// assert("sr".hashCode != "u4".hashCode) -// } + private def hash(str: String): Long = sha256HashCode(str) } diff --git a/project/Dependencies.scala b/project/Dependencies.scala index cd43cbe5..ee1cf14e 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -16,7 +16,7 @@ object Dependencies { .exclude("com.typesafe.akka", "akka-persistence-query") .exclude("com.typesafe.akka", "akka-stream") - val scalaTest = "org.scalatest" %% "scalatest" % "3.0.5" + val scalaTest = "org.scalatest" %% "scalatest" % "3.0.8" val mockito = "org.mockito" % 
"mockito-all" % "1.10.19" val junitInterface = "com.novocode" % "junit-interface" % "0.11" val junitJupiter = "org.junit.jupiter" % "junit-jupiter-engine" % "5.5.2" diff --git a/recipe-dsl/src/test/scala/com/ing/baker/recipe/common/HashingSpec.scala b/recipe-dsl/src/test/scala/com/ing/baker/recipe/common/HashingSpec.scala index 816b2f21..63ee4fc6 100644 --- a/recipe-dsl/src/test/scala/com/ing/baker/recipe/common/HashingSpec.scala +++ b/recipe-dsl/src/test/scala/com/ing/baker/recipe/common/HashingSpec.scala @@ -7,7 +7,7 @@ import org.scalacheck.Prop.forAll import org.scalacheck.Test.Parameters.defaultVerbose import org.scalacheck.{Arbitrary, Gen, Test} import org.scalatest.FunSuiteLike -import org.scalatest.prop.Checkers +import org.scalatestplus.scalacheck.Checkers class HashingSpec extends FunSuiteLike with Checkers { @@ -16,7 +16,7 @@ class HashingSpec extends FunSuiteLike with Checkers { def hashingLaw[A: Arbitrary](): Unit = { check(forAll { (x: A, y: A) => - if(x == y) x.hashCode() == y.hashCode() + if (x == y) x.hashCode() == y.hashCode() else x.hashCode() != y.hashCode() }, config) } @@ -82,5 +82,4 @@ object HashingSpec { def optionTypeGen: Gen[OptionType] = primitiveTypeGen.map(OptionType) - -} +} \ No newline at end of file diff --git a/runtime/src/test/scala/com/ing/baker/BakerRuntimeTestBase.scala b/runtime/src/test/scala/com/ing/baker/BakerRuntimeTestBase.scala index 5d3835e8..0d42c545 100644 --- a/runtime/src/test/scala/com/ing/baker/BakerRuntimeTestBase.scala +++ b/runtime/src/test/scala/com/ing/baker/BakerRuntimeTestBase.scala @@ -1,20 +1,22 @@ package com.ing.baker +import java.nio.file.Paths +import java.util.UUID + import akka.actor.ActorSystem import akka.testkit.TestKit import com.ing.baker.compiler.RecipeCompiler import com.ing.baker.il.CompiledRecipe -import com.ing.baker.recipe.TestRecipe.{ fireTwoEventsInteraction, _ } -import com.ing.baker.recipe.{ CaseClassIngredient, common } -import com.ing.baker.runtime.scaladsl.{ Baker, EventInstance, 
InteractionInstance } -import com.ing.baker.types.{ Converters, Value } -import com.typesafe.config.{ Config, ConfigFactory } -import java.nio.file.Paths -import java.util.UUID +import com.ing.baker.recipe.TestRecipe.{fireTwoEventsInteraction, _} +import com.ing.baker.recipe.{CaseClassIngredient, common} +import com.ing.baker.runtime.scaladsl.{Baker, EventInstance, InteractionInstance} +import com.ing.baker.types.{Converters, Value} +import com.typesafe.config.{Config, ConfigFactory} import org.mockito.Matchers._ import org.mockito.Mockito._ import org.scalatest._ -import org.scalatest.mockito.MockitoSugar +import org.scalatestplus.mockito.MockitoSugar + import scala.concurrent.Future import scala.concurrent.duration._ import scala.language.postfixOps @@ -103,7 +105,7 @@ trait BakerRuntimeTestBase testProvidesNothingInteractionMock).map(InteractionInstance.unsafeFrom(_)) def writeRecipeToSVGFile(recipe: CompiledRecipe) = { - import guru.nidi.graphviz.engine.{ Format, Graphviz } + import guru.nidi.graphviz.engine.{Format, Graphviz} import guru.nidi.graphviz.parse.Parser val g = Parser.read(recipe.getRecipeVisualization) Graphviz.fromGraph(g).render(Format.SVG).toFile(Paths.get(recipe.name).toFile) @@ -163,9 +165,9 @@ trait BakerRuntimeTestBase s""" |akka { | - | actor.provider = "akka.cluster.ClusterActorRefProvider" + | actor.provider = "akka.cluster.ClusterActorRefProvider" | - | remote { + | remote { | netty.tcp { | hostname = localhost | port = $port @@ -173,7 +175,7 @@ trait BakerRuntimeTestBase | } |} | - |baker { + |baker { | actor.provider = "cluster-sharded" | cluster.seed-nodes = ["akka.tcp://$actorSystemName@localhost:$port"] |} @@ -186,14 +188,14 @@ trait BakerRuntimeTestBase } /** - * Returns a Baker instance that contains a simple recipe that can be used in tests - * It als sets mocks that return happy flow responses for the interactions - * - * This recipe contains: See TestRecipe.png for a visualization - * - * @param recipeName A unique name that is 
needed for the recipe to insure that the tests do not interfere with each other - * @return - */ + * Returns a Baker instance that contains a simple recipe that can be used in tests + * It als sets mocks that return happy flow responses for the interactions + * + * This recipe contains: See TestRecipe.png for a visualization + * + * @param recipeName A unique name that is needed for the recipe to insure that the tests do not interfere with each other + * @return + */ protected def setupBakerWithRecipe(recipeName: String, appendUUIDToTheRecipeName: Boolean = true) (implicit actorSystem: ActorSystem): Future[(Baker, String)] = { val newRecipeName = if (appendUUIDToTheRecipeName) s"$recipeName-${UUID.randomUUID().toString}" else recipeName diff --git a/runtime/src/test/scala/com/ing/baker/pbt/RecipePropertiesSpec.scala b/runtime/src/test/scala/com/ing/baker/pbt/RecipePropertiesSpec.scala index 197d6a98..ca9de605 100644 --- a/runtime/src/test/scala/com/ing/baker/pbt/RecipePropertiesSpec.scala +++ b/runtime/src/test/scala/com/ing/baker/pbt/RecipePropertiesSpec.scala @@ -10,7 +10,7 @@ import org.scalacheck.Prop.forAll import org.scalacheck.Test.Parameters.defaultVerbose import org.scalacheck._ import org.scalatest.FunSuite -import org.scalatest.prop.Checkers +import org.scalatestplus.scalacheck.Checkers import scala.annotation.tailrec import scala.util.Random diff --git a/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/process_index/ProcessIndexSpec.scala b/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/process_index/ProcessIndexSpec.scala index 78ddadc4..89d6f139 100644 --- a/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/process_index/ProcessIndexSpec.scala +++ b/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/process_index/ProcessIndexSpec.scala @@ -1,32 +1,34 @@ package com.ing.baker.runtime.akka.actor.process_index -import akka.actor.{ Actor, ActorRef, ActorSystem, Props } -import akka.testkit.{ ImplicitSender, TestKit, TestProbe } 
-import com.ing.baker.il.petrinet.{ EventTransition, Place, RecipePetriNet, Transition } -import com.ing.baker.il.{ CompiledRecipe, EventDescriptor, IngredientDescriptor } -import com.ing.baker.petrinet.api.{ Marking, PetriNet } +import java.util.UUID + +import akka.actor.{Actor, ActorRef, ActorSystem, Props} +import akka.testkit.{ImplicitSender, TestKit, TestProbe} +import com.ing.baker.il.petrinet.{EventTransition, Place, RecipePetriNet, Transition} +import com.ing.baker.il.{CompiledRecipe, EventDescriptor, IngredientDescriptor} +import com.ing.baker.petrinet.api.{Marking, PetriNet} import com.ing.baker.runtime.akka.actor.process_index.ProcessIndex.CheckForProcessesToBeDeleted import com.ing.baker.runtime.akka.actor.process_index.ProcessIndexProtocol.FireSensoryEventReaction.NotifyWhenReceived import com.ing.baker.runtime.akka.actor.process_index.ProcessIndexProtocol._ import com.ing.baker.runtime.akka.actor.process_instance.ProcessInstanceProtocol import com.ing.baker.runtime.akka.actor.process_instance.ProcessInstanceProtocol._ import com.ing.baker.runtime.akka.actor.recipe_manager.RecipeManagerProtocol -import com.ing.baker.runtime.akka.actor.recipe_manager.RecipeManagerProtocol.{ AllRecipes, GetAllRecipes, RecipeInformation } +import com.ing.baker.runtime.akka.actor.recipe_manager.RecipeManagerProtocol.{AllRecipes, GetAllRecipes, RecipeInformation} import com.ing.baker.runtime.akka.actor.serialization.Encryption import com.ing.baker.runtime.akka.internal.InteractionManager -import com.ing.baker.runtime.scaladsl.{ EventInstance, RecipeInstanceState } +import com.ing.baker.runtime.scaladsl.{EventInstance, RecipeInstanceState} import com.ing.baker.types import com.ing.baker.types.Value -import com.typesafe.config.{ Config, ConfigFactory } -import java.util.UUID +import com.typesafe.config.{Config, ConfigFactory} import org.mockito.Mockito import org.mockito.Mockito.when import org.scalatest.concurrent.Eventually -import org.scalatest.mockito.MockitoSugar -import 
org.scalatest.{ BeforeAndAfter, BeforeAndAfterAll, Matchers, WordSpecLike } -import scala.concurrent.duration._ +import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, Matchers, WordSpecLike} +import org.scalatestplus.mockito.MockitoSugar import scalax.collection.immutable.Graph +import scala.concurrent.duration._ + object ProcessIndexSpec { val config: Config = ConfigFactory.parseString( """ diff --git a/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/process_instance/ProcessInstanceSpec.scala b/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/process_instance/ProcessInstanceSpec.scala index 50aceb7a..e3432290 100644 --- a/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/process_instance/ProcessInstanceSpec.scala +++ b/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/process_instance/ProcessInstanceSpec.scala @@ -26,8 +26,8 @@ import org.mockito.invocation.InvocationOnMock import org.mockito.stubbing.Answer import org.scalatest.Matchers import org.scalatest.concurrent.ScalaFutures -import org.scalatest.mockito.MockitoSugar import org.scalatest.time.{Milliseconds, Span} +import org.scalatestplus.mockito.MockitoSugar import scala.concurrent.Promise import scala.concurrent.duration._ diff --git a/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/serialization/EncryptionPropertiesSpec.scala b/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/serialization/EncryptionPropertiesSpec.scala index 771cc12c..ea7f9a34 100644 --- a/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/serialization/EncryptionPropertiesSpec.scala +++ b/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/serialization/EncryptionPropertiesSpec.scala @@ -4,8 +4,8 @@ import org.scalacheck.Gen._ import org.scalacheck.Prop.forAll import org.scalacheck._ import org.scalatest.FunSuite -import org.scalatest.prop.Checkers import com.ing.baker.runtime.akka.actor.serialization.Encryption._ +import org.scalatestplus.scalacheck.Checkers class 
EncryptionPropertiesSpec extends FunSuite with Checkers { diff --git a/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/serialization/SerializationSpec.scala b/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/serialization/SerializationSpec.scala index 3e6d325f..c95e1a18 100644 --- a/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/serialization/SerializationSpec.scala +++ b/runtime/src/test/scala/com/ing/baker/runtime/akka/actor/serialization/SerializationSpec.scala @@ -28,7 +28,7 @@ import org.scalacheck.Prop.forAll import org.scalacheck.Test.Parameters.defaultVerbose import org.scalacheck._ import org.scalatest.FunSuiteLike -import org.scalatest.prop.Checkers +import org.scalatestplus.scalacheck.Checkers import scala.concurrent.duration._ import scala.reflect.ClassTag @@ -91,7 +91,7 @@ class SerializationSpec extends TestKit(ActorSystem("BakerProtobufSerializerSpec val serialized = serializer.toBinary(m) val deserialized = serializer.fromBinary(serialized, serializer.manifest(m)) deserialized === m && - ctxFromProto(ctxToProto(m)) === Success(m) + ctxFromProto(ctxToProto(m)) === Success(m) } checkFor[ProcessIndexProtocol.Index].run @@ -358,7 +358,9 @@ object SerializationSpec { } yield CreateProcess(recipeId, recipeInstanceId) class SimpleActor extends Actor { - override def receive: Receive = { case _ => () } + override def receive: Receive = { + case _ => () + } } val waitForRetriesGen = Gen.oneOf(true, false) @@ -400,7 +402,7 @@ object SerializationSpec { } yield StopRetryingInteraction(recipeInstanceId, interactionName) val sensoryEventStatusGen: Gen[SensoryEventStatus] = Gen.oneOf( - SensoryEventStatus.AlreadyReceived , + SensoryEventStatus.AlreadyReceived, SensoryEventStatus.Completed, SensoryEventStatus.FiringLimitMet, SensoryEventStatus.Received, @@ -409,9 +411,9 @@ object SerializationSpec { ) val eventResultGen: Gen[SensoryEventResult] = for { - status <- sensoryEventStatusGen - events <- Gen.listOf(Gen.alphaStr) - ingredients <- 
Gen.listOf(Runtime.ingredientsGen) + status <- sensoryEventStatusGen + events <- Gen.listOf(Gen.alphaStr) + ingredients <- Gen.listOf(Runtime.ingredientsGen) } yield SensoryEventResult(status, events, ingredients.toMap) implicit val processEventResponse: Gen[ProcessEventResponse] = for { diff --git a/runtime/src/test/scala/com/ing/baker/runtime/akka/internal/InteractionManagerSpec.scala b/runtime/src/test/scala/com/ing/baker/runtime/akka/internal/InteractionManagerSpec.scala index 7721355d..4a1b274a 100644 --- a/runtime/src/test/scala/com/ing/baker/runtime/akka/internal/InteractionManagerSpec.scala +++ b/runtime/src/test/scala/com/ing/baker/runtime/akka/internal/InteractionManagerSpec.scala @@ -6,8 +6,8 @@ import com.ing.baker.runtime.scaladsl.InteractionInstance import com.ing.baker.types import com.ing.baker.types.Type import org.mockito.Mockito.when -import org.scalatest.mockito.MockitoSugar import org.scalatest.{Matchers, WordSpecLike} +import org.scalatestplus.mockito.MockitoSugar class InteractionManagerSpec extends WordSpecLike with Matchers with MockitoSugar { "getImplementation" should { diff --git a/runtime/src/test/scala/com/ing/baker/runtime/akka/internal/RecipeRuntimeSpec.scala b/runtime/src/test/scala/com/ing/baker/runtime/akka/internal/RecipeRuntimeSpec.scala index 0d154bc4..b2455c59 100644 --- a/runtime/src/test/scala/com/ing/baker/runtime/akka/internal/RecipeRuntimeSpec.scala +++ b/runtime/src/test/scala/com/ing/baker/runtime/akka/internal/RecipeRuntimeSpec.scala @@ -8,8 +8,8 @@ import com.ing.baker.runtime.scaladsl.RecipeInstanceState import com.ing.baker.types.Value import com.ing.baker.{il, types} import org.mockito.Mockito._ -import org.scalatest.mockito.MockitoSugar import org.scalatest.{Matchers, WordSpecLike} +import org.scalatestplus.mockito.MockitoSugar class RecipeRuntimeSpec extends WordSpecLike with Matchers with MockitoSugar { "The recipe runtime" should {