diff --git a/.run/NussknackerApp.run.xml b/.run/NussknackerApp.run.xml
index 4f08f2afcf2..deda8970b3a 100644
--- a/.run/NussknackerApp.run.xml
+++ b/.run/NussknackerApp.run.xml
@@ -1,6 +1,6 @@
-
+
@@ -9,15 +9,16 @@
+
+
+
-
-
@@ -34,4 +35,4 @@
-
+
\ No newline at end of file
diff --git a/.run/NussknackerRemoteDebug.run.xml b/.run/NussknackerRemoteDebug.run.xml
index ea46b4eabdc..13e251d108a 100644
--- a/.run/NussknackerRemoteDebug.run.xml
+++ b/.run/NussknackerRemoteDebug.run.xml
@@ -1,16 +1,15 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/build.sbt b/build.sbt
index bcd09bc56a6..7d4961e9d35 100644
--- a/build.sbt
+++ b/build.sbt
@@ -38,10 +38,6 @@ lazy val silencerV_2_12 = "1.6.0"
def propOrEnv(name: String, default: String): String = propOrEnv(name).getOrElse(default)
def propOrEnv(name: String): Option[String] = Option(System.getProperty(name)).orElse(sys.env.get(name))
-//by default we include flink and scala, we want to be able to disable this behaviour for performance reasons
-val includeFlinkAndScala = propOrEnv("includeFlinkAndScala", "true").toBoolean
-
-val flinkScope = if (includeFlinkAndScala) "compile" else "provided"
val nexusUrlFromProps = propOrEnv("nexusUrl")
//TODO: this is pretty clunky, but works so far for our case...
val nexusHostFromProps = nexusUrlFromProps.map(_.replaceAll("http[s]?://", "").replaceAll("[:/].*", ""))
@@ -103,7 +99,7 @@ lazy val publishSettings = Seq(
)
def defaultMergeStrategy: String => MergeStrategy = {
- // remove JPMS module descriptors (a proper soultion would be to merge them)
+ // remove JPMS module descriptors (a proper solution would be to merge them)
case PathList(ps @ _*) if ps.last == "module-info.class" => MergeStrategy.discard
// we override Spring's class and we want to keep only our implementation
case PathList(ps @ _*) if ps.last == "NumberUtils.class" => MergeStrategy.first
@@ -118,7 +114,8 @@ def designerMergeStrategy: String => MergeStrategy = {
// https://tapir.softwaremill.com/en/latest/docs/openapi.html#using-swaggerui-with-sbt-assembly
case PathList("META-INF", "maven", "org.webjars", "swagger-ui", "pom.properties") =>
MergeStrategy.singleOrError
- case x => defaultMergeStrategy(x)
+ case x =>
+ defaultMergeStrategy(x)
}
val scalaTestReports = Tests.Argument(TestFrameworks.ScalaTest, "-u", "target/surefire-reports", "-oFGD")
@@ -438,7 +435,8 @@ def assemblySettings(
includeScala: Boolean,
filterProvidedDeps: Boolean = true
): List[Def.SettingsDefinition] = {
- // This work around need to be optional because for designer module it causes excluding of scala lib (because we has there other work around for Idea classpath and provided deps)
+ // This workaround needs to be optional because for the designer module it causes excluding of the scala lib
+ // (because we have there other work around for Idea classpath and provided deps)
val filterProvidedDepsSettingOpt = if (filterProvidedDeps) {
Some(
// For some reason problem described in https://github.com/sbt/sbt-assembly/issues/295 appears, workaround also works...
@@ -470,7 +468,7 @@ lazy val modelArtifacts = taskKey[List[(File, String)]]("model artifacts")
lazy val devArtifacts = taskKey[List[(File, String)]]("dev artifacts")
-lazy val managerArtifacts = taskKey[List[(File, String)]]("manager artifacts")
+lazy val deploymentManagerArtifacts = taskKey[List[(File, String)]]("deployment manager artifacts")
def filterDevConfigArtifacts(files: Seq[(File, String)]) = {
val devConfigFiles = Set("dev-tables-definition.sql", "dev-application.conf", "dev-oauth2-users.conf")
@@ -482,7 +480,7 @@ lazy val distribution: Project = sbt
.settings(commonSettings)
.enablePlugins(JavaAgent, SbtNativePackager, JavaServerAppPackaging)
.settings(
- managerArtifacts := {
+ deploymentManagerArtifacts := {
List(
(flinkDeploymentManager / assembly).value -> "managers/nussknacker-flink-manager.jar",
(liteK8sDeploymentManager / assembly).value -> "managers/lite-k8s-manager.jar",
@@ -520,7 +518,7 @@ lazy val distribution: Project = sbt
else filterDevConfigArtifacts((Universal / mappings).value)
universalMappingsWithDevConfigFilter ++
- (managerArtifacts).value ++
+ (deploymentManagerArtifacts).value ++
(componentArtifacts).value ++
(if (addDevArtifacts)
Seq((developmentTestsDeploymentManager / assembly).value -> "managers/development-tests-manager.jar")
@@ -610,17 +608,17 @@ lazy val flinkDeploymentManager = (project in flink("management"))
libraryDependencies ++= {
Seq(
"org.typelevel" %% "cats-core" % catsV % Provided,
- ("org.apache.flink" % "flink-streaming-java" % flinkV % flinkScope)
+ ("org.apache.flink" % "flink-streaming-java" % flinkV)
.excludeAll(
ExclusionRule("log4j", "log4j"),
ExclusionRule("org.slf4j", "slf4j-log4j12"),
ExclusionRule("com.esotericsoftware", "kryo-shaded"),
),
- "org.apache.flink" % "flink-statebackend-rocksdb" % flinkV % flinkScope,
+ "org.apache.flink" % "flink-statebackend-rocksdb" % flinkV,
"com.softwaremill.retry" %% "retry" % retryV,
"org.wiremock" % "wiremock" % wireMockV % Test,
"org.scalatestplus" %% "mockito-5-10" % scalaTestPlusV % Test,
- ) ++ flinkLibScalaDeps(scalaVersion.value, Some(flinkScope))
+ ) ++ flinkLibScalaDeps(scalaVersion.value)
},
// override scala-collection-compat from com.softwaremill.retry:retry
dependencyOverrides += "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionsCompatV
@@ -1908,6 +1906,10 @@ lazy val deploymentManagerApi = (project in file("designer/deployment-manager-ap
)
.dependsOn(extensionsApi, testUtils % Test)
+lazy val prepareDesignerTests = taskKey[Unit]("Prepare all necessary artifacts before running designer module tests")
+lazy val prepareDesignerSlowTests =
+ taskKey[Unit]("Prepare all necessary artifacts before running designer module slow tests")
+
lazy val designer = (project in file("designer/server"))
.configs(SlowTests)
.enablePlugins(GenerateDesignerOpenApiPlugin)
@@ -1916,7 +1918,7 @@ lazy val designer = (project in file("designer/server"))
.settings(
assemblySettings(
"nussknacker-designer-assembly.jar",
- includeScala = includeFlinkAndScala,
+ includeScala = true,
filterProvidedDeps = false
): _*
)
@@ -1938,24 +1940,36 @@ lazy val designer = (project in file("designer/server"))
CopyOptions.apply(overwrite = true, preserveLastModified = true, preserveExecutable = false)
)
},
- ThisBuild / parallelExecution := false,
SlowTests / test := (SlowTests / test)
.dependsOn(
flinkDevModel / Compile / assembly,
flinkExecutor / Compile / assembly
)
.value,
- Test / test := (Test / test)
- .dependsOn(
- defaultModel / Compile / assembly,
- flinkTableApiComponents / Compile / assembly,
- flinkDevModel / Compile / assembly,
- flinkExecutor / Compile / assembly,
- flinkExecutor / prepareItLibs
- )
- .value,
+ prepareDesignerSlowTests := {
+ (flinkDeploymentManager / assembly).value
+ (liteEmbeddedDeploymentManager / assembly).value
+ (liteK8sDeploymentManager / assembly).value
+ (flinkDevModel / assembly).value
+ (flinkExecutor / assembly).value
+ },
+ prepareDesignerTests := {
+ (flinkDeploymentManager / assembly).value
+ (liteEmbeddedDeploymentManager / assembly).value
+ (liteK8sDeploymentManager / assembly).value
+ (defaultModel / assembly).value
+ (flinkTableApiComponents / assembly).value
+ (flinkDevModel / assembly).value
+ (flinkExecutor / assembly).value
+ (flinkExecutor / prepareItLibs).value
+ },
+ ThisBuild / parallelExecution := false,
+ SlowTests / test := (SlowTests / test).dependsOn(prepareDesignerSlowTests).value,
+ SlowTests / testOptions += Tests.Setup(() => prepareDesignerSlowTests.value),
+ Test / test := (Test / test).dependsOn(prepareDesignerTests).value,
+ Test / testOptions += Tests.Setup(() => prepareDesignerTests.value),
/*
- We depend on copyClientDist in packageBin and assembly to be make sure fe files will be included in jar and fajar
+ We depend on copyClientDist in packageBin and assembly to make sure FE files will be included in jar and fatjar
We abuse sbt a little bit, but we don't want to put webpack in generate resources phase, as it's long and it would
make compilation v. long. This is not too nice, but so far only alternative is to put designer dists copyClientDist outside sbt and
use bash to control when it's done - and this can lead to bugs and edge cases (release, dist/docker, dist/tgz, assembly...)
@@ -1994,6 +2008,7 @@ lazy val designer = (project in file("designer/server"))
"org.apache.xmlgraphics" % "fop" % "2.9" exclude ("commons-logging", "commons-logging"),
"com.beachape" %% "enumeratum-circe" % enumeratumV,
"tf.tofu" %% "derevo-circe" % "0.13.0",
+ "com.softwaremill.retry" %% "retry" % retryV,
"com.softwaremill.sttp.apispec" %% "openapi-circe-yaml" % openapiCirceYamlV,
"com.github.tminglei" %% "slick-pg" % slickPgV,
"com.softwaremill.sttp.tapir" %% "tapir-akka-http-server" % tapirV,
@@ -2033,24 +2048,16 @@ lazy val designer = (project in file("designer/server"))
processReports,
security,
deploymentManagerApi,
+ componentsApi,
restmodel,
listenerApi,
configLoaderApi,
defaultHelpers % Test,
testUtils % Test,
flinkTestUtils % Test,
- componentsApi % "test->test",
- // All DeploymentManager dependencies are added because they are needed to run NussknackerApp* with
- // dev-application.conf. Currently, we doesn't have a separate classpath for DMs like we have for components.
- // schemedKafkaComponentsUtils is added because loading the provided liteEmbeddedDeploymentManager causes
- // that are also load added their test dependencies on the classpath by the Idea. It causes that
- // UniversalKafkaSourceFactory is loaded from app classloader and GenericRecord which is defined in typesToExtract
- // is missing from this classloader
- flinkDeploymentManager % Provided,
- liteEmbeddedDeploymentManager % Provided,
- liteK8sDeploymentManager % Provided,
- developmentTestsDeploymentManager % Provided,
- schemedKafkaComponentsUtils % Provided,
+ developmentTestsDeploymentManager % Test,
+ kafkaComponentsUtils % Test,
+ componentsApi % "test->test"
)
lazy val e2eTests = (project in file("e2e-tests"))
@@ -2254,10 +2261,10 @@ prepareDev := {
(flinkExecutor / prepareItLibs).value
val workTarget = (designer / baseDirectory).value / "work"
val artifacts =
- (distribution / componentArtifacts).value ++ (distribution / devArtifacts).value ++ developmentTestsDeployManagerArtifacts.value ++
- Def
- .taskDyn(if (addManagerArtifacts) distribution / managerArtifacts else Def.task[List[(File, String)]](Nil))
- .value ++
+ (distribution / componentArtifacts).value ++
+ (distribution / devArtifacts).value ++
+ developmentTestsDeployManagerArtifacts.value ++
+ (distribution / deploymentManagerArtifacts).value ++
(flinkExecutor / additionalBundledArtifacts).value
IO.copy(artifacts.map { case (source, target) => (source, workTarget / target) })
(designer / copyClientDist).value
diff --git a/designer/restmodel/src/main/scala/pl/touk/nussknacker/restmodel/definition/package.scala b/designer/restmodel/src/main/scala/pl/touk/nussknacker/restmodel/definition/package.scala
index 03b3dd8b4c0..c1557ca5260 100644
--- a/designer/restmodel/src/main/scala/pl/touk/nussknacker/restmodel/definition/package.scala
+++ b/designer/restmodel/src/main/scala/pl/touk/nussknacker/restmodel/definition/package.scala
@@ -36,7 +36,7 @@ package object definition {
name: String,
typ: TypingResult,
editor: ParameterEditor,
- // It it used for node parameter adjustment on FE side (see ParametersUtils.ts -> adjustParameters)
+ // It is used for node parameter adjustment on FE side (see ParametersUtils.ts -> adjustParameters)
defaultValue: Expression,
// additionalVariables and variablesToHide are served to FE because suggestions API requires full set of variables
// and ScenarioWithDetails.json.validationResult.nodeResults is not enough
diff --git a/designer/server/src/main/resources/defaultDesignerConfig.conf b/designer/server/src/main/resources/defaultDesignerConfig.conf
index 9084c19c84b..4792ce16ff2 100644
--- a/designer/server/src/main/resources/defaultDesignerConfig.conf
+++ b/designer/server/src/main/resources/defaultDesignerConfig.conf
@@ -1,6 +1,9 @@
-#We use defaultUConfig.conf instead of reference.conf, as we don't want these properties in config loaded in model configuration
-#This configuration file contains sensible designer defaults for all Nussknacker deployments, without assumptions about deployment models and external tools (grafana, flink etc.)
-#All models configurations also shouldn't be in this file
+ # We use this file instead of reference.conf, as we don't want these properties in the config loaded in model configuration
+ # This configuration file contains sensible designer defaults for all Nussknacker deployments, without assumptions about deployment
+ # models and external tools (grafana, flink etc.). Model configurations also shouldn't be in this file
+
+managersDirs: ["./managers"]
+managersDirs: [ ${?MANAGERS_DIR} ]
storageDir: ./storage
storageDir: ${?STORAGE_DIR}
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/LocalNussknackerWithSingleModel.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/LocalNussknackerWithSingleModel.scala
index 25dc03d5742..74aa94fad22 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/LocalNussknackerWithSingleModel.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/LocalNussknackerWithSingleModel.scala
@@ -3,8 +3,9 @@ package pl.touk.nussknacker.ui
import cats.effect.{IO, Resource}
import com.typesafe.config.{Config, ConfigFactory, ConfigValue, ConfigValueFactory}
import org.apache.commons.io.FileUtils
+import pl.touk.nussknacker.engine.util.loader.DeploymentManagersClassLoader
import pl.touk.nussknacker.engine.{DeploymentManagerProvider, ModelData}
-import pl.touk.nussknacker.ui.config.DesignerConfigLoader
+import pl.touk.nussknacker.ui.config.{DesignerConfig, SimpleConfigLoadingDesignerConfigLoader}
import pl.touk.nussknacker.ui.factory.NussknackerAppFactory
import pl.touk.nussknacker.ui.process.processingtype.loader.LocalProcessingTypeDataLoader
@@ -49,15 +50,21 @@ object LocalNussknackerWithSingleModel {
modelData = Map(typeName -> (category, modelData)),
deploymentManagerProvider = deploymentManagerProvider
)
- val designerConfigLoader = DesignerConfigLoader.fromConfig(
+ val designerConfig = DesignerConfig.from(
// This map is ignored but must exist
appConfig.withValue("scenarioTypes", ConfigValueFactory.fromMap(Map.empty[String, ConfigValue].asJava))
)
- val appFactory = new NussknackerAppFactory(
- designerConfigLoader,
- _ => local
- )
- appFactory.createApp()
+ for {
+ deploymentManagersClassLoader <- DeploymentManagersClassLoader.create(List.empty)
+ designerConfigLoader = new SimpleConfigLoadingDesignerConfigLoader(designerConfig.rawConfig.resolved)
+ appFactory = new NussknackerAppFactory(
+ designerConfig,
+ designerConfigLoader,
+ _ => local,
+ deploymentManagersClassLoader
+ )
+ app <- appFactory.createApp()
+ } yield app
}
// TODO: easier way of handling users file
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/NussknackerApp.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/NussknackerApp.scala
index f7b4c356e2c..08e11be9e10 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/NussknackerApp.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/NussknackerApp.scala
@@ -1,16 +1,18 @@
package pl.touk.nussknacker.ui
import cats.effect.{ExitCode, IO, IOApp}
-import pl.touk.nussknacker.ui.config.{AlwaysLoadingFileBasedDesignerConfigLoader, DesignerConfigLoader}
+import pl.touk.nussknacker.ui.config.AlwaysLoadingFileBasedDesignerConfigLoader
import pl.touk.nussknacker.ui.factory.NussknackerAppFactory
object NussknackerApp extends IOApp {
override def run(args: List[String]): IO[ExitCode] = {
- for {
- appFactory <- IO(NussknackerAppFactory(AlwaysLoadingFileBasedDesignerConfigLoader(getClass.getClassLoader)))
- _ <- appFactory.createApp().use { _ => IO.never }
- } yield ExitCode.Success
+ program.useForever.as(ExitCode.Success)
}
+ private def program = for {
+ appFactory <- NussknackerAppFactory.create(AlwaysLoadingFileBasedDesignerConfigLoader(getClass.getClassLoader))
+ _ <- appFactory.createApp()
+ } yield ()
+
}
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/additionalInfo/AdditionalInfoProviders.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/additionalInfo/AdditionalInfoProviders.scala
index b9f8cfb1892..4ab75ddce82 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/additionalInfo/AdditionalInfoProviders.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/additionalInfo/AdditionalInfoProviders.scala
@@ -21,7 +21,7 @@ class AdditionalInfoProviders(typeToConfig: ProcessingTypeDataProvider[ModelData
private val nodeProviders: ProcessingTypeDataProvider[Option[NodeData => Future[Option[AdditionalInfo]]], _] =
typeToConfig.mapValues(pt =>
ScalaServiceLoader
- .load[AdditionalInfoProvider](pt.modelClassLoader.classLoader)
+ .load[AdditionalInfoProvider](pt.modelClassLoader)
.headOption
.map(_.nodeAdditionalInfo(pt.modelConfig))
)
@@ -29,7 +29,7 @@ class AdditionalInfoProviders(typeToConfig: ProcessingTypeDataProvider[ModelData
private val propertiesProviders: ProcessingTypeDataProvider[Option[MetaData => Future[Option[AdditionalInfo]]], _] =
typeToConfig.mapValues(pt =>
ScalaServiceLoader
- .load[AdditionalInfoProvider](pt.modelClassLoader.classLoader)
+ .load[AdditionalInfoProvider](pt.modelClassLoader)
.headOption
.map(_.propertiesAdditionalInfo(pt.modelConfig))
)
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/AppApiHttpService.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/AppApiHttpService.scala
index 77c0dff72dd..35bdf044e8e 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/AppApiHttpService.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/AppApiHttpService.scala
@@ -6,6 +6,7 @@ import io.circe.parser
import pl.touk.nussknacker.engine.api.deployment.ProcessState
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus.ProblemStateStatus
import pl.touk.nussknacker.engine.api.process.{ProcessName, ProcessingType}
+import pl.touk.nussknacker.engine.util.ExecutionContextWithIORuntime
import pl.touk.nussknacker.engine.util.Implicits.RichTupleList
import pl.touk.nussknacker.engine.version.BuildInfo
import pl.touk.nussknacker.ui.api.description.AppApiEndpoints
@@ -17,7 +18,6 @@ import pl.touk.nussknacker.ui.process.processingtype.provider.{
}
import pl.touk.nussknacker.ui.process.{ProcessService, ScenarioQuery}
import pl.touk.nussknacker.ui.security.api.{AuthManager, LoggedUser, NussknackerInternalUser}
-import pl.touk.nussknacker.ui.util.ExecutionContextWithIORuntime
import scala.concurrent.Future
import scala.util.control.NonFatal
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/NodesApiHttpService.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/NodesApiHttpService.scala
index b01aac1d4db..21321abd647 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/NodesApiHttpService.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/NodesApiHttpService.scala
@@ -179,7 +179,7 @@ class NodesApiHttpService(
modelData: ModelData
): EitherT[Future, NodesError, NodeValidationRequest] = {
EitherT.fromEither(
- fromNodeRequestDto(nodeValidationRequestDto)(prepareTypingResultDecoder(modelData.modelClassLoader.classLoader))
+ fromNodeRequestDto(nodeValidationRequestDto)(prepareTypingResultDecoder(modelData.modelClassLoader))
)
}
@@ -226,7 +226,7 @@ class NodesApiHttpService(
localVariables <- EitherT.fromEither[Future](
decodeVariableTypes(
request.variableTypes,
- prepareTypingResultDecoder(modelData.modelClassLoader.classLoader)
+ prepareTypingResultDecoder(modelData.modelClassLoader)
)
)
suggestions <- EitherT.right(
@@ -264,7 +264,7 @@ class NodesApiHttpService(
request: ParametersValidationRequestDto,
modelData: ModelData
): Either[NodesError, ParametersValidationRequest] = {
- val typingResultDecoder = prepareTypingResultDecoder(modelData.modelClassLoader.classLoader)
+ val typingResultDecoder = prepareTypingResultDecoder(modelData.modelClassLoader)
for {
parameters <- request.parameters.map { parameter =>
typingResultDecoder
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/description/NodesApiEndpoints.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/description/NodesApiEndpoints.scala
index cedbe843136..f9aedd998fc 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/description/NodesApiEndpoints.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/description/NodesApiEndpoints.scala
@@ -1501,7 +1501,7 @@ object NodesApiEndpoints {
}
-object TypingDtoSchemas { // todo
+object TypingDtoSchemas {
import pl.touk.nussknacker.engine.api.typed.typing._
import sttp.tapir.Schema.SName
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/config/DesignerConfig.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/config/DesignerConfig.scala
index d56720136c1..9747b4f487f 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/config/DesignerConfig.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/config/DesignerConfig.scala
@@ -2,13 +2,19 @@ package pl.touk.nussknacker.ui.config
import com.typesafe.config.{Config, ConfigFactory}
import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap
+import pl.touk.nussknacker.engine.util.StringUtils._
import pl.touk.nussknacker.engine.{ConfigWithUnresolvedVersion, ProcessingTypeConfig}
+import pl.touk.nussknacker.ui.config.DesignerConfig.ConfigurationMalformedException
import pl.touk.nussknacker.ui.configloader.ProcessingTypeConfigs
+import java.nio.file.{Files, Path, Paths}
+import scala.jdk.CollectionConverters._
+
// TODO: We should extract a class for all configuration options that should be available to designer instead of returning raw hocon config.
// Thanks to that it will be easier to split processing type config from rest of configs and use this interface programmatically
final case class DesignerConfig private (rawConfig: ConfigWithUnresolvedVersion) {
+ import DesignerConfig._
import net.ceedubs.ficus.Ficus._
def processingTypeConfigs: ProcessingTypeConfigs =
@@ -18,11 +24,31 @@ final case class DesignerConfig private (rawConfig: ConfigWithUnresolvedVersion)
rawConfig
.getConfigOpt("scenarioTypes")
.getOrElse {
- throw new RuntimeException("No scenario types configuration provided")
+ throw ConfigurationMalformedException("No scenario types configuration provided")
}
def configLoaderConfig: Config = rawConfig.resolved.getAs[Config]("configLoader").getOrElse(ConfigFactory.empty())
+ def managersDirs(): List[Path] = {
+ val managersPath = "managersDirs"
+ if (rawConfig.resolved.hasPath(managersPath)) {
+ val managersDirs = rawConfig.resolved.getStringList(managersPath).asScala.toList
+ val paths = managersDirs.map(_.convertToURL().toURI).map(Paths.get)
+ val invalidPaths = paths
+ .map(p => (p, !Files.isDirectory(p)))
+ .collect { case (p, true) => p }
+
+ if (invalidPaths.isEmpty)
+ paths
+ else
+ throw ConfigurationMalformedException(
+ s"Cannot find the following directories: ${invalidPaths.mkString(", ")}"
+ )
+ } else {
+ throw ConfigurationMalformedException(s"No '$managersPath' configuration path found")
+ }
+ }
+
}
object DesignerConfig {
@@ -31,4 +57,5 @@ object DesignerConfig {
DesignerConfig(ConfigWithUnresolvedVersion(config))
}
+ final case class ConfigurationMalformedException(msg: String) extends RuntimeException(msg)
}
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/config/DesignerConfigLoader.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/config/DesignerConfigLoader.scala
index f0f00d3bb4e..67199d824c6 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/config/DesignerConfigLoader.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/config/DesignerConfigLoader.scala
@@ -54,10 +54,3 @@ class SimpleConfigLoadingDesignerConfigLoader(loadConfig: => Config) extends Des
override def loadDesignerConfig(): IO[DesignerConfig] = IO.delay(DesignerConfig.from(loadConfig))
}
-
-object DesignerConfigLoader {
-
- def fromConfig(loadConfig: => Config): SimpleConfigLoadingDesignerConfigLoader =
- new SimpleConfigLoadingDesignerConfigLoader(loadConfig)
-
-}
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/definition/AlignedComponentsDefinitionProvider.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/definition/AlignedComponentsDefinitionProvider.scala
index e992a79ad7a..c3bb4b46f04 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/definition/AlignedComponentsDefinitionProvider.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/definition/AlignedComponentsDefinitionProvider.scala
@@ -58,7 +58,7 @@ object AlignedComponentsDefinitionProvider {
new AlignedComponentsDefinitionProvider(
new BuiltInComponentsDefinitionsPreparer(designerModelData.modelData.componentsUiConfig),
new FragmentComponentDefinitionExtractor(
- designerModelData.modelData.modelClassLoader.classLoader,
+ designerModelData.modelData.modelClassLoader,
designerModelData.modelData.modelDefinitionWithClasses.classDefinitions.all,
designerModelData.modelData.componentsUiConfig.groupName,
designerModelData.modelData.determineDesignerWideId
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/extrajs/ExtraScriptsListingPreparer.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/extrajs/ExtraScriptsListingPreparer.scala
index d5df18a1856..ce7a1a92e88 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/extrajs/ExtraScriptsListingPreparer.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/extrajs/ExtraScriptsListingPreparer.scala
@@ -9,7 +9,7 @@ import scala.io.Source
// The purpose of this listing is to be possible to dynamically (without changing application image)
// add some java scripts to our main.html. Example usage:
//
-// docker run -it --network host -e CLASSPATH="/opt/nussknacker/lib/*:/opt/nussknacker/managers/*:/opt/nussknacker/extra-resources"
+// docker run -it --network host -e CLASSPATH="/opt/nussknacker/lib/*:/opt/nussknacker/extra-resources"
// -v ./extrajs:/opt/nussknacker/extra-resources/web/static/extra touk/nussknacker:latest
//
// After this, all *.js in the extrajs directory will be injected into main.html in the lexicographic order. Notice that if you want to locally
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/factory/NussknackerAppFactory.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/factory/NussknackerAppFactory.scala
index 44787d0477d..63c76befde1 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/factory/NussknackerAppFactory.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/factory/NussknackerAppFactory.scala
@@ -7,10 +7,14 @@ import cats.effect.{IO, Resource}
import com.typesafe.scalalogging.LazyLogging
import io.dropwizard.metrics5.MetricRegistry
import io.dropwizard.metrics5.jmx.JmxReporter
-import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap
-import pl.touk.nussknacker.engine.util.loader.ScalaServiceLoader
-import pl.touk.nussknacker.engine.util.{JavaClassVersionChecker, SLF4JBridgeHandlerRegistrar}
import pl.touk.nussknacker.engine.{ConfigWithUnresolvedVersion, ProcessingTypeConfig}
+import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap
+import pl.touk.nussknacker.engine.util.loader.{DeploymentManagersClassLoader, ScalaServiceLoader}
+import pl.touk.nussknacker.engine.util.{
+ ExecutionContextWithIORuntimeAdapter,
+ JavaClassVersionChecker,
+ SLF4JBridgeHandlerRegistrar
+}
import pl.touk.nussknacker.ui.config.{DesignerConfig, DesignerConfigLoader}
import pl.touk.nussknacker.ui.configloader.{ProcessingTypeConfigsLoader, ProcessingTypeConfigsLoaderFactory}
import pl.touk.nussknacker.ui.db.DbRef
@@ -21,37 +25,60 @@ import pl.touk.nussknacker.ui.process.processingtype.loader.{
}
import pl.touk.nussknacker.ui.process.processingtype.{ModelClassLoaderDependencies, ModelClassLoaderProvider}
import pl.touk.nussknacker.ui.server.{AkkaHttpBasedRouteProvider, NussknackerHttpServer}
-import pl.touk.nussknacker.ui.util.{ActorSystemBasedExecutionContextWithIORuntime, IOToFutureSttpBackendConverter}
+import pl.touk.nussknacker.ui.util.IOToFutureSttpBackendConverter
import sttp.client3.SttpBackend
import sttp.client3.asynchttpclient.cats.AsyncHttpClientCatsBackend
import java.time.Clock
+object NussknackerAppFactory {
+
+ def create(designerConfigLoader: DesignerConfigLoader): Resource[IO, NussknackerAppFactory] = {
+ for {
+ designerConfig <- Resource.eval(designerConfigLoader.loadDesignerConfig())
+ managersDirs <- Resource.eval(IO.delay(designerConfig.managersDirs()))
+ deploymentManagerClassLoader <- DeploymentManagersClassLoader.create(managersDirs)
+ } yield new NussknackerAppFactory(
+ designerConfig,
+ designerConfigLoader,
+ new ProcessingTypesConfigBasedProcessingTypeDataLoader(_, deploymentManagerClassLoader),
+ deploymentManagerClassLoader
+ )
+ }
+
+}
+
class NussknackerAppFactory(
+ alreadyLoadedConfig: DesignerConfig,
designerConfigLoader: DesignerConfigLoader,
- createProcessingTypeDataLoader: ProcessingTypeConfigsLoader => ProcessingTypeDataLoader
+ createProcessingTypeDataLoader: ProcessingTypeConfigsLoader => ProcessingTypeDataLoader,
+ deploymentManagersClassLoader: DeploymentManagersClassLoader
) extends LazyLogging {
def createApp(clock: Clock = Clock.systemUTC()): Resource[IO, Unit] = {
for {
- designerConfig <- Resource.eval(designerConfigLoader.loadDesignerConfig())
- system <- createActorSystem(designerConfig.rawConfig)
- executionContextWithIORuntime = ActorSystemBasedExecutionContextWithIORuntime.createFrom(system)
- ioSttpBackend <- AsyncHttpClientCatsBackend.resource[IO]()
+ system <- createActorSystem(alreadyLoadedConfig.rawConfig)
+ executionContextWithIORuntime <- ExecutionContextWithIORuntimeAdapter.createFrom(system.dispatcher)
+ ioSttpBackend <- AsyncHttpClientCatsBackend.resource[IO]()
processingTypeConfigsLoader = createProcessingTypeConfigsLoader(
- designerConfig,
+ alreadyLoadedConfig,
ioSttpBackend
)(executionContextWithIORuntime.ioRuntime)
modelClassLoaderProvider = createModelClassLoaderProvider(
- designerConfig.processingTypeConfigs.configByProcessingType
+ alreadyLoadedConfig.processingTypeConfigs.configByProcessingType,
+ deploymentManagersClassLoader
)
processingTypeDataLoader = createProcessingTypeDataLoader(processingTypeConfigsLoader)
materializer = Materializer(system)
- _ <- Resource.eval(IO(JavaClassVersionChecker.check()))
- _ <- Resource.eval(IO(SLF4JBridgeHandlerRegistrar.register()))
- metricsRegistry <- createGeneralPurposeMetricsRegistry()
- db <- DbRef.create(designerConfig.rawConfig.resolved)
- feStatisticsRepository <- QuestDbFEStatisticsRepository.create(system, clock, designerConfig.rawConfig.resolved)
+ _ <- Resource.eval(IO(JavaClassVersionChecker.check()))
+ _ <- Resource.eval(IO(SLF4JBridgeHandlerRegistrar.register()))
+ metricsRegistry <- createGeneralPurposeMetricsRegistry()
+ db <- DbRef.create(alreadyLoadedConfig.rawConfig.resolved)
+ feStatisticsRepository <- QuestDbFEStatisticsRepository.create(
+ system,
+ clock,
+ alreadyLoadedConfig.rawConfig.resolved
+ )
server = new NussknackerHttpServer(
new AkkaHttpBasedRouteProvider(
db,
@@ -68,7 +95,7 @@ class NussknackerAppFactory(
),
system
)
- _ <- server.start(designerConfig, metricsRegistry)
+ _ <- server.start(alreadyLoadedConfig, metricsRegistry)
_ <- startJmxReporter(metricsRegistry)
_ <- createStartAndStopLoggingEntries()
} yield ()
@@ -123,22 +150,13 @@ class NussknackerAppFactory(
}
private def createModelClassLoaderProvider(
- processingTypeConfigs: Map[String, ProcessingTypeConfig]
+ processingTypeConfigs: Map[String, ProcessingTypeConfig],
+ deploymentManagersClassLoader: DeploymentManagersClassLoader
): ModelClassLoaderProvider = {
val defaultWorkingDirOpt = None
ModelClassLoaderProvider(
- processingTypeConfigs.mapValuesNow(c => ModelClassLoaderDependencies(c.classPath, defaultWorkingDirOpt))
- )
- }
-
-}
-
-object NussknackerAppFactory {
-
- def apply(designerConfigLoader: DesignerConfigLoader): NussknackerAppFactory = {
- new NussknackerAppFactory(
- designerConfigLoader,
- new ProcessingTypesConfigBasedProcessingTypeDataLoader(_)
+ processingTypeConfigs.mapValuesNow(c => ModelClassLoaderDependencies(c.classPath, defaultWorkingDirOpt)),
+ deploymentManagersClassLoader
)
}
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicDeploymentManager.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicDeploymentManager.scala
index 5242de64a68..5a143896590 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicDeploymentManager.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicDeploymentManager.scala
@@ -242,7 +242,7 @@ class PeriodicDeploymentManager private[periodic] (
// We should move periodic mechanism to the core and reuse new synchronization mechanism also in this case.
override def deploymentSynchronisationSupport: DeploymentSynchronisationSupport = NoDeploymentSynchronisationSupport
- // todo NU-1772
+ // TODO: NU-1772
// In the current implementation:
// - PeriodicDeploymentManager is a kind of plugin, and it has its own data source (separate db)
// - PeriodicDeploymentManager returns (by implementing ManagerSpecificScenarioActivitiesStoredByManager) custom ScenarioActivities, that are associated with operations performed internally by the manager
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessDeploymentsTableFactory.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessDeploymentsTableFactory.scala
index 7518b5cda0a..c46626ac88c 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessDeploymentsTableFactory.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessDeploymentsTableFactory.scala
@@ -1,7 +1,11 @@
package pl.touk.nussknacker.ui.process.periodic.legacy.db
import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessDeploymentStatus.PeriodicProcessDeploymentStatus
-import pl.touk.nussknacker.ui.process.periodic.model.{PeriodicProcessDeploymentId, PeriodicProcessDeploymentStatus, PeriodicProcessId}
+import pl.touk.nussknacker.ui.process.periodic.model.{
+ PeriodicProcessDeploymentId,
+ PeriodicProcessDeploymentStatus,
+ PeriodicProcessId
+}
import slick.jdbc.{JdbcProfile, JdbcType}
import slick.lifted.ProvenShape
import slick.sql.SqlProfile.ColumnOption.NotNull
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessesRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessesRepository.scala
index d747be320b2..e7e9f8a7443 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessesRepository.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessesRepository.scala
@@ -3,7 +3,6 @@ package pl.touk.nussknacker.ui.process.periodic.legacy.db
import cats.Monad
import com.github.tminglei.slickpg.ExPostgresProfile
import com.typesafe.scalalogging.LazyLogging
-import db.util.DBIOActionInstances.DB
import io.circe.parser.decode
import io.circe.syntax.EncoderOps
import pl.touk.nussknacker.engine.api.ProcessVersion
@@ -11,7 +10,6 @@ import pl.touk.nussknacker.engine.api.deployment.ProcessActionId
import pl.touk.nussknacker.engine.api.deployment.scheduler.model.{DeploymentWithRuntimeParams, RuntimeParams}
import pl.touk.nussknacker.engine.api.process.{ProcessName, VersionId}
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.management.FlinkScheduledExecutionPerformer.jarFileNameRuntimeParam
import pl.touk.nussknacker.ui.process.periodic.ScheduleProperty
import pl.touk.nussknacker.ui.process.periodic.legacy.db.LegacyPeriodicProcessesRepository.createPeriodicProcess
import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessDeploymentStatus.PeriodicProcessDeploymentStatus
@@ -65,7 +63,7 @@ object LegacyPeriodicProcessesRepository {
processId = None,
processName = processEntity.processName,
versionId = processEntity.processVersionId,
- runtimeParams = RuntimeParams(Map(jarFileNameRuntimeParam -> processEntity.jarFileName)),
+ runtimeParams = RuntimeParams(Map("jarFileName" -> processEntity.jarFileName)),
),
scheduleProperty,
processEntity.active,
@@ -121,7 +119,7 @@ class SlickLegacyPeriodicProcessesRepository(
processActionId: ProcessActionId,
): Action[PeriodicProcess] = {
val jarFileName = deploymentWithRuntimeParams.runtimeParams.params.getOrElse(
- jarFileNameRuntimeParam,
+ "jarFileName",
throw new RuntimeException(s"jarFileName runtime param not present")
)
val processEntity = PeriodicProcessEntityWithJson(
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/ModelClassLoaderProvider.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/ModelClassLoaderProvider.scala
index b3404eb0ac1..a98b12b3455 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/ModelClassLoaderProvider.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/ModelClassLoaderProvider.scala
@@ -1,7 +1,7 @@
package pl.touk.nussknacker.ui.process.processingtype
import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap
-import pl.touk.nussknacker.engine.util.loader.ModelClassLoader
+import pl.touk.nussknacker.engine.util.loader.{DeploymentManagersClassLoader, ModelClassLoader}
import java.nio.file.Path
@@ -59,9 +59,12 @@ class ModelClassLoaderProvider private (
object ModelClassLoaderProvider {
- def apply(processingTypeConfig: Map[String, ModelClassLoaderDependencies]): ModelClassLoaderProvider = {
+ def apply(
+ processingTypeConfig: Map[String, ModelClassLoaderDependencies],
+ deploymentManagersClassLoader: DeploymentManagersClassLoader
+ ): ModelClassLoaderProvider = {
val processingTypesClassloaders = processingTypeConfig.map { case (name, deps) =>
- name -> (ModelClassLoader(deps.classpath, deps.workingDirectoryOpt) -> deps)
+ name -> (ModelClassLoader(deps.classpath, deps.workingDirectoryOpt, deploymentManagersClassLoader) -> deps)
}
new ModelClassLoaderProvider(processingTypesClassloaders)
}
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala
index 6f7e96d6e67..2cad01afc34 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala
@@ -5,7 +5,7 @@ import com.typesafe.scalalogging.LazyLogging
import pl.touk.nussknacker.engine._
import pl.touk.nussknacker.engine.api.process.ProcessingType
import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap
-import pl.touk.nussknacker.engine.util.loader.ScalaServiceLoader
+import pl.touk.nussknacker.engine.util.loader.{DeploymentManagersClassLoader, ScalaServiceLoader}
import pl.touk.nussknacker.ui.configloader.{ProcessingTypeConfigs, ProcessingTypeConfigsLoader}
import pl.touk.nussknacker.ui.db.DbRef
import pl.touk.nussknacker.ui.process.processingtype.ProcessingTypeData.SchedulingForProcessingType
@@ -13,8 +13,10 @@ import pl.touk.nussknacker.ui.process.processingtype._
import pl.touk.nussknacker.ui.process.processingtype.loader.ProcessingTypeDataLoader.toValueWithRestriction
import pl.touk.nussknacker.ui.process.processingtype.provider.ProcessingTypeDataState
-class ProcessingTypesConfigBasedProcessingTypeDataLoader(processingTypeConfigsLoader: ProcessingTypeConfigsLoader)
- extends ProcessingTypeDataLoader
+class ProcessingTypesConfigBasedProcessingTypeDataLoader(
+ processingTypeConfigsLoader: ProcessingTypeConfigsLoader,
+ deploymentManagersClassLoader: DeploymentManagersClassLoader
+) extends ProcessingTypeDataLoader
with LazyLogging {
override def loadProcessingTypeData(
@@ -108,7 +110,10 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(processingTypeConfigsLo
}
private def createDeploymentManagerProvider(typeConfig: ProcessingTypeConfig): DeploymentManagerProvider = {
- ScalaServiceLoader.loadNamed[DeploymentManagerProvider](typeConfig.deploymentManagerType)
+ ScalaServiceLoader.loadNamed[DeploymentManagerProvider](
+ typeConfig.deploymentManagerType,
+ deploymentManagersClassLoader
+ )
}
}
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/provider/ReloadableProcessingTypeDataProvider.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/provider/ReloadableProcessingTypeDataProvider.scala
index 32fa266375e..c7b0c6e4642 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/provider/ReloadableProcessingTypeDataProvider.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/provider/ReloadableProcessingTypeDataProvider.scala
@@ -23,7 +23,8 @@ class ReloadableProcessingTypeDataProvider(
) extends ProcessingTypeDataProvider[ProcessingTypeData, CombinedProcessingTypeData]
with LazyLogging {
- // We init state with dumb value instead of calling loadMethod() to avoid problems with dependency injection cycle - see NusskanckerDefaultAppRouter.create
+ // We initialize the state with a dumb value instead of calling loadMethod() to avoid problems with dependency injection
+ // cycle - see NusskanckerDefaultAppRouter.create
private var stateValue: ProcessingTypeDataState[ProcessingTypeData, CombinedProcessingTypeData] = emptyState
override private[processingtype] def state
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala
index 1b3ec275c25..9b59a62c68b 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala
@@ -15,6 +15,7 @@ import pl.touk.nussknacker.engine.compile.ProcessValidator
import pl.touk.nussknacker.engine.definition.component.Components.ComponentDefinitionExtractionMode
import pl.touk.nussknacker.engine.definition.test.ModelDataTestInfoProvider
import pl.touk.nussknacker.engine.dict.ProcessDictSubstitutor
+import pl.touk.nussknacker.engine.util.ExecutionContextWithIORuntime
import pl.touk.nussknacker.engine.util.loader.ScalaServiceLoader
import pl.touk.nussknacker.engine.util.multiplicity.{Empty, Many, Multiplicity, One}
import pl.touk.nussknacker.engine.{DeploymentManagerDependencies, ModelDependencies}
@@ -449,7 +450,7 @@ class AkkaHttpBasedRouteProvider(
(
processingTypeData.designerModelData.modelData.designerDictServices.dictQueryService,
processingTypeData.designerModelData.modelData.modelDefinition.expressionConfig.dictionaries,
- processingTypeData.designerModelData.modelData.modelClassLoader.classLoader
+ processingTypeData.designerModelData.modelData.modelClassLoader
)
}
)
@@ -726,7 +727,7 @@ class AkkaHttpBasedRouteProvider(
Resource
.make(
acquire = IO {
- val laodProcessingTypeDataIO = processingTypeDataLoader.loadProcessingTypeData(
+ val loadProcessingTypeDataIO = processingTypeDataLoader.loadProcessingTypeData(
getModelDependencies(
additionalUIConfigProvider,
_,
@@ -743,7 +744,7 @@ class AkkaHttpBasedRouteProvider(
modelClassLoaderProvider,
Some(dbRef),
)
- val loadAndNotifyIO = laodProcessingTypeDataIO
+ val loadAndNotifyIO = loadProcessingTypeDataIO
.map { state =>
globalNotificationRepository.saveEntry(Notification.configurationReloaded)
state
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/statistics/RawFEStatistics.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/statistics/RawFEStatistics.scala
index 417849e6451..81973a806b7 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/statistics/RawFEStatistics.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/statistics/RawFEStatistics.scala
@@ -9,7 +9,7 @@ object RawFEStatistics {
def apply(request: RegisterStatisticsRequestDto): RawFEStatistics =
new RawFEStatistics(
- // todo change to groupMapReduce in scala 2.13
+ // TODO: change to groupMapReduce in Scala 2.13
raw = request.statistics
.groupBy(_.name.shortName)
.map { case (k, v) =>
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/suggester/ExpressionSuggester.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/suggester/ExpressionSuggester.scala
index 472f5f240c0..e2d3f1f2b7c 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/suggester/ExpressionSuggester.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/suggester/ExpressionSuggester.scala
@@ -57,7 +57,7 @@ object ExpressionSuggester {
modelData.modelDefinition.expressionConfig,
modelData.modelDefinitionWithClasses.classDefinitions,
modelData.designerDictServices,
- modelData.modelClassLoader.classLoader,
+ modelData.modelClassLoader,
scenarioPropertiesNames
)
}
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/util/ActorSystemBasedExecutionContextWithIORuntime.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/util/ActorSystemBasedExecutionContextWithIORuntime.scala
deleted file mode 100644
index 9a203f755dc..00000000000
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/util/ActorSystemBasedExecutionContextWithIORuntime.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-package pl.touk.nussknacker.ui.util
-
-import akka.actor.ActorSystem
-import cats.effect.unsafe.{IORuntime, IORuntimeConfig}
-
-import java.util.concurrent.Executors
-import scala.concurrent.ExecutionContext
-
-trait ExecutionContextWithIORuntime extends ExecutionContext {
- implicit def ioRuntime: IORuntime
-}
-
-class ActorSystemBasedExecutionContextWithIORuntime private (actorSystem: ActorSystem)
- extends ExecutionContextWithIORuntime {
-
- private val cachedThreadPool = Executors.newCachedThreadPool()
-
- override implicit val ioRuntime: IORuntime = IORuntime(
- compute = actorSystem.dispatcher,
- blocking = ExecutionContext.fromExecutor(cachedThreadPool),
- scheduler = IORuntime.global.scheduler,
- shutdown = () => (),
- config = IORuntimeConfig()
- )
-
- actorSystem.registerOnTermination {
- ioRuntime.shutdown()
- cachedThreadPool.shutdown()
- }
-
- override def execute(runnable: Runnable): Unit = actorSystem.dispatcher.execute(runnable)
-
- override def reportFailure(cause: Throwable): Unit = actorSystem.dispatcher.reportFailure(cause)
-}
-
-object ActorSystemBasedExecutionContextWithIORuntime {
- def createFrom(actorSystem: ActorSystem): ActorSystemBasedExecutionContextWithIORuntime =
- new ActorSystemBasedExecutionContextWithIORuntime(actorSystem)
-}
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/util/IOToFutureSttpBackendConverter.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/util/IOToFutureSttpBackendConverter.scala
index 6c1edb56bcd..3deee58beb4 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/util/IOToFutureSttpBackendConverter.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/util/IOToFutureSttpBackendConverter.scala
@@ -2,6 +2,7 @@ package pl.touk.nussknacker.ui.util
import cats.arrow.FunctionK
import cats.effect.IO
+import pl.touk.nussknacker.engine.util.ExecutionContextWithIORuntime
import sttp.client3.SttpBackend
import sttp.client3.impl.cats.implicits.sttpBackendToCatsMappableSttpBackend
import sttp.monad.FutureMonad
diff --git a/designer/server/src/test/resources/config/common-designer.conf b/designer/server/src/test/resources/config/common-designer.conf
index ce04adefb40..42aecee8a32 100644
--- a/designer/server/src/test/resources/config/common-designer.conf
+++ b/designer/server/src/test/resources/config/common-designer.conf
@@ -85,3 +85,9 @@ usageStatisticsReports {
}
repositoryGaugesCacheDuration: 10 seconds
+
+managersDirs: [
+ "engine/lite/embeddedDeploymentManager/target/scala-"${scala.major.version}"/",
+ "engine/lite/k8sDeploymentManager/target/scala-"${scala.major.version}"/",
+ "engine/flink/management/target/scala-"${scala.major.version}"/"
+]
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala
index 174f55d2179..8be5ad259d1 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala
@@ -8,7 +8,7 @@ import org.scalatest.{BeforeAndAfterAll, Suite}
import pl.touk.nussknacker.test.DefaultUniquePortProvider
import pl.touk.nussknacker.test.base.db.WithHsqlDbTesting
import pl.touk.nussknacker.test.config.WithDesignerConfig
-import pl.touk.nussknacker.ui.config.DesignerConfigLoader
+import pl.touk.nussknacker.ui.config.SimpleConfigLoadingDesignerConfigLoader
import pl.touk.nussknacker.ui.factory.NussknackerAppFactory
trait NuItTest extends WithHsqlDbTesting with DefaultUniquePortProvider with WithClock with BeforeAndAfterAll {
@@ -22,9 +22,10 @@ trait NuItTest extends WithHsqlDbTesting with DefaultUniquePortProvider with Wit
override protected def beforeAll(): Unit = {
super.beforeAll()
- val designerConfigLoader = DesignerConfigLoader.fromConfig(adjustNuTestConfig())
- releaseAppResources = NussknackerAppFactory(designerConfigLoader)
- .createApp(clock = clock)
+ val designerConfigLoader = new SimpleConfigLoadingDesignerConfigLoader(adjustNuTestConfig())
+ releaseAppResources = NussknackerAppFactory
+ .create(designerConfigLoader)
+ .flatMap(_.createApp(clock = clock))
.allocated
.unsafeRunSync()
._2
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala
index c662bc0c32a..b9faa1ad999 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala
@@ -33,7 +33,12 @@ import pl.touk.nussknacker.test.config.WithSimplifiedDesignerConfig.TestCategory
import pl.touk.nussknacker.test.config.WithSimplifiedDesignerConfig.TestProcessingType.Streaming
import pl.touk.nussknacker.test.config.WithSimplifiedDesignerConfig.{TestCategory, TestProcessingType}
import pl.touk.nussknacker.test.config.{ConfigWithScalaVersion, WithSimplifiedDesignerConfig}
-import pl.touk.nussknacker.test.mock.{MockDeploymentManager, MockManagerProvider, TestProcessChangeListener}
+import pl.touk.nussknacker.test.mock.{
+ MockDeploymentManager,
+ MockManagerProvider,
+ TestProcessChangeListener,
+ WithTestDeploymentManagerClassLoader
+}
import pl.touk.nussknacker.test.utils.domain.TestFactory._
import pl.touk.nussknacker.test.utils.domain.{ProcessTestData, TestFactory}
import pl.touk.nussknacker.test.utils.scalas.AkkaHttpExtensions.toRequestEntity
@@ -68,6 +73,7 @@ trait NuResourcesTest
with WithClock
with WithSimplifiedDesignerConfig
with WithSimplifiedConfigScenarioHelper
+ with WithTestDeploymentManagerClassLoader
with EitherValuesDetailedMessage
with OptionValues
with BeforeAndAfterEach
@@ -99,7 +105,7 @@ trait NuResourcesTest
protected val processChangeListener = new TestProcessChangeListener()
- protected lazy val deploymentManager: MockDeploymentManager = new MockDeploymentManager
+ protected lazy val deploymentManager: MockDeploymentManager = MockDeploymentManager.create()
protected val deploymentCommentSettings: Option[DeploymentCommentSettings] = None
@@ -128,7 +134,8 @@ trait NuResourcesTest
protected val deploymentManagerProvider: DeploymentManagerProvider = new MockManagerProvider(deploymentManager)
private val modelClassLoaderProvider = ModelClassLoaderProvider(
- Map(Streaming.stringify -> ModelClassLoaderDependencies(processingTypeConfig.classPath, None))
+ Map(Streaming.stringify -> ModelClassLoaderDependencies(processingTypeConfig.classPath, None)),
+ deploymentManagersClassLoader
)
private val modelData =
@@ -158,7 +165,10 @@ trait NuResourcesTest
protected val typeToConfig: ProcessingTypeDataProvider[ProcessingTypeData, CombinedProcessingTypeData] = {
val designerConfig = DesignerConfig.from(testConfig)
ProcessingTypeDataProvider(
- new ProcessingTypesConfigBasedProcessingTypeDataLoader(() => IO.pure(designerConfig.processingTypeConfigs))
+ new ProcessingTypesConfigBasedProcessingTypeDataLoader(
+ () => IO.pure(designerConfig.processingTypeConfigs),
+ deploymentManagersClassLoader
+ )
.loadProcessingTypeData(
_ => modelDependencies,
_ => deploymentManagerDependencies,
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala
index 7f2a7b58edf..ac7926f343b 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala
@@ -3,6 +3,8 @@ package pl.touk.nussknacker.test.mock
import akka.actor.ActorSystem
import cats.data.Validated.valid
import cats.data.ValidatedNel
+import cats.effect.IO
+import cats.effect.unsafe.IORuntime
import com.google.common.collect.LinkedHashMultimap
import com.typesafe.config.Config
import sttp.client3.testing.SttpBackendStub
@@ -14,6 +16,7 @@ import pl.touk.nussknacker.engine.api.{ProcessVersion, StreamMetaData}
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
import pl.touk.nussknacker.engine.deployment._
import pl.touk.nussknacker.engine.management.{FlinkDeploymentManager, FlinkStreamingDeploymentManagerProvider}
+import pl.touk.nussknacker.engine.util.loader.DeploymentManagersClassLoader
import pl.touk.nussknacker.engine.util.loader.ModelClassLoader
import pl.touk.nussknacker.test.config.ConfigWithScalaVersion
import pl.touk.nussknacker.test.utils.domain.TestFactory
@@ -22,7 +25,6 @@ import shapeless.syntax.typeable.typeableOps
import java.time.Instant
import java.util.UUID
import java.util.concurrent.{ConcurrentHashMap, ConcurrentLinkedQueue}
-import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.util.Try
@@ -32,19 +34,46 @@ object MockDeploymentManager {
val savepointPath = "savepoints/123-savepoint"
val stopSavepointPath = "savepoints/246-stop-savepoint"
val maxParallelism = 10
+
+ def create(
+ defaultProcessStateStatus: StateStatus = SimpleStateStatus.NotDeployed,
+ deployedScenariosProvider: ProcessingTypeDeployedScenariosProvider =
+ new ProcessingTypeDeployedScenariosProviderStub(List.empty),
+ actionService: ProcessingTypeActionService = new ProcessingTypeActionServiceStub,
+ scenarioActivityManager: ScenarioActivityManager = NoOpScenarioActivityManager,
+ customProcessStateDefinitionManager: Option[ProcessStateDefinitionManager] = None,
+ deploymentManagersClassLoader: Option[DeploymentManagersClassLoader] = None
+ ): MockDeploymentManager = {
+ new MockDeploymentManager(
+ defaultProcessStateStatus,
+ deployedScenariosProvider,
+ actionService,
+ scenarioActivityManager,
+ customProcessStateDefinitionManager,
+ DeploymentManagersClassLoader.create(List.empty).allocated.unsafeRunSync()(IORuntime.global)
+ )(ExecutionContext.global, IORuntime.global)
+ }
+
}
-class MockDeploymentManager(
+class MockDeploymentManager private (
defaultProcessStateStatus: StateStatus = SimpleStateStatus.NotDeployed,
deployedScenariosProvider: ProcessingTypeDeployedScenariosProvider =
new ProcessingTypeDeployedScenariosProviderStub(List.empty),
actionService: ProcessingTypeActionService = new ProcessingTypeActionServiceStub,
scenarioActivityManager: ScenarioActivityManager = NoOpScenarioActivityManager,
-) extends FlinkDeploymentManager(
+ customProcessStateDefinitionManager: Option[ProcessStateDefinitionManager],
+ deploymentManagersClassLoader: (DeploymentManagersClassLoader, IO[Unit]),
+)(implicit executionContext: ExecutionContext, IORuntime: IORuntime)
+ extends FlinkDeploymentManager(
ModelData(
ProcessingTypeConfig.read(ConfigWithScalaVersion.StreamingProcessTypeConfig),
TestFactory.modelDependencies,
- ModelClassLoader(ProcessingTypeConfig.read(ConfigWithScalaVersion.StreamingProcessTypeConfig).classPath, None)
+ ModelClassLoader(
+ ProcessingTypeConfig.read(ConfigWithScalaVersion.StreamingProcessTypeConfig).classPath,
+ None,
+ deploymentManagersClassLoader._1
+ )
),
DeploymentManagerDependencies(
deployedScenariosProvider,
@@ -73,6 +102,12 @@ class MockDeploymentManager(
// Pass correct deploymentId
private def fallbackDeploymentId = DeploymentId(UUID.randomUUID().toString)
+ override def processStateDefinitionManager: ProcessStateDefinitionManager =
+ customProcessStateDefinitionManager match {
+ case Some(manager) => manager
+ case None => super.processStateDefinitionManager
+ }
+
override def getProcessStates(
name: ProcessName
)(implicit freshnessPolicy: DataFreshnessPolicy): Future[WithDataFreshnessStatus[List[StatusDetails]]] = {
@@ -239,7 +274,9 @@ class MockDeploymentManager(
deploymentId: Option[newdeployment.DeploymentId]
): Future[Option[ExternalDeploymentId]] = ???
- override def close(): Unit = {}
+ override def close(): Unit = {
+ deploymentManagersClassLoader._2.unsafeRunSync()
+ }
override def cancelDeployment(command: DMCancelDeploymentCommand): Future[Unit] = Future.successful(())
@@ -267,7 +304,7 @@ class MockDeploymentManager(
override def schedulingSupport: SchedulingSupport = NoSchedulingSupport
}
-class MockManagerProvider(deploymentManager: DeploymentManager = new MockDeploymentManager())
+class MockManagerProvider(deploymentManager: DeploymentManager = MockDeploymentManager.create())
extends FlinkStreamingDeploymentManagerProvider {
override def createDeploymentManager(
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/WithTestDeploymentManagerClassLoader.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/WithTestDeploymentManagerClassLoader.scala
new file mode 100644
index 00000000000..6a9b2fcb3cd
--- /dev/null
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/WithTestDeploymentManagerClassLoader.scala
@@ -0,0 +1,23 @@
+package pl.touk.nussknacker.test.mock
+
+import cats.effect.unsafe.implicits.global
+import org.scalatest.{BeforeAndAfterAll, Suite}
+import pl.touk.nussknacker.engine.util.loader.DeploymentManagersClassLoader
+
+trait WithTestDeploymentManagerClassLoader extends BeforeAndAfterAll {
+ this: Suite =>
+
+ private val (deploymentManagersClassLoaderInstance, releaseDeploymentManagersClassLoaderResources) =
+ DeploymentManagersClassLoader
+ .create(List.empty)
+ .allocated
+ .unsafeRunSync()
+
+ def deploymentManagersClassLoader: DeploymentManagersClassLoader = deploymentManagersClassLoaderInstance
+
+ override protected def afterAll(): Unit = {
+ releaseDeploymentManagersClassLoaderResources.unsafeRunSync()
+ super.afterAll()
+ }
+
+}
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ProcessTestData.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ProcessTestData.scala
index 47f2486fa4a..0fbc61496cf 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ProcessTestData.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ProcessTestData.scala
@@ -32,6 +32,7 @@ import pl.touk.nussknacker.test.mock.{
StubModelDataWithModelDefinition,
TestAdditionalUIConfigProvider
}
+
import pl.touk.nussknacker.ui.definition.ScenarioPropertiesConfigFinalizer
import pl.touk.nussknacker.ui.definition.editor.JavaSampleEnum
import pl.touk.nussknacker.ui.process.ProcessService.UpdateScenarioCommand
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ComponentApiHttpServiceBusinessSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ComponentApiHttpServiceBusinessSpec.scala
index 449d2f851a2..cfb1a1e8862 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ComponentApiHttpServiceBusinessSpec.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ComponentApiHttpServiceBusinessSpec.scala
@@ -157,6 +157,7 @@ class ComponentApiHttpServiceBusinessSpec
"streaming-service-echoenumservice",
"streaming-service-enricher",
"streaming-service-enrichernullresult",
+ "streaming-service-env",
"streaming-service-listreturnobjectservice",
"streaming-service-log",
"streaming-service-meetingservice",
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ComponentApiHttpServiceSecuritySpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ComponentApiHttpServiceSecuritySpec.scala
index 218897b3200..aab78d6ed39 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ComponentApiHttpServiceSecuritySpec.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ComponentApiHttpServiceSecuritySpec.scala
@@ -241,6 +241,7 @@ class ComponentApiHttpServiceSecuritySpec
"streaming1-service-echoenumservice",
"streaming1-service-enricher",
"streaming1-service-enrichernullresult",
+ "streaming1-service-env",
"streaming1-service-listreturnobjectservice",
"streaming1-service-log",
"streaming1-service-meetingservice",
@@ -306,6 +307,7 @@ class ComponentApiHttpServiceSecuritySpec
"streaming2-service-echoenumservice",
"streaming2-service-enricher",
"streaming2-service-enrichernullresult",
+ "streaming2-service-env",
"streaming2-service-listreturnobjectservice",
"streaming2-service-log",
"streaming2-service-meetingservice",
@@ -370,6 +372,7 @@ class ComponentApiHttpServiceSecuritySpec
"streaming1-service-echoenumservice",
"streaming1-service-enricher",
"streaming1-service-enrichernullresult",
+ "streaming1-service-env",
"streaming1-service-listreturnobjectservice",
"streaming1-service-log",
"streaming1-service-meetingservice",
@@ -427,6 +430,7 @@ class ComponentApiHttpServiceSecuritySpec
"streaming2-service-echoenumservice",
"streaming2-service-enricher",
"streaming2-service-enrichernullresult",
+ "streaming2-service-env",
"streaming2-service-listreturnobjectservice",
"streaming2-service-log",
"streaming2-service-meetingservice",
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NuDesignerApiAvailableToExposeYamlSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NuDesignerApiAvailableToExposeYamlSpec.scala
index 3580f463aba..a18e961459f 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NuDesignerApiAvailableToExposeYamlSpec.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NuDesignerApiAvailableToExposeYamlSpec.scala
@@ -33,7 +33,7 @@ class NuDesignerApiAvailableToExposeYamlSpec extends AnyFunSuite with Matchers {
val examplesValidationResult = OpenAPIExamplesValidator.forTapir.validateExamples(
specYaml = generatedSpec,
excludeResponseValidationForOperationIds = List(
- "getApiProcessesScenarionameActivityActivities" // todo NU-1772: responses contain discriminator, it is not properly handled by validator
+ "getApiProcessesScenarionameActivityActivities" // TODO: NU-1772: responses contain discriminator, it is not properly handled by validator
)
)
val clue = examplesValidationResult
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NuDesignerApiSwaggerUISpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NuDesignerApiSwaggerUISpec.scala
index c2ff0efef1f..82ad4b7e4fe 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NuDesignerApiSwaggerUISpec.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NuDesignerApiSwaggerUISpec.scala
@@ -17,7 +17,9 @@ class NuDesignerApiSwaggerUISpec
"Swagger UI should be visible and achievable" in {
given()
.when()
- .get(s"$nuDesignerHttpAddress/api/docs")
+ .redirects()
+ .follow(true)
+ .get(s"$nuDesignerHttpAddress/api/docs/")
.Then()
.statusCode(200)
.header("Content-Type", "text/html")
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/StatisticsApiHttpServiceSecuritySpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/StatisticsApiHttpServiceSecuritySpec.scala
index c95d2b45487..f24069b3f8b 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/StatisticsApiHttpServiceSecuritySpec.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/StatisticsApiHttpServiceSecuritySpec.scala
@@ -41,7 +41,7 @@ class StatisticsApiHttpServiceSecuritySpec
}
}
- // todo what about anonymous user
+ // TODO: what about anonymous user
"not authenticated should" - {
"forbid access" in {
given()
@@ -69,7 +69,7 @@ class StatisticsApiHttpServiceSecuritySpec
}
}
- // todo what about anonymous user
+ // TODO: what about anonymous user
"not authenticated should" - {
"forbid access" in {
given()
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/config/ConfigurationTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/config/ConfigurationTest.scala
index afcb0dfe60d..8ec5c7c737b 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/config/ConfigurationTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/config/ConfigurationTest.scala
@@ -7,13 +7,14 @@ import pl.touk.nussknacker.engine.util.loader.ModelClassLoader
import pl.touk.nussknacker.engine.{ModelData, ProcessingTypeConfig}
import pl.touk.nussknacker.test.config.ConfigWithScalaVersion
import pl.touk.nussknacker.test.utils.domain.TestFactory
+import pl.touk.nussknacker.test.mock.WithTestDeploymentManagerClassLoader
import java.net.URI
import java.nio.file.Files
import java.util.UUID
// TODO: We should spit DesignerConfigLoader tests and model ProcessingTypeConfig tests
-class ConfigurationTest extends AnyFunSuite with Matchers {
+class ConfigurationTest extends AnyFunSuite with WithTestDeploymentManagerClassLoader with Matchers {
// warning: can't be val - uses ConfigFactory.load which breaks "should preserve config overrides" test
private def globalConfig = ConfigWithScalaVersion.TestsConfig
@@ -23,7 +24,7 @@ class ConfigurationTest extends AnyFunSuite with Matchers {
ModelData(
config,
TestFactory.modelDependencies,
- ModelClassLoader(config.classPath, None)
+ ModelClassLoader(config.classPath, None, deploymentManagersClassLoader),
)
}
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/config/processingtype/ProcessingTypeDataLoaderSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/config/processingtype/ProcessingTypeDataLoaderSpec.scala
index ec23803e342..4aa610c9922 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/config/processingtype/ProcessingTypeDataLoaderSpec.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/config/processingtype/ProcessingTypeDataLoaderSpec.scala
@@ -6,7 +6,7 @@ import com.typesafe
import com.typesafe.config.{Config, ConfigFactory}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers.{convertToAnyShouldWrapper, include}
-import pl.touk.nussknacker.ui.config.{DesignerConfig, DesignerConfigLoader}
+import pl.touk.nussknacker.ui.config.{DesignerConfig, SimpleConfigLoadingDesignerConfigLoader}
import pl.touk.nussknacker.ui.configloader.ProcessingTypeConfigsLoader
class ProcessingTypeDataLoaderSpec extends AnyFunSuite {
@@ -118,7 +118,7 @@ class ProcessingTypeDataLoaderSpec extends AnyFunSuite {
}
private def staticConfigBasedProcessingTypeConfigsLoader(config: Config): ProcessingTypeConfigsLoader = { () =>
- DesignerConfigLoader.fromConfig(config).loadDesignerConfig().map(_.processingTypeConfigs)
+ new SimpleConfigLoadingDesignerConfigLoader(config).loadDesignerConfig().map(_.processingTypeConfigs)
}
private def loadDifferentConfigPerInvocationProcessingTypeConfigsLoader(
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/definition/DefinitionsServiceSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/definition/DefinitionsServiceSpec.scala
index 7d1ab6de70f..de1524f64ab 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/definition/DefinitionsServiceSpec.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/definition/DefinitionsServiceSpec.scala
@@ -315,7 +315,7 @@ class DefinitionsServiceSpec extends AnyFunSuite with Matchers with PatientScala
),
fragmentPropertiesConfig = Map.empty,
scenarioPropertiesConfig = Map.empty,
- deploymentManager = new MockDeploymentManager,
+ deploymentManager = MockDeploymentManager.create(),
alignedComponentsDefinitionProvider = alignedComponentsDefinitionProvider,
scenarioPropertiesConfigFinalizer =
new ScenarioPropertiesConfigFinalizer(TestAdditionalUIConfigProvider, processingType.stringify),
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/notifications/NotificationServiceTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/notifications/NotificationServiceTest.scala
index b2f6c131db7..8ca798b1793 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/notifications/NotificationServiceTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/notifications/NotificationServiceTest.scala
@@ -70,7 +70,7 @@ class NotificationServiceTest
private val dbProcessRepository = TestFactory.newFetchingProcessRepository(testDbRef)
private val writeProcessRepository = TestFactory.newWriteProcessRepository(testDbRef, clock)
private val scenarioActivityRepository = DbScenarioActivityRepository.create(testDbRef, clock)
- private val dm: MockDeploymentManager = new MockDeploymentManager
+ private val dm: MockDeploymentManager = MockDeploymentManager.create()
private val dmDispatcher = new DeploymentManagerDispatcher(
mapProcessingTypeDataProvider(Streaming.stringify -> dm),
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/ProcessStateDefinitionServiceSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/ProcessStateDefinitionServiceSpec.scala
index 78ccd06c3c2..93bd8c5353a 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/ProcessStateDefinitionServiceSpec.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/ProcessStateDefinitionServiceSpec.scala
@@ -193,9 +193,9 @@ class ProcessStateDefinitionServiceSpec extends AnyFunSuite with Matchers {
componentDefinitionExtractionMode = modelDependencies.componentDefinitionExtractionMode
),
new MockManagerProvider(
- new MockDeploymentManager() {
- override def processStateDefinitionManager: ProcessStateDefinitionManager = stateDefinitionManager
- }
+ MockDeploymentManager.create(
+ customProcessStateDefinitionManager = Some(stateDefinitionManager)
+ )
),
SchedulingForProcessingType.NotAvailable,
TestFactory.deploymentManagerDependencies,
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/deployment/DeploymentServiceSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/deployment/DeploymentServiceSpec.scala
index f4c86410106..ff1f48a023b 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/deployment/DeploymentServiceSpec.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/deployment/DeploymentServiceSpec.scala
@@ -96,11 +96,11 @@ class DeploymentServiceSpec
private val initialVersionId = ProcessVersion.empty.versionId
- deploymentManager = new MockDeploymentManager(
- SimpleStateStatus.Running,
- DefaultProcessingTypeDeployedScenariosProvider(testDbRef, "streaming"),
- new DefaultProcessingTypeActionService("streaming", deploymentService),
- new RepositoryBasedScenarioActivityManager(activityRepository, dbioRunner),
+ deploymentManager = MockDeploymentManager.create(
+ defaultProcessStateStatus = SimpleStateStatus.Running,
+ deployedScenariosProvider = DefaultProcessingTypeDeployedScenariosProvider(testDbRef, "streaming"),
+ actionService = new DefaultProcessingTypeActionService("streaming", deploymentService),
+ scenarioActivityManager = new RepositoryBasedScenarioActivityManager(activityRepository, dbioRunner),
)
private def createDeploymentService(
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ProcessingTypeDataProviderSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ProcessingTypeDataProviderSpec.scala
index e76cb1f6f9a..679870cc4a8 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ProcessingTypeDataProviderSpec.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ProcessingTypeDataProviderSpec.scala
@@ -8,14 +8,14 @@ import pl.touk.nussknacker.engine.api.component.ComponentDefinition
import pl.touk.nussknacker.engine.api.process.{Source, SourceFactory}
import pl.touk.nussknacker.engine.testing.{DeploymentManagerProviderStub, LocalModelData}
import pl.touk.nussknacker.security.Permission
+import pl.touk.nussknacker.test.mock.WithTestDeploymentManagerClassLoader
import pl.touk.nussknacker.test.utils.domain.TestFactory
import pl.touk.nussknacker.ui.UnauthorizedError
-import pl.touk.nussknacker.ui.process.processingtype.ProcessingTypeData.SchedulingForProcessingType
import pl.touk.nussknacker.ui.process.processingtype.loader.LocalProcessingTypeDataLoader
import pl.touk.nussknacker.ui.process.processingtype.provider.ProcessingTypeDataProvider
import pl.touk.nussknacker.ui.security.api.RealLoggedUser
-class ProcessingTypeDataProviderSpec extends AnyFunSuite with Matchers {
+class ProcessingTypeDataProviderSpec extends AnyFunSuite with WithTestDeploymentManagerClassLoader with Matchers {
test("allow to access to processing type data only users that has read access to associated category") {
val provider = ProcessingTypeDataProvider(mockProcessingTypeData("foo", "bar"))
@@ -59,7 +59,10 @@ class ProcessingTypeDataProviderSpec extends AnyFunSuite with Matchers {
.loadProcessingTypeData(
_ => modelDependencies,
_ => TestFactory.deploymentManagerDependencies,
- ModelClassLoaderProvider(allProcessingTypes.map(_ -> ModelClassLoaderDependencies(List.empty, None)).toMap),
+ ModelClassLoaderProvider(
+ allProcessingTypes.map(_ -> ModelClassLoaderDependencies(List.empty, None)).toMap,
+ deploymentManagersClassLoader
+ ),
dbRef = None,
)
.unsafeRunSync()
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ScenarioParametersServiceTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ScenarioParametersServiceTest.scala
index 4b5deae86c6..603c369e27f 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ScenarioParametersServiceTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ScenarioParametersServiceTest.scala
@@ -18,9 +18,9 @@ import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap
import pl.touk.nussknacker.restmodel.scenariodetails.ScenarioParameters
import pl.touk.nussknacker.security.Permission
import pl.touk.nussknacker.test.ValidatedValuesDetailedMessage
+import pl.touk.nussknacker.test.mock.WithTestDeploymentManagerClassLoader
import pl.touk.nussknacker.test.utils.domain.TestFactory
import pl.touk.nussknacker.ui.config.DesignerConfig
-import pl.touk.nussknacker.ui.process.processingtype.ProcessingTypeData.SchedulingForProcessingType
import pl.touk.nussknacker.ui.process.processingtype.loader.ProcessingTypesConfigBasedProcessingTypeDataLoader
import pl.touk.nussknacker.ui.security.api.{LoggedUser, RealLoggedUser}
@@ -31,6 +31,7 @@ class ScenarioParametersServiceTest
extends AnyFunSuite
with Matchers
with ValidatedValuesDetailedMessage
+ with WithTestDeploymentManagerClassLoader
with OptionValues
with LazyLogging {
@@ -287,7 +288,10 @@ class ScenarioParametersServiceTest
val designerConfig =
DesignerConfig.from(ConfigFactory.parseFile(devApplicationConfFile).withFallback(fallbackConfig))
val processingTypeData =
- new ProcessingTypesConfigBasedProcessingTypeDataLoader(() => IO.pure(designerConfig.processingTypeConfigs))
+ new ProcessingTypesConfigBasedProcessingTypeDataLoader(
+ () => IO.pure(designerConfig.processingTypeConfigs),
+ deploymentManagersClassLoader
+ )
.loadProcessingTypeData(
processingType =>
ModelDependencies(
@@ -301,7 +305,8 @@ class ScenarioParametersServiceTest
ModelClassLoaderProvider(
designerConfig.processingTypeConfigs.configByProcessingType.mapValuesNow(conf =>
ModelClassLoaderDependencies(conf.classPath, None)
- )
+ ),
+ deploymentManagersClassLoader
),
dbRef = None,
)
diff --git a/docs/Changelog.md b/docs/Changelog.md
index c008dee443c..d78ce2d903e 100644
--- a/docs/Changelog.md
+++ b/docs/Changelog.md
@@ -49,7 +49,7 @@
* [#7387](https://github.com/TouK/nussknacker/pull/7387) Creator panel is automatically reloaded after configuration reload - page refresh is not needed now
* [#7400](https://github.com/TouK/nussknacker/pull/7400) Deploy and cancel buttons are not shown for fragments
* [#7354](https://github.com/TouK/nussknacker/pull/7354) Reduce response payload size when fetching scenarios for scenarios tab by removing unused fields and `null` attributes.
-* [#7404](https://github.com/TouK/nussknacker/pull/7404) Fix spel evaluation error when using conversion extensions methods or array.get extension method
+* [#7404](https://github.com/TouK/nussknacker/pull/7404) Fix SpEL evaluation error when using conversion extensions methods or array.get extension method
* [#7420](https://github.com/TouK/nussknacker/pull/7420) Add toInteger and toIntegerOrNull conversions. Also add canBeInteger extension
* [#7438](https://github.com/TouK/nussknacker/pull/7438) Map int32 integer format in OpenAPI schema to the `Integer` type
* [#7446](https://github.com/TouK/nussknacker/pull/7446) Small changes regarding node errors in fragments used in scenarios:
@@ -60,6 +60,7 @@
* implementation provided for Flink DM
* [#7443](https://github.com/TouK/nussknacker/pull/7443) Indexing on record is more similar to indexing on map. The change lets us access record values dynamically. For example now spel expression "{a: 5, b: 10}[#input.field]" compiles and has type "Integer" inferred from types of values of the record. This lets us access record value based on user input, for instance if user passes "{"field": "b"}" to scenario we will get value "10", whereas input {"field": "c"} would result in "null". Expression "{a: 5}["b"]" still does not compile because it is known at compile time that record does not have property "b".
* [#7324](https://github.com/TouK/nussknacker/pull/7324) Fix: Passing Flink Job Global Params
+* [#7335](https://github.com/TouK/nussknacker/pull/7335) Introduced the `managersDirs` config option to configure deployment manager directory paths (you can use the `MANAGERS_DIR` environment variable in case of docker-based deployments). The default is `./managers`.
## 1.18
diff --git a/docs/MigrationGuide.md b/docs/MigrationGuide.md
index 43052d7b8f0..39ef33ce68a 100644
--- a/docs/MigrationGuide.md
+++ b/docs/MigrationGuide.md
@@ -73,6 +73,7 @@ To see the biggest differences please consult the [changelog](Changelog.md).
shouldVerifyBeforeDeploy: true
}
```
+* [#7335](https://github.com/TouK/nussknacker/pull/7335) Deployment managers are loaded using a separate class loader (not the Application ClassLoader - `/opt/nussknacker/managers/*` should be removed from the CLASSPATH definition). The default location for deployment manager jars is the `managers` folder inside the working directory.
### Code API changes
* [#7368](https://github.com/TouK/nussknacker/pull/7368) Renamed `PeriodicSourceFactory` to `SampleGeneratorSourceFactory`
diff --git a/docs/configuration/Common.md b/docs/configuration/Common.md
index 8e60f327f4f..19058ac66f6 100644
--- a/docs/configuration/Common.md
+++ b/docs/configuration/Common.md
@@ -12,38 +12,39 @@ Because we use [HOCON](../#conventions), you can set (or override) any configura
## Basic environment variables
-| Variable name | Type | Default value | Description |
-|-------------------------------|---------|--------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| JDK_JAVA_OPTIONS | string | | Custom JVM options, e.g `-Xmx512M` |
-| JAVA_DEBUG_PORT | int | | Port to Remote JVM Debugger. By default debugger is turned off. |
-| CONFIG_FILE | string | $NUSSKNACKER_DIR/conf/application.conf | Location of application configuration. You can pass comma separated list of files, they will be merged in given order, using HOCON fallback mechanism |
-| LOGBACK_FILE | string | $NUSSKNACKER_DIR/conf/docker-logback.xml | Location of logging configuration |
-| WORKING_DIR | string | $NUSSKNACKER_DIR | Location of working directory |
-| STORAGE_DIR | string | $WORKING_DIR/storage | Location of HSQLDB database storage |
-| CLASSPATH | string | $NUSSKNACKER_DIR/lib/*:$NUSSKNACKER_DIR/managers/* | Classpath of the Designer, _lib_ directory contains related jar libraries (e.g. database driver), _managers_ directory contains deployment manager providers |
-| LOGS_DIR | string | $WORKING_DIR/logs | Location of logs |
-| HTTP_INTERFACE | string | 0.0.0.0 | Network address Nussknacker binds to |
-| HTTP_PORT | string | 8080 | HTTP port used by Nussknacker |
-| HTTP_PUBLIC_PATH | string | | Public HTTP path prefix the Designer UI is served at, e.g. using external proxy like [nginx](../../installation/Binaries/#configuring-the-designer-with-nginx-http-public-path) |
-| DB_URL | string | jdbc:hsqldb:file:${STORAGE_DIR}/db;sql.syntax_ora=true | [See also](../configuration/DesignerConfiguration.md#database-configuration) for more information |
-| DB_DRIVER | string | org.hsqldb.jdbc.JDBCDriver | Database driver class name |
-| DB_USER | string | SA | User used for connection to database |
-| DB_PASSWORD | string | | Password used for connection to database |
-| DB_CONNECTION_TIMEOUT | int | 30000 | Connection to database timeout in milliseconds |
-| AUTHENTICATION_METHOD | string | BasicAuth | Method of authentication. One of: BasicAuth, OAuth2 |
-| AUTHENTICATION_USERS_FILE | string | $NUSSKNACKER_DIR/conf/users.conf | Location of users configuration file |
-| AUTHENTICATION_HEADERS_ACCEPT | string | application/json | |
-| AUTHENTICATION_REALM | string | nussknacker | [Realm](https://datatracker.ietf.org/doc/html/rfc2617#section-1.2) |
-| FLINK_REST_URL | string | http://localhost:8081 | URL to Flink's REST API - used for scenario deployment |
-| FLINK_ROCKSDB_ENABLE | boolean | true | Enable RocksDB state backend support |
-| KAFKA_ADDRESS | string | localhost:9092 | Kafka address used by Kafka components (sources, sinks) |
-| KAFKA_AUTO_OFFSET_RESET | string | | See [Kafka documentation](https://kafka.apache.org/documentation/#consumerconfigs_auto.offset.reset). For development purposes it may be convenient to set this value to 'earliest', when not set the default from Kafka ('latest' at the moment) is used |
-| SCHEMA_REGISTRY_URL | string | http://localhost:8082 | Address of Confluent Schema registry used for storing data model |
-| GRAFANA_URL | string | /grafana | URL to Grafana, used in UI. Should be relative to Nussknacker URL to avoid additional CORS configuration |
-| INFLUXDB_URL | string | http://localhost:8086 | URL to InfluxDB used by counts mechanism |
-| PROMETHEUS_METRICS_PORT | int | | When defined, JMX MBeans are exposed as Prometheus metrics on this port |
-| PROMETHEUS_AGENT_CONFIG_FILE | int | $NUSSKNACKER_DIR/conf/jmx_prometheus.yaml | Default configuration for JMX Prometheus agent. Used only when agent is enabled. See `PROMETHEUS_METRICS_PORT` |
-| TABLES_DEFINITION_FILE | string | $NUSSKNACKER_DIR/conf/dev-tables-definition.sql | Location of file containing definitions of tables for Flink Table API components in Flink Sql |
+| Variable name | Type | Default value | Description |
+|-------------------------------|----------|--------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| JDK_JAVA_OPTIONS | string | | Custom JVM options, e.g `-Xmx512M` |
+| JAVA_DEBUG_PORT | int | | Port to Remote JVM Debugger. By default debugger is turned off. |
+| CONFIG_FILE | string | $NUSSKNACKER_DIR/conf/application.conf | Location of application configuration. You can pass comma separated list of files, they will be merged in given order, using HOCON fallback mechanism |
+| LOGBACK_FILE | string | $NUSSKNACKER_DIR/conf/docker-logback.xml | Location of logging configuration |
+| WORKING_DIR | string | $NUSSKNACKER_DIR | Location of working directory |
+| STORAGE_DIR | string | $WORKING_DIR/storage | Location of HSQLDB database storage |
+| MANAGERS_DIR | string | $WORKING_DIR/managers | Location of deployment managers jars |
+| CLASSPATH | string | $NUSSKNACKER_DIR/lib/* | Classpath of the Designer, _lib_ directory contains related jar libraries (e.g. database driver) |
+| LOGS_DIR | string | $WORKING_DIR/logs | Location of logs |
+| HTTP_INTERFACE | string | 0.0.0.0 | Network address Nussknacker binds to |
+| HTTP_PORT | string | 8080 | HTTP port used by Nussknacker |
+| HTTP_PUBLIC_PATH | string | | Public HTTP path prefix the Designer UI is served at, e.g. using external proxy like [nginx](../../installation/Binaries/#configuring-the-designer-with-nginx-http-public-path) |
+| DB_URL | string | jdbc:hsqldb:file:${STORAGE_DIR}/db;sql.syntax_ora=true | [See also](../configuration/DesignerConfiguration.md#database-configuration) for more information |
+| DB_DRIVER | string | org.hsqldb.jdbc.JDBCDriver | Database driver class name |
+| DB_USER | string | SA | User used for connection to database |
+| DB_PASSWORD | string | | Password used for connection to database |
+| DB_CONNECTION_TIMEOUT | int | 30000 | Connection to database timeout in milliseconds |
+| AUTHENTICATION_METHOD | string | BasicAuth | Method of authentication. One of: BasicAuth, OAuth2 |
+| AUTHENTICATION_USERS_FILE | string | $NUSSKNACKER_DIR/conf/users.conf | Location of users configuration file |
+| AUTHENTICATION_HEADERS_ACCEPT | string | application/json | |
+| AUTHENTICATION_REALM | string | nussknacker | [Realm](https://datatracker.ietf.org/doc/html/rfc2617#section-1.2) |
+| FLINK_REST_URL | string | http://localhost:8081 | URL to Flink's REST API - used for scenario deployment |
+| FLINK_ROCKSDB_ENABLE | boolean | true | Enable RocksDB state backend support |
+| KAFKA_ADDRESS | string | localhost:9092 | Kafka address used by Kafka components (sources, sinks) |
+| KAFKA_AUTO_OFFSET_RESET | string | | See [Kafka documentation](https://kafka.apache.org/documentation/#consumerconfigs_auto.offset.reset). For development purposes it may be convenient to set this value to 'earliest', when not set the default from Kafka ('latest' at the moment) is used |
+| SCHEMA_REGISTRY_URL | string | http://localhost:8082 | Address of Confluent Schema registry used for storing data model |
+| GRAFANA_URL | string | /grafana | URL to Grafana, used in UI. Should be relative to Nussknacker URL to avoid additional CORS configuration |
+| INFLUXDB_URL | string | http://localhost:8086 | URL to InfluxDB used by counts mechanism |
+| PROMETHEUS_METRICS_PORT | int | | When defined, JMX MBeans are exposed as Prometheus metrics on this port |
+| PROMETHEUS_AGENT_CONFIG_FILE | int | $NUSSKNACKER_DIR/conf/jmx_prometheus.yaml | Default configuration for JMX Prometheus agent. Used only when agent is enabled. See `PROMETHEUS_METRICS_PORT` |
+| TABLES_DEFINITION_FILE | string | $NUSSKNACKER_DIR/conf/dev-tables-definition.sql | Location of file containing definitions of tables for Flink Table API components in Flink Sql |
## OAuth2 environment variables
diff --git a/docs/installation/Binaries.md b/docs/installation/Binaries.md
index 8dba7954a53..62bb79b8853 100644
--- a/docs/installation/Binaries.md
+++ b/docs/installation/Binaries.md
@@ -49,7 +49,7 @@ We provide following scripts:
| $NUSSKNACKER_DIR/model/flinkExecutor.jar | | JAR with Flink executor, used by scenarios running on Flink |
| $NUSSKNACKER_DIR/components | | Directory with Nussknacker Component Provider JARS |
| $NUSSKNACKER_DIR/lib | | Directory with Nussknacker base libraries |
-| $NUSSKNACKER_DIR/managers | | Directory with Nussknacker Deployment Managers |
+| $NUSSKNACKER_DIR/managers | Configured by the MANAGERS_DIR environment variable | Directory with Nussknacker Deployment Managers |
## Logging
diff --git a/e2e-tests/src/test/resources/bootstrap-setup-scenarios.override.yml b/e2e-tests/src/test/resources/bootstrap-setup-scenarios.override.yml
index 6e1c1f81ab0..38b70fa7b0e 100644
--- a/e2e-tests/src/test/resources/bootstrap-setup-scenarios.override.yml
+++ b/e2e-tests/src/test/resources/bootstrap-setup-scenarios.override.yml
@@ -4,3 +4,4 @@ services:
volumes:
- ../../e2e-tests/src/test/resources/detect-large-transactions:/scenario-examples/detect-large-transactions
- ../../e2e-tests/src/test/resources/determine-offered-plan:/scenario-examples/determine-offered-plan
+ - ../../e2e-tests/src/test/resources/loan-request:/scenario-examples/loan-request
diff --git a/e2e-tests/src/test/resources/loan-request/LoanRequest.json b/e2e-tests/src/test/resources/loan-request/LoanRequest.json
new file mode 100644
index 00000000000..a82ba1ec844
--- /dev/null
+++ b/e2e-tests/src/test/resources/loan-request/LoanRequest.json
@@ -0,0 +1,239 @@
+{
+ "metaData": {
+ "id": "LoanRequest",
+ "additionalFields": {
+ "description": null,
+ "properties": {
+ "inputSchema": "{\n \"type\": \"object\",\n \"properties\": {\n \"customerId\": {\n \"type\": \"string\"\n },\n \"location\": {\n \"type\": \"object\",\n \"properties\": {\n \"city\": {\n \"type\": \"string\"\n },\n \"street\": {\n \"type\": \"string\"\n }\n }\n },\n \"requestType\": {\n \"type\": \"string\"\n },\n \"requestedAmount\": {\n \"type\": \"number\"\n }\n },\n \"required\": [\"customerId\", \"location\", \"requestType\", \"requestedAmount\"],\n \"additionalProperties\": false\n}",
+ "outputSchema": "{\n \"type\": \"object\",\n \"properties\": {\n \"acceptedAmount\": {\n \"type\": \"number\",\n \"description\": \"Accepted amount\"\n },\n \"message\": {\n \"type\": \"string\",\n \"description\": \"Additional message\"\n }\n },\n \"required\": [\"acceptedAmount\", \"message\"],\n \"additionalProperties\": false\n}",
+ "slug": "loan"
+ },
+ "metaDataType": "RequestResponseMetaData"
+ }
+ },
+ "nodes": [
+ {
+ "id": "request",
+ "ref": {
+ "typ": "request",
+ "parameters": [
+ ]
+ },
+ "additionalFields": {
+ "description": null,
+ "layoutData": {
+ "x": 360,
+ "y": 0
+ }
+ },
+ "type": "Source"
+ },
+ {
+ "defaultNext": [
+ ],
+ "nexts": [
+ {
+ "expression": {
+ "language": "spel",
+ "expression": "#input.requestType == 'loan'"
+ },
+ "nodes": [
+ {
+ "id": "loan response",
+ "ref": {
+ "typ": "response",
+ "parameters": [
+ {
+ "name": "acceptedAmount",
+ "expression": {
+ "language": "spel",
+ "expression": "50"
+ }
+ },
+ {
+ "name": "message",
+ "expression": {
+ "language": "spel",
+ "expression": "'only small amount available'"
+ }
+ }
+ ]
+ },
+ "endResult": null,
+ "isDisabled": null,
+ "additionalFields": {
+ "description": null,
+ "layoutData": {
+ "x": 0,
+ "y": 360
+ }
+ },
+ "type": "Sink"
+ }
+ ]
+ },
+ {
+ "expression": {
+ "language": "spel",
+ "expression": "#input.requestType == 'mortgage'"
+ },
+ "nodes": [
+ {
+ "defaultNext": [
+ ],
+ "nexts": [
+ {
+ "expression": {
+ "language": "spel",
+ "expression": "#input.location.city == 'Warszawa'"
+ },
+ "nodes": [
+ {
+ "id": "Warsaw mortgage",
+ "ref": {
+ "typ": "response",
+ "parameters": [
+ {
+ "name": "acceptedAmount",
+ "expression": {
+ "language": "spel",
+ "expression": "1000"
+ }
+ },
+ {
+ "name": "message",
+ "expression": {
+ "language": "spel",
+ "expression": "'Large sum for Warszawa'"
+ }
+ }
+ ]
+ },
+ "endResult": null,
+ "isDisabled": null,
+ "additionalFields": {
+ "description": null,
+ "layoutData": {
+ "x": 180,
+ "y": 540
+ }
+ },
+ "type": "Sink"
+ }
+ ]
+ },
+ {
+ "expression": {
+ "language": "spel",
+ "expression": "true"
+ },
+ "nodes": [
+ {
+ "id": "Other city mortgage",
+ "ref": {
+ "typ": "response",
+ "parameters": [
+ {
+ "name": "Raw editor",
+ "expression": {
+ "language": "spel",
+ "expression": "false"
+ }
+ },
+ {
+ "name": "acceptedAmount",
+ "expression": {
+ "language": "spel",
+ "expression": "100"
+ }
+ },
+ {
+ "name": "message",
+ "expression": {
+ "language": "spel",
+ "expression": "'Large sum for other city'"
+ }
+ }
+ ]
+ },
+ "endResult": null,
+ "isDisabled": null,
+ "additionalFields": {
+ "description": null,
+ "layoutData": {
+ "x": 540,
+ "y": 540
+ }
+ },
+ "type": "Sink"
+ }
+ ]
+ }
+ ],
+ "id": "switch",
+ "additionalFields": {
+ "description": null,
+ "layoutData": {
+ "x": 360,
+ "y": 360
+ }
+ },
+ "type": "Switch"
+ }
+ ]
+ },
+ {
+ "expression": {
+ "language": "spel",
+ "expression": "true"
+ },
+ "nodes": [
+ {
+ "id": "unknown",
+ "ref": {
+ "typ": "response",
+ "parameters": [
+ {
+ "name": "acceptedAmount",
+ "expression": {
+ "language": "spel",
+ "expression": "0"
+ }
+ },
+ {
+ "name": "message",
+ "expression": {
+ "language": "spel",
+ "expression": "'Unknown loan type'"
+ }
+ }
+ ]
+ },
+ "endResult": null,
+ "isDisabled": null,
+ "additionalFields": {
+ "description": null,
+ "layoutData": {
+ "x": 720,
+ "y": 360
+ }
+ },
+ "type": "Sink"
+ }
+ ]
+ }
+ ],
+ "id": "loan type",
+ "additionalFields": {
+ "description": null,
+ "layoutData": {
+ "x": 360,
+ "y": 180
+ }
+ },
+ "type": "Switch"
+ }
+ ],
+ "additionalBranches": [
+ ]
+}
diff --git a/e2e-tests/src/test/scala/pl/touk/nussknacker/LoanRequestSpec.scala b/e2e-tests/src/test/scala/pl/touk/nussknacker/LoanRequestSpec.scala
new file mode 100644
index 00000000000..a8bb31e6dea
--- /dev/null
+++ b/e2e-tests/src/test/scala/pl/touk/nussknacker/LoanRequestSpec.scala
@@ -0,0 +1,27 @@
+package pl.touk.nussknacker
+
+import org.scalatest.freespec.AnyFreeSpecLike
+import org.scalatest.matchers.should.Matchers
+import pl.touk.nussknacker.test.VeryPatientScalaFutures
+import pl.touk.nussknacker.test.installationexample.HttpResponse
+
+class LoanRequestSpec extends AnyFreeSpecLike with BaseE2ESpec with Matchers with VeryPatientScalaFutures {
+
+ "Properly handle loan request" in {
+ val result = client.sendHttpRequest(
+ serviceSlug = "loan",
+ payload = ujson.read {
+ """{
+ | "customerId": "anon",
+ | "requestedAmount": 1555,
+ | "requestType": "mortgage",
+ | "location": { "city": "Warszawa", "street": "Marszałkowska" }
+ |}""".stripMargin
+ }
+ )
+ result should be(
+ Right(HttpResponse(200, ujson.read("""{"acceptedAmount":1000,"message":"Large sum for Warszawa"}""")))
+ )
+ }
+
+}
diff --git a/engine/flink/management/dev-model/src/test/scala/pl/touk/nussknacker/engine/process/SampleComponentProviderTest.scala b/engine/flink/management/dev-model/src/test/scala/pl/touk/nussknacker/engine/process/SampleComponentProviderTest.scala
index 9da6994ea9b..4ffc6862c7c 100644
--- a/engine/flink/management/dev-model/src/test/scala/pl/touk/nussknacker/engine/process/SampleComponentProviderTest.scala
+++ b/engine/flink/management/dev-model/src/test/scala/pl/touk/nussknacker/engine/process/SampleComponentProviderTest.scala
@@ -1,6 +1,6 @@
package pl.touk.nussknacker.engine.process
-import com.typesafe.config.ConfigFactory
+import com.typesafe.config.{Config, ConfigFactory}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import pl.touk.nussknacker.engine.api.component.DesignerWideComponentId
@@ -16,7 +16,7 @@ import pl.touk.nussknacker.engine.{ClassLoaderModelData, ConfigWithUnresolvedVer
class SampleComponentProviderTest extends AnyFunSuite with FlinkSpec with Matchers {
- override protected lazy val config = ConfigFactory.empty()
+ override protected lazy val config: Config = ConfigFactory.empty()
test("detects component service") {
val process =
diff --git a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerProviderHelper.scala b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerProviderHelper.scala
index 0b9b58fd450..7894333decb 100644
--- a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerProviderHelper.scala
+++ b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerProviderHelper.scala
@@ -1,6 +1,8 @@
package pl.touk.nussknacker.engine.management.streaming
import akka.actor.ActorSystem
+import cats.effect.IO
+import cats.effect.kernel.Resource
import org.asynchttpclient.DefaultAsyncHttpClientConfig
import sttp.client3.asynchttpclient.future.AsyncHttpClientFutureBackend
import pl.touk.nussknacker.engine._
@@ -13,6 +15,7 @@ import pl.touk.nussknacker.engine.api.deployment.{
}
import pl.touk.nussknacker.engine.definition.component.Components.ComponentDefinitionExtractionMode
import pl.touk.nussknacker.engine.management.FlinkStreamingDeploymentManagerProvider
+import pl.touk.nussknacker.engine.util.loader.DeploymentManagersClassLoader
import pl.touk.nussknacker.engine.util.loader.ModelClassLoader
import pl.touk.nussknacker.engine.{
ConfigWithUnresolvedVersion,
@@ -26,9 +29,10 @@ object FlinkStreamingDeploymentManagerProviderHelper {
def createDeploymentManager(
processingTypeConfig: ConfigWithUnresolvedVersion,
+ deploymentManagerClassLoader: DeploymentManagersClassLoader
): DeploymentManager = {
val typeConfig = ProcessingTypeConfig.read(processingTypeConfig)
- val modelClassLoader = ModelClassLoader(typeConfig.classPath, None)
+ val modelClassLoader = ModelClassLoader(typeConfig.classPath, None, deploymentManagerClassLoader)
val modelData = ModelData(
processingTypeConfig = typeConfig,
ModelDependencies(
@@ -60,4 +64,14 @@ object FlinkStreamingDeploymentManagerProviderHelper {
.valueOr(err => throw new IllegalStateException(s"Invalid Deployment Manager: ${err.toList.mkString(", ")}"))
}
+ def createDeploymentManager(
+ processingTypeConfig: ConfigWithUnresolvedVersion,
+ ): Resource[IO, DeploymentManager] = {
+ DeploymentManagersClassLoader
+ .create(List.empty)
+ .map { deploymentManagerClassLoader =>
+ createDeploymentManager(processingTypeConfig, deploymentManagerClassLoader)
+ }
+ }
+
}
diff --git a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerSpec.scala b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerSpec.scala
index 0916e833329..87784b69f6a 100644
--- a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerSpec.scala
+++ b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerSpec.scala
@@ -70,7 +70,7 @@ class FlinkStreamingDeploymentManagerSpec extends AnyFunSuite with Matchers with
deployedResponse.futureValue
}
- // this is for the case where e.g. we manually cancel flink job, or it fail and didn't restart...
+ // this is for the case where e.g. we manually cancel flink job, or it fails and doesn't restart...
test("cancel of not existing job should not fail") {
deploymentManager
.processCommand(DMCancelScenarioCommand(ProcessName("not existing job"), user = userToAct))
@@ -273,7 +273,7 @@ class FlinkStreamingDeploymentManagerSpec extends AnyFunSuite with Matchers with
_ => true,
ComponentDefinitionExtractionMode.FinalDefinition
),
- ModelClassLoader(processingTypeConfig.classPath, None)
+ ModelClassLoader(processingTypeConfig.classPath, None, deploymentManagerClassLoader)
)
val definition = modelData.modelDefinition
definition.components.components.map(_.id) should contain(ComponentId(ComponentType.Service, "accountService"))
diff --git a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingProcessTestRunnerSpec.scala b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingProcessTestRunnerSpec.scala
index 560206a719f..f36a321be6d 100644
--- a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingProcessTestRunnerSpec.scala
+++ b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingProcessTestRunnerSpec.scala
@@ -1,8 +1,10 @@
package pl.touk.nussknacker.engine.management.streaming
+import cats.effect.unsafe.implicits.global
import com.typesafe.config.ConfigValueFactory.fromAnyRef
import com.typesafe.config.{Config, ConfigValueFactory}
import io.circe.Json
+import org.scalatest.BeforeAndAfterAll
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import pl.touk.nussknacker.engine.ConfigWithUnresolvedVersion
@@ -22,7 +24,8 @@ class FlinkStreamingProcessTestRunnerSpec
extends AnyFlatSpec
with Matchers
with VeryPatientScalaFutures
- with WithConfig {
+ with WithConfig
+ with BeforeAndAfterAll {
private val classPath: List[String] = ClassPaths.scalaClasspath
@@ -44,10 +47,18 @@ class FlinkStreamingProcessTestRunnerSpec
List(ScenarioTestJsonRecord("startProcess", Json.fromString("terefere")))
)
- it should "run scenario in test mode" in {
- val deploymentManager =
- FlinkStreamingDeploymentManagerProviderHelper.createDeploymentManager(ConfigWithUnresolvedVersion(config))
+ private lazy val (deploymentManager, releaseDeploymentManagerResources) =
+ FlinkStreamingDeploymentManagerProviderHelper
+ .createDeploymentManager(ConfigWithUnresolvedVersion(config))
+ .allocated
+ .unsafeRunSync()
+
+ override protected def afterAll(): Unit = {
+ releaseDeploymentManagerResources.unsafeRunSync()
+ super.afterAll()
+ }
+ it should "run scenario in test mode" in {
val processName = ProcessName(UUID.randomUUID().toString)
val processVersion = ProcessVersion.empty.copy(processName = processName)
@@ -71,9 +82,6 @@ class FlinkStreamingProcessTestRunnerSpec
.source("startProcess", "kafka-transaction")
.emptySink("endSend", "sendSmsNotExist")
- val deploymentManager =
- FlinkStreamingDeploymentManagerProviderHelper.createDeploymentManager(ConfigWithUnresolvedVersion(config))
-
val caught = intercept[IllegalArgumentException] {
Await.result(
deploymentManager.processCommand(DMTestScenarioCommand(processVersion, process, scenarioTestData)),
diff --git a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/JavaConfigDeploymentManagerSpec.scala b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/JavaConfigDeploymentManagerSpec.scala
index 158c0fdb860..eb0930ad1a7 100644
--- a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/JavaConfigDeploymentManagerSpec.scala
+++ b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/JavaConfigDeploymentManagerSpec.scala
@@ -5,13 +5,12 @@ import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import pl.touk.nussknacker.engine.api.ProcessVersion
import pl.touk.nussknacker.engine.api.deployment.DeploymentUpdateStrategy.StateRestoringStrategy
+import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
import pl.touk.nussknacker.engine.api.deployment.{
DMCancelScenarioCommand,
DMRunDeploymentCommand,
DeploymentUpdateStrategy
}
-import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
-import pl.touk.nussknacker.engine.api.process.ProcessName
import pl.touk.nussknacker.engine.build.ScenarioBuilder
import pl.touk.nussknacker.engine.deployment.DeploymentData
diff --git a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/StreamingDockerTest.scala b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/StreamingDockerTest.scala
index 176a27d84da..9fb4caef92e 100644
--- a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/StreamingDockerTest.scala
+++ b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/StreamingDockerTest.scala
@@ -1,5 +1,8 @@
package pl.touk.nussknacker.engine.management.streaming
+import cats.effect.IO
+import cats.effect.kernel.Resource
+import cats.effect.unsafe.implicits.global
import com.typesafe.scalalogging.LazyLogging
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Assertion, BeforeAndAfterAll, OptionValues, Suite}
@@ -13,29 +16,41 @@ import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
import pl.touk.nussknacker.engine.deployment.{DeploymentData, ExternalDeploymentId}
import pl.touk.nussknacker.engine.kafka.KafkaClient
import pl.touk.nussknacker.engine.management.DockerTest
+import pl.touk.nussknacker.engine.util.loader.DeploymentManagersClassLoader
trait StreamingDockerTest extends DockerTest with BeforeAndAfterAll with Matchers with OptionValues {
self: Suite with LazyLogging =>
protected implicit val freshnessPolicy: DataFreshnessPolicy = DataFreshnessPolicy.Fresh
- protected var kafkaClient: KafkaClient = _
+ protected lazy val (kafkaClient, releaseKafkaClient) =
+ Resource
+ .make(
+ acquire = IO(new KafkaClient(hostKafkaAddress, self.suiteName))
+ .map { client =>
+ logger.info("Kafka client created")
+ client
+ }
+ )(
+ release = client => IO(client.shutdown()).map(_ => logger.info("Kafka client closed"))
+ )
+ .allocated
+ .unsafeRunSync()
- override def beforeAll(): Unit = {
- super.beforeAll()
- kafkaClient = new KafkaClient(hostKafkaAddress, self.suiteName)
- logger.info("Kafka client created")
- }
+ protected lazy val (deploymentManagerClassLoader, releaseDeploymentManagerClassLoaderResources) =
+ DeploymentManagersClassLoader.create(List.empty).allocated.unsafeRunSync()
+
+ protected lazy val deploymentManager = FlinkStreamingDeploymentManagerProviderHelper.createDeploymentManager(
+ ConfigWithUnresolvedVersion(config),
+ deploymentManagerClassLoader
+ )
override def afterAll(): Unit = {
- kafkaClient.shutdown()
- logger.info("Kafka client closed")
+ releaseKafkaClient.unsafeToFuture()
+ releaseDeploymentManagerClassLoaderResources.unsafeToFuture()
super.afterAll()
}
- protected lazy val deploymentManager: DeploymentManager =
- FlinkStreamingDeploymentManagerProviderHelper.createDeploymentManager(ConfigWithUnresolvedVersion(config))
-
protected def deployProcessAndWaitIfRunning(
process: CanonicalProcess,
processVersion: ProcessVersion,
diff --git a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkProcessTestRunner.scala b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkProcessTestRunner.scala
index 853187e57d4..179e1a57054 100644
--- a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkProcessTestRunner.scala
+++ b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkProcessTestRunner.scala
@@ -12,7 +12,7 @@ import scala.concurrent.{ExecutionContext, Future}
class FlinkProcessTestRunner(modelData: ModelData)
extends StaticMethodRunner(
- modelData.modelClassLoader.classLoader,
+ modelData.modelClassLoader,
"pl.touk.nussknacker.engine.process.runner.FlinkTestMain",
"run"
) {
diff --git a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkProcessVerifier.scala b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkProcessVerifier.scala
index 29b9226343a..30048eed38b 100644
--- a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkProcessVerifier.scala
+++ b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkProcessVerifier.scala
@@ -13,7 +13,7 @@ import scala.util.control.NonFatal
class FlinkProcessVerifier(modelData: ModelData)
extends StaticMethodRunner(
- modelData.modelClassLoader.classLoader,
+ modelData.modelClassLoader,
"pl.touk.nussknacker.engine.process.runner.FlinkVerificationMain",
"run"
)
diff --git a/engine/flink/tests/src/test/scala/pl/touk/nussknacker/defaultmodel/RunFlinkStreamingModelLocally.scala b/engine/flink/tests/src/test/scala/pl/touk/nussknacker/defaultmodel/RunFlinkStreamingModelLocally.scala
index 8b3118973e2..3e9353a2936 100644
--- a/engine/flink/tests/src/test/scala/pl/touk/nussknacker/defaultmodel/RunFlinkStreamingModelLocally.scala
+++ b/engine/flink/tests/src/test/scala/pl/touk/nussknacker/defaultmodel/RunFlinkStreamingModelLocally.scala
@@ -24,7 +24,7 @@ object RunFlinkStreamingModelLocally extends IOApp.Simple {
configCreator = new DefaultConfigCreator
)
- // For simplicity we use stub here, one can add real Flink implementation after add appropriate dependencies
+ // For simplicity, we use a stub here; one can add a real Flink implementation after adding appropriate dependencies
private val provider: DeploymentManagerProvider = new DeploymentManagerProviderStub
override def run: IO[Unit] = {
diff --git a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/DeploymentStrategy.scala b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/DeploymentStrategy.scala
index 0bce946e9a1..dc296ec74fa 100644
--- a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/DeploymentStrategy.scala
+++ b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/DeploymentStrategy.scala
@@ -2,7 +2,7 @@ package pl.touk.nussknacker.engine.embedded
import pl.touk.nussknacker.engine.ModelData
import pl.touk.nussknacker.engine.api.JobData
-import pl.touk.nussknacker.engine.api.deployment.{DeploymentStatus, DeploymentStatusName, StateStatus}
+import pl.touk.nussknacker.engine.api.deployment.DeploymentStatus
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
import pl.touk.nussknacker.engine.lite.api.runtimecontext.LiteEngineRuntimeContextPreparer
diff --git a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManager.scala b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManager.scala
index 9603698ddf5..8352a7b9e0e 100644
--- a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManager.scala
+++ b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManager.scala
@@ -1,64 +1,20 @@
package pl.touk.nussknacker.engine.embedded
-import cats.data.Validated.valid
-import cats.data.ValidatedNel
-import com.typesafe.config.Config
import com.typesafe.scalalogging.LazyLogging
-import pl.touk.nussknacker.engine.ModelData.BaseModelDataExt
import pl.touk.nussknacker.engine.api._
import pl.touk.nussknacker.engine.api.deployment._
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus.ProblemStateStatus
-import pl.touk.nussknacker.engine.api.parameter.ValueInputWithDictEditor
import pl.touk.nussknacker.engine.api.process.ProcessName
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
import pl.touk.nussknacker.engine.deployment.{DeploymentData, DeploymentId, ExternalDeploymentId}
-import pl.touk.nussknacker.engine.embedded.requestresponse.RequestResponseDeploymentStrategy
-import pl.touk.nussknacker.engine.embedded.streaming.StreamingDeploymentStrategy
-import pl.touk.nussknacker.engine.lite.api.runtimecontext.LiteEngineRuntimeContextPreparer
-import pl.touk.nussknacker.engine.lite.metrics.dropwizard.{DropwizardMetricsProviderFactory, LiteMetricRegistryFactory}
-import pl.touk.nussknacker.engine.{BaseModelData, CustomProcessValidator, DeploymentManagerDependencies, ModelData}
-import pl.touk.nussknacker.lite.manager.{LiteDeploymentManager, LiteDeploymentManagerProvider}
-import pl.touk.nussknacker.engine.newdeployment
-import pl.touk.nussknacker.engine.util.AdditionalComponentConfigsForRuntimeExtractor
+import pl.touk.nussknacker.engine.{ModelData, newdeployment}
+import pl.touk.nussknacker.lite.manager.LiteDeploymentManager
-import scala.concurrent.duration.{DurationInt, FiniteDuration}
+import scala.concurrent.duration.DurationInt
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.util.{Failure, Success, Try}
-class EmbeddedDeploymentManagerProvider extends LiteDeploymentManagerProvider {
-
- override def createDeploymentManager(
- modelData: BaseModelData,
- dependencies: DeploymentManagerDependencies,
- engineConfig: Config,
- scenarioStateCacheTTL: Option[FiniteDuration]
- ): ValidatedNel[String, DeploymentManager] = {
- import dependencies._
- val strategy = forMode(engineConfig)(
- new StreamingDeploymentStrategy,
- RequestResponseDeploymentStrategy(engineConfig)
- )
-
- val metricRegistry = LiteMetricRegistryFactory.usingHostnameAsDefaultInstanceId.prepareRegistry(engineConfig)
- val contextPreparer = new LiteEngineRuntimeContextPreparer(new DropwizardMetricsProviderFactory(metricRegistry))
-
- strategy.open(modelData.asInvokableModelData, contextPreparer)
- valid(new EmbeddedDeploymentManager(modelData.asInvokableModelData, deployedScenariosProvider, strategy))
- }
-
- override protected def defaultRequestResponseSlug(scenarioName: ProcessName, config: Config): String =
- RequestResponseDeploymentStrategy.defaultSlug(scenarioName)
-
- override def additionalValidators(config: Config): List[CustomProcessValidator] = forMode(config)(
- Nil,
- List(EmbeddedRequestResponseScenarioValidator)
- )
-
- override def name: String = "lite-embedded"
-
-}
-
/*
FIXME: better synchronization - comment below isn't true anymore + make HA ready
Currently we assume that all operations that modify state (i.e. deploy and cancel) are performed from
diff --git a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManagerProvider.scala b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManagerProvider.scala
new file mode 100644
index 00000000000..6d450367e62
--- /dev/null
+++ b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManagerProvider.scala
@@ -0,0 +1,49 @@
+package pl.touk.nussknacker.engine.embedded
+
+import cats.data.Validated.valid
+import cats.data.ValidatedNel
+import com.typesafe.config.Config
+import pl.touk.nussknacker.engine.ModelData.BaseModelDataExt
+import pl.touk.nussknacker.engine.api.deployment.DeploymentManager
+import pl.touk.nussknacker.engine.api.process.ProcessName
+import pl.touk.nussknacker.engine.embedded.requestresponse.RequestResponseDeploymentStrategy
+import pl.touk.nussknacker.engine.embedded.streaming.StreamingDeploymentStrategy
+import pl.touk.nussknacker.engine.lite.api.runtimecontext.LiteEngineRuntimeContextPreparer
+import pl.touk.nussknacker.engine.lite.metrics.dropwizard.{DropwizardMetricsProviderFactory, LiteMetricRegistryFactory}
+import pl.touk.nussknacker.engine.{BaseModelData, CustomProcessValidator, DeploymentManagerDependencies}
+import pl.touk.nussknacker.lite.manager.LiteDeploymentManagerProvider
+
+import scala.concurrent.duration.FiniteDuration
+
+class EmbeddedDeploymentManagerProvider extends LiteDeploymentManagerProvider {
+
+ override val name: String = "lite-embedded"
+
+ override def createDeploymentManager(
+ modelData: BaseModelData,
+ dependencies: DeploymentManagerDependencies,
+ engineConfig: Config,
+ scenarioStateCacheTTL: Option[FiniteDuration]
+ ): ValidatedNel[String, DeploymentManager] = {
+ import dependencies._
+ val strategy = forMode(engineConfig)(
+ new StreamingDeploymentStrategy,
+ RequestResponseDeploymentStrategy(engineConfig)
+ )
+
+ val metricRegistry = LiteMetricRegistryFactory.usingHostnameAsDefaultInstanceId.prepareRegistry(engineConfig)
+ val contextPreparer = new LiteEngineRuntimeContextPreparer(new DropwizardMetricsProviderFactory(metricRegistry))
+
+ strategy.open(modelData.asInvokableModelData, contextPreparer)
+ valid(new EmbeddedDeploymentManager(modelData.asInvokableModelData, deployedScenariosProvider, strategy))
+ }
+
+ override protected def defaultRequestResponseSlug(scenarioName: ProcessName, config: Config): String =
+ RequestResponseDeploymentStrategy.defaultSlug(scenarioName)
+
+ override def additionalValidators(config: Config): List[CustomProcessValidator] = forMode(config)(
+ Nil,
+ List(EmbeddedRequestResponseScenarioValidator)
+ )
+
+}
diff --git a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/streaming/StreamingDeploymentStrategy.scala b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/streaming/StreamingDeploymentStrategy.scala
index f762e6ea85d..ef73f9fde4c 100644
--- a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/streaming/StreamingDeploymentStrategy.scala
+++ b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/streaming/StreamingDeploymentStrategy.scala
@@ -1,5 +1,6 @@
package pl.touk.nussknacker.engine.embedded.streaming
+import cats.effect.IO
import com.typesafe.scalalogging.LazyLogging
import pl.touk.nussknacker.engine.api.deployment.DeploymentStatus
import pl.touk.nussknacker.engine.api.{JobData, LiteStreamMetaData, ProcessVersion}
@@ -7,6 +8,7 @@ import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
import pl.touk.nussknacker.engine.embedded.{Deployment, DeploymentStrategy}
import pl.touk.nussknacker.engine.lite.TaskStatus
import pl.touk.nussknacker.engine.lite.kafka.{KafkaTransactionalScenarioInterpreter, LiteKafkaJobData}
+import pl.touk.nussknacker.engine.util.ExecutionContextWithIORuntimeAdapter
import scala.concurrent.ExecutionContext
import scala.util.{Failure, Success, Try}
@@ -36,17 +38,24 @@ class StreamingDeploymentStrategy extends DeploymentStrategy with LazyLogging {
)
)
interpreterTry.flatMap { interpreter =>
+ val ecWithRuntime = ExecutionContextWithIORuntimeAdapter.unsafeCreateFrom(ec)
val runTry = Try {
- val result = interpreter.run()
- result.onComplete {
- case Failure(exception) => handleUnexpectedError(jobData.processVersion, exception)
- case Success(_) => // closed without problems
- }
+ interpreter
+ .run()
+ .handleErrorWith { exception =>
+ handleUnexpectedError(jobData.processVersion, exception)
+ interpreter.close()
+ IO.raiseError(exception)
+ }
+ .unsafeRunSync()(ecWithRuntime.ioRuntime)
}
runTry.transform(
- _ => Success(new StreamingDeployment(interpreter)),
+ _ => {
+ ecWithRuntime.close()
+ Success(new StreamingDeployment(interpreter))
+ },
ex => {
- interpreter.close()
+ ecWithRuntime.close()
Failure(ex)
}
)
diff --git a/engine/lite/embeddedDeploymentManager/src/test/scala/pl/touk/nussknacker/streaming/embedded/StreamingEmbeddedDeploymentManagerTest.scala b/engine/lite/embeddedDeploymentManager/src/test/scala/pl/touk/nussknacker/streaming/embedded/StreamingEmbeddedDeploymentManagerTest.scala
index 20b4c20f83d..aaae53dec82 100644
--- a/engine/lite/embeddedDeploymentManager/src/test/scala/pl/touk/nussknacker/streaming/embedded/StreamingEmbeddedDeploymentManagerTest.scala
+++ b/engine/lite/embeddedDeploymentManager/src/test/scala/pl/touk/nussknacker/streaming/embedded/StreamingEmbeddedDeploymentManagerTest.scala
@@ -3,7 +3,7 @@ package pl.touk.nussknacker.streaming.embedded
import io.circe.Json
import io.circe.Json.{fromInt, fromString, obj}
import org.scalatest.OptionValues
-import pl.touk.nussknacker.engine.api.{DisplayJsonWithEncoder, ProcessVersion}
+import pl.touk.nussknacker.engine.api.ProcessVersion
import pl.touk.nussknacker.engine.api.deployment._
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus.ProblemStateStatus
diff --git a/engine/lite/integration-test/src/it/scala/pl/touk/nussknacker/engine/lite/utils/NuRuntimeDockerTestUtils.scala b/engine/lite/integration-test/src/it/scala/pl/touk/nussknacker/engine/lite/utils/NuRuntimeDockerTestUtils.scala
index 51321f6d6d1..d1a343f59bb 100644
--- a/engine/lite/integration-test/src/it/scala/pl/touk/nussknacker/engine/lite/utils/NuRuntimeDockerTestUtils.scala
+++ b/engine/lite/integration-test/src/it/scala/pl/touk/nussknacker/engine/lite/utils/NuRuntimeDockerTestUtils.scala
@@ -2,7 +2,7 @@ package pl.touk.nussknacker.engine.lite.utils
import com.dimafeng.testcontainers.GenericContainer
import org.slf4j.Logger
-import org.testcontainers.containers.output.Slf4jLogConsumer
+import org.testcontainers.containers.output.{OutputFrame, Slf4jLogConsumer}
import org.testcontainers.containers.wait.strategy.{Wait, WaitStrategy, WaitStrategyTarget}
import org.testcontainers.containers.{BindMode, Network}
import pl.touk.nussknacker.engine.util.config.ScalaMajorVersionConfig
@@ -10,6 +10,8 @@ import pl.touk.nussknacker.engine.version.BuildInfo
import java.io.File
import java.time.Duration
+import java.util.function.Consumer
+import scala.jdk.CollectionConverters._
object NuRuntimeDockerTestUtils {
@@ -44,8 +46,9 @@ object NuRuntimeDockerTestUtils {
)
val waitStrategy = if (checkReady) Wait.forHttp("/ready").forPort(runtimeApiPort) else DumbWaitStrategy
runtimeContainer.underlyingUnsafeContainer.setWaitStrategy(waitStrategy)
+ val logConsumer: Consumer[OutputFrame] = new Slf4jLogConsumer(logger)
+ runtimeContainer.underlyingUnsafeContainer.setLogConsumers((logConsumer :: Nil).asJava)
runtimeContainer.start()
- runtimeContainer.underlyingUnsafeContainer.followOutput(new Slf4jLogConsumer(logger))
runtimeContainer
}
diff --git a/engine/lite/k8sDeploymentManager/src/test/scala/pl/touk/nussknacker/k8s/manager/BaseK8sDeploymentManagerTest.scala b/engine/lite/k8sDeploymentManager/src/test/scala/pl/touk/nussknacker/k8s/manager/BaseK8sDeploymentManagerTest.scala
index b81fdaf74d3..77bcef5455c 100644
--- a/engine/lite/k8sDeploymentManager/src/test/scala/pl/touk/nussknacker/k8s/manager/BaseK8sDeploymentManagerTest.scala
+++ b/engine/lite/k8sDeploymentManager/src/test/scala/pl/touk/nussknacker/k8s/manager/BaseK8sDeploymentManagerTest.scala
@@ -1,6 +1,7 @@
package pl.touk.nussknacker.k8s.manager
import akka.actor.ActorSystem
+import akka.stream.scaladsl.Sink
import com.typesafe.config.ConfigValueFactory.{fromAnyRef, fromIterable}
import com.typesafe.config.{Config, ConfigFactory}
import com.typesafe.scalalogging.LazyLogging
@@ -151,6 +152,7 @@ class BaseK8sDeploymentManagerTest
def waitForRunning(version: ProcessVersion): Assertion = {
eventually {
val state = manager.getProcessStates(version.processName).map(_.value).futureValue
+ logger.debug(s"Current process state: $state")
state.flatMap(_.version) shouldBe List(version)
state.map(_.status) shouldBe List(SimpleStateStatus.Running)
}
diff --git a/engine/lite/k8sDeploymentManager/src/test/scala/pl/touk/nussknacker/k8s/manager/K8sDeploymentManagerKafkaTest.scala b/engine/lite/k8sDeploymentManager/src/test/scala/pl/touk/nussknacker/k8s/manager/K8sDeploymentManagerKafkaTest.scala
index 60c8b6d604e..344ae6e5850 100644
--- a/engine/lite/k8sDeploymentManager/src/test/scala/pl/touk/nussknacker/k8s/manager/K8sDeploymentManagerKafkaTest.scala
+++ b/engine/lite/k8sDeploymentManager/src/test/scala/pl/touk/nussknacker/k8s/manager/K8sDeploymentManagerKafkaTest.scala
@@ -152,33 +152,29 @@ class K8sDeploymentManagerKafkaTest
"resources",
fromMap(
Map(
- "requests" -> fromMap(Map("memory" -> "256Mi", "cpu" -> "800m").asJava),
- "limits" -> fromMap(Map("memory" -> "256Mi", "cpu" -> "800m").asJava)
+ "requests" -> fromMap(Map("memory" -> "512Mi", "cpu" -> "1024m").asJava),
+ "limits" -> fromMap(Map("memory" -> "512Mi", "cpu" -> "1024m").asJava)
).asJava
)
)
.root()
val f = createKafkaFixture(
deployConfig = kafkaDeployConfig
- .withValue("k8sDeploymentConfig.spec.replicas", fromAnyRef(3))
+ .withValue("k8sDeploymentConfig.spec.replicas", fromAnyRef(2))
.withValue(
"k8sDeploymentConfig.spec.template.spec.containers",
- fromIterable(
- List(
- runtimeContainerConfig
- ).asJava
- )
+ fromIterable(List(runtimeContainerConfig).asJava)
)
)
f.withRunningScenario {
eventually {
val pods = k8s.listSelected[ListResource[Pod]](requirementForName(f.version.processName)).futureValue.items
- pods.size shouldBe 3
+ pods.size shouldBe 2
forAll(pods.head.spec.get.containers) { container =>
container.resources shouldBe Some(
skuber.Resource.Requirements(
- limits = Map("cpu" -> Quantity("800m"), "memory" -> Quantity("256Mi")),
- requests = Map("cpu" -> Quantity("800m"), "memory" -> Quantity("256Mi"))
+ limits = Map("cpu" -> Quantity("1024m"), "memory" -> Quantity("512Mi")),
+ requests = Map("cpu" -> Quantity("1024m"), "memory" -> Quantity("512Mi"))
)
)
container.env should contain(EnvVar("ENV_VARIABLE", EnvVar.StringValue("VALUE")))
diff --git a/engine/lite/kafka/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/kafka/KafkaTransactionalScenarioInterpreter.scala b/engine/lite/kafka/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/kafka/KafkaTransactionalScenarioInterpreter.scala
index bf2c0d65098..2aa782eabf5 100644
--- a/engine/lite/kafka/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/kafka/KafkaTransactionalScenarioInterpreter.scala
+++ b/engine/lite/kafka/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/kafka/KafkaTransactionalScenarioInterpreter.scala
@@ -1,6 +1,8 @@
package pl.touk.nussknacker.engine.lite.kafka
import akka.http.scaladsl.server.Route
+import cats.effect.IO
+import com.typesafe.scalalogging.LazyLogging
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.clients.producer.ProducerRecord
import pl.touk.nussknacker.engine.Interpreter.FutureShape
@@ -22,6 +24,7 @@ import pl.touk.nussknacker.engine.lite.{
ScenarioInterpreterFactory,
TestRunner
}
+import pl.touk.nussknacker.engine.util.ExecutionContextWithIORuntimeAdapter
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
@@ -97,7 +100,8 @@ class KafkaTransactionalScenarioInterpreter private[kafka] (
modelData: ModelData,
engineRuntimeContextPreparer: LiteEngineRuntimeContextPreparer
)(implicit ec: ExecutionContext)
- extends RunnableScenarioInterpreter {
+ extends RunnableScenarioInterpreter
+ with LazyLogging {
override def status(): TaskStatus = taskRunner.status()
@@ -121,10 +125,24 @@ class KafkaTransactionalScenarioInterpreter private[kafka] (
context.metricsProvider
)
- override def run(): Future[Unit] = {
- sourceMetrics.registerOwnMetrics(context.metricsProvider)
- interpreter.open(context)
- taskRunner.run(ec)
+ override def run(): IO[Unit] = {
+ for {
+ _ <- IO.delay(sourceMetrics.registerOwnMetrics(context.metricsProvider))
+ _ <- IO.delay(interpreter.open(context))
+ _ <- ExecutionContextWithIORuntimeAdapter
+ .createFrom(ec)
+ .use { adapter =>
+ IO.delay {
+ taskRunner
+ .run(adapter)
+ .unsafeRunAsync {
+ case Left(ex) =>
+ logger.error("Task runner failed", ex)
+ case Right(_) =>
+ }(adapter.ioRuntime)
+ }
+ }
+ } yield ()
}
override def close(): Unit = {
diff --git a/engine/lite/kafka/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/kafka/LoopUntilClosed.scala b/engine/lite/kafka/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/kafka/LoopUntilClosed.scala
index ba4ca505b6a..65302fd1c30 100644
--- a/engine/lite/kafka/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/kafka/LoopUntilClosed.scala
+++ b/engine/lite/kafka/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/kafka/LoopUntilClosed.scala
@@ -1,6 +1,7 @@
package pl.touk.nussknacker.engine.lite.kafka
import cats.data.NonEmptyList
+import cats.effect.IO
import com.typesafe.scalalogging.LazyLogging
import org.apache.commons.lang3.concurrent.BasicThreadFactory
import org.apache.kafka.common.errors.InterruptException
@@ -44,9 +45,9 @@ class TaskRunner(
new LoopUntilClosed(taskId, () => singleRun(taskId), waitAfterFailureDelay, metricsProviderForScenario)
}.toList
- def run(implicit ec: ExecutionContext): Future[Unit] = {
- Future.sequence(runAllTasks()).map(_ => ())
- }
+ def run(implicit ec: ExecutionContext): IO[Nothing] = IO.fromFuture {
+ IO(Future.sequence(runAllTasks()).map(_ => ()))
+ }.foreverM
/*
This is a bit tricky, we split the run method as we have to use two different ExecutionContextes:
diff --git a/engine/lite/kafka/runtime/src/test/scala/pl/touk/nussknacker/engine/lite/kafka/KafkaTransactionalScenarioInterpreterTest.scala b/engine/lite/kafka/runtime/src/test/scala/pl/touk/nussknacker/engine/lite/kafka/KafkaTransactionalScenarioInterpreterTest.scala
index bd2d8cc6e6c..5e74151dfc9 100644
--- a/engine/lite/kafka/runtime/src/test/scala/pl/touk/nussknacker/engine/lite/kafka/KafkaTransactionalScenarioInterpreterTest.scala
+++ b/engine/lite/kafka/runtime/src/test/scala/pl/touk/nussknacker/engine/lite/kafka/KafkaTransactionalScenarioInterpreterTest.scala
@@ -1,5 +1,6 @@
package pl.touk.nussknacker.engine.lite.kafka
+import cats.effect.unsafe.IORuntime
import com.typesafe.config.ConfigValueFactory.fromAnyRef
import com.typesafe.config.{Config, ConfigFactory}
import com.typesafe.scalalogging.LazyLogging
@@ -25,12 +26,12 @@ import java.time.Instant
import java.time.temporal.ChronoUnit
import java.util.Collections
import scala.concurrent.ExecutionContext.Implicits.global
-import scala.concurrent.duration.{Duration, _}
-import scala.concurrent.{Await, Future}
+import scala.concurrent.Future
+import scala.concurrent.duration._
import scala.jdk.CollectionConverters._
import scala.language.higherKinds
import scala.reflect.ClassTag
-import scala.util.{Failure, Try, Using}
+import scala.util.{Try, Using}
class KafkaTransactionalScenarioInterpreterTest
extends FixtureAnyFunSuite
@@ -250,118 +251,6 @@ class KafkaTransactionalScenarioInterpreterTest
}
}
- test("detects fatal failure in close") { fixture =>
- val inputTopic = fixture.inputTopic
- val outputTopic = fixture.outputTopic
-
- val failureMessage = "EXPECTED_TO_HAPPEN"
-
- val scenario: CanonicalProcess = passThroughScenario(fixture)
- val modelDataToUse = modelData(adjustConfig(fixture.errorTopic, config))
- val jobData = JobData(scenario.metaData, ProcessVersion.empty.copy(processName = scenario.metaData.name))
- val liteKafkaJobData = LiteKafkaJobData(tasksCount = 1)
-
- val interpreter = ScenarioInterpreterFactory
- .createInterpreter[Future, Input, Output](scenario, jobData, modelDataToUse)
- .valueOr(errors => throw new IllegalArgumentException(s"Failed to compile: $errors"))
- val kafkaInterpreter = new KafkaTransactionalScenarioInterpreter(
- interpreter,
- scenario,
- jobData,
- liteKafkaJobData,
- modelDataToUse,
- preparer
- ) {
- override private[kafka] def createScenarioTaskRun(taskId: String): Task = {
- val original = super.createScenarioTaskRun(taskId)
- // we simulate throwing exception on shutdown
- new Task {
- override def init(): Unit = original.init()
-
- override def run(): Unit = original.run()
-
- override def close(): Unit = {
- original.close()
- logger.info("Original closed, throwing expected exception")
- throw new Exception(failureMessage)
- }
- }
-
- }
- }
- val runResult = Using.resource(kafkaInterpreter) { interpreter =>
- val runResult = interpreter.run()
- // we wait for one message to make sure everything is already running
- kafkaClient.sendMessage(inputTopic, "dummy").futureValue
- kafkaClient.createConsumer().consumeWithConsumerRecord(outputTopic).head
- runResult
- }
- Try(Await.result(runResult, 10 seconds)) match {
- case Failure(exception) =>
- exception.getMessage shouldBe failureMessage
- case result => throw new AssertionError(s"Should fail with completion exception, instead got: $result")
- }
-
- }
-
- test("detects fatal failure in run") { fixture =>
- val scenario: CanonicalProcess = passThroughScenario(fixture)
- val modelDataToUse = modelData(adjustConfig(fixture.errorTopic, config))
- val jobData = JobData(scenario.metaData, ProcessVersion.empty.copy(processName = scenario.metaData.name))
- val liteKafkaJobData = LiteKafkaJobData(tasksCount = 1)
-
- var initAttempts = 0
- var runAttempts = 0
-
- val interpreter = ScenarioInterpreterFactory
- .createInterpreter[Future, Input, Output](scenario, jobData, modelDataToUse)
- .valueOr(errors => throw new IllegalArgumentException(s"Failed to compile: $errors"))
- val kafkaInterpreter = new KafkaTransactionalScenarioInterpreter(
- interpreter,
- scenario,
- jobData,
- liteKafkaJobData,
- modelDataToUse,
- preparer
- ) {
- override private[kafka] def createScenarioTaskRun(taskId: String): Task = {
- val original = super.createScenarioTaskRun(taskId)
- // we simulate throwing exception on shutdown
- new Task {
- override def init(): Unit = {
- initAttempts += 1
- original.init()
- }
-
- override def run(): Unit = {
- runAttempts += 1
- if (runAttempts == 1) {
- throw new Exception("failure")
- }
- }
-
- override def close(): Unit = {
- original.close()
- }
- }
- }
- }
- val (runResult, attemptGauges, restartingGauges) = Using.resource(kafkaInterpreter) { interpreter =>
- val result = interpreter.run()
- // TODO: figure out how to wait for restarting tasks after failure?
- Thread.sleep(2000)
- // we have to get gauge here, as metrics are unregistered in close
- (result, metricsForName[Gauge[Int]]("task.attempt"), metricsForName[Gauge[Int]]("task.restarting"))
- }
-
- Await.result(runResult, 10 seconds)
- initAttempts should be > 1
- // we check if there weren't any errors in init causing that run next run won't be executed anymore
- runAttempts should be > 1
- attemptGauges.exists(_._2.getValue > 1)
- restartingGauges.exists(_._2.getValue > 1)
- }
-
test("detects source failure") { fixture =>
val scenario: CanonicalProcess = passThroughScenario(fixture)
@@ -435,7 +324,7 @@ class KafkaTransactionalScenarioInterpreterTest
val interpreter = ScenarioInterpreterFactory
.createInterpreter[Future, Input, Output](scenario, jobData, modelDataToUse)
.valueOr(errors => throw new IllegalArgumentException(s"Failed to compile: $errors"))
- val (runResult, output) = Using.resource(
+ val output = Using.resource(
new KafkaTransactionalScenarioInterpreter(
interpreter,
scenario,
@@ -445,10 +334,9 @@ class KafkaTransactionalScenarioInterpreterTest
preparer
)
) { interpreter =>
- val result = interpreter.run()
- (result, action)
+ interpreter.run().timeout(10 seconds).unsafeRunSync()(IORuntime.global)
+ action
}
- Await.result(runResult, 10 seconds)
output
}
diff --git a/engine/lite/request-response/runtime/src/main/scala/pl/touk/nussknacker/engine/requestresponse/RequestResponseRunnableScenarioInterpreter.scala b/engine/lite/request-response/runtime/src/main/scala/pl/touk/nussknacker/engine/requestresponse/RequestResponseRunnableScenarioInterpreter.scala
index 419e687b7bd..cb24d62035b 100644
--- a/engine/lite/request-response/runtime/src/main/scala/pl/touk/nussknacker/engine/requestresponse/RequestResponseRunnableScenarioInterpreter.scala
+++ b/engine/lite/request-response/runtime/src/main/scala/pl/touk/nussknacker/engine/requestresponse/RequestResponseRunnableScenarioInterpreter.scala
@@ -1,8 +1,8 @@
package pl.touk.nussknacker.engine.requestresponse
import akka.http.scaladsl.server.{Directives, Route}
+import cats.effect.IO
import com.typesafe.scalalogging.LazyLogging
-import org.apache.commons.lang3.concurrent.BasicThreadFactory
import pl.touk.nussknacker.engine.ModelData
import pl.touk.nussknacker.engine.api.JobData
import pl.touk.nussknacker.engine.api.process.ComponentUseCase
@@ -13,8 +13,7 @@ import pl.touk.nussknacker.engine.lite.{RunnableScenarioInterpreter, TaskStatus}
import pl.touk.nussknacker.engine.requestresponse.RequestResponseInterpreter.RequestResponseScenarioInterpreter
import pl.touk.nussknacker.engine.resultcollector.ProductionServiceInvocationCollector
-import java.util.concurrent.Executors
-import scala.concurrent.{ExecutionContext, Future, blocking}
+import scala.concurrent.{ExecutionContext, Future}
class RequestResponseRunnableScenarioInterpreter(
jobData: JobData,
@@ -46,35 +45,17 @@ class RequestResponseRunnableScenarioInterpreter(
}
.valueOr(errors => throw new IllegalArgumentException(s"Failed to compile: $errors"))
- override def run(): Future[Unit] = {
- val threadFactory = new BasicThreadFactory.Builder()
- .namingPattern(s"wait-until-closed")
- .build()
- // run waiting in separate thread to not exhaust main actor system thread pool
- val executionContext = ExecutionContext.fromExecutor(Executors.newSingleThreadExecutor(threadFactory))
- Future {
- waitUntilClosed()
- }(executionContext)
- }
-
- private def waitUntilClosed(): Unit = {
- blocking {
- synchronized {
- while (!closed) {
- wait()
- }
- }
- }
- }
+ override def run(): IO[Unit] = IO.unit
override def status(): TaskStatus = TaskStatus.Running
override def close(): Unit = {
synchronized {
- closed = true
- notify()
+ if (!closed) {
+ interpreter.close()
+ closed = true
+ }
}
- interpreter.close()
}
override val routes: Option[Route] = {
diff --git a/engine/lite/runtime-app/src/main/scala/pl/touk/nussknacker/engine/lite/app/NuRuntimeApp.scala b/engine/lite/runtime-app/src/main/scala/pl/touk/nussknacker/engine/lite/app/NuRuntimeApp.scala
index 2f33594193a..63d503d22ca 100644
--- a/engine/lite/runtime-app/src/main/scala/pl/touk/nussknacker/engine/lite/app/NuRuntimeApp.scala
+++ b/engine/lite/runtime-app/src/main/scala/pl/touk/nussknacker/engine/lite/app/NuRuntimeApp.scala
@@ -2,127 +2,172 @@ package pl.touk.nussknacker.engine.lite.app
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
-import akka.http.scaladsl.Http.ServerBinding
-import akka.http.scaladsl.server.Directives
+import akka.http.scaladsl.server.{Directives, Route}
+import cats.data.{NonEmptyList, Validated}
+import cats.effect.{ExitCode, IO, IOApp, Resource}
import com.typesafe.config.{Config, ConfigFactory}
import com.typesafe.scalalogging.LazyLogging
+import net.ceedubs.ficus.readers.ArbitraryTypeReader.arbitraryTypeValueReader
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
+import pl.touk.nussknacker.engine.lite.RunnableScenarioInterpreter
+import pl.touk.nussknacker.engine.lite.app.NuRuntimeApp.AppStartingError.{CannotParseScenario, MissingArgument}
+import pl.touk.nussknacker.engine.lite.app.RunnableScenarioInterpreterFactory.prepareScenarioInterpreter
import pl.touk.nussknacker.engine.marshall.ScenarioParser
import pl.touk.nussknacker.engine.util.config.ConfigFactoryExt
+import pl.touk.nussknacker.engine.util.config.CustomFicusInstances._
import pl.touk.nussknacker.engine.util.{JavaClassVersionChecker, ResourceLoader, SLF4JBridgeHandlerRegistrar, UriUtils}
import java.nio.file.Path
import scala.concurrent.duration._
-import scala.concurrent.{Await, Future}
import scala.util.control.NonFatal
-object NuRuntimeApp extends App with LazyLogging {
-
- import net.ceedubs.ficus.readers.ArbitraryTypeReader.arbitraryTypeValueReader
- import pl.touk.nussknacker.engine.util.config.CustomFicusInstances._
-
- JavaClassVersionChecker.check()
- SLF4JBridgeHandlerRegistrar.register()
-
- val (scenarioFileLocation, deploymentConfigLocation) = parseArgs
- val scenario = parseScenario(scenarioFileLocation)
- val deploymentConfig = parseDeploymentConfig(deploymentConfigLocation)
-
- val runtimeConfig = {
- val configLocationsProperty: String = "nussknacker.config.locations"
- val locationsPropertyValueOpt = Option(System.getProperty(configLocationsProperty))
- val locations = locationsPropertyValueOpt.map(UriUtils.extractListOfLocations).getOrElse(List.empty)
- ConfigFactory.load(new ConfigFactoryExt(getClass.getClassLoader).parseUnresolved(locations))
+object NuRuntimeApp extends IOApp with LazyLogging {
+
+ override def run(args: List[String]): IO[ExitCode] = {
+ createProgram(args).useForever
+ .handleError {
+ case AppStartingError.MissingArgument(argumentName) =>
+ logger.error(s"Missing $argumentName argument!")
+ logger.error("")
+ logger.error("Usage: ./run.sh scenario_file_location.json deployment_config_location.conf")
+ ExitCode(1)
+ case AppStartingError.CannotParseScenario(errors) =>
+ logger.error("Scenario file is not a valid json")
+ logger.error(s"Errors found: ${errors.toList.mkString(", ")}")
+ ExitCode(2)
+ case NonFatal(ex) =>
+ logger.error("Application failed", ex)
+ ExitCode.Error
+ }
+ .as(ExitCode.Success)
}
- val httpConfig = runtimeConfig.as[HttpBindingConfig]("http")
-
- implicit val system: ActorSystem = ActorSystem("nu-lite-runtime", runtimeConfig)
-
- import system.dispatcher
-
- private val akkaHttpCloseTimeout = 10 seconds
-
- // Because actor system creates non-daemon threads, all exceptions from current thread will be suppressed and process
- // will be still alive even if something fail (like scenarioInterpreter creation)
- val exitCode =
- try {
- runAfterActorSystemCreation()
- 0
- } catch {
- case NonFatal(ex) =>
- logger.error("Exception during runtime execution", ex)
- 1
- } finally {
- Await.result(system.terminate(), 5.seconds)
- }
+ private def createProgram(args: List[String]) = for {
+ parsedArgs <- parseArgs(args)
+ (scenarioFileLocation, deploymentConfigLocation) = parsedArgs
+ runtimeConfig <- loadRuntimeConfig()
+ deploymentConfig <- loadDeploymentConfig(deploymentConfigLocation)
+ _ <- doPrerequisites()
+ system <- createActorSystem(runtimeConfig)
+ scenario <- parseScenario(scenarioFileLocation)
+ scenarioInterpreter <- createScenarioInterpreter(system, runtimeConfig, deploymentConfig, scenario)
+ routes = createRoutes(system, scenarioInterpreter)
+ _ <- createAndRunServer(runtimeConfig, routes)(system)
+ } yield ()
+
+ private def doPrerequisites() = Resource
+ .make(
+ acquire = for {
+ _ <- IO.delay(logger.info("Running NuRuntimeApp"))
+ _ <- IO.delay {
+ JavaClassVersionChecker.check()
+ SLF4JBridgeHandlerRegistrar.register()
+ }
+ } yield ()
+ )(
+ release = _ => IO.delay(logger.info("Closing NuRuntimeApp"))
+ )
+
+ private def loadRuntimeConfig() = {
+ Resource.eval(IO.delay {
+ val configLocationsProperty: String = "nussknacker.config.locations"
+ val locationsPropertyValueOpt = Option(System.getProperty(configLocationsProperty))
+ val locations = locationsPropertyValueOpt.map(UriUtils.extractListOfLocations).getOrElse(List.empty)
+ ConfigFactory.load(new ConfigFactoryExt(getClass.getClassLoader).parseUnresolved(locations))
+ })
+ }
- System.exit(exitCode)
+ private def createActorSystem(config: Config) = {
+ Resource.make(
+ acquire = IO(ActorSystem("nu-lite-runtime", config))
+ )(
+ release = system => IO.fromFuture(IO(system.terminate())).map(_ => ()).timeout(5 seconds)
+ )
+ }
- private def runAfterActorSystemCreation(): Unit = {
- val scenarioInterpreter =
- RunnableScenarioInterpreterFactory.prepareScenarioInterpreter(scenario, runtimeConfig, deploymentConfig, system)
+ private def createScenarioInterpreter(
+ system: ActorSystem,
+ runtimeConfig: Config,
+ deploymentConfig: Config,
+ scenario: CanonicalProcess
+ ) = {
+ for {
+ _ <- Resource.eval(IO.delay(logger.info("Preparing RunnableScenarioInterpreter")))
+ scenarioInterpreter <- prepareScenarioInterpreter(scenario, runtimeConfig, deploymentConfig, system)
+ _ <- Resource
+ .make(
+ acquire = for {
+ _ <- IO.delay(logger.info("Running RunnableScenarioInterpreter"))
+ _ <- scenarioInterpreter.run()
+ } yield ()
+ )(
+ release = _ => IO.delay(logger.info("Closing RunnableScenarioInterpreter"))
+ )
+ } yield scenarioInterpreter
+ }
+ private def createRoutes(system: ActorSystem, scenarioInterpreter: RunnableScenarioInterpreter): Route = {
val healthCheckProvider = new HealthCheckRoutesProvider(system, scenarioInterpreter)
+ Directives.concat(scenarioInterpreter.routes.toList ::: healthCheckProvider.routes :: Nil: _*)
+ }
- val httpServer = Http().newServerAt(interface = httpConfig.interface, port = httpConfig.port)
-
- val runFuture = scenarioInterpreter.run()
- val healthCheckRoutes = healthCheckProvider.routes
- val routes = Directives.concat(scenarioInterpreter.routes.toList ::: healthCheckRoutes :: Nil: _*)
-
- Runtime.getRuntime.addShutdownHook(new Thread() {
- override def run(): Unit = {
- logger.info("Closing RunnableScenarioInterpreter")
- scenarioInterpreter.close()
- }
- })
-
- @volatile var server: ServerBinding = null
- val boundRoutesFuture = httpServer.bind(routes).map { b =>
- logger.info(s"Http server started on ${httpConfig.interface}:${httpConfig.port}")
- server = b
- }
-
- try {
- Await.result(Future.sequence(List(runFuture, boundRoutesFuture)), Duration.Inf)
- } finally {
- logger.info("Closing application NuRuntimeApp")
- scenarioInterpreter.close() // in case of exception during binding
- if (server != null) Await.ready(server.terminate(akkaHttpCloseTimeout), akkaHttpCloseTimeout)
- }
+ private def createAndRunServer(runtimeConfig: Config, routes: Route)(
+ implicit system: ActorSystem
+ ): Resource[IO, Unit] = {
+ Resource
+ .make(
+ acquire = for {
+ httpConfig <- IO.delay(runtimeConfig.as[HttpBindingConfig]("http"))
+ _ <- IO.delay(logger.info(s"Starting HTTP server on ${httpConfig.interface}:${httpConfig.port}"))
+ binding <- IO.fromFuture {
+ IO(
+ Http()
+ .newServerAt(interface = httpConfig.interface, port = httpConfig.port)
+ .bind(routes)
+ )
+ }
+ } yield binding
+ )(
+ release = binding =>
+ for {
+ _ <- IO.delay(logger.info("Stopping HTTP server"))
+ _ <- IO.fromFuture(IO(binding.terminate(10 seconds)))
+ } yield ()
+ )
+ .map(_ => ())
}
- private def parseArgs: (Path, Path) = {
+ private def parseArgs(args: List[String]): Resource[IO, (Path, Path)] = Resource.eval(IO.delay {
if (args.length < 1) {
- missingArgumentError("scenario_file_location")
+ throw MissingArgument("scenario_file_location")
} else if (args.length < 2) {
- missingArgumentError("deployment_config_location")
+ throw MissingArgument("deployment_config_location")
}
(Path.of(args(0)), Path.of(args(1)))
+ })
+
+ private def parseScenario(location: Path) = Resource.eval {
+ IO.delay {
+ val scenarioString = ResourceLoader.load(location)
+ logger.info(s"Running scenario: $scenarioString")
+ ScenarioParser.parse(scenarioString) match {
+ case Validated.Valid(scenario) => scenario
+ case Validated.Invalid(errors) => throw CannotParseScenario(errors)
+ }
+ }
}
- private def missingArgumentError(argumentName: String): Unit = {
- System.err.println(s"Missing $argumentName argument!")
- System.err.println("")
- System.err.println("Usage: ./run.sh scenario_file_location.json deployment_config_location.conf")
- sys.exit(1)
- }
-
- private def parseScenario(location: Path): CanonicalProcess = {
- val scenarioString = ResourceLoader.load(location)
- logger.info(s"Running scenario: $scenarioString")
-
- val parsedScenario = ScenarioParser.parse(scenarioString)
- parsedScenario.valueOr { err =>
- System.err.println("Scenario file is not a valid json")
- System.err.println(s"Errors found: ${err.toList.mkString(", ")}")
- sys.exit(2)
+ private def loadDeploymentConfig(path: Path) = Resource.eval {
+ IO.delay {
+ ConfigFactory.parseFile(path.toFile)
}
}
- private def parseDeploymentConfig(path: Path): Config = {
- ConfigFactory.parseFile(path.toFile)
+ sealed trait AppStartingError extends Throwable
+
+ object AppStartingError {
+ final case class MissingArgument(name: String) extends AppStartingError
+ final case class CannotParseScenario(errors: NonEmptyList[String]) extends AppStartingError
}
}
diff --git a/engine/lite/runtime-app/src/main/scala/pl/touk/nussknacker/engine/lite/app/RunnableScenarioInterpreterFactory.scala b/engine/lite/runtime-app/src/main/scala/pl/touk/nussknacker/engine/lite/app/RunnableScenarioInterpreterFactory.scala
index d4cd99dc792..d35a894d967 100644
--- a/engine/lite/runtime-app/src/main/scala/pl/touk/nussknacker/engine/lite/app/RunnableScenarioInterpreterFactory.scala
+++ b/engine/lite/runtime-app/src/main/scala/pl/touk/nussknacker/engine/lite/app/RunnableScenarioInterpreterFactory.scala
@@ -1,10 +1,10 @@
package pl.touk.nussknacker.engine.lite.app
import akka.actor.ActorSystem
+import cats.effect.{IO, Resource}
import com.typesafe.config.Config
import com.typesafe.scalalogging.LazyLogging
import net.ceedubs.ficus.readers.ArbitraryTypeReader.arbitraryTypeValueReader
-import pl.touk.nussknacker.engine.{ModelConfigs, ModelData}
import pl.touk.nussknacker.engine.api.{JobData, LiteStreamMetaData, ProcessVersion, RequestResponseMetaData}
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
import pl.touk.nussknacker.engine.lite.RunnableScenarioInterpreter
@@ -13,7 +13,8 @@ import pl.touk.nussknacker.engine.lite.kafka.{KafkaTransactionalScenarioInterpre
import pl.touk.nussknacker.engine.lite.metrics.dropwizard.{DropwizardMetricsProviderFactory, LiteMetricRegistryFactory}
import pl.touk.nussknacker.engine.requestresponse.{RequestResponseConfig, RequestResponseRunnableScenarioInterpreter}
import pl.touk.nussknacker.engine.util.config.CustomFicusInstances._
-import pl.touk.nussknacker.engine.util.loader.ModelClassLoader
+import pl.touk.nussknacker.engine.util.loader.{DeploymentManagersClassLoader, ModelClassLoader}
+import pl.touk.nussknacker.engine.{ModelConfigs, ModelData}
object RunnableScenarioInterpreterFactory extends LazyLogging {
@@ -22,19 +23,30 @@ object RunnableScenarioInterpreterFactory extends LazyLogging {
runtimeConfig: Config,
deploymentConfig: Config,
system: ActorSystem
- ): RunnableScenarioInterpreter = {
- val modelConfig: Config = runtimeConfig.getConfig("modelConfig")
- val modelData = ModelData.duringExecution(
- ModelConfigs(modelConfig),
- ModelClassLoader(modelConfig.as[List[String]]("classPath"), workingDirectoryOpt = None),
- resolveConfigs = true
- )
- val metricRegistry = prepareMetricRegistry(runtimeConfig)
- val preparer = new LiteEngineRuntimeContextPreparer(new DropwizardMetricsProviderFactory(metricRegistry))
- // TODO Pass correct ProcessVersion and DeploymentData
- val jobData = JobData(scenario.metaData, ProcessVersion.empty.copy(processName = scenario.metaData.name))
+ ): Resource[IO, RunnableScenarioInterpreter] = {
+ for {
+ deploymentManagersClassLoader <- DeploymentManagersClassLoader.create(List.empty)
+ scenarioInterpreter <- Resource
+ .make(
+ acquire = IO.delay {
+ val modelConfig = runtimeConfig.getConfig("modelConfig")
+ val urls = modelConfig.as[List[String]]("classPath")
+ val modelData = ModelData.duringExecution(
+ ModelConfigs(modelConfig),
+ ModelClassLoader(urls, workingDirectoryOpt = None, deploymentManagersClassLoader),
+ resolveConfigs = true
+ )
+ val metricRegistry = prepareMetricRegistry(runtimeConfig)
+ val preparer = new LiteEngineRuntimeContextPreparer(new DropwizardMetricsProviderFactory(metricRegistry))
+ // TODO Pass correct ProcessVersion and DeploymentData
+ val jobData = JobData(scenario.metaData, ProcessVersion.empty.copy(processName = scenario.metaData.name))
- prepareScenarioInterpreter(scenario, runtimeConfig, jobData, deploymentConfig, modelData, preparer)(system)
+ prepareScenarioInterpreter(scenario, runtimeConfig, jobData, deploymentConfig, modelData, preparer)(system)
+ }
+ )(
+ release = scenarioInterpreter => IO.delay(scenarioInterpreter.close())
+ )
+ } yield scenarioInterpreter
}
private def prepareScenarioInterpreter(
diff --git a/engine/lite/runtime-app/src/universal/bin/nu-engine-entrypoint.sh b/engine/lite/runtime-app/src/universal/bin/nu-engine-entrypoint.sh
index 86c5a00c014..02702c931d2 100755
--- a/engine/lite/runtime-app/src/universal/bin/nu-engine-entrypoint.sh
+++ b/engine/lite/runtime-app/src/universal/bin/nu-engine-entrypoint.sh
@@ -11,6 +11,7 @@ fi
NUSSKNACKER_DIR=`dirname "$0" | xargs -I{} readlink -f {}/..`
CONF_DIR="$NUSSKNACKER_DIR/conf"
LIB_DIR="$NUSSKNACKER_DIR/lib"
+LOGBACK_FILE=${LOGBACK_FILE:-$CONF_DIR/logback.xml}
CLASSPATH=${CLASSPATH:-$LIB_DIR/*}
CONFIG_FILE=${CONFIG_FILE-"$CONF_DIR/application.conf"}
diff --git a/engine/lite/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/RunnableScenarioInterpreter.scala b/engine/lite/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/RunnableScenarioInterpreter.scala
index 33cc8cded37..0d353bdf240 100644
--- a/engine/lite/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/RunnableScenarioInterpreter.scala
+++ b/engine/lite/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/RunnableScenarioInterpreter.scala
@@ -1,12 +1,11 @@
package pl.touk.nussknacker.engine.lite
-import pl.touk.nussknacker.engine.lite.TaskStatus.TaskStatus
import akka.http.scaladsl.server.Route
-
-import scala.concurrent.Future
+import cats.effect.IO
+import pl.touk.nussknacker.engine.lite.TaskStatus.TaskStatus
trait RunnableScenarioInterpreter extends AutoCloseable {
def routes: Option[Route]
- def run(): Future[Unit]
+ def run(): IO[Unit]
def status(): TaskStatus
}
diff --git a/engine/lite/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/ScenarioInterpreterFactory.scala b/engine/lite/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/ScenarioInterpreterFactory.scala
index b1ba4825aa1..7e00b2ce68b 100644
--- a/engine/lite/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/ScenarioInterpreterFactory.scala
+++ b/engine/lite/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/ScenarioInterpreterFactory.scala
@@ -88,7 +88,7 @@ object ScenarioInterpreterFactory {
modelData.modelDefinitionWithClasses,
modelData.engineDictRegistry,
listeners,
- modelData.modelClassLoader.classLoader,
+ modelData.modelClassLoader,
resultCollector,
componentUseCase,
modelData.customProcessValidator
@@ -296,7 +296,8 @@ object ScenarioInterpreterFactory {
}
)
- case other => throw new IllegalArgumentException(s"Not supported sink: $other")
+ case other =>
+ throw new IllegalArgumentException(s"Not supported sink: $other")
}
private def compilePartInvokers(parts: List[SubsequentPart]): CompilationResult[Map[String, PartInterpreterType]] =
diff --git a/examples/installation/docker-compose.yml b/examples/installation/docker-compose.yml
index 8bc5c3e84ad..030dd6a73b7 100644
--- a/examples/installation/docker-compose.yml
+++ b/examples/installation/docker-compose.yml
@@ -7,6 +7,7 @@ services:
restart: unless-stopped
ports:
- 8080:8080
+ - 8181:8181
depends_on:
grafana:
condition: service_healthy
diff --git a/examples/installation/nginx/nginx.conf b/examples/installation/nginx/nginx.conf
index ac3b74ba535..bf82dec8d7c 100644
--- a/examples/installation/nginx/nginx.conf
+++ b/examples/installation/nginx/nginx.conf
@@ -19,6 +19,14 @@ http {
proxy_pass http://designer:8080;
}
}
+ # Exposes the Request-Response Lite Embedded services defined in Nu
+ server {
+ listen 8181;
+
+ location / {
+ proxy_pass http://designer:8181/;
+ }
+ }
}
events {}
diff --git a/extensions-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/ProcessAction.scala b/extensions-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/ProcessAction.scala
index 0ff65f26373..861e00ed1a3 100644
--- a/extensions-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/ProcessAction.scala
+++ b/extensions-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/ProcessAction.scala
@@ -9,7 +9,7 @@ import pl.touk.nussknacker.engine.api.process.{ProcessId, VersionId}
import java.time.Instant
import java.util.UUID
-// todo NU-1772
+// TODO: NU-1772
// - should be eventually replaced with pl.touk.nussknacker.engine.api.deployment.ScenarioActivity
// - this class is currently a compatibility layer for older fragments of code, new code should use ScenarioActivity
@JsonCodec case class ProcessAction(
diff --git a/nussknacker-dist/src/universal/bin/nussknacker-entrypoint.sh b/nussknacker-dist/src/universal/bin/nussknacker-entrypoint.sh
index 4482a70497a..167de644c44 100755
--- a/nussknacker-dist/src/universal/bin/nussknacker-entrypoint.sh
+++ b/nussknacker-dist/src/universal/bin/nussknacker-entrypoint.sh
@@ -11,9 +11,8 @@ fi
NUSSKNACKER_DIR=`dirname "$0" | xargs -I{} readlink -f {}/..`
CONF_DIR="$NUSSKNACKER_DIR/conf"
LIB_DIR="$NUSSKNACKER_DIR/lib"
-MANAGERS_DIR="$NUSSKNACKER_DIR/managers"
-CLASSPATH=${CLASSPATH:-$LIB_DIR/*:$MANAGERS_DIR/*}
+CLASSPATH=${CLASSPATH:-$LIB_DIR/*}
CONFIG_FILE=${CONFIG_FILE-"$CONF_DIR/application.conf"}
LOGBACK_FILE=${LOGBACK_FILE-"$CONF_DIR/docker-logback.xml"}
@@ -22,6 +21,7 @@ WORKING_DIR=${WORKING_DIR:-$NUSSKNACKER_DIR}
export AUTHENTICATION_USERS_FILE=${AUTHENTICATION_USERS_FILE:-$CONF_DIR/users.conf}
export TABLES_DEFINITION_FILE=${TABLES_DEFINITION_FILE:-$CONF_DIR/dev-tables-definition.sql}
export STORAGE_DIR="${STORAGE_DIR:-$WORKING_DIR/storage}"
+export MANAGERS_DIR="${MANAGERS_DIR:-$WORKING_DIR/managers}"
if [ "$PROMETHEUS_METRICS_PORT" == "" ]; then
JAVA_PROMETHEUS_OPTS=""
diff --git a/nussknacker-dist/src/universal/bin/run.sh b/nussknacker-dist/src/universal/bin/run.sh
index 8d6118ef81c..656692efc72 100755
--- a/nussknacker-dist/src/universal/bin/run.sh
+++ b/nussknacker-dist/src/universal/bin/run.sh
@@ -8,9 +8,8 @@ DEFAULT_NUSSKNACKER_DIR=`dirname "$0" | xargs -I{} readlink -f {}/..`
NUSSKNACKER_DIR=${NUSSKNACKER_DIR:-$DEFAULT_NUSSKNACKER_DIR}
CONF_DIR=${CONF_DIR:-"$NUSSKNACKER_DIR/conf"}
LIB_DIR=${LIB_DIR:-"$NUSSKNACKER_DIR/lib"}
-MANAGERS_DIR="$NUSSKNACKER_DIR/managers"
-CLASSPATH=${CLASSPATH:-$LIB_DIR/*:$MANAGERS_DIR/*}
+CLASSPATH=${CLASSPATH:-$LIB_DIR/*}
CONFIG_FILE=${CONFIG_FILE:-$CONF_DIR/application.conf}
LOGBACK_FILE=${LOGBACK_FILE:-$CONF_DIR/logback.xml}
@@ -22,6 +21,7 @@ PID_FILE="$WORKING_DIR/nussknacker-designer.pid"
export AUTHENTICATION_USERS_FILE=${AUTHENTICATION_USERS_FILE:-$CONF_DIR/users.conf}
export TABLES_DEFINITION_FILE=${TABLES_DEFINITION_FILE:-$CONF_DIR/dev-tables-definition.sql}
export STORAGE_DIR=${STORAGE_DIR:-$WORKING_DIR/storage}
+export MANAGERS_DIR=${MANAGERS_DIR:-$WORKING_DIR/managers}
export FLINK_REST_URL=${FLINK_REST_URL:-http://localhost:8081}
export KAFKA_ADDRESS=${KAFKA_ADDRESS:-localhost:9092}
diff --git a/nussknacker-dist/src/universal/conf/dev-application.conf b/nussknacker-dist/src/universal/conf/dev-application.conf
index f795c859b07..889470d782b 100644
--- a/nussknacker-dist/src/universal/conf/dev-application.conf
+++ b/nussknacker-dist/src/universal/conf/dev-application.conf
@@ -56,6 +56,9 @@ metricsConfig {
fragmentPropertiesDocsUrl: "https://nussknacker.io/documentation/docs/scenarios_authoring/Fragments/"
+managersDirs: [ "designer/server/work/managers" ]
+managersDirs: [ ${?MANAGERS_DIR} ]
+
scenarioTypes {
"streaming": {
deploymentConfig: ${flinkDeploymentConfig}
diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/DeploymentManagersClassLoader.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/DeploymentManagersClassLoader.scala
new file mode 100644
index 00000000000..d4fe88cf049
--- /dev/null
+++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/DeploymentManagersClassLoader.scala
@@ -0,0 +1 @@
+package pl.touk.nussknacker.engine
diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala
index d87f651729f..5f65b3754bc 100644
--- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala
+++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala
@@ -24,7 +24,12 @@ import pl.touk.nussknacker.engine.dict.DictServicesFactoryLoader
import pl.touk.nussknacker.engine.migration.ProcessMigrations
import pl.touk.nussknacker.engine.modelconfig._
import pl.touk.nussknacker.engine.util.ThreadUtils
-import pl.touk.nussknacker.engine.util.loader.{ModelClassLoader, ProcessConfigCreatorLoader, ScalaServiceLoader}
+import pl.touk.nussknacker.engine.util.loader.{
+ DeploymentManagersClassLoader,
+ ModelClassLoader,
+ ProcessConfigCreatorLoader,
+ ScalaServiceLoader
+}
import pl.touk.nussknacker.engine.util.multiplicity.{Empty, Many, Multiplicity, One}
import java.net.URL
@@ -46,7 +51,7 @@ object ModelData extends LazyLogging {
modelClassLoader: ModelClassLoader
): ModelData = {
ClassLoaderModelData(
- _.resolveInputConfigDuringExecution(processingTypeConfig.modelConfig, modelClassLoader.classLoader),
+ _.resolveInputConfigDuringExecution(processingTypeConfig.modelConfig, modelClassLoader),
modelClassLoader,
Some(processingTypeConfig.category),
dependencies.determineDesignerWideId,
@@ -82,8 +87,8 @@ object ModelData extends LazyLogging {
def resolveInputConfigDuringExecution(modelConfigLoader: ModelConfigLoader): InputConfigDuringExecution = {
if (resolveConfigs) {
modelConfigLoader.resolveInputConfigDuringExecution(
- ConfigWithUnresolvedVersion(modelClassLoader.classLoader, modelConfigs.modelInputConfig),
- modelClassLoader.classLoader
+ ConfigWithUnresolvedVersion(modelClassLoader, modelConfigs.modelInputConfig),
+ modelClassLoader
)
} else {
InputConfigDuringExecution(modelConfigs.modelInputConfig)
@@ -128,8 +133,6 @@ case class ClassLoaderModelData private (
override val additionalConfigsFromProvider: Map[DesignerWideComponentId, ComponentAdditionalConfig],
// This property is for easier testing when for some reason, some jars with ComponentProvider are
// on the test classpath and CPs collide with other once with the same name.
- // E.g. we add liteEmbeddedDeploymentManager as a designer provided dependency which also
- // add liteKafkaComponents (which are in test scope), see comment next to designer module
shouldIncludeConfigCreator: ProcessConfigCreator => Boolean,
shouldIncludeComponentProvider: ComponentProvider => Boolean,
componentDefinitionExtractionMode: ComponentDefinitionExtractionMode,
@@ -140,10 +143,10 @@ case class ClassLoaderModelData private (
// this is not lazy, to be able to detect if creator can be created...
override val configCreator: ProcessConfigCreator =
- new ProcessConfigCreatorLoader(shouldIncludeConfigCreator).justOne(modelClassLoader.classLoader)
+ new ProcessConfigCreatorLoader(shouldIncludeConfigCreator).justOne(modelClassLoader)
override lazy val modelConfigLoader: ModelConfigLoader = {
- Multiplicity(ScalaServiceLoader.load[ModelConfigLoader](modelClassLoader.classLoader)) match {
+ Multiplicity(ScalaServiceLoader.load[ModelConfigLoader](modelClassLoader)) match {
case Empty() => new DefaultModelConfigLoader(shouldIncludeComponentProvider)
case One(modelConfigLoader) => modelConfigLoader
case Many(moreThanOne) =>
@@ -156,7 +159,7 @@ case class ClassLoaderModelData private (
)
override lazy val migrations: ProcessMigrations = {
- Multiplicity(ScalaServiceLoader.load[ProcessMigrations](modelClassLoader.classLoader)) match {
+ Multiplicity(ScalaServiceLoader.load[ProcessMigrations](modelClassLoader)) match {
case Empty() => ProcessMigrations.empty
case One(migrationsDef) => migrationsDef
case Many(moreThanOne) =>
@@ -229,7 +232,7 @@ trait ModelData extends BaseModelData with AutoCloseable {
final lazy val modelDefinitionWithClasses: ModelDefinitionWithClasses = {
val modelDefinitions = withThisAsContextClassLoader {
extractModelDefinitionFun(
- modelClassLoader.classLoader,
+ modelClassLoader,
ProcessObjectDependencies(modelConfig, namingStrategy),
determineDesignerWideId,
additionalConfigsFromProvider
@@ -242,11 +245,12 @@ trait ModelData extends BaseModelData with AutoCloseable {
// See parameters of implementing functions
def extractModelDefinitionFun: ExtractDefinitionFun
- final def modelDefinition: ModelDefinition =
+ final def modelDefinition: ModelDefinition = withThisAsContextClassLoader {
modelDefinitionWithClasses.modelDefinition
+ }
private lazy val dictServicesFactory: DictServicesFactory =
- DictServicesFactoryLoader.justOne(modelClassLoader.classLoader)
+ DictServicesFactoryLoader.justOne(modelClassLoader)
final lazy val designerDictServices: UiDictServices =
dictServicesFactory.createUiDictServices(modelDefinition.expressionConfig.dictionaries, modelConfig)
@@ -256,11 +260,11 @@ trait ModelData extends BaseModelData with AutoCloseable {
// TODO: remove it, see notice in CustomProcessValidatorFactory
final def customProcessValidator: CustomProcessValidator = {
- CustomProcessValidatorLoader.loadProcessValidators(modelClassLoader.classLoader, modelConfig)
+ CustomProcessValidatorLoader.loadProcessValidators(modelClassLoader, modelConfig)
}
final def withThisAsContextClassLoader[T](block: => T): T = {
- ThreadUtils.withThisAsContextClassLoader(modelClassLoader.classLoader) {
+ ThreadUtils.withThisAsContextClassLoader(modelClassLoader) {
block
}
}
@@ -272,7 +276,7 @@ trait ModelData extends BaseModelData with AutoCloseable {
def modelConfigLoader: ModelConfigLoader
final override lazy val modelConfig: Config =
- modelConfigLoader.resolveConfig(inputConfigDuringExecution, modelClassLoader.classLoader)
+ modelConfigLoader.resolveConfig(inputConfigDuringExecution, modelClassLoader)
final lazy val componentsUiConfig: ComponentsUiConfig = ComponentsUiConfigParser.parse(modelConfig)
diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/compile/ExpressionCompiler.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/compile/ExpressionCompiler.scala
index 23c7426e4fb..0100f2b248d 100644
--- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/compile/ExpressionCompiler.scala
+++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/compile/ExpressionCompiler.scala
@@ -4,13 +4,12 @@ import cats.data.Validated.{Invalid, Valid, invalid, invalidNel, valid}
import cats.data.{Ior, IorNel, NonEmptyList, Validated, ValidatedNel}
import cats.instances.list._
import pl.touk.nussknacker.engine.ModelData
-import pl.touk.nussknacker.engine.api.{JobData, MetaData, NodeId}
+import pl.touk.nussknacker.engine.api.{JobData, NodeId}
import pl.touk.nussknacker.engine.api.context.ProcessCompilationError._
import pl.touk.nussknacker.engine.api.context.{PartSubGraphCompilationError, ProcessCompilationError, ValidationContext}
import pl.touk.nussknacker.engine.api.definition._
import pl.touk.nussknacker.engine.api.dict.{DictRegistry, EngineDictRegistry}
import pl.touk.nussknacker.engine.api.parameter.ParameterName
-import pl.touk.nussknacker.engine.api.process.ClassExtractionSettings
import pl.touk.nussknacker.engine.api.typed.typing.{Typed, TypingResult}
import pl.touk.nussknacker.engine.compiledgraph.{CompiledParameter, TypedParameter}
import pl.touk.nussknacker.engine.definition.clazz.ClassDefinitionSet
@@ -70,7 +69,7 @@ object ExpressionCompiler {
def withoutOptimization(modelData: ModelData): ExpressionCompiler = {
withoutOptimization(
- modelData.modelClassLoader.classLoader,
+ modelData.modelClassLoader,
modelData.designerDictServices.dictRegistry,
modelData.modelDefinition.expressionConfig,
modelData.modelDefinitionWithClasses.classDefinitions,
diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/compile/ProcessCompiler.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/compile/ProcessCompiler.scala
index 0d19e5b80b4..29a11f5f821 100644
--- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/compile/ProcessCompiler.scala
+++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/compile/ProcessCompiler.scala
@@ -327,7 +327,7 @@ object ProcessValidator {
modelData.modelDefinitionWithClasses,
modelData.designerDictServices.dictRegistry,
modelData.customProcessValidator,
- modelData.modelClassLoader.classLoader
+ modelData.modelClassLoader
)
}
diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/compile/nodecompilation/DynamicNodeValidator.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/compile/nodecompilation/DynamicNodeValidator.scala
index 10f1e641bc3..8b6cdc02f1d 100644
--- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/compile/nodecompilation/DynamicNodeValidator.scala
+++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/compile/nodecompilation/DynamicNodeValidator.scala
@@ -11,7 +11,7 @@ import pl.touk.nussknacker.engine.api.context._
import pl.touk.nussknacker.engine.api.context.transformation._
import pl.touk.nussknacker.engine.api.definition.Parameter
import pl.touk.nussknacker.engine.api.parameter.ParameterName
-import pl.touk.nussknacker.engine.api.{JobData, MetaData, NodeId}
+import pl.touk.nussknacker.engine.api.{JobData, NodeId}
import pl.touk.nussknacker.engine.compile.{ExpressionCompiler, NodeValidationExceptionHandler, Validations}
import pl.touk.nussknacker.engine.compiledgraph.TypedParameter
import pl.touk.nussknacker.engine.definition.component.parameter.StandardParameterEnrichment
diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/compile/nodecompilation/NodeDataValidator.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/compile/nodecompilation/NodeDataValidator.scala
index cca96566d05..bc9c3ee7848 100644
--- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/compile/nodecompilation/NodeDataValidator.scala
+++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/compile/nodecompilation/NodeDataValidator.scala
@@ -10,7 +10,7 @@ import pl.touk.nussknacker.engine.api.context.{OutputVar, ProcessCompilationErro
import pl.touk.nussknacker.engine.api.definition.Parameter
import pl.touk.nussknacker.engine.api.process.ComponentUseCase
import pl.touk.nussknacker.engine.api.typed.typing.{TypingResult, Unknown}
-import pl.touk.nussknacker.engine.api.{JobData, MetaData, NodeId}
+import pl.touk.nussknacker.engine.api.{JobData, NodeId}
import pl.touk.nussknacker.engine.compile.nodecompilation.NodeCompiler.NodeCompilationResult
import pl.touk.nussknacker.engine.compile.nodecompilation.NodeDataValidator.OutgoingEdge
import pl.touk.nussknacker.engine.compile.{ExpressionCompiler, FragmentResolver, IdValidator, Output}
@@ -46,11 +46,11 @@ class NodeDataValidator(modelData: ModelData) {
private val compiler = new NodeCompiler(
modelData.modelDefinition,
new FragmentParametersDefinitionExtractor(
- modelData.modelClassLoader.classLoader,
+ modelData.modelClassLoader,
modelData.modelDefinitionWithClasses.classDefinitions.all
),
expressionCompiler,
- modelData.modelClassLoader.classLoader,
+ modelData.modelClassLoader,
Seq.empty,
PreventInvocationCollector,
ComponentUseCase.Validation,
diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/definition/test/ModelDataTestInfoProvider.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/definition/test/ModelDataTestInfoProvider.scala
index d2c0b815744..051fa63d4b0 100644
--- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/definition/test/ModelDataTestInfoProvider.scala
+++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/definition/test/ModelDataTestInfoProvider.scala
@@ -25,11 +25,11 @@ class ModelDataTestInfoProvider(modelData: ModelData) extends TestInfoProvider w
private lazy val nodeCompiler = new NodeCompiler(
modelData.modelDefinition,
new FragmentParametersDefinitionExtractor(
- modelData.modelClassLoader.classLoader,
+ modelData.modelClassLoader,
modelData.modelDefinitionWithClasses.classDefinitions.all
),
expressionCompiler,
- modelData.modelClassLoader.classLoader,
+ modelData.modelClassLoader,
Seq.empty,
ProductionServiceInvocationCollector,
ComponentUseCase.TestDataGeneration,
diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/extension/CastOrConversionExt.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/extension/CastOrConversionExt.scala
index 0c6d4ec67a1..8d7c06cf905 100644
--- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/extension/CastOrConversionExt.scala
+++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/extension/CastOrConversionExt.scala
@@ -26,7 +26,7 @@ import java.time.{LocalDate, LocalDateTime, LocalTime, ZoneId, ZoneOffset}
import java.util.{Currency, UUID}
import scala.util.Try
-// todo: lbg - add casting methods to UTIL
+// TODO: lbg - add casting methods to UTIL
class CastOrConversionExt(classesBySimpleName: Map[String, Class[_]]) {
private val castException = new ClassCastException(s"Cannot cast value to given class")
diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/testmode/TestDataPreparer.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/testmode/TestDataPreparer.scala
index 0c9b7a2a3f0..2d25f651d91 100644
--- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/testmode/TestDataPreparer.scala
+++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/testmode/TestDataPreparer.scala
@@ -10,8 +10,7 @@ import pl.touk.nussknacker.engine.api.definition.Parameter
import pl.touk.nussknacker.engine.api.dict.EngineDictRegistry
import pl.touk.nussknacker.engine.api.process.{Source, SourceTestSupport, TestWithParametersSupport}
import pl.touk.nussknacker.engine.api.test.{ScenarioTestJsonRecord, ScenarioTestParametersRecord, ScenarioTestRecord}
-import pl.touk.nussknacker.engine.api.{Context, JobData, MetaData, NodeId}
-import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
+import pl.touk.nussknacker.engine.api.{Context, JobData, NodeId}
import pl.touk.nussknacker.engine.compile.ExpressionCompiler
import pl.touk.nussknacker.engine.compiledgraph.CompiledParameter
import pl.touk.nussknacker.engine.definition.clazz.ClassDefinitionSet
@@ -107,7 +106,7 @@ object TestDataPreparer {
def apply(modelData: ModelData, jobData: JobData): TestDataPreparer =
new TestDataPreparer(
- modelData.modelClassLoader.classLoader,
+ modelData.modelClassLoader,
modelData.modelDefinition.expressionConfig,
modelData.engineDictRegistry,
modelData.modelDefinitionWithClasses.classDefinitions,
diff --git a/scenario-compiler/src/test/scala/pl/touk/nussknacker/engine/spel/SpelExpressionSpec.scala b/scenario-compiler/src/test/scala/pl/touk/nussknacker/engine/spel/SpelExpressionSpec.scala
index c59145cb0ad..bdf1386044c 100644
--- a/scenario-compiler/src/test/scala/pl/touk/nussknacker/engine/spel/SpelExpressionSpec.scala
+++ b/scenario-compiler/src/test/scala/pl/touk/nussknacker/engine/spel/SpelExpressionSpec.scala
@@ -278,7 +278,10 @@ class SpelExpressionSpec extends AnyFunSuite with Matchers with ValidatedValuesD
}
test("should figure out result type when dynamically indexing record") {
- evaluate[Int]("{a: {g: 5, h: 10}, b: {g: 50, h: 100}}[#input.toString()].h", Context("abc").withVariable("input", "b")) shouldBe 100
+ evaluate[Int](
+ "{a: {g: 5, h: 10}, b: {g: 50, h: 100}}[#input.toString()].h",
+ Context("abc").withVariable("input", "b")
+ ) shouldBe 100
}
test("parsing first selection on array") {
diff --git a/utils/schemed-kafka-components-utils/src/main/scala/pl/touk/nussknacker/engine/schemedkafka/schemaregistry/universal/UniversalSchemaBasedSerdeProvider.scala b/utils/schemed-kafka-components-utils/src/main/scala/pl/touk/nussknacker/engine/schemedkafka/schemaregistry/universal/UniversalSchemaBasedSerdeProvider.scala
index acb539b703c..dfb0f530d80 100644
--- a/utils/schemed-kafka-components-utils/src/main/scala/pl/touk/nussknacker/engine/schemedkafka/schemaregistry/universal/UniversalSchemaBasedSerdeProvider.scala
+++ b/utils/schemed-kafka-components-utils/src/main/scala/pl/touk/nussknacker/engine/schemedkafka/schemaregistry/universal/UniversalSchemaBasedSerdeProvider.scala
@@ -43,7 +43,7 @@ object UniversalSchemaBasedSerdeProvider {
createSchemaIdFromMessageExtractor(isConfluent, isAzure)
}
- // SchemaId can be obtain in several ways. Precedent:
+ // SchemaId can be obtained in several ways. Precedence:
// * from nu kafka headers - it is our own, Nussknacker headers standard format: key.schemaId and value.schemaId headers
// * from azure header - content-type: avro/binary+schemaId (only value schema ids are supported)
// * from payload serialized in 'Confluent way' ([magicbyte][schemaid][payload])
diff --git a/utils/test-utils/src/main/resources/bootstrap-setup.override.yml b/utils/test-utils/src/main/resources/bootstrap-setup.override.yml
index c789679792f..8f48d344fc9 100644
--- a/utils/test-utils/src/main/resources/bootstrap-setup.override.yml
+++ b/utils/test-utils/src/main/resources/bootstrap-setup.override.yml
@@ -1,7 +1,7 @@
services:
bootstrap-setup:
- image: touk/nussknacker-example-scenarios-library:0.3.0
+ image: touk/nussknacker-example-scenarios-library:0.4.1
depends_on:
nginx:
condition: service_healthy
@@ -22,3 +22,7 @@ services:
limits:
memory: 256M
cpus: '0.5'
+
+ designer:
+ expose:
+ - 8181
diff --git a/utils/test-utils/src/main/scala/pl/touk/nussknacker/test/installationexample/DockerBasedNuInstallationExampleEnvironment.scala b/utils/test-utils/src/main/scala/pl/touk/nussknacker/test/installationexample/DockerBasedNuInstallationExampleEnvironment.scala
index 44f4770a30c..e2dd40a5272 100644
--- a/utils/test-utils/src/main/scala/pl/touk/nussknacker/test/installationexample/DockerBasedNuInstallationExampleEnvironment.scala
+++ b/utils/test-utils/src/main/scala/pl/touk/nussknacker/test/installationexample/DockerBasedNuInstallationExampleEnvironment.scala
@@ -1,18 +1,24 @@
package pl.touk.nussknacker.test.installationexample
+import cats.effect.IO
+import cats.effect.kernel.Resource
+import cats.effect.unsafe.implicits.global
import com.dimafeng.testcontainers.{DockerComposeContainer, ServiceLogConsumer, WaitingForService}
import com.typesafe.scalalogging.LazyLogging
import org.slf4j.Logger
-import org.testcontainers.DockerClientFactory
import org.testcontainers.containers.output.Slf4jLogConsumer
import org.testcontainers.containers.wait.strategy.DockerHealthcheckWaitStrategy
+import pl.touk.nussknacker.test.MiscUtils._
+import pl.touk.nussknacker.test.WithTestHttpClientCreator
import pl.touk.nussknacker.test.containers.ContainerExt.toContainerExt
import pl.touk.nussknacker.test.installationexample.DockerBasedInstallationExampleNuEnvironment.{JSON, slf4jLogger}
+import sttp.client3._
+import sttp.model.MediaType
import ujson.Value
-import pl.touk.nussknacker.test.MiscUtils._
import java.io.{File => JFile}
import java.time.Duration
+import scala.util.Try
class DockerBasedInstallationExampleNuEnvironment(
nussknackerImageVersion: String,
@@ -33,7 +39,7 @@ class DockerBasedInstallationExampleNuEnvironment(
waitingFor = Some(
WaitingForService(
"bootstrap-setup",
- new DockerHealthcheckWaitStrategy().withStartupTimeout(Duration.ofSeconds(120))
+ new DockerHealthcheckWaitStrategy().withStartupTimeout(Duration.ofSeconds(150))
)
),
// Change to 'true' to enable logging
@@ -42,7 +48,16 @@ class DockerBasedInstallationExampleNuEnvironment(
start()
- val client: DockerBasedInstallationExampleClient = new DockerBasedInstallationExampleClient(this)
+ private val (dockerBasedInstallationExampleClient, closeHandler) =
+ DockerBasedInstallationExampleClient.create(this).allocated.unsafeRunSync()
+
+ val client: DockerBasedInstallationExampleClient = dockerBasedInstallationExampleClient
+
+ override def stop(): Unit = {
+ closeHandler.unsafeRunSync()
+ super.stop()
+ }
+
}
object DockerBasedInstallationExampleNuEnvironment extends LazyLogging {
@@ -53,9 +68,22 @@ object DockerBasedInstallationExampleNuEnvironment extends LazyLogging {
}
-class DockerBasedInstallationExampleClient(env: DockerBasedInstallationExampleNuEnvironment) {
+object DockerBasedInstallationExampleClient extends WithTestHttpClientCreator {
+
+ def create(env: DockerBasedInstallationExampleNuEnvironment): Resource[IO, DockerBasedInstallationExampleClient] = {
+ createHttpClient(sslContext = None)
+ .map(new DockerBasedInstallationExampleClient(env, _))
+ }
+
+}
+
+class DockerBasedInstallationExampleClient private (
+ env: DockerBasedInstallationExampleNuEnvironment,
+ sttpBackend: SttpBackend[Identity, Any]
+) {
private val bootstrapSetupService = unsafeContainerByServiceName("bootstrap-setup")
+ private val nginxService = unsafeContainerByServiceName("nginx")
def deployAndWaitForRunningState(scenarioName: String): Unit = {
bootstrapSetupService.executeBash(
@@ -88,8 +116,22 @@ class DockerBasedInstallationExampleClient(env: DockerBasedInstallationExampleNu
bootstrapSetupService.executeBash(s"""/app/utils/kafka/purge-topic.sh "$topic" """)
}
+ def sendHttpRequest(serviceSlug: String, payload: JSON): Either[Throwable, HttpResponse] = {
+ val response = sttp.client3.basicRequest
+ .post(uri"http://${nginxService.getHost}:8181/scenario/$serviceSlug")
+ .contentType(MediaType.ApplicationJson)
+ .body(payload.render())
+ .response(asStringAlways)
+ .send(sttpBackend)
+
+ Try(ujson.read(response.body)).toEither
+ .map(body => HttpResponse(response.code.code, ujson.read(body)))
+ }
+
private def unsafeContainerByServiceName(name: String) = env
.getContainerByServiceName(name)
.getOrElse(throw new IllegalStateException(s"'$name' service not available!"))
}
+
+final case class HttpResponse(status: Int, body: JSON)
diff --git a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/StringUtils.scala b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/StringUtils.scala
new file mode 100644
index 00000000000..6036710ca82
--- /dev/null
+++ b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/StringUtils.scala
@@ -0,0 +1,27 @@
+package pl.touk.nussknacker.engine.util
+
+import java.net.{URI, URL}
+import java.nio.file.Path
+
+object StringUtils {
+
+ implicit class ToUrl(val value: String) extends AnyVal {
+
+ def convertToURL(workingDirectoryOpt: Option[Path] = None): URL = {
+ val uri = new URI(value)
+ if (uri.isAbsolute) {
+ uri.toURL
+ } else {
+ val pathPart = uri.getSchemeSpecificPart
+ val path = workingDirectoryOpt.map { workingDirectory =>
+ workingDirectory.resolve(pathPart)
+ } getOrElse {
+ Path.of(pathPart)
+ }
+ path.toUri.toURL
+ }
+ }
+
+ }
+
+}
diff --git a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/UrlUtils.scala b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/UrlUtils.scala
new file mode 100644
index 00000000000..6029b834aa5
--- /dev/null
+++ b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/UrlUtils.scala
@@ -0,0 +1,37 @@
+package pl.touk.nussknacker.engine.util
+
+import java.io.File
+import java.net.URL
+
+object UrlUtils {
+
+ implicit class ExpandFiles(val url: URL) extends AnyVal {
+
+ def expandFiles(extension: String): List[URL] = {
+ url match {
+ case u if u.getProtocol.toLowerCase == "file" =>
+ val file = new File(u.toURI)
+ if (file.isDirectory) {
+ val expanded = file
+ .listFiles()
+ .toList
+ .filterNot(_.getName.startsWith("."))
+ .map(_.toURI.toURL)
+ .flatMap(_.expandFiles(extension))
+
+ expanded match {
+ case Nil => List.empty
+ case nonEmpty if nonEmpty.exists(_.getFile.endsWith(extension)) => expanded
+ case _ => u :: Nil
+ }
+ } else {
+ u :: Nil
+ }
+ case u =>
+ u :: Nil
+ }
+ }
+
+ }
+
+}
diff --git a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/DeploymentManagersClassLoader.scala b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/DeploymentManagersClassLoader.scala
new file mode 100644
index 00000000000..9fdaa16c31c
--- /dev/null
+++ b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/DeploymentManagersClassLoader.scala
@@ -0,0 +1,32 @@
+package pl.touk.nussknacker.engine.util.loader
+
+import cats.effect.{IO, Resource}
+import com.typesafe.scalalogging.LazyLogging
+import pl.touk.nussknacker.engine.util.UrlUtils.ExpandFiles
+
+import java.net.URL
+import java.nio.file.Path
+import scala.reflect.internal.util.ScalaClassLoader.URLClassLoader
+
+object DeploymentManagersClassLoader extends LazyLogging {
+
+ def create(managersDirs: List[Path]): Resource[IO, DeploymentManagersClassLoader] = {
+ Resource.make(
+ acquire = IO.delay {
+ logger.debug(
+ s"Loading deployment managers from the following locations: ${managersDirs.map(_.toString).mkString(", ")}"
+ )
+ new DeploymentManagersClassLoader(
+ managersDirs.flatMap(_.toUri.toURL.expandFiles(".jar")),
+ this.getClass.getClassLoader
+ )
+ }
+ )(
+ release = loader => IO.delay(loader.close())
+ )
+ }
+
+}
+
+class DeploymentManagersClassLoader private (urls: Seq[URL], parent: ClassLoader) extends URLClassLoader(urls, parent)
+
diff --git a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala
index 470d9190cac..e001ab226e8 100644
--- a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala
+++ b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala
@@ -1,14 +1,16 @@
package pl.touk.nussknacker.engine.util.loader
import com.typesafe.scalalogging.LazyLogging
+import pl.touk.nussknacker.engine.util.StringUtils._
+import pl.touk.nussknacker.engine.util.UrlUtils._
-import java.io.File
-import java.net.{URI, URL, URLClassLoader}
+import java.net.URL
import java.nio.file.Path
+import scala.reflect.internal.util.ScalaClassLoader.URLClassLoader
-case class ModelClassLoader private (classLoader: ClassLoader, urls: List[URL]) {
+class ModelClassLoader private (val urls: List[URL], parent: ClassLoader) extends URLClassLoader(urls, parent) {
- override def toString: String = s"ModelClassLoader(${toString(classLoader)})"
+ override def toString: String = s"ModelClassLoader(${toString(this)})"
private def toString(classLoader: ClassLoader): String = classLoader match {
case null => "null"
@@ -20,57 +22,18 @@ case class ModelClassLoader private (classLoader: ClassLoader, urls: List[URL])
object ModelClassLoader extends LazyLogging {
// for e.g. testing in process module
- val empty: ModelClassLoader = ModelClassLoader(getClass.getClassLoader, List())
-
- val defaultJarExtension = ".jar"
-
- private def expandFiles(urls: Iterable[URL], jarExtension: String): Iterable[URL] = {
- urls.flatMap {
- case url if url.getProtocol.toLowerCase == "file" =>
- val file = new File(url.toURI)
- if (file.isDirectory) {
- val expanded =
- expandFiles(file.listFiles().filterNot(_.getName.startsWith(".")).map(_.toURI.toURL), jarExtension)
- if (expanded.isEmpty) {
- List.empty
- } else if (expanded.exists(_.getFile.endsWith(jarExtension))) { // not expand if nested jars not exists
- expanded
- } else {
- List(url)
- }
- } else {
- List(url)
- }
- case url => List(url)
- }
- }
-
- private def convertToURL(urlString: String, workingDirectoryOpt: Option[Path]): URL = {
- val uri = new URI(urlString)
- if (uri.isAbsolute) {
- uri.toURL
- } else {
- val pathPart = uri.getSchemeSpecificPart
- val path = workingDirectoryOpt.map { workingDirectory =>
- workingDirectory.resolve(pathPart)
- } getOrElse {
- Path.of(pathPart)
- }
- path.toUri.toURL
- }
- }
+ val empty: ModelClassLoader = new ModelClassLoader(List.empty, getClass.getClassLoader)
+ val defaultJarExtension = ".jar"
// workingDirectoryOpt is for the purpose of easier testing. We can't easily change the working directory otherwise - see https://stackoverflow.com/a/840229
def apply(
urls: List[String],
workingDirectoryOpt: Option[Path],
+ deploymentManagersClassLoader: DeploymentManagersClassLoader,
jarExtension: String = defaultJarExtension
): ModelClassLoader = {
- val postProcessedURLs = expandFiles(urls.map(convertToURL(_, workingDirectoryOpt)), jarExtension)
- ModelClassLoader(
- new URLClassLoader(postProcessedURLs.toArray, this.getClass.getClassLoader),
- postProcessedURLs.toList
- )
+ val postProcessedURLs = urls.map(_.convertToURL(workingDirectoryOpt)).flatMap(_.expandFiles(jarExtension))
+ new ModelClassLoader(postProcessedURLs, deploymentManagersClassLoader)
}
}
diff --git a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ScalaServiceLoader.scala b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ScalaServiceLoader.scala
index 2012bbf3c9a..3a1fb180fea 100644
--- a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ScalaServiceLoader.scala
+++ b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ScalaServiceLoader.scala
@@ -12,7 +12,7 @@ import scala.reflect.{ClassTag, classTag}
object ScalaServiceLoader extends LazyLogging {
import scala.jdk.CollectionConverters._
- def loadClass[T](classLoader: ClassLoader)(createDefault: => T)(implicit classTag: ClassTag[T]): T =
+ def loadClass[T: ClassTag](classLoader: ClassLoader)(createDefault: => T): T =
chooseClass[T](createDefault, load[T](classLoader))
def chooseClass[T](createDefault: => T, loaded: List[T]): T = {
diff --git a/utils/utils-internal/src/test/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoaderSpec.scala b/utils/utils-internal/src/test/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoaderSpec.scala
index b6caffde94d..0a1edca95d6 100644
--- a/utils/utils-internal/src/test/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoaderSpec.scala
+++ b/utils/utils-internal/src/test/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoaderSpec.scala
@@ -1,12 +1,25 @@
package pl.touk.nussknacker.engine.util.loader
+import cats.effect.unsafe.implicits.global
+import org.scalatest.BeforeAndAfterAll
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import java.net.{URL, URLClassLoader}
import java.nio.file.Path
-class ModelClassLoaderSpec extends AnyFunSuite with Matchers {
+class ModelClassLoaderSpec extends AnyFunSuite with Matchers with BeforeAndAfterAll {
+
+ private val (deploymentManagersClassLoader, releaseDeploymentManagersClassLoaderResources) =
+ DeploymentManagersClassLoader
+ .create(List.empty)
+ .allocated
+ .unsafeRunSync()
+
+ override protected def afterAll(): Unit = {
+ releaseDeploymentManagersClassLoaderResources.unsafeRunSync()
+ super.afterAll()
+ }
test("should detect nested URLs in classloader") {
def resource(file: String): URL = getClass.getResource("/modelClassLoader" + file)
@@ -14,7 +27,12 @@ class ModelClassLoaderSpec extends AnyFunSuite with Matchers {
val urls = List(resource(""), nonFileUrl)
- val loader = ModelClassLoader(urls.map(_.toURI.toString), workingDirectoryOpt = None, jarExtension = ".jara")
+ val loader = ModelClassLoader(
+ urls.map(_.toURI.toString),
+ workingDirectoryOpt = None,
+ deploymentManagersClassLoader = deploymentManagersClassLoader,
+ jarExtension = ".jara"
+ )
// we're not using .jar to avoid messing with .gitignore
val expected = Set(
@@ -26,13 +44,14 @@ class ModelClassLoaderSpec extends AnyFunSuite with Matchers {
nonFileUrl
)
loader.urls.toSet shouldBe expected
- loader.classLoader.asInstanceOf[URLClassLoader].getURLs.toSet shouldBe expected
+ loader.asInstanceOf[URLClassLoader].getURLs.toSet shouldBe expected
}
test("should resolve classpath using working directory when defined") {
val loader = ModelClassLoader(
- List("relative/path", "/absolute/path"),
- workingDirectoryOpt = Some(Path.of("/some/working/directory"))
+ urls = List("relative/path", "/absolute/path"),
+ workingDirectoryOpt = Some(Path.of("/some/working/directory")),
+ deploymentManagersClassLoader = deploymentManagersClassLoader
)
loader.urls shouldEqual List(new URL("file:/some/working/directory/relative/path"), new URL("file:/absolute/path"))
}
diff --git a/utils/utils/src/main/java/pl/touk/nussknacker/springframework/util/BigDecimalScaleEnsurer.scala b/utils/utils/src/main/java/pl/touk/nussknacker/springframework/util/BigDecimalScaleEnsurer.scala
index 156290f7d87..99aeb6d1835 100644
--- a/utils/utils/src/main/java/pl/touk/nussknacker/springframework/util/BigDecimalScaleEnsurer.scala
+++ b/utils/utils/src/main/java/pl/touk/nussknacker/springframework/util/BigDecimalScaleEnsurer.scala
@@ -21,7 +21,7 @@ There is the risk that big decimals enter process in other ways (for instance fr
and they may have small scales. This may again lead to unexpected
behaviour when using division operator. This issue can be solved by using our own version of OpDivide class, but for now we decided
not to do it.
-*/
+ */
object BigDecimalScaleEnsurer {
// visible for testing
val DEFAULT_BIG_DECIMAL_SCALE = 18
@@ -29,4 +29,5 @@ object BigDecimalScaleEnsurer {
def ensureBigDecimalScale(value: java.math.BigDecimal): java.math.BigDecimal = {
value.setScale(Math.max(value.scale(), BigDecimalScaleEnsurer.DEFAULT_BIG_DECIMAL_SCALE), RoundingMode.UNNECESSARY)
}
+
}
diff --git a/utils/utils/src/main/scala/pl/touk/nussknacker/engine/util/ExecutionContextWithIORuntime.scala b/utils/utils/src/main/scala/pl/touk/nussknacker/engine/util/ExecutionContextWithIORuntime.scala
new file mode 100644
index 00000000000..1f27d6024a4
--- /dev/null
+++ b/utils/utils/src/main/scala/pl/touk/nussknacker/engine/util/ExecutionContextWithIORuntime.scala
@@ -0,0 +1,58 @@
+package pl.touk.nussknacker.engine.util
+
+import cats.effect.{IO, Resource}
+import cats.effect.unsafe.{IORuntime, IORuntimeConfig}
+
+import java.util.concurrent.Executors
+import scala.concurrent.ExecutionContext
+
/** An [[scala.concurrent.ExecutionContext]] that additionally exposes a cats-effect
  * [[cats.effect.unsafe.IORuntime]], so code needing both takes a single dependency.
  */
trait ExecutionContextWithIORuntime extends ExecutionContext {
  // Implicit so it is picked up automatically where `unsafeRunSync`-style calls need a runtime.
  implicit def ioRuntime: IORuntime
}
+
/** Adapts a plain [[scala.concurrent.ExecutionContext]] into an [[ExecutionContextWithIORuntime]].
  *
  * The wrapped context is used as the compute pool; a dedicated cached thread pool is created
  * for blocking operations. Resources are released by [[close]] (idempotent); a JVM shutdown
  * hook acts as a safety net when close() was never called. Prefer creating instances through
  * the companion's `createFrom`, which ties the lifecycle to a cats-effect `Resource`.
  */
class ExecutionContextWithIORuntimeAdapter private (executionContext: ExecutionContext)
    extends ExecutionContextWithIORuntime {

  // Guards close() so releasing resources happens exactly once. Declared before the shutdown
  // hook is registered, so the hook can never observe an uninitialized flag.
  @volatile private var closed = false

  // Dedicated pool for cats-effect blocking operations; shut down in close().
  private val cachedThreadPool = Executors.newCachedThreadPool()

  override implicit val ioRuntime: IORuntime = IORuntime(
    compute = executionContext,
    blocking = ExecutionContext.fromExecutor(cachedThreadPool),
    scheduler = IORuntime.global.scheduler, // reuse the global runtime's scheduler thread
    shutdown = () => (),
    config = IORuntimeConfig()
  )

  // Kept in a field (instead of an anonymous Thread) so close() can deregister it; otherwise an
  // explicitly-closed adapter would stay strongly reachable from the hook for the JVM's lifetime.
  private val shutdownHook = new Thread(() => close())

  Runtime.getRuntime.addShutdownHook(shutdownHook)

  override def execute(runnable: Runnable): Unit = executionContext.execute(runnable)

  override def reportFailure(cause: Throwable): Unit = executionContext.reportFailure(cause)

  /** Releases the runtime and the blocking thread pool. Safe to call multiple times. */
  def close(): Unit = {
    synchronized {
      if (!closed) {
        closed = true
        ioRuntime.shutdown()
        cachedThreadPool.shutdown()
        // Best effort: removeShutdownHook throws IllegalStateException when the JVM is already
        // shutting down (i.e. when invoked from the hook itself) - safe to ignore then.
        try Runtime.getRuntime.removeShutdownHook(shutdownHook)
        catch { case _: IllegalStateException => () }
      }
    }
  }

}
+
/** Factory methods for [[ExecutionContextWithIORuntimeAdapter]]. */
object ExecutionContextWithIORuntimeAdapter {

  /** Creates an adapter whose lifecycle is managed by the returned `Resource`:
    * `close()` is invoked when the resource is released.
    */
  def createFrom(executionContext: ExecutionContext): Resource[IO, ExecutionContextWithIORuntimeAdapter] = {
    val acquire = IO.delay(new ExecutionContextWithIORuntimeAdapter(executionContext))
    Resource.make(acquire)(adapter => IO.delay(adapter.close()))
  }

  /** Creates an adapter outside of any resource management; the caller is responsible
    * for invoking `close()` when done.
    */
  def unsafeCreateFrom(executionContext: ExecutionContext): ExecutionContextWithIORuntimeAdapter =
    new ExecutionContextWithIORuntimeAdapter(executionContext)

}