From 8adbf621db6feaf4930df7ea8c4a8a4a1ecac999 Mon Sep 17 00:00:00 2001 From: MK Software Date: Tue, 12 Nov 2024 10:53:22 +0100 Subject: [PATCH 01/73] wip --- build.sbt | 45 +++++++++++++------ ...sConfigBasedProcessingTypeDataLoader.scala | 14 +++++- .../repository/ScenarioActionRepository.scala | 5 +-- ...tionsAndCommentsToScenarioActivities.scala | 3 +- ...PeriodicCustomActionsProviderFactory.scala | 10 ++--- .../engine/api/deployment/ProcessAction.scala | 1 + .../engine/util/loader/ModelClassLoader.scala | 27 ++++++----- 7 files changed, 66 insertions(+), 39 deletions(-) diff --git a/build.sbt b/build.sbt index d83e32a5ed6..81118ba3524 100644 --- a/build.sbt +++ b/build.sbt @@ -346,6 +346,7 @@ val caffeineCacheV = "3.1.8" val sttpV = "3.9.8" val tapirV = "1.11.7" val openapiCirceYamlV = "0.11.3" +val retryV = "0.3.6" //we use legacy version because this one supports Scala 2.12 val monocleV = "2.1.0" val jmxPrometheusJavaagentV = "0.20.0" @@ -437,7 +438,8 @@ def assemblySettings( includeScala: Boolean, filterProvidedDeps: Boolean = true ): List[Def.SettingsDefinition] = { - // This work around need to be optional because for designer module it causes excluding of scala lib (because we has there other work around for Idea classpath and provided deps) + // This work around need to be optional because for designer module it causes excluding of scala lib + // (because we have there other work around for Idea classpath and provided deps) val filterProvidedDepsSettingOpt = if (filterProvidedDeps) { Some( // For some reason problem described in https://github.com/sbt/sbt-assembly/issues/295 appears, workaround also works... 
@@ -469,7 +471,7 @@ lazy val modelArtifacts = taskKey[List[(File, String)]]("model artifacts") lazy val devArtifacts = taskKey[List[(File, String)]]("dev artifacts") -lazy val managerArtifacts = taskKey[List[(File, String)]]("manager artifacts") +lazy val deploymentManagerArtifacts = taskKey[List[(File, String)]]("deployment manager artifacts") def filterDevConfigArtifacts(files: Seq[(File, String)]) = { val devConfigFiles = Set("dev-tables-definition.sql", "dev-application.conf", "dev-oauth2-users.conf") @@ -481,7 +483,7 @@ lazy val distribution: Project = sbt .settings(commonSettings) .enablePlugins(JavaAgent, SbtNativePackager, JavaServerAppPackaging) .settings( - managerArtifacts := { + deploymentManagerArtifacts := { List( (flinkDeploymentManager / assembly).value -> "managers/nussknacker-flink-manager.jar", (liteK8sDeploymentManager / assembly).value -> "managers/lite-k8s-manager.jar", @@ -520,7 +522,7 @@ lazy val distribution: Project = sbt else filterDevConfigArtifacts((Universal / mappings).value) universalMappingsWithDevConfigFilter ++ - (managerArtifacts).value ++ + (deploymentManagerArtifacts).value ++ (componentArtifacts).value ++ (if (addDevArtifacts) Seq((developmentTestsDeploymentManager / assembly).value -> "managers/development-tests-manager.jar") @@ -617,7 +619,7 @@ lazy val flinkDeploymentManager = (project in flink("management")) ExclusionRule("com.esotericsoftware", "kryo-shaded"), ), "org.apache.flink" % "flink-statebackend-rocksdb" % flinkV % flinkScope, - "com.softwaremill.retry" %% "retry" % "0.3.6", + "com.softwaremill.retry" %% "retry" % retryV, "org.wiremock" % "wiremock" % wireMockV % Test, "org.scalatestplus" %% "mockito-5-10" % scalaTestPlusV % Test, ) ++ flinkLibScalaDeps(scalaVersion.value, Some(flinkScope)) @@ -1484,6 +1486,7 @@ lazy val developmentTestsDeployManagerArtifacts = taskKey[List[(File, String)]]("development tests deployment manager artifacts") developmentTestsDeployManagerArtifacts := List( + 
(liteEmbeddedDeploymentManager / assembly).value -> "managers/lite-embedded-manager.jar", (developmentTestsDeploymentManager / assembly).value -> "managers/developmentTestsManager.jar" ) @@ -1944,6 +1947,9 @@ lazy val designer = (project in file("designer/server")) .value, Test / test := (Test / test) .dependsOn( +// flinkDeploymentManager / Compile / assembly, +// liteK8sDeploymentManager / Compile / assembly, + liteEmbeddedDeploymentManager / Compile / assembly, defaultModel / Compile / assembly, flinkTableApiComponents / Compile / assembly, flinkDevModel / Compile / assembly, @@ -1951,8 +1957,15 @@ lazy val designer = (project in file("designer/server")) flinkExecutor / prepareItLibs ) .value, + (Test / managedClasspath) += baseDirectory.value / "engine" / "lite" / "embeddedDeploymentManager" / "target" / "scala-2.13", +// unmanagedResourceDirectories in Test <+= baseDirectory ( _ /"engine/lite/embeddedDeploymentManager/target/scala-2.13" ), +// Test / testOptions += Tests.Setup(() => { +// val classpath = (Test / unmanagedClasspath).value +// println(s"Test classpath: $classpath") +// }), +// Test / unmanagedClasspath += baseDirectory.value / "engine" / "lite" / "embeddedDeploymentManager" / "target" / "scala-2.13" / "classes", /* - We depend on copyClientDist in packageBin and assembly to be make sure fe files will be included in jar and fajar + We depend on copyClientDist in packageBin and assembly to be make sure FE files will be included in jar and fajar We abuse sbt a little bit, but we don't want to put webpack in generate resources phase, as it's long and it would make compilation v. long. This is not too nice, but so far only alternative is to put designer dists copyClientDist outside sbt and use bash to control when it's done - and this can lead to bugs and edge cases (release, dist/docker, dist/tgz, assembly...) 
@@ -1990,6 +2003,7 @@ lazy val designer = (project in file("designer/server")) "org.apache.xmlgraphics" % "fop" % "2.9" exclude ("commons-logging", "commons-logging"), "com.beachape" %% "enumeratum-circe" % enumeratumV, "tf.tofu" %% "derevo-circe" % "0.13.0", + "com.softwaremill.retry" %% "retry" % retryV, "com.softwaremill.sttp.apispec" %% "openapi-circe-yaml" % openapiCirceYamlV, "com.softwaremill.sttp.tapir" %% "tapir-akka-http-server" % tapirV, "com.softwaremill.sttp.tapir" %% "tapir-core" % tapirV, @@ -2033,6 +2047,7 @@ lazy val designer = (project in file("designer/server")) defaultHelpers % Test, testUtils % Test, flinkTestUtils % Test, + developmentTestsDeploymentManager % Test, componentsApi % "test->test", // All DeploymentManager dependencies are added because they are needed to run NussknackerApp* with // dev-application.conf. Currently, we doesn't have a separate classpath for DMs like we have for components. @@ -2040,11 +2055,11 @@ lazy val designer = (project in file("designer/server")) // that are also load added their test dependencies on the classpath by the Idea. 
It causes that // UniversalKafkaSourceFactory is loaded from app classloader and GenericRecord which is defined in typesToExtract // is missing from this classloader - flinkDeploymentManager % Provided, - liteEmbeddedDeploymentManager % Provided, - liteK8sDeploymentManager % Provided, - developmentTestsDeploymentManager % Provided, - flinkPeriodicDeploymentManager % Provided, +// flinkDeploymentManager % Provided, // todo: remove +// liteEmbeddedDeploymentManager % Test, +// liteK8sDeploymentManager % Provided, +// developmentTestsDeploymentManager % Provided, +// flinkPeriodicDeploymentManager % Provided, schemedKafkaComponentsUtils % Provided, ) @@ -2249,9 +2264,13 @@ prepareDev := { (flinkExecutor / prepareItLibs).value val workTarget = (designer / baseDirectory).value / "work" val artifacts = - (distribution / componentArtifacts).value ++ (distribution / devArtifacts).value ++ developmentTestsDeployManagerArtifacts.value ++ + (distribution / componentArtifacts).value ++ + (distribution / devArtifacts).value ++ + developmentTestsDeployManagerArtifacts.value ++ Def - .taskDyn(if (addManagerArtifacts) distribution / managerArtifacts else Def.task[List[(File, String)]](Nil)) + .taskDyn( + if (addManagerArtifacts) distribution / deploymentManagerArtifacts else Def.task[List[(File, String)]](Nil) + ) .value ++ (flinkExecutor / additionalBundledArtifacts).value IO.copy(artifacts.map { case (source, target) => (source, workTarget / target) }) diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala index 6a7dcaff32d..3d387f82d60 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala +++ 
b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala @@ -5,7 +5,7 @@ import com.typesafe.scalalogging.LazyLogging import pl.touk.nussknacker.engine._ import pl.touk.nussknacker.engine.api.process.ProcessingType import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap -import pl.touk.nussknacker.engine.util.loader.ScalaServiceLoader +import pl.touk.nussknacker.engine.util.loader.{ModelClassLoader, ScalaServiceLoader} import pl.touk.nussknacker.ui.NussknackerConfig import pl.touk.nussknacker.ui.process.processingtype._ import pl.touk.nussknacker.ui.process.processingtype.loader.ProcessingTypeDataLoader.toValueWithRestriction @@ -67,5 +67,17 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConf private def createDeploymentManagerProvider(typeConfig: ProcessingTypeConfig): DeploymentManagerProvider = { ScalaServiceLoader.loadNamed[DeploymentManagerProvider](typeConfig.deploymentManagerType) } +// private def createDeploymentManagerProvider(typeConfig: ProcessingTypeConfig): DeploymentManagerProvider = { +// ScalaServiceLoader.loadNamed[DeploymentManagerProvider]( +// typeConfig.deploymentManagerType, +// ModelClassLoader( +// "engine/lite/deploymentManager/target/scala-2.13/classes/" :: +// "engine/lite/embeddedDeploymentManager/target/scala-2.13/classes/" :: +// "engine/lite/runtime/target/scala-2.13/classes/" :: +// Nil, +// None) +// .classLoader +// ) +// } } diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ScenarioActionRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ScenarioActionRepository.scala index 287f44bceca..57a193fc755 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ScenarioActionRepository.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ScenarioActionRepository.scala @@ -7,7 
+7,6 @@ import pl.touk.nussknacker.engine.api.Comment import pl.touk.nussknacker.engine.api.deployment.ProcessActionState.ProcessActionState import pl.touk.nussknacker.engine.api.deployment._ import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, ProcessingType, VersionId} -import pl.touk.nussknacker.engine.management.periodic.InstantBatchCustomAction import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap import pl.touk.nussknacker.ui.app.BuildInfo import pl.touk.nussknacker.ui.db.entity.{ @@ -291,7 +290,7 @@ class DbScenarioActionRepository private ( ScenarioActivityType.ScenarioPaused case ScenarioActionName.Rename => ScenarioActivityType.ScenarioNameChanged - case InstantBatchCustomAction.name => + case ScenarioActionName.RunNow => ScenarioActivityType.PerformedSingleExecution case otherCustomName => ScenarioActivityType.CustomAction(otherCustomName.value) @@ -519,7 +518,7 @@ class DbScenarioActionRepository private ( case ScenarioActivityType.OutgoingMigration => None case ScenarioActivityType.PerformedSingleExecution => - Some(InstantBatchCustomAction.name) + Some(ScenarioActionName.RunNow) case ScenarioActivityType.PerformedScheduledExecution => None case ScenarioActivityType.AutomaticUpdate => diff --git a/designer/server/src/test/scala/db/migration/V1_057__MigrateActionsAndCommentsToScenarioActivities.scala b/designer/server/src/test/scala/db/migration/V1_057__MigrateActionsAndCommentsToScenarioActivities.scala index 7e6cfd9aab3..1982895a0dc 100644 --- a/designer/server/src/test/scala/db/migration/V1_057__MigrateActionsAndCommentsToScenarioActivities.scala +++ b/designer/server/src/test/scala/db/migration/V1_057__MigrateActionsAndCommentsToScenarioActivities.scala @@ -13,7 +13,6 @@ import pl.touk.nussknacker.engine.api.deployment._ import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId} import pl.touk.nussknacker.engine.api.{MetaData, ProcessAdditionalFields, RequestResponseMetaData} import 
pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess -import pl.touk.nussknacker.engine.management.periodic.InstantBatchCustomAction import pl.touk.nussknacker.restmodel.component.ScenarioComponentsUsages import pl.touk.nussknacker.test.base.db.WithHsqlDbTesting import pl.touk.nussknacker.test.base.it.NuItTest @@ -230,7 +229,7 @@ class V1_057__MigrateActionsAndCommentsToScenarioActivities } "migrate custom action 'run now' with comment to scenario_activities table" in { testMigratingActionWithComment( - scenarioActionName = InstantBatchCustomAction.name, + scenarioActionName = ScenarioActionName.RunNow, actionComment = Some("Run now: Deployed at the request of business"), expectedActivity = (sid, sad, user, date, sv) => ScenarioActivity.PerformedSingleExecution( diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/WithRunNowPeriodicCustomActionsProviderFactory.scala b/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/WithRunNowPeriodicCustomActionsProviderFactory.scala index 9501316c622..547bd2bd5b8 100644 --- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/WithRunNowPeriodicCustomActionsProviderFactory.scala +++ b/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/WithRunNowPeriodicCustomActionsProviderFactory.scala @@ -7,6 +7,7 @@ import pl.touk.nussknacker.engine.api.process.ProcessName import pl.touk.nussknacker.engine.deployment.{CustomActionDefinition, CustomActionResult} import pl.touk.nussknacker.engine.management.periodic.db.PeriodicProcessesRepository import pl.touk.nussknacker.engine.api.deployment.DMCustomActionCommand +import pl.touk.nussknacker.engine.api.deployment.ScenarioActionName.RunNow import java.net.URI import scala.concurrent.{ExecutionContext, Future} @@ -24,8 +25,8 @@ class WithRunNowPeriodicCustomActionsProviderFactory extends 
PeriodicCustomActio override def invokeCustomAction(actionRequest: DMCustomActionCommand): Future[CustomActionResult] = { actionRequest.actionName match { - case InstantBatchCustomAction.name => actionInstantBatch(actionRequest) - case _ => Future.failed(new NotImplementedError()) + case ScenarioActionName.RunNow => actionInstantBatch(actionRequest) + case _ => Future.failed(new NotImplementedError()) } } @@ -58,12 +59,9 @@ class WithRunNowPeriodicCustomActionsProviderFactory extends PeriodicCustomActio //TODO: replace custom action with dedicated command in core services case object InstantBatchCustomAction { - // name is displayed as label under the button - val name: ScenarioActionName = ScenarioActionName("run now") - def apply(): CustomActionDefinition = { CustomActionDefinition( - actionName = name, + actionName = RunNow, allowedStateStatusNames = List("SCHEDULED"), icon = Some(new URI("/assets/custom-actions/batch-instant.svg")), parameters = Nil diff --git a/extensions-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/ProcessAction.scala b/extensions-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/ProcessAction.scala index ebb91e628c3..62475c226a2 100644 --- a/extensions-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/ProcessAction.scala +++ b/extensions-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/ProcessAction.scala @@ -70,6 +70,7 @@ object ScenarioActionName { val UnArchive: ScenarioActionName = ScenarioActionName("UNARCHIVE") val Pause: ScenarioActionName = ScenarioActionName("PAUSE") // TODO: To implement in future.. 
val Rename: ScenarioActionName = ScenarioActionName("RENAME") + val RunNow: ScenarioActionName = ScenarioActionName("run now") val DefaultActions: List[ScenarioActionName] = Nil diff --git a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala index 470d9190cac..d3d00b9e4cc 100644 --- a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala +++ b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala @@ -21,8 +21,20 @@ case class ModelClassLoader private (classLoader: ClassLoader, urls: List[URL]) object ModelClassLoader extends LazyLogging { // for e.g. testing in process module val empty: ModelClassLoader = ModelClassLoader(getClass.getClassLoader, List()) + val defaultJarExtension = ".jar" - val defaultJarExtension = ".jar" + // workingDirectoryOpt is for the purpose of easier testing. We can't easily change the working directory otherwise - see https://stackoverflow.com/a/840229 + def apply( + urls: List[String], + workingDirectoryOpt: Option[Path], + jarExtension: String = defaultJarExtension + ): ModelClassLoader = { + val postProcessedURLs = expandFiles(urls.map(convertToURL(_, workingDirectoryOpt)), jarExtension) + ModelClassLoader( + new URLClassLoader(postProcessedURLs.toArray, this.getClass.getClassLoader), + postProcessedURLs.toList + ) + } private def expandFiles(urls: Iterable[URL], jarExtension: String): Iterable[URL] = { urls.flatMap { @@ -60,17 +72,4 @@ object ModelClassLoader extends LazyLogging { } } - // workingDirectoryOpt is for the purpose of easier testing. 
We can't easily change the working directory otherwise - see https://stackoverflow.com/a/840229 - def apply( - urls: List[String], - workingDirectoryOpt: Option[Path], - jarExtension: String = defaultJarExtension - ): ModelClassLoader = { - val postProcessedURLs = expandFiles(urls.map(convertToURL(_, workingDirectoryOpt)), jarExtension) - ModelClassLoader( - new URLClassLoader(postProcessedURLs.toArray, this.getClass.getClassLoader), - postProcessedURLs.toList - ) - } - } From aae2697791163c508845bd07b8aa1849cab40371 Mon Sep 17 00:00:00 2001 From: MK Software Date: Mon, 18 Nov 2024 16:21:20 +0100 Subject: [PATCH 02/73] wip --- build.sbt | 2 +- ...sConfigBasedProcessingTypeDataLoader.scala | 8 ++- .../embedded/EmbeddedDeploymentManager.scala | 48 ++---------------- .../EmbeddedDeploymentManagerProvider.scala | 49 +++++++++++++++++++ 4 files changed, 60 insertions(+), 47 deletions(-) create mode 100644 engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManagerProvider.scala diff --git a/build.sbt b/build.sbt index 81118ba3524..12159d9bf1b 100644 --- a/build.sbt +++ b/build.sbt @@ -1949,7 +1949,7 @@ lazy val designer = (project in file("designer/server")) .dependsOn( // flinkDeploymentManager / Compile / assembly, // liteK8sDeploymentManager / Compile / assembly, - liteEmbeddedDeploymentManager / Compile / assembly, +// liteEmbeddedDeploymentManager / Compile / assembly, defaultModel / Compile / assembly, flinkTableApiComponents / Compile / assembly, flinkDevModel / Compile / assembly, diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala index 3d387f82d60..b16f4026af3 100644 --- 
a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala @@ -11,6 +11,10 @@ import pl.touk.nussknacker.ui.process.processingtype._ import pl.touk.nussknacker.ui.process.processingtype.loader.ProcessingTypeDataLoader.toValueWithRestriction import pl.touk.nussknacker.ui.process.processingtype.provider.ProcessingTypeDataState +import java.net.URL +import java.nio.file.Paths +import scala.reflect.internal.util.ScalaClassLoader.URLClassLoader + class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConfig) extends ProcessingTypeDataLoader with LazyLogging { @@ -65,7 +69,9 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConf } private def createDeploymentManagerProvider(typeConfig: ProcessingTypeConfig): DeploymentManagerProvider = { - ScalaServiceLoader.loadNamed[DeploymentManagerProvider](typeConfig.deploymentManagerType) + val managersClassLoader = + new URLClassLoader(Seq(Paths.get("designer/server/work/managers/").toUri.toURL), this.getClass.getClassLoader) + ScalaServiceLoader.loadNamed[DeploymentManagerProvider](typeConfig.deploymentManagerType, managersClassLoader) } // private def createDeploymentManagerProvider(typeConfig: ProcessingTypeConfig): DeploymentManagerProvider = { // ScalaServiceLoader.loadNamed[DeploymentManagerProvider]( diff --git a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManager.scala b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManager.scala index 178f5ab5cbf..1da01f2de74 100644 --- a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManager.scala +++ 
b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManager.scala @@ -1,10 +1,6 @@ package pl.touk.nussknacker.engine.embedded -import cats.data.Validated.valid -import cats.data.ValidatedNel -import com.typesafe.config.Config import com.typesafe.scalalogging.LazyLogging -import pl.touk.nussknacker.engine.ModelData.BaseModelDataExt import pl.touk.nussknacker.engine.api._ import pl.touk.nussknacker.engine.api.deployment._ import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus @@ -12,51 +8,13 @@ import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus.Proble import pl.touk.nussknacker.engine.api.process.ProcessName import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess import pl.touk.nussknacker.engine.deployment.{DeploymentData, DeploymentId, ExternalDeploymentId} -import pl.touk.nussknacker.engine.embedded.requestresponse.RequestResponseDeploymentStrategy -import pl.touk.nussknacker.engine.embedded.streaming.StreamingDeploymentStrategy -import pl.touk.nussknacker.engine.lite.api.runtimecontext.LiteEngineRuntimeContextPreparer -import pl.touk.nussknacker.engine.lite.metrics.dropwizard.{DropwizardMetricsProviderFactory, LiteMetricRegistryFactory} -import pl.touk.nussknacker.engine.{BaseModelData, CustomProcessValidator, DeploymentManagerDependencies, ModelData} -import pl.touk.nussknacker.lite.manager.{LiteDeploymentManager, LiteDeploymentManagerProvider} -import pl.touk.nussknacker.engine.newdeployment +import pl.touk.nussknacker.engine.{ModelData, newdeployment} +import pl.touk.nussknacker.lite.manager.LiteDeploymentManager -import scala.concurrent.duration.{DurationInt, FiniteDuration} +import scala.concurrent.duration.DurationInt import scala.concurrent.{Await, ExecutionContext, Future} import scala.util.{Failure, Success, Try} -class EmbeddedDeploymentManagerProvider extends LiteDeploymentManagerProvider { - - override def createDeploymentManager( - modelData: 
BaseModelData, - dependencies: DeploymentManagerDependencies, - engineConfig: Config, - scenarioStateCacheTTL: Option[FiniteDuration] - ): ValidatedNel[String, DeploymentManager] = { - import dependencies._ - val strategy = forMode(engineConfig)( - new StreamingDeploymentStrategy, - RequestResponseDeploymentStrategy(engineConfig) - ) - - val metricRegistry = LiteMetricRegistryFactory.usingHostnameAsDefaultInstanceId.prepareRegistry(engineConfig) - val contextPreparer = new LiteEngineRuntimeContextPreparer(new DropwizardMetricsProviderFactory(metricRegistry)) - - strategy.open(modelData.asInvokableModelData, contextPreparer) - valid(new EmbeddedDeploymentManager(modelData.asInvokableModelData, deployedScenariosProvider, strategy)) - } - - override protected def defaultRequestResponseSlug(scenarioName: ProcessName, config: Config): String = - RequestResponseDeploymentStrategy.defaultSlug(scenarioName) - - override def additionalValidators(config: Config): List[CustomProcessValidator] = forMode(config)( - Nil, - List(EmbeddedRequestResponseScenarioValidator) - ) - - override def name: String = "lite-embedded" - -} - /* FIXME: better synchronization - comment below isn't true anymore + make HA ready Currently we assume that all operations that modify state (i.e. 
deploy and cancel) are performed from diff --git a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManagerProvider.scala b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManagerProvider.scala new file mode 100644 index 00000000000..57fb4297ee3 --- /dev/null +++ b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManagerProvider.scala @@ -0,0 +1,49 @@ +package pl.touk.nussknacker.engine.embedded + +import cats.data.Validated.valid +import cats.data.ValidatedNel +import com.typesafe.config.Config +import pl.touk.nussknacker.engine.ModelData.BaseModelDataExt +import pl.touk.nussknacker.engine.{BaseModelData, CustomProcessValidator, DeploymentManagerDependencies} +import pl.touk.nussknacker.engine.api.deployment.DeploymentManager +import pl.touk.nussknacker.engine.api.process.ProcessName +import pl.touk.nussknacker.engine.embedded.requestresponse.RequestResponseDeploymentStrategy +import pl.touk.nussknacker.engine.embedded.streaming.StreamingDeploymentStrategy +import pl.touk.nussknacker.engine.lite.api.runtimecontext.LiteEngineRuntimeContextPreparer +import pl.touk.nussknacker.engine.lite.metrics.dropwizard.{DropwizardMetricsProviderFactory, LiteMetricRegistryFactory} +import pl.touk.nussknacker.lite.manager.LiteDeploymentManagerProvider + +import scala.concurrent.duration.FiniteDuration + +class EmbeddedDeploymentManagerProvider extends LiteDeploymentManagerProvider { + + override def createDeploymentManager( + modelData: BaseModelData, + dependencies: DeploymentManagerDependencies, + engineConfig: Config, + scenarioStateCacheTTL: Option[FiniteDuration] + ): ValidatedNel[String, DeploymentManager] = { + import dependencies._ + val strategy = forMode(engineConfig)( + new StreamingDeploymentStrategy, + RequestResponseDeploymentStrategy(engineConfig) + ) + + val metricRegistry = 
LiteMetricRegistryFactory.usingHostnameAsDefaultInstanceId.prepareRegistry(engineConfig) + val contextPreparer = new LiteEngineRuntimeContextPreparer(new DropwizardMetricsProviderFactory(metricRegistry)) + + strategy.open(modelData.asInvokableModelData, contextPreparer) + valid(new EmbeddedDeploymentManager(modelData.asInvokableModelData, deployedScenariosProvider, strategy)) + } + + override protected def defaultRequestResponseSlug(scenarioName: ProcessName, config: Config): String = + RequestResponseDeploymentStrategy.defaultSlug(scenarioName) + + override def additionalValidators(config: Config): List[CustomProcessValidator] = forMode(config)( + Nil, + List(EmbeddedRequestResponseScenarioValidator) + ) + + override def name: String = "lite-embedded" + +} From 0aab679e6c0621f587e868d890d2ea690536eb5c Mon Sep 17 00:00:00 2001 From: MK Software Date: Wed, 11 Dec 2024 16:29:17 +0100 Subject: [PATCH 03/73] added: test --- .run/NussknackerRemoteDebug.run.xml | 29 +-- build.sbt | 2 +- .../bootstrap-setup-scenarios.override.yml | 1 + .../resources/loan-request/LoanRequest.json | 239 ++++++++++++++++++ .../pl/touk/nussknacker/LoanRequestSpec.scala | 27 ++ .../lite/ScenarioInterpreterFactory.scala | 3 +- examples/installation/docker-compose.yml | 1 + examples/installation/nginx/nginx.conf | 8 + .../resources/bootstrap-setup.override.yml | 6 +- ...asedNuInstallationExampleEnvironment.scala | 50 +++- 10 files changed, 344 insertions(+), 22 deletions(-) create mode 100644 e2e-tests/src/test/resources/loan-request/LoanRequest.json create mode 100644 e2e-tests/src/test/scala/pl/touk/nussknacker/LoanRequestSpec.scala diff --git a/.run/NussknackerRemoteDebug.run.xml b/.run/NussknackerRemoteDebug.run.xml index ea46b4eabdc..9a65e2df263 100644 --- a/.run/NussknackerRemoteDebug.run.xml +++ b/.run/NussknackerRemoteDebug.run.xml @@ -1,16 +1,15 @@ - - - - + + + \ No newline at end of file diff --git a/build.sbt b/build.sbt index 4bd5ac02c2b..e8ec0cc35a3 100644 --- a/build.sbt +++ 
b/build.sbt @@ -346,7 +346,6 @@ val caffeineCacheV = "3.1.8" val sttpV = "3.9.8" val tapirV = "1.11.7" val openapiCirceYamlV = "0.11.3" -val retryV = "0.3.6" //we use legacy version because this one supports Scala 2.12 val monocleV = "2.1.0" val jmxPrometheusJavaagentV = "0.20.0" @@ -2085,6 +2084,7 @@ lazy val designer = (project in file("designer/server")) // liteK8sDeploymentManager % Provided, // developmentTestsDeploymentManager % Provided, // flinkPeriodicDeploymentManager % Provided, + requestResponseRuntime % Test, schemedKafkaComponentsUtils % Provided, ) diff --git a/e2e-tests/src/test/resources/bootstrap-setup-scenarios.override.yml b/e2e-tests/src/test/resources/bootstrap-setup-scenarios.override.yml index 6e1c1f81ab0..38b70fa7b0e 100644 --- a/e2e-tests/src/test/resources/bootstrap-setup-scenarios.override.yml +++ b/e2e-tests/src/test/resources/bootstrap-setup-scenarios.override.yml @@ -4,3 +4,4 @@ services: volumes: - ../../e2e-tests/src/test/resources/detect-large-transactions:/scenario-examples/detect-large-transactions - ../../e2e-tests/src/test/resources/determine-offered-plan:/scenario-examples/determine-offered-plan + - ../../e2e-tests/src/test/resources/loan-request:/scenario-examples/loan-request diff --git a/e2e-tests/src/test/resources/loan-request/LoanRequest.json b/e2e-tests/src/test/resources/loan-request/LoanRequest.json new file mode 100644 index 00000000000..a82ba1ec844 --- /dev/null +++ b/e2e-tests/src/test/resources/loan-request/LoanRequest.json @@ -0,0 +1,239 @@ +{ + "metaData": { + "id": "LoanRequest", + "additionalFields": { + "description": null, + "properties": { + "inputSchema": "{\n \"type\": \"object\",\n \"properties\": {\n \"customerId\": {\n \"type\": \"string\"\n },\n \"location\": {\n \"type\": \"object\",\n \"properties\": {\n \"city\": {\n \"type\": \"string\"\n },\n \"street\": {\n \"type\": \"string\"\n }\n }\n },\n \"requestType\": {\n \"type\": \"string\"\n },\n \"requestedAmount\": {\n \"type\": \"number\"\n }\n 
},\n \"required\": [\"customerId\", \"location\", \"requestType\", \"requestedAmount\"],\n \"additionalProperties\": false\n}", + "outputSchema": "{\n \"type\": \"object\",\n \"properties\": {\n \"acceptedAmount\": {\n \"type\": \"number\",\n \"description\": \"Accepted amount\"\n },\n \"message\": {\n \"type\": \"string\",\n \"description\": \"Additional message\"\n }\n },\n \"required\": [\"acceptedAmount\", \"message\"],\n \"additionalProperties\": false\n}", + "slug": "loan" + }, + "metaDataType": "RequestResponseMetaData" + } + }, + "nodes": [ + { + "id": "request", + "ref": { + "typ": "request", + "parameters": [ + ] + }, + "additionalFields": { + "description": null, + "layoutData": { + "x": 360, + "y": 0 + } + }, + "type": "Source" + }, + { + "defaultNext": [ + ], + "nexts": [ + { + "expression": { + "language": "spel", + "expression": "#input.requestType == 'loan'" + }, + "nodes": [ + { + "id": "loan response", + "ref": { + "typ": "response", + "parameters": [ + { + "name": "acceptedAmount", + "expression": { + "language": "spel", + "expression": "50" + } + }, + { + "name": "message", + "expression": { + "language": "spel", + "expression": "'only small amount available'" + } + } + ] + }, + "endResult": null, + "isDisabled": null, + "additionalFields": { + "description": null, + "layoutData": { + "x": 0, + "y": 360 + } + }, + "type": "Sink" + } + ] + }, + { + "expression": { + "language": "spel", + "expression": "#input.requestType == 'mortgage'" + }, + "nodes": [ + { + "defaultNext": [ + ], + "nexts": [ + { + "expression": { + "language": "spel", + "expression": "#input.location.city == 'Warszawa'" + }, + "nodes": [ + { + "id": "Warsaw mortgage", + "ref": { + "typ": "response", + "parameters": [ + { + "name": "acceptedAmount", + "expression": { + "language": "spel", + "expression": "1000" + } + }, + { + "name": "message", + "expression": { + "language": "spel", + "expression": "'Large sum for Warszawa'" + } + } + ] + }, + "endResult": null, + "isDisabled": 
null, + "additionalFields": { + "description": null, + "layoutData": { + "x": 180, + "y": 540 + } + }, + "type": "Sink" + } + ] + }, + { + "expression": { + "language": "spel", + "expression": "true" + }, + "nodes": [ + { + "id": "Other city mortgage", + "ref": { + "typ": "response", + "parameters": [ + { + "name": "Raw editor", + "expression": { + "language": "spel", + "expression": "false" + } + }, + { + "name": "acceptedAmount", + "expression": { + "language": "spel", + "expression": "100" + } + }, + { + "name": "message", + "expression": { + "language": "spel", + "expression": "'Large sum for other city'" + } + } + ] + }, + "endResult": null, + "isDisabled": null, + "additionalFields": { + "description": null, + "layoutData": { + "x": 540, + "y": 540 + } + }, + "type": "Sink" + } + ] + } + ], + "id": "switch", + "additionalFields": { + "description": null, + "layoutData": { + "x": 360, + "y": 360 + } + }, + "type": "Switch" + } + ] + }, + { + "expression": { + "language": "spel", + "expression": "true" + }, + "nodes": [ + { + "id": "unknown", + "ref": { + "typ": "response", + "parameters": [ + { + "name": "acceptedAmount", + "expression": { + "language": "spel", + "expression": "0" + } + }, + { + "name": "message", + "expression": { + "language": "spel", + "expression": "'Unknown loan type'" + } + } + ] + }, + "endResult": null, + "isDisabled": null, + "additionalFields": { + "description": null, + "layoutData": { + "x": 720, + "y": 360 + } + }, + "type": "Sink" + } + ] + } + ], + "id": "loan type", + "additionalFields": { + "description": null, + "layoutData": { + "x": 360, + "y": 180 + } + }, + "type": "Switch" + } + ], + "additionalBranches": [ + ] +} diff --git a/e2e-tests/src/test/scala/pl/touk/nussknacker/LoanRequestSpec.scala b/e2e-tests/src/test/scala/pl/touk/nussknacker/LoanRequestSpec.scala new file mode 100644 index 00000000000..a8bb31e6dea --- /dev/null +++ b/e2e-tests/src/test/scala/pl/touk/nussknacker/LoanRequestSpec.scala @@ -0,0 +1,27 @@ 
+package pl.touk.nussknacker + +import org.scalatest.freespec.AnyFreeSpecLike +import org.scalatest.matchers.should.Matchers +import pl.touk.nussknacker.test.VeryPatientScalaFutures +import pl.touk.nussknacker.test.installationexample.HttpResponse + +class LoanRequestSpec extends AnyFreeSpecLike with BaseE2ESpec with Matchers with VeryPatientScalaFutures { + + "Properly handle loan request" in { + val result = client.sendHttpRequest( + serviceSlug = "loan", + payload = ujson.read { + """{ + | "customerId": "anon", + | "requestedAmount": 1555, + | "requestType": "mortgage", + | "location": { "city": "Warszawa", "street": "MarszaƂkowska" } + |}""".stripMargin + } + ) + result should be( + Right(HttpResponse(200, ujson.read("""{"acceptedAmount":1000,"message":"Large sum for Warszawa"}"""))) + ) + } + +} diff --git a/engine/lite/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/ScenarioInterpreterFactory.scala b/engine/lite/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/ScenarioInterpreterFactory.scala index b1ba4825aa1..129b7d4a426 100644 --- a/engine/lite/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/ScenarioInterpreterFactory.scala +++ b/engine/lite/runtime/src/main/scala/pl/touk/nussknacker/engine/lite/ScenarioInterpreterFactory.scala @@ -296,7 +296,8 @@ object ScenarioInterpreterFactory { } ) - case other => throw new IllegalArgumentException(s"Not supported sink: $other") + case other => + throw new IllegalArgumentException(s"Not supported sink: $other") } private def compilePartInvokers(parts: List[SubsequentPart]): CompilationResult[Map[String, PartInterpreterType]] = diff --git a/examples/installation/docker-compose.yml b/examples/installation/docker-compose.yml index a8e977f117a..dc2c832f1da 100644 --- a/examples/installation/docker-compose.yml +++ b/examples/installation/docker-compose.yml @@ -7,6 +7,7 @@ services: restart: unless-stopped ports: - 8080:8080 + - 8181:8181 depends_on: grafana: condition: service_healthy diff --git 
a/examples/installation/nginx/nginx.conf b/examples/installation/nginx/nginx.conf index ac3b74ba535..bf82dec8d7c 100644 --- a/examples/installation/nginx/nginx.conf +++ b/examples/installation/nginx/nginx.conf @@ -19,6 +19,14 @@ http { proxy_pass http://designer:8080; } } + # exposing Request-Response Lite Embedded services defined in Nu + server { + listen 8181; + + location / { + proxy_pass http://designer:8181/; + } + } } events {} diff --git a/utils/test-utils/src/main/resources/bootstrap-setup.override.yml b/utils/test-utils/src/main/resources/bootstrap-setup.override.yml index c789679792f..8f48d344fc9 100644 --- a/utils/test-utils/src/main/resources/bootstrap-setup.override.yml +++ b/utils/test-utils/src/main/resources/bootstrap-setup.override.yml @@ -1,7 +1,7 @@ services: bootstrap-setup: - image: touk/nussknacker-example-scenarios-library:0.3.0 + image: touk/nussknacker-example-scenarios-library:0.4.1 depends_on: nginx: condition: service_healthy @@ -22,3 +22,7 @@ services: limits: memory: 256M cpus: '0.5' + + designer: + expose: + - 8181 diff --git a/utils/test-utils/src/main/scala/pl/touk/nussknacker/test/installationexample/DockerBasedNuInstallationExampleEnvironment.scala b/utils/test-utils/src/main/scala/pl/touk/nussknacker/test/installationexample/DockerBasedNuInstallationExampleEnvironment.scala index 44f4770a30c..241ac05ff16 100644 --- a/utils/test-utils/src/main/scala/pl/touk/nussknacker/test/installationexample/DockerBasedNuInstallationExampleEnvironment.scala +++ b/utils/test-utils/src/main/scala/pl/touk/nussknacker/test/installationexample/DockerBasedNuInstallationExampleEnvironment.scala @@ -1,18 +1,24 @@ package pl.touk.nussknacker.test.installationexample +import cats.effect.IO +import cats.effect.kernel.Resource +import cats.effect.unsafe.implicits.global import com.dimafeng.testcontainers.{DockerComposeContainer, ServiceLogConsumer, WaitingForService} import com.typesafe.scalalogging.LazyLogging import org.slf4j.Logger -import 
org.testcontainers.DockerClientFactory import org.testcontainers.containers.output.Slf4jLogConsumer import org.testcontainers.containers.wait.strategy.DockerHealthcheckWaitStrategy +import pl.touk.nussknacker.test.MiscUtils._ +import pl.touk.nussknacker.test.WithTestHttpClientCreator import pl.touk.nussknacker.test.containers.ContainerExt.toContainerExt import pl.touk.nussknacker.test.installationexample.DockerBasedInstallationExampleNuEnvironment.{JSON, slf4jLogger} +import sttp.client3._ +import sttp.model.MediaType import ujson.Value -import pl.touk.nussknacker.test.MiscUtils._ import java.io.{File => JFile} import java.time.Duration +import scala.util.Try class DockerBasedInstallationExampleNuEnvironment( nussknackerImageVersion: String, @@ -42,7 +48,16 @@ class DockerBasedInstallationExampleNuEnvironment( start() - val client: DockerBasedInstallationExampleClient = new DockerBasedInstallationExampleClient(this) + private val (dockerBasedInstallationExampleClient, closeHandler) = + DockerBasedInstallationExampleClient.create(this).allocated.unsafeRunSync() + + val client: DockerBasedInstallationExampleClient = dockerBasedInstallationExampleClient + + override def stop(): Unit = { + closeHandler.unsafeRunSync() + super.stop() + } + } object DockerBasedInstallationExampleNuEnvironment extends LazyLogging { @@ -53,9 +68,22 @@ object DockerBasedInstallationExampleNuEnvironment extends LazyLogging { } -class DockerBasedInstallationExampleClient(env: DockerBasedInstallationExampleNuEnvironment) { +object DockerBasedInstallationExampleClient extends WithTestHttpClientCreator { + + def create(env: DockerBasedInstallationExampleNuEnvironment): Resource[IO, DockerBasedInstallationExampleClient] = { + createHttpClient(sslContext = None) + .map(new DockerBasedInstallationExampleClient(env, _)) + } + +} + +class DockerBasedInstallationExampleClient private ( + env: DockerBasedInstallationExampleNuEnvironment, + sttpBackend: SttpBackend[Identity, Any] +) { private val 
bootstrapSetupService = unsafeContainerByServiceName("bootstrap-setup") + private val nginxService = unsafeContainerByServiceName("nginx") def deployAndWaitForRunningState(scenarioName: String): Unit = { bootstrapSetupService.executeBash( @@ -88,8 +116,22 @@ class DockerBasedInstallationExampleClient(env: DockerBasedInstallationExampleNu bootstrapSetupService.executeBash(s"""/app/utils/kafka/purge-topic.sh "$topic" """) } + def sendHttpRequest(serviceSlug: String, payload: JSON): Either[Throwable, HttpResponse] = { + val response = sttp.client3.basicRequest + .post(uri"http://${nginxService.getHost}:8181/scenario/$serviceSlug") + .contentType(MediaType.ApplicationJson) + .body(payload.render()) + .response(asStringAlways) + .send(sttpBackend) + + Try(ujson.read(response.body)).toEither + .map(body => HttpResponse(response.code.code, ujson.read(body))) + } + private def unsafeContainerByServiceName(name: String) = env .getContainerByServiceName(name) .getOrElse(throw new IllegalStateException(s"'$name' service not available!")) } + +final case class HttpResponse(status: Int, body: JSON) From 69b16bd3aa4bc3aafb38443e2145421c7e4c0fc2 Mon Sep 17 00:00:00 2001 From: MK Software Date: Thu, 12 Dec 2024 13:01:27 +0100 Subject: [PATCH 04/73] wip --- .run/NussknackerApp.run.xml | 4 +- build.sbt | 3 +- .../main/resources/defaultDesignerConfig.conf | 9 +- .../nussknacker/ui/NussknackerConfig.scala | 23 +++- .../extrajs/ExtraScriptsListingPreparer.scala | 2 +- ...sConfigBasedProcessingTypeDataLoader.scala | 112 ++++++++---------- .../nussknacker/test/base/it/NuItTest.scala | 1 + docs/configuration/Common.md | 65 +++++----- .../EmbeddedDeploymentManagerProvider.scala | 4 +- .../universal/bin/nussknacker-entrypoint.sh | 3 +- nussknacker-dist/src/universal/bin/run.sh | 3 +- .../loader/ProcessConfigCreatorLoader.scala | 1 + .../nussknacker/engine/util/StringUtils.scala | 27 +++++ .../nussknacker/engine/util/UrlUtils.scala | 37 ++++++ .../ConfigWithUnresolvedVersionExt.scala | 8 ++ 
.../engine/util/loader/ModelClassLoader.scala | 50 +++----- .../util/loader/ScalaServiceLoader.scala | 2 +- 17 files changed, 214 insertions(+), 140 deletions(-) create mode 100644 utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/StringUtils.scala create mode 100644 utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/UrlUtils.scala diff --git a/.run/NussknackerApp.run.xml b/.run/NussknackerApp.run.xml index 4f08f2afcf2..ea2e8f14d9b 100644 --- a/.run/NussknackerApp.run.xml +++ b/.run/NussknackerApp.run.xml @@ -1,6 +1,6 @@ - - + \ No newline at end of file diff --git a/build.sbt b/build.sbt index e67ddbd0e29..f066fb6c911 100644 --- a/build.sbt +++ b/build.sbt @@ -1503,7 +1503,8 @@ lazy val developmentTestsDeployManagerArtifacts = developmentTestsDeployManagerArtifacts := List( (liteEmbeddedDeploymentManager / assembly).value -> "managers/lite-embedded-manager.jar", - (developmentTestsDeploymentManager / assembly).value -> "managers/developmentTestsManager.jar" + (developmentTestsDeploymentManager / assembly).value -> "managers/developmentTestsManager.jar", + (liteK8sDeploymentManager / assembly).value -> "managers/lite-k8s-manager.jar" ) lazy val buildAndImportRuntimeImageToK3d = taskKey[Unit]("Import runtime image into k3d cluster") diff --git a/designer/server/src/main/resources/defaultDesignerConfig.conf b/designer/server/src/main/resources/defaultDesignerConfig.conf index 2cd256d61af..123c4ffa792 100644 --- a/designer/server/src/main/resources/defaultDesignerConfig.conf +++ b/designer/server/src/main/resources/defaultDesignerConfig.conf @@ -1,6 +1,9 @@ -#We use defaultUConfig.conf instead of reference.conf, as we don't want these properties in config loaded in model configuration -#This configuration file contains sensible designer defaults for all Nussknacker deployments, without assumptions about deployment models and external tools (grafana, flink etc.) 
-#All models configurations also shouldn't be in this file +# We use defaultUConfig.conf instead of reference.conf, as we don't want these properties in config loaded in model configuration +# This configuration file contains sensible designer defaults for all Nussknacker deployments, without assumptions about deployment +# models and external tools (grafana, flink etc.). All models configurations also shouldn't be in this file + +managersDir: ./managers +managersDir: ${?MANAGERS_DIR} storageDir: ./storage storageDir: ${?STORAGE_DIR} diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/NussknackerConfig.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/NussknackerConfig.scala index a05b7b175b8..50b72ae62b7 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/NussknackerConfig.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/NussknackerConfig.scala @@ -2,10 +2,13 @@ package pl.touk.nussknacker.ui import cats.effect.IO import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap -import pl.touk.nussknacker.engine.{ConfigWithUnresolvedVersion, ProcessingTypeConfig} +import pl.touk.nussknacker.engine.util.StringUtils._ import pl.touk.nussknacker.engine.util.config.ConfigWithUnresolvedVersionExt._ +import pl.touk.nussknacker.engine.{ConfigWithUnresolvedVersion, ProcessingTypeConfig} import pl.touk.nussknacker.ui.config.DesignerConfigLoader +import java.nio.file.{Files, Path, Paths} + trait NussknackerConfig { def loadApplicationConfig(): IO[ConfigWithUnresolvedVersion] @@ -15,13 +18,29 @@ trait NussknackerConfig { .map { config => config .readMap("scenarioTypes") - .getOrElse { throw new RuntimeException("No scenario types configuration provided") } + .getOrElse { throw ConfigurationMalformedException("No scenario types configuration provided") } .mapValuesNow(ProcessingTypeConfig.read) } } + final def managersDir(): IO[Path] = { + loadApplicationConfig() + .map { config => + config.readSafeString("managersDir") 
match { + case Some(managersDirStr) => + val managersDir = Paths.get(managersDirStr.convertToURL().toURI) + if (Files.isDirectory(managersDir)) managersDir + else throw ConfigurationMalformedException(s"No '$managersDirStr' directory found") + case None => + throw ConfigurationMalformedException(s"No 'managersDir' configuration path found") + } + } + } + } +final case class ConfigurationMalformedException(msg: String) extends RuntimeException(msg) + class LoadableConfigBasedNussknackerConfig(loadConfig: IO[ConfigWithUnresolvedVersion]) extends NussknackerConfig { override def loadApplicationConfig(): IO[ConfigWithUnresolvedVersion] = loadConfig diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/extrajs/ExtraScriptsListingPreparer.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/extrajs/ExtraScriptsListingPreparer.scala index d5df18a1856..ce7a1a92e88 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/extrajs/ExtraScriptsListingPreparer.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/extrajs/ExtraScriptsListingPreparer.scala @@ -9,7 +9,7 @@ import scala.io.Source // The purpose of this listing is to be possible to dynamically (without changing application image) // add some java scripts to our main.html. Example usage: // -// docker run -it --network host -e CLASSPATH="/opt/nussknacker/lib/*:/opt/nussknacker/managers/*:/opt/nussknacker/extra-resources" +// docker run -it --network host -e CLASSPATH="/opt/nussknacker/lib/*:/opt/nussknacker/extra-resources" // -v ./extrajs:/opt/nussknacker/extra-resources/web/static/extra touk/nussknacker:latest // // After this, all *.js in the extrajs directory will be injected into main.html in the lexicographic order. 
Notice that if you want to locally diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala index 2f79368047f..7b07cd39e84 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala @@ -5,14 +5,14 @@ import com.typesafe.scalalogging.LazyLogging import pl.touk.nussknacker.engine._ import pl.touk.nussknacker.engine.api.process.ProcessingType import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap -import pl.touk.nussknacker.engine.util.loader.{ModelClassLoader, ScalaServiceLoader} +import pl.touk.nussknacker.engine.util.loader.ScalaServiceLoader import pl.touk.nussknacker.ui.NussknackerConfig import pl.touk.nussknacker.ui.process.processingtype._ import pl.touk.nussknacker.ui.process.processingtype.loader.ProcessingTypeDataLoader.toValueWithRestriction import pl.touk.nussknacker.ui.process.processingtype.provider.ProcessingTypeDataState +import pl.touk.nussknacker.engine.util.UrlUtils._ -import java.net.URL -import java.nio.file.Paths +import java.nio.file.Path import scala.reflect.internal.util.ScalaClassLoader.URLClassLoader class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConfig) @@ -23,70 +23,62 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConf getModelDependencies: ProcessingType => ModelDependencies, getDeploymentManagerDependencies: ProcessingType => DeploymentManagerDependencies, ): IO[ProcessingTypeDataState[ProcessingTypeData, CombinedProcessingTypeData]] = { - config - .loadProcessingTypeConfigs() - .map { 
processingTypesConfig => - // This step with splitting DeploymentManagerProvider loading for all processing types - // and after that creating ProcessingTypeData is done because of the deduplication of deployments - // See DeploymentManagerProvider.engineSetupIdentity - val providerWithNameInputData = processingTypesConfig.mapValuesNow { processingTypeConfig => - val provider = createDeploymentManagerProvider(processingTypeConfig) - val nameInputData = EngineNameInputData( - provider.defaultEngineSetupName, - provider.engineSetupIdentity(processingTypeConfig.deploymentConfig), - processingTypeConfig.engineSetupName + for { + managersDir <- config.managersDir() + processingTypesConfig <- config.loadProcessingTypeConfigs() + } yield { + // This step with splitting DeploymentManagerProvider loading for all processing types + // and after that creating ProcessingTypeData is done because of the deduplication of deployments + // See DeploymentManagerProvider.engineSetupIdentity + val providerWithNameInputData = processingTypesConfig.mapValuesNow { processingTypeConfig => + val provider = createDeploymentManagerProvider(managersDir, processingTypeConfig) + val nameInputData = EngineNameInputData( + provider.defaultEngineSetupName, + provider.engineSetupIdentity(processingTypeConfig.deploymentConfig), + processingTypeConfig.engineSetupName + ) + (processingTypeConfig, provider, nameInputData) + } + val engineSetupNames = + ScenarioParametersDeterminer.determineEngineSetupNames(providerWithNameInputData.mapValuesNow(_._3)) + val processingTypesData = providerWithNameInputData + .map { case (processingType, (processingTypeConfig, deploymentManagerProvider, _)) => + logger.debug(s"Creating Processing Type: $processingType with config: $processingTypeConfig") + val modelDependencies = getModelDependencies(processingType) + val processingTypeData = ProcessingTypeData.createProcessingTypeData( + processingType, + ModelData(processingTypeConfig, modelDependencies), + 
deploymentManagerProvider, + getDeploymentManagerDependencies(processingType), + engineSetupNames(processingType), + processingTypeConfig.deploymentConfig, + processingTypeConfig.category, + modelDependencies.componentDefinitionExtractionMode ) - (processingTypeConfig, provider, nameInputData) + processingType -> processingTypeData } - val engineSetupNames = - ScenarioParametersDeterminer.determineEngineSetupNames(providerWithNameInputData.mapValuesNow(_._3)) - val processingTypesData = providerWithNameInputData - .map { case (processingType, (processingTypeConfig, deploymentManagerProvider, _)) => - logger.debug(s"Creating Processing Type: $processingType with config: $processingTypeConfig") - val modelDependencies = getModelDependencies(processingType) - val processingTypeData = ProcessingTypeData.createProcessingTypeData( - processingType, - ModelData(processingTypeConfig, modelDependencies), - deploymentManagerProvider, - getDeploymentManagerDependencies(processingType), - engineSetupNames(processingType), - processingTypeConfig.deploymentConfig, - processingTypeConfig.category, - modelDependencies.componentDefinitionExtractionMode - ) - processingType -> processingTypeData - } - // Here all processing types are loaded and we are ready to perform additional configuration validations - // to assert the loaded configuration is correct (fail-fast approach). - val combinedData = CombinedProcessingTypeData.create(processingTypesData) + // Here all processing types are loaded and we are ready to perform additional configuration validations + // to assert the loaded configuration is correct (fail-fast approach). 
+ val combinedData = CombinedProcessingTypeData.create(processingTypesData) - ProcessingTypeDataState( - processingTypesData.mapValuesNow(toValueWithRestriction), - () => combinedData, - // We pass here new Object to enforce update of observers - new Object - ) - } + ProcessingTypeDataState( + processingTypesData.mapValuesNow(toValueWithRestriction), + () => combinedData, + // We pass here new Object to enforce update of observers + new Object + ) + } } - private def createDeploymentManagerProvider(typeConfig: ProcessingTypeConfig): DeploymentManagerProvider = { + private def createDeploymentManagerProvider( + managersDir: Path, + typeConfig: ProcessingTypeConfig + ): DeploymentManagerProvider = { val managersClassLoader = - new URLClassLoader(Seq(Paths.get("designer/server/work/managers/").toUri.toURL), this.getClass.getClassLoader) - ScalaServiceLoader.loadNamed[DeploymentManagerProvider](typeConfig.deploymentManagerType, managersClassLoader) + new URLClassLoader(managersDir.toUri.toURL.expandFiles(".jar"), this.getClass.getClassLoader) + ScalaServiceLoader + .loadNamed[DeploymentManagerProvider](typeConfig.deploymentManagerType, managersClassLoader) // todo: close } - // todo: -// private def createDeploymentManagerProvider(typeConfig: ProcessingTypeConfig): DeploymentManagerProvider = { -// ScalaServiceLoader.loadNamed[DeploymentManagerProvider]( -// typeConfig.deploymentManagerType, -// ModelClassLoader( -// "engine/lite/deploymentManager/target/scala-2.13/classes/" :: -// "engine/lite/embeddedDeploymentManager/target/scala-2.13/classes/" :: -// "engine/lite/runtime/target/scala-2.13/classes/" :: -// Nil, -// None) -// .classLoader -// ) -// } } diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala index 49ec79dc4b8..e79c9aad00a 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala +++ 
b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala @@ -43,6 +43,7 @@ trait NuItTest extends WithHsqlDbTesting with DefaultUniquePortProvider with Wit designerConfig .withValue("db", testDbConfig.getConfig("db").root()) .withValue("http.port", fromAnyRef(port)) + .withValue("managersDir", fromAnyRef("designer/server/work/managers")) } } diff --git a/docs/configuration/Common.md b/docs/configuration/Common.md index 8e60f327f4f..19058ac66f6 100644 --- a/docs/configuration/Common.md +++ b/docs/configuration/Common.md @@ -12,38 +12,39 @@ Because we use [HOCON](../#conventions), you can set (or override) any configura ## Basic environment variables -| Variable name | Type | Default value | Description | -|-------------------------------|---------|--------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| JDK_JAVA_OPTIONS | string | | Custom JVM options, e.g `-Xmx512M` | -| JAVA_DEBUG_PORT | int | | Port to Remote JVM Debugger. By default debugger is turned off. | -| CONFIG_FILE | string | $NUSSKNACKER_DIR/conf/application.conf | Location of application configuration. You can pass comma separated list of files, they will be merged in given order, using HOCON fallback mechanism | -| LOGBACK_FILE | string | $NUSSKNACKER_DIR/conf/docker-logback.xml | Location of logging configuration | -| WORKING_DIR | string | $NUSSKNACKER_DIR | Location of working directory | -| STORAGE_DIR | string | $WORKING_DIR/storage | Location of HSQLDB database storage | -| CLASSPATH | string | $NUSSKNACKER_DIR/lib/*:$NUSSKNACKER_DIR/managers/* | Classpath of the Designer, _lib_ directory contains related jar libraries (e.g. 
database driver), _managers_ directory contains deployment manager providers | -| LOGS_DIR | string | $WORKING_DIR/logs | Location of logs | -| HTTP_INTERFACE | string | 0.0.0.0 | Network address Nussknacker binds to | -| HTTP_PORT | string | 8080 | HTTP port used by Nussknacker | -| HTTP_PUBLIC_PATH | string | | Public HTTP path prefix the Designer UI is served at, e.g. using external proxy like [nginx](../../installation/Binaries/#configuring-the-designer-with-nginx-http-public-path) | -| DB_URL | string | jdbc:hsqldb:file:${STORAGE_DIR}/db;sql.syntax_ora=true | [See also](../configuration/DesignerConfiguration.md#database-configuration) for more information | -| DB_DRIVER | string | org.hsqldb.jdbc.JDBCDriver | Database driver class name | -| DB_USER | string | SA | User used for connection to database | -| DB_PASSWORD | string | | Password used for connection to database | -| DB_CONNECTION_TIMEOUT | int | 30000 | Connection to database timeout in milliseconds | -| AUTHENTICATION_METHOD | string | BasicAuth | Method of authentication. One of: BasicAuth, OAuth2 | -| AUTHENTICATION_USERS_FILE | string | $NUSSKNACKER_DIR/conf/users.conf | Location of users configuration file | -| AUTHENTICATION_HEADERS_ACCEPT | string | application/json | | -| AUTHENTICATION_REALM | string | nussknacker | [Realm](https://datatracker.ietf.org/doc/html/rfc2617#section-1.2) | -| FLINK_REST_URL | string | http://localhost:8081 | URL to Flink's REST API - used for scenario deployment | -| FLINK_ROCKSDB_ENABLE | boolean | true | Enable RocksDB state backend support | -| KAFKA_ADDRESS | string | localhost:9092 | Kafka address used by Kafka components (sources, sinks) | -| KAFKA_AUTO_OFFSET_RESET | string | | See [Kafka documentation](https://kafka.apache.org/documentation/#consumerconfigs_auto.offset.reset). 
For development purposes it may be convenient to set this value to 'earliest', when not set the default from Kafka ('latest' at the moment) is used | -| SCHEMA_REGISTRY_URL | string | http://localhost:8082 | Address of Confluent Schema registry used for storing data model | -| GRAFANA_URL | string | /grafana | URL to Grafana, used in UI. Should be relative to Nussknacker URL to avoid additional CORS configuration | -| INFLUXDB_URL | string | http://localhost:8086 | URL to InfluxDB used by counts mechanism | -| PROMETHEUS_METRICS_PORT | int | | When defined, JMX MBeans are exposed as Prometheus metrics on this port | -| PROMETHEUS_AGENT_CONFIG_FILE | int | $NUSSKNACKER_DIR/conf/jmx_prometheus.yaml | Default configuration for JMX Prometheus agent. Used only when agent is enabled. See `PROMETHEUS_METRICS_PORT` | -| TABLES_DEFINITION_FILE | string | $NUSSKNACKER_DIR/conf/dev-tables-definition.sql | Location of file containing definitions of tables for Flink Table API components in Flink Sql | +| Variable name | Type | Default value | Description | +|-------------------------------|----------|--------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| JDK_JAVA_OPTIONS | string | | Custom JVM options, e.g `-Xmx512M` | +| JAVA_DEBUG_PORT | int | | Port to Remote JVM Debugger. By default debugger is turned off. | +| CONFIG_FILE | string | $NUSSKNACKER_DIR/conf/application.conf | Location of application configuration. 
You can pass comma separated list of files, they will be merged in given order, using HOCON fallback mechanism | +| LOGBACK_FILE | string | $NUSSKNACKER_DIR/conf/docker-logback.xml | Location of logging configuration | +| WORKING_DIR | string | $NUSSKNACKER_DIR | Location of working directory | +| STORAGE_DIR | string | $WORKING_DIR/storage | Location of HSQLDB database storage | +| MANAGERS_DIR | string | $WORKING_DIR/managers | Location of deployment managers jars | +| CLASSPATH | string | $NUSSKNACKER_DIR/lib/* | Classpath of the Designer, _lib_ directory contains related jar libraries (e.g. database driver) | +| LOGS_DIR | string | $WORKING_DIR/logs | Location of logs | +| HTTP_INTERFACE | string | 0.0.0.0 | Network address Nussknacker binds to | +| HTTP_PORT | string | 8080 | HTTP port used by Nussknacker | +| HTTP_PUBLIC_PATH | string | | Public HTTP path prefix the Designer UI is served at, e.g. using external proxy like [nginx](../../installation/Binaries/#configuring-the-designer-with-nginx-http-public-path) | +| DB_URL | string | jdbc:hsqldb:file:${STORAGE_DIR}/db;sql.syntax_ora=true | [See also](../configuration/DesignerConfiguration.md#database-configuration) for more information | +| DB_DRIVER | string | org.hsqldb.jdbc.JDBCDriver | Database driver class name | +| DB_USER | string | SA | User used for connection to database | +| DB_PASSWORD | string | | Password used for connection to database | +| DB_CONNECTION_TIMEOUT | int | 30000 | Connection to database timeout in milliseconds | +| AUTHENTICATION_METHOD | string | BasicAuth | Method of authentication. 
One of: BasicAuth, OAuth2 | +| AUTHENTICATION_USERS_FILE | string | $NUSSKNACKER_DIR/conf/users.conf | Location of users configuration file | +| AUTHENTICATION_HEADERS_ACCEPT | string | application/json | | +| AUTHENTICATION_REALM | string | nussknacker | [Realm](https://datatracker.ietf.org/doc/html/rfc2617#section-1.2) | +| FLINK_REST_URL | string | http://localhost:8081 | URL to Flink's REST API - used for scenario deployment | +| FLINK_ROCKSDB_ENABLE | boolean | true | Enable RocksDB state backend support | +| KAFKA_ADDRESS | string | localhost:9092 | Kafka address used by Kafka components (sources, sinks) | +| KAFKA_AUTO_OFFSET_RESET | string | | See [Kafka documentation](https://kafka.apache.org/documentation/#consumerconfigs_auto.offset.reset). For development purposes it may be convenient to set this value to 'earliest', when not set the default from Kafka ('latest' at the moment) is used | +| SCHEMA_REGISTRY_URL | string | http://localhost:8082 | Address of Confluent Schema registry used for storing data model | +| GRAFANA_URL | string | /grafana | URL to Grafana, used in UI. Should be relative to Nussknacker URL to avoid additional CORS configuration | +| INFLUXDB_URL | string | http://localhost:8086 | URL to InfluxDB used by counts mechanism | +| PROMETHEUS_METRICS_PORT | int | | When defined, JMX MBeans are exposed as Prometheus metrics on this port | +| PROMETHEUS_AGENT_CONFIG_FILE | int | $NUSSKNACKER_DIR/conf/jmx_prometheus.yaml | Default configuration for JMX Prometheus agent. Used only when agent is enabled. 
See `PROMETHEUS_METRICS_PORT` | +| TABLES_DEFINITION_FILE | string | $NUSSKNACKER_DIR/conf/dev-tables-definition.sql | Location of file containing definitions of tables for Flink Table API components in Flink Sql | ## OAuth2 environment variables diff --git a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManagerProvider.scala b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManagerProvider.scala index 57fb4297ee3..f7020d463cb 100644 --- a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManagerProvider.scala +++ b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManagerProvider.scala @@ -17,6 +17,8 @@ import scala.concurrent.duration.FiniteDuration class EmbeddedDeploymentManagerProvider extends LiteDeploymentManagerProvider { + override val name: String = "lite-embedded" + override def createDeploymentManager( modelData: BaseModelData, dependencies: DeploymentManagerDependencies, @@ -44,6 +46,4 @@ class EmbeddedDeploymentManagerProvider extends LiteDeploymentManagerProvider { List(EmbeddedRequestResponseScenarioValidator) ) - override def name: String = "lite-embedded" - } diff --git a/nussknacker-dist/src/universal/bin/nussknacker-entrypoint.sh b/nussknacker-dist/src/universal/bin/nussknacker-entrypoint.sh index 4482a70497a..fc916a2180c 100755 --- a/nussknacker-dist/src/universal/bin/nussknacker-entrypoint.sh +++ b/nussknacker-dist/src/universal/bin/nussknacker-entrypoint.sh @@ -11,9 +11,8 @@ fi NUSSKNACKER_DIR=`dirname "$0" | xargs -I{} readlink -f {}/..` CONF_DIR="$NUSSKNACKER_DIR/conf" LIB_DIR="$NUSSKNACKER_DIR/lib" -MANAGERS_DIR="$NUSSKNACKER_DIR/managers" -CLASSPATH=${CLASSPATH:-$LIB_DIR/*:$MANAGERS_DIR/*} +CLASSPATH=${CLASSPATH:-$LIB_DIR/*} CONFIG_FILE=${CONFIG_FILE-"$CONF_DIR/application.conf"} 
LOGBACK_FILE=${LOGBACK_FILE-"$CONF_DIR/docker-logback.xml"} diff --git a/nussknacker-dist/src/universal/bin/run.sh b/nussknacker-dist/src/universal/bin/run.sh index 8d6118ef81c..58c7092fee4 100755 --- a/nussknacker-dist/src/universal/bin/run.sh +++ b/nussknacker-dist/src/universal/bin/run.sh @@ -8,9 +8,8 @@ DEFAULT_NUSSKNACKER_DIR=`dirname "$0" | xargs -I{} readlink -f {}/..` NUSSKNACKER_DIR=${NUSSKNACKER_DIR:-$DEFAULT_NUSSKNACKER_DIR} CONF_DIR=${CONF_DIR:-"$NUSSKNACKER_DIR/conf"} LIB_DIR=${LIB_DIR:-"$NUSSKNACKER_DIR/lib"} -MANAGERS_DIR="$NUSSKNACKER_DIR/managers" -CLASSPATH=${CLASSPATH:-$LIB_DIR/*:$MANAGERS_DIR/*} +CLASSPATH=${CLASSPATH:-$LIB_DIR/*} CONFIG_FILE=${CONFIG_FILE:-$CONF_DIR/application.conf} LOGBACK_FILE=${LOGBACK_FILE:-$CONF_DIR/logback.xml} diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/util/loader/ProcessConfigCreatorLoader.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/util/loader/ProcessConfigCreatorLoader.scala index e626ec79083..9930a28206e 100644 --- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/util/loader/ProcessConfigCreatorLoader.scala +++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/util/loader/ProcessConfigCreatorLoader.scala @@ -13,6 +13,7 @@ class ProcessConfigCreatorLoader(shouldIncludeConfigCreator: ProcessConfigCreato override val prettyClassName: String = "ProcessConfigCreator" override def loadAll(classLoader: ClassLoader): List[SPCC] = { + // todo: ScalaServiceLoader.load[SPCC](classLoader).filter(shouldIncludeConfigCreator) ++ ScalaServiceLoader .load[JPCC](classLoader) diff --git a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/StringUtils.scala b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/StringUtils.scala new file mode 100644 index 00000000000..6036710ca82 --- /dev/null +++ b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/StringUtils.scala @@ -0,0 +1,27 @@ +package 
pl.touk.nussknacker.engine.util + +import java.net.{URI, URL} +import java.nio.file.Path + +object StringUtils { + + implicit class ToUrl(val value: String) extends AnyVal { + + def convertToURL(workingDirectoryOpt: Option[Path] = None): URL = { + val uri = new URI(value) + if (uri.isAbsolute) { + uri.toURL + } else { + val pathPart = uri.getSchemeSpecificPart + val path = workingDirectoryOpt.map { workingDirectory => + workingDirectory.resolve(pathPart) + } getOrElse { + Path.of(pathPart) + } + path.toUri.toURL + } + } + + } + +} diff --git a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/UrlUtils.scala b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/UrlUtils.scala new file mode 100644 index 00000000000..6029b834aa5 --- /dev/null +++ b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/UrlUtils.scala @@ -0,0 +1,37 @@ +package pl.touk.nussknacker.engine.util + +import java.io.File +import java.net.URL + +object UrlUtils { + + implicit class ExpandFiles(val url: URL) extends AnyVal { + + def expandFiles(extension: String): List[URL] = { + url match { + case u if u.getProtocol.toLowerCase == "file" => + val file = new File(u.toURI) + if (file.isDirectory) { + val expanded = file + .listFiles() + .toList + .filterNot(_.getName.startsWith(".")) + .map(_.toURI.toURL) + .flatMap(_.expandFiles(extension)) + + expanded match { + case Nil => List.empty + case nonEmpty if nonEmpty.exists(_.getFile.endsWith(extension)) => expanded + case _ => u :: Nil + } + } else { + u :: Nil + } + case u => + u :: Nil + } + } + + } + +} diff --git a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/config/ConfigWithUnresolvedVersionExt.scala b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/config/ConfigWithUnresolvedVersionExt.scala index bbda0e21359..200e998633d 100644 --- a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/config/ConfigWithUnresolvedVersionExt.scala +++ 
b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/config/ConfigWithUnresolvedVersionExt.scala @@ -24,6 +24,14 @@ class ConfigWithUnresolvedVersionExt(val config: ConfigWithUnresolvedVersion) { } } + def readSafeString(path: String): Option[String] = { + if (config.resolved.hasPath(path)) { + Some(config.resolved.getString(path)) + } else { + None + } + } + } object ConfigWithUnresolvedVersionExt { diff --git a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala index d3d00b9e4cc..801406ab15e 100644 --- a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala +++ b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala @@ -1,9 +1,11 @@ package pl.touk.nussknacker.engine.util.loader import com.typesafe.scalalogging.LazyLogging +import pl.touk.nussknacker.engine.util.StringUtils._ +import pl.touk.nussknacker.engine.util.UrlUtils._ import java.io.File -import java.net.{URI, URL, URLClassLoader} +import java.net.{URL, URLClassLoader} import java.nio.file.Path case class ModelClassLoader private (classLoader: ClassLoader, urls: List[URL]) { @@ -29,46 +31,30 @@ object ModelClassLoader extends LazyLogging { workingDirectoryOpt: Option[Path], jarExtension: String = defaultJarExtension ): ModelClassLoader = { - val postProcessedURLs = expandFiles(urls.map(convertToURL(_, workingDirectoryOpt)), jarExtension) + val postProcessedURLs = validateExistence( + urls.map(_.convertToURL(workingDirectoryOpt)).flatMap(_.expandFiles(jarExtension)) + ) ModelClassLoader( new URLClassLoader(postProcessedURLs.toArray, this.getClass.getClassLoader), postProcessedURLs.toList ) } - private def expandFiles(urls: Iterable[URL], jarExtension: String): Iterable[URL] = { - urls.flatMap { - case url if url.getProtocol.toLowerCase == "file" => - val file 
= new File(url.toURI) - if (file.isDirectory) { - val expanded = - expandFiles(file.listFiles().filterNot(_.getName.startsWith(".")).map(_.toURI.toURL), jarExtension) - if (expanded.isEmpty) { - List.empty - } else if (expanded.exists(_.getFile.endsWith(jarExtension))) { // not expand if nested jars not exists - expanded - } else { - List(url) - } - } else { - List(url) - } - case url => List(url) + private def validateExistence(urls: Iterable[URL]): Iterable[URL] = { + urls.filterNot(url => doesExist(url)).toList match { + case Nil => urls + case notExisted => + throw new IllegalArgumentException(s"The following URLs don't exist: [${notExisted.mkString(",")}]") } } - private def convertToURL(urlString: String, workingDirectoryOpt: Option[Path]): URL = { - val uri = new URI(urlString) - if (uri.isAbsolute) { - uri.toURL - } else { - val pathPart = uri.getSchemeSpecificPart - val path = workingDirectoryOpt.map { workingDirectory => - workingDirectory.resolve(pathPart) - } getOrElse { - Path.of(pathPart) - } - path.toUri.toURL + private def doesExist(url: URL): Boolean = { + url.getProtocol match { + case "file" => + val file = new File(url.toURI) + file.exists() && file.isFile + case _ => + false } } diff --git a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ScalaServiceLoader.scala b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ScalaServiceLoader.scala index 78ae2452575..9712eb5861b 100644 --- a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ScalaServiceLoader.scala +++ b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ScalaServiceLoader.scala @@ -12,7 +12,7 @@ import scala.reflect.{ClassTag, classTag} object ScalaServiceLoader extends LazyLogging { import scala.jdk.CollectionConverters._ - def loadClass[T](classLoader: ClassLoader)(createDefault: => T)(implicit classTag: ClassTag[T]): T = + def loadClass[T: ClassTag](classLoader: 
ClassLoader)(createDefault: => T): T = chooseClass[T](createDefault, load[T](classLoader)) def chooseClass[T](createDefault: => T, loaded: List[T]): T = { From 76aa44570067fbc3344c8afb48f3dc9e65adda50 Mon Sep 17 00:00:00 2001 From: MK Software Date: Fri, 13 Dec 2024 12:02:40 +0100 Subject: [PATCH 05/73] wip --- build.sbt | 58 +- ...sConfigBasedProcessingTypeDataLoader.scala | 17 +- .../test/base/it/NuResourcesTest.scala | 2 +- .../test/mock/MockDeploymentManager.scala | 3 +- .../test/utils/domain/ProcessTestData.scala | 23 +- .../ui/api/ManagementResourcesSpec.scala | 997 +++++++++--------- .../ui/integration/ConfigurationTest.scala | 3 +- ...amingDeploymentManagerProviderHelper.scala | 3 +- .../FlinkStreamingDeploymentManagerSpec.scala | 3 +- .../touk/nussknacker/engine/ModelData.scala | 9 +- ...asedNuInstallationExampleEnvironment.scala | 2 +- .../engine/util/loader/ModelClassLoader.scala | 69 +- .../util/loader/ModelClassLoaderSpec.scala | 3 +- 13 files changed, 622 insertions(+), 570 deletions(-) diff --git a/build.sbt b/build.sbt index f066fb6c911..c20b2cd0bef 100644 --- a/build.sbt +++ b/build.sbt @@ -118,7 +118,28 @@ def designerMergeStrategy: String => MergeStrategy = { // https://tapir.softwaremill.com/en/latest/docs/openapi.html#using-swaggerui-with-sbt-assembly case PathList("META-INF", "maven", "org.webjars", "swagger-ui", "pom.properties") => MergeStrategy.singleOrError - case x => defaultMergeStrategy(x) + case PathList( + "META-INF", + "native-image", + "io.netty", + "netty-codec", + "generated", + "handlers", + "reflect-config.json" + ) => + MergeStrategy.concat + case PathList( + "META-INF", + "native-image", + "io.netty", + "netty-handler", + "generated", + "handlers", + "reflect-config.json" + ) => + MergeStrategy.concat + case x => + defaultMergeStrategy(x) } val scalaTestReports = Tests.Argument(TestFrameworks.ScalaTest, "-u", "target/surefire-reports", "-oFGD") @@ -612,12 +633,8 @@ lazy val flinkDeploymentManager = (project in 
flink("management")) libraryDependencies ++= { Seq( "org.typelevel" %% "cats-core" % catsV % Provided, - "org.apache.flink" % "flink-streaming-java" % flinkV % flinkScope - excludeAll ( - ExclusionRule("log4j", "log4j"), - ExclusionRule("org.slf4j", "slf4j-log4j12"), - ExclusionRule("com.esotericsoftware", "kryo-shaded"), - ), + "org.apache.flink" % "flink-streaming-java" % flinkV % Provided, + "org.apache.flink" % "flink-core" % flinkV % Provided, "org.apache.flink" % "flink-statebackend-rocksdb" % flinkV % flinkScope, "com.softwaremill.retry" %% "retry" % retryV, "org.wiremock" % "wiremock" % wireMockV % Test, @@ -645,6 +662,7 @@ lazy val flinkPeriodicDeploymentManager = (project in flink("management/periodic name := "nussknacker-flink-periodic-manager", libraryDependencies ++= { Seq( + "org.apache.flink" % "flink-core" % flinkV % Provided, "org.typelevel" %% "cats-core" % catsV % Provided, "com.typesafe.slick" %% "slick" % slickV % Provided, "com.typesafe.slick" %% "slick-hikaricp" % slickV % "provided, test", @@ -1502,9 +1520,10 @@ lazy val developmentTestsDeployManagerArtifacts = taskKey[List[(File, String)]]("development tests deployment manager artifacts") developmentTestsDeployManagerArtifacts := List( - (liteEmbeddedDeploymentManager / assembly).value -> "managers/lite-embedded-manager.jar", (developmentTestsDeploymentManager / assembly).value -> "managers/developmentTestsManager.jar", - (liteK8sDeploymentManager / assembly).value -> "managers/lite-k8s-manager.jar" + (flinkDeploymentManager / assembly).value -> "managers/nussknacker-flink-manager.jar", + (liteEmbeddedDeploymentManager / assembly).value -> "managers/lite-embedded-manager.jar", + (liteK8sDeploymentManager / assembly).value -> "managers/lite-k8s-manager.jar", ) lazy val buildAndImportRuntimeImageToK3d = taskKey[Unit]("Import runtime image into k3d cluster") @@ -1970,9 +1989,6 @@ lazy val designer = (project in file("designer/server")) .value, Test / test := (Test / test) .dependsOn( -// 
flinkDeploymentManager / Compile / assembly, -// liteK8sDeploymentManager / Compile / assembly, -// liteEmbeddedDeploymentManager / Compile / assembly, defaultModel / Compile / assembly, flinkTableApiComponents / Compile / assembly, flinkDevModel / Compile / assembly, @@ -1980,13 +1996,7 @@ lazy val designer = (project in file("designer/server")) flinkExecutor / prepareItLibs ) .value, - (Test / managedClasspath) += baseDirectory.value / "engine" / "lite" / "embeddedDeploymentManager" / "target" / "scala-2.13", -// unmanagedResourceDirectories in Test <+= baseDirectory ( _ /"engine/lite/embeddedDeploymentManager/target/scala-2.13" ), -// Test / testOptions += Tests.Setup(() => { -// val classpath = (Test / unmanagedClasspath).value -// println(s"Test classpath: $classpath") -// }), -// Test / unmanagedClasspath += baseDirectory.value / "engine" / "lite" / "embeddedDeploymentManager" / "target" / "scala-2.13" / "classes", + // todo: /* We depend on copyClientDist in packageBin and assembly to be make sure FE files will be included in jar and fajar We abuse sbt a little bit, but we don't want to put webpack in generate resources phase, as it's long and it would @@ -2050,6 +2060,7 @@ lazy val designer = (project in file("designer/server")) "io.circe" %% "circe-yaml" % circeYamlV % Test, "com.github.scopt" %% "scopt" % "4.1.0" % Test, "org.questdb" % "questdb" % "7.4.2", + "org.apache.flink" % "flink-streaming-java" % flinkV exclude ("com.esotericsoftware", "kryo-shaded"), ) ++ forScalaVersion(scalaVersion.value) { case (2, 13) => Seq( @@ -2065,6 +2076,10 @@ lazy val designer = (project in file("designer/server")) processReports, security, deploymentManagerApi, + componentsApi, + requestResponseComponentsApi, + liteComponentsApi, + flinkComponentsApi, restmodel, listenerApi, defaultHelpers % Test, @@ -2073,7 +2088,7 @@ lazy val designer = (project in file("designer/server")) developmentTestsDeploymentManager % Test, componentsApi % "test->test", // All 
DeploymentManager dependencies are added because they are needed to run NussknackerApp* with - // dev-application.conf. Currently, we doesn't have a separate classpath for DMs like we have for components. + // dev-application.conf. Currently, we don't have a separate classpath for DMs like we have for components. // schemedKafkaComponentsUtils is added because loading the provided liteEmbeddedDeploymentManager causes // that are also load added their test dependencies on the classpath by the Idea. It causes that // UniversalKafkaSourceFactory is loaded from app classloader and GenericRecord which is defined in typesToExtract @@ -2083,8 +2098,7 @@ lazy val designer = (project in file("designer/server")) // liteK8sDeploymentManager % Provided, // developmentTestsDeploymentManager % Provided, // flinkPeriodicDeploymentManager % Provided, - requestResponseRuntime % Test, - schemedKafkaComponentsUtils % Provided, +// schemedKafkaComponentsUtils % Provided, ) lazy val e2eTests = (project in file("e2e-tests")) diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala index 7b07cd39e84..392ab486106 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala @@ -12,7 +12,6 @@ import pl.touk.nussknacker.ui.process.processingtype.loader.ProcessingTypeDataLo import pl.touk.nussknacker.ui.process.processingtype.provider.ProcessingTypeDataState import pl.touk.nussknacker.engine.util.UrlUtils._ -import java.nio.file.Path import scala.reflect.internal.util.ScalaClassLoader.URLClassLoader class 
ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConfig) @@ -27,11 +26,13 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConf managersDir <- config.managersDir() processingTypesConfig <- config.loadProcessingTypeConfigs() } yield { + val managersClassLoader = + new URLClassLoader(managersDir.toUri.toURL.expandFiles(".jar"), this.getClass.getClassLoader) // This step with splitting DeploymentManagerProvider loading for all processing types // and after that creating ProcessingTypeData is done because of the deduplication of deployments // See DeploymentManagerProvider.engineSetupIdentity val providerWithNameInputData = processingTypesConfig.mapValuesNow { processingTypeConfig => - val provider = createDeploymentManagerProvider(managersDir, processingTypeConfig) + val provider = createDeploymentManagerProvider(managersClassLoader, processingTypeConfig) val nameInputData = EngineNameInputData( provider.defaultEngineSetupName, provider.engineSetupIdentity(processingTypeConfig.deploymentConfig), @@ -47,7 +48,7 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConf val modelDependencies = getModelDependencies(processingType) val processingTypeData = ProcessingTypeData.createProcessingTypeData( processingType, - ModelData(processingTypeConfig, modelDependencies), + ModelData(processingTypeConfig, modelDependencies, managersDir), deploymentManagerProvider, getDeploymentManagerDependencies(processingType), engineSetupNames(processingType), @@ -72,13 +73,13 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConf } private def createDeploymentManagerProvider( - managersDir: Path, + classLoader: ClassLoader, typeConfig: ProcessingTypeConfig ): DeploymentManagerProvider = { - val managersClassLoader = - new URLClassLoader(managersDir.toUri.toURL.expandFiles(".jar"), this.getClass.getClassLoader) - ScalaServiceLoader - 
.loadNamed[DeploymentManagerProvider](typeConfig.deploymentManagerType, managersClassLoader) // todo: close + ScalaServiceLoader.loadNamed[DeploymentManagerProvider]( + typeConfig.deploymentManagerType, + classLoader + ) // todo: close } } diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala index cde22f36654..f2f49baf4ee 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala @@ -128,7 +128,7 @@ trait NuResourcesTest protected val deploymentManagerProvider: DeploymentManagerProvider = new MockManagerProvider(deploymentManager) - private val modelData = ModelData(processingTypeConfig, modelDependencies) + private val modelData = ModelData(processingTypeConfig, modelDependencies, ???) protected val testProcessingTypeDataProvider: ProcessingTypeDataProvider[ProcessingTypeData, _] = mapProcessingTypeDataProvider( diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala index dcd25483e6d..1e5188832e3 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala @@ -47,7 +47,8 @@ class MockDeploymentManager( ) extends FlinkDeploymentManager( ModelData( ProcessingTypeConfig.read(ConfigWithScalaVersion.StreamingProcessTypeConfig), - TestFactory.modelDependencies + TestFactory.modelDependencies, + ??? 
), DeploymentManagerDependencies( deployedScenariosProvider, diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ProcessTestData.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ProcessTestData.scala index 47f2486fa4a..f2bbcb5c3f4 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ProcessTestData.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ProcessTestData.scala @@ -23,7 +23,7 @@ import pl.touk.nussknacker.engine.graph.node.FragmentInputDefinition.{FragmentCl import pl.touk.nussknacker.engine.graph.node._ import pl.touk.nussknacker.engine.graph.sink.SinkRef import pl.touk.nussknacker.engine.graph.source.SourceRef -import pl.touk.nussknacker.engine.kafka.KafkaFactory +//import pl.touk.nussknacker.engine.kafka.KafkaFactory import pl.touk.nussknacker.engine.testing.ModelDefinitionBuilder import pl.touk.nussknacker.restmodel.scenariodetails.{ScenarioParameters, ScenarioWithDetailsForMigrations} import pl.touk.nussknacker.test.config.WithSimplifiedDesignerConfig.TestProcessingType.Streaming @@ -32,6 +32,7 @@ import pl.touk.nussknacker.test.mock.{ StubModelDataWithModelDefinition, TestAdditionalUIConfigProvider } + import pl.touk.nussknacker.ui.definition.ScenarioPropertiesConfigFinalizer import pl.touk.nussknacker.ui.definition.editor.JavaSampleEnum import pl.touk.nussknacker.ui.process.ProcessService.UpdateScenarioCommand @@ -42,7 +43,7 @@ import pl.touk.nussknacker.ui.validation.{ScenarioLabelsValidator, UIProcessVali object ProcessTestData { - import KafkaFactory._ +// import KafkaFactory._ import pl.touk.nussknacker.engine.spel.SpelExtension._ val existingSourceFactory = "barSource" @@ -81,8 +82,10 @@ object ProcessTestData { .withSink(existingSinkFactory) .withSink( existingSinkFactoryKafkaString, - Parameter[String](TopicParamName), - Parameter[Any](SinkValueParamName).copy(isLazyParameter = true) + ???, + ???, // todo: +// 
Parameter[String](TopicParamName), +// Parameter[Any](SinkValueParamName).copy(isLazyParameter = true) ) .withService(existingServiceId) .withService(otherExistingServiceId) @@ -207,8 +210,10 @@ object ProcessTestData { .emptySink( "end" + idSuffix, "kafka-string", - TopicParamName.value -> "'end.topic'".spel, - SinkValueParamName.value -> "#output".spel + ???, + ???, // todo: +// TopicParamName.value -> "'end.topic'".spel, +// SinkValueParamName.value -> "#output".spel ) } ScenarioBuilder @@ -476,8 +481,10 @@ object ProcessTestData { .emptySink( "end" + idSuffix, "kafka-string", - TopicParamName.value -> spelTemplate("end.topic"), - SinkValueParamName.value -> spelTemplate("#output") + ???, + ???, // todo: +// TopicParamName.value -> spelTemplate("end.topic"), +// SinkValueParamName.value -> spelTemplate("#output") ) } diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ManagementResourcesSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ManagementResourcesSpec.scala index 988b54345e8..c2c21afc9aa 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ManagementResourcesSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ManagementResourcesSpec.scala @@ -1,498 +1,499 @@ -package pl.touk.nussknacker.ui.api - -import akka.http.scaladsl.model.{ContentTypeRange, StatusCodes} -import akka.http.scaladsl.server -import akka.http.scaladsl.testkit.ScalatestRouteTest -import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller} -import cats.instances.all._ -import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport -import io.circe.Json -import org.scalatest.funsuite.AnyFunSuite -import org.scalatest.matchers.BeMatcher -import org.scalatest.matchers.should.Matchers -import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, OptionValues} -import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus -import pl.touk.nussknacker.engine.api.deployment.{ProcessAction, 
ScenarioActionName, ScenarioActivity} -import pl.touk.nussknacker.engine.api.process.{ProcessName, VersionId} -import pl.touk.nussknacker.engine.api.{MetaData, StreamMetaData} -import pl.touk.nussknacker.engine.build.ScenarioBuilder -import pl.touk.nussknacker.engine.kafka.KafkaFactory -import pl.touk.nussknacker.engine.spel.SpelExtension._ -import pl.touk.nussknacker.restmodel.scenariodetails._ -import pl.touk.nussknacker.restmodel.{CustomActionRequest, CustomActionResponse} -import pl.touk.nussknacker.security.Permission -import pl.touk.nussknacker.test.PatientScalaFutures -import pl.touk.nussknacker.test.base.it.NuResourcesTest -import pl.touk.nussknacker.test.mock.MockDeploymentManager -import pl.touk.nussknacker.test.utils.domain.TestFactory.{withAllPermissions, withPermissions} -import pl.touk.nussknacker.test.utils.domain.{ProcessTestData, TestFactory} -import pl.touk.nussknacker.ui.api.description.scenarioActivity.Dtos -import pl.touk.nussknacker.ui.process.ScenarioQuery -import pl.touk.nussknacker.ui.process.exception.ProcessIllegalAction - -// TODO: all these tests should be migrated to ManagementApiHttpServiceBusinessSpec or ManagementApiHttpServiceSecuritySpec -class ManagementResourcesSpec - extends AnyFunSuite - with ScalatestRouteTest - with FailFastCirceSupport - with Matchers - with PatientScalaFutures - with OptionValues - with BeforeAndAfterEach - with BeforeAndAfterAll - with NuResourcesTest { - - import KafkaFactory._ - - private implicit final val string: FromEntityUnmarshaller[String] = - Unmarshaller.stringUnmarshaller.forContentTypes(ContentTypeRange.*) - - private val processName: ProcessName = ProcessTestData.sampleScenario.name - - private def deployedWithVersions(versionId: Long): BeMatcher[Option[ProcessAction]] = { - BeMatcher[(ScenarioActionName, VersionId)](equal((ScenarioActionName.Deploy, VersionId(versionId)))) - .compose[ProcessAction](a => (a.actionName, a.processVersionId)) - .compose[Option[ProcessAction]](opt => opt.value) - 
} - - test("process deployment should be visible in process history") { - saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) - deployProcess(processName) ~> checkThatEventually { - status shouldBe StatusCodes.OK - getProcess(processName) ~> check { - decodeDetails.lastStateAction shouldBe deployedWithVersions(2) - updateCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) - deployProcess(processName) ~> checkThatEventually { - getProcess(processName) ~> check { - decodeDetails.lastStateAction shouldBe deployedWithVersions(2) - } - } - } - } - } - - test("process during deploy cannot be deployed again") { - createDeployedExampleScenario(processName) - - deploymentManager.withProcessStateStatus(processName, SimpleStateStatus.DuringDeploy) { - deployProcess(processName) ~> check { - status shouldBe StatusCodes.Conflict - } - } - } - - test("canceled process can't be canceled again") { - createDeployedCanceledExampleScenario(processName) - - deploymentManager.withProcessStateStatus(processName, SimpleStateStatus.Canceled) { - cancelProcess(processName) ~> check { - status shouldBe StatusCodes.Conflict - } - } - } - - test("can't deploy archived process") { - createArchivedProcess(processName) - - deploymentManager.withProcessStateStatus(processName, SimpleStateStatus.Canceled) { - deployProcess(processName) ~> check { - status shouldBe StatusCodes.Conflict - responseAs[String] shouldBe ProcessIllegalAction - .archived(ScenarioActionName.Deploy, processName) - .message - } - } - } - - test("can't deploy fragment") { - createValidProcess(processName, isFragment = true) - - deployProcess(processName) ~> check { - status shouldBe StatusCodes.Conflict - responseAs[String] shouldBe ProcessIllegalAction - .fragment(ScenarioActionName.Deploy, processName) - .message - } - } - - test("can't cancel fragment") { - createValidProcess(processName, isFragment = true) - - deployProcess(processName) ~> check { - status shouldBe StatusCodes.Conflict - 
responseAs[String] shouldBe ProcessIllegalAction - .fragment(ScenarioActionName.Deploy, processName) - .message - } - } - - test("deploys and cancels with comment") { - saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) - deployProcess( - ProcessTestData.sampleScenario.name, - comment = Some("deployComment") - ) ~> checkThatEventually { - getProcess(processName) ~> check { - val processDetails = responseAs[ScenarioWithDetails] - processDetails.lastStateAction.exists(_.actionName == ScenarioActionName.Deploy) shouldBe true - } - cancelProcess( - ProcessTestData.sampleScenario.name, - comment = Some("cancelComment") - ) ~> check { - status shouldBe StatusCodes.OK - // TODO: remove Deployment:, Stop: after adding custom icons - val expectedDeployComment = "deployComment" - val expectedStopComment = "cancelComment" - val expectedDeployCommentInLegacyService = s"Deployment: $expectedDeployComment" - val expectedStopCommentInLegacyService = s"Stop: $expectedStopComment" - getActivity(ProcessTestData.sampleScenario.name) ~> check { - val comments = responseAs[Dtos.Legacy.ProcessActivity].comments.sortBy(_.id) - comments.map(_.content) shouldBe List( - expectedDeployCommentInLegacyService, - expectedStopCommentInLegacyService - ) - val firstCommentId :: secondCommentId :: Nil = comments.map(_.id) - - Get(s"/processes/${ProcessTestData.sampleScenario.name}/deployments") ~> withAllPermissions( - processesRoute - ) ~> check { - val deploymentHistory = responseAs[List[ProcessAction]] - deploymentHistory.map(a => - (a.processVersionId, a.user, a.actionName, a.commentId, a.comment, a.buildInfo) - ) shouldBe List( - ( - VersionId(2), - TestFactory.user().username, - ScenarioActionName.Cancel, - Some(secondCommentId), - Some(expectedStopComment), - Map() - ), - ( - VersionId(2), - TestFactory.user().username, - ScenarioActionName.Deploy, - Some(firstCommentId), - Some(expectedDeployComment), - TestFactory.buildInfo - ) - ) - } - } - } - } - } - - test("deploy 
technical process and mark it as deployed") { - createValidProcess(processName) - - deployProcess(processName) ~> checkThatEventually { - status shouldBe StatusCodes.OK - getProcess(processName) ~> check { - val processDetails = responseAs[ScenarioWithDetails] - processDetails.lastStateAction shouldBe deployedWithVersions(1) - processDetails.lastStateAction.exists(_.actionName == ScenarioActionName.Deploy) shouldBe true - } - } - } - - test("recognize process cancel in deployment list") { - saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) - deployProcess(ProcessTestData.sampleScenario.name) ~> checkThatEventually { - status shouldBe StatusCodes.OK - getProcess(processName) ~> check { - decodeDetails.lastStateAction shouldBe deployedWithVersions(2) - cancelProcess(ProcessTestData.sampleScenario.name) ~> check { - getProcess(processName) ~> check { - decodeDetails.lastStateAction.exists(_.actionName == ScenarioActionName.Cancel) shouldBe true - } - } - } - } - } - - test("recognize process deploy and cancel in global process list") { - saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) - deployProcess(ProcessTestData.sampleScenario.name) ~> checkThatEventually { - status shouldBe StatusCodes.OK - - forScenariosReturned(ScenarioQuery.empty) { processes => - val process = processes.find(_.name == ProcessTestData.sampleScenario.name.value).head - process.lastActionVersionId shouldBe Some(2L) - process.isDeployed shouldBe true - - cancelProcess(ProcessTestData.sampleScenario.name) ~> check { - forScenariosReturned(ScenarioQuery.empty) { processes => - val process = processes.find(_.name == ProcessTestData.sampleScenario.name.value).head - process.lastActionVersionId shouldBe Some(2L) - process.isCanceled shouldBe true - } - } - } - } - } - - test("not authorize user with write permission to deploy") { - saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) - 
Post(s"/processManagement/deploy/${ProcessTestData.sampleScenario.name}") ~> withPermissions( - deployRoute(), - Permission.Write - ) ~> check { - rejection shouldBe server.AuthorizationFailedRejection - } - } - - test("should allow deployment of scenario with warning") { - val processWithDisabledFilter = ScenarioBuilder - .streaming(processName.value) - .parallelism(1) - .source("startProcess", "csv-source") - .filter("input", "#input != null".spel, Some(true)) - .emptySink( - "end", - "kafka-string", - TopicParamName.value -> "'end.topic'".spel, - SinkValueParamName.value -> "#input".spel - ) - - saveCanonicalProcessAndAssertSuccess(processWithDisabledFilter) - deployProcess(processName) ~> check { - status shouldBe StatusCodes.OK - } - } - - test("should return failure for not validating scenario") { - val invalidScenario = ScenarioBuilder - .streaming(processName.value) - .parallelism(1) - .source("start", "not existing") - .emptySink( - "end", - "kafka-string", - TopicParamName.value -> "'end.topic'".spel, - SinkValueParamName.value -> "#output".spel - ) - saveCanonicalProcessAndAssertSuccess(invalidScenario) - - deploymentManager.withEmptyProcessState(invalidScenario.name) { - deployProcess(invalidScenario.name) ~> check { - responseAs[String] shouldBe "Cannot deploy invalid scenario" - status shouldBe StatusCodes.Conflict - } - getProcess(invalidScenario.name) ~> check { - decodeDetails.state.value.status shouldEqual SimpleStateStatus.NotDeployed - } - } - } - - test("should return failure for not validating deployment") { - val largeParallelismScenario = ProcessTestData.sampleScenario.copy(metaData = - MetaData( - ProcessTestData.sampleScenario.name.value, - StreamMetaData(parallelism = Some(MockDeploymentManager.maxParallelism + 1)) - ) - ) - saveCanonicalProcessAndAssertSuccess(largeParallelismScenario) - - deploymentManager.withFailingDeployment(largeParallelismScenario.name) { - deployProcess(largeParallelismScenario.name) ~> check { - status shouldBe 
StatusCodes.BadRequest - responseAs[String] shouldBe "Parallelism too large" - } - } - } - - test("return from deploy before deployment manager proceeds") { - saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) - - deploymentManager.withWaitForDeployFinish(ProcessTestData.sampleScenario.name) { - deployProcess(ProcessTestData.sampleScenario.name) ~> check { - status shouldBe StatusCodes.OK - } - } - } - - test("snapshots process") { - saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) - deploymentManager.withProcessRunning(ProcessTestData.sampleScenario.name) { - snapshot(ProcessTestData.sampleScenario.name) ~> check { - status shouldBe StatusCodes.OK - responseAs[String] shouldBe MockDeploymentManager.savepointPath - } - } - } - - test("stops process") { - saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) - deploymentManager.withProcessRunning(ProcessTestData.sampleScenario.name) { - stop(ProcessTestData.sampleScenario.name) ~> check { - status shouldBe StatusCodes.OK - responseAs[String] shouldBe MockDeploymentManager.stopSavepointPath - } - } - } - - test("return test results") { - val testDataContent = - """{"sourceId":"startProcess","record":"ala"} - |{"sourceId":"startProcess","record":"bela"}""".stripMargin - saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) - - testScenario(ProcessTestData.sampleScenario, testDataContent) ~> check { - - status shouldEqual StatusCodes.OK - - val ctx = responseAs[Json].hcursor - .downField("results") - .downField("nodeResults") - .downField("endsuffix") - .downArray - .downField("variables") - - ctx - .downField("output") - .downField("pretty") - .downField("message") - .focus shouldBe Some(Json.fromString("message")) - - ctx - .downField("input") - .downField("pretty") - .downField("firstField") - .focus shouldBe Some(Json.fromString("ala")) - } - } - - test("return test results of errors, including null") { - - import 
pl.touk.nussknacker.engine.spel.SpelExtension._ - - val process = ScenarioBuilder - .streaming(processName.value) - .parallelism(1) - .source("startProcess", "csv-source") - .filter("input", "new java.math.BigDecimal(null) == 0".spel) - .emptySink( - "end", - "kafka-string", - TopicParamName.value -> "'end.topic'".spel, - SinkValueParamName.value -> "''".spel - ) - val testDataContent = - """{"sourceId":"startProcess","record":"ala"} - |"bela"""".stripMargin - saveCanonicalProcessAndAssertSuccess(process) - - testScenario(process, testDataContent) ~> check { - status shouldEqual StatusCodes.OK - } - } - - test("refuses to test if too much data") { - - import pl.touk.nussknacker.engine.spel.SpelExtension._ - - val process = { - ScenarioBuilder - .streaming(processName.value) - .parallelism(1) - .source("startProcess", "csv-source") - .emptySink("end", "kafka-string", TopicParamName.value -> "'end.topic'".spel) - } - saveCanonicalProcessAndAssertSuccess(process) - val tooLargeTestDataContentList = List((1 to 50).mkString("\n"), (1 to 50000).mkString("-")) - - tooLargeTestDataContentList.foreach { tooLargeData => - testScenario(process, tooLargeData) ~> check { - status shouldEqual StatusCodes.BadRequest - } - } - } - - test("rejects test record with non-existing source") { - saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) - val testDataContent = - """{"sourceId":"startProcess","record":"ala"} - |{"sourceId":"unknown","record":"bela"}""".stripMargin - - testScenario(ProcessTestData.sampleScenario, testDataContent) ~> check { - status shouldEqual StatusCodes.BadRequest - responseAs[String] shouldBe "Record 2 - scenario does not have source id: 'unknown'" - } - } - - test("execute valid custom action") { - createEmptyProcess(ProcessTestData.sampleProcessName) - customAction( - ProcessTestData.sampleProcessName, - CustomActionRequest(ScenarioActionName("hello")) - ) ~> check { - status shouldBe StatusCodes.OK - responseAs[CustomActionResponse] 
shouldBe CustomActionResponse(isSuccess = true, msg = "Hi") - } - } - - test("execute non existing custom action") { - createEmptyProcess(ProcessTestData.sampleProcessName) - customAction( - ProcessTestData.sampleProcessName, - CustomActionRequest(ScenarioActionName("non-existing")) - ) ~> check { - status shouldBe StatusCodes.NotFound - responseAs[ - String - ] shouldBe s"Couldn't find definition of action non-existing for scenario ${ProcessTestData.sampleProcessName}" - } - } - - test("execute not implemented custom action") { - createEmptyProcess(ProcessTestData.sampleProcessName) - customAction( - ProcessTestData.sampleProcessName, - CustomActionRequest(ScenarioActionName("not-implemented")) - ) ~> check { - status shouldBe StatusCodes.NotImplemented - responseAs[String] shouldBe "an implementation is missing" - } - } - - test("execute custom action with not allowed process status") { - createEmptyProcess(ProcessTestData.sampleProcessName) - customAction( - ProcessTestData.sampleProcessName, - CustomActionRequest(ScenarioActionName("invalid-status")) - ) ~> check { - // TODO: "conflict" is coherrent with "canceled process can't be canceled again" above, consider changing to Forbidden - status shouldBe StatusCodes.Conflict - responseAs[String] shouldBe "Action: invalid-status is not allowed in scenario (fooProcess) state: NOT_DEPLOYED, allowed actions: not-implemented,ARCHIVE,DEPLOY,RENAME,hello." 
- } - } - - test("should return 403 when execute custom action on archived process") { - createArchivedProcess(ProcessTestData.sampleProcessName) - customAction( - ProcessTestData.sampleProcessName, - CustomActionRequest(ScenarioActionName("hello")) - ) ~> check { - // TODO: "conflict" is coherrent with "can't deploy fragment" above, consider changing to Forbidden - status shouldBe StatusCodes.Conflict - } - } - - test("should return 403 when execute custom action on fragment") { - createEmptyProcess(ProcessTestData.sampleProcessName, isFragment = true) - customAction( - ProcessTestData.sampleProcessName, - CustomActionRequest(ScenarioActionName("hello")) - ) ~> check { - // TODO: "conflict" is coherrent with "can't deploy fragment" above, consider changing to Forbidden - status shouldBe StatusCodes.Conflict - } - } - - def decodeDetails: ScenarioWithDetails = responseAs[ScenarioWithDetails] - - def checkThatEventually[T](body: => T): RouteTestResult => T = check(eventually(body)) - -} +//package pl.touk.nussknacker.ui.api +// +//import akka.http.scaladsl.model.{ContentTypeRange, StatusCodes} +//import akka.http.scaladsl.server +//import akka.http.scaladsl.testkit.ScalatestRouteTest +//import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller} +//import cats.instances.all._ +//import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport +//import io.circe.Json +//import org.scalatest.funsuite.AnyFunSuite +//import org.scalatest.matchers.BeMatcher +//import org.scalatest.matchers.should.Matchers +//import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, OptionValues} +//import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus +//import pl.touk.nussknacker.engine.api.deployment.{ProcessAction, ScenarioActionName, ScenarioActivity} +//import pl.touk.nussknacker.engine.api.process.{ProcessName, VersionId} +//import pl.touk.nussknacker.engine.api.{MetaData, StreamMetaData} +//import 
pl.touk.nussknacker.engine.build.ScenarioBuilder +//import pl.touk.nussknacker.engine.kafka.KafkaFactory +//import pl.touk.nussknacker.engine.spel.SpelExtension._ +//import pl.touk.nussknacker.restmodel.scenariodetails._ +//import pl.touk.nussknacker.restmodel.{CustomActionRequest, CustomActionResponse} +//import pl.touk.nussknacker.security.Permission +//import pl.touk.nussknacker.test.PatientScalaFutures +//import pl.touk.nussknacker.test.base.it.NuResourcesTest +//import pl.touk.nussknacker.test.mock.MockDeploymentManager +//import pl.touk.nussknacker.test.utils.domain.TestFactory.{withAllPermissions, withPermissions} +//import pl.touk.nussknacker.test.utils.domain.{ProcessTestData, TestFactory} +//import pl.touk.nussknacker.ui.api.description.scenarioActivity.Dtos +//import pl.touk.nussknacker.ui.process.ScenarioQuery +//import pl.touk.nussknacker.ui.process.exception.ProcessIllegalAction +// +// todo: +//// TODO: all these tests should be migrated to ManagementApiHttpServiceBusinessSpec or ManagementApiHttpServiceSecuritySpec +//class ManagementResourcesSpec +// extends AnyFunSuite +// with ScalatestRouteTest +// with FailFastCirceSupport +// with Matchers +// with PatientScalaFutures +// with OptionValues +// with BeforeAndAfterEach +// with BeforeAndAfterAll +// with NuResourcesTest { +// +// import KafkaFactory._ +// +// private implicit final val string: FromEntityUnmarshaller[String] = +// Unmarshaller.stringUnmarshaller.forContentTypes(ContentTypeRange.*) +// +// private val processName: ProcessName = ProcessTestData.sampleScenario.name +// +// private def deployedWithVersions(versionId: Long): BeMatcher[Option[ProcessAction]] = { +// BeMatcher[(ScenarioActionName, VersionId)](equal((ScenarioActionName.Deploy, VersionId(versionId)))) +// .compose[ProcessAction](a => (a.actionName, a.processVersionId)) +// .compose[Option[ProcessAction]](opt => opt.value) +// } +// +// test("process deployment should be visible in process history") { +// 
saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) +// deployProcess(processName) ~> checkThatEventually { +// status shouldBe StatusCodes.OK +// getProcess(processName) ~> check { +// decodeDetails.lastStateAction shouldBe deployedWithVersions(2) +// updateCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) +// deployProcess(processName) ~> checkThatEventually { +// getProcess(processName) ~> check { +// decodeDetails.lastStateAction shouldBe deployedWithVersions(2) +// } +// } +// } +// } +// } +// +// test("process during deploy cannot be deployed again") { +// createDeployedExampleScenario(processName) +// +// deploymentManager.withProcessStateStatus(processName, SimpleStateStatus.DuringDeploy) { +// deployProcess(processName) ~> check { +// status shouldBe StatusCodes.Conflict +// } +// } +// } +// +// test("canceled process can't be canceled again") { +// createDeployedCanceledExampleScenario(processName) +// +// deploymentManager.withProcessStateStatus(processName, SimpleStateStatus.Canceled) { +// cancelProcess(processName) ~> check { +// status shouldBe StatusCodes.Conflict +// } +// } +// } +// +// test("can't deploy archived process") { +// createArchivedProcess(processName) +// +// deploymentManager.withProcessStateStatus(processName, SimpleStateStatus.Canceled) { +// deployProcess(processName) ~> check { +// status shouldBe StatusCodes.Conflict +// responseAs[String] shouldBe ProcessIllegalAction +// .archived(ScenarioActionName.Deploy, processName) +// .message +// } +// } +// } +// +// test("can't deploy fragment") { +// createValidProcess(processName, isFragment = true) +// +// deployProcess(processName) ~> check { +// status shouldBe StatusCodes.Conflict +// responseAs[String] shouldBe ProcessIllegalAction +// .fragment(ScenarioActionName.Deploy, processName) +// .message +// } +// } +// +// test("can't cancel fragment") { +// createValidProcess(processName, isFragment = true) +// +// deployProcess(processName) ~> 
check { +// status shouldBe StatusCodes.Conflict +// responseAs[String] shouldBe ProcessIllegalAction +// .fragment(ScenarioActionName.Deploy, processName) +// .message +// } +// } +// +// test("deploys and cancels with comment") { +// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) +// deployProcess( +// ProcessTestData.sampleScenario.name, +// comment = Some("deployComment") +// ) ~> checkThatEventually { +// getProcess(processName) ~> check { +// val processDetails = responseAs[ScenarioWithDetails] +// processDetails.lastStateAction.exists(_.actionName == ScenarioActionName.Deploy) shouldBe true +// } +// cancelProcess( +// ProcessTestData.sampleScenario.name, +// comment = Some("cancelComment") +// ) ~> check { +// status shouldBe StatusCodes.OK +// // TODO: remove Deployment:, Stop: after adding custom icons +// val expectedDeployComment = "deployComment" +// val expectedStopComment = "cancelComment" +// val expectedDeployCommentInLegacyService = s"Deployment: $expectedDeployComment" +// val expectedStopCommentInLegacyService = s"Stop: $expectedStopComment" +// getActivity(ProcessTestData.sampleScenario.name) ~> check { +// val comments = responseAs[Dtos.Legacy.ProcessActivity].comments.sortBy(_.id) +// comments.map(_.content) shouldBe List( +// expectedDeployCommentInLegacyService, +// expectedStopCommentInLegacyService +// ) +// val firstCommentId :: secondCommentId :: Nil = comments.map(_.id) +// +// Get(s"/processes/${ProcessTestData.sampleScenario.name}/deployments") ~> withAllPermissions( +// processesRoute +// ) ~> check { +// val deploymentHistory = responseAs[List[ProcessAction]] +// deploymentHistory.map(a => +// (a.processVersionId, a.user, a.actionName, a.commentId, a.comment, a.buildInfo) +// ) shouldBe List( +// ( +// VersionId(2), +// TestFactory.user().username, +// ScenarioActionName.Cancel, +// Some(secondCommentId), +// Some(expectedStopComment), +// Map() +// ), +// ( +// VersionId(2), +// TestFactory.user().username, 
+// ScenarioActionName.Deploy, +// Some(firstCommentId), +// Some(expectedDeployComment), +// TestFactory.buildInfo +// ) +// ) +// } +// } +// } +// } +// } +// +// test("deploy technical process and mark it as deployed") { +// createValidProcess(processName) +// +// deployProcess(processName) ~> checkThatEventually { +// status shouldBe StatusCodes.OK +// getProcess(processName) ~> check { +// val processDetails = responseAs[ScenarioWithDetails] +// processDetails.lastStateAction shouldBe deployedWithVersions(1) +// processDetails.lastStateAction.exists(_.actionName == ScenarioActionName.Deploy) shouldBe true +// } +// } +// } +// +// test("recognize process cancel in deployment list") { +// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) +// deployProcess(ProcessTestData.sampleScenario.name) ~> checkThatEventually { +// status shouldBe StatusCodes.OK +// getProcess(processName) ~> check { +// decodeDetails.lastStateAction shouldBe deployedWithVersions(2) +// cancelProcess(ProcessTestData.sampleScenario.name) ~> check { +// getProcess(processName) ~> check { +// decodeDetails.lastStateAction.exists(_.actionName == ScenarioActionName.Cancel) shouldBe true +// } +// } +// } +// } +// } +// +// test("recognize process deploy and cancel in global process list") { +// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) +// deployProcess(ProcessTestData.sampleScenario.name) ~> checkThatEventually { +// status shouldBe StatusCodes.OK +// +// forScenariosReturned(ScenarioQuery.empty) { processes => +// val process = processes.find(_.name == ProcessTestData.sampleScenario.name.value).head +// process.lastActionVersionId shouldBe Some(2L) +// process.isDeployed shouldBe true +// +// cancelProcess(ProcessTestData.sampleScenario.name) ~> check { +// forScenariosReturned(ScenarioQuery.empty) { processes => +// val process = processes.find(_.name == ProcessTestData.sampleScenario.name.value).head +// process.lastActionVersionId shouldBe 
Some(2L) +// process.isCanceled shouldBe true +// } +// } +// } +// } +// } +// +// test("not authorize user with write permission to deploy") { +// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) +// Post(s"/processManagement/deploy/${ProcessTestData.sampleScenario.name}") ~> withPermissions( +// deployRoute(), +// Permission.Write +// ) ~> check { +// rejection shouldBe server.AuthorizationFailedRejection +// } +// } +// +// test("should allow deployment of scenario with warning") { +// val processWithDisabledFilter = ScenarioBuilder +// .streaming(processName.value) +// .parallelism(1) +// .source("startProcess", "csv-source") +// .filter("input", "#input != null".spel, Some(true)) +// .emptySink( +// "end", +// "kafka-string", +// TopicParamName.value -> "'end.topic'".spel, +// SinkValueParamName.value -> "#input".spel +// ) +// +// saveCanonicalProcessAndAssertSuccess(processWithDisabledFilter) +// deployProcess(processName) ~> check { +// status shouldBe StatusCodes.OK +// } +// } +// +// test("should return failure for not validating scenario") { +// val invalidScenario = ScenarioBuilder +// .streaming(processName.value) +// .parallelism(1) +// .source("start", "not existing") +// .emptySink( +// "end", +// "kafka-string", +// TopicParamName.value -> "'end.topic'".spel, +// SinkValueParamName.value -> "#output".spel +// ) +// saveCanonicalProcessAndAssertSuccess(invalidScenario) +// +// deploymentManager.withEmptyProcessState(invalidScenario.name) { +// deployProcess(invalidScenario.name) ~> check { +// responseAs[String] shouldBe "Cannot deploy invalid scenario" +// status shouldBe StatusCodes.Conflict +// } +// getProcess(invalidScenario.name) ~> check { +// decodeDetails.state.value.status shouldEqual SimpleStateStatus.NotDeployed +// } +// } +// } +// +// test("should return failure for not validating deployment") { +// val largeParallelismScenario = ProcessTestData.sampleScenario.copy(metaData = +// MetaData( +// 
ProcessTestData.sampleScenario.name.value, +// StreamMetaData(parallelism = Some(MockDeploymentManager.maxParallelism + 1)) +// ) +// ) +// saveCanonicalProcessAndAssertSuccess(largeParallelismScenario) +// +// deploymentManager.withFailingDeployment(largeParallelismScenario.name) { +// deployProcess(largeParallelismScenario.name) ~> check { +// status shouldBe StatusCodes.BadRequest +// responseAs[String] shouldBe "Parallelism too large" +// } +// } +// } +// +// test("return from deploy before deployment manager proceeds") { +// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) +// +// deploymentManager.withWaitForDeployFinish(ProcessTestData.sampleScenario.name) { +// deployProcess(ProcessTestData.sampleScenario.name) ~> check { +// status shouldBe StatusCodes.OK +// } +// } +// } +// +// test("snapshots process") { +// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) +// deploymentManager.withProcessRunning(ProcessTestData.sampleScenario.name) { +// snapshot(ProcessTestData.sampleScenario.name) ~> check { +// status shouldBe StatusCodes.OK +// responseAs[String] shouldBe MockDeploymentManager.savepointPath +// } +// } +// } +// +// test("stops process") { +// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) +// deploymentManager.withProcessRunning(ProcessTestData.sampleScenario.name) { +// stop(ProcessTestData.sampleScenario.name) ~> check { +// status shouldBe StatusCodes.OK +// responseAs[String] shouldBe MockDeploymentManager.stopSavepointPath +// } +// } +// } +// +// test("return test results") { +// val testDataContent = +// """{"sourceId":"startProcess","record":"ala"} +// |{"sourceId":"startProcess","record":"bela"}""".stripMargin +// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) +// +// testScenario(ProcessTestData.sampleScenario, testDataContent) ~> check { +// +// status shouldEqual StatusCodes.OK +// +// val ctx = responseAs[Json].hcursor +// .downField("results") +// 
.downField("nodeResults") +// .downField("endsuffix") +// .downArray +// .downField("variables") +// +// ctx +// .downField("output") +// .downField("pretty") +// .downField("message") +// .focus shouldBe Some(Json.fromString("message")) +// +// ctx +// .downField("input") +// .downField("pretty") +// .downField("firstField") +// .focus shouldBe Some(Json.fromString("ala")) +// } +// } +// +// test("return test results of errors, including null") { +// +// import pl.touk.nussknacker.engine.spel.SpelExtension._ +// +// val process = ScenarioBuilder +// .streaming(processName.value) +// .parallelism(1) +// .source("startProcess", "csv-source") +// .filter("input", "new java.math.BigDecimal(null) == 0".spel) +// .emptySink( +// "end", +// "kafka-string", +// TopicParamName.value -> "'end.topic'".spel, +// SinkValueParamName.value -> "''".spel +// ) +// val testDataContent = +// """{"sourceId":"startProcess","record":"ala"} +// |"bela"""".stripMargin +// saveCanonicalProcessAndAssertSuccess(process) +// +// testScenario(process, testDataContent) ~> check { +// status shouldEqual StatusCodes.OK +// } +// } +// +// test("refuses to test if too much data") { +// +// import pl.touk.nussknacker.engine.spel.SpelExtension._ +// +// val process = { +// ScenarioBuilder +// .streaming(processName.value) +// .parallelism(1) +// .source("startProcess", "csv-source") +// .emptySink("end", "kafka-string", TopicParamName.value -> "'end.topic'".spel) +// } +// saveCanonicalProcessAndAssertSuccess(process) +// val tooLargeTestDataContentList = List((1 to 50).mkString("\n"), (1 to 50000).mkString("-")) +// +// tooLargeTestDataContentList.foreach { tooLargeData => +// testScenario(process, tooLargeData) ~> check { +// status shouldEqual StatusCodes.BadRequest +// } +// } +// } +// +// test("rejects test record with non-existing source") { +// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) +// val testDataContent = +// """{"sourceId":"startProcess","record":"ala"} 
+// |{"sourceId":"unknown","record":"bela"}""".stripMargin +// +// testScenario(ProcessTestData.sampleScenario, testDataContent) ~> check { +// status shouldEqual StatusCodes.BadRequest +// responseAs[String] shouldBe "Record 2 - scenario does not have source id: 'unknown'" +// } +// } +// +// test("execute valid custom action") { +// createEmptyProcess(ProcessTestData.sampleProcessName) +// customAction( +// ProcessTestData.sampleProcessName, +// CustomActionRequest(ScenarioActionName("hello")) +// ) ~> check { +// status shouldBe StatusCodes.OK +// responseAs[CustomActionResponse] shouldBe CustomActionResponse(isSuccess = true, msg = "Hi") +// } +// } +// +// test("execute non existing custom action") { +// createEmptyProcess(ProcessTestData.sampleProcessName) +// customAction( +// ProcessTestData.sampleProcessName, +// CustomActionRequest(ScenarioActionName("non-existing")) +// ) ~> check { +// status shouldBe StatusCodes.NotFound +// responseAs[ +// String +// ] shouldBe s"Couldn't find definition of action non-existing for scenario ${ProcessTestData.sampleProcessName}" +// } +// } +// +// test("execute not implemented custom action") { +// createEmptyProcess(ProcessTestData.sampleProcessName) +// customAction( +// ProcessTestData.sampleProcessName, +// CustomActionRequest(ScenarioActionName("not-implemented")) +// ) ~> check { +// status shouldBe StatusCodes.NotImplemented +// responseAs[String] shouldBe "an implementation is missing" +// } +// } +// +// test("execute custom action with not allowed process status") { +// createEmptyProcess(ProcessTestData.sampleProcessName) +// customAction( +// ProcessTestData.sampleProcessName, +// CustomActionRequest(ScenarioActionName("invalid-status")) +// ) ~> check { +// // TODO: "conflict" is coherrent with "canceled process can't be canceled again" above, consider changing to Forbidden +// status shouldBe StatusCodes.Conflict +// responseAs[String] shouldBe "Action: invalid-status is not allowed in scenario 
(fooProcess) state: NOT_DEPLOYED, allowed actions: not-implemented,ARCHIVE,DEPLOY,RENAME,hello." +// } +// } +// +// test("should return 403 when execute custom action on archived process") { +// createArchivedProcess(ProcessTestData.sampleProcessName) +// customAction( +// ProcessTestData.sampleProcessName, +// CustomActionRequest(ScenarioActionName("hello")) +// ) ~> check { +// // TODO: "conflict" is coherrent with "can't deploy fragment" above, consider changing to Forbidden +// status shouldBe StatusCodes.Conflict +// } +// } +// +// test("should return 403 when execute custom action on fragment") { +// createEmptyProcess(ProcessTestData.sampleProcessName, isFragment = true) +// customAction( +// ProcessTestData.sampleProcessName, +// CustomActionRequest(ScenarioActionName("hello")) +// ) ~> check { +// // TODO: "conflict" is coherrent with "can't deploy fragment" above, consider changing to Forbidden +// status shouldBe StatusCodes.Conflict +// } +// } +// +// def decodeDetails: ScenarioWithDetails = responseAs[ScenarioWithDetails] +// +// def checkThatEventually[T](body: => T): RouteTestResult => T = check(eventually(body)) +// +//} diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/integration/ConfigurationTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/integration/ConfigurationTest.scala index 5631ea406fc..f6e9b1a5178 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/integration/ConfigurationTest.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/integration/ConfigurationTest.scala @@ -19,7 +19,8 @@ class ConfigurationTest extends AnyFunSuite with Matchers { private def modelData: ModelData = ModelData( ProcessingTypeConfig.read(ConfigWithScalaVersion.StreamingProcessTypeConfig), - TestFactory.modelDependencies + TestFactory.modelDependencies, + ??? 
) private lazy val modelDataConfig = modelData.modelConfig diff --git a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerProviderHelper.scala b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerProviderHelper.scala index 931b35d5794..1aba663121e 100644 --- a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerProviderHelper.scala +++ b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerProviderHelper.scala @@ -35,7 +35,8 @@ object FlinkStreamingDeploymentManagerProviderHelper { workingDirectoryOpt = None, _ => true, ComponentDefinitionExtractionMode.FinalDefinition - ) + ), + ??? ) val actorSystem = ActorSystem("FlinkStreamingDeploymentManagerProviderHelper") val backend = AsyncHttpClientFutureBackend.usingConfig(new DefaultAsyncHttpClientConfig.Builder().build()) diff --git a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerSpec.scala b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerSpec.scala index 5872b331cb7..dde02281551 100644 --- a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerSpec.scala +++ b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerSpec.scala @@ -271,7 +271,8 @@ class FlinkStreamingDeploymentManagerSpec extends AnyFunSuite with Matchers with workingDirectoryOpt = None, _ => true, ComponentDefinitionExtractionMode.FinalDefinition - ) + ), + ??? 
) val definition = modelData.modelDefinition definition.components.components.map(_.id) should contain(ComponentId(ComponentType.Service, "accountService")) diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala index 43f00f94bd9..56cc9784ffb 100644 --- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala +++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala @@ -40,8 +40,13 @@ object ModelData extends LazyLogging { Map[DesignerWideComponentId, ComponentAdditionalConfig] ) => ModelDefinition - def apply(processingTypeConfig: ProcessingTypeConfig, dependencies: ModelDependencies): ModelData = { - val modelClassLoader = ModelClassLoader(processingTypeConfig.classPath, dependencies.workingDirectoryOpt) + def apply( + processingTypeConfig: ProcessingTypeConfig, + dependencies: ModelDependencies, + managersDir: Path + ): ModelData = { + val modelClassLoader = + ModelClassLoader(managersDir, processingTypeConfig.classPath, dependencies.workingDirectoryOpt) ClassLoaderModelData( _.resolveInputConfigDuringExecution(processingTypeConfig.modelConfig, modelClassLoader.classLoader), modelClassLoader, diff --git a/utils/test-utils/src/main/scala/pl/touk/nussknacker/test/installationexample/DockerBasedNuInstallationExampleEnvironment.scala b/utils/test-utils/src/main/scala/pl/touk/nussknacker/test/installationexample/DockerBasedNuInstallationExampleEnvironment.scala index 241ac05ff16..e2dd40a5272 100644 --- a/utils/test-utils/src/main/scala/pl/touk/nussknacker/test/installationexample/DockerBasedNuInstallationExampleEnvironment.scala +++ b/utils/test-utils/src/main/scala/pl/touk/nussknacker/test/installationexample/DockerBasedNuInstallationExampleEnvironment.scala @@ -39,7 +39,7 @@ class DockerBasedInstallationExampleNuEnvironment( waitingFor = Some( WaitingForService( "bootstrap-setup", - new 
DockerHealthcheckWaitStrategy().withStartupTimeout(Duration.ofSeconds(120)) + new DockerHealthcheckWaitStrategy().withStartupTimeout(Duration.ofSeconds(150)) ) ), // Change to 'true' to enable logging diff --git a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala index 801406ab15e..4a6abb77ad1 100644 --- a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala +++ b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala @@ -1,11 +1,10 @@ package pl.touk.nussknacker.engine.util.loader import com.typesafe.scalalogging.LazyLogging -import pl.touk.nussknacker.engine.util.StringUtils._ -import pl.touk.nussknacker.engine.util.UrlUtils._ +import pl.touk.nussknacker.engine.util.UrlUtils.ExpandFiles import java.io.File -import java.net.{URL, URLClassLoader} +import java.net.{URI, URL, URLClassLoader} import java.nio.file.Path case class ModelClassLoader private (classLoader: ClassLoader, urls: List[URL]) { @@ -23,39 +22,59 @@ case class ModelClassLoader private (classLoader: ClassLoader, urls: List[URL]) object ModelClassLoader extends LazyLogging { // for e.g. 
testing in process module val empty: ModelClassLoader = ModelClassLoader(getClass.getClassLoader, List()) - val defaultJarExtension = ".jar" + + val defaultJarExtension = ".jar" + + private def expandFiles(urls: Iterable[URL], jarExtension: String): Iterable[URL] = { + urls.flatMap { + case url if url.getProtocol.toLowerCase == "file" => + val file = new File(url.toURI) + if (file.isDirectory) { + val expanded = + expandFiles(file.listFiles().filterNot(_.getName.startsWith(".")).map(_.toURI.toURL), jarExtension) + if (expanded.isEmpty) { + List.empty + } else if (expanded.exists(_.getFile.endsWith(jarExtension))) { // not expand if nested jars not exists + expanded + } else { + List(url) + } + } else { + List(url) + } + case url => List(url) + } + } + + private def convertToURL(urlString: String, workingDirectoryOpt: Option[Path]): URL = { + val uri = new URI(urlString) + if (uri.isAbsolute) { + uri.toURL + } else { + val pathPart = uri.getSchemeSpecificPart + val path = workingDirectoryOpt.map { workingDirectory => + workingDirectory.resolve(pathPart) + } getOrElse { + Path.of(pathPart) + } + path.toUri.toURL + } + } // workingDirectoryOpt is for the purpose of easier testing. 
We can't easily change the working directory otherwise - see https://stackoverflow.com/a/840229 def apply( + managersDir: Path, urls: List[String], workingDirectoryOpt: Option[Path], jarExtension: String = defaultJarExtension ): ModelClassLoader = { - val postProcessedURLs = validateExistence( - urls.map(_.convertToURL(workingDirectoryOpt)).flatMap(_.expandFiles(jarExtension)) - ) + val postProcessedURLs = expandFiles(urls.map(convertToURL(_, workingDirectoryOpt)), jarExtension).toList ::: + managersDir.toUri.toURL.expandFiles(".jar") + ModelClassLoader( new URLClassLoader(postProcessedURLs.toArray, this.getClass.getClassLoader), postProcessedURLs.toList ) } - private def validateExistence(urls: Iterable[URL]): Iterable[URL] = { - urls.filterNot(url => doesExist(url)).toList match { - case Nil => urls - case notExisted => - throw new IllegalArgumentException(s"The following URLs don't exist: [${notExisted.mkString(",")}]") - } - } - - private def doesExist(url: URL): Boolean = { - url.getProtocol match { - case "file" => - val file = new File(url.toURI) - file.exists() && file.isFile - case _ => - false - } - } - } diff --git a/utils/utils-internal/src/test/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoaderSpec.scala b/utils/utils-internal/src/test/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoaderSpec.scala index b6caffde94d..6aff9c5ca4d 100644 --- a/utils/utils-internal/src/test/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoaderSpec.scala +++ b/utils/utils-internal/src/test/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoaderSpec.scala @@ -14,7 +14,7 @@ class ModelClassLoaderSpec extends AnyFunSuite with Matchers { val urls = List(resource(""), nonFileUrl) - val loader = ModelClassLoader(urls.map(_.toURI.toString), workingDirectoryOpt = None, jarExtension = ".jara") + val loader = ModelClassLoader(???, urls.map(_.toURI.toString), workingDirectoryOpt = None, jarExtension = ".jara") // we're not using .jar to avoid messing 
with .gitignore val expected = Set( @@ -31,6 +31,7 @@ class ModelClassLoaderSpec extends AnyFunSuite with Matchers { test("should resolve classpath using working directory when defined") { val loader = ModelClassLoader( + ???, List("relative/path", "/absolute/path"), workingDirectoryOpt = Some(Path.of("/some/working/directory")) ) From 3b5e4634bb0e3567d01403b0945a3d1336587305 Mon Sep 17 00:00:00 2001 From: MK Software Date: Fri, 13 Dec 2024 14:49:15 +0100 Subject: [PATCH 06/73] wip --- build.sbt | 23 +- ...sConfigBasedProcessingTypeDataLoader.scala | 15 +- .../test/base/it/NuResourcesTest.scala | 2 +- .../test/mock/MockDeploymentManager.scala | 3 +- .../test/utils/domain/ProcessTestData.scala | 22 +- .../ui/api/ManagementResourcesSpec.scala | 997 +++++++++--------- .../ui/integration/ConfigurationTest.scala | 3 +- ...amingDeploymentManagerProviderHelper.scala | 6 +- .../FlinkStreamingDeploymentManagerSpec.scala | 3 +- .../touk/nussknacker/engine/ModelData.scala | 6 +- .../extension/CastOrConversionExt.scala | 2 +- .../UniversalSchemaBasedSerdeProvider.scala | 2 +- .../engine/util/loader/ModelClassLoader.scala | 5 +- .../util/loader/ModelClassLoaderSpec.scala | 3 +- 14 files changed, 537 insertions(+), 555 deletions(-) diff --git a/build.sbt b/build.sbt index c20b2cd0bef..e9eb33dc08a 100644 --- a/build.sbt +++ b/build.sbt @@ -38,10 +38,6 @@ lazy val silencerV_2_12 = "1.6.0" def propOrEnv(name: String, default: String): String = propOrEnv(name).getOrElse(default) def propOrEnv(name: String): Option[String] = Option(System.getProperty(name)).orElse(sys.env.get(name)) -//by default we include flink and scala, we want to be able to disable this behaviour for performance reasons -val includeFlinkAndScala = propOrEnv("includeFlinkAndScala", "true").toBoolean - -val flinkScope = if (includeFlinkAndScala) "compile" else "provided" val nexusUrlFromProps = propOrEnv("nexusUrl") //TODO: this is pretty clunky, but works so far for our case... 
val nexusHostFromProps = nexusUrlFromProps.map(_.replaceAll("http[s]?://", "").replaceAll("[:/].*", "")) @@ -633,13 +629,15 @@ lazy val flinkDeploymentManager = (project in flink("management")) libraryDependencies ++= { Seq( "org.typelevel" %% "cats-core" % catsV % Provided, - "org.apache.flink" % "flink-streaming-java" % flinkV % Provided, - "org.apache.flink" % "flink-core" % flinkV % Provided, - "org.apache.flink" % "flink-statebackend-rocksdb" % flinkV % flinkScope, + "org.apache.flink" % "flink-streaming-java" % flinkV excludeAll ( + ExclusionRule("log4j", "log4j"), + ExclusionRule("org.slf4j", "slf4j-log4j12"), + ExclusionRule("com.esotericsoftware", "kryo-shaded"), + ), "com.softwaremill.retry" %% "retry" % retryV, "org.wiremock" % "wiremock" % wireMockV % Test, "org.scalatestplus" %% "mockito-5-10" % scalaTestPlusV % Test, - ) ++ flinkLibScalaDeps(scalaVersion.value, Some(flinkScope)) + ) ++ flinkLibScalaDeps(scalaVersion.value) }, // override scala-collection-compat from com.softwaremill.retry:retry dependencyOverrides += "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionsCompatV @@ -1834,9 +1832,8 @@ lazy val flinkBaseUnboundedComponents = (project in flink("components/base-unbou name := "nussknacker-flink-base-unbounded-components", libraryDependencies ++= Seq( "org.apache.flink" % "flink-streaming-java" % flinkV % Provided, - "com.clearspring.analytics" % "stream" % "2.9.8" // It is used only in QDigest which we don't use, while it's >20MB in size... 
- exclude ("it.unimi.dsi", "fastutil") + "com.clearspring.analytics" % "stream" % "2.9.8" exclude ("it.unimi.dsi", "fastutil") ) ) .dependsOn( @@ -1958,7 +1955,7 @@ lazy val designer = (project in file("designer/server")) .settings( assemblySettings( "nussknacker-designer-assembly.jar", - includeScala = includeFlinkAndScala, + includeScala = true, filterProvidedDeps = false ): _* ) @@ -2060,7 +2057,8 @@ lazy val designer = (project in file("designer/server")) "io.circe" %% "circe-yaml" % circeYamlV % Test, "com.github.scopt" %% "scopt" % "4.1.0" % Test, "org.questdb" % "questdb" % "7.4.2", - "org.apache.flink" % "flink-streaming-java" % flinkV exclude ("com.esotericsoftware", "kryo-shaded"), + "org.apache.kafka" % "kafka-clients" % kafkaV, + "org.apache.flink" % "flink-streaming-java" % flinkV, ) ++ forScalaVersion(scalaVersion.value) { case (2, 13) => Seq( @@ -2086,6 +2084,7 @@ lazy val designer = (project in file("designer/server")) testUtils % Test, flinkTestUtils % Test, developmentTestsDeploymentManager % Test, + kafkaComponentsUtils % Test, componentsApi % "test->test", // All DeploymentManager dependencies are added because they are needed to run NussknackerApp* with // dev-application.conf. Currently, we don't have a separate classpath for DMs like we have for components. 
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala index 392ab486106..7e2bf3305a5 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala @@ -12,6 +12,7 @@ import pl.touk.nussknacker.ui.process.processingtype.loader.ProcessingTypeDataLo import pl.touk.nussknacker.ui.process.processingtype.provider.ProcessingTypeDataState import pl.touk.nussknacker.engine.util.UrlUtils._ +import java.nio.file.Path import scala.reflect.internal.util.ScalaClassLoader.URLClassLoader class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConfig) @@ -26,13 +27,11 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConf managersDir <- config.managersDir() processingTypesConfig <- config.loadProcessingTypeConfigs() } yield { - val managersClassLoader = - new URLClassLoader(managersDir.toUri.toURL.expandFiles(".jar"), this.getClass.getClassLoader) // This step with splitting DeploymentManagerProvider loading for all processing types // and after that creating ProcessingTypeData is done because of the deduplication of deployments // See DeploymentManagerProvider.engineSetupIdentity val providerWithNameInputData = processingTypesConfig.mapValuesNow { processingTypeConfig => - val provider = createDeploymentManagerProvider(managersClassLoader, processingTypeConfig) + val provider = createDeploymentManagerProvider(processingTypeConfig, managersDir) val nameInputData = EngineNameInputData( provider.defaultEngineSetupName, 
provider.engineSetupIdentity(processingTypeConfig.deploymentConfig), @@ -48,7 +47,7 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConf val modelDependencies = getModelDependencies(processingType) val processingTypeData = ProcessingTypeData.createProcessingTypeData( processingType, - ModelData(processingTypeConfig, modelDependencies, managersDir), + ModelData(processingTypeConfig, modelDependencies), deploymentManagerProvider, getDeploymentManagerDependencies(processingType), engineSetupNames(processingType), @@ -73,12 +72,14 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConf } private def createDeploymentManagerProvider( - classLoader: ClassLoader, - typeConfig: ProcessingTypeConfig + typeConfig: ProcessingTypeConfig, + managersDir: Path, ): DeploymentManagerProvider = { + val managersClassLoader = + new URLClassLoader(managersDir.toUri.toURL.expandFiles(".jar"), this.getClass.getClassLoader) ScalaServiceLoader.loadNamed[DeploymentManagerProvider]( typeConfig.deploymentManagerType, - classLoader + managersClassLoader ) // todo: close } diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala index f2f49baf4ee..cde22f36654 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala @@ -128,7 +128,7 @@ trait NuResourcesTest protected val deploymentManagerProvider: DeploymentManagerProvider = new MockManagerProvider(deploymentManager) - private val modelData = ModelData(processingTypeConfig, modelDependencies, ???) 
+ private val modelData = ModelData(processingTypeConfig, modelDependencies) protected val testProcessingTypeDataProvider: ProcessingTypeDataProvider[ProcessingTypeData, _] = mapProcessingTypeDataProvider( diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala index 1e5188832e3..dcd25483e6d 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala @@ -47,8 +47,7 @@ class MockDeploymentManager( ) extends FlinkDeploymentManager( ModelData( ProcessingTypeConfig.read(ConfigWithScalaVersion.StreamingProcessTypeConfig), - TestFactory.modelDependencies, - ??? + TestFactory.modelDependencies ), DeploymentManagerDependencies( deployedScenariosProvider, diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ProcessTestData.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ProcessTestData.scala index f2bbcb5c3f4..0fbc61496cf 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ProcessTestData.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ProcessTestData.scala @@ -23,7 +23,7 @@ import pl.touk.nussknacker.engine.graph.node.FragmentInputDefinition.{FragmentCl import pl.touk.nussknacker.engine.graph.node._ import pl.touk.nussknacker.engine.graph.sink.SinkRef import pl.touk.nussknacker.engine.graph.source.SourceRef -//import pl.touk.nussknacker.engine.kafka.KafkaFactory +import pl.touk.nussknacker.engine.kafka.KafkaFactory import pl.touk.nussknacker.engine.testing.ModelDefinitionBuilder import pl.touk.nussknacker.restmodel.scenariodetails.{ScenarioParameters, ScenarioWithDetailsForMigrations} import pl.touk.nussknacker.test.config.WithSimplifiedDesignerConfig.TestProcessingType.Streaming @@ 
-43,7 +43,7 @@ import pl.touk.nussknacker.ui.validation.{ScenarioLabelsValidator, UIProcessVali object ProcessTestData { -// import KafkaFactory._ + import KafkaFactory._ import pl.touk.nussknacker.engine.spel.SpelExtension._ val existingSourceFactory = "barSource" @@ -82,10 +82,8 @@ object ProcessTestData { .withSink(existingSinkFactory) .withSink( existingSinkFactoryKafkaString, - ???, - ???, // todo: -// Parameter[String](TopicParamName), -// Parameter[Any](SinkValueParamName).copy(isLazyParameter = true) + Parameter[String](TopicParamName), + Parameter[Any](SinkValueParamName).copy(isLazyParameter = true) ) .withService(existingServiceId) .withService(otherExistingServiceId) @@ -210,10 +208,8 @@ object ProcessTestData { .emptySink( "end" + idSuffix, "kafka-string", - ???, - ???, // todo: -// TopicParamName.value -> "'end.topic'".spel, -// SinkValueParamName.value -> "#output".spel + TopicParamName.value -> "'end.topic'".spel, + SinkValueParamName.value -> "#output".spel ) } ScenarioBuilder @@ -481,10 +477,8 @@ object ProcessTestData { .emptySink( "end" + idSuffix, "kafka-string", - ???, - ???, // todo: -// TopicParamName.value -> spelTemplate("end.topic"), -// SinkValueParamName.value -> spelTemplate("#output") + TopicParamName.value -> spelTemplate("end.topic"), + SinkValueParamName.value -> spelTemplate("#output") ) } diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ManagementResourcesSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ManagementResourcesSpec.scala index c2c21afc9aa..988b54345e8 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ManagementResourcesSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ManagementResourcesSpec.scala @@ -1,499 +1,498 @@ -//package pl.touk.nussknacker.ui.api -// -//import akka.http.scaladsl.model.{ContentTypeRange, StatusCodes} -//import akka.http.scaladsl.server -//import akka.http.scaladsl.testkit.ScalatestRouteTest -//import 
akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller} -//import cats.instances.all._ -//import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport -//import io.circe.Json -//import org.scalatest.funsuite.AnyFunSuite -//import org.scalatest.matchers.BeMatcher -//import org.scalatest.matchers.should.Matchers -//import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, OptionValues} -//import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus -//import pl.touk.nussknacker.engine.api.deployment.{ProcessAction, ScenarioActionName, ScenarioActivity} -//import pl.touk.nussknacker.engine.api.process.{ProcessName, VersionId} -//import pl.touk.nussknacker.engine.api.{MetaData, StreamMetaData} -//import pl.touk.nussknacker.engine.build.ScenarioBuilder -//import pl.touk.nussknacker.engine.kafka.KafkaFactory -//import pl.touk.nussknacker.engine.spel.SpelExtension._ -//import pl.touk.nussknacker.restmodel.scenariodetails._ -//import pl.touk.nussknacker.restmodel.{CustomActionRequest, CustomActionResponse} -//import pl.touk.nussknacker.security.Permission -//import pl.touk.nussknacker.test.PatientScalaFutures -//import pl.touk.nussknacker.test.base.it.NuResourcesTest -//import pl.touk.nussknacker.test.mock.MockDeploymentManager -//import pl.touk.nussknacker.test.utils.domain.TestFactory.{withAllPermissions, withPermissions} -//import pl.touk.nussknacker.test.utils.domain.{ProcessTestData, TestFactory} -//import pl.touk.nussknacker.ui.api.description.scenarioActivity.Dtos -//import pl.touk.nussknacker.ui.process.ScenarioQuery -//import pl.touk.nussknacker.ui.process.exception.ProcessIllegalAction -// -// todo: -//// TODO: all these tests should be migrated to ManagementApiHttpServiceBusinessSpec or ManagementApiHttpServiceSecuritySpec -//class ManagementResourcesSpec -// extends AnyFunSuite -// with ScalatestRouteTest -// with FailFastCirceSupport -// with Matchers -// with PatientScalaFutures -// with OptionValues -// with BeforeAndAfterEach 
-// with BeforeAndAfterAll -// with NuResourcesTest { -// -// import KafkaFactory._ -// -// private implicit final val string: FromEntityUnmarshaller[String] = -// Unmarshaller.stringUnmarshaller.forContentTypes(ContentTypeRange.*) -// -// private val processName: ProcessName = ProcessTestData.sampleScenario.name -// -// private def deployedWithVersions(versionId: Long): BeMatcher[Option[ProcessAction]] = { -// BeMatcher[(ScenarioActionName, VersionId)](equal((ScenarioActionName.Deploy, VersionId(versionId)))) -// .compose[ProcessAction](a => (a.actionName, a.processVersionId)) -// .compose[Option[ProcessAction]](opt => opt.value) -// } -// -// test("process deployment should be visible in process history") { -// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) -// deployProcess(processName) ~> checkThatEventually { -// status shouldBe StatusCodes.OK -// getProcess(processName) ~> check { -// decodeDetails.lastStateAction shouldBe deployedWithVersions(2) -// updateCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) -// deployProcess(processName) ~> checkThatEventually { -// getProcess(processName) ~> check { -// decodeDetails.lastStateAction shouldBe deployedWithVersions(2) -// } -// } -// } -// } -// } -// -// test("process during deploy cannot be deployed again") { -// createDeployedExampleScenario(processName) -// -// deploymentManager.withProcessStateStatus(processName, SimpleStateStatus.DuringDeploy) { -// deployProcess(processName) ~> check { -// status shouldBe StatusCodes.Conflict -// } -// } -// } -// -// test("canceled process can't be canceled again") { -// createDeployedCanceledExampleScenario(processName) -// -// deploymentManager.withProcessStateStatus(processName, SimpleStateStatus.Canceled) { -// cancelProcess(processName) ~> check { -// status shouldBe StatusCodes.Conflict -// } -// } -// } -// -// test("can't deploy archived process") { -// createArchivedProcess(processName) -// -// 
deploymentManager.withProcessStateStatus(processName, SimpleStateStatus.Canceled) { -// deployProcess(processName) ~> check { -// status shouldBe StatusCodes.Conflict -// responseAs[String] shouldBe ProcessIllegalAction -// .archived(ScenarioActionName.Deploy, processName) -// .message -// } -// } -// } -// -// test("can't deploy fragment") { -// createValidProcess(processName, isFragment = true) -// -// deployProcess(processName) ~> check { -// status shouldBe StatusCodes.Conflict -// responseAs[String] shouldBe ProcessIllegalAction -// .fragment(ScenarioActionName.Deploy, processName) -// .message -// } -// } -// -// test("can't cancel fragment") { -// createValidProcess(processName, isFragment = true) -// -// deployProcess(processName) ~> check { -// status shouldBe StatusCodes.Conflict -// responseAs[String] shouldBe ProcessIllegalAction -// .fragment(ScenarioActionName.Deploy, processName) -// .message -// } -// } -// -// test("deploys and cancels with comment") { -// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) -// deployProcess( -// ProcessTestData.sampleScenario.name, -// comment = Some("deployComment") -// ) ~> checkThatEventually { -// getProcess(processName) ~> check { -// val processDetails = responseAs[ScenarioWithDetails] -// processDetails.lastStateAction.exists(_.actionName == ScenarioActionName.Deploy) shouldBe true -// } -// cancelProcess( -// ProcessTestData.sampleScenario.name, -// comment = Some("cancelComment") -// ) ~> check { -// status shouldBe StatusCodes.OK -// // TODO: remove Deployment:, Stop: after adding custom icons -// val expectedDeployComment = "deployComment" -// val expectedStopComment = "cancelComment" -// val expectedDeployCommentInLegacyService = s"Deployment: $expectedDeployComment" -// val expectedStopCommentInLegacyService = s"Stop: $expectedStopComment" -// getActivity(ProcessTestData.sampleScenario.name) ~> check { -// val comments = responseAs[Dtos.Legacy.ProcessActivity].comments.sortBy(_.id) 
-// comments.map(_.content) shouldBe List( -// expectedDeployCommentInLegacyService, -// expectedStopCommentInLegacyService -// ) -// val firstCommentId :: secondCommentId :: Nil = comments.map(_.id) -// -// Get(s"/processes/${ProcessTestData.sampleScenario.name}/deployments") ~> withAllPermissions( -// processesRoute -// ) ~> check { -// val deploymentHistory = responseAs[List[ProcessAction]] -// deploymentHistory.map(a => -// (a.processVersionId, a.user, a.actionName, a.commentId, a.comment, a.buildInfo) -// ) shouldBe List( -// ( -// VersionId(2), -// TestFactory.user().username, -// ScenarioActionName.Cancel, -// Some(secondCommentId), -// Some(expectedStopComment), -// Map() -// ), -// ( -// VersionId(2), -// TestFactory.user().username, -// ScenarioActionName.Deploy, -// Some(firstCommentId), -// Some(expectedDeployComment), -// TestFactory.buildInfo -// ) -// ) -// } -// } -// } -// } -// } -// -// test("deploy technical process and mark it as deployed") { -// createValidProcess(processName) -// -// deployProcess(processName) ~> checkThatEventually { -// status shouldBe StatusCodes.OK -// getProcess(processName) ~> check { -// val processDetails = responseAs[ScenarioWithDetails] -// processDetails.lastStateAction shouldBe deployedWithVersions(1) -// processDetails.lastStateAction.exists(_.actionName == ScenarioActionName.Deploy) shouldBe true -// } -// } -// } -// -// test("recognize process cancel in deployment list") { -// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) -// deployProcess(ProcessTestData.sampleScenario.name) ~> checkThatEventually { -// status shouldBe StatusCodes.OK -// getProcess(processName) ~> check { -// decodeDetails.lastStateAction shouldBe deployedWithVersions(2) -// cancelProcess(ProcessTestData.sampleScenario.name) ~> check { -// getProcess(processName) ~> check { -// decodeDetails.lastStateAction.exists(_.actionName == ScenarioActionName.Cancel) shouldBe true -// } -// } -// } -// } -// } -// -// 
test("recognize process deploy and cancel in global process list") { -// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) -// deployProcess(ProcessTestData.sampleScenario.name) ~> checkThatEventually { -// status shouldBe StatusCodes.OK -// -// forScenariosReturned(ScenarioQuery.empty) { processes => -// val process = processes.find(_.name == ProcessTestData.sampleScenario.name.value).head -// process.lastActionVersionId shouldBe Some(2L) -// process.isDeployed shouldBe true -// -// cancelProcess(ProcessTestData.sampleScenario.name) ~> check { -// forScenariosReturned(ScenarioQuery.empty) { processes => -// val process = processes.find(_.name == ProcessTestData.sampleScenario.name.value).head -// process.lastActionVersionId shouldBe Some(2L) -// process.isCanceled shouldBe true -// } -// } -// } -// } -// } -// -// test("not authorize user with write permission to deploy") { -// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) -// Post(s"/processManagement/deploy/${ProcessTestData.sampleScenario.name}") ~> withPermissions( -// deployRoute(), -// Permission.Write -// ) ~> check { -// rejection shouldBe server.AuthorizationFailedRejection -// } -// } -// -// test("should allow deployment of scenario with warning") { -// val processWithDisabledFilter = ScenarioBuilder -// .streaming(processName.value) -// .parallelism(1) -// .source("startProcess", "csv-source") -// .filter("input", "#input != null".spel, Some(true)) -// .emptySink( -// "end", -// "kafka-string", -// TopicParamName.value -> "'end.topic'".spel, -// SinkValueParamName.value -> "#input".spel -// ) -// -// saveCanonicalProcessAndAssertSuccess(processWithDisabledFilter) -// deployProcess(processName) ~> check { -// status shouldBe StatusCodes.OK -// } -// } -// -// test("should return failure for not validating scenario") { -// val invalidScenario = ScenarioBuilder -// .streaming(processName.value) -// .parallelism(1) -// .source("start", "not existing") -// 
.emptySink( -// "end", -// "kafka-string", -// TopicParamName.value -> "'end.topic'".spel, -// SinkValueParamName.value -> "#output".spel -// ) -// saveCanonicalProcessAndAssertSuccess(invalidScenario) -// -// deploymentManager.withEmptyProcessState(invalidScenario.name) { -// deployProcess(invalidScenario.name) ~> check { -// responseAs[String] shouldBe "Cannot deploy invalid scenario" -// status shouldBe StatusCodes.Conflict -// } -// getProcess(invalidScenario.name) ~> check { -// decodeDetails.state.value.status shouldEqual SimpleStateStatus.NotDeployed -// } -// } -// } -// -// test("should return failure for not validating deployment") { -// val largeParallelismScenario = ProcessTestData.sampleScenario.copy(metaData = -// MetaData( -// ProcessTestData.sampleScenario.name.value, -// StreamMetaData(parallelism = Some(MockDeploymentManager.maxParallelism + 1)) -// ) -// ) -// saveCanonicalProcessAndAssertSuccess(largeParallelismScenario) -// -// deploymentManager.withFailingDeployment(largeParallelismScenario.name) { -// deployProcess(largeParallelismScenario.name) ~> check { -// status shouldBe StatusCodes.BadRequest -// responseAs[String] shouldBe "Parallelism too large" -// } -// } -// } -// -// test("return from deploy before deployment manager proceeds") { -// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) -// -// deploymentManager.withWaitForDeployFinish(ProcessTestData.sampleScenario.name) { -// deployProcess(ProcessTestData.sampleScenario.name) ~> check { -// status shouldBe StatusCodes.OK -// } -// } -// } -// -// test("snapshots process") { -// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) -// deploymentManager.withProcessRunning(ProcessTestData.sampleScenario.name) { -// snapshot(ProcessTestData.sampleScenario.name) ~> check { -// status shouldBe StatusCodes.OK -// responseAs[String] shouldBe MockDeploymentManager.savepointPath -// } -// } -// } -// -// test("stops process") { -// 
saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) -// deploymentManager.withProcessRunning(ProcessTestData.sampleScenario.name) { -// stop(ProcessTestData.sampleScenario.name) ~> check { -// status shouldBe StatusCodes.OK -// responseAs[String] shouldBe MockDeploymentManager.stopSavepointPath -// } -// } -// } -// -// test("return test results") { -// val testDataContent = -// """{"sourceId":"startProcess","record":"ala"} -// |{"sourceId":"startProcess","record":"bela"}""".stripMargin -// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) -// -// testScenario(ProcessTestData.sampleScenario, testDataContent) ~> check { -// -// status shouldEqual StatusCodes.OK -// -// val ctx = responseAs[Json].hcursor -// .downField("results") -// .downField("nodeResults") -// .downField("endsuffix") -// .downArray -// .downField("variables") -// -// ctx -// .downField("output") -// .downField("pretty") -// .downField("message") -// .focus shouldBe Some(Json.fromString("message")) -// -// ctx -// .downField("input") -// .downField("pretty") -// .downField("firstField") -// .focus shouldBe Some(Json.fromString("ala")) -// } -// } -// -// test("return test results of errors, including null") { -// -// import pl.touk.nussknacker.engine.spel.SpelExtension._ -// -// val process = ScenarioBuilder -// .streaming(processName.value) -// .parallelism(1) -// .source("startProcess", "csv-source") -// .filter("input", "new java.math.BigDecimal(null) == 0".spel) -// .emptySink( -// "end", -// "kafka-string", -// TopicParamName.value -> "'end.topic'".spel, -// SinkValueParamName.value -> "''".spel -// ) -// val testDataContent = -// """{"sourceId":"startProcess","record":"ala"} -// |"bela"""".stripMargin -// saveCanonicalProcessAndAssertSuccess(process) -// -// testScenario(process, testDataContent) ~> check { -// status shouldEqual StatusCodes.OK -// } -// } -// -// test("refuses to test if too much data") { -// -// import 
pl.touk.nussknacker.engine.spel.SpelExtension._ -// -// val process = { -// ScenarioBuilder -// .streaming(processName.value) -// .parallelism(1) -// .source("startProcess", "csv-source") -// .emptySink("end", "kafka-string", TopicParamName.value -> "'end.topic'".spel) -// } -// saveCanonicalProcessAndAssertSuccess(process) -// val tooLargeTestDataContentList = List((1 to 50).mkString("\n"), (1 to 50000).mkString("-")) -// -// tooLargeTestDataContentList.foreach { tooLargeData => -// testScenario(process, tooLargeData) ~> check { -// status shouldEqual StatusCodes.BadRequest -// } -// } -// } -// -// test("rejects test record with non-existing source") { -// saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) -// val testDataContent = -// """{"sourceId":"startProcess","record":"ala"} -// |{"sourceId":"unknown","record":"bela"}""".stripMargin -// -// testScenario(ProcessTestData.sampleScenario, testDataContent) ~> check { -// status shouldEqual StatusCodes.BadRequest -// responseAs[String] shouldBe "Record 2 - scenario does not have source id: 'unknown'" -// } -// } -// -// test("execute valid custom action") { -// createEmptyProcess(ProcessTestData.sampleProcessName) -// customAction( -// ProcessTestData.sampleProcessName, -// CustomActionRequest(ScenarioActionName("hello")) -// ) ~> check { -// status shouldBe StatusCodes.OK -// responseAs[CustomActionResponse] shouldBe CustomActionResponse(isSuccess = true, msg = "Hi") -// } -// } -// -// test("execute non existing custom action") { -// createEmptyProcess(ProcessTestData.sampleProcessName) -// customAction( -// ProcessTestData.sampleProcessName, -// CustomActionRequest(ScenarioActionName("non-existing")) -// ) ~> check { -// status shouldBe StatusCodes.NotFound -// responseAs[ -// String -// ] shouldBe s"Couldn't find definition of action non-existing for scenario ${ProcessTestData.sampleProcessName}" -// } -// } -// -// test("execute not implemented custom action") { -// 
createEmptyProcess(ProcessTestData.sampleProcessName) -// customAction( -// ProcessTestData.sampleProcessName, -// CustomActionRequest(ScenarioActionName("not-implemented")) -// ) ~> check { -// status shouldBe StatusCodes.NotImplemented -// responseAs[String] shouldBe "an implementation is missing" -// } -// } -// -// test("execute custom action with not allowed process status") { -// createEmptyProcess(ProcessTestData.sampleProcessName) -// customAction( -// ProcessTestData.sampleProcessName, -// CustomActionRequest(ScenarioActionName("invalid-status")) -// ) ~> check { -// // TODO: "conflict" is coherrent with "canceled process can't be canceled again" above, consider changing to Forbidden -// status shouldBe StatusCodes.Conflict -// responseAs[String] shouldBe "Action: invalid-status is not allowed in scenario (fooProcess) state: NOT_DEPLOYED, allowed actions: not-implemented,ARCHIVE,DEPLOY,RENAME,hello." -// } -// } -// -// test("should return 403 when execute custom action on archived process") { -// createArchivedProcess(ProcessTestData.sampleProcessName) -// customAction( -// ProcessTestData.sampleProcessName, -// CustomActionRequest(ScenarioActionName("hello")) -// ) ~> check { -// // TODO: "conflict" is coherrent with "can't deploy fragment" above, consider changing to Forbidden -// status shouldBe StatusCodes.Conflict -// } -// } -// -// test("should return 403 when execute custom action on fragment") { -// createEmptyProcess(ProcessTestData.sampleProcessName, isFragment = true) -// customAction( -// ProcessTestData.sampleProcessName, -// CustomActionRequest(ScenarioActionName("hello")) -// ) ~> check { -// // TODO: "conflict" is coherrent with "can't deploy fragment" above, consider changing to Forbidden -// status shouldBe StatusCodes.Conflict -// } -// } -// -// def decodeDetails: ScenarioWithDetails = responseAs[ScenarioWithDetails] -// -// def checkThatEventually[T](body: => T): RouteTestResult => T = check(eventually(body)) -// -//} +package 
pl.touk.nussknacker.ui.api + +import akka.http.scaladsl.model.{ContentTypeRange, StatusCodes} +import akka.http.scaladsl.server +import akka.http.scaladsl.testkit.ScalatestRouteTest +import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller} +import cats.instances.all._ +import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport +import io.circe.Json +import org.scalatest.funsuite.AnyFunSuite +import org.scalatest.matchers.BeMatcher +import org.scalatest.matchers.should.Matchers +import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, OptionValues} +import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus +import pl.touk.nussknacker.engine.api.deployment.{ProcessAction, ScenarioActionName, ScenarioActivity} +import pl.touk.nussknacker.engine.api.process.{ProcessName, VersionId} +import pl.touk.nussknacker.engine.api.{MetaData, StreamMetaData} +import pl.touk.nussknacker.engine.build.ScenarioBuilder +import pl.touk.nussknacker.engine.kafka.KafkaFactory +import pl.touk.nussknacker.engine.spel.SpelExtension._ +import pl.touk.nussknacker.restmodel.scenariodetails._ +import pl.touk.nussknacker.restmodel.{CustomActionRequest, CustomActionResponse} +import pl.touk.nussknacker.security.Permission +import pl.touk.nussknacker.test.PatientScalaFutures +import pl.touk.nussknacker.test.base.it.NuResourcesTest +import pl.touk.nussknacker.test.mock.MockDeploymentManager +import pl.touk.nussknacker.test.utils.domain.TestFactory.{withAllPermissions, withPermissions} +import pl.touk.nussknacker.test.utils.domain.{ProcessTestData, TestFactory} +import pl.touk.nussknacker.ui.api.description.scenarioActivity.Dtos +import pl.touk.nussknacker.ui.process.ScenarioQuery +import pl.touk.nussknacker.ui.process.exception.ProcessIllegalAction + +// TODO: all these tests should be migrated to ManagementApiHttpServiceBusinessSpec or ManagementApiHttpServiceSecuritySpec +class ManagementResourcesSpec + extends AnyFunSuite + with ScalatestRouteTest + with 
FailFastCirceSupport + with Matchers + with PatientScalaFutures + with OptionValues + with BeforeAndAfterEach + with BeforeAndAfterAll + with NuResourcesTest { + + import KafkaFactory._ + + private implicit final val string: FromEntityUnmarshaller[String] = + Unmarshaller.stringUnmarshaller.forContentTypes(ContentTypeRange.*) + + private val processName: ProcessName = ProcessTestData.sampleScenario.name + + private def deployedWithVersions(versionId: Long): BeMatcher[Option[ProcessAction]] = { + BeMatcher[(ScenarioActionName, VersionId)](equal((ScenarioActionName.Deploy, VersionId(versionId)))) + .compose[ProcessAction](a => (a.actionName, a.processVersionId)) + .compose[Option[ProcessAction]](opt => opt.value) + } + + test("process deployment should be visible in process history") { + saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) + deployProcess(processName) ~> checkThatEventually { + status shouldBe StatusCodes.OK + getProcess(processName) ~> check { + decodeDetails.lastStateAction shouldBe deployedWithVersions(2) + updateCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) + deployProcess(processName) ~> checkThatEventually { + getProcess(processName) ~> check { + decodeDetails.lastStateAction shouldBe deployedWithVersions(2) + } + } + } + } + } + + test("process during deploy cannot be deployed again") { + createDeployedExampleScenario(processName) + + deploymentManager.withProcessStateStatus(processName, SimpleStateStatus.DuringDeploy) { + deployProcess(processName) ~> check { + status shouldBe StatusCodes.Conflict + } + } + } + + test("canceled process can't be canceled again") { + createDeployedCanceledExampleScenario(processName) + + deploymentManager.withProcessStateStatus(processName, SimpleStateStatus.Canceled) { + cancelProcess(processName) ~> check { + status shouldBe StatusCodes.Conflict + } + } + } + + test("can't deploy archived process") { + createArchivedProcess(processName) + + 
deploymentManager.withProcessStateStatus(processName, SimpleStateStatus.Canceled) { + deployProcess(processName) ~> check { + status shouldBe StatusCodes.Conflict + responseAs[String] shouldBe ProcessIllegalAction + .archived(ScenarioActionName.Deploy, processName) + .message + } + } + } + + test("can't deploy fragment") { + createValidProcess(processName, isFragment = true) + + deployProcess(processName) ~> check { + status shouldBe StatusCodes.Conflict + responseAs[String] shouldBe ProcessIllegalAction + .fragment(ScenarioActionName.Deploy, processName) + .message + } + } + + test("can't cancel fragment") { + createValidProcess(processName, isFragment = true) + + deployProcess(processName) ~> check { + status shouldBe StatusCodes.Conflict + responseAs[String] shouldBe ProcessIllegalAction + .fragment(ScenarioActionName.Deploy, processName) + .message + } + } + + test("deploys and cancels with comment") { + saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) + deployProcess( + ProcessTestData.sampleScenario.name, + comment = Some("deployComment") + ) ~> checkThatEventually { + getProcess(processName) ~> check { + val processDetails = responseAs[ScenarioWithDetails] + processDetails.lastStateAction.exists(_.actionName == ScenarioActionName.Deploy) shouldBe true + } + cancelProcess( + ProcessTestData.sampleScenario.name, + comment = Some("cancelComment") + ) ~> check { + status shouldBe StatusCodes.OK + // TODO: remove Deployment:, Stop: after adding custom icons + val expectedDeployComment = "deployComment" + val expectedStopComment = "cancelComment" + val expectedDeployCommentInLegacyService = s"Deployment: $expectedDeployComment" + val expectedStopCommentInLegacyService = s"Stop: $expectedStopComment" + getActivity(ProcessTestData.sampleScenario.name) ~> check { + val comments = responseAs[Dtos.Legacy.ProcessActivity].comments.sortBy(_.id) + comments.map(_.content) shouldBe List( + expectedDeployCommentInLegacyService, + 
expectedStopCommentInLegacyService + ) + val firstCommentId :: secondCommentId :: Nil = comments.map(_.id) + + Get(s"/processes/${ProcessTestData.sampleScenario.name}/deployments") ~> withAllPermissions( + processesRoute + ) ~> check { + val deploymentHistory = responseAs[List[ProcessAction]] + deploymentHistory.map(a => + (a.processVersionId, a.user, a.actionName, a.commentId, a.comment, a.buildInfo) + ) shouldBe List( + ( + VersionId(2), + TestFactory.user().username, + ScenarioActionName.Cancel, + Some(secondCommentId), + Some(expectedStopComment), + Map() + ), + ( + VersionId(2), + TestFactory.user().username, + ScenarioActionName.Deploy, + Some(firstCommentId), + Some(expectedDeployComment), + TestFactory.buildInfo + ) + ) + } + } + } + } + } + + test("deploy technical process and mark it as deployed") { + createValidProcess(processName) + + deployProcess(processName) ~> checkThatEventually { + status shouldBe StatusCodes.OK + getProcess(processName) ~> check { + val processDetails = responseAs[ScenarioWithDetails] + processDetails.lastStateAction shouldBe deployedWithVersions(1) + processDetails.lastStateAction.exists(_.actionName == ScenarioActionName.Deploy) shouldBe true + } + } + } + + test("recognize process cancel in deployment list") { + saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) + deployProcess(ProcessTestData.sampleScenario.name) ~> checkThatEventually { + status shouldBe StatusCodes.OK + getProcess(processName) ~> check { + decodeDetails.lastStateAction shouldBe deployedWithVersions(2) + cancelProcess(ProcessTestData.sampleScenario.name) ~> check { + getProcess(processName) ~> check { + decodeDetails.lastStateAction.exists(_.actionName == ScenarioActionName.Cancel) shouldBe true + } + } + } + } + } + + test("recognize process deploy and cancel in global process list") { + saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) + deployProcess(ProcessTestData.sampleScenario.name) ~> checkThatEventually { + 
status shouldBe StatusCodes.OK + + forScenariosReturned(ScenarioQuery.empty) { processes => + val process = processes.find(_.name == ProcessTestData.sampleScenario.name.value).head + process.lastActionVersionId shouldBe Some(2L) + process.isDeployed shouldBe true + + cancelProcess(ProcessTestData.sampleScenario.name) ~> check { + forScenariosReturned(ScenarioQuery.empty) { processes => + val process = processes.find(_.name == ProcessTestData.sampleScenario.name.value).head + process.lastActionVersionId shouldBe Some(2L) + process.isCanceled shouldBe true + } + } + } + } + } + + test("not authorize user with write permission to deploy") { + saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) + Post(s"/processManagement/deploy/${ProcessTestData.sampleScenario.name}") ~> withPermissions( + deployRoute(), + Permission.Write + ) ~> check { + rejection shouldBe server.AuthorizationFailedRejection + } + } + + test("should allow deployment of scenario with warning") { + val processWithDisabledFilter = ScenarioBuilder + .streaming(processName.value) + .parallelism(1) + .source("startProcess", "csv-source") + .filter("input", "#input != null".spel, Some(true)) + .emptySink( + "end", + "kafka-string", + TopicParamName.value -> "'end.topic'".spel, + SinkValueParamName.value -> "#input".spel + ) + + saveCanonicalProcessAndAssertSuccess(processWithDisabledFilter) + deployProcess(processName) ~> check { + status shouldBe StatusCodes.OK + } + } + + test("should return failure for not validating scenario") { + val invalidScenario = ScenarioBuilder + .streaming(processName.value) + .parallelism(1) + .source("start", "not existing") + .emptySink( + "end", + "kafka-string", + TopicParamName.value -> "'end.topic'".spel, + SinkValueParamName.value -> "#output".spel + ) + saveCanonicalProcessAndAssertSuccess(invalidScenario) + + deploymentManager.withEmptyProcessState(invalidScenario.name) { + deployProcess(invalidScenario.name) ~> check { + responseAs[String] shouldBe 
"Cannot deploy invalid scenario" + status shouldBe StatusCodes.Conflict + } + getProcess(invalidScenario.name) ~> check { + decodeDetails.state.value.status shouldEqual SimpleStateStatus.NotDeployed + } + } + } + + test("should return failure for not validating deployment") { + val largeParallelismScenario = ProcessTestData.sampleScenario.copy(metaData = + MetaData( + ProcessTestData.sampleScenario.name.value, + StreamMetaData(parallelism = Some(MockDeploymentManager.maxParallelism + 1)) + ) + ) + saveCanonicalProcessAndAssertSuccess(largeParallelismScenario) + + deploymentManager.withFailingDeployment(largeParallelismScenario.name) { + deployProcess(largeParallelismScenario.name) ~> check { + status shouldBe StatusCodes.BadRequest + responseAs[String] shouldBe "Parallelism too large" + } + } + } + + test("return from deploy before deployment manager proceeds") { + saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) + + deploymentManager.withWaitForDeployFinish(ProcessTestData.sampleScenario.name) { + deployProcess(ProcessTestData.sampleScenario.name) ~> check { + status shouldBe StatusCodes.OK + } + } + } + + test("snapshots process") { + saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) + deploymentManager.withProcessRunning(ProcessTestData.sampleScenario.name) { + snapshot(ProcessTestData.sampleScenario.name) ~> check { + status shouldBe StatusCodes.OK + responseAs[String] shouldBe MockDeploymentManager.savepointPath + } + } + } + + test("stops process") { + saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) + deploymentManager.withProcessRunning(ProcessTestData.sampleScenario.name) { + stop(ProcessTestData.sampleScenario.name) ~> check { + status shouldBe StatusCodes.OK + responseAs[String] shouldBe MockDeploymentManager.stopSavepointPath + } + } + } + + test("return test results") { + val testDataContent = + """{"sourceId":"startProcess","record":"ala"} + 
|{"sourceId":"startProcess","record":"bela"}""".stripMargin + saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario) + + testScenario(ProcessTestData.sampleScenario, testDataContent) ~> check { + + status shouldEqual StatusCodes.OK + + val ctx = responseAs[Json].hcursor + .downField("results") + .downField("nodeResults") + .downField("endsuffix") + .downArray + .downField("variables") + + ctx + .downField("output") + .downField("pretty") + .downField("message") + .focus shouldBe Some(Json.fromString("message")) + + ctx + .downField("input") + .downField("pretty") + .downField("firstField") + .focus shouldBe Some(Json.fromString("ala")) + } + } + + test("return test results of errors, including null") { + + import pl.touk.nussknacker.engine.spel.SpelExtension._ + + val process = ScenarioBuilder + .streaming(processName.value) + .parallelism(1) + .source("startProcess", "csv-source") + .filter("input", "new java.math.BigDecimal(null) == 0".spel) + .emptySink( + "end", + "kafka-string", + TopicParamName.value -> "'end.topic'".spel, + SinkValueParamName.value -> "''".spel + ) + val testDataContent = + """{"sourceId":"startProcess","record":"ala"} + |"bela"""".stripMargin + saveCanonicalProcessAndAssertSuccess(process) + + testScenario(process, testDataContent) ~> check { + status shouldEqual StatusCodes.OK + } + } + + test("refuses to test if too much data") { + + import pl.touk.nussknacker.engine.spel.SpelExtension._ + + val process = { + ScenarioBuilder + .streaming(processName.value) + .parallelism(1) + .source("startProcess", "csv-source") + .emptySink("end", "kafka-string", TopicParamName.value -> "'end.topic'".spel) + } + saveCanonicalProcessAndAssertSuccess(process) + val tooLargeTestDataContentList = List((1 to 50).mkString("\n"), (1 to 50000).mkString("-")) + + tooLargeTestDataContentList.foreach { tooLargeData => + testScenario(process, tooLargeData) ~> check { + status shouldEqual StatusCodes.BadRequest + } + } + } + + test("rejects test record 
with non-existing source") {
+    saveCanonicalProcessAndAssertSuccess(ProcessTestData.sampleScenario)
+    val testDataContent =
+      """{"sourceId":"startProcess","record":"ala"}
+        |{"sourceId":"unknown","record":"bela"}""".stripMargin
+
+    testScenario(ProcessTestData.sampleScenario, testDataContent) ~> check {
+      status shouldEqual StatusCodes.BadRequest
+      responseAs[String] shouldBe "Record 2 - scenario does not have source id: 'unknown'"
+    }
+  }
+
+  test("execute valid custom action") {
+    createEmptyProcess(ProcessTestData.sampleProcessName)
+    customAction(
+      ProcessTestData.sampleProcessName,
+      CustomActionRequest(ScenarioActionName("hello"))
+    ) ~> check {
+      status shouldBe StatusCodes.OK
+      responseAs[CustomActionResponse] shouldBe CustomActionResponse(isSuccess = true, msg = "Hi")
+    }
+  }
+
+  test("execute non existing custom action") {
+    createEmptyProcess(ProcessTestData.sampleProcessName)
+    customAction(
+      ProcessTestData.sampleProcessName,
+      CustomActionRequest(ScenarioActionName("non-existing"))
+    ) ~> check {
+      status shouldBe StatusCodes.NotFound
+      responseAs[
+        String
+      ] shouldBe s"Couldn't find definition of action non-existing for scenario ${ProcessTestData.sampleProcessName}"
+    }
+  }
+
+  test("execute not implemented custom action") {
+    createEmptyProcess(ProcessTestData.sampleProcessName)
+    customAction(
+      ProcessTestData.sampleProcessName,
+      CustomActionRequest(ScenarioActionName("not-implemented"))
+    ) ~> check {
+      status shouldBe StatusCodes.NotImplemented
+      responseAs[String] shouldBe "an implementation is missing"
+    }
+  }
+
+  test("execute custom action with not allowed process status") {
+    createEmptyProcess(ProcessTestData.sampleProcessName)
+    customAction(
+      ProcessTestData.sampleProcessName,
+      CustomActionRequest(ScenarioActionName("invalid-status"))
+    ) ~> check {
+      // TODO: "conflict" is coherent with "canceled process can't be canceled again" above, consider changing to Forbidden
+      status shouldBe StatusCodes.Conflict
+      responseAs[String] 
shouldBe "Action: invalid-status is not allowed in scenario (fooProcess) state: NOT_DEPLOYED, allowed actions: not-implemented,ARCHIVE,DEPLOY,RENAME,hello."
+    }
+  }
+
+  test("should return 403 when execute custom action on archived process") {
+    createArchivedProcess(ProcessTestData.sampleProcessName)
+    customAction(
+      ProcessTestData.sampleProcessName,
+      CustomActionRequest(ScenarioActionName("hello"))
+    ) ~> check {
+      // TODO: "conflict" is coherent with "can't deploy fragment" above, consider changing to Forbidden
+      status shouldBe StatusCodes.Conflict
+    }
+  }
+
+  test("should return 403 when execute custom action on fragment") {
+    createEmptyProcess(ProcessTestData.sampleProcessName, isFragment = true)
+    customAction(
+      ProcessTestData.sampleProcessName,
+      CustomActionRequest(ScenarioActionName("hello"))
+    ) ~> check {
+      // TODO: "conflict" is coherent with "can't deploy fragment" above, consider changing to Forbidden
+      status shouldBe StatusCodes.Conflict
+    }
+  }
+
+  def decodeDetails: ScenarioWithDetails = responseAs[ScenarioWithDetails]
+
+  def checkThatEventually[T](body: => T): RouteTestResult => T = check(eventually(body))
+
+}
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/integration/ConfigurationTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/integration/ConfigurationTest.scala
index f6e9b1a5178..5631ea406fc 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/integration/ConfigurationTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/integration/ConfigurationTest.scala
@@ -19,8 +19,7 @@ class ConfigurationTest extends AnyFunSuite with Matchers {
 
   private def modelData: ModelData = ModelData(
     ProcessingTypeConfig.read(ConfigWithScalaVersion.StreamingProcessTypeConfig),
-    TestFactory.modelDependencies,
-    ??? 
+ TestFactory.modelDependencies ) private lazy val modelDataConfig = modelData.modelConfig diff --git a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerProviderHelper.scala b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerProviderHelper.scala index 1aba663121e..c0ed55ff9d5 100644 --- a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerProviderHelper.scala +++ b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerProviderHelper.scala @@ -7,8 +7,7 @@ import pl.touk.nussknacker.engine.api.deployment.{ DeploymentManager, NoOpScenarioActivityManager, ProcessingTypeActionServiceStub, - ProcessingTypeDeployedScenariosProviderStub, - ScenarioActivityManager + ProcessingTypeDeployedScenariosProviderStub } import pl.touk.nussknacker.engine.definition.component.Components.ComponentDefinitionExtractionMode import pl.touk.nussknacker.engine.management.FlinkStreamingDeploymentManagerProvider @@ -35,8 +34,7 @@ object FlinkStreamingDeploymentManagerProviderHelper { workingDirectoryOpt = None, _ => true, ComponentDefinitionExtractionMode.FinalDefinition - ), - ??? 
+ ) ) val actorSystem = ActorSystem("FlinkStreamingDeploymentManagerProviderHelper") val backend = AsyncHttpClientFutureBackend.usingConfig(new DefaultAsyncHttpClientConfig.Builder().build()) diff --git a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerSpec.scala b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerSpec.scala index dde02281551..5872b331cb7 100644 --- a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerSpec.scala +++ b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerSpec.scala @@ -271,8 +271,7 @@ class FlinkStreamingDeploymentManagerSpec extends AnyFunSuite with Matchers with workingDirectoryOpt = None, _ => true, ComponentDefinitionExtractionMode.FinalDefinition - ), - ??? + ) ) val definition = modelData.modelDefinition definition.components.components.map(_.id) should contain(ComponentId(ComponentType.Service, "accountService")) diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala index 56cc9784ffb..cde680cadd4 100644 --- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala +++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala @@ -42,11 +42,9 @@ object ModelData extends LazyLogging { def apply( processingTypeConfig: ProcessingTypeConfig, - dependencies: ModelDependencies, - managersDir: Path + dependencies: ModelDependencies ): ModelData = { - val modelClassLoader = - ModelClassLoader(managersDir, processingTypeConfig.classPath, dependencies.workingDirectoryOpt) + val modelClassLoader = ModelClassLoader(processingTypeConfig.classPath, dependencies.workingDirectoryOpt) ClassLoaderModelData( 
_.resolveInputConfigDuringExecution(processingTypeConfig.modelConfig, modelClassLoader.classLoader), modelClassLoader, diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/extension/CastOrConversionExt.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/extension/CastOrConversionExt.scala index e2264e67e9d..58b78a73981 100644 --- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/extension/CastOrConversionExt.scala +++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/extension/CastOrConversionExt.scala @@ -25,7 +25,7 @@ import java.time.{LocalDate, LocalDateTime, LocalTime, ZoneId, ZoneOffset} import java.util.{Currency, UUID} import scala.util.Try -// todo: lbg - add casting methods to UTIL +// TODO: lbg - add casting methods to UTIL class CastOrConversionExt(classesBySimpleName: Map[String, Class[_]]) { private val castException = new ClassCastException(s"Cannot cast value to given class") diff --git a/utils/schemed-kafka-components-utils/src/main/scala/pl/touk/nussknacker/engine/schemedkafka/schemaregistry/universal/UniversalSchemaBasedSerdeProvider.scala b/utils/schemed-kafka-components-utils/src/main/scala/pl/touk/nussknacker/engine/schemedkafka/schemaregistry/universal/UniversalSchemaBasedSerdeProvider.scala index acb539b703c..dfb0f530d80 100644 --- a/utils/schemed-kafka-components-utils/src/main/scala/pl/touk/nussknacker/engine/schemedkafka/schemaregistry/universal/UniversalSchemaBasedSerdeProvider.scala +++ b/utils/schemed-kafka-components-utils/src/main/scala/pl/touk/nussknacker/engine/schemedkafka/schemaregistry/universal/UniversalSchemaBasedSerdeProvider.scala @@ -43,7 +43,7 @@ object UniversalSchemaBasedSerdeProvider { createSchemaIdFromMessageExtractor(isConfluent, isAzure) } - // SchemaId can be obtain in several ways. Precedent: + // SchemaId can be obtained in several ways. 
Precedent: // * from nu kafka headers - it is our own, Nussknacker headers standard format: key.schemaId and value.schemaId headers // * from azure header - content-type: avro/binary+schemaId (only value schema ids are supported) // * from payload serialized in 'Confluent way' ([magicbyte][schemaid][payload]) diff --git a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala index 4a6abb77ad1..2e81ca5710d 100644 --- a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala +++ b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala @@ -63,14 +63,11 @@ object ModelClassLoader extends LazyLogging { // workingDirectoryOpt is for the purpose of easier testing. We can't easily change the working directory otherwise - see https://stackoverflow.com/a/840229 def apply( - managersDir: Path, urls: List[String], workingDirectoryOpt: Option[Path], jarExtension: String = defaultJarExtension ): ModelClassLoader = { - val postProcessedURLs = expandFiles(urls.map(convertToURL(_, workingDirectoryOpt)), jarExtension).toList ::: - managersDir.toUri.toURL.expandFiles(".jar") - + val postProcessedURLs = expandFiles(urls.map(convertToURL(_, workingDirectoryOpt)), jarExtension).toList ModelClassLoader( new URLClassLoader(postProcessedURLs.toArray, this.getClass.getClassLoader), postProcessedURLs.toList diff --git a/utils/utils-internal/src/test/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoaderSpec.scala b/utils/utils-internal/src/test/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoaderSpec.scala index 6aff9c5ca4d..b6caffde94d 100644 --- a/utils/utils-internal/src/test/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoaderSpec.scala +++ 
b/utils/utils-internal/src/test/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoaderSpec.scala @@ -14,7 +14,7 @@ class ModelClassLoaderSpec extends AnyFunSuite with Matchers { val urls = List(resource(""), nonFileUrl) - val loader = ModelClassLoader(???, urls.map(_.toURI.toString), workingDirectoryOpt = None, jarExtension = ".jara") + val loader = ModelClassLoader(urls.map(_.toURI.toString), workingDirectoryOpt = None, jarExtension = ".jara") // we're not using .jar to avoid messing with .gitignore val expected = Set( @@ -31,7 +31,6 @@ class ModelClassLoaderSpec extends AnyFunSuite with Matchers { test("should resolve classpath using working directory when defined") { val loader = ModelClassLoader( - ???, List("relative/path", "/absolute/path"), workingDirectoryOpt = Some(Path.of("/some/working/directory")) ) From 679ba89a911c47022896c3e148e494e06ad19ad3 Mon Sep 17 00:00:00 2001 From: MK Software Date: Fri, 13 Dec 2024 17:36:22 +0100 Subject: [PATCH 07/73] wip --- .../test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala index e79c9aad00a..1170543d034 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala @@ -43,7 +43,7 @@ trait NuItTest extends WithHsqlDbTesting with DefaultUniquePortProvider with Wit designerConfig .withValue("db", testDbConfig.getConfig("db").root()) .withValue("http.port", fromAnyRef(port)) - .withValue("managersDir", fromAnyRef("designer/server/work/managers")) + .withValue("managersDir", fromAnyRef("designer/server/work")) } } From bad1538594ca8a0ce0468e93398ec8294d5ffbb4 Mon Sep 17 00:00:00 2001 From: MK Software Date: Fri, 13 Dec 2024 22:28:14 +0100 Subject: [PATCH 08/73] wip --- 
build.sbt | 48 +++++++++---------- .../main/resources/defaultDesignerConfig.conf | 4 +- .../nussknacker/ui/NussknackerConfig.scala | 21 +++++--- ...sConfigBasedProcessingTypeDataLoader.scala | 8 ++-- .../resources/config/common-designer.conf | 6 +++ .../nussknacker/test/base/it/NuItTest.scala | 1 - .../src/universal/conf/dev-application.conf | 2 + .../ConfigWithUnresolvedVersionExt.scala | 4 +- 8 files changed, 54 insertions(+), 40 deletions(-) diff --git a/build.sbt b/build.sbt index e9eb33dc08a..4b57151bbda 100644 --- a/build.sbt +++ b/build.sbt @@ -628,15 +628,15 @@ lazy val flinkDeploymentManager = (project in flink("management")) IntegrationTest / parallelExecution := false, libraryDependencies ++= { Seq( - "org.typelevel" %% "cats-core" % catsV % Provided, - "org.apache.flink" % "flink-streaming-java" % flinkV excludeAll ( - ExclusionRule("log4j", "log4j"), - ExclusionRule("org.slf4j", "slf4j-log4j12"), - ExclusionRule("com.esotericsoftware", "kryo-shaded"), - ), - "com.softwaremill.retry" %% "retry" % retryV, - "org.wiremock" % "wiremock" % wireMockV % Test, - "org.scalatestplus" %% "mockito-5-10" % scalaTestPlusV % Test, + "org.typelevel" %% "cats-core" % catsV % Provided, + "org.apache.flink" % "flink-streaming-java" % flinkV excludeAll ( + ExclusionRule("log4j", "log4j"), + ExclusionRule("org.slf4j", "slf4j-log4j12"), + ExclusionRule("com.esotericsoftware", "kryo-shaded"), + ), + "com.softwaremill.retry" %% "retry" % retryV, + "org.wiremock" % "wiremock" % wireMockV % Test, + "org.scalatestplus" %% "mockito-5-10" % scalaTestPlusV % Test, ) ++ flinkLibScalaDeps(scalaVersion.value) }, // override scala-collection-compat from com.softwaremill.retry:retry @@ -1947,6 +1947,8 @@ lazy val deploymentManagerApi = (project in file("designer/deployment-manager-ap ) .dependsOn(extensionsApi, testUtils % Test) +lazy val prepareDesignerTests = taskKey[Unit]("Prepare all necessary artifacts before running designer module tests") + lazy val designer = (project in 
file("designer/server")) .configs(SlowTests) .enablePlugins(GenerateDesignerOpenApiPlugin) @@ -1977,22 +1979,20 @@ lazy val designer = (project in file("designer/server")) CopyOptions.apply(overwrite = true, preserveLastModified = true, preserveExecutable = false) ) }, + prepareDesignerTests := { + (flinkDeploymentManager / assembly).value + (liteEmbeddedDeploymentManager / assembly).value + (liteK8sDeploymentManager / assembly).value + (defaultModel / assembly).value + (flinkTableApiComponents / assembly).value + (flinkDevModel / assembly).value + (flinkExecutor / assembly).value + (flinkExecutor / prepareItLibs).value + }, ThisBuild / parallelExecution := false, - SlowTests / test := (SlowTests / test) - .dependsOn( - flinkDevModel / Compile / assembly, - flinkExecutor / Compile / assembly - ) - .value, - Test / test := (Test / test) - .dependsOn( - defaultModel / Compile / assembly, - flinkTableApiComponents / Compile / assembly, - flinkDevModel / Compile / assembly, - flinkExecutor / Compile / assembly, - flinkExecutor / prepareItLibs - ) - .value, + SlowTests / test := (SlowTests / test).dependsOn(prepareDesignerTests).value, + Test / test := (Test / test).dependsOn(prepareDesignerTests).value, + Test / testOptions += Tests.Setup(() => prepareDesignerTests.value), // todo: /* We depend on copyClientDist in packageBin and assembly to be make sure FE files will be included in jar and fajar diff --git a/designer/server/src/main/resources/defaultDesignerConfig.conf b/designer/server/src/main/resources/defaultDesignerConfig.conf index 123c4ffa792..bc94ee04729 100644 --- a/designer/server/src/main/resources/defaultDesignerConfig.conf +++ b/designer/server/src/main/resources/defaultDesignerConfig.conf @@ -2,8 +2,8 @@ # This configuration file contains sensible designer defaults for all Nussknacker deployments, without assumptions about deployment # models and external tools (grafana, flink etc.). 
All models configurations also shouldn't be in this file -managersDir: ./managers -managersDir: ${?MANAGERS_DIR} +managersDirs: ["./managers"] +managersDirs: [ ${?MANAGERS_DIR} ] storageDir: ./storage storageDir: ${?STORAGE_DIR} diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/NussknackerConfig.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/NussknackerConfig.scala index 50b72ae62b7..72b40955662 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/NussknackerConfig.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/NussknackerConfig.scala @@ -23,16 +23,23 @@ trait NussknackerConfig { } } - final def managersDir(): IO[Path] = { + final def managersDirs(): IO[List[Path]] = { loadApplicationConfig() .map { config => - config.readSafeString("managersDir") match { - case Some(managersDirStr) => - val managersDir = Paths.get(managersDirStr.convertToURL().toURI) - if (Files.isDirectory(managersDir)) managersDir - else throw ConfigurationMalformedException(s"No '$managersDirStr' directory found") + config.readStringList("managersDirs") match { + case Some(managersDirs) => + val paths = managersDirs.map(_.convertToURL().toURI).map(Paths.get) + val invalidPaths = paths + .map(p => (p, !Files.isDirectory(p))) + .collect { case (p, true) => p } + + if (invalidPaths.isEmpty) paths + else + throw ConfigurationMalformedException( + s"Cannot find the following directories: ${invalidPaths.mkString(", ")}" + ) case None => - throw ConfigurationMalformedException(s"No 'managersDir' configuration path found") + throw ConfigurationMalformedException(s"No 'managersDirs' configuration path found") } } } diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala index 7e2bf3305a5..88d095a0e44 
100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala @@ -24,14 +24,14 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConf getDeploymentManagerDependencies: ProcessingType => DeploymentManagerDependencies, ): IO[ProcessingTypeDataState[ProcessingTypeData, CombinedProcessingTypeData]] = { for { - managersDir <- config.managersDir() + managersDirs <- config.managersDirs() processingTypesConfig <- config.loadProcessingTypeConfigs() } yield { // This step with splitting DeploymentManagerProvider loading for all processing types // and after that creating ProcessingTypeData is done because of the deduplication of deployments // See DeploymentManagerProvider.engineSetupIdentity val providerWithNameInputData = processingTypesConfig.mapValuesNow { processingTypeConfig => - val provider = createDeploymentManagerProvider(processingTypeConfig, managersDir) + val provider = createDeploymentManagerProvider(processingTypeConfig, managersDirs) val nameInputData = EngineNameInputData( provider.defaultEngineSetupName, provider.engineSetupIdentity(processingTypeConfig.deploymentConfig), @@ -73,10 +73,10 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConf private def createDeploymentManagerProvider( typeConfig: ProcessingTypeConfig, - managersDir: Path, + managersDirs: List[Path], ): DeploymentManagerProvider = { val managersClassLoader = - new URLClassLoader(managersDir.toUri.toURL.expandFiles(".jar"), this.getClass.getClassLoader) + new URLClassLoader(managersDirs.flatMap(_.toUri.toURL.expandFiles(".jar")), this.getClass.getClassLoader) ScalaServiceLoader.loadNamed[DeploymentManagerProvider]( typeConfig.deploymentManagerType, managersClassLoader diff --git 
a/designer/server/src/test/resources/config/common-designer.conf b/designer/server/src/test/resources/config/common-designer.conf index 508a1300a82..f44971cae0e 100644 --- a/designer/server/src/test/resources/config/common-designer.conf +++ b/designer/server/src/test/resources/config/common-designer.conf @@ -79,3 +79,9 @@ usageStatisticsReports { } repositoryGaugesCacheDuration: 10 seconds + +managersDirs: [ + "engine/lite/embeddedDeploymentManager/target/scala-"${scala.major.version}"/", + "engine/lite/k8sDeploymentManager/target/scala-"${scala.major.version}"/", + "engine/flink/management/target/scala-"${scala.major.version}"/" +] diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala index 1170543d034..49ec79dc4b8 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala @@ -43,7 +43,6 @@ trait NuItTest extends WithHsqlDbTesting with DefaultUniquePortProvider with Wit designerConfig .withValue("db", testDbConfig.getConfig("db").root()) .withValue("http.port", fromAnyRef(port)) - .withValue("managersDir", fromAnyRef("designer/server/work")) } } diff --git a/nussknacker-dist/src/universal/conf/dev-application.conf b/nussknacker-dist/src/universal/conf/dev-application.conf index ddf3c4c27aa..59426c7b65f 100644 --- a/nussknacker-dist/src/universal/conf/dev-application.conf +++ b/nussknacker-dist/src/universal/conf/dev-application.conf @@ -56,6 +56,8 @@ metricsConfig { fragmentPropertiesDocsUrl: "https://nussknacker.io/documentation/docs/scenarios_authoring/Fragments/" +managersDirs: [ "designer/server/work" ] + scenarioTypes { "streaming": { deploymentConfig: ${flinkDeploymentConfig} diff --git a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/config/ConfigWithUnresolvedVersionExt.scala 
b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/config/ConfigWithUnresolvedVersionExt.scala index 200e998633d..bee98da5e15 100644 --- a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/config/ConfigWithUnresolvedVersionExt.scala +++ b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/config/ConfigWithUnresolvedVersionExt.scala @@ -24,9 +24,9 @@ class ConfigWithUnresolvedVersionExt(val config: ConfigWithUnresolvedVersion) { } } - def readSafeString(path: String): Option[String] = { + def readStringList(path: String): Option[List[String]] = { if (config.resolved.hasPath(path)) { - Some(config.resolved.getString(path)) + Some(config.resolved.getStringList(path).asScala.toList) } else { None } From 078ae59564eca638cd057adcae4bc8dba3d3f304 Mon Sep 17 00:00:00 2001 From: MK Software Date: Sat, 14 Dec 2024 10:09:04 +0100 Subject: [PATCH 09/73] cypress tests fix --- designer/client/docker-compose.yml | 1 + nussknacker-dist/src/universal/conf/dev-application.conf | 1 + 2 files changed, 2 insertions(+) diff --git a/designer/client/docker-compose.yml b/designer/client/docker-compose.yml index 95c5739190d..a78c49978c2 100644 --- a/designer/client/docker-compose.yml +++ b/designer/client/docker-compose.yml @@ -11,6 +11,7 @@ services: KAFKA_AUTO_OFFSET_RESET: "earliest" SCHEMA_REGISTRY_URL: http://redpanda:8081 CONFIG_FILE: /opt/nussknacker/conf/dev-application.conf + MANAGERS_DIR: /opt/nussknacker/managers CONFIG_FORCE_usageStatisticsReports_enabled: "false" CONFIG_FORCE_scenarioTypes_streaming__dev_deploymentConfig_type: stub ports: diff --git a/nussknacker-dist/src/universal/conf/dev-application.conf b/nussknacker-dist/src/universal/conf/dev-application.conf index 59426c7b65f..6a909ddce1f 100644 --- a/nussknacker-dist/src/universal/conf/dev-application.conf +++ b/nussknacker-dist/src/universal/conf/dev-application.conf @@ -57,6 +57,7 @@ metricsConfig { fragmentPropertiesDocsUrl: 
"https://nussknacker.io/documentation/docs/scenarios_authoring/Fragments/" managersDirs: [ "designer/server/work" ] +managersDirs: [ ${?MANAGERS_DIR} ] scenarioTypes { "streaming": { From a9e6b27ca3b261da7a794ded73ec46064cef2730 Mon Sep 17 00:00:00 2001 From: MK Software Date: Sat, 14 Dec 2024 18:57:49 +0100 Subject: [PATCH 10/73] fix --- .../ProcessingTypesConfigBasedProcessingTypeDataLoader.scala | 3 +++ docs/installation/Binaries.md | 2 +- nussknacker-dist/src/universal/bin/nussknacker-entrypoint.sh | 1 + nussknacker-dist/src/universal/bin/run.sh | 1 + 4 files changed, 6 insertions(+), 1 deletion(-) diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala index 88d095a0e44..a0bfd856a06 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala @@ -75,6 +75,9 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConf typeConfig: ProcessingTypeConfig, managersDirs: List[Path], ): DeploymentManagerProvider = { + logger.debug( + s"Loading deployment managers from the following locations: ${managersDirs.map(_.toString).mkString(", ")}" + ) val managersClassLoader = new URLClassLoader(managersDirs.flatMap(_.toUri.toURL.expandFiles(".jar")), this.getClass.getClassLoader) ScalaServiceLoader.loadNamed[DeploymentManagerProvider]( diff --git a/docs/installation/Binaries.md b/docs/installation/Binaries.md index 8dba7954a53..62bb79b8853 100644 --- a/docs/installation/Binaries.md +++ b/docs/installation/Binaries.md @@ -49,7 +49,7 @@ We provide following scripts: | 
$NUSSKNACKER_DIR/model/flinkExecutor.jar | | JAR with Flink executor, used by scenarios running on Flink | | $NUSSKNACKER_DIR/components | | Directory with Nussknacker Component Provider JARS | | $NUSSKNACKER_DIR/lib | | Directory with Nussknacker base libraries | -| $NUSSKNACKER_DIR/managers | | Directory with Nussknacker Deployment Managers | +| $NUSSKNACKER_DIR/managers | Configured by MANAGERS_DIR property | Directory with Nussknacker Deployment Managers | ## Logging diff --git a/nussknacker-dist/src/universal/bin/nussknacker-entrypoint.sh b/nussknacker-dist/src/universal/bin/nussknacker-entrypoint.sh index fc916a2180c..167de644c44 100755 --- a/nussknacker-dist/src/universal/bin/nussknacker-entrypoint.sh +++ b/nussknacker-dist/src/universal/bin/nussknacker-entrypoint.sh @@ -21,6 +21,7 @@ WORKING_DIR=${WORKING_DIR:-$NUSSKNACKER_DIR} export AUTHENTICATION_USERS_FILE=${AUTHENTICATION_USERS_FILE:-$CONF_DIR/users.conf} export TABLES_DEFINITION_FILE=${TABLES_DEFINITION_FILE:-$CONF_DIR/dev-tables-definition.sql} export STORAGE_DIR="${STORAGE_DIR:-$WORKING_DIR/storage}" +export MANAGERS_DIR="${MANAGERS_DIR:-$WORKING_DIR/managers}" if [ "$PROMETHEUS_METRICS_PORT" == "" ]; then JAVA_PROMETHEUS_OPTS="" diff --git a/nussknacker-dist/src/universal/bin/run.sh b/nussknacker-dist/src/universal/bin/run.sh index 58c7092fee4..656692efc72 100755 --- a/nussknacker-dist/src/universal/bin/run.sh +++ b/nussknacker-dist/src/universal/bin/run.sh @@ -21,6 +21,7 @@ PID_FILE="$WORKING_DIR/nussknacker-designer.pid" export AUTHENTICATION_USERS_FILE=${AUTHENTICATION_USERS_FILE:-$CONF_DIR/users.conf} export TABLES_DEFINITION_FILE=${TABLES_DEFINITION_FILE:-$CONF_DIR/dev-tables-definition.sql} export STORAGE_DIR=${STORAGE_DIR:-$WORKING_DIR/storage} +export MANAGERS_DIR=${MANAGERS_DIR:-$WORKING_DIR/managers} export FLINK_REST_URL=${FLINK_REST_URL:-http://localhost:8081} export KAFKA_ADDRESS=${KAFKA_ADDRESS:-localhost:9092} From 5a060333e9f88e0786155fec38140457034b5d60 Mon Sep 17 00:00:00 2001 
From: MK Software Date: Sat, 14 Dec 2024 21:14:00 +0100 Subject: [PATCH 11/73] fix --- .../engine/util/loader/ModelClassLoader.scala | 66 +++++++------------ 1 file changed, 25 insertions(+), 41 deletions(-) diff --git a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala index 2e81ca5710d..801406ab15e 100644 --- a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala +++ b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala @@ -1,10 +1,11 @@ package pl.touk.nussknacker.engine.util.loader import com.typesafe.scalalogging.LazyLogging -import pl.touk.nussknacker.engine.util.UrlUtils.ExpandFiles +import pl.touk.nussknacker.engine.util.StringUtils._ +import pl.touk.nussknacker.engine.util.UrlUtils._ import java.io.File -import java.net.{URI, URL, URLClassLoader} +import java.net.{URL, URLClassLoader} import java.nio.file.Path case class ModelClassLoader private (classLoader: ClassLoader, urls: List[URL]) { @@ -22,44 +23,7 @@ case class ModelClassLoader private (classLoader: ClassLoader, urls: List[URL]) object ModelClassLoader extends LazyLogging { // for e.g. 
testing in process module val empty: ModelClassLoader = ModelClassLoader(getClass.getClassLoader, List()) - - val defaultJarExtension = ".jar" - - private def expandFiles(urls: Iterable[URL], jarExtension: String): Iterable[URL] = { - urls.flatMap { - case url if url.getProtocol.toLowerCase == "file" => - val file = new File(url.toURI) - if (file.isDirectory) { - val expanded = - expandFiles(file.listFiles().filterNot(_.getName.startsWith(".")).map(_.toURI.toURL), jarExtension) - if (expanded.isEmpty) { - List.empty - } else if (expanded.exists(_.getFile.endsWith(jarExtension))) { // not expand if nested jars not exists - expanded - } else { - List(url) - } - } else { - List(url) - } - case url => List(url) - } - } - - private def convertToURL(urlString: String, workingDirectoryOpt: Option[Path]): URL = { - val uri = new URI(urlString) - if (uri.isAbsolute) { - uri.toURL - } else { - val pathPart = uri.getSchemeSpecificPart - val path = workingDirectoryOpt.map { workingDirectory => - workingDirectory.resolve(pathPart) - } getOrElse { - Path.of(pathPart) - } - path.toUri.toURL - } - } + val defaultJarExtension = ".jar" // workingDirectoryOpt is for the purpose of easier testing. 
We can't easily change the working directory otherwise - see https://stackoverflow.com/a/840229 def apply( @@ -67,11 +31,31 @@ object ModelClassLoader extends LazyLogging { workingDirectoryOpt: Option[Path], jarExtension: String = defaultJarExtension ): ModelClassLoader = { - val postProcessedURLs = expandFiles(urls.map(convertToURL(_, workingDirectoryOpt)), jarExtension).toList + val postProcessedURLs = validateExistence( + urls.map(_.convertToURL(workingDirectoryOpt)).flatMap(_.expandFiles(jarExtension)) + ) ModelClassLoader( new URLClassLoader(postProcessedURLs.toArray, this.getClass.getClassLoader), postProcessedURLs.toList ) } + private def validateExistence(urls: Iterable[URL]): Iterable[URL] = { + urls.filterNot(url => doesExist(url)).toList match { + case Nil => urls + case notExisted => + throw new IllegalArgumentException(s"The following URLs don't exist: [${notExisted.mkString(",")}]") + } + } + + private def doesExist(url: URL): Boolean = { + url.getProtocol match { + case "file" => + val file = new File(url.toURI) + file.exists() && file.isFile + case _ => + false + } + } + } From b1100617d9af9342a772422f814e93f993c97d41 Mon Sep 17 00:00:00 2001 From: MK Software Date: Sat, 14 Dec 2024 21:42:16 +0100 Subject: [PATCH 12/73] improvements --- .../touk/nussknacker/ui/NussknackerApp.scala | 9 +++--- .../ui/factory/NussknackerAppFactory.scala | 24 +++++++++----- .../DeploymentManagersClassLoader.scala | 31 +++++++++++++++++++ ...sConfigBasedProcessingTypeDataLoader.scala | 27 +++++----------- .../nussknacker/test/base/it/NuItTest.scala | 6 ++-- .../test/base/it/NuResourcesTest.scala | 8 +++-- .../ScenarioParametersServiceTest.scala | 8 +++-- 7 files changed, 75 insertions(+), 38 deletions(-) create mode 100644 designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/DeploymentManagersClassLoader.scala diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/NussknackerApp.scala 
b/designer/server/src/main/scala/pl/touk/nussknacker/ui/NussknackerApp.scala index d99984f966a..11c4896c13e 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/NussknackerApp.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/NussknackerApp.scala @@ -6,10 +6,11 @@ import pl.touk.nussknacker.ui.factory.NussknackerAppFactory object NussknackerApp extends IOApp { override def run(args: List[String]): IO[ExitCode] = { - for { - appFactory <- IO(new NussknackerAppFactory(getClass.getClassLoader)) - _ <- appFactory.createApp().use { _ => IO.never } - } yield ExitCode.Success + val program = for { + appFactory <- NussknackerAppFactory.create(getClass.getClassLoader) + app <- appFactory.createApp() + } yield app + program.useForever.as(ExitCode.Success) } } diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/factory/NussknackerAppFactory.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/factory/NussknackerAppFactory.scala index e9be44d0bcf..f9af8e6a566 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/factory/NussknackerAppFactory.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/factory/NussknackerAppFactory.scala @@ -16,19 +16,27 @@ import pl.touk.nussknacker.ui.server.{AkkaHttpBasedRouteProvider, NussknackerHtt import java.time.Clock -object NussknackerAppFactory +object NussknackerAppFactory { -class NussknackerAppFactory(nussknackerConfig: NussknackerConfig, processingTypeDataLoader: ProcessingTypeDataLoader) - extends LazyLogging { - - def this(nussknackerConfig: NussknackerConfig) = { - this(nussknackerConfig, new ProcessingTypesConfigBasedProcessingTypeDataLoader(nussknackerConfig)) + def create(nussknackerConfig: NussknackerConfig): Resource[IO, NussknackerAppFactory] = { + for { + managersDirs <- Resource.eval(nussknackerConfig.managersDirs()) + deploymentManagerClassLoader <- DeploymentManagersClassLoader.create(managersDirs) + } yield new NussknackerAppFactory( + 
nussknackerConfig, + new ProcessingTypesConfigBasedProcessingTypeDataLoader(nussknackerConfig, deploymentManagerClassLoader) + ) } - def this(classLoader: ClassLoader) = { - this(new LoadableDesignerConfigBasedNussknackerConfig(classLoader)) + def create(classLoader: ClassLoader): Resource[IO, NussknackerAppFactory] = { + create(new LoadableDesignerConfigBasedNussknackerConfig(classLoader)) } +} + +class NussknackerAppFactory(nussknackerConfig: NussknackerConfig, processingTypeDataLoader: ProcessingTypeDataLoader) + extends LazyLogging { + def createApp(clock: Clock = Clock.systemUTC()): Resource[IO, Unit] = { for { config <- Resource.eval(nussknackerConfig.loadApplicationConfig()) diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/DeploymentManagersClassLoader.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/DeploymentManagersClassLoader.scala new file mode 100644 index 00000000000..55d3e3b83c2 --- /dev/null +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/DeploymentManagersClassLoader.scala @@ -0,0 +1,31 @@ +package pl.touk.nussknacker.ui.process.processingtype.loader + +import cats.effect.{IO, Resource} +import com.typesafe.scalalogging.LazyLogging +import pl.touk.nussknacker.engine.util.UrlUtils.ExpandFiles + +import java.net.URL +import java.nio.file.Path +import scala.reflect.internal.util.ScalaClassLoader.URLClassLoader + +object DeploymentManagersClassLoader extends LazyLogging { + + def create(managersDirs: List[Path]): Resource[IO, DeploymentManagersClassLoader] = { + Resource.make( + acquire = IO.delay { + logger.debug( + s"Loading deployment managers from the following locations: ${managersDirs.map(_.toString).mkString(", ")}" + ) + new DeploymentManagersClassLoader( + managersDirs.flatMap(_.toUri.toURL.expandFiles(".jar")), + this.getClass.getClassLoader + ) + } + )( + release = loader => IO.delay(loader.close()) + ) + } + 
+} + +class DeploymentManagersClassLoader private (urls: Seq[URL], parent: ClassLoader) extends URLClassLoader(urls, parent) diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala index a0bfd856a06..12b7649508b 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala @@ -10,13 +10,11 @@ import pl.touk.nussknacker.ui.NussknackerConfig import pl.touk.nussknacker.ui.process.processingtype._ import pl.touk.nussknacker.ui.process.processingtype.loader.ProcessingTypeDataLoader.toValueWithRestriction import pl.touk.nussknacker.ui.process.processingtype.provider.ProcessingTypeDataState -import pl.touk.nussknacker.engine.util.UrlUtils._ -import java.nio.file.Path -import scala.reflect.internal.util.ScalaClassLoader.URLClassLoader - -class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConfig) - extends ProcessingTypeDataLoader +class ProcessingTypesConfigBasedProcessingTypeDataLoader( + config: NussknackerConfig, + deploymentManagersClassLoader: DeploymentManagersClassLoader +) extends ProcessingTypeDataLoader with LazyLogging { override def loadProcessingTypeData( @@ -24,14 +22,13 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConf getDeploymentManagerDependencies: ProcessingType => DeploymentManagerDependencies, ): IO[ProcessingTypeDataState[ProcessingTypeData, CombinedProcessingTypeData]] = { for { - managersDirs <- config.managersDirs() processingTypesConfig <- config.loadProcessingTypeConfigs() } yield { // This step with splitting 
DeploymentManagerProvider loading for all processing types // and after that creating ProcessingTypeData is done because of the deduplication of deployments // See DeploymentManagerProvider.engineSetupIdentity val providerWithNameInputData = processingTypesConfig.mapValuesNow { processingTypeConfig => - val provider = createDeploymentManagerProvider(processingTypeConfig, managersDirs) + val provider = createDeploymentManagerProvider(processingTypeConfig) val nameInputData = EngineNameInputData( provider.defaultEngineSetupName, provider.engineSetupIdentity(processingTypeConfig.deploymentConfig), @@ -71,19 +68,11 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(config: NussknackerConf } } - private def createDeploymentManagerProvider( - typeConfig: ProcessingTypeConfig, - managersDirs: List[Path], - ): DeploymentManagerProvider = { - logger.debug( - s"Loading deployment managers from the following locations: ${managersDirs.map(_.toString).mkString(", ")}" - ) - val managersClassLoader = - new URLClassLoader(managersDirs.flatMap(_.toUri.toURL.expandFiles(".jar")), this.getClass.getClassLoader) + private def createDeploymentManagerProvider(typeConfig: ProcessingTypeConfig): DeploymentManagerProvider = { ScalaServiceLoader.loadNamed[DeploymentManagerProvider]( typeConfig.deploymentManagerType, - managersClassLoader - ) // todo: close + deploymentManagersClassLoader + ) } } diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala index 49ec79dc4b8..a3470489523 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuItTest.scala @@ -11,7 +11,6 @@ import pl.touk.nussknacker.test.config.WithDesignerConfig import pl.touk.nussknacker.ui.LoadableConfigBasedNussknackerConfig import pl.touk.nussknacker.ui.config.DesignerConfigLoader import 
pl.touk.nussknacker.ui.factory.NussknackerAppFactory -import pl.touk.nussknacker.ui.process.processingtype.loader._ trait NuItTest extends WithHsqlDbTesting with DefaultUniquePortProvider with WithClock with BeforeAndAfterAll { this: Suite with WithDesignerConfig => @@ -27,8 +26,9 @@ trait NuItTest extends WithHsqlDbTesting with DefaultUniquePortProvider with Wit val nussknackerConfig = new LoadableConfigBasedNussknackerConfig( IO.delay(DesignerConfigLoader.from(adjustNuTestConfig())) ) - releaseAppResources = new NussknackerAppFactory(nussknackerConfig) - .createApp(clock = clock) + releaseAppResources = NussknackerAppFactory + .create(nussknackerConfig) + .flatMap(_.createApp(clock)) .allocated .unsafeRunSync() ._2 diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala index cde22f36654..758f8578222 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala @@ -48,7 +48,10 @@ import pl.touk.nussknacker.ui.process.deployment._ import pl.touk.nussknacker.ui.process.fragment.DefaultFragmentRepository import pl.touk.nussknacker.ui.process.marshall.CanonicalProcessConverter import pl.touk.nussknacker.ui.process.processingtype._ -import pl.touk.nussknacker.ui.process.processingtype.loader.ProcessingTypesConfigBasedProcessingTypeDataLoader +import pl.touk.nussknacker.ui.process.processingtype.loader.{ + DeploymentManagersClassLoader, + ProcessingTypesConfigBasedProcessingTypeDataLoader +} import pl.touk.nussknacker.ui.process.processingtype.provider.ProcessingTypeDataProvider import pl.touk.nussknacker.ui.process.repository.ProcessRepository.CreateProcessAction import pl.touk.nussknacker.ui.process.repository._ @@ -148,7 +151,8 @@ trait NuResourcesTest protected val typeToConfig: 
ProcessingTypeDataProvider[ProcessingTypeData, CombinedProcessingTypeData] = { val processingTypeDataReader = new ProcessingTypesConfigBasedProcessingTypeDataLoader( - new LoadableConfigBasedNussknackerConfig(IO.pure(ConfigWithUnresolvedVersion(testConfig))) + new LoadableConfigBasedNussknackerConfig(IO.pure(ConfigWithUnresolvedVersion(testConfig))), + DeploymentManagersClassLoader.create(List.empty).allocated.map(_._1).unsafeRunSync() ) ProcessingTypeDataProvider( processingTypeDataReader diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ScenarioParametersServiceTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ScenarioParametersServiceTest.scala index d0502b2815c..fa3cb1d07de 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ScenarioParametersServiceTest.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ScenarioParametersServiceTest.scala @@ -16,7 +16,10 @@ import pl.touk.nussknacker.restmodel.scenariodetails.ScenarioParameters import pl.touk.nussknacker.security.Permission import pl.touk.nussknacker.test.ValidatedValuesDetailedMessage import pl.touk.nussknacker.test.utils.domain.TestFactory -import pl.touk.nussknacker.ui.process.processingtype.loader.ProcessingTypesConfigBasedProcessingTypeDataLoader +import pl.touk.nussknacker.ui.process.processingtype.loader.{ + DeploymentManagersClassLoader, + ProcessingTypesConfigBasedProcessingTypeDataLoader +} import pl.touk.nussknacker.ui.security.api.{LoggedUser, RealLoggedUser} import cats.effect.unsafe.implicits.global import pl.touk.nussknacker.engine.definition.component.Components.ComponentDefinitionExtractionMode @@ -284,7 +287,8 @@ class ScenarioParametersServiceTest val processingTypeDataReader = new ProcessingTypesConfigBasedProcessingTypeDataLoader( new LoadableConfigBasedNussknackerConfig(IO.pure { 
ConfigWithUnresolvedVersion(ConfigFactory.parseFile(devApplicationConfFile).withFallback(fallbackConfig)) - }) + }), + DeploymentManagersClassLoader.create(List.empty).allocated.map(_._1).unsafeRunSync() ) val processingTypeData = processingTypeDataReader From ad6f1f0f15617fdd059ca1f0cccfbec5d9193069 Mon Sep 17 00:00:00 2001 From: MK Software Date: Sat, 14 Dec 2024 22:07:26 +0100 Subject: [PATCH 13/73] cleanup --- build.sbt | 15 +-------------- .../ui/api/description/NodesApiEndpoints.scala | 2 +- .../ui/statistics/RawFEStatistics.scala | 2 +- .../NuDesignerApiAvailableToExposeYamlSpec.scala | 2 +- .../StatisticsApiHttpServiceSecuritySpec.scala | 4 ++-- .../process/SampleComponentProviderTest.scala | 4 ++-- .../periodic/PeriodicDeploymentManager.scala | 4 ++-- .../engine/api/deployment/ProcessAction.scala | 2 +- .../pl/touk/nussknacker/engine/ModelData.scala | 2 -- .../util/loader/ProcessConfigCreatorLoader.scala | 1 - 10 files changed, 11 insertions(+), 27 deletions(-) diff --git a/build.sbt b/build.sbt index 4b57151bbda..8ea611e3273 100644 --- a/build.sbt +++ b/build.sbt @@ -1993,7 +1993,6 @@ lazy val designer = (project in file("designer/server")) SlowTests / test := (SlowTests / test).dependsOn(prepareDesignerTests).value, Test / test := (Test / test).dependsOn(prepareDesignerTests).value, Test / testOptions += Tests.Setup(() => prepareDesignerTests.value), - // todo: /* We depend on copyClientDist in packageBin and assembly to be make sure FE files will be included in jar and fajar We abuse sbt a little bit, but we don't want to put webpack in generate resources phase, as it's long and it would @@ -2085,19 +2084,7 @@ lazy val designer = (project in file("designer/server")) flinkTestUtils % Test, developmentTestsDeploymentManager % Test, kafkaComponentsUtils % Test, - componentsApi % "test->test", - // All DeploymentManager dependencies are added because they are needed to run NussknackerApp* with - // dev-application.conf. 
Currently, we don't have a separate classpath for DMs like we have for components. - // schemedKafkaComponentsUtils is added because loading the provided liteEmbeddedDeploymentManager causes - // that are also load added their test dependencies on the classpath by the Idea. It causes that - // UniversalKafkaSourceFactory is loaded from app classloader and GenericRecord which is defined in typesToExtract - // is missing from this classloader -// flinkDeploymentManager % Provided, // todo: remove -// liteEmbeddedDeploymentManager % Test, -// liteK8sDeploymentManager % Provided, -// developmentTestsDeploymentManager % Provided, -// flinkPeriodicDeploymentManager % Provided, -// schemedKafkaComponentsUtils % Provided, + componentsApi % "test->test" ) lazy val e2eTests = (project in file("e2e-tests")) diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/description/NodesApiEndpoints.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/description/NodesApiEndpoints.scala index cedbe843136..4323186b693 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/description/NodesApiEndpoints.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/description/NodesApiEndpoints.scala @@ -1501,7 +1501,7 @@ object NodesApiEndpoints { } -object TypingDtoSchemas { // todo +object TypingDtoSchemas { // TODO: import pl.touk.nussknacker.engine.api.typed.typing._ import sttp.tapir.Schema.SName diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/statistics/RawFEStatistics.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/statistics/RawFEStatistics.scala index 417849e6451..81973a806b7 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/statistics/RawFEStatistics.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/statistics/RawFEStatistics.scala @@ -9,7 +9,7 @@ object RawFEStatistics { def apply(request: RegisterStatisticsRequestDto): RawFEStatistics = new RawFEStatistics( - 
// todo change to groupMapReduce in scala 2.13 + // TODO: change to groupMapReduce in scala 2.13 raw = request.statistics .groupBy(_.name.shortName) .map { case (k, v) => diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NuDesignerApiAvailableToExposeYamlSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NuDesignerApiAvailableToExposeYamlSpec.scala index 3580f463aba..a18e961459f 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NuDesignerApiAvailableToExposeYamlSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NuDesignerApiAvailableToExposeYamlSpec.scala @@ -33,7 +33,7 @@ class NuDesignerApiAvailableToExposeYamlSpec extends AnyFunSuite with Matchers { val examplesValidationResult = OpenAPIExamplesValidator.forTapir.validateExamples( specYaml = generatedSpec, excludeResponseValidationForOperationIds = List( - "getApiProcessesScenarionameActivityActivities" // todo NU-1772: responses contain discriminator, it is not properly handled by validator + "getApiProcessesScenarionameActivityActivities" // TODO: NU-1772: responses contain discriminator, it is not properly handled by validator ) ) val clue = examplesValidationResult diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/StatisticsApiHttpServiceSecuritySpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/StatisticsApiHttpServiceSecuritySpec.scala index c95d2b45487..f24069b3f8b 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/StatisticsApiHttpServiceSecuritySpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/StatisticsApiHttpServiceSecuritySpec.scala @@ -41,7 +41,7 @@ class StatisticsApiHttpServiceSecuritySpec } } - // todo what about anonymous user + // TODO: what about anonymous user "not authenticated should" - { "forbid access" in { given() @@ -69,7 +69,7 @@ class StatisticsApiHttpServiceSecuritySpec } } - // todo what about anonymous user + // 
TODO: what about anonymous user "not authenticated should" - { "forbid access" in { given() diff --git a/engine/flink/management/dev-model/src/test/scala/pl/touk/nussknacker/engine/process/SampleComponentProviderTest.scala b/engine/flink/management/dev-model/src/test/scala/pl/touk/nussknacker/engine/process/SampleComponentProviderTest.scala index 9da6994ea9b..4ffc6862c7c 100644 --- a/engine/flink/management/dev-model/src/test/scala/pl/touk/nussknacker/engine/process/SampleComponentProviderTest.scala +++ b/engine/flink/management/dev-model/src/test/scala/pl/touk/nussknacker/engine/process/SampleComponentProviderTest.scala @@ -1,6 +1,6 @@ package pl.touk.nussknacker.engine.process -import com.typesafe.config.ConfigFactory +import com.typesafe.config.{Config, ConfigFactory} import org.scalatest.funsuite.AnyFunSuite import org.scalatest.matchers.should.Matchers import pl.touk.nussknacker.engine.api.component.DesignerWideComponentId @@ -16,7 +16,7 @@ import pl.touk.nussknacker.engine.{ClassLoaderModelData, ConfigWithUnresolvedVer class SampleComponentProviderTest extends AnyFunSuite with FlinkSpec with Matchers { - override protected lazy val config = ConfigFactory.empty() + override protected lazy val config: Config = ConfigFactory.empty() test("detects component service") { val process = diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicDeploymentManager.scala b/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicDeploymentManager.scala index 2f510a8de42..e823786a451 100644 --- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicDeploymentManager.scala +++ b/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicDeploymentManager.scala @@ -231,12 +231,12 @@ class PeriodicDeploymentManager private[periodic] ( override def customActionsDefinitions: 
List[CustomActionDefinition] = List.empty - // TODO We don't handle deployment synchronization on periodic DM because it currently uses it's own deployments and + // TODO: We don't handle deployment synchronization on periodic DM because it currently uses it's own deployments and // its statuses synchronization mechanism (see PeriodicProcessService.synchronizeDeploymentsStates) // We should move periodic mechanism to the core and reuse new synchronization mechanism also in this case. override def deploymentSynchronisationSupport: DeploymentSynchronisationSupport = NoDeploymentSynchronisationSupport - // todo NU-1772 + // TODO: NU-1772 // In the current implementation: // - PeriodicDeploymentManager is a kind of plugin, and it has its own data source (separate db) // - PeriodicDeploymentManager returns (by implementing ManagerSpecificScenarioActivitiesStoredByManager) custom ScenarioActivities, that are associated with operations performed internally by the manager diff --git a/extensions-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/ProcessAction.scala b/extensions-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/ProcessAction.scala index 51cf6ca0042..35aa5c1cf0c 100644 --- a/extensions-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/ProcessAction.scala +++ b/extensions-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/ProcessAction.scala @@ -9,7 +9,7 @@ import pl.touk.nussknacker.engine.api.process.{ProcessId, VersionId} import java.time.Instant import java.util.UUID -// todo NU-1772 +// TODO: NU-1772 // - should be eventually replaced with pl.touk.nussknacker.engine.api.deployment.ScenarioActivity // - this class is currently a compatibility layer for older fragments of code, new code should use ScenarioActivity @JsonCodec case class ProcessAction( diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala 
index cde680cadd4..7b7ff4d292d 100644 --- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala +++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala @@ -128,8 +128,6 @@ case class ClassLoaderModelData private ( override val additionalConfigsFromProvider: Map[DesignerWideComponentId, ComponentAdditionalConfig], // This property is for easier testing when for some reason, some jars with ComponentProvider are // on the test classpath and CPs collide with other once with the same name. - // E.g. we add liteEmbeddedDeploymentManager as a designer provided dependency which also - // add liteKafkaComponents (which are in test scope), see comment next to designer module shouldIncludeConfigCreator: ProcessConfigCreator => Boolean, shouldIncludeComponentProvider: ComponentProvider => Boolean, componentDefinitionExtractionMode: ComponentDefinitionExtractionMode, diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/util/loader/ProcessConfigCreatorLoader.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/util/loader/ProcessConfigCreatorLoader.scala index 9930a28206e..e626ec79083 100644 --- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/util/loader/ProcessConfigCreatorLoader.scala +++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/util/loader/ProcessConfigCreatorLoader.scala @@ -13,7 +13,6 @@ class ProcessConfigCreatorLoader(shouldIncludeConfigCreator: ProcessConfigCreato override val prettyClassName: String = "ProcessConfigCreator" override def loadAll(classLoader: ClassLoader): List[SPCC] = { - // todo: ScalaServiceLoader.load[SPCC](classLoader).filter(shouldIncludeConfigCreator) ++ ScalaServiceLoader .load[JPCC](classLoader) From 7d285f458a440945adbbd1f8ff8262fab5e549ce Mon Sep 17 00:00:00 2001 From: MK Software Date: Sun, 15 Dec 2024 10:51:45 +0100 Subject: [PATCH 14/73] fix --- .../engine/util/loader/ModelClassLoader.scala | 25 ++----------------- 1 file 
changed, 2 insertions(+), 23 deletions(-) diff --git a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala index 801406ab15e..cc5ab995d1d 100644 --- a/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala +++ b/utils/utils-internal/src/main/scala/pl/touk/nussknacker/engine/util/loader/ModelClassLoader.scala @@ -4,7 +4,6 @@ import com.typesafe.scalalogging.LazyLogging import pl.touk.nussknacker.engine.util.StringUtils._ import pl.touk.nussknacker.engine.util.UrlUtils._ -import java.io.File import java.net.{URL, URLClassLoader} import java.nio.file.Path @@ -31,31 +30,11 @@ object ModelClassLoader extends LazyLogging { workingDirectoryOpt: Option[Path], jarExtension: String = defaultJarExtension ): ModelClassLoader = { - val postProcessedURLs = validateExistence( - urls.map(_.convertToURL(workingDirectoryOpt)).flatMap(_.expandFiles(jarExtension)) - ) + val postProcessedURLs = urls.map(_.convertToURL(workingDirectoryOpt)).flatMap(_.expandFiles(jarExtension)) ModelClassLoader( new URLClassLoader(postProcessedURLs.toArray, this.getClass.getClassLoader), - postProcessedURLs.toList + postProcessedURLs ) } - private def validateExistence(urls: Iterable[URL]): Iterable[URL] = { - urls.filterNot(url => doesExist(url)).toList match { - case Nil => urls - case notExisted => - throw new IllegalArgumentException(s"The following URLs don't exist: [${notExisted.mkString(",")}]") - } - } - - private def doesExist(url: URL): Boolean = { - url.getProtocol match { - case "file" => - val file = new File(url.toURI) - file.exists() && file.isFile - case _ => - false - } - } - } From 807e1b9456385ad2d11e9879a2729758fb278fcc Mon Sep 17 00:00:00 2001 From: MK Software Date: Mon, 16 Dec 2024 09:57:47 +0100 Subject: [PATCH 15/73] fix --- build.sbt | 1 + docs/Changelog.md | 1 + docs/MigrationGuide.md | 1 + 3 
files changed, 3 insertions(+) diff --git a/build.sbt b/build.sbt index 8ea611e3273..418dc872366 100644 --- a/build.sbt +++ b/build.sbt @@ -114,6 +114,7 @@ def designerMergeStrategy: String => MergeStrategy = { // https://tapir.softwaremill.com/en/latest/docs/openapi.html#using-swaggerui-with-sbt-assembly case PathList("META-INF", "maven", "org.webjars", "swagger-ui", "pom.properties") => MergeStrategy.singleOrError + // related to flink netty shaded libs case PathList( "META-INF", "native-image", diff --git a/docs/Changelog.md b/docs/Changelog.md index 7e0ae403204..576a00ef356 100644 --- a/docs/Changelog.md +++ b/docs/Changelog.md @@ -27,6 +27,7 @@ * [#7184](https://github.com/TouK/nussknacker/pull/7184) Improve Nu Designer API notifications endpoint, to include events related to currently displayed scenario * [#7323](https://github.com/TouK/nussknacker/pull/7323) Improve Periodic DeploymentManager db queries * [#7332](https://github.com/TouK/nussknacker/pull/7332) Handle scenario names with spaces when performing migration tests, they were ignored +* [#7335](https://github.com/TouK/nussknacker/pull/7335) introduced `managersDirs` config to configure deployment managers directory paths (you can use `MANAGERS_DIR` env in case of docker-based deployments). The default is `./managers`. ## 1.18 diff --git a/docs/MigrationGuide.md b/docs/MigrationGuide.md index 46857f82e8b..c92b0f57f34 100644 --- a/docs/MigrationGuide.md +++ b/docs/MigrationGuide.md @@ -26,6 +26,7 @@ To see the biggest differences please consult the [changelog](Changelog.md). 
* `def actionTooltips(processStatus: ProcessStatus): Map[ScenarioActionName, String]` - allows to define custom tooltips for actions, if not defined the default is still used * modified method: * `def statusActions(processStatus: ProcessStatus): List[ScenarioActionName]` - changed argument, to include information about latest and deployed versions +* [#7335](https://github.com/TouK/nussknacker/pull/7335) Deployment managers are loaded using separate class loader (not the Application ClassLoader - `/opt/nussknacker/managers/*` should be removed from CLASSPATH definition). The default location for deployment managers jars is the `managers` folder inside the working directory. ## In version 1.18.0 From 9a50dcf61cc080089f0f5401239762764b9cacc2 Mon Sep 17 00:00:00 2001 From: MK Software Date: Mon, 16 Dec 2024 11:18:47 +0100 Subject: [PATCH 16/73] review fix --- .run/NussknackerApp.run.xml | 5 ++-- build.sbt | 27 +++---------------- .../src/universal/conf/dev-application.conf | 2 +- 3 files changed, 8 insertions(+), 26 deletions(-) diff --git a/.run/NussknackerApp.run.xml b/.run/NussknackerApp.run.xml index ea2e8f14d9b..deda8970b3a 100644 --- a/.run/NussknackerApp.run.xml +++ b/.run/NussknackerApp.run.xml @@ -9,15 +9,16 @@ + + + - -