diff --git a/.gitignore b/.gitignore
index 06465313969..9b80a58f1e3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
-.idea/
+.idea/*
+!.idea/icon.svg
target/
*.iml
.*orig
diff --git a/.idea/icon.svg b/.idea/icon.svg
new file mode 100644
index 00000000000..d2b45f3d311
--- /dev/null
+++ b/.idea/icon.svg
@@ -0,0 +1 @@
+
diff --git a/build.sbt b/build.sbt
index 69f705a93b4..e43de9f0c93 100644
--- a/build.sbt
+++ b/build.sbt
@@ -510,8 +510,7 @@ lazy val distribution: Project = sbt
},
devArtifacts := {
modelArtifacts.value ++ List(
- (flinkDevModel / assembly).value -> "model/devModel.jar",
- (flinkPeriodicDeploymentManager / assembly).value -> "managers/nussknacker-flink-periodic-manager.jar",
+ (flinkDevModel / assembly).value -> "model/devModel.jar",
)
},
Universal / packageName := ("nussknacker" + "-" + version.value),
@@ -611,8 +610,8 @@ lazy val flinkDeploymentManager = (project in flink("management"))
libraryDependencies ++= {
Seq(
"org.typelevel" %% "cats-core" % catsV % Provided,
- "org.apache.flink" % "flink-streaming-java" % flinkV % flinkScope
- excludeAll (
+ ("org.apache.flink" % "flink-streaming-java" % flinkV % flinkScope)
+ .excludeAll(
ExclusionRule("log4j", "log4j"),
ExclusionRule("org.slf4j", "slf4j-log4j12"),
ExclusionRule("com.esotericsoftware", "kryo-shaded"),
@@ -637,37 +636,6 @@ lazy val flinkDeploymentManager = (project in flink("management"))
kafkaTestUtils % "it,test"
)
-lazy val flinkPeriodicDeploymentManager = (project in flink("management/periodic"))
- .settings(commonSettings)
- .settings(assemblyNoScala("nussknacker-flink-periodic-manager.jar"): _*)
- .settings(publishAssemblySettings: _*)
- .settings(
- name := "nussknacker-flink-periodic-manager",
- libraryDependencies ++= {
- Seq(
- "org.typelevel" %% "cats-core" % catsV % Provided,
- "com.typesafe.slick" %% "slick" % slickV % Provided,
- "com.typesafe.slick" %% "slick-hikaricp" % slickV % "provided, test",
- "com.github.tminglei" %% "slick-pg" % slickPgV,
- "org.hsqldb" % "hsqldb" % hsqldbV % Test,
- "org.flywaydb" % "flyway-core" % flywayV % Provided,
- "com.cronutils" % "cron-utils" % cronParserV,
- "com.typesafe.akka" %% "akka-actor" % akkaV,
- "com.typesafe.akka" %% "akka-testkit" % akkaV % Test,
- "com.dimafeng" %% "testcontainers-scala-scalatest" % testContainersScalaV % Test,
- "com.dimafeng" %% "testcontainers-scala-postgresql" % testContainersScalaV % Test,
- )
- }
- )
- .dependsOn(
- flinkDeploymentManager,
- deploymentManagerApi % Provided,
- scenarioCompiler % Provided,
- componentsApi % Provided,
- httpUtils % Provided,
- testUtils % Test
- )
-
lazy val flinkMetricsDeferredReporter = (project in flink("metrics-deferred-reporter"))
.settings(commonSettings)
.settings(
@@ -1816,10 +1784,10 @@ lazy val flinkBaseUnboundedComponents = (project in flink("components/base-unbou
.settings(
name := "nussknacker-flink-base-unbounded-components",
libraryDependencies ++= Seq(
- "org.apache.flink" % "flink-streaming-java" % flinkV % Provided,
- "com.clearspring.analytics" % "stream" % "2.9.8"
+ "org.apache.flink" % "flink-streaming-java" % flinkV % Provided,
// It is used only in QDigest which we don't use, while it's >20MB in size...
- exclude ("it.unimi.dsi", "fastutil")
+ ("com.clearspring.analytics" % "stream" % "2.9.8")
+ .exclude("it.unimi.dsi", "fastutil")
)
)
.dependsOn(
@@ -2009,6 +1977,7 @@ lazy val designer = (project in file("designer/server"))
"com.typesafe.akka" %% "akka-testkit" % akkaV % Test,
"de.heikoseeberger" %% "akka-http-circe" % akkaHttpCirceV,
"com.softwaremill.sttp.client3" %% "async-http-client-backend-cats" % sttpV,
+ "com.cronutils" % "cron-utils" % cronParserV,
"ch.qos.logback" % "logback-core" % logbackV,
"ch.qos.logback" % "logback-classic" % logbackV,
"ch.qos.logback.contrib" % "logback-json-classic" % logbackJsonV,
@@ -2031,6 +2000,7 @@ lazy val designer = (project in file("designer/server"))
"com.beachape" %% "enumeratum-circe" % enumeratumV,
"tf.tofu" %% "derevo-circe" % "0.13.0",
"com.softwaremill.sttp.apispec" %% "openapi-circe-yaml" % openapiCirceYamlV,
+ "com.github.tminglei" %% "slick-pg" % slickPgV,
"com.softwaremill.sttp.tapir" %% "tapir-akka-http-server" % tapirV,
"com.softwaremill.sttp.tapir" %% "tapir-core" % tapirV,
"com.softwaremill.sttp.tapir" %% "tapir-derevo" % tapirV,
@@ -2085,7 +2055,6 @@ lazy val designer = (project in file("designer/server"))
liteEmbeddedDeploymentManager % Provided,
liteK8sDeploymentManager % Provided,
developmentTestsDeploymentManager % Provided,
- flinkPeriodicDeploymentManager % Provided,
schemedKafkaComponentsUtils % Provided,
)
@@ -2175,7 +2144,6 @@ lazy val modules = List[ProjectReference](
requestResponseRuntime,
liteEngineRuntimeApp,
flinkDeploymentManager,
- flinkPeriodicDeploymentManager,
flinkDevModel,
flinkDevModelJava,
flinkTableApiComponents,
diff --git a/components-api/src/main/scala/pl/touk/nussknacker/engine/api/typed/AssignabilityDeterminer.scala b/components-api/src/main/scala/pl/touk/nussknacker/engine/api/typed/AssignabilityDeterminer.scala
index 8e4cec7ee42..0c016839665 100644
--- a/components-api/src/main/scala/pl/touk/nussknacker/engine/api/typed/AssignabilityDeterminer.scala
+++ b/components-api/src/main/scala/pl/touk/nussknacker/engine/api/typed/AssignabilityDeterminer.scala
@@ -32,6 +32,9 @@ object AssignabilityDeterminer {
def isAssignableStrict(from: TypingResult, to: TypingResult): ValidatedNel[String, Unit] =
isAssignable(from, to, StrictConversionChecker)
+ def isAssignableWithoutConversion(from: TypingResult, to: TypingResult): ValidatedNel[String, Unit] =
+ isAssignable(from, to, WithoutConversionChecker)
+
private def isAssignable(from: TypingResult, to: TypingResult, conversionChecker: ConversionChecker) = {
(from, to) match {
case (_, Unknown) => ().validNel
@@ -223,6 +226,19 @@ object AssignabilityDeterminer {
}
+ private object WithoutConversionChecker extends ConversionChecker {
+
+ override def isConvertable(
+ from: SingleTypingResult,
+ to: TypedClass
+ ): ValidatedNel[String, Unit] = {
+ val errMsgPrefix =
+ s"${from.runtimeObjType.display} is not the same as ${to.display}"
+ condNel(from.withoutValue == to.withoutValue, (), errMsgPrefix)
+ }
+
+ }
+
private object StrictConversionChecker extends ConversionChecker {
override def isConvertable(
diff --git a/components-api/src/main/scala/pl/touk/nussknacker/engine/api/typed/typing.scala b/components-api/src/main/scala/pl/touk/nussknacker/engine/api/typed/typing.scala
index 1aff3f9a890..343af1e201d 100644
--- a/components-api/src/main/scala/pl/touk/nussknacker/engine/api/typed/typing.scala
+++ b/components-api/src/main/scala/pl/touk/nussknacker/engine/api/typed/typing.scala
@@ -40,6 +40,12 @@ object typing {
final def canBeStrictlyConvertedTo(typingResult: TypingResult): Boolean =
AssignabilityDeterminer.isAssignableStrict(this, typingResult).isValid
+ /**
+   * Checks if a value of this type can be used where the given typingResult is expected, without applying any conversion.
+ */
+ final def canBeConvertedWithoutConversionTo(typingResult: TypingResult): Boolean =
+ AssignabilityDeterminer.isAssignableWithoutConversion(this, typingResult).isValid
+
def valueOpt: Option[Any]
def withoutValue: TypingResult
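For context on how the new check on `TypingResult` differs from the existing ones: apart from the `Unknown` cases, `canBeConvertedWithoutConversionTo` only accepts types whose value-erased representations are equal, while the older checks also admit conversions. A minimal sketch of that contrast; the assertions are illustrative consequences of the `withoutValue` equality rule above, not a test shipped in this PR:

```scala
import pl.touk.nussknacker.engine.api.typed.typing.{Typed, Unknown}

object WithoutConversionExample extends App {
  // Identical value-erased types pass.
  assert(Typed[String].canBeConvertedWithoutConversionTo(Typed[String]))
  // A value-carrying type is compared after erasing its value.
  assert(Typed.fromInstance("x").canBeConvertedWithoutConversionTo(Typed[String]))
  // String -> Int would require an actual conversion, so it is rejected here.
  assert(!Typed[String].canBeConvertedWithoutConversionTo(Typed[Int]))
  // Everything can be passed where Unknown is expected.
  assert(Typed[String].canBeConvertedWithoutConversionTo(Unknown))
}
```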
diff --git a/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Activities should display activities #0.png b/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Activities should display activities #0.png
index 92071466a54..96fac7c2011 100644
Binary files a/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Activities should display activities #0.png and b/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Activities should display activities #0.png differ
diff --git a/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Activities should display activities #2.png b/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Activities should display activities #2.png
index 9a1f6d10481..c6d43f459ab 100644
Binary files a/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Activities should display activities #2.png and b/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Activities should display activities #2.png differ
diff --git a/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Activities should display activities #3.png b/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Activities should display activities #3.png
index 142e3b6736b..2dab2dfdb68 100644
Binary files a/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Activities should display activities #3.png and b/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Activities should display activities #3.png differ
diff --git a/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Sticky notes should add text to note and display it as markdown #0.png b/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Sticky notes should add text to note and display it as markdown #0.png
index 7cf3a281fd8..2dd76fb30dc 100644
Binary files a/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Sticky notes should add text to note and display it as markdown #0.png and b/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Sticky notes should add text to note and display it as markdown #0.png differ
diff --git a/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Sticky notes should allow to drag sticky note #0.png b/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Sticky notes should allow to drag sticky note #0.png
index 77404342dfd..605d94e27b4 100644
Binary files a/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Sticky notes should allow to drag sticky note #0.png and b/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Sticky notes should allow to drag sticky note #0.png differ
diff --git a/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Sticky notes should disable sticky note when scenario is not saved #0.png b/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Sticky notes should disable sticky note when scenario is not saved #0.png
index e3022e7c644..df07fd6e218 100644
Binary files a/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Sticky notes should disable sticky note when scenario is not saved #0.png and b/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Sticky notes should disable sticky note when scenario is not saved #0.png differ
diff --git a/designer/client/package-lock.json b/designer/client/package-lock.json
index 97f763865a8..9054fa1411b 100644
--- a/designer/client/package-lock.json
+++ b/designer/client/package-lock.json
@@ -26,6 +26,7 @@
"@touk/window-manager": "1.9.1",
"ace-builds": "1.34.2",
"axios": "1.7.5",
+ "copy-to-clipboard": "3.3.1",
"d3-transition": "3.0.1",
"d3-zoom": "3.0.0",
"dagre": "0.8.5",
@@ -10357,6 +10358,14 @@
"node": ">=0.10.0"
}
},
+ "node_modules/copy-to-clipboard": {
+ "version": "3.3.1",
+ "resolved": "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.1.tgz",
+ "integrity": "sha512-i13qo6kIHTTpCm8/Wup+0b1mVWETvu2kIMzKoK8FpkLkFxlt0znUAHcMzox+T8sPlqtZXq3CulEjQHsYiGFJUw==",
+ "dependencies": {
+ "toggle-selection": "^1.0.6"
+ }
+ },
"node_modules/copy-webpack-plugin": {
"version": "11.0.0",
"resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-11.0.0.tgz",
@@ -26188,6 +26197,11 @@
"node": ">= 0.10"
}
},
+ "node_modules/toggle-selection": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/toggle-selection/-/toggle-selection-1.0.6.tgz",
+ "integrity": "sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ=="
+ },
"node_modules/toidentifier": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
@@ -36096,6 +36110,14 @@
"integrity": "sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=",
"dev": true
},
+ "copy-to-clipboard": {
+ "version": "3.3.1",
+ "resolved": "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.1.tgz",
+ "integrity": "sha512-i13qo6kIHTTpCm8/Wup+0b1mVWETvu2kIMzKoK8FpkLkFxlt0znUAHcMzox+T8sPlqtZXq3CulEjQHsYiGFJUw==",
+ "requires": {
+ "toggle-selection": "^1.0.6"
+ }
+ },
"copy-webpack-plugin": {
"version": "11.0.0",
"resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-11.0.0.tgz",
@@ -47973,6 +47995,11 @@
"through2": "^2.0.3"
}
},
+ "toggle-selection": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/toggle-selection/-/toggle-selection-1.0.6.tgz",
+ "integrity": "sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ=="
+ },
"toidentifier": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
diff --git a/designer/client/package.json b/designer/client/package.json
index 0330944e1be..fb37fe878d4 100644
--- a/designer/client/package.json
+++ b/designer/client/package.json
@@ -19,6 +19,7 @@
"@touk/window-manager": "1.9.1",
"ace-builds": "1.34.2",
"axios": "1.7.5",
+ "copy-to-clipboard": "3.3.1",
"d3-transition": "3.0.1",
"d3-zoom": "3.0.0",
"dagre": "0.8.5",
diff --git a/designer/client/src/components/graph/node-modal/fragment-input-definition/FragmentInputDefinition.tsx b/designer/client/src/components/graph/node-modal/fragment-input-definition/FragmentInputDefinition.tsx
index c0aa32d74b1..02226d4e2a6 100644
--- a/designer/client/src/components/graph/node-modal/fragment-input-definition/FragmentInputDefinition.tsx
+++ b/designer/client/src/components/graph/node-modal/fragment-input-definition/FragmentInputDefinition.tsx
@@ -6,7 +6,7 @@ import { getProcessDefinitionData } from "../../../../reducers/selectors/setting
import { MapVariableProps } from "../MapVariable";
import { NodeCommonDetailsDefinition } from "../NodeCommonDetailsDefinition";
import { FieldsSelect } from "./FieldsSelect";
-import { find, head, orderBy } from "lodash";
+import { find, head } from "lodash";
import { getDefaultFields } from "./item/utils";
import { FragmentInputParameter } from "./item";
@@ -26,11 +26,9 @@ export function useFragmentInputDefinitionTypeOptions() {
[definitionData?.classes],
);
- const orderedTypeOptions = useMemo(() => orderBy(typeOptions, (item) => [item.label, item.value], ["asc"]), [typeOptions]);
-
const defaultTypeOption = useMemo(() => find(typeOptions, { label: "String" }) || head(typeOptions), [typeOptions]);
return {
- orderedTypeOptions,
+ typeOptions,
defaultTypeOption,
};
}
@@ -40,7 +38,7 @@ export default function FragmentInputDefinition(props: Props): JSX.Element {
const { node, setProperty, isEditMode, showValidation } = passProps;
const readOnly = !isEditMode;
- const { orderedTypeOptions, defaultTypeOption } = useFragmentInputDefinitionTypeOptions();
+ const { typeOptions, defaultTypeOption } = useFragmentInputDefinitionTypeOptions();
const addField = useCallback(() => {
addElement("parameters", getDefaultFields(defaultTypeOption.value));
@@ -57,7 +55,7 @@ export default function FragmentInputDefinition(props: Props): JSX.Element {
removeField={removeElement}
namespace={"parameters"}
fields={fields}
- options={orderedTypeOptions}
+ options={typeOptions}
showValidation={showValidation}
readOnly={readOnly}
variableTypes={variableTypes}
diff --git a/designer/client/src/components/notifications/Notification.tsx b/designer/client/src/components/notifications/Notification.tsx
index b9a4d9e5ce4..988536b5e1a 100644
--- a/designer/client/src/components/notifications/Notification.tsx
+++ b/designer/client/src/components/notifications/Notification.tsx
@@ -1,6 +1,8 @@
import React, { ReactElement } from "react";
import { Alert, AlertColor } from "@mui/material";
import CloseIcon from "@mui/icons-material/Close";
+import { CopyTooltip } from "./copyTooltip";
+import { useTranslation } from "react-i18next";
interface Props {
icon: ReactElement;
@@ -10,9 +12,19 @@ interface Props {
}
export default function Notification({ icon, message, type }: Props): JSX.Element {
-    return (
+    const { t } = useTranslation();
+
+    const alertContent = (
         <Alert icon={icon} severity={type} action={<CloseIcon />}>
             {message}
         </Alert>
     );
+
+    return type === "error" ? (
+        <CopyTooltip text={message} title={t("notification.copyToClipboard", "Copy message to clipboard")}>
+            {alertContent}
+        </CopyTooltip>
+    ) : (
+        alertContent
+    );
 }
diff --git a/designer/client/src/components/notifications/copyTooltip.tsx b/designer/client/src/components/notifications/copyTooltip.tsx
new file mode 100644
index 00000000000..f06532ed38d
--- /dev/null
+++ b/designer/client/src/components/notifications/copyTooltip.tsx
@@ -0,0 +1,78 @@
+import React, { PropsWithChildren, useEffect, useState } from "react";
+import copy from "copy-to-clipboard";
+import { Button, Tooltip } from "@mui/material";
+import { CopyAll, Done } from "@mui/icons-material";
+
+export function useCopyClipboard(): [boolean, (value: string) => void] {
+    const [isCopied, setIsCopied] = useState<boolean>();
+    const [text, setText] = useState<string>();
+
+ useEffect(() => {
+ if (isCopied) {
+ const id = setTimeout(() => {
+ setIsCopied(false);
+ }, 1000);
+
+ return () => {
+ clearTimeout(id);
+ };
+ }
+ }, [isCopied, text]);
+
+ return [
+ isCopied,
+ (value: string) => {
+ setText(value);
+ setIsCopied(copy(value));
+ },
+ ];
+}
+
+export function CopyTooltip({
+ children,
+ text,
+ title,
+}: PropsWithChildren<{
+ text: string;
+ title: string;
+}>): JSX.Element {
+ const [isCopied, copy] = useCopyClipboard();
+    return (
+        <Tooltip
+            title={
+                <Button
+                    startIcon={isCopied ? <Done /> : <CopyAll />}
+                    onClick={(e) => {
+                        copy(text);
+                        e.stopPropagation();
+                    }}
+                >
+                    {title}
+                </Button>
+            }
+            componentsProps={{
+                popper: {
+                    sx: {
+                        opacity: 0.8,
+                    },
+                },
+                tooltip: {
+                    sx: {
+                        bgcolor: (t) => (t.palette.mode === "dark" ? t.palette.common.white : t.palette.common.black),
+                        color: (t) => (t.palette.mode === "dark" ? t.palette.common.black : t.palette.common.white),
+                    },
+                },
+                arrow: { sx: { color: (t) => (t.palette.mode === "dark" ? t.palette.common.white : t.palette.common.black) } },
+            }}
+            placement="bottom-start"
+            arrow
+        >
+            {children}
+        </Tooltip>
+    );
+}
diff --git a/designer/client/src/components/toolbars/activities/ActivityPanelRowItem/ActivityItemHeader.tsx b/designer/client/src/components/toolbars/activities/ActivityPanelRowItem/ActivityItemHeader.tsx
index 8de06c5afbc..8976e6bd133 100644
--- a/designer/client/src/components/toolbars/activities/ActivityPanelRowItem/ActivityItemHeader.tsx
+++ b/designer/client/src/components/toolbars/activities/ActivityPanelRowItem/ActivityItemHeader.tsx
@@ -1,5 +1,5 @@
import React, { PropsWithChildren, useCallback, useMemo } from "react";
-import { Button, styled, Typography } from "@mui/material";
+import { Button, styled, Tooltip, Typography } from "@mui/material";
import { SearchHighlighter } from "../../creator/SearchHighlighter";
import HttpService from "../../../../http/HttpService";
import { ActionMetadata, ActivityAttachment, ActivityComment, ActivityType } from "../types";
@@ -20,6 +20,7 @@ import { ActivityItemCommentModify } from "./ActivityItemCommentModify";
import { getLoggedUser } from "../../../../reducers/selectors/settings";
import { getCapabilities } from "../../../../reducers/selectors/other";
import { EventTrackingSelector, getEventTrackingProps } from "../../../../containers/event-tracking";
+import CircleIcon from "@mui/icons-material/Circle";
const StyledHeaderIcon = styled(UrlIcon)(({ theme }) => ({
width: "16px",
@@ -37,15 +38,18 @@ const StyledHeaderActionRoot = styled("div")(({ theme }) => ({
gap: theme.spacing(0.5),
}));
-const StyledActivityItemHeader = styled("div")<{ isHighlighted: boolean; isDeploymentActive: boolean; isActiveFound: boolean }>(
- ({ theme, isHighlighted, isDeploymentActive, isActiveFound }) => ({
- display: "flex",
- alignItems: "center",
- padding: theme.spacing(0.5, 0.5, 0.5, 0.75),
- borderRadius: theme.spacing(0.5),
- ...getHeaderColors(theme, isHighlighted, isDeploymentActive, isActiveFound),
- }),
-);
+const StyledActivityItemHeader = styled("div")<{
+ isHighlighted: boolean;
+ isDeploymentActive: boolean;
+ isActiveFound: boolean;
+ isVersionSelected: boolean;
+}>(({ theme, isHighlighted, isDeploymentActive, isActiveFound, isVersionSelected }) => ({
+ display: "flex",
+ alignItems: "center",
+ padding: theme.spacing(0.5, 0.5, 0.5, 0.75),
+ borderRadius: theme.spacing(0.5),
+ ...getHeaderColors(theme, isHighlighted, isDeploymentActive, isActiveFound, isVersionSelected),
+}));
const HeaderActivity = ({
activityAction,
@@ -233,30 +237,45 @@ const WithOpenVersion = ({
const ActivityItemHeader = ({ activity, isDeploymentActive, isFound, isActiveFound, searchQuery }: Props) => {
const scenario = useSelector(getScenario);
const { processVersionId } = scenario || {};
+ const { t } = useTranslation();
const isHighlighted = ["SCENARIO_DEPLOYED", "SCENARIO_CANCELED"].includes(activity.type);
const openVersionEnable =
["SCENARIO_MODIFIED", "SCENARIO_DEPLOYED"].includes(activity.type) && activity.scenarioVersionId !== processVersionId;
+ const isVersionSelected = ["SCENARIO_MODIFIED"].includes(activity.type) && activity.scenarioVersionId === processVersionId;
const getHeaderTitle = useMemo(() => {
const text = activity.overrideDisplayableName || activity.activities.displayableName;
+ const activeItemIndicatorText = isDeploymentActive
+ ? t("activityItem.currentlyDeployedVersionText", "Currently deployed version")
+ : isVersionSelected
+ ? t("activityItem.currentlySelectedVersionText", "Currently selected version")
+ : undefined;
+
const headerTitle = (
-            <Typography
-                component={SearchHighlighter}
-                highlights={isFound ? [searchQuery] : undefined}
-                variant={"caption"}
-                sx={(theme) => ({
-                    color: theme.palette.text.primary,
-                    overflow: "hidden",
-                    textOverflow: "ellipsis",
-                    textWrap: "noWrap",
-                    padding: !openVersionEnable && theme.spacing(0, 1),
-                })}
-            >
-                {text}
-            </Typography>
+            <>
+                <Typography
+                    component={SearchHighlighter}
+                    highlights={isFound ? [searchQuery] : undefined}
+                    variant={"caption"}
+                    sx={(theme) => ({
+                        color: theme.palette.text.primary,
+                        overflow: "hidden",
+                        textOverflow: "ellipsis",
+                        textWrap: "noWrap",
+                        padding: !openVersionEnable && theme.spacing(0, 1),
+                    })}
+                >
+                    {text}
+                </Typography>
+                {activeItemIndicatorText && (
+                    <Tooltip title={activeItemIndicatorText}>
+                        <CircleIcon fontSize="small" color="primary" />
+                    </Tooltip>
+                )}
+            </>
);
if (openVersionEnable) {
@@ -273,13 +292,21 @@ const ActivityItemHeader = ({ activity, isDeploymentActive, isFound, isActiveFou
activity.overrideDisplayableName,
activity.scenarioVersionId,
activity.type,
+ isDeploymentActive,
isFound,
+ isVersionSelected,
openVersionEnable,
searchQuery,
+ t,
]);
return (
-        <StyledActivityItemHeader isHighlighted={isHighlighted} isDeploymentActive={isDeploymentActive} isActiveFound={isActiveFound}>
+        <StyledActivityItemHeader
+            isHighlighted={isHighlighted}
+            isDeploymentActive={isDeploymentActive}
+            isActiveFound={isActiveFound}
+            isVersionSelected={isVersionSelected}
+        >
{getHeaderTitle}
diff --git a/designer/client/src/components/toolbars/activities/helpers/activityItemColors.ts b/designer/client/src/components/toolbars/activities/helpers/activityItemColors.ts
index 0afdb0b454c..9e03f525c46 100644
--- a/designer/client/src/components/toolbars/activities/helpers/activityItemColors.ts
+++ b/designer/client/src/components/toolbars/activities/helpers/activityItemColors.ts
@@ -4,6 +4,8 @@ import { getBorderColor } from "../../../../containers/theme/helpers";
const defaultBorder = (theme: Theme) => `0.5px solid ${getBorderColor(theme)}`;
const activeBorder = (theme: Theme) => `0.5px solid ${blend(theme.palette.background.paper, theme.palette.primary.main, 0.4)}`;
+const deployedBorder = (theme: Theme) => `0.5px solid ${theme.palette.primary.main}`;
+const selectedVersionBorder = (theme: Theme) => `0.5px solid ${theme.palette.primary.main}`;
const runningActiveFoundHeaderBackground = (theme: Theme) => blend(theme.palette.background.paper, theme.palette.primary.main, 0.3);
const highlightedHeaderBackground = (theme: Theme) => blend(theme.palette.background.paper, theme.palette.primary.main, 0.05);
@@ -11,8 +13,15 @@ const highlightedActiveFoundHeaderBackground = (theme: Theme) => blend(theme.pal
const runningHeaderBackground = (theme: Theme) => blend(theme.palette.background.paper, theme.palette.primary.main, 0.2);
const activeFoundItemBackground = (theme: Theme) => blend(theme.palette.background.paper, theme.palette.primary.main, 0.2);
const foundItemBackground = (theme: Theme) => blend(theme.palette.background.paper, theme.palette.primary.main, 0.08);
+const selectedVersionHeaderBackground = (theme: Theme) => blend(theme.palette.background.paper, theme.palette.primary.main, 0.2);
-export const getHeaderColors = (theme: Theme, isHighlighted: boolean, isDeploymentActive: boolean, isActiveFound: boolean) => {
+export const getHeaderColors = (
+ theme: Theme,
+ isHighlighted: boolean,
+ isDeploymentActive: boolean,
+ isActiveFound: boolean,
+ isVersionSelected: boolean,
+) => {
if (isDeploymentActive && isActiveFound) {
return {
backgroundColor: runningActiveFoundHeaderBackground(theme),
@@ -30,7 +39,7 @@ export const getHeaderColors = (theme: Theme, isHighlighted: boolean, isDeployme
if (isDeploymentActive) {
return {
backgroundColor: runningHeaderBackground(theme),
- border: defaultBorder(theme),
+ border: deployedBorder(theme),
};
}
@@ -41,6 +50,13 @@ export const getHeaderColors = (theme: Theme, isHighlighted: boolean, isDeployme
};
}
+ if (isVersionSelected) {
+ return {
+ backgroundColor: selectedVersionHeaderBackground(theme),
+ border: selectedVersionBorder(theme),
+ };
+ }
+
return {
backgroundColor: undefined,
border: "none",
diff --git a/designer/client/src/reducers/selectors/settings.ts b/designer/client/src/reducers/selectors/settings.ts
index 382290a60ea..8177dc4d240 100644
--- a/designer/client/src/reducers/selectors/settings.ts
+++ b/designer/client/src/reducers/selectors/settings.ts
@@ -1,10 +1,12 @@
-import { createSelector } from "reselect";
+import { createSelector, createSelectorCreator, defaultMemoize } from "reselect";
import { MetricsType } from "../../actions/nk";
import { DynamicTabData } from "../../containers/DynamicTab";
import { ComponentGroup, ProcessDefinitionData } from "../../types";
import { RootState } from "../index";
import { AuthenticationSettings, SettingsState } from "../settings";
-import { uniqBy } from "lodash";
+import { isEqual, uniqBy } from "lodash";
+
+const createDeepEqualSelector = createSelectorCreator(defaultMemoize, isEqual);
export const getSettings = (state: RootState): SettingsState => state.settings;
@@ -17,7 +19,10 @@ export const getSurveySettings = createSelector(getFeatureSettings, (s) => s?.su
export const getStickyNotesSettings = createSelector(getFeatureSettings, (s) => s?.stickyNotesSettings);
export const getLoggedUser = createSelector(getSettings, (s) => s.loggedUser);
export const getLoggedUserId = createSelector(getLoggedUser, (s) => s.id);
-export const getProcessDefinitionData = createSelector(getSettings, (s) => s.processDefinitionData || ({} as ProcessDefinitionData));
+export const getProcessDefinitionData = createDeepEqualSelector(
+ getSettings,
+ (s) => s.processDefinitionData || ({} as ProcessDefinitionData),
+);
export const getComponentGroups = createSelector(getProcessDefinitionData, (p) => p.componentGroups || ({} as ComponentGroup[]));
export const getCategories = createSelector(getLoggedUser, (u) => u.categories || []);
export const getWritableCategories = createSelector(getLoggedUser, getCategories, (user, categories) =>
diff --git a/designer/client/src/types/node.ts b/designer/client/src/types/node.ts
index 014a4c13670..fe25fc99084 100644
--- a/designer/client/src/types/node.ts
+++ b/designer/client/src/types/node.ts
@@ -2,7 +2,7 @@ import { ProcessAdditionalFields, ReturnedType } from "./scenarioGraph";
import { FragmentInputParameter } from "../components/graph/node-modal/fragment-input-definition/item";
import { StickyNoteType } from "./stickyNote";
-type Type = "FragmentInput" | typeof StickyNoteType | string;
+type Type = "FragmentInput" | typeof StickyNoteType | string;
export type LayoutData = { x: number; y: number };
diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentManager.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentManager.scala
index bade92ff999..8aa3ab96743 100644
--- a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentManager.scala
+++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentManager.scala
@@ -1,9 +1,11 @@
package pl.touk.nussknacker.engine.api.deployment
+import com.typesafe.config.Config
import pl.touk.nussknacker.engine.api.deployment.inconsistency.InconsistentStateDetector
+import pl.touk.nussknacker.engine.api.deployment.scheduler.services._
import pl.touk.nussknacker.engine.api.process.{ProcessIdWithName, ProcessName, VersionId}
-import pl.touk.nussknacker.engine.newdeployment
import pl.touk.nussknacker.engine.util.WithDataFreshnessStatusUtils.WithDataFreshnessStatusOps
+import pl.touk.nussknacker.engine.{BaseModelData, DeploymentManagerDependencies, newdeployment}
import java.time.Instant
import scala.concurrent.ExecutionContext.Implicits._
@@ -48,6 +50,8 @@ trait DeploymentManager extends AutoCloseable {
def stateQueryForAllScenariosSupport: StateQueryForAllScenariosSupport
+ def schedulingSupport: SchedulingSupport
+
def processCommand[Result](command: DMScenarioCommand[Result]): Future[Result]
final def getProcessState(
@@ -132,3 +136,25 @@ trait DeploymentSynchronisationSupported extends DeploymentSynchronisationSuppor
}
case object NoDeploymentSynchronisationSupport extends DeploymentSynchronisationSupport
+
+sealed trait SchedulingSupport
+
+trait SchedulingSupported extends SchedulingSupport {
+
+ def createScheduledExecutionPerformer(
+ modelData: BaseModelData,
+ dependencies: DeploymentManagerDependencies,
+ deploymentConfig: Config,
+ ): ScheduledExecutionPerformer
+
+ def customSchedulePropertyExtractorFactory: Option[SchedulePropertyExtractorFactory]
+
+ def customProcessConfigEnricherFactory: Option[ProcessConfigEnricherFactory]
+
+ def customScheduledProcessListenerFactory: Option[ScheduledProcessListenerFactory]
+
+ def customAdditionalDeploymentDataProvider: Option[AdditionalDeploymentDataProvider]
+
+}
+
+case object NoSchedulingSupport extends SchedulingSupport
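Reading the new `SchedulingSupport` ADT: a deployment manager either returns `NoSchedulingSupport` (as the stub below does) or implements `SchedulingSupported` with a performer factory plus four optional customisation hooks. A minimal sketch of the opt-in side, assuming no custom hooks are needed; the performer body is engine-specific and intentionally left unimplemented:

```scala
import com.typesafe.config.Config
import pl.touk.nussknacker.engine.api.deployment.SchedulingSupported
import pl.touk.nussknacker.engine.api.deployment.scheduler.services._
import pl.touk.nussknacker.engine.{BaseModelData, DeploymentManagerDependencies}

class MySchedulingSupport extends SchedulingSupported {

  override def createScheduledExecutionPerformer(
      modelData: BaseModelData,
      dependencies: DeploymentManagerDependencies,
      deploymentConfig: Config,
  ): ScheduledExecutionPerformer =
    ??? // engine-specific: e.g. prepares model artifacts and submits jobs

  // No customisations: the scheduling mechanism falls back to its defaults.
  override def customSchedulePropertyExtractorFactory: Option[SchedulePropertyExtractorFactory] = None
  override def customProcessConfigEnricherFactory: Option[ProcessConfigEnricherFactory]         = None
  override def customScheduledProcessListenerFactory: Option[ScheduledProcessListenerFactory]   = None
  override def customAdditionalDeploymentDataProvider: Option[AdditionalDeploymentDataProvider] = None
}
```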
diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/cache/CachingProcessStateDeploymentManager.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/cache/CachingProcessStateDeploymentManager.scala
index 97ca468a2ec..1689041c83b 100644
--- a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/cache/CachingProcessStateDeploymentManager.scala
+++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/cache/CachingProcessStateDeploymentManager.scala
@@ -16,6 +16,7 @@ class CachingProcessStateDeploymentManager(
cacheTTL: FiniteDuration,
override val deploymentSynchronisationSupport: DeploymentSynchronisationSupport,
override val stateQueryForAllScenariosSupport: StateQueryForAllScenariosSupport,
+ override val schedulingSupport: SchedulingSupport,
) extends DeploymentManager {
private val cache: AsyncCache[ProcessName, List[StatusDetails]] = Caffeine
@@ -83,7 +84,8 @@ object CachingProcessStateDeploymentManager extends LazyLogging {
delegate,
cacheTTL,
delegate.deploymentSynchronisationSupport,
- delegate.stateQueryForAllScenariosSupport
+ delegate.stateQueryForAllScenariosSupport,
+ delegate.schedulingSupport,
)
}
.getOrElse {
diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/model/DeploymentWithRuntimeParams.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/model/DeploymentWithRuntimeParams.scala
new file mode 100644
index 00000000000..a7d30f9c982
--- /dev/null
+++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/model/DeploymentWithRuntimeParams.scala
@@ -0,0 +1,12 @@
+package pl.touk.nussknacker.engine.api.deployment.scheduler.model
+
+import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId}
+
+final case class DeploymentWithRuntimeParams(
+ processId: Option[ProcessId],
+ processName: ProcessName,
+ versionId: VersionId,
+ runtimeParams: RuntimeParams,
+)
+
+final case class RuntimeParams(params: Map[String, String])
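A quick illustration of how these two case classes compose. The map keys and values are hypothetical; for a Flink-based engine they might carry the name of the jar prepared for the scenario version:

```scala
import pl.touk.nussknacker.engine.api.deployment.scheduler.model.{DeploymentWithRuntimeParams, RuntimeParams}
import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId}

object DeploymentWithRuntimeParamsExample {

  val deployment: DeploymentWithRuntimeParams = DeploymentWithRuntimeParams(
    processId = Some(ProcessId(42L)),
    processName = ProcessName("detect-fraud"),
    versionId = VersionId(3L),
    // Free-form, engine-specific parameters; "jarFileName" is an assumed key.
    runtimeParams = RuntimeParams(Map("jarFileName" -> "detect-fraud-v3.jar")),
  )

}
```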
diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/model/ScheduleProperty.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/model/ScheduleProperty.scala
new file mode 100644
index 00000000000..4b8a1f7826b
--- /dev/null
+++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/model/ScheduleProperty.scala
@@ -0,0 +1,16 @@
+package pl.touk.nussknacker.engine.api.deployment.scheduler.model
+
+sealed trait ScheduleProperty
+
+object ScheduleProperty {
+ sealed trait SingleScheduleProperty extends ScheduleProperty
+
+ final case class MultipleScheduleProperty(
+ schedules: Map[String, SingleScheduleProperty]
+ ) extends ScheduleProperty
+
+ final case class CronScheduleProperty(
+ labelOrCronExpr: String
+ ) extends SingleScheduleProperty
+
+}
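The ADT mirrors the two ways a scenario can be scheduled: one cron property, or several named schedules. A small construction sketch; the cron strings are assumed to be in the Quartz-style format that cron-utils parses, and validation happens elsewhere:

```scala
import pl.touk.nussknacker.engine.api.deployment.scheduler.model.ScheduleProperty._

object SchedulePropertyExample {

  // A single schedule firing every day at 02:00.
  val nightly: CronScheduleProperty = CronScheduleProperty("0 0 2 * * ?")

  // Multiple named schedules for one scenario.
  val schedules: MultipleScheduleProperty = MultipleScheduleProperty(
    Map(
      "nightly" -> nightly,
      "weekly"  -> CronScheduleProperty("0 0 4 ? * MON"),
    )
  )

}
```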
diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/model/ScheduledDeploymentDetails.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/model/ScheduledDeploymentDetails.scala
new file mode 100644
index 00000000000..3d784ac7f0a
--- /dev/null
+++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/model/ScheduledDeploymentDetails.scala
@@ -0,0 +1,28 @@
+package pl.touk.nussknacker.engine.api.deployment.scheduler.model
+
+import pl.touk.nussknacker.engine.api.process.{ProcessName, VersionId}
+
+import java.time.LocalDateTime
+
+case class ScheduledDeploymentDetails(
+ id: Long,
+ processName: ProcessName,
+ versionId: VersionId,
+ scheduleName: Option[String],
+ createdAt: LocalDateTime,
+ runAt: LocalDateTime,
+ deployedAt: Option[LocalDateTime],
+ completedAt: Option[LocalDateTime],
+ status: ScheduledDeploymentStatus,
+)
+
+sealed trait ScheduledDeploymentStatus
+
+object ScheduledDeploymentStatus {
+ case object Scheduled extends ScheduledDeploymentStatus
+ case object Deployed extends ScheduledDeploymentStatus
+ case object Finished extends ScheduledDeploymentStatus
+ case object Failed extends ScheduledDeploymentStatus
+ case object RetryingDeploy extends ScheduledDeploymentStatus
+ case object FailedOnDeploy extends ScheduledDeploymentStatus
+}
diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/model/ScheduledProcessDetails.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/model/ScheduledProcessDetails.scala
new file mode 100644
index 00000000000..8100c6f33c7
--- /dev/null
+++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/model/ScheduledProcessDetails.scala
@@ -0,0 +1,9 @@
+package pl.touk.nussknacker.engine.api.deployment.scheduler.model
+
+import pl.touk.nussknacker.engine.api.{MetaData, ProcessVersion}
+
+case class ScheduledProcessDetails(
+ processVersion: ProcessVersion,
+ processMetaData: MetaData,
+ inputConfigDuringExecutionJson: String,
+)
diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/services/AdditionalDeploymentDataProvider.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/services/AdditionalDeploymentDataProvider.scala
new file mode 100644
index 00000000000..158836ce6ca
--- /dev/null
+++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/services/AdditionalDeploymentDataProvider.scala
@@ -0,0 +1,9 @@
+package pl.touk.nussknacker.engine.api.deployment.scheduler.services
+
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.ScheduledDeploymentDetails
+
+trait AdditionalDeploymentDataProvider {
+
+ def prepareAdditionalData(runDetails: ScheduledDeploymentDetails): Map[String, String]
+
+}
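A sketch of a provider implementation; the signature suggests that whatever it returns is attached to the deployment as extra string data. The key names here are invented for illustration:

```scala
import pl.touk.nussknacker.engine.api.deployment.scheduler.model.ScheduledDeploymentDetails
import pl.touk.nussknacker.engine.api.deployment.scheduler.services.AdditionalDeploymentDataProvider

import java.time.format.DateTimeFormatter

object ScheduleInfoAdditionalDataProvider extends AdditionalDeploymentDataProvider {

  override def prepareAdditionalData(runDetails: ScheduledDeploymentDetails): Map[String, String] =
    Map(
      "scheduledDeploymentId" -> runDetails.id.toString,
      "runAt"                 -> runDetails.runAt.format(DateTimeFormatter.ISO_LOCAL_DATE_TIME),
      "scheduleName"          -> runDetails.scheduleName.getOrElse(""),
    )

}
```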
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/service/ProcessConfigEnricher.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/services/ProcessConfigEnricher.scala
similarity index 77%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/service/ProcessConfigEnricher.scala
rename to designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/services/ProcessConfigEnricher.scala
index e428dbd5ba4..0e89cd851cb 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/service/ProcessConfigEnricher.scala
+++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/services/ProcessConfigEnricher.scala
@@ -1,14 +1,10 @@
-package pl.touk.nussknacker.engine.management.periodic.service
+package pl.touk.nussknacker.engine.api.deployment.scheduler.services
import com.typesafe.config.{Config, ConfigFactory}
+import pl.touk.nussknacker.engine.api.ProcessVersion
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.{ScheduledDeploymentDetails, ScheduledProcessDetails}
+import pl.touk.nussknacker.engine.api.deployment.scheduler.services.ProcessConfigEnricher.{DeployData, EnrichedProcessConfig, InitialScheduleData}
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.model.DeploymentWithJarData.WithCanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.model.PeriodicProcessDeployment
-import pl.touk.nussknacker.engine.management.periodic.service.ProcessConfigEnricher.{
- DeployData,
- EnrichedProcessConfig,
- InitialScheduleData
-}
import pl.touk.nussknacker.engine.modelconfig.InputConfigDuringExecution
import sttp.client3.SttpBackend
@@ -32,7 +28,6 @@ trait ProcessConfigEnricher {
object ProcessConfigEnricher {
trait ProcessConfigEnricherInputData {
- def canonicalProcess: CanonicalProcess
def inputConfigDuringExecutionJson: String
def inputConfigDuringExecution: Config = {
@@ -41,13 +36,16 @@ object ProcessConfigEnricher {
}
- case class InitialScheduleData(canonicalProcess: CanonicalProcess, inputConfigDuringExecutionJson: String)
- extends ProcessConfigEnricherInputData
+ case class InitialScheduleData(
+ canonicalProcess: CanonicalProcess,
+ inputConfigDuringExecutionJson: String
+ ) extends ProcessConfigEnricherInputData
case class DeployData(
- canonicalProcess: CanonicalProcess,
- inputConfigDuringExecutionJson: String,
- deployment: PeriodicProcessDeployment[WithCanonicalProcess]
+ canonicalProcess: CanonicalProcess,
+ processVersion: ProcessVersion,
+ inputConfigDuringExecutionJson: String,
+ deploymentDetails: ScheduledDeploymentDetails,
) extends ProcessConfigEnricherInputData
case class EnrichedProcessConfig(inputConfigDuringExecutionJson: String)
diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/services/SchedulePropertyExtractorFactory.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/services/SchedulePropertyExtractorFactory.scala
new file mode 100644
index 00000000000..112c14ea68a
--- /dev/null
+++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/services/SchedulePropertyExtractorFactory.scala
@@ -0,0 +1,13 @@
+package pl.touk.nussknacker.engine.api.deployment.scheduler.services
+
+import com.typesafe.config.Config
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.ScheduleProperty
+import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
+
+trait SchedulePropertyExtractorFactory {
+ def apply(config: Config): SchedulePropertyExtractor
+}
+
+trait SchedulePropertyExtractor {
+ def apply(canonicalProcess: CanonicalProcess): Either[String, ScheduleProperty]
+}
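The factory/extractor split lets the extractor be configured once and then applied per scenario. A hedged sketch that takes the cron expression from the deployment manager's config rather than from the scenario itself; `cronExpression` is an assumed config key, not one defined by this PR:

```scala
import com.typesafe.config.Config
import pl.touk.nussknacker.engine.api.deployment.scheduler.model.ScheduleProperty
import pl.touk.nussknacker.engine.api.deployment.scheduler.services.{SchedulePropertyExtractor, SchedulePropertyExtractorFactory}
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess

class ConfiguredCronExtractorFactory extends SchedulePropertyExtractorFactory {

  override def apply(config: Config): SchedulePropertyExtractor = new SchedulePropertyExtractor {

    override def apply(canonicalProcess: CanonicalProcess): Either[String, ScheduleProperty] =
      if (config.hasPath("cronExpression"))
        Right(ScheduleProperty.CronScheduleProperty(config.getString("cronExpression")))
      else
        Left("Missing 'cronExpression' in deployment manager configuration")

  }

}
```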
diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/services/ScheduledExecutionPerformer.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/services/ScheduledExecutionPerformer.scala
new file mode 100644
index 00000000000..d971c640d48
--- /dev/null
+++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/services/ScheduledExecutionPerformer.scala
@@ -0,0 +1,31 @@
+package pl.touk.nussknacker.engine.api.deployment.scheduler.services
+
+import pl.touk.nussknacker.engine.api.ProcessVersion
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.{DeploymentWithRuntimeParams, RuntimeParams}
+import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
+import pl.touk.nussknacker.engine.deployment.{DeploymentData, ExternalDeploymentId}
+import pl.touk.nussknacker.engine.modelconfig.InputConfigDuringExecution
+
+import scala.concurrent.Future
+
+trait ScheduledExecutionPerformer {
+
+ def provideInputConfigDuringExecutionJson(): Future[InputConfigDuringExecution]
+
+ def prepareDeploymentWithRuntimeParams(
+ processVersion: ProcessVersion,
+ ): Future[DeploymentWithRuntimeParams]
+
+ def deployWithRuntimeParams(
+ deploymentWithJarData: DeploymentWithRuntimeParams,
+ inputConfigDuringExecutionJson: String,
+ deploymentData: DeploymentData,
+ canonicalProcess: CanonicalProcess,
+ processVersion: ProcessVersion,
+ ): Future[Option[ExternalDeploymentId]]
+
+ def cleanAfterDeployment(
+ runtimeParams: RuntimeParams
+ ): Future[Unit]
+
+}
diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/services/ScheduledProcessListener.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/services/ScheduledProcessListener.scala
new file mode 100644
index 00000000000..b1653dedff3
--- /dev/null
+++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/scheduler/services/ScheduledProcessListener.scala
@@ -0,0 +1,60 @@
+package pl.touk.nussknacker.engine.api.deployment.scheduler.services
+
+import com.typesafe.config.Config
+import pl.touk.nussknacker.engine.api.deployment.StatusDetails
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.ScheduledDeploymentDetails
+import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
+import pl.touk.nussknacker.engine.deployment.ExternalDeploymentId
+
+/*
+  The listener has at-least-once semantics: if there are problems, e.g. with the DB, an invocation can be repeated for the same event,
+  so implementations should be prepared for that. The listener is invoked during a DB transaction; for that reason it is *synchronous*.
+ */
+trait ScheduledProcessListener {
+
+ def onScheduledProcessEvent: PartialFunction[ScheduledProcessEvent, Unit]
+ def close(): Unit = {}
+}
+
+trait ScheduledProcessListenerFactory {
+ def create(config: Config): ScheduledProcessListener
+}
+
+sealed trait ScheduledProcessEvent {
+ val deployment: ScheduledDeploymentDetails
+}
+
+case class DeployedEvent(
+ deployment: ScheduledDeploymentDetails,
+ externalDeploymentId: Option[ExternalDeploymentId]
+) extends ScheduledProcessEvent
+
+case class FinishedEvent(
+ deployment: ScheduledDeploymentDetails,
+ canonicalProcess: CanonicalProcess,
+ processState: Option[StatusDetails]
+) extends ScheduledProcessEvent
+
+case class FailedOnDeployEvent(
+ deployment: ScheduledDeploymentDetails,
+ processState: Option[StatusDetails]
+) extends ScheduledProcessEvent
+
+case class FailedOnRunEvent(
+ deployment: ScheduledDeploymentDetails,
+ processState: Option[StatusDetails]
+) extends ScheduledProcessEvent
+
+case class ScheduledEvent(deployment: ScheduledDeploymentDetails, firstSchedule: Boolean) extends ScheduledProcessEvent
+
+object EmptyListener extends EmptyListener
+
+trait EmptyListener extends ScheduledProcessListener {
+
+ override def onScheduledProcessEvent: PartialFunction[ScheduledProcessEvent, Unit] = Map.empty
+
+}
+
+object EmptyScheduledProcessListenerFactory extends ScheduledProcessListenerFactory {
+ override def create(config: Config): ScheduledProcessListener = EmptyListener
+}
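Given the at-least-once note above, a listener has to tolerate duplicate events. A minimal audit-log sketch (hypothetical, not shipped in this PR); it stays synchronous, matching the contract of being invoked inside a DB transaction:

```scala
import com.typesafe.config.Config
import com.typesafe.scalalogging.LazyLogging
import pl.touk.nussknacker.engine.api.deployment.scheduler.services._

object LoggingScheduledProcessListenerFactory extends ScheduledProcessListenerFactory with LazyLogging {

  override def create(config: Config): ScheduledProcessListener = new ScheduledProcessListener {

    override def onScheduledProcessEvent: PartialFunction[ScheduledProcessEvent, Unit] = {
      case ScheduledEvent(deployment, firstSchedule) =>
        logger.info(s"Scheduled ${deployment.processName}, first schedule: $firstSchedule, runs at ${deployment.runAt}")
      case DeployedEvent(deployment, externalDeploymentId) =>
        logger.info(s"Deployed ${deployment.processName}, external id: $externalDeploymentId")
      case FailedOnDeployEvent(deployment, _) =>
        logger.warn(s"Deployment of ${deployment.processName} failed")
      // Duplicates are possible, so these log lines are not exactly-once.
    }

  }

}
```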
diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/testing/DeploymentManagerStub.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/testing/DeploymentManagerStub.scala
index 3fad23ad8a9..1919c1d4b35 100644
--- a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/testing/DeploymentManagerStub.scala
+++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/testing/DeploymentManagerStub.scala
@@ -61,6 +61,8 @@ class DeploymentManagerStub extends BaseDeploymentManager with StubbingCommands
override def stateQueryForAllScenariosSupport: StateQueryForAllScenariosSupport = NoStateQueryForAllScenariosSupport
+ override def schedulingSupport: SchedulingSupport = NoSchedulingSupport
+
override def close(): Unit = {}
}
diff --git a/designer/deployment-manager-api/src/test/scala/pl/touk/nussknacker/engine/api/deployment/cache/CachingProcessStateDeploymentManagerSpec.scala b/designer/deployment-manager-api/src/test/scala/pl/touk/nussknacker/engine/api/deployment/cache/CachingProcessStateDeploymentManagerSpec.scala
index 172d141e792..c24c358ff9e 100644
--- a/designer/deployment-manager-api/src/test/scala/pl/touk/nussknacker/engine/api/deployment/cache/CachingProcessStateDeploymentManagerSpec.scala
+++ b/designer/deployment-manager-api/src/test/scala/pl/touk/nussknacker/engine/api/deployment/cache/CachingProcessStateDeploymentManagerSpec.scala
@@ -30,7 +30,8 @@ class CachingProcessStateDeploymentManagerSpec
delegate,
10 seconds,
NoDeploymentSynchronisationSupport,
- NoStateQueryForAllScenariosSupport
+ NoStateQueryForAllScenariosSupport,
+ NoSchedulingSupport,
)
val results = List(
@@ -49,7 +50,8 @@ class CachingProcessStateDeploymentManagerSpec
delegate,
10 seconds,
NoDeploymentSynchronisationSupport,
- NoStateQueryForAllScenariosSupport
+ NoStateQueryForAllScenariosSupport,
+ NoSchedulingSupport,
)
val firstInvocation = cachingManager.getProcessStatesDeploymentIdNow(DataFreshnessPolicy.CanBeCached)
@@ -67,7 +69,8 @@ class CachingProcessStateDeploymentManagerSpec
delegate,
10 seconds,
NoDeploymentSynchronisationSupport,
- NoStateQueryForAllScenariosSupport
+ NoStateQueryForAllScenariosSupport,
+ NoSchedulingSupport,
)
val resultForFresh = cachingManager.getProcessStatesDeploymentIdNow(DataFreshnessPolicy.Fresh)
diff --git a/engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/common/V1_103__drop_buildinfo.sql b/designer/server/src/main/resources/db/batch_periodic/migration/common/V1_103__drop_buildinfo.sql
similarity index 100%
rename from engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/common/V1_103__drop_buildinfo.sql
rename to designer/server/src/main/resources/db/batch_periodic/migration/common/V1_103__drop_buildinfo.sql
diff --git a/engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/common/V1_105__add_indexes.sql b/designer/server/src/main/resources/db/batch_periodic/migration/common/V1_105__add_indexes.sql
similarity index 100%
rename from engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/common/V1_105__add_indexes.sql
rename to designer/server/src/main/resources/db/batch_periodic/migration/common/V1_105__add_indexes.sql
diff --git a/engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/common/V1_107__add_retry_at_and_retries_to_periodic_process_deployments.sql b/designer/server/src/main/resources/db/batch_periodic/migration/common/V1_107__add_retry_at_and_retries_to_periodic_process_deployments.sql
similarity index 100%
rename from engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/common/V1_107__add_retry_at_and_retries_to_periodic_process_deployments.sql
rename to designer/server/src/main/resources/db/batch_periodic/migration/common/V1_107__add_retry_at_and_retries_to_periodic_process_deployments.sql
diff --git a/engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/common/V1_109__add_action_id_to_periodic_processes.sql b/designer/server/src/main/resources/db/batch_periodic/migration/common/V1_109__add_action_id_to_periodic_processes.sql
similarity index 100%
rename from engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/common/V1_109__add_action_id_to_periodic_processes.sql
rename to designer/server/src/main/resources/db/batch_periodic/migration/common/V1_109__add_action_id_to_periodic_processes.sql
diff --git a/engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/hsql/V1_101__create_batch_periodic_tables.sql b/designer/server/src/main/resources/db/batch_periodic/migration/hsql/V1_101__create_batch_periodic_tables.sql
similarity index 100%
rename from engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/hsql/V1_101__create_batch_periodic_tables.sql
rename to designer/server/src/main/resources/db/batch_periodic/migration/hsql/V1_101__create_batch_periodic_tables.sql
diff --git a/engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/hsql/V1_102__add_program_args_and_jar_id.sql b/designer/server/src/main/resources/db/batch_periodic/migration/hsql/V1_102__add_program_args_and_jar_id.sql
similarity index 100%
rename from engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/hsql/V1_102__add_program_args_and_jar_id.sql
rename to designer/server/src/main/resources/db/batch_periodic/migration/hsql/V1_102__add_program_args_and_jar_id.sql
diff --git a/engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/hsql/V1_104__multiple_schedules.sql b/designer/server/src/main/resources/db/batch_periodic/migration/hsql/V1_104__multiple_schedules.sql
similarity index 100%
rename from engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/hsql/V1_104__multiple_schedules.sql
rename to designer/server/src/main/resources/db/batch_periodic/migration/hsql/V1_104__multiple_schedules.sql
diff --git a/engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/hsql/V1_106__rename_model_config.sql b/designer/server/src/main/resources/db/batch_periodic/migration/hsql/V1_106__rename_model_config.sql
similarity index 100%
rename from engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/hsql/V1_106__rename_model_config.sql
rename to designer/server/src/main/resources/db/batch_periodic/migration/hsql/V1_106__rename_model_config.sql
diff --git a/engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/hsql/V1_108__add_processing_type.sql b/designer/server/src/main/resources/db/batch_periodic/migration/hsql/V1_108__add_processing_type.sql
similarity index 100%
rename from engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/hsql/V1_108__add_processing_type.sql
rename to designer/server/src/main/resources/db/batch_periodic/migration/hsql/V1_108__add_processing_type.sql
diff --git a/engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/postgres/V1_101__create_batch_periodic_tables.sql b/designer/server/src/main/resources/db/batch_periodic/migration/postgres/V1_101__create_batch_periodic_tables.sql
similarity index 100%
rename from engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/postgres/V1_101__create_batch_periodic_tables.sql
rename to designer/server/src/main/resources/db/batch_periodic/migration/postgres/V1_101__create_batch_periodic_tables.sql
diff --git a/engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/postgres/V1_102__add_program_args_and_jar_id.sql b/designer/server/src/main/resources/db/batch_periodic/migration/postgres/V1_102__add_program_args_and_jar_id.sql
similarity index 100%
rename from engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/postgres/V1_102__add_program_args_and_jar_id.sql
rename to designer/server/src/main/resources/db/batch_periodic/migration/postgres/V1_102__add_program_args_and_jar_id.sql
diff --git a/engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/postgres/V1_104__multiple_schedules.sql b/designer/server/src/main/resources/db/batch_periodic/migration/postgres/V1_104__multiple_schedules.sql
similarity index 100%
rename from engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/postgres/V1_104__multiple_schedules.sql
rename to designer/server/src/main/resources/db/batch_periodic/migration/postgres/V1_104__multiple_schedules.sql
diff --git a/engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/postgres/V1_106__rename_model_config.sql b/designer/server/src/main/resources/db/batch_periodic/migration/postgres/V1_106__rename_model_config.sql
similarity index 100%
rename from engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/postgres/V1_106__rename_model_config.sql
rename to designer/server/src/main/resources/db/batch_periodic/migration/postgres/V1_106__rename_model_config.sql
diff --git a/engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/postgres/V1_108__add_processing_type.sql b/designer/server/src/main/resources/db/batch_periodic/migration/postgres/V1_108__add_processing_type.sql
similarity index 100%
rename from engine/flink/management/periodic/src/main/resources/db/batch_periodic/migration/postgres/V1_108__add_processing_type.sql
rename to designer/server/src/main/resources/db/batch_periodic/migration/postgres/V1_108__add_processing_type.sql
diff --git a/engine/flink/management/periodic/src/main/resources/web/static/assets/states/scheduled.svg b/designer/server/src/main/resources/web/static/assets/states/scheduled.svg
similarity index 100%
rename from engine/flink/management/periodic/src/main/resources/web/static/assets/states/scheduled.svg
rename to designer/server/src/main/resources/web/static/assets/states/scheduled.svg
diff --git a/engine/flink/management/periodic/src/main/resources/web/static/assets/states/wait-reschedule.svg b/designer/server/src/main/resources/web/static/assets/states/wait-reschedule.svg
similarity index 100%
rename from engine/flink/management/periodic/src/main/resources/web/static/assets/states/wait-reschedule.svg
rename to designer/server/src/main/resources/web/static/assets/states/wait-reschedule.svg
diff --git a/designer/server/src/main/scala/db/migration/V1_061__PeriodicDeploymentManagerTablesDefinition.scala b/designer/server/src/main/scala/db/migration/V1_061__PeriodicDeploymentManagerTablesDefinition.scala
new file mode 100644
index 00000000000..75279963168
--- /dev/null
+++ b/designer/server/src/main/scala/db/migration/V1_061__PeriodicDeploymentManagerTablesDefinition.scala
@@ -0,0 +1,153 @@
+package db.migration
+
+import com.typesafe.scalalogging.LazyLogging
+import db.migration.V1_061__PeriodicDeploymentManagerTablesDefinition.Definitions
+import pl.touk.nussknacker.ui.db.migration.SlickMigration
+import slick.jdbc.JdbcProfile
+import slick.lifted.ProvenShape
+import slick.sql.SqlProfile.ColumnOption.NotNull
+
+import java.time.LocalDateTime
+import java.util.UUID
+import scala.concurrent.ExecutionContext.Implicits.global
+
+trait V1_061__PeriodicDeploymentManagerTablesDefinition extends SlickMigration with LazyLogging {
+
+ import profile.api._
+
+ private val definitions = new Definitions(profile)
+
+ override def migrateActions: DBIOAction[Any, NoStream, Effect.All] = {
+ logger.info("Starting migration V1_061__PeriodicDeploymentManagerTablesDefinition")
+ for {
+ _ <- definitions.periodicProcessesTable.schema.create
+ _ <- definitions.periodicProcessDeploymentsTable.schema.create
+ } yield ()
+ }
+
+}
+
+object V1_061__PeriodicDeploymentManagerTablesDefinition {
+
+ class Definitions(val profile: JdbcProfile) {
+ import profile.api._
+
+ val periodicProcessDeploymentsTable = TableQuery[PeriodicProcessDeploymentsTable]
+
+ class PeriodicProcessDeploymentsTable(tag: Tag)
+ extends Table[PeriodicProcessDeploymentEntity](tag, "periodic_scenario_deployments") {
+
+ def id: Rep[Long] = column[Long]("id", O.PrimaryKey, O.AutoInc)
+
+ def periodicProcessId: Rep[Long] = column[Long]("periodic_process_id", NotNull)
+
+ def createdAt: Rep[LocalDateTime] = column[LocalDateTime]("created_at", NotNull)
+
+ def runAt: Rep[LocalDateTime] = column[LocalDateTime]("run_at", NotNull)
+
+ def scheduleName: Rep[Option[String]] = column[Option[String]]("schedule_name")
+
+ def deployedAt: Rep[Option[LocalDateTime]] = column[Option[LocalDateTime]]("deployed_at")
+
+ def completedAt: Rep[Option[LocalDateTime]] = column[Option[LocalDateTime]]("completed_at")
+
+ def retriesLeft: Rep[Int] = column[Int]("retries_left")
+
+ def nextRetryAt: Rep[Option[LocalDateTime]] = column[Option[LocalDateTime]]("next_retry_at")
+
+ def status: Rep[String] = column[String]("status", NotNull)
+
+ def periodicProcessIdIndex = index("periodic_scenario_deployments_periodic_process_id_idx", periodicProcessId)
+ def createdAtIndex = index("periodic_scenario_deployments_created_at_idx", createdAt)
+ def runAtIndex = index("periodic_scenario_deployments_run_at_idx", runAt)
+
+ override def * : ProvenShape[PeriodicProcessDeploymentEntity] = (
+ id,
+ periodicProcessId,
+ createdAt,
+ runAt,
+ scheduleName,
+ deployedAt,
+ completedAt,
+ retriesLeft,
+ nextRetryAt,
+ status
+ ) <>
+ ((PeriodicProcessDeploymentEntity.apply _).tupled, PeriodicProcessDeploymentEntity.unapply)
+
+ }
+
+ case class PeriodicProcessDeploymentEntity(
+ id: Long,
+ periodicProcessId: Long,
+ createdAt: LocalDateTime,
+ runAt: LocalDateTime,
+ scheduleName: Option[String],
+ deployedAt: Option[LocalDateTime],
+ completedAt: Option[LocalDateTime],
+ retriesLeft: Int,
+ nextRetryAt: Option[LocalDateTime],
+ status: String
+ )
+
+ val periodicProcessesTable = TableQuery[PeriodicProcessesTable]
+
+ class PeriodicProcessesTable(tag: Tag) extends Table[PeriodicProcessEntity](tag, "periodic_scenarios") {
+
+ def periodicProcessId: Rep[Long] = column[Long]("id", O.Unique, O.AutoInc)
+
+ def processId: Rep[Option[Long]] = column[Option[Long]]("process_id")
+
+ def processName: Rep[String] = column[String]("process_name", NotNull)
+
+ def processVersionId: Rep[Long] = column[Long]("process_version_id", NotNull)
+
+ def processingType: Rep[String] = column[String]("processing_type", NotNull)
+
+ def runtimeParams: Rep[String] = column[String]("runtime_params")
+
+ def scheduleProperty: Rep[String] = column[String]("schedule_property", NotNull)
+
+ def active: Rep[Boolean] = column[Boolean]("active", NotNull)
+
+ def createdAt: Rep[LocalDateTime] = column[LocalDateTime]("created_at", NotNull)
+
+ def processActionId: Rep[Option[UUID]] = column[Option[UUID]]("process_action_id")
+
+ def inputConfigDuringExecutionJson: Rep[String] = column[String]("input_config_during_execution", NotNull)
+
+ def processNameAndActiveIndex = index("periodic_scenarios_process_name_active_idx", (processName, active))
+
+ override def * : ProvenShape[PeriodicProcessEntity] = (
+ periodicProcessId,
+ processId,
+ processName,
+ processVersionId,
+ processingType,
+ runtimeParams,
+ scheduleProperty,
+ active,
+ createdAt,
+ processActionId,
+ inputConfigDuringExecutionJson,
+    ) <> ((PeriodicProcessEntity.apply _).tupled, PeriodicProcessEntity.unapply)
+
+ }
+
+ case class PeriodicProcessEntity(
+ id: Long,
+ processId: Option[Long],
+ processName: String,
+ processVersionId: Long,
+ processingType: String,
+ runtimeParams: String,
+ scheduleProperty: String,
+ active: Boolean,
+ createdAt: LocalDateTime,
+ processActionId: Option[UUID],
+ inputConfigDuringExecutionJson: String,
+ )
+
+ }
+
+}
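As a sanity check, the schema above can be created end-to-end by running the migration's actions against an in-memory database. A minimal sketch, assuming HSQLDB is on the classpath and that the hsql binding added below can be instantiated directly outside the regular migration runner (the JDBC URL and object name are illustrative):

import db.migration.hsql.V1_061__PeriodicDeploymentManagerTables
import slick.jdbc.HsqldbProfile.api._

import scala.concurrent.Await
import scala.concurrent.duration._

object V1_061MigrationSmokeTest extends App {
  // In-memory HSQLDB, so nothing is left on disk after the run.
  val database = Database.forURL("jdbc:hsqldb:mem:periodicMigration", driver = "org.hsqldb.jdbc.JDBCDriver")
  try {
    // Creates periodic_scenarios and periodic_scenario_deployments from the shared definitions.
    Await.result(database.run((new V1_061__PeriodicDeploymentManagerTables).migrateActions), 10.seconds)
  } finally database.close()
}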
diff --git a/designer/server/src/main/scala/db/migration/hsql/V1_061__PeriodicDeploymentManagerTables.scala b/designer/server/src/main/scala/db/migration/hsql/V1_061__PeriodicDeploymentManagerTables.scala
new file mode 100644
index 00000000000..350e8fdc9e0
--- /dev/null
+++ b/designer/server/src/main/scala/db/migration/hsql/V1_061__PeriodicDeploymentManagerTables.scala
@@ -0,0 +1,8 @@
+package db.migration.hsql
+
+import db.migration.V1_061__PeriodicDeploymentManagerTablesDefinition
+import slick.jdbc.{HsqldbProfile, JdbcProfile}
+
+class V1_061__PeriodicDeploymentManagerTables extends V1_061__PeriodicDeploymentManagerTablesDefinition {
+ override protected lazy val profile: JdbcProfile = HsqldbProfile
+}
diff --git a/designer/server/src/main/scala/db/migration/postgres/V1_061__PeriodicDeploymentManagerTables.scala b/designer/server/src/main/scala/db/migration/postgres/V1_061__PeriodicDeploymentManagerTables.scala
new file mode 100644
index 00000000000..97271414d93
--- /dev/null
+++ b/designer/server/src/main/scala/db/migration/postgres/V1_061__PeriodicDeploymentManagerTables.scala
@@ -0,0 +1,8 @@
+package db.migration.postgres
+
+import db.migration.V1_061__PeriodicDeploymentManagerTablesDefinition
+import slick.jdbc.{JdbcProfile, PostgresProfile}
+
+class V1_061__PeriodicDeploymentManagerTables extends V1_061__PeriodicDeploymentManagerTablesDefinition {
+ override protected lazy val profile: JdbcProfile = PostgresProfile
+}
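These two thin subclasses only bind the shared table definitions to a concrete Slick profile; the migration runner is expected to pick the hsql or postgres variant matching the designer's configured database.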
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/db/PeriodicProcessDeploymentsTable.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/db/entity/PeriodicProcessDeploymentsTable.scala
similarity index 84%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/db/PeriodicProcessDeploymentsTable.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/db/entity/PeriodicProcessDeploymentsTable.scala
index f9b1bacb69b..8c82157e36d 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/db/PeriodicProcessDeploymentsTable.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/db/entity/PeriodicProcessDeploymentsTable.scala
@@ -1,7 +1,7 @@
-package pl.touk.nussknacker.engine.management.periodic.db
+package pl.touk.nussknacker.ui.db.entity
-import pl.touk.nussknacker.engine.management.periodic.model.PeriodicProcessDeploymentStatus.PeriodicProcessDeploymentStatus
-import pl.touk.nussknacker.engine.management.periodic.model.{
+import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessDeploymentStatus.PeriodicProcessDeploymentStatus
+import pl.touk.nussknacker.ui.process.periodic.model.{
PeriodicProcessDeploymentId,
PeriodicProcessDeploymentStatus,
PeriodicProcessId
@@ -14,15 +14,16 @@ import java.time.LocalDateTime
trait PeriodicProcessDeploymentsTableFactory extends PeriodicProcessesTableFactory {
- protected val profile: JdbcProfile
-
import profile.api._
+ implicit val periodicProcessDeploymentIdMapping: BaseColumnType[PeriodicProcessDeploymentId] =
+ MappedColumnType.base[PeriodicProcessDeploymentId, Long](_.value, PeriodicProcessDeploymentId.apply)
+
implicit val periodicProcessDeploymentStatusColumnTyped: JdbcType[PeriodicProcessDeploymentStatus] =
MappedColumnType.base[PeriodicProcessDeploymentStatus, String](_.toString, PeriodicProcessDeploymentStatus.withName)
class PeriodicProcessDeploymentsTable(tag: Tag)
- extends Table[PeriodicProcessDeploymentEntity](tag, "periodic_process_deployments") {
+ extends Table[PeriodicProcessDeploymentEntity](tag, "periodic_scenario_deployments") {
def id: Rep[PeriodicProcessDeploymentId] = column[PeriodicProcessDeploymentId]("id", O.PrimaryKey, O.AutoInc)
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/db/entity/PeriodicProcessesTable.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/db/entity/PeriodicProcessesTable.scala
new file mode 100644
index 00000000000..f4d7c968a50
--- /dev/null
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/db/entity/PeriodicProcessesTable.scala
@@ -0,0 +1,160 @@
+package pl.touk.nussknacker.ui.db.entity
+
+import io.circe.Decoder
+import io.circe.syntax.EncoderOps
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.RuntimeParams
+import pl.touk.nussknacker.engine.api.deployment.ProcessActionId
+import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId}
+import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessId
+import slick.jdbc.JdbcProfile
+import slick.lifted.ProvenShape
+import slick.sql.SqlProfile.ColumnOption.NotNull
+
+import java.time.LocalDateTime
+import java.util.UUID
+
+trait PeriodicProcessesTableFactory extends BaseEntityFactory {
+
+ protected val profile: JdbcProfile
+
+ import profile.api._
+
+ implicit val periodicProcessIdMapping: BaseColumnType[PeriodicProcessId] =
+ MappedColumnType.base[PeriodicProcessId, Long](_.value, PeriodicProcessId.apply)
+
+ private implicit val processActionIdTypedType: BaseColumnType[ProcessActionId] =
+ MappedColumnType.base[ProcessActionId, UUID](
+ _.value,
+ ProcessActionId(_)
+ )
+
+ implicit val runtimeParamsTypedType: BaseColumnType[RuntimeParams] =
+ MappedColumnType.base[RuntimeParams, String](
+ _.params.asJson.noSpaces,
+ jsonStr =>
+ io.circe.parser.parse(jsonStr).flatMap(Decoder[Map[String, String]].decodeJson) match {
+ case Right(params) => RuntimeParams(params)
+ case Left(error) => throw error
+ }
+ )
+
+ abstract class PeriodicProcessesTable[ENTITY <: PeriodicProcessEntity](tag: Tag)
+ extends Table[ENTITY](tag, "periodic_scenarios") {
+
+ def id: Rep[PeriodicProcessId] = column[PeriodicProcessId]("id", O.PrimaryKey, O.AutoInc)
+
+ def processId: Rep[Option[ProcessId]] = column[Option[ProcessId]]("process_id")
+
+ def processName: Rep[ProcessName] = column[ProcessName]("process_name", NotNull)
+
+ def processVersionId: Rep[VersionId] = column[VersionId]("process_version_id", NotNull)
+
+ def processingType: Rep[String] = column[String]("processing_type", NotNull)
+
+ def runtimeParams: Rep[RuntimeParams] = column[RuntimeParams]("runtime_params")
+
+ def scheduleProperty: Rep[String] = column[String]("schedule_property", NotNull)
+
+ def active: Rep[Boolean] = column[Boolean]("active", NotNull)
+
+ def createdAt: Rep[LocalDateTime] = column[LocalDateTime]("created_at", NotNull)
+
+ def processActionId: Rep[Option[ProcessActionId]] = column[Option[ProcessActionId]]("process_action_id")
+
+ }
+
+ class PeriodicProcessesWithInputConfigJsonTable(tag: Tag)
+ extends PeriodicProcessesTable[PeriodicProcessEntityWithInputConfigJson](tag) {
+
+ def inputConfigDuringExecutionJson: Rep[String] = column[String]("input_config_during_execution", NotNull)
+
+ override def * : ProvenShape[PeriodicProcessEntityWithInputConfigJson] = (
+ id,
+ processId,
+ processName,
+ processVersionId,
+ processingType,
+ runtimeParams,
+ scheduleProperty,
+ active,
+ createdAt,
+ processActionId,
+ inputConfigDuringExecutionJson,
+    ) <> ((PeriodicProcessEntityWithInputConfigJson.apply _).tupled, PeriodicProcessEntityWithInputConfigJson.unapply)
+
+ }
+
+ class PeriodicProcessesWithoutInputConfigJsonTable(tag: Tag)
+ extends PeriodicProcessesTable[PeriodicProcessEntityWithoutInputConfigJson](tag) {
+
+ override def * : ProvenShape[PeriodicProcessEntityWithoutInputConfigJson] = (
+ id,
+ processId,
+ processName,
+ processVersionId,
+ processingType,
+ runtimeParams,
+ scheduleProperty,
+ active,
+ createdAt,
+ processActionId
+    ) <> ((PeriodicProcessEntityWithoutInputConfigJson.apply _).tupled, PeriodicProcessEntityWithoutInputConfigJson.unapply)
+
+ }
+
+ object PeriodicProcessesWithoutInputConfig extends TableQuery(new PeriodicProcessesWithoutInputConfigJsonTable(_))
+
+ object PeriodicProcessesWithInputConfig extends TableQuery(new PeriodicProcessesWithInputConfigJsonTable(_))
+
+}
+
+trait PeriodicProcessEntity {
+
+ def id: PeriodicProcessId
+
+ def processId: Option[ProcessId]
+
+ def processName: ProcessName
+
+ def processVersionId: VersionId
+
+ def processingType: String
+
+ def runtimeParams: RuntimeParams
+
+ def scheduleProperty: String
+
+ def active: Boolean
+
+ def createdAt: LocalDateTime
+
+ def processActionId: Option[ProcessActionId]
+
+}
+
+case class PeriodicProcessEntityWithInputConfigJson(
+ id: PeriodicProcessId,
+ processId: Option[ProcessId],
+ processName: ProcessName,
+ processVersionId: VersionId,
+ processingType: String,
+ runtimeParams: RuntimeParams,
+ scheduleProperty: String,
+ active: Boolean,
+ createdAt: LocalDateTime,
+ processActionId: Option[ProcessActionId],
+ inputConfigDuringExecutionJson: String,
+) extends PeriodicProcessEntity
+
+case class PeriodicProcessEntityWithoutInputConfigJson(
+ id: PeriodicProcessId,
+ processId: Option[ProcessId],
+ processName: ProcessName,
+ processVersionId: VersionId,
+ processingType: String,
+ runtimeParams: RuntimeParams,
+ scheduleProperty: String,
+ active: Boolean,
+ createdAt: LocalDateTime,
+ processActionId: Option[ProcessActionId]
+) extends PeriodicProcessEntity
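The runtimeParams column mapping above stores the parameter map as compact JSON and fails loudly on malformed rows. A round-trip sketch of just that mapping, with a stand-in RuntimeParams case class in place of the engine API type:

import io.circe.Decoder
import io.circe.syntax.EncoderOps

object RuntimeParamsRoundTrip extends App {
  // Stand-in for the engine API class; only the Map-wrapper shape matters here.
  final case class RuntimeParams(params: Map[String, String])

  val stored: String = RuntimeParams(Map("jarFileName" -> "model.jar")).params.asJson.noSpaces
  // stored == {"jarFileName":"model.jar"}

  val restored = io.circe.parser.parse(stored).flatMap(Decoder[Map[String, String]].decodeJson) match {
    case Right(params) => RuntimeParams(params)
    case Left(error)   => throw error // the column mapper above fails the same way
  }
  assert(restored == RuntimeParams(Map("jarFileName" -> "model.jar")))
}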
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/definition/DefinitionsService.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/definition/DefinitionsService.scala
index 92c5d4b8b4b..2f8d59055ab 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/definition/DefinitionsService.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/definition/DefinitionsService.scala
@@ -10,7 +10,7 @@ import pl.touk.nussknacker.engine.definition.component.{ComponentStaticDefinitio
import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap
import pl.touk.nussknacker.engine.ModelData
import pl.touk.nussknacker.engine.api.TemplateEvaluationResult
-import pl.touk.nussknacker.engine.api.typed.typing.{Typed, TypingResult}
+import pl.touk.nussknacker.engine.api.typed.typing.{Typed, TypedClass, TypingResult}
import pl.touk.nussknacker.restmodel.definition._
import pl.touk.nussknacker.ui.definition.DefinitionsService.{
ComponentUiConfigMode,
@@ -106,7 +106,13 @@ class DefinitionsService(
UIDefinitions(
componentGroups = ComponentGroupsPreparer.prepareComponentGroups(components),
components = components.map(component => component.component.id -> createUIComponentDefinition(component)).toMap,
- classes = modelData.modelDefinitionWithClasses.classDefinitions.all.toList.map(_.clazzName),
+ classes = modelData.modelDefinitionWithClasses.classDefinitions.all.toList
+ .map(_.clazzName)
+ .filter {
+ case t: TypedClass if t.klass.isArray => false
+ case _ => true
+ }
+ .sortBy(_.display.toLowerCase),
scenarioProperties = {
if (forFragment) {
createUIProperties(FragmentPropertiesConfig.properties ++ fragmentPropertiesConfig, fragmentPropertiesDocsUrl)
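To make the intent of this change concrete: array types are dropped from the class list and the remaining entries are sorted case-insensitively by display name. A behavior sketch on plain strings standing in for the TypingResult display names:

object ClassListSketch extends App {
  val displayNames = List("String", "Integer[]", "BigDecimal", "aCustomType")
  // Mirrors the filter on t.klass.isArray plus the sortBy(_.display.toLowerCase) above.
  val visible = displayNames.filterNot(_.endsWith("[]")).sortBy(_.toLowerCase)
  println(visible) // List(aCustomType, BigDecimal, String)
}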
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/factory/NussknackerAppFactory.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/factory/NussknackerAppFactory.scala
index a340b48ba6c..44787d0477d 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/factory/NussknackerAppFactory.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/factory/NussknackerAppFactory.scala
@@ -7,9 +7,10 @@ import cats.effect.{IO, Resource}
import com.typesafe.scalalogging.LazyLogging
import io.dropwizard.metrics5.MetricRegistry
import io.dropwizard.metrics5.jmx.JmxReporter
-import pl.touk.nussknacker.engine.ConfigWithUnresolvedVersion
+import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap
import pl.touk.nussknacker.engine.util.loader.ScalaServiceLoader
import pl.touk.nussknacker.engine.util.{JavaClassVersionChecker, SLF4JBridgeHandlerRegistrar}
+import pl.touk.nussknacker.engine.{ConfigWithUnresolvedVersion, ProcessingTypeConfig}
import pl.touk.nussknacker.ui.config.{DesignerConfig, DesignerConfigLoader}
import pl.touk.nussknacker.ui.configloader.{ProcessingTypeConfigsLoader, ProcessingTypeConfigsLoaderFactory}
import pl.touk.nussknacker.ui.db.DbRef
@@ -18,6 +19,7 @@ import pl.touk.nussknacker.ui.process.processingtype.loader.{
ProcessingTypeDataLoader,
ProcessingTypesConfigBasedProcessingTypeDataLoader
}
+import pl.touk.nussknacker.ui.process.processingtype.{ModelClassLoaderDependencies, ModelClassLoaderProvider}
import pl.touk.nussknacker.ui.server.{AkkaHttpBasedRouteProvider, NussknackerHttpServer}
import pl.touk.nussknacker.ui.util.{ActorSystemBasedExecutionContextWithIORuntime, IOToFutureSttpBackendConverter}
import sttp.client3.SttpBackend
@@ -40,6 +42,9 @@ class NussknackerAppFactory(
designerConfig,
ioSttpBackend
)(executionContextWithIORuntime.ioRuntime)
+ modelClassLoaderProvider = createModelClassLoaderProvider(
+ designerConfig.processingTypeConfigs.configByProcessingType
+ )
processingTypeDataLoader = createProcessingTypeDataLoader(processingTypeConfigsLoader)
materializer = Materializer(system)
_ <- Resource.eval(IO(JavaClassVersionChecker.check()))
@@ -54,7 +59,8 @@ class NussknackerAppFactory(
IOToFutureSttpBackendConverter.convert(ioSttpBackend)(executionContextWithIORuntime),
processingTypeDataLoader,
feStatisticsRepository,
- clock
+ clock,
+ modelClassLoaderProvider
)(
system,
materializer,
@@ -116,6 +122,15 @@ class NussknackerAppFactory(
)
}
+ private def createModelClassLoaderProvider(
+ processingTypeConfigs: Map[String, ProcessingTypeConfig]
+ ): ModelClassLoaderProvider = {
+ val defaultWorkingDirOpt = None
+ ModelClassLoaderProvider(
+ processingTypeConfigs.mapValuesNow(c => ModelClassLoaderDependencies(c.classPath, defaultWorkingDirOpt))
+ )
+ }
+
}
object NussknackerAppFactory {
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/DefaultAdditionalDeploymentDataProvider.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/DefaultAdditionalDeploymentDataProvider.scala
new file mode 100644
index 00000000000..c82cd46b59c
--- /dev/null
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/DefaultAdditionalDeploymentDataProvider.scala
@@ -0,0 +1,18 @@
+package pl.touk.nussknacker.ui.process.periodic
+
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.ScheduledDeploymentDetails
+import pl.touk.nussknacker.engine.api.deployment.scheduler.services.AdditionalDeploymentDataProvider
+
+import java.time.format.DateTimeFormatter
+
+object DefaultAdditionalDeploymentDataProvider extends AdditionalDeploymentDataProvider {
+
+ override def prepareAdditionalData(runDetails: ScheduledDeploymentDetails): Map[String, String] = {
+ Map(
+ "deploymentId" -> runDetails.id.toString,
+ "runAt" -> runDetails.runAt.format(DateTimeFormatter.ISO_LOCAL_DATE_TIME),
+ "scheduleName" -> runDetails.scheduleName.getOrElse("")
+ )
+ }
+
+}
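For illustration, a scheduled run with id 42, runAt 2025-01-01T02:00 and schedule name "nightly" (assumed values) yields the map below; note that ISO_LOCAL_DATE_TIME omits the seconds when they are zero:

Map(
  "deploymentId" -> "42",
  "runAt"        -> "2025-01-01T02:00",
  "scheduleName" -> "nightly"
)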
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/DeploymentActor.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/DeploymentActor.scala
similarity index 70%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/DeploymentActor.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/DeploymentActor.scala
index 1beb82e32ac..d8361fbbb9e 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/DeploymentActor.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/DeploymentActor.scala
@@ -1,15 +1,9 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic
import akka.actor.{Actor, Props, Timers}
import com.typesafe.scalalogging.LazyLogging
-import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.DeploymentActor.{
- CheckToBeDeployed,
- DeploymentCompleted,
- WaitingForDeployment
-}
-import pl.touk.nussknacker.engine.management.periodic.model.DeploymentWithJarData.WithCanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.model.PeriodicProcessDeployment
+import pl.touk.nussknacker.ui.process.periodic.DeploymentActor._
+import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessDeployment
import scala.concurrent.Future
import scala.concurrent.duration._
@@ -22,8 +16,8 @@ object DeploymentActor {
}
private[periodic] def props(
- findToBeDeployed: => Future[Seq[PeriodicProcessDeployment[WithCanonicalProcess]]],
- deploy: PeriodicProcessDeployment[WithCanonicalProcess] => Future[Unit],
+ findToBeDeployed: => Future[Seq[PeriodicProcessDeployment]],
+ deploy: PeriodicProcessDeployment => Future[Unit],
interval: FiniteDuration
) = {
Props(new DeploymentActor(findToBeDeployed, deploy, interval))
@@ -31,14 +25,14 @@ object DeploymentActor {
private[periodic] case object CheckToBeDeployed
- private case class WaitingForDeployment(ids: List[PeriodicProcessDeployment[WithCanonicalProcess]])
+ private case class WaitingForDeployment(ids: List[PeriodicProcessDeployment])
private case object DeploymentCompleted
}
class DeploymentActor(
- findToBeDeployed: => Future[Seq[PeriodicProcessDeployment[WithCanonicalProcess]]],
- deploy: PeriodicProcessDeployment[WithCanonicalProcess] => Future[Unit],
+ findToBeDeployed: => Future[Seq[PeriodicProcessDeployment]],
+ deploy: PeriodicProcessDeployment => Future[Unit],
interval: FiniteDuration
) extends Actor
with Timers
@@ -74,7 +68,9 @@ class DeploymentActor(
}
}
- private def receiveOngoingDeployment(runDetails: PeriodicProcessDeployment[WithCanonicalProcess]): Receive = {
+ private def receiveOngoingDeployment(
+ runDetails: PeriodicProcessDeployment
+ ): Receive = {
case CheckToBeDeployed =>
logger.debug(s"Still waiting for ${runDetails.display} to be deployed")
case DeploymentCompleted =>
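After the move the actor is engine-agnostic: it polls findToBeDeployed once per interval and handles one deployment at a time (further CheckToBeDeployed ticks only log while a deployment is ongoing, until DeploymentCompleted arrives). PeriodicDeploymentManager wires it up through the props(service, interval) overload shown below.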
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicDeploymentManager.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicDeploymentManager.scala
similarity index 81%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicDeploymentManager.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicDeploymentManager.scala
index d5bac21c6a1..5242de64a68 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicDeploymentManager.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicDeploymentManager.scala
@@ -1,29 +1,18 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic
import cats.data.OptionT
import com.typesafe.config.Config
import com.typesafe.scalalogging.LazyLogging
+import pl.touk.nussknacker.engine.DeploymentManagerDependencies
import pl.touk.nussknacker.engine.api.deployment._
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.{ScheduleProperty => ApiScheduleProperty}
+import pl.touk.nussknacker.engine.api.deployment.scheduler.services._
import pl.touk.nussknacker.engine.api.process.{ProcessIdWithName, ProcessName, VersionId}
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
import pl.touk.nussknacker.engine.deployment.ExternalDeploymentId
-import pl.touk.nussknacker.engine.management.FlinkConfig
-import pl.touk.nussknacker.engine.management.periodic.PeriodicProcessService.PeriodicProcessStatus
-import pl.touk.nussknacker.engine.management.periodic.Utils.{createActorWithRetry, runSafely}
-import pl.touk.nussknacker.engine.management.periodic.db.{
- DbInitializer,
- PeriodicProcessesRepository,
- SlickPeriodicProcessesRepository
-}
-import pl.touk.nussknacker.engine.management.periodic.flink.FlinkJarManager
-import pl.touk.nussknacker.engine.management.periodic.service.{
- AdditionalDeploymentDataProvider,
- PeriodicProcessListenerFactory,
- ProcessConfigEnricherFactory
-}
-import pl.touk.nussknacker.engine.{BaseModelData, DeploymentManagerDependencies}
-import slick.jdbc
-import slick.jdbc.JdbcProfile
+import pl.touk.nussknacker.ui.process.periodic.PeriodicProcessService.PeriodicProcessStatus
+import pl.touk.nussknacker.ui.process.periodic.Utils._
+import pl.touk.nussknacker.ui.process.repository.PeriodicProcessesRepository
import java.time.{Clock, Instant}
import scala.concurrent.{ExecutionContext, Future}
@@ -32,51 +21,46 @@ object PeriodicDeploymentManager {
def apply(
delegate: DeploymentManager,
+ scheduledExecutionPerformer: ScheduledExecutionPerformer,
schedulePropertyExtractorFactory: SchedulePropertyExtractorFactory,
processConfigEnricherFactory: ProcessConfigEnricherFactory,
- periodicBatchConfig: PeriodicBatchConfig,
- flinkConfig: FlinkConfig,
+ schedulingConfig: SchedulingConfig,
originalConfig: Config,
- modelData: BaseModelData,
- listenerFactory: PeriodicProcessListenerFactory,
+ listenerFactory: ScheduledProcessListenerFactory,
additionalDeploymentDataProvider: AdditionalDeploymentDataProvider,
- dependencies: DeploymentManagerDependencies
+ dependencies: DeploymentManagerDependencies,
+ periodicProcessesRepository: PeriodicProcessesRepository,
): PeriodicDeploymentManager = {
import dependencies._
- val clock = Clock.systemDefaultZone()
-
- val (db: jdbc.JdbcBackend.DatabaseDef, dbProfile: JdbcProfile) = DbInitializer.init(periodicBatchConfig.db)
- val scheduledProcessesRepository =
- new SlickPeriodicProcessesRepository(db, dbProfile, clock, periodicBatchConfig.processingType)
- val jarManager = FlinkJarManager(flinkConfig, periodicBatchConfig, modelData)
+ val clock = Clock.systemDefaultZone()
val listener = listenerFactory.create(originalConfig)
val processConfigEnricher = processConfigEnricherFactory(originalConfig)
val service = new PeriodicProcessService(
delegate,
- jarManager,
- scheduledProcessesRepository,
+ scheduledExecutionPerformer,
+ periodicProcessesRepository,
listener,
additionalDeploymentDataProvider,
- periodicBatchConfig.deploymentRetry,
- periodicBatchConfig.executionConfig,
- periodicBatchConfig.maxFetchedPeriodicScenarioActivities,
+ schedulingConfig.deploymentRetry,
+ schedulingConfig.executionConfig,
+ schedulingConfig.maxFetchedPeriodicScenarioActivities,
processConfigEnricher,
clock,
dependencies.actionService,
- dependencies.configsFromProvider
+ dependencies.configsFromProvider,
)
    // These actors have to be created with retries because their creation can initially fail due to already taken names,
    // if actors with the same names, created before a reload, haven't been fully stopped (and their names freed) yet
val deploymentActor = createActorWithRetry(
- s"periodic-${periodicBatchConfig.processingType}-deployer",
- DeploymentActor.props(service, periodicBatchConfig.deployInterval),
+ s"periodic-${schedulingConfig.processingType}-deployer",
+ DeploymentActor.props(service, schedulingConfig.deployInterval),
dependencies.actorSystem
)
val rescheduleFinishedActor = createActorWithRetry(
- s"periodic-${periodicBatchConfig.processingType}-rescheduler",
- RescheduleFinishedActor.props(service, periodicBatchConfig.rescheduleCheckInterval),
+ s"periodic-${schedulingConfig.processingType}-rescheduler",
+ RescheduleFinishedActor.props(service, schedulingConfig.rescheduleCheckInterval),
dependencies.actorSystem
)
@@ -86,12 +70,11 @@ object PeriodicDeploymentManager {
// they don't have any internal state, so stopping them non-gracefully is safe
runSafely(dependencies.actorSystem.stop(deploymentActor))
runSafely(dependencies.actorSystem.stop(rescheduleFinishedActor))
- runSafely(db.close())
}
new PeriodicDeploymentManager(
delegate,
service,
- scheduledProcessesRepository,
+ periodicProcessesRepository,
schedulePropertyExtractorFactory(originalConfig),
toClose
)
@@ -102,7 +85,7 @@ object PeriodicDeploymentManager {
class PeriodicDeploymentManager private[periodic] (
val delegate: DeploymentManager,
service: PeriodicProcessService,
- repository: PeriodicProcessesRepository,
+ periodicProcessesRepository: PeriodicProcessesRepository,
schedulePropertyExtractor: SchedulePropertyExtractor,
toClose: () => Unit
)(implicit val ec: ExecutionContext)
@@ -110,7 +93,7 @@ class PeriodicDeploymentManager private[periodic] (
with ManagerSpecificScenarioActivitiesStoredByManager
with LazyLogging {
- import repository._
+ import periodicProcessesRepository._
override def processCommand[Result](command: DMScenarioCommand[Result]): Future[Result] =
command match {
@@ -158,12 +141,27 @@ class PeriodicDeploymentManager private[periodic] (
private def extractScheduleProperty(canonicalProcess: CanonicalProcess): Future[ScheduleProperty] = {
schedulePropertyExtractor(canonicalProcess) match {
case Right(scheduleProperty) =>
- Future.successful(scheduleProperty)
+ Future.successful(toDomain(scheduleProperty))
case Left(error) =>
Future.failed(new PeriodicProcessException(error))
}
}
+ private def toDomain(
+ apiScheduleProperty: ApiScheduleProperty,
+ ): ScheduleProperty = apiScheduleProperty match {
+ case property: ApiScheduleProperty.SingleScheduleProperty =>
+ toDomain(property)
+ case ApiScheduleProperty.MultipleScheduleProperty(schedules) =>
+ MultipleScheduleProperty(schedules.map { case (k, v) => (k, toDomain(v)) })
+ }
+
+ private def toDomain(
+ apiSingleScheduleProperty: ApiScheduleProperty.SingleScheduleProperty
+ ): SingleScheduleProperty = apiSingleScheduleProperty match {
+ case ApiScheduleProperty.CronScheduleProperty(labelOrCronExpr) => CronScheduleProperty(labelOrCronExpr)
+ }
+
private def stopScenario(command: DMStopScenarioCommand): Future[SavepointResult] = {
import command._
service.deactivate(scenarioName).flatMap { deploymentIdsToStop =>
@@ -282,9 +280,10 @@ class PeriodicDeploymentManager private[periodic] (
.map(_.groupedByPeriodicProcess.headOption.flatMap(_.deployments.headOption))
)
processDeploymentWithProcessJson <- OptionT.liftF(
- repository.findProcessData(processDeployment.id).run
+ periodicProcessesRepository.findProcessData(processDeployment.id).run
)
_ <- OptionT.liftF(service.deploy(processDeploymentWithProcessJson))
} yield ()
+ override def schedulingSupport: SchedulingSupport = NoSchedulingSupport
}
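The two private toDomain helpers translate the scheduler API's property type into the domain model field by field. A shape sketch (the cron expression and schedule name are illustrative):

// ApiScheduleProperty.MultipleScheduleProperty(
//   Map("nightly" -> ApiScheduleProperty.CronScheduleProperty("0 0 2 * * ?"))
// )
// maps to
// MultipleScheduleProperty(Map("nightly" -> CronScheduleProperty("0 0 2 * * ?")))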
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicDeploymentManagerDecorator.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicDeploymentManagerDecorator.scala
new file mode 100644
index 00000000000..e4b36398383
--- /dev/null
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicDeploymentManagerDecorator.scala
@@ -0,0 +1,115 @@
+package pl.touk.nussknacker.ui.process.periodic
+
+import com.typesafe.config.Config
+import com.typesafe.scalalogging.LazyLogging
+import pl.touk.nussknacker.engine.api.component.ScenarioPropertyConfig
+import pl.touk.nussknacker.engine.api.definition.{MandatoryParameterValidator, StringParameterEditor}
+import pl.touk.nussknacker.engine.api.deployment.scheduler.services.{
+ EmptyScheduledProcessListenerFactory,
+ ProcessConfigEnricherFactory,
+ SchedulePropertyExtractorFactory
+}
+import pl.touk.nussknacker.engine.api.deployment.{DeploymentManager, SchedulingSupported}
+import pl.touk.nussknacker.engine.{DeploymentManagerDependencies, ModelData}
+import pl.touk.nussknacker.ui.db.DbRef
+import pl.touk.nussknacker.ui.process.periodic.cron.{CronParameterValidator, CronSchedulePropertyExtractor}
+import pl.touk.nussknacker.ui.process.periodic.legacy.db.{LegacyDbInitializer, SlickLegacyPeriodicProcessesRepository}
+import pl.touk.nussknacker.ui.process.repository.{
+ DBFetchingProcessRepository,
+ DbScenarioActionReadOnlyRepository,
+ ScenarioLabelsRepository,
+ SlickPeriodicProcessesRepository
+}
+import slick.jdbc
+import slick.jdbc.JdbcProfile
+
+import java.time.Clock
+
+object PeriodicDeploymentManagerDecorator extends LazyLogging {
+
+ def decorate(
+ underlying: DeploymentManager,
+ schedulingSupported: SchedulingSupported,
+ modelData: ModelData,
+ deploymentConfig: Config,
+ dependencies: DeploymentManagerDependencies,
+ dbRef: DbRef,
+ ): DeploymentManager = {
+ logger.info("Decorating DM with periodic functionality")
+ import dependencies._
+ import net.ceedubs.ficus.Ficus._
+ import net.ceedubs.ficus.readers.ArbitraryTypeReader._
+
+ val clock = Clock.systemDefaultZone()
+
+    val schedulingConfig = deploymentConfig.as[SchedulingConfig]("scheduling")
+
+ val schedulePropertyExtractorFactory: SchedulePropertyExtractorFactory =
+ schedulingSupported.customSchedulePropertyExtractorFactory
+ .getOrElse(_ => CronSchedulePropertyExtractor())
+
+ val processConfigEnricherFactory =
+ schedulingSupported.customProcessConfigEnricherFactory
+ .getOrElse(ProcessConfigEnricherFactory.noOp)
+
+ val periodicProcessListenerFactory =
+ schedulingSupported.customScheduledProcessListenerFactory
+ .getOrElse(EmptyScheduledProcessListenerFactory)
+
+ val additionalDeploymentDataProvider =
+ schedulingSupported.customAdditionalDeploymentDataProvider
+ .getOrElse(DefaultAdditionalDeploymentDataProvider)
+
+ val actionRepository =
+ DbScenarioActionReadOnlyRepository.create(dbRef)
+ val scenarioLabelsRepository =
+ new ScenarioLabelsRepository(dbRef)
+ val fetchingProcessRepository =
+ DBFetchingProcessRepository.createFutureRepository(dbRef, actionRepository, scenarioLabelsRepository)
+
+ val periodicProcessesRepository = schedulingConfig.legacyDb match {
+ case None =>
+ new SlickPeriodicProcessesRepository(
+ schedulingConfig.processingType,
+ dbRef.db,
+ dbRef.profile,
+ clock,
+ fetchingProcessRepository
+ )
+ case Some(customDbConfig) =>
+ val (db: jdbc.JdbcBackend.DatabaseDef, dbProfile: JdbcProfile) = LegacyDbInitializer.init(customDbConfig)
+ new SlickLegacyPeriodicProcessesRepository(
+ schedulingConfig.processingType,
+ db,
+ dbProfile,
+ clock,
+ fetchingProcessRepository
+ )
+ }
+
+ PeriodicDeploymentManager(
+ delegate = underlying,
+ dependencies = dependencies,
+ periodicProcessesRepository = periodicProcessesRepository,
+ scheduledExecutionPerformer =
+ schedulingSupported.createScheduledExecutionPerformer(modelData, dependencies, deploymentConfig),
+ schedulePropertyExtractorFactory = schedulePropertyExtractorFactory,
+ processConfigEnricherFactory = processConfigEnricherFactory,
+ listenerFactory = periodicProcessListenerFactory,
+ schedulingConfig = schedulingConfig,
+ originalConfig = deploymentConfig,
+ additionalDeploymentDataProvider = additionalDeploymentDataProvider,
+ )
+ }
+
+ def additionalScenarioProperties: Map[String, ScenarioPropertyConfig] = Map(cronConfig)
+
+ private val cronConfig = CronSchedulePropertyExtractor.CronPropertyDefaultName -> ScenarioPropertyConfig(
+ defaultValue = None,
+ editor = Some(StringParameterEditor),
+ validators = Some(List(MandatoryParameterValidator, CronParameterValidator)),
+ label = Some("Schedule"),
+ hintText = Some("Quartz cron syntax. You can specify multiple schedulers separated by '|'.")
+ )
+
+}
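With the '|' separator from the hint text, a single property value such as 0 0 6 * * ?|0 0 18 * * ? defines two Quartz cron schedules (daily at 06:00 and 18:00) for one scenario.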
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessException.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicProcessException.scala
similarity index 74%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessException.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicProcessException.scala
index 72a267b3317..53257f71403 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessException.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicProcessException.scala
@@ -1,4 +1,4 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic
class PeriodicProcessException(message: String, parent: Throwable) extends RuntimeException(message, parent) {
def this(message: String) = this(message, null)
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessService.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicProcessService.scala
similarity index 74%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessService.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicProcessService.scala
index 43c41264a8f..9569e7d4ade 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessService.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicProcessService.scala
@@ -1,4 +1,4 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic
import cats.implicits._
import com.typesafe.scalalogging.LazyLogging
@@ -10,29 +10,21 @@ import pl.touk.nussknacker.engine.api.component.{
}
import pl.touk.nussknacker.engine.api.deployment.StateStatus.StatusName
import pl.touk.nussknacker.engine.api.deployment._
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.{ScheduleProperty => _, _}
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.{ScheduleProperty => ApiScheduleProperty}
+import pl.touk.nussknacker.engine.api.deployment.scheduler.services._
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus.ProblemStateStatus
-import pl.touk.nussknacker.engine.api.process.{ProcessIdWithName, ProcessName}
+import pl.touk.nussknacker.engine.api.process.{ProcessIdWithName, ProcessName, VersionId}
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
import pl.touk.nussknacker.engine.deployment.{AdditionalModelConfigs, DeploymentData, DeploymentId}
-import pl.touk.nussknacker.engine.management.periodic.PeriodicProcessService.{
- DeploymentStatus,
- EngineStatusesToReschedule,
- FinishedScheduledExecutionMetadata,
- MaxDeploymentsStatus,
- PeriodicProcessStatus
-}
-import pl.touk.nussknacker.engine.management.periodic.PeriodicStateStatus.{ScheduledStatus, WaitingForScheduleStatus}
-import pl.touk.nussknacker.engine.management.periodic.db.PeriodicProcessesRepository
-import pl.touk.nussknacker.engine.management.periodic.model.DeploymentWithJarData.{
- WithCanonicalProcess,
- WithoutCanonicalProcess
-}
-import pl.touk.nussknacker.engine.management.periodic.model.PeriodicProcessDeploymentStatus.PeriodicProcessDeploymentStatus
-import pl.touk.nussknacker.engine.management.periodic.model._
-import pl.touk.nussknacker.engine.management.periodic.service._
-import pl.touk.nussknacker.engine.management.periodic.util.DeterministicUUIDFromLong
import pl.touk.nussknacker.engine.util.AdditionalComponentConfigsForRuntimeExtractor
+import pl.touk.nussknacker.ui.process.periodic.PeriodicProcessService._
+import pl.touk.nussknacker.ui.process.periodic.PeriodicStateStatus._
+import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessDeploymentStatus.PeriodicProcessDeploymentStatus
+import pl.touk.nussknacker.ui.process.periodic.model._
+import pl.touk.nussknacker.ui.process.periodic.utils.DeterministicUUIDFromLong
+import pl.touk.nussknacker.ui.process.repository.PeriodicProcessesRepository
import java.time.chrono.ChronoLocalDateTime
import java.time.temporal.ChronoUnit
@@ -42,9 +34,9 @@ import scala.util.control.NonFatal
class PeriodicProcessService(
delegateDeploymentManager: DeploymentManager,
- jarManager: JarManager,
- scheduledProcessesRepository: PeriodicProcessesRepository,
- periodicProcessListener: PeriodicProcessListener,
+ scheduledExecutionPerformer: ScheduledExecutionPerformer,
+ periodicProcessesRepository: PeriodicProcessesRepository,
+ periodicProcessListener: ScheduledProcessListener,
additionalDeploymentDataProvider: AdditionalDeploymentDataProvider,
deploymentRetryConfig: DeploymentRetryConfig,
executionConfig: PeriodicExecutionConfig,
@@ -52,19 +44,19 @@ class PeriodicProcessService(
processConfigEnricher: ProcessConfigEnricher,
clock: Clock,
actionService: ProcessingTypeActionService,
- configsFromProvider: Map[DesignerWideComponentId, ComponentAdditionalConfig]
+ configsFromProvider: Map[DesignerWideComponentId, ComponentAdditionalConfig],
)(implicit ec: ExecutionContext)
extends LazyLogging {
import cats.syntax.all._
- import scheduledProcessesRepository._
- private type RepositoryAction[T] = scheduledProcessesRepository.Action[T]
- private type Callback = () => Future[Unit]
- private type NeedsReschedule = Boolean
+ import periodicProcessesRepository._
+
+ private type Callback = () => Future[Unit]
+ private type NeedsReschedule = Boolean
- private implicit class WithCallbacksSeq(result: RepositoryAction[List[Callback]]) {
+ private implicit class WithCallbacksSeq(result: Future[List[Callback]]) {
def runWithCallbacks: Future[Unit] =
- result.run.flatMap(callbacks => Future.sequence(callbacks.map(_()))).map(_ => ())
+ result.flatMap(callbacks => Future.sequence(callbacks.map(_()))).map(_ => ())
}
private val emptyCallback: Callback = () => Future.successful(())
@@ -75,8 +67,11 @@ class PeriodicProcessService(
processIdWithName: ProcessIdWithName,
after: Option[Instant],
): Future[List[ScenarioActivity]] = for {
- schedulesState <- scheduledProcessesRepository
- .getSchedulesState(processIdWithName.name, after.map(localDateTimeAtSystemDefaultZone))
+ schedulesState <- periodicProcessesRepository
+ .getSchedulesState(
+ processIdWithName.name,
+ after.map(localDateTimeAtSystemDefaultZone)
+ )
.run
groupedByProcess = schedulesState.groupedByPeriodicProcess
deployments = groupedByProcess.flatMap(_.deployments)
@@ -91,7 +86,7 @@ class PeriodicProcessService(
scenarioActivityId = ScenarioActivityId(DeterministicUUIDFromLong.longUUID(deployment.id.value)),
user = ScenarioUser.internalNuUser,
date = metadata.dateDeployed.getOrElse(metadata.dateFinished),
- scenarioVersionId = Some(ScenarioVersionId.from(deployment.periodicProcess.processVersion.versionId)),
+ scenarioVersionId = Some(ScenarioVersionId.from(deployment.periodicProcess.deploymentData.versionId)),
scheduledExecutionStatus = metadata.status,
dateFinished = metadata.dateFinished,
scheduleName = deployment.scheduleName.display,
@@ -148,56 +143,70 @@ class PeriodicProcessService(
processVersion: ProcessVersion,
canonicalProcess: CanonicalProcess,
scheduleDates: List[(ScheduleName, Option[LocalDateTime])],
- processActionId: ProcessActionId
+ processActionId: ProcessActionId,
): Future[Unit] = {
logger.info("Scheduling periodic scenario: {} on {}", processVersion, scheduleDates)
+
for {
- deploymentWithJarData <- jarManager.prepareDeploymentWithJar(processVersion, canonicalProcess)
+ inputConfigDuringExecution <- scheduledExecutionPerformer.provideInputConfigDuringExecutionJson()
+ deploymentWithJarData <- scheduledExecutionPerformer.prepareDeploymentWithRuntimeParams(
+ processVersion,
+ )
enrichedProcessConfig <- processConfigEnricher.onInitialSchedule(
- ProcessConfigEnricher.InitialScheduleData(
- deploymentWithJarData.process,
- deploymentWithJarData.inputConfigDuringExecutionJson
- )
+ ProcessConfigEnricher.InitialScheduleData(canonicalProcess, inputConfigDuringExecution.serialized)
)
- enrichedDeploymentWithJarData = deploymentWithJarData.copy(inputConfigDuringExecutionJson =
- enrichedProcessConfig.inputConfigDuringExecutionJson
+ _ <- initialSchedule(
+ scheduleProperty,
+ scheduleDates,
+ deploymentWithJarData,
+ canonicalProcess,
+ enrichedProcessConfig.inputConfigDuringExecutionJson,
+ processActionId,
)
- _ <- initialSchedule(scheduleProperty, scheduleDates, enrichedDeploymentWithJarData, processActionId)
} yield ()
}
private def initialSchedule(
scheduleMap: ScheduleProperty,
scheduleDates: List[(ScheduleName, Option[LocalDateTime])],
- deploymentWithJarData: DeploymentWithJarData.WithCanonicalProcess,
- processActionId: ProcessActionId
+ deploymentWithJarData: DeploymentWithRuntimeParams,
+ canonicalProcess: CanonicalProcess,
+ inputConfigDuringExecutionJson: String,
+ processActionId: ProcessActionId,
): Future[Unit] = {
- scheduledProcessesRepository
- .create(deploymentWithJarData, scheduleMap, processActionId)
+ periodicProcessesRepository
+ .create(
+ deploymentWithJarData,
+ inputConfigDuringExecutionJson,
+ canonicalProcess,
+ scheduleMap,
+ processActionId
+ )
+ .run
.flatMap { process =>
scheduleDates.collect {
case (name, Some(date)) =>
- scheduledProcessesRepository
+ periodicProcessesRepository
.schedule(process.id, name, date, deploymentRetryConfig.deployMaxRetries)
+ .run
.flatMap { data =>
- handleEvent(ScheduledEvent(data, firstSchedule = true))
+ handleEvent(ScheduledEvent(data.toDetails, firstSchedule = true))
}
case (name, None) =>
logger.warn(s"Schedule $name does not have date to schedule")
- monad.pure(())
+ Future.successful(())
}.sequence
}
- .run
.map(_ => ())
}
- def findToBeDeployed: Future[Seq[PeriodicProcessDeployment[WithCanonicalProcess]]] = {
+ def findToBeDeployed: Future[Seq[PeriodicProcessDeployment]] = {
for {
- toBeDeployed <- scheduledProcessesRepository.findToBeDeployed.run.flatMap { toDeployList =>
+ toBeDeployed <- periodicProcessesRepository.findToBeDeployed.run.flatMap { toDeployList =>
Future.sequence(toDeployList.map(checkIfNotRunning)).map(_.flatten)
}
// We retry scenarios that failed on deployment. Failure recovery of running scenarios should be handled by Flink's restart strategy
- toBeRetried <- scheduledProcessesRepository.findToBeRetried.run
+ toBeRetried <- periodicProcessesRepository.findToBeRetried.run
// We don't block scheduled deployments by retries
} yield toBeDeployed.sortBy(d => (d.runAt, d.createdAt)) ++ toBeRetried.sortBy(d => (d.nextRetryAt, d.createdAt))
}
@@ -205,10 +214,10 @@ class PeriodicProcessService(
  // Currently we don't allow simultaneous runs of a single scenario - only sequential ones, so if another schedule kicks in, it'll have to wait
  // TODO: we should allow scenarios with different scheduleName values to be deployed simultaneously
private def checkIfNotRunning(
- toDeploy: PeriodicProcessDeployment[WithCanonicalProcess]
- ): Future[Option[PeriodicProcessDeployment[WithCanonicalProcess]]] = {
+ toDeploy: PeriodicProcessDeployment
+ ): Future[Option[PeriodicProcessDeployment]] = {
delegateDeploymentManager
- .getProcessStates(toDeploy.periodicProcess.processVersion.processName)(DataFreshnessPolicy.Fresh)
+ .getProcessStates(toDeploy.periodicProcess.deploymentData.processName)(DataFreshnessPolicy.Fresh)
.map(
_.value
.map(_.status)
@@ -228,7 +237,7 @@ class PeriodicProcessService(
schedules.groupedByPeriodicProcess
.collect {
case processScheduleData
- if processScheduleData.existsDeployment(d => needRescheduleDeploymentIds.contains(d.id)) =>
+ if processScheduleData.deployments.exists(d => needRescheduleDeploymentIds.contains(d.id)) =>
reschedule(processScheduleData, needRescheduleDeploymentIds)
}
.sequence
@@ -236,9 +245,9 @@ class PeriodicProcessService(
}
for {
- schedules <- scheduledProcessesRepository
+ schedules <- periodicProcessesRepository
.findActiveSchedulesForProcessesHavingDeploymentWithMatchingStatus(
- Set(PeriodicProcessDeploymentStatus.Deployed, PeriodicProcessDeploymentStatus.FailedOnDeploy)
+ Set(PeriodicProcessDeploymentStatus.Deployed, PeriodicProcessDeploymentStatus.FailedOnDeploy),
)
.run
// we handle each job separately, if we fail at some point, we will continue on next handleFinished run
@@ -261,32 +270,40 @@ class PeriodicProcessService(
s"Process '$processName' latest deployment ids: ${scheduleData.latestDeployments.map(_.id.toString)}"
)
scheduleData.latestDeployments.map { deployment =>
- (deployment, runtimeStatuses.getStatus(deployment.id))
+ (
+ scheduleData.process.deploymentData.processName,
+ scheduleData.process.deploymentData.versionId,
+ deployment,
+ runtimeStatuses.getStatus(deployment.id)
+ )
}
}
_ = logger.debug(
s"Process '$processName' schedule deployments with status: ${scheduleDeploymentsWithStatus.map(_.toString)}"
)
needRescheduleDeployments <- Future
- .sequence(scheduleDeploymentsWithStatus.map { case (deploymentData, statusOpt) =>
- synchronizeDeploymentState(deploymentData, statusOpt).run.map { needReschedule =>
+ .sequence(scheduleDeploymentsWithStatus.map { case (processName, versionId, deploymentData, statusOpt) =>
+ synchronizeDeploymentState(processName, versionId, deploymentData, statusOpt).map { needReschedule =>
Option(deploymentData.id).filter(_ => needReschedule)
}
})
.map(_.flatten.toSet)
followingDeployDeploymentsForSchedules = scheduleDeploymentsWithStatus.collect {
- case (deployment, Some(status)) if SimpleStateStatus.DefaultFollowingDeployStatuses.contains(status.status) =>
+ case (_, _, deployment, Some(status))
+ if SimpleStateStatus.DefaultFollowingDeployStatuses.contains(status.status) =>
deployment.id
}.toSet
} yield (followingDeployDeploymentsForSchedules, needRescheduleDeployments)
  // We assume that this method leaves the data in a consistent state
private def synchronizeDeploymentState(
+ processName: ProcessName,
+ versionId: VersionId,
deployment: ScheduleDeploymentData,
- processState: Option[StatusDetails]
- ): RepositoryAction[NeedsReschedule] = {
- implicit class RichRepositoryAction[Unit](a: RepositoryAction[Unit]) {
- def needsReschedule(value: Boolean): RepositoryAction[NeedsReschedule] = a.map(_ => value)
+ processState: Option[StatusDetails],
+ ): Future[NeedsReschedule] = {
+ implicit class RichFuture[Unit](a: Future[Unit]) {
+ def needsReschedule(value: Boolean): Future[NeedsReschedule] = a.map(_ => value)
}
processState.map(_.status) match {
case Some(status)
@@ -298,7 +315,7 @@ class PeriodicProcessService(
if EngineStatusesToReschedule.contains(
status
) && deployment.state.status != PeriodicProcessDeploymentStatus.Finished =>
- markFinished(deployment, processState).needsReschedule(value = true)
+ markFinished(processName, versionId, deployment, processState).needsReschedule(value = true)
case None
if deployment.state.status == PeriodicProcessDeploymentStatus.Deployed
&& deployment.deployedAt.exists(_.isBefore(LocalDateTime.now().minusMinutes(5))) =>
@@ -306,26 +323,27 @@ class PeriodicProcessService(
// this can be caused by a race in e.g. FlinkRestManager
// (because /jobs/overview used in getProcessStates isn't instantly aware of submitted jobs)
// so freshly deployed deployments aren't considered
- markFinished(deployment, processState).needsReschedule(value = true)
+ markFinished(processName, versionId, deployment, processState).needsReschedule(value = true)
case _ =>
- scheduledProcessesRepository.monad.pure(()).needsReschedule(value = false)
+ Future.successful(()).needsReschedule(value = false)
}
}
private def reschedule(
processScheduleData: PeriodicProcessScheduleData,
needRescheduleDeploymentIds: Set[PeriodicProcessDeploymentId]
- ): RepositoryAction[Callback] = {
+ ): Future[Callback] = {
import processScheduleData._
val scheduleActions = deployments.map { deployment =>
if (needRescheduleDeploymentIds.contains(deployment.id))
- deployment.nextRunAt(clock) match {
+ nextRunAt(deployment, clock) match {
case Right(Some(futureDate)) =>
logger.info(s"Rescheduling ${deployment.display} to $futureDate")
- val action = scheduledProcessesRepository
+ val action = periodicProcessesRepository
.schedule(process.id, deployment.scheduleName, futureDate, deploymentRetryConfig.deployMaxRetries)
+ .run
.flatMap { data =>
- handleEvent(ScheduledEvent(data, firstSchedule = false))
+ handleEvent(ScheduledEvent(data.toDetails, firstSchedule = false))
}
Some(action)
case Right(None) =>
@@ -338,7 +356,7 @@ class PeriodicProcessService(
else
Option(deployment)
.filter(_.state.status == PeriodicProcessDeploymentStatus.Scheduled)
- .map(_ => scheduledProcessesRepository.monad.pure(()))
+ .map(_ => Future.successful(()))
}
@@ -352,18 +370,45 @@ class PeriodicProcessService(
scheduleActions.flatten.sequence.as(emptyCallback)
}
- private def markFinished(deployment: ScheduleDeploymentData, state: Option[StatusDetails]): RepositoryAction[Unit] = {
+ private def nextRunAt(
+ deployment: PeriodicProcessDeployment,
+ clock: Clock
+ ): Either[String, Option[LocalDateTime]] =
+ (deployment.periodicProcess.scheduleProperty, deployment.scheduleName.value) match {
+ case (MultipleScheduleProperty(schedules), Some(name)) =>
+ schedules.get(name).toRight(s"Failed to find schedule: $deployment.scheduleName").flatMap(_.nextRunAt(clock))
+ case (e: SingleScheduleProperty, None) => e.nextRunAt(clock)
+ case (schedule, name) => Left(s"Schedule name: $name mismatch with schedule: $schedule")
+ }
+
+ private def markFinished(
+ processName: ProcessName,
+ versionId: VersionId,
+ deployment: ScheduleDeploymentData,
+ state: Option[StatusDetails],
+ ): Future[Unit] = {
logger.info(s"Marking ${deployment.display} with status: ${deployment.state.status} as finished")
for {
- _ <- scheduledProcessesRepository.markFinished(deployment.id)
- currentState <- scheduledProcessesRepository.findProcessData(deployment.id)
- } yield handleEvent(FinishedEvent(currentState, state))
+ _ <- periodicProcessesRepository.markFinished(deployment.id).run
+ currentState <- periodicProcessesRepository.findProcessData(deployment.id).run
+ canonicalProcessOpt <- periodicProcessesRepository
+ .fetchCanonicalProcessWithVersion(
+ processName,
+ versionId
+ )
+ .map(_.map(_._1))
+ canonicalProcess = canonicalProcessOpt.getOrElse {
+ throw new PeriodicProcessException(
+ s"Could not fetch CanonicalProcess with ProcessVersion for processName=$processName, versionId=$versionId"
+ )
+ }
+ } yield handleEvent(FinishedEvent(currentState.toDetails, canonicalProcess, state))
}
private def handleFailedDeployment(
- deployment: PeriodicProcessDeployment[_],
+ deployment: PeriodicProcessDeployment,
state: Option[StatusDetails]
- ): RepositoryAction[Unit] = {
+ ): Future[Unit] = {
def calculateNextRetryAt = now().plus(deploymentRetryConfig.deployRetryPenalize.toMillis, ChronoUnit.MILLIS)
val retriesLeft =
@@ -382,20 +427,20 @@ class PeriodicProcessService(
)
for {
- _ <- scheduledProcessesRepository.markFailedOnDeployWithStatus(deployment.id, status, retriesLeft, nextRetryAt)
- currentState <- scheduledProcessesRepository.findProcessData(deployment.id)
- } yield handleEvent(FailedOnDeployEvent(currentState, state))
+ _ <- periodicProcessesRepository.markFailedOnDeployWithStatus(deployment.id, status, retriesLeft, nextRetryAt).run
+ currentState <- periodicProcessesRepository.findProcessData(deployment.id).run
+ } yield handleEvent(FailedOnDeployEvent(currentState.toDetails, state))
}
private def markFailedAction(
deployment: ScheduleDeploymentData,
state: Option[StatusDetails]
- ): RepositoryAction[Unit] = {
+ ): Future[Unit] = {
logger.info(s"Marking ${deployment.display} as failed.")
for {
- _ <- scheduledProcessesRepository.markFailed(deployment.id)
- currentState <- scheduledProcessesRepository.findProcessData(deployment.id)
- } yield handleEvent(FailedOnRunEvent(currentState, state))
+ _ <- periodicProcessesRepository.markFailed(deployment.id).run
+ currentState <- periodicProcessesRepository.findProcessData(deployment.id).run
+ } yield handleEvent(FailedOnRunEvent(currentState.toDetails, state))
}
def deactivate(processName: ProcessName): Future[Iterable[DeploymentId]] =
@@ -405,32 +450,34 @@ class PeriodicProcessService(
_ <- activeSchedules.groupedByPeriodicProcess.map(p => deactivateAction(p.process)).sequence.runWithCallbacks
} yield runningDeploymentsForSchedules.map(deployment => DeploymentId(deployment.toString))
- private def deactivateAction(process: PeriodicProcess[WithoutCanonicalProcess]): RepositoryAction[Callback] = {
+ private def deactivateAction(
+ process: PeriodicProcess
+ ): Future[Callback] = {
logger.info(s"Deactivate periodic process id: ${process.id.value}")
for {
- _ <- scheduledProcessesRepository.markInactive(process.id)
+ _ <- periodicProcessesRepository.markInactive(process.id).run
      // we want to delete jars only after we have successfully marked the process as inactive. It's better to leave jar garbage than
      // to have a process without its jar
- } yield () => jarManager.deleteJar(process.deploymentData.jarFileName)
+ } yield () => scheduledExecutionPerformer.cleanAfterDeployment(process.deploymentData.runtimeParams)
}
private def markProcessActionExecutionFinished(
processActionIdOption: Option[ProcessActionId]
- ): RepositoryAction[Callback] =
- scheduledProcessesRepository.monad.pure { () =>
+ ): Future[Callback] =
+ Future.successful { () =>
processActionIdOption
.map(actionService.markActionExecutionFinished)
.sequence
.map(_ => ())
}
- def deploy(deployment: PeriodicProcessDeployment[WithCanonicalProcess]): Future[Unit] = {
+ def deploy(deployment: PeriodicProcessDeployment): Future[Unit] = {
// TODO: set status before deployment?
val id = deployment.id
val deploymentData = DeploymentData(
DeploymentId(id.toString),
DeploymentData.systemUser,
- additionalDeploymentDataProvider.prepareAdditionalData(deployment),
+ additionalDeploymentDataProvider.prepareAdditionalData(deployment.toDetails),
      // TODO: in the future we could allow users to specify nodes data when requesting a schedule
NodesDeploymentData.empty,
AdditionalModelConfigs(
@@ -440,42 +487,69 @@ class PeriodicProcessService(
val deploymentWithJarData = deployment.periodicProcess.deploymentData
val deploymentAction = for {
_ <- Future.successful(
- logger.info("Deploying scenario {} for deployment id {}", deploymentWithJarData.processVersion, id)
+ logger.info("Deploying scenario {} for deployment id {}", deploymentWithJarData, id)
)
+ processName = deploymentWithJarData.processName
+ versionId = deploymentWithJarData.versionId
+ canonicalProcessWithVersionOpt <- periodicProcessesRepository
+ .fetchCanonicalProcessWithVersion(
+ processName,
+ versionId
+ )
+ canonicalProcessWithVersion = canonicalProcessWithVersionOpt.getOrElse {
+ throw new PeriodicProcessException(
+ s"Could not fetch CanonicalProcess with ProcessVersion for processName=$processName, versionId=$versionId"
+ )
+ }
+ inputConfigDuringExecutionJsonOpt <- periodicProcessesRepository
+ .fetchInputConfigDuringExecutionJson(
+ processName,
+ versionId,
+ )
+ .run
+ inputConfigDuringExecutionJson = inputConfigDuringExecutionJsonOpt.getOrElse {
+ throw new PeriodicProcessException(
+ s"Could not fetch inputConfigDuringExecutionJson for processName=${processName}, versionId=${versionId}"
+ )
+ }
enrichedProcessConfig <- processConfigEnricher.onDeploy(
ProcessConfigEnricher.DeployData(
- deploymentWithJarData.process,
- deploymentWithJarData.inputConfigDuringExecutionJson,
- deployment
+ canonicalProcessWithVersion._1,
+ canonicalProcessWithVersion._2,
+ inputConfigDuringExecutionJson,
+ deployment.toDetails
)
)
- enrichedDeploymentWithJarData = deploymentWithJarData.copy(inputConfigDuringExecutionJson =
- enrichedProcessConfig.inputConfigDuringExecutionJson
+ externalDeploymentId <- scheduledExecutionPerformer.deployWithRuntimeParams(
+ deploymentWithJarData,
+ enrichedProcessConfig.inputConfigDuringExecutionJson,
+ deploymentData,
+ canonicalProcessWithVersion._1,
+ canonicalProcessWithVersion._2,
)
- externalDeploymentId <- jarManager.deployWithJar(enrichedDeploymentWithJarData, deploymentData)
} yield externalDeploymentId
deploymentAction
.flatMap { externalDeploymentId =>
- logger.info("Scenario has been deployed {} for deployment id {}", deploymentWithJarData.processVersion, id)
+ logger.info("Scenario has been deployed {} for deployment id {}", deploymentWithJarData, id)
// TODO: add externalDeploymentId??
- scheduledProcessesRepository
+ periodicProcessesRepository
.markDeployed(id)
- .flatMap(_ => scheduledProcessesRepository.findProcessData(id))
- .flatMap(afterChange => handleEvent(DeployedEvent(afterChange, externalDeploymentId)))
.run
+ .flatMap(_ => periodicProcessesRepository.findProcessData(id).run)
+ .flatMap(afterChange => handleEvent(DeployedEvent(afterChange.toDetails, externalDeploymentId)))
}
// We can recover since deployment actor watches only future completion.
.recoverWith { case exception =>
logger.error(s"Scenario deployment ${deployment.display} failed", exception)
- handleFailedDeployment(deployment, None).run
+ handleFailedDeployment(deployment, None)
}
}
// TODO: allow access to DB in listener?
- private def handleEvent(event: PeriodicProcessEvent): scheduledProcessesRepository.Action[Unit] = {
- scheduledProcessesRepository.monad.pure {
+ private def handleEvent(event: ScheduledProcessEvent): Future[Unit] = {
+ Future.successful {
try {
- periodicProcessListener.onPeriodicProcessEvent.applyOrElse(event, (_: PeriodicProcessEvent) => ())
+ periodicProcessListener.onScheduledProcessEvent.applyOrElse(event, (_: ScheduledProcessEvent) => ())
} catch {
case NonFatal(e) => throw new PeriodicProcessException("Failed to invoke listener", e)
}
@@ -523,7 +597,7 @@ class PeriodicProcessService(
def toDeploymentStatuses(schedulesState: SchedulesState) = schedulesState.schedules.toList
.flatMap { case (scheduleId, scheduleData) =>
scheduleData.latestDeployments.map { deployment =>
- DeploymentStatus(
+ PeriodicDeploymentStatus(
deployment.id,
scheduleId,
deployment.createdAt,
@@ -534,7 +608,7 @@ class PeriodicProcessService(
)
}
}
- .sorted(DeploymentStatus.ordering.reverse)
+ .sorted(PeriodicDeploymentStatus.ordering.reverse)
for {
activeSchedules <- getLatestDeploymentsForActiveSchedules(name, MaxDeploymentsStatus)
@@ -556,7 +630,7 @@ class PeriodicProcessService(
schedulesState.schedules.toList
.flatMap { case (scheduleId, scheduleData) =>
scheduleData.latestDeployments.map { deployment =>
- DeploymentStatus(
+ PeriodicDeploymentStatus(
deployment.id,
scheduleId,
deployment.createdAt,
@@ -567,7 +641,7 @@ class PeriodicProcessService(
)
}
}
- .sorted(DeploymentStatus.ordering.reverse)
+ .sorted(PeriodicDeploymentStatus.ordering.reverse)
for {
activeSchedules <- getLatestDeploymentsForActiveSchedules(MaxDeploymentsStatus)
@@ -591,23 +665,28 @@ class PeriodicProcessService(
processName: ProcessName,
deploymentsPerScheduleMaxCount: Int = 1
): Future[SchedulesState] =
- scheduledProcessesRepository.getLatestDeploymentsForActiveSchedules(processName, deploymentsPerScheduleMaxCount).run
+ periodicProcessesRepository
+ .getLatestDeploymentsForActiveSchedules(
+ processName,
+ deploymentsPerScheduleMaxCount,
+ )
+ .run
def getLatestDeploymentsForActiveSchedules(
deploymentsPerScheduleMaxCount: Int
): Future[Map[ProcessName, SchedulesState]] =
- scheduledProcessesRepository.getLatestDeploymentsForActiveSchedules(deploymentsPerScheduleMaxCount).run
+ periodicProcessesRepository.getLatestDeploymentsForActiveSchedules(deploymentsPerScheduleMaxCount).run
def getLatestDeploymentsForLatestInactiveSchedules(
processName: ProcessName,
inactiveProcessesMaxCount: Int,
deploymentsPerScheduleMaxCount: Int
): Future[SchedulesState] =
- scheduledProcessesRepository
+ periodicProcessesRepository
.getLatestDeploymentsForLatestInactiveSchedules(
processName,
inactiveProcessesMaxCount,
- deploymentsPerScheduleMaxCount
+ deploymentsPerScheduleMaxCount,
)
.run
@@ -615,7 +694,7 @@ class PeriodicProcessService(
inactiveProcessesMaxCount: Int,
deploymentsPerScheduleMaxCount: Int
): Future[Map[ProcessName, SchedulesState]] =
- scheduledProcessesRepository
+ periodicProcessesRepository
.getLatestDeploymentsForLatestInactiveSchedules(
inactiveProcessesMaxCount,
deploymentsPerScheduleMaxCount
@@ -631,7 +710,7 @@ class PeriodicProcessService(
}
private def scheduledExecutionStatusAndDateFinished(
- entity: PeriodicProcessDeployment[WithoutCanonicalProcess],
+ entity: PeriodicProcessDeployment,
): Option[FinishedScheduledExecutionMetadata] = {
for {
status <- entity.state.status match {
@@ -688,15 +767,15 @@ object PeriodicProcessService {
// of a single, merged status similar to the one available for streaming jobs. This merged status should be a straightforward derivative
// of these deployments' statuses, so it will be easy for the user to figure out.
case class PeriodicProcessStatus(
- activeDeploymentsStatuses: List[DeploymentStatus],
- inactiveDeploymentsStatuses: List[DeploymentStatus]
+ activeDeploymentsStatuses: List[PeriodicDeploymentStatus],
+ inactiveDeploymentsStatuses: List[PeriodicDeploymentStatus]
) extends StateStatus
with LazyLogging {
- def limitedAndSortedDeployments: List[DeploymentStatus] =
+ def limitedAndSortedDeployments: List[PeriodicDeploymentStatus] =
(activeDeploymentsStatuses ++ inactiveDeploymentsStatuses.take(
MaxDeploymentsStatus - activeDeploymentsStatuses.size
- )).sorted(DeploymentStatus.ordering.reverse)
+ )).sorted(PeriodicDeploymentStatus.ordering.reverse)
// We present the merged name to make it possible to filter scenarios by status
override def name: StatusName = mergedStatusDetails.status.name
@@ -761,7 +840,7 @@ object PeriodicProcessService {
* should be deactivated earlier.
*
*/
- def pickMostImportantActiveDeployment: Option[DeploymentStatus] = {
+ def pickMostImportantActiveDeployment: Option[PeriodicDeploymentStatus] = {
val lastActiveDeploymentStatusForEachSchedule =
latestDeploymentForEachSchedule(activeDeploymentsStatuses).sorted
@@ -779,17 +858,17 @@ object PeriodicProcessService {
.orElse(last(PeriodicProcessDeploymentStatus.Finished))
}
- private def latestDeploymentForEachSchedule(deploymentsStatuses: List[DeploymentStatus]) = {
+ private def latestDeploymentForEachSchedule(deploymentsStatuses: List[PeriodicDeploymentStatus]) = {
deploymentsStatuses
.groupBy(_.scheduleId)
.values
.toList
- .map(_.min(DeploymentStatus.ordering.reverse))
+ .map(_.min(PeriodicDeploymentStatus.ordering.reverse))
}
}
- case class DeploymentStatus( // Probably it is too much technical to present to users, but the only other alternative
+ case class PeriodicDeploymentStatus( // Probably too technical to present to users, but the only other alternative
// to present to users is scheduleName+runAt
deploymentId: PeriodicProcessDeploymentId,
scheduleId: ScheduleId,
@@ -820,14 +899,15 @@ object PeriodicProcessService {
}
- object DeploymentStatus {
+ object PeriodicDeploymentStatus {
- implicit val ordering: Ordering[DeploymentStatus] = (self: DeploymentStatus, that: DeploymentStatus) => {
- self.runAt.compareTo(that.runAt) match {
- case 0 => self.createdAt.compareTo(that.createdAt)
- case a => a
+ implicit val ordering: Ordering[PeriodicDeploymentStatus] =
+ (self: PeriodicDeploymentStatus, that: PeriodicDeploymentStatus) => {
+ self.runAt.compareTo(that.runAt) match {
+ case 0 => self.createdAt.compareTo(that.createdAt)
+ case a => a
+ }
}
- }
}
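
The renamed ordering keeps the previous semantics: deployments compare by runAt first, with createdAt as the tie-breaker. A minimal sketch of how callers use it, assuming an already-fetched statuses: List[PeriodicDeploymentStatus]:

    val newestFirst = statuses.sorted(PeriodicDeploymentStatus.ordering.reverse) // runAt desc, then createdAt desc
    val oldest      = statuses.min(PeriodicDeploymentStatus.ordering)            // earliest runAt, ties broken by createdAt
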
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessStateDefinitionManager.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicProcessStateDefinitionManager.scala
similarity index 80%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessStateDefinitionManager.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicProcessStateDefinitionManager.scala
index 008aabcca05..17c3642a8c8 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessStateDefinitionManager.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicProcessStateDefinitionManager.scala
@@ -1,13 +1,8 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic
import pl.touk.nussknacker.engine.api.deployment.ProcessStateDefinitionManager.defaultVisibleActions
-import pl.touk.nussknacker.engine.api.deployment.{
- OverridingProcessStateDefinitionManager,
- ProcessStateDefinitionManager,
- ScenarioActionName,
- StateStatus
-}
-import pl.touk.nussknacker.engine.management.periodic.PeriodicProcessService.{DeploymentStatus, PeriodicProcessStatus}
+import pl.touk.nussknacker.engine.api.deployment.{OverridingProcessStateDefinitionManager, ProcessStateDefinitionManager, ScenarioActionName, StateStatus}
+import pl.touk.nussknacker.ui.process.periodic.PeriodicProcessService.{PeriodicDeploymentStatus, PeriodicProcessStatus}
class PeriodicProcessStateDefinitionManager(delegate: ProcessStateDefinitionManager)
extends OverridingProcessStateDefinitionManager(
@@ -33,7 +28,7 @@ object PeriodicProcessStateDefinitionManager {
def statusTooltip(processStatus: PeriodicProcessStatus): String = {
processStatus.limitedAndSortedDeployments
- .map { case d @ DeploymentStatus(_, scheduleId, _, runAt, status, _, _) =>
+ .map { case d @ PeriodicDeploymentStatus(_, scheduleId, _, runAt, status, _, _) =>
val refinedStatus = {
if (d.isCanceled) {
"Canceled"
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicStateStatus.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicStateStatus.scala
similarity index 98%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicStateStatus.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicStateStatus.scala
index a90c0cee3fb..ce47c08485d 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicStateStatus.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicStateStatus.scala
@@ -1,4 +1,4 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic
import pl.touk.nussknacker.engine.api.deployment.ProcessStateDefinitionManager.ProcessStatus
import pl.touk.nussknacker.engine.api.deployment.StateStatus.StatusName
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/RescheduleFinishedActor.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/RescheduleFinishedActor.scala
similarity index 90%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/RescheduleFinishedActor.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/RescheduleFinishedActor.scala
index d53b95c95c0..33d240de627 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/RescheduleFinishedActor.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/RescheduleFinishedActor.scala
@@ -1,8 +1,8 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic
import akka.actor.{Actor, Props, Timers}
import com.typesafe.scalalogging.LazyLogging
-import pl.touk.nussknacker.engine.management.periodic.RescheduleFinishedActor.{CheckStates, CheckStatesCompleted}
+import pl.touk.nussknacker.ui.process.periodic.RescheduleFinishedActor.{CheckStates, CheckStatesCompleted}
import scala.concurrent.Future
import scala.concurrent.duration._
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/SingleScheduleProperty.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/ScheduleProperty.scala
similarity index 95%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/SingleScheduleProperty.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/ScheduleProperty.scala
index 97b7aa80af9..86f01f37130 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/SingleScheduleProperty.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/ScheduleProperty.scala
@@ -1,4 +1,4 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic
import com.cronutils.model.definition.CronDefinitionBuilder
import com.cronutils.model.time.ExecutionTime
@@ -34,7 +34,7 @@ object SingleScheduleProperty {
@JsonCodec case class CronScheduleProperty(labelOrCronExpr: String) extends SingleScheduleProperty {
import cats.implicits._
- import pl.touk.nussknacker.engine.management.periodic.CronScheduleProperty._
+ import pl.touk.nussknacker.ui.process.periodic.CronScheduleProperty._
private lazy val cronsOrError: Either[String, List[Cron]] = {
val (errors, crons) = labelOrCronExpr
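
Since CronScheduleProperty is annotated with @JsonCodec, schedule properties round-trip through JSON; the repository persists them the same way (asJson.noSpaces on write, decode on read). A minimal sketch with a hypothetical cron expression:

    import io.circe.parser.decode
    import io.circe.syntax._

    val prop = CronScheduleProperty("0 0 6 * * ? *") // every day at 06:00
    val json = prop.asJson.noSpaces                  // {"labelOrCronExpr":"0 0 6 * * ? *"}
    decode[CronScheduleProperty](json)               // Right(CronScheduleProperty("0 0 6 * * ? *"))
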
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicBatchConfig.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/SchedulingConfig.scala
similarity index 71%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicBatchConfig.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/SchedulingConfig.scala
index 61b8f9bb1ac..d8b6f8df610 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicBatchConfig.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/SchedulingConfig.scala
@@ -1,4 +1,4 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic
import com.typesafe.config.Config
@@ -7,22 +7,23 @@ import scala.concurrent.duration._
/**
* Periodic Flink scenarios deployment configuration.
*
- * @param db Nussknacker db configuration.
+ * @param legacyDb Optional custom DB that will be used instead of the main Nussknacker DB. Will be removed in the future.
* @param processingType processing type of scenarios to be managed by this instance of the periodic engine.
* @param rescheduleCheckInterval {@link RescheduleFinishedActor} check interval.
* @param deployInterval {@link DeploymentActor} check interval.
* @param deploymentRetry {@link DeploymentRetryConfig} for deployment failure recovery.
- * @param jarsDir Directory for jars storage.
* @param maxFetchedPeriodicScenarioActivities Optional limit on the number of latest periodic-related Scenario Activities returned by the Periodic DM.
*/
-case class PeriodicBatchConfig(
- db: Config,
+case class SchedulingConfig(
+ legacyDb: Option[Config],
+ // The `processingType` value should be removed in the future, because it should always correspond to the real processingType.
+ // At the moment, however, it may differ from the processingType of the DM that uses the scheduling mechanism.
+ // Therefore, we must keep this separate value in SchedulingConfig until we ensure consistency between the real processingType and the one defined here.
processingType: String,
rescheduleCheckInterval: FiniteDuration = 13 seconds,
deployInterval: FiniteDuration = 17 seconds,
deploymentRetry: DeploymentRetryConfig,
executionConfig: PeriodicExecutionConfig,
- jarsDir: String,
maxFetchedPeriodicScenarioActivities: Option[Int] = Some(200),
)
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/Utils.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/Utils.scala
similarity index 95%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/Utils.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/Utils.scala
index b3fe622b411..a3ba8073295 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/Utils.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/Utils.scala
@@ -1,4 +1,4 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic
import akka.actor.{ActorRef, ActorSystem, Props}
import com.typesafe.scalalogging.LazyLogging
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/cron/CronParameterValidator.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/cron/CronParameterValidator.scala
similarity index 63%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/cron/CronParameterValidator.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/cron/CronParameterValidator.scala
index 9eddcdbde26..18f11368259 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/cron/CronParameterValidator.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/cron/CronParameterValidator.scala
@@ -1,27 +1,17 @@
-package pl.touk.nussknacker.engine.management.periodic.cron
+package pl.touk.nussknacker.ui.process.periodic.cron
import cats.data.Validated
import cats.data.Validated.{invalid, valid}
import pl.touk.nussknacker.engine.api
import pl.touk.nussknacker.engine.api.context.PartSubGraphCompilationError
import pl.touk.nussknacker.engine.api.context.ProcessCompilationError.CustomParameterValidationError
-import pl.touk.nussknacker.engine.api.definition.{
- CustomParameterValidator,
- CustomParameterValidatorDelegate,
- ParameterValidator
-}
+import pl.touk.nussknacker.engine.api.definition.CustomParameterValidatorDelegate
import pl.touk.nussknacker.engine.api.parameter.ParameterName
import pl.touk.nussknacker.engine.graph.expression.Expression
-import pl.touk.nussknacker.engine.management.periodic.SchedulePropertyExtractor
-
-object CronParameterValidator extends CronParameterValidator {
-
- def delegate: ParameterValidator = CustomParameterValidatorDelegate(name)
-
-}
+import pl.touk.nussknacker.ui.process.periodic.utils.SchedulePropertyExtractorUtils
// A valid expression is e.g.: 0 * * * * ? *, which means run every minute at second 0
-class CronParameterValidator extends CustomParameterValidator {
+object CronParameterValidator extends CustomParameterValidatorDelegate("cron_validator") {
override def isValid(paramName: ParameterName, expression: Expression, value: Option[Any], label: Option[String])(
implicit nodeId: api.NodeId
@@ -36,12 +26,12 @@ class CronParameterValidator extends CustomParameterValidator {
}
value match {
case Some(s: String) =>
- SchedulePropertyExtractor.parseAndValidateProperty(s).fold(_ => invalid(createValidationError), _ => valid(()))
+ SchedulePropertyExtractorUtils
+ .parseAndValidateProperty(s)
+ .fold(_ => invalid(createValidationError), _ => valid(()))
case _ => invalid(createValidationError)
}
}
- override def name: String = "cron_validator"
-
}
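
The validator delegates to SchedulePropertyExtractorUtils.parseAndValidateProperty, which ultimately checks Quartz-style cron expressions like the one in the comment above. A standalone sketch of such a check with the cron-utils library (QUARTZ cron type assumed):

    import java.time.ZonedDateTime

    import com.cronutils.model.CronType
    import com.cronutils.model.definition.CronDefinitionBuilder
    import com.cronutils.model.time.ExecutionTime
    import com.cronutils.parser.CronParser

    val parser = new CronParser(CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ))
    val cron   = parser.parse("0 * * * * ? *") // every minute at second 0; throws on malformed input
    cron.validate()                            // throws IllegalArgumentException on out-of-range fields
    ExecutionTime.forCron(cron).nextExecution(ZonedDateTime.now()) // Optional of the next fire time
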
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/cron/CronSchedulePropertyExtractor.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/cron/CronSchedulePropertyExtractor.scala
new file mode 100644
index 00000000000..96294b9a08a
--- /dev/null
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/cron/CronSchedulePropertyExtractor.scala
@@ -0,0 +1,34 @@
+package pl.touk.nussknacker.ui.process.periodic.cron
+
+import com.typesafe.scalalogging.LazyLogging
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.{ScheduleProperty => ApiScheduleProperty}
+import pl.touk.nussknacker.engine.api.deployment.scheduler.services.SchedulePropertyExtractor
+import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
+import pl.touk.nussknacker.ui.process.periodic.cron.CronSchedulePropertyExtractor.CronPropertyDefaultName
+import pl.touk.nussknacker.ui.process.periodic.utils.SchedulePropertyExtractorUtils
+import pl.touk.nussknacker.ui.process.periodic.{CronScheduleProperty, MultipleScheduleProperty, SingleScheduleProperty}
+
+object CronSchedulePropertyExtractor {
+ val CronPropertyDefaultName = "cron"
+}
+
+case class CronSchedulePropertyExtractor(propertyName: String = CronPropertyDefaultName)
+ extends SchedulePropertyExtractor
+ with LazyLogging {
+
+ override def apply(canonicalProcess: CanonicalProcess): Either[String, ApiScheduleProperty] = {
+ SchedulePropertyExtractorUtils.extractProperty(canonicalProcess, propertyName).map {
+ case MultipleScheduleProperty(schedules) =>
+ ApiScheduleProperty.MultipleScheduleProperty(schedules.map { case (k, v) => (k, toApi(v)) })
+ case cronProperty: CronScheduleProperty =>
+ toApi(cronProperty)
+ }
+ }
+
+ private def toApi(singleProperty: SingleScheduleProperty): ApiScheduleProperty.SingleScheduleProperty = {
+ singleProperty match {
+ case CronScheduleProperty(labelOrCronExpr) => ApiScheduleProperty.CronScheduleProperty(labelOrCronExpr)
+ }
+ }
+
+}
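
A minimal usage sketch; canonicalProcess stands for a hypothetical scenario whose "cron" property is set:

    val extractor = CronSchedulePropertyExtractor() // reads the default "cron" scenario property
    extractor(canonicalProcess) match {
      case Right(single: ApiScheduleProperty.CronScheduleProperty) => // one cron schedule
      case Right(multiple)                                         => // several named schedules
      case Left(error)                                             => // property missing or unparsable
    }
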
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/db/DbInitializer.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyDbInitializer.scala
similarity index 95%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/db/DbInitializer.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyDbInitializer.scala
index bb65cef5794..0dbaa7fd6a3 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/db/DbInitializer.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyDbInitializer.scala
@@ -1,4 +1,4 @@
-package pl.touk.nussknacker.engine.management.periodic.db
+package pl.touk.nussknacker.ui.process.periodic.legacy.db
import com.github.tminglei.slickpg.ExPostgresProfile
import com.typesafe.config.Config
@@ -9,7 +9,7 @@ import org.flywaydb.core.api.configuration.FluentConfiguration
import org.flywaydb.core.internal.database.postgresql.PostgreSQLDatabaseType
import slick.jdbc.{HsqldbProfile, JdbcBackend, JdbcProfile, PostgresProfile}
-object DbInitializer extends LazyLogging {
+object LegacyDbInitializer extends LazyLogging {
def init(configDb: Config): (JdbcBackend.DatabaseDef, JdbcProfile) = {
import net.ceedubs.ficus.Ficus._
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessDeploymentsTableFactory.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessDeploymentsTableFactory.scala
new file mode 100644
index 00000000000..c46626ac88c
--- /dev/null
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessDeploymentsTableFactory.scala
@@ -0,0 +1,80 @@
+package pl.touk.nussknacker.ui.process.periodic.legacy.db
+
+import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessDeploymentStatus.PeriodicProcessDeploymentStatus
+import pl.touk.nussknacker.ui.process.periodic.model.{
+ PeriodicProcessDeploymentId,
+ PeriodicProcessDeploymentStatus,
+ PeriodicProcessId
+}
+import slick.jdbc.{JdbcProfile, JdbcType}
+import slick.lifted.ProvenShape
+import slick.sql.SqlProfile.ColumnOption.NotNull
+
+import java.time.LocalDateTime
+
+trait LegacyPeriodicProcessDeploymentsTableFactory extends LegacyPeriodicProcessesTableFactory {
+
+ protected val profile: JdbcProfile
+
+ import profile.api._
+
+ implicit val periodicProcessDeploymentIdMapping: BaseColumnType[PeriodicProcessDeploymentId] =
+ MappedColumnType.base[PeriodicProcessDeploymentId, Long](_.value, PeriodicProcessDeploymentId.apply)
+
+ implicit val periodicProcessDeploymentStatusColumnTyped: JdbcType[PeriodicProcessDeploymentStatus] =
+ MappedColumnType.base[PeriodicProcessDeploymentStatus, String](_.toString, PeriodicProcessDeploymentStatus.withName)
+
+ class PeriodicProcessDeploymentsTable(tag: Tag)
+ extends Table[PeriodicProcessDeploymentEntity](tag, "periodic_process_deployments") {
+
+ def id: Rep[PeriodicProcessDeploymentId] = column[PeriodicProcessDeploymentId]("id", O.PrimaryKey, O.AutoInc)
+
+ def periodicProcessId: Rep[PeriodicProcessId] = column[PeriodicProcessId]("periodic_process_id", NotNull)
+
+ def createdAt: Rep[LocalDateTime] = column[LocalDateTime]("created_at", NotNull)
+
+ def runAt: Rep[LocalDateTime] = column[LocalDateTime]("run_at", NotNull)
+
+ def scheduleName: Rep[Option[String]] = column[Option[String]]("schedule_name")
+
+ def deployedAt: Rep[Option[LocalDateTime]] = column[Option[LocalDateTime]]("deployed_at")
+
+ def completedAt: Rep[Option[LocalDateTime]] = column[Option[LocalDateTime]]("completed_at")
+
+ def retriesLeft: Rep[Int] = column[Int]("retries_left")
+
+ def nextRetryAt: Rep[Option[LocalDateTime]] = column[Option[LocalDateTime]]("next_retry_at")
+
+ def status: Rep[PeriodicProcessDeploymentStatus] = column[PeriodicProcessDeploymentStatus]("status", NotNull)
+
+ override def * : ProvenShape[PeriodicProcessDeploymentEntity] = (
+ id,
+ periodicProcessId,
+ createdAt,
+ runAt,
+ scheduleName,
+ deployedAt,
+ completedAt,
+ retriesLeft,
+ nextRetryAt,
+ status
+ ) <>
+ ((PeriodicProcessDeploymentEntity.apply _).tupled, PeriodicProcessDeploymentEntity.unapply)
+
+ }
+
+ object PeriodicProcessDeployments extends TableQuery(new PeriodicProcessDeploymentsTable(_))
+}
+
+case class PeriodicProcessDeploymentEntity(
+ id: PeriodicProcessDeploymentId,
+ periodicProcessId: PeriodicProcessId,
+ createdAt: LocalDateTime,
+ runAt: LocalDateTime,
+ scheduleName: Option[String],
+ deployedAt: Option[LocalDateTime],
+ completedAt: Option[LocalDateTime],
+ retriesLeft: Int,
+ nextRetryAt: Option[LocalDateTime],
+ status: PeriodicProcessDeploymentStatus
+)
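
With the mappings above in scope, repositories can phrase queries directly against this table. A minimal sketch in the spirit of findToBeDeployed below, assuming it lives in a class mixing in this factory (so profile.api._ is imported):

    import java.time.LocalDateTime

    def dueDeployments(now: LocalDateTime) =
      PeriodicProcessDeployments
        .filter(d =>
          d.runAt <= now &&
            d.status === (PeriodicProcessDeploymentStatus.Scheduled: PeriodicProcessDeploymentStatus)
        )
        .sortBy(_.runAt.asc)
        .result // a DBIOAction producing Seq[PeriodicProcessDeploymentEntity]
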
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessesRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessesRepository.scala
new file mode 100644
index 00000000000..d747be320b2
--- /dev/null
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessesRepository.scala
@@ -0,0 +1,498 @@
+package pl.touk.nussknacker.ui.process.periodic.legacy.db
+
+import cats.Monad
+import com.github.tminglei.slickpg.ExPostgresProfile
+import com.typesafe.scalalogging.LazyLogging
+import db.util.DBIOActionInstances.DB
+import io.circe.parser.decode
+import io.circe.syntax.EncoderOps
+import pl.touk.nussknacker.engine.api.ProcessVersion
+import pl.touk.nussknacker.engine.api.deployment.ProcessActionId
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.{DeploymentWithRuntimeParams, RuntimeParams}
+import pl.touk.nussknacker.engine.api.process.{ProcessName, VersionId}
+import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
+import pl.touk.nussknacker.engine.management.FlinkScheduledExecutionPerformer.jarFileNameRuntimeParam
+import pl.touk.nussknacker.ui.process.periodic.ScheduleProperty
+import pl.touk.nussknacker.ui.process.periodic.legacy.db.LegacyPeriodicProcessesRepository.createPeriodicProcess
+import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessDeploymentStatus.PeriodicProcessDeploymentStatus
+import pl.touk.nussknacker.ui.process.periodic.model._
+import pl.touk.nussknacker.ui.process.repository.{FetchingProcessRepository, PeriodicProcessesRepository}
+import pl.touk.nussknacker.ui.security.api.NussknackerInternalUser
+import slick.dbio.{DBIOAction, Effect, NoStream}
+import slick.jdbc.PostgresProfile.api._
+import slick.jdbc.{JdbcBackend, JdbcProfile}
+
+import java.time.{Clock, LocalDateTime}
+import scala.concurrent.{ExecutionContext, Future}
+import scala.language.higherKinds
+
+object LegacyPeriodicProcessesRepository {
+
+ def createPeriodicProcessDeployment(
+ processEntity: PeriodicProcessEntity,
+ processDeploymentEntity: PeriodicProcessDeploymentEntity
+ ): PeriodicProcessDeployment = {
+ val process = createPeriodicProcess(processEntity)
+ PeriodicProcessDeployment(
+ processDeploymentEntity.id,
+ process,
+ processDeploymentEntity.createdAt,
+ processDeploymentEntity.runAt,
+ ScheduleName(processDeploymentEntity.scheduleName),
+ processDeploymentEntity.retriesLeft,
+ processDeploymentEntity.nextRetryAt,
+ createPeriodicDeploymentState(processDeploymentEntity)
+ )
+ }
+
+ def createPeriodicDeploymentState(
+ processDeploymentEntity: PeriodicProcessDeploymentEntity
+ ): PeriodicProcessDeploymentState = {
+ PeriodicProcessDeploymentState(
+ processDeploymentEntity.deployedAt,
+ processDeploymentEntity.completedAt,
+ processDeploymentEntity.status
+ )
+ }
+
+ def createPeriodicProcess(
+ processEntity: PeriodicProcessEntity
+ ): PeriodicProcess = {
+ val scheduleProperty = prepareScheduleProperty(processEntity)
+ PeriodicProcess(
+ processEntity.id,
+ DeploymentWithRuntimeParams(
+ processId = None,
+ processName = processEntity.processName,
+ versionId = processEntity.processVersionId,
+ runtimeParams = RuntimeParams(Map(jarFileNameRuntimeParam -> processEntity.jarFileName)),
+ ),
+ scheduleProperty,
+ processEntity.active,
+ processEntity.createdAt,
+ processEntity.processActionId
+ )
+ }
+
+ private def prepareScheduleProperty(processEntity: PeriodicProcessEntity) = {
+ val scheduleProperty = decode[ScheduleProperty](processEntity.scheduleProperty)
+ .fold(e => throw new IllegalArgumentException(e), identity)
+ scheduleProperty
+ }
+
+}
+
+class SlickLegacyPeriodicProcessesRepository(
+ processingType: String,
+ db: JdbcBackend.DatabaseDef,
+ override val profile: JdbcProfile,
+ clock: Clock,
+ fetchingProcessRepository: FetchingProcessRepository[Future],
+)(implicit ec: ExecutionContext)
+ extends PeriodicProcessesRepository
+ with LegacyPeriodicProcessesTableFactory
+ with LegacyPeriodicProcessDeploymentsTableFactory
+ with LazyLogging {
+
+ import pl.touk.nussknacker.engine.util.Implicits._
+
+ type Action[T] = DBIOActionInstances.DB[T]
+
+ override def run[T](action: DBIOAction[T, NoStream, Effect.All]): Future[T] = db.run(action.transactionally)
+
+ override def getSchedulesState(
+ scenarioName: ProcessName,
+ afterOpt: Option[LocalDateTime],
+ ): Action[SchedulesState] = {
+ PeriodicProcessesWithoutJson
+ .filter(_.processName === scenarioName)
+ .join(PeriodicProcessDeployments)
+ .on(_.id === _.periodicProcessId)
+ .filterOpt(afterOpt)((entities, after) => entities._2.completedAt > after)
+ .result
+ .map(toSchedulesStateForSinglePeriodicProcess)
+ }
+
+ override def create(
+ deploymentWithRuntimeParams: DeploymentWithRuntimeParams,
+ inputConfigDuringExecutionJson: String,
+ canonicalProcess: CanonicalProcess,
+ scheduleProperty: ScheduleProperty,
+ processActionId: ProcessActionId,
+ ): Action[PeriodicProcess] = {
+ val jarFileName = deploymentWithRuntimeParams.runtimeParams.params.getOrElse(
+ jarFileNameRuntimeParam,
+ throw new RuntimeException("jarFileName runtime param not present")
+ )
+ val processEntity = PeriodicProcessEntityWithJson(
+ id = PeriodicProcessId(-1),
+ processName = deploymentWithRuntimeParams.processName,
+ processVersionId = deploymentWithRuntimeParams.versionId,
+ processingType = processingType,
+ jarFileName = jarFileName,
+ scheduleProperty = scheduleProperty.asJson.noSpaces,
+ active = true,
+ createdAt = now(),
+ processActionId = Some(processActionId),
+ inputConfigDuringExecutionJson = inputConfigDuringExecutionJson,
+ processJson = canonicalProcess,
+ )
+ ((PeriodicProcessesWithJson returning PeriodicProcessesWithJson into ((_, id) => id)) += processEntity)
+ .map(LegacyPeriodicProcessesRepository.createPeriodicProcess)
+ }
+
+ private def now(): LocalDateTime = LocalDateTime.now(clock)
+
+ override def findToBeDeployed: Action[Seq[PeriodicProcessDeployment]] =
+ findProcesses(
+ activePeriodicProcessWithDeploymentQuery(processingType)
+ .filter { case (_, d) =>
+ d.runAt <= now() &&
+ d.status === (PeriodicProcessDeploymentStatus.Scheduled: PeriodicProcessDeploymentStatus)
+ }
+ )
+
+ override def findToBeRetried: Action[Seq[PeriodicProcessDeployment]] =
+ findProcesses(
+ activePeriodicProcessWithDeploymentQuery(processingType)
+ .filter { case (_, d) =>
+ d.nextRetryAt <= now() &&
+ d.status === (PeriodicProcessDeploymentStatus.RetryingDeploy: PeriodicProcessDeploymentStatus)
+ }
+ )
+
+ private def findProcesses(
+ query: Query[
+ (PeriodicProcessWithoutJson, PeriodicProcessDeploymentsTable),
+ (PeriodicProcessEntityWithoutJson, PeriodicProcessDeploymentEntity),
+ Seq
+ ]
+ ) = {
+ query.result
+ .map(_.map { case (periodicProcess, periodicDeployment) =>
+ LegacyPeriodicProcessesRepository.createPeriodicProcessDeployment(
+ periodicProcess,
+ periodicDeployment,
+ )
+ })
+ }
+
+ override def findProcessData(id: PeriodicProcessDeploymentId): Action[PeriodicProcessDeployment] =
+ findProcesses(
+ (PeriodicProcessesWithoutJson join PeriodicProcessDeployments on (_.id === _.periodicProcessId))
+ .filter { case (_, deployment) => deployment.id === id }
+ ).map(_.head)
+
+ override def markDeployed(id: PeriodicProcessDeploymentId): Action[Unit] = {
+ val q = for {
+ d <- PeriodicProcessDeployments if d.id === id
+ } yield (d.status, d.deployedAt)
+ val update = q.update((PeriodicProcessDeploymentStatus.Deployed, Some(now())))
+ update.map(_ => ())
+ }
+
+ override def markFailed(id: PeriodicProcessDeploymentId): Action[Unit] = {
+ updateCompleted(id, PeriodicProcessDeploymentStatus.Failed)
+ }
+
+ override def markFinished(id: PeriodicProcessDeploymentId): Action[Unit] = {
+ updateCompleted(id, PeriodicProcessDeploymentStatus.Finished)
+ }
+
+ override def markFailedOnDeployWithStatus(
+ id: PeriodicProcessDeploymentId,
+ status: PeriodicProcessDeploymentStatus,
+ retriesLeft: Int,
+ retryAt: Option[LocalDateTime]
+ ): Action[Unit] = {
+ val q = for {
+ d <- PeriodicProcessDeployments if d.id === id
+ } yield (d.status, d.completedAt, d.retriesLeft, d.nextRetryAt)
+ val update = q.update((status, Some(now()), retriesLeft, retryAt))
+ update.map(_ => ())
+ }
+
+ private def updateCompleted(
+ id: PeriodicProcessDeploymentId,
+ status: PeriodicProcessDeploymentStatus
+ ): Action[Unit] = {
+ val q = for {
+ d <- PeriodicProcessDeployments if d.id === id
+ } yield (d.status, d.completedAt)
+ val update = q.update((status, Some(now())))
+ update.map(_ => ())
+ }
+
+ override def findActiveSchedulesForProcessesHavingDeploymentWithMatchingStatus(
+ expectedDeploymentStatuses: Set[PeriodicProcessDeploymentStatus],
+ ): Action[SchedulesState] = {
+ val processesHavingDeploymentsWithMatchingStatus = PeriodicProcessesWithoutJson.filter(p =>
+ p.active &&
+ PeriodicProcessDeployments
+ .filter(d => d.periodicProcessId === p.id && d.status.inSet(expectedDeploymentStatuses))
+ .exists
+ )
+ getLatestDeploymentsForEachSchedule(
+ processesHavingDeploymentsWithMatchingStatus,
+ deploymentsPerScheduleMaxCount = 1,
+ processingType = processingType,
+ ).map(schedulesForProcessNames =>
+ SchedulesState(
+ schedulesForProcessNames.values.map(_.schedules).foldLeft(Map.empty[ScheduleId, ScheduleData])(_ ++ _)
+ )
+ )
+ }
+
+ override def getLatestDeploymentsForActiveSchedules(
+ processName: ProcessName,
+ deploymentsPerScheduleMaxCount: Int,
+ ): Action[SchedulesState] = {
+ val activeProcessesQuery =
+ PeriodicProcessesWithoutJson.filter(p => p.processName === processName && p.active)
+ getLatestDeploymentsForEachSchedule(activeProcessesQuery, deploymentsPerScheduleMaxCount, processingType)
+ .map(_.getOrElse(processName, SchedulesState(Map.empty)))
+ }
+
+ override def getLatestDeploymentsForActiveSchedules(
+ deploymentsPerScheduleMaxCount: Int,
+ ): Action[Map[ProcessName, SchedulesState]] = {
+ val activeProcessesQuery = PeriodicProcessesWithoutJson.filter(_.active)
+ getLatestDeploymentsForEachSchedule(activeProcessesQuery, deploymentsPerScheduleMaxCount, processingType)
+ }
+
+ override def getLatestDeploymentsForLatestInactiveSchedules(
+ processName: ProcessName,
+ inactiveProcessesMaxCount: Int,
+ deploymentsPerScheduleMaxCount: Int,
+ ): Action[SchedulesState] = {
+ val filteredProcessesQuery = PeriodicProcessesWithoutJson
+ .filter(p => p.processName === processName && !p.active)
+ .sortBy(_.createdAt.desc)
+ .take(inactiveProcessesMaxCount)
+ getLatestDeploymentsForEachSchedule(filteredProcessesQuery, deploymentsPerScheduleMaxCount, processingType)
+ .map(_.getOrElse(processName, SchedulesState(Map.empty)))
+ }
+
+ override def getLatestDeploymentsForLatestInactiveSchedules(
+ inactiveProcessesMaxCount: Int,
+ deploymentsPerScheduleMaxCount: Int,
+ ): Action[Map[ProcessName, SchedulesState]] = {
+ val filteredProcessesQuery = PeriodicProcessesWithoutJson
+ .filter(!_.active)
+ .sortBy(_.createdAt.desc)
+ .take(inactiveProcessesMaxCount)
+ getLatestDeploymentsForEachSchedule(filteredProcessesQuery, deploymentsPerScheduleMaxCount, processingType)
+ }
+
+ private def getLatestDeploymentsForEachSchedule(
+ periodicProcessesQuery: Query[
+ PeriodicProcessWithoutJson,
+ PeriodicProcessEntityWithoutJson,
+ Seq
+ ],
+ deploymentsPerScheduleMaxCount: Int,
+ processingType: String,
+ ): Action[Map[ProcessName, SchedulesState]] = {
+ val filteredPeriodicProcessQuery = periodicProcessesQuery.filter(p => p.processingType === processingType)
+ val latestDeploymentsForSchedules = profile match {
+ case _: ExPostgresProfile =>
+ getLatestDeploymentsForEachSchedulePostgres(filteredPeriodicProcessQuery, deploymentsPerScheduleMaxCount)
+ case _ =>
+ getLatestDeploymentsForEachScheduleJdbcGeneric(filteredPeriodicProcessQuery, deploymentsPerScheduleMaxCount)
+ }
+ latestDeploymentsForSchedules.map(toSchedulesState)
+ }
+
+ private def getLatestDeploymentsForEachSchedulePostgres(
+ periodicProcessesQuery: Query[
+ PeriodicProcessWithoutJson,
+ PeriodicProcessEntityWithoutJson,
+ Seq
+ ],
+ deploymentsPerScheduleMaxCount: Int
+ ): Action[Seq[(PeriodicProcessEntity, PeriodicProcessDeploymentEntity)]] = {
+ // To effectively limit deployments to a given count for each schedule in one query, we use window functions in Slick
+ import ExPostgresProfile.api._
+ import com.github.tminglei.slickpg.window.PgWindowFuncSupport.WindowFunctions._
+
+ val deploymentsForProcesses =
+ periodicProcessesQuery join PeriodicProcessDeployments on (_.id === _.periodicProcessId)
+ deploymentsForProcesses
+ .map { case (process, deployment) =>
+ (
+ rowNumber() :: Over
+ .partitionBy((deployment.periodicProcessId, deployment.scheduleName))
+ .sortBy(
+ deployment.runAt.desc,
+ deployment.createdAt.desc
+ ), // Remember to change PeriodicDeploymentStatus.ordering accordingly
+ process,
+ deployment
+ )
+ }
+ .subquery
+ .filter(_._1 <= deploymentsPerScheduleMaxCount.longValue())
+ .map { case (_, process, deployment) =>
+ (process, deployment)
+ }
+ .result
+ }
+
+ // This variant of the method is much less optimal than the Postgres one. It is highly recommended to use Postgres with periodic scenarios.
+ // If we decide to support more databases, we should consider an optimization such as extracting a periodic_schedule table
+ // with a foreign key to periodic_process and a schedule_name column - it would reduce the number of queries
+ private def getLatestDeploymentsForEachScheduleJdbcGeneric(
+ periodicProcessesQuery: Query[
+ PeriodicProcessWithoutJson,
+ PeriodicProcessEntityWithoutJson,
+ Seq
+ ],
+ deploymentsPerScheduleMaxCount: Int
+ ): Action[Seq[(PeriodicProcessEntity, PeriodicProcessDeploymentEntity)]] = {
+ // It is debug instead of warn to avoid bloating logs when e.g. hsql is used under the hood for some reason
+ logger.debug(
+ "WARN: Using a non-optimized version of getLatestDeploymentsForEachSchedule that does not use window functions"
+ )
+ for {
+ processes <- periodicProcessesQuery.result
+ schedulesForProcesses <-
+ DBIO
+ .sequence(processes.map { process =>
+ PeriodicProcessDeployments
+ .filter(_.periodicProcessId === process.id)
+ .map(_.scheduleName)
+ .distinct
+ .result
+ .map(_.map((process, _)))
+ })
+ .map(_.flatten)
+ deploymentsForSchedules <-
+ DBIO
+ .sequence(schedulesForProcesses.map { case (process, scheduleName) =>
+ PeriodicProcessDeployments
+ // In SQL, comparing nulls always yields false, hence the explicit isEmpty check below
+ .filter(deployment =>
+ deployment.periodicProcessId === process.id && (deployment.scheduleName === scheduleName || deployment.scheduleName.isEmpty && scheduleName.isEmpty)
+ )
+ .sortBy(a => (a.runAt.desc, a.createdAt.desc)) // Remember to change PeriodicDeploymentStatus.ordering accordingly
+ .take(deploymentsPerScheduleMaxCount)
+ .result
+ .map(_.map((process, _)))
+ })
+ .map(_.flatten)
+ } yield deploymentsForSchedules
+ }
+
+ override def schedule(
+ id: PeriodicProcessId,
+ scheduleName: ScheduleName,
+ runAt: LocalDateTime,
+ deployMaxRetries: Int
+ ): Action[PeriodicProcessDeployment] = {
+ val deploymentEntity = PeriodicProcessDeploymentEntity(
+ id = PeriodicProcessDeploymentId(-1),
+ periodicProcessId = id,
+ createdAt = now(),
+ runAt = runAt,
+ scheduleName = scheduleName.value,
+ deployedAt = None,
+ completedAt = None,
+ retriesLeft = deployMaxRetries,
+ nextRetryAt = None,
+ status = PeriodicProcessDeploymentStatus.Scheduled
+ )
+ ((PeriodicProcessDeployments returning PeriodicProcessDeployments.map(_.id) into ((_, id) =>
+ id
+ )) += deploymentEntity).flatMap(findProcessData)
+ }
+
+ override def markInactive(processId: PeriodicProcessId): Action[Unit] = {
+ val q = for {
+ p <- PeriodicProcessesWithoutJson if p.id === processId
+ } yield p.active
+ val update = q.update(false)
+ update.map(_ => ())
+ }
+
+ override def fetchCanonicalProcessWithVersion(
+ processName: ProcessName,
+ versionId: VersionId
+ ): Future[Option[(CanonicalProcess, ProcessVersion)]] =
+ fetchingProcessRepository.getCanonicalProcessWithVersion(processName, versionId)(NussknackerInternalUser.instance)
+
+ def fetchInputConfigDuringExecutionJson(processName: ProcessName, versionId: VersionId): Action[Option[String]] =
+ PeriodicProcessesWithJson
+ .filter(p => p.processName === processName && p.processVersionId === versionId)
+ .map(_.inputConfigDuringExecutionJson)
+ .result
+ .headOption
+
+ private def activePeriodicProcessWithDeploymentQuery(processingType: String) = {
+ (PeriodicProcessesWithoutJson.filter(p => p.active === true && p.processingType === processingType)
+ join PeriodicProcessDeployments on (_.id === _.periodicProcessId))
+ }
+
+ private def toSchedulesState(
+ list: Seq[(PeriodicProcessEntity, PeriodicProcessDeploymentEntity)]
+ ): Map[ProcessName, SchedulesState] = {
+ list
+ .groupBy(_._1.processName)
+ .map { case (processName, list) => processName -> toSchedulesStateForSinglePeriodicProcess(list) }
+ }
+
+ private def toSchedulesStateForSinglePeriodicProcess(
+ list: Seq[(PeriodicProcessEntity, PeriodicProcessDeploymentEntity)]
+ ): SchedulesState = {
+ SchedulesState(
+ list
+ .map { case (process, deployment) =>
+ val scheduleId = ScheduleId(process.id, ScheduleName(deployment.scheduleName))
+ val scheduleData = (scheduleId, process)
+ val scheduleDeployment = scheduleDeploymentData(deployment)
+ (scheduleData, scheduleDeployment)
+ }
+ .toList
+ .toGroupedMap
+ .toList
+ .map { case ((scheduleId, process), deployments) =>
+ scheduleId -> ScheduleData(createPeriodicProcess(process), deployments)
+ }
+ .toMap
+ )
+ }
+
+ private def scheduleDeploymentData(deployment: PeriodicProcessDeploymentEntity): ScheduleDeploymentData = {
+ ScheduleDeploymentData(
+ deployment.id,
+ deployment.createdAt,
+ deployment.runAt,
+ deployment.deployedAt,
+ deployment.retriesLeft,
+ deployment.nextRetryAt,
+ LegacyPeriodicProcessesRepository.createPeriodicDeploymentState(deployment)
+ )
+ }
+
+}
+
+// Copied from designer/server.
+object DBIOActionInstances {
+
+ type DB[A] = DBIOAction[A, NoStream, Effect.All]
+
+ implicit def dbMonad(implicit ec: ExecutionContext): Monad[DB] = new Monad[DB] {
+
+ override def pure[A](x: A) = DBIO.successful(x)
+
+ override def flatMap[A, B](fa: DB[A])(f: (A) => DB[B]) = fa.flatMap(f)
+
+ // this is *not* tail recursive
+ override def tailRecM[A, B](a: A)(f: (A) => DB[Either[A, B]]): DB[B] =
+ f(a).flatMap {
+ case Right(r) => pure(r)
+ case Left(l) => tailRecM(l)(f)
+ }
+
+ }
+
+}
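
Because run wraps the composed action in a single transaction, multi-step operations compose with plain for-comprehensions over DBIOAction. A minimal sketch, assuming an instance of the repository above:

    import scala.concurrent.{ExecutionContext, Future}

    def redeploy(id: PeriodicProcessDeploymentId)(
        repo: SlickLegacyPeriodicProcessesRepository
    )(implicit ec: ExecutionContext): Future[PeriodicProcessDeployment] =
      repo.run {
        for {
          _ <- repo.markDeployed(id)    // status := Deployed, deployedAt := now()
          d <- repo.findProcessData(id) // re-read the updated deployment
        } yield d                       // both steps commit atomically via .transactionally
      }
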
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/db/PeriodicProcessesTable.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessesTableFactory.scala
similarity index 92%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/db/PeriodicProcessesTable.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessesTableFactory.scala
index 1dfaef719eb..cf99e1ec652 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/db/PeriodicProcessesTable.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/legacy/db/LegacyPeriodicProcessesTableFactory.scala
@@ -1,37 +1,35 @@
-package pl.touk.nussknacker.engine.management.periodic.db
+package pl.touk.nussknacker.ui.process.periodic.legacy.db
import io.circe.syntax.EncoderOps
import pl.touk.nussknacker.engine.api.deployment.ProcessActionId
import pl.touk.nussknacker.engine.api.process.{ProcessName, VersionId}
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.model.PeriodicProcessId
import pl.touk.nussknacker.engine.marshall.ProcessMarshaller
-import slick.ast.TypedType
+import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessId
import slick.jdbc.JdbcProfile
-import slick.lifted.MappedToBase.mappedToIsomorphism
import slick.lifted.ProvenShape
import slick.sql.SqlProfile.ColumnOption.NotNull
import java.time.LocalDateTime
import java.util.UUID
-trait PeriodicProcessesTableFactory {
+trait LegacyPeriodicProcessesTableFactory {
protected val profile: JdbcProfile
import profile.api._
+ implicit val periodicProcessIdMapping: BaseColumnType[PeriodicProcessId] =
+ MappedColumnType.base[PeriodicProcessId, Long](_.value, PeriodicProcessId.apply)
+
implicit val processNameMapping: BaseColumnType[ProcessName] =
MappedColumnType.base[ProcessName, String](_.value, ProcessName.apply)
implicit val versionIdMapping: BaseColumnType[VersionId] =
- MappedColumnType.base[VersionId, Long](_.value, VersionId(_))
+ MappedColumnType.base[VersionId, Long](_.value, VersionId.apply)
- implicit val ProcessActionIdTypedType: TypedType[ProcessActionId] =
- MappedColumnType.base[ProcessActionId, UUID](
- _.value,
- ProcessActionId(_)
- )
+ implicit val processActionIdMapping: BaseColumnType[ProcessActionId] =
+ MappedColumnType.base[ProcessActionId, UUID](_.value, ProcessActionId.apply)
abstract class PeriodicProcessesTable[ENTITY <: PeriodicProcessEntity](tag: Tag)
extends Table[ENTITY](tag, "periodic_processes") {
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/model/PeriodicProcess.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/model/PeriodicProcess.scala
new file mode 100644
index 00000000000..50c54e5b271
--- /dev/null
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/model/PeriodicProcess.scala
@@ -0,0 +1,18 @@
+package pl.touk.nussknacker.ui.process.periodic.model
+
+import pl.touk.nussknacker.engine.api.deployment.ProcessActionId
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.DeploymentWithRuntimeParams
+import pl.touk.nussknacker.ui.process.periodic.ScheduleProperty
+
+import java.time.LocalDateTime
+
+case class PeriodicProcessId(value: Long)
+
+case class PeriodicProcess(
+ id: PeriodicProcessId,
+ deploymentData: DeploymentWithRuntimeParams,
+ scheduleProperty: ScheduleProperty,
+ active: Boolean,
+ createdAt: LocalDateTime,
+ processActionId: Option[ProcessActionId]
+)
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/model/PeriodicProcessDeployment.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/model/PeriodicProcessDeployment.scala
new file mode 100644
index 00000000000..dba8136bfdf
--- /dev/null
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/model/PeriodicProcessDeployment.scala
@@ -0,0 +1,71 @@
+package pl.touk.nussknacker.ui.process.periodic.model
+
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.{ScheduledDeploymentDetails, ScheduledDeploymentStatus}
+import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessDeploymentStatus.{
+ Deployed,
+ Failed,
+ FailedOnDeploy,
+ Finished,
+ PeriodicProcessDeploymentStatus,
+ RetryingDeploy,
+ Scheduled
+}
+
+import java.time.LocalDateTime
+
+// TODO: We should separate the schedules concept from deployments - fully switch to ScheduleData and ScheduleDeploymentData
+case class PeriodicProcessDeployment(
+ id: PeriodicProcessDeploymentId,
+ periodicProcess: PeriodicProcess,
+ createdAt: LocalDateTime,
+ runAt: LocalDateTime,
+ scheduleName: ScheduleName,
+ retriesLeft: Int,
+ nextRetryAt: Option[LocalDateTime],
+ state: PeriodicProcessDeploymentState
+) {
+
+ def display: String =
+ s"Process with id=${periodicProcess.deploymentData.processId}, name=${periodicProcess.deploymentData.processName}, versionId=${periodicProcess.deploymentData.versionId}, scheduleName=${scheduleName.display} and deploymentId=$id"
+
+ def toDetails: ScheduledDeploymentDetails =
+ ScheduledDeploymentDetails(
+ id = id.value,
+ processName = periodicProcess.deploymentData.processName,
+ versionId = periodicProcess.deploymentData.versionId,
+ scheduleName = scheduleName.value,
+ createdAt = createdAt,
+ runAt = runAt,
+ deployedAt = state.deployedAt,
+ completedAt = state.completedAt,
+ status = state.status match {
+ case Scheduled => ScheduledDeploymentStatus.Scheduled
+ case Deployed => ScheduledDeploymentStatus.Deployed
+ case Finished => ScheduledDeploymentStatus.Finished
+ case Failed => ScheduledDeploymentStatus.Failed
+ case RetryingDeploy => ScheduledDeploymentStatus.RetryingDeploy
+ case FailedOnDeploy => ScheduledDeploymentStatus.FailedOnDeploy
+ },
+ )
+
+}
+
+case class PeriodicProcessDeploymentState(
+ deployedAt: Option[LocalDateTime],
+ completedAt: Option[LocalDateTime],
+ status: PeriodicProcessDeploymentStatus
+)
+
+case class PeriodicProcessDeploymentId(value: Long) {
+ override def toString: String = value.toString
+}
+
+object PeriodicProcessDeploymentStatus extends Enumeration {
+ type PeriodicProcessDeploymentStatus = Value
+
+ val Scheduled, Deployed, Finished, Failed, RetryingDeploy, FailedOnDeploy = Value
+}
+
+case class ScheduleName(value: Option[String]) {
+ def display: String = value.getOrElse("[default]")
+}
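
The display semantics, per the definition above ("nightly" is a hypothetical schedule key):

    ScheduleName(None).display            // "[default]" - a scenario with a single schedule
    ScheduleName(Some("nightly")).display // "nightly"   - one key of a multi-schedule property
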
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/model/SchedulesState.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/model/SchedulesState.scala
similarity index 66%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/model/SchedulesState.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/model/SchedulesState.scala
index ca697fe27d9..b4161cffe46 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/model/SchedulesState.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/model/SchedulesState.scala
@@ -1,8 +1,6 @@
-package pl.touk.nussknacker.engine.management.periodic.model
+package pl.touk.nussknacker.ui.process.periodic.model
import pl.touk.nussknacker.engine.api.process.ProcessName
-import pl.touk.nussknacker.engine.management.periodic.db.{PeriodicProcessDeploymentEntity, PeriodicProcessesRepository}
-import pl.touk.nussknacker.engine.management.periodic.model.DeploymentWithJarData.WithoutCanonicalProcess
import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap
import java.time.LocalDateTime
@@ -20,7 +18,7 @@ case class SchedulesState(schedules: Map[ScheduleId, ScheduleData]) {
def isEmpty: Boolean = schedules.isEmpty
def groupByProcessName: Map[ProcessName, SchedulesState] =
- schedules.groupBy(_._2.process.processVersion.processName).mapValuesNow(SchedulesState)
+ schedules.groupBy(_._2.process.deploymentData.processName).mapValuesNow(SchedulesState)
lazy val groupedByPeriodicProcess: List[PeriodicProcessScheduleData] =
schedules.toList.groupBy(_._2.process).toList.map { case (periodicProcess, groupedSchedules) =>
@@ -36,10 +34,7 @@ case class SchedulesState(schedules: Map[ScheduleId, ScheduleData]) {
// For most operations it will contain only the single latest deployment, but for the purpose of historical deployment statuses
// it holds a list instead of a single element.
// This structure should contain SingleScheduleProperty as well. See note above
-case class ScheduleData(
- process: PeriodicProcess[WithoutCanonicalProcess],
- latestDeployments: List[ScheduleDeploymentData]
-)
+case class ScheduleData(process: PeriodicProcess, latestDeployments: List[ScheduleDeploymentData])
// To identify a schedule we need scheduleName - None for SingleScheduleProperty and Some(key) for MultipleScheduleProperty keys.
// We also need PeriodicProcessId to distinguish active schedules from past, inactive ones of the same scenario.
@@ -57,42 +52,17 @@ case class ScheduleDeploymentData(
) {
def toFullDeploymentData(
- process: PeriodicProcess[WithoutCanonicalProcess],
+ process: PeriodicProcess,
scheduleName: ScheduleName
- ): PeriodicProcessDeployment[WithoutCanonicalProcess] =
+ ): PeriodicProcessDeployment =
PeriodicProcessDeployment(id, process, createdAt, runAt, scheduleName, retriesLeft, nextRetryAt, state)
def display = s"deploymentId=$id"
}
-object ScheduleDeploymentData {
-
- def apply(deployment: PeriodicProcessDeploymentEntity): ScheduleDeploymentData = {
- ScheduleDeploymentData(
- deployment.id,
- deployment.createdAt,
- deployment.runAt,
- deployment.deployedAt,
- deployment.retriesLeft,
- deployment.nextRetryAt,
- PeriodicProcessesRepository.createPeriodicDeploymentState(deployment)
- )
- }
-
-}
-
// These below are temporary structures, see notice next to SchedulesState
case class PeriodicProcessScheduleData(
- process: PeriodicProcess[WithoutCanonicalProcess],
- deployments: List[PeriodicProcessDeployment[WithoutCanonicalProcess]]
-) {
- def existsDeployment(predicate: PeriodicProcessDeployment[WithoutCanonicalProcess] => Boolean): Boolean =
- deployments.exists(predicate)
-
- def display: String = {
- val deploymentsForSchedules = deployments.map(_.display)
- s"processName=${process.processVersion.processName}, deploymentsForSchedules=$deploymentsForSchedules"
- }
-
-}
+ process: PeriodicProcess,
+ deployments: List[PeriodicProcessDeployment]
+)
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/util/DeterministicUUIDFromLong.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/utils/DeterministicUUIDFromLong.scala
similarity index 93%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/util/DeterministicUUIDFromLong.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/utils/DeterministicUUIDFromLong.scala
index 3f1a80b09b4..4cf890790cf 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/util/DeterministicUUIDFromLong.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/utils/DeterministicUUIDFromLong.scala
@@ -1,4 +1,4 @@
-package pl.touk.nussknacker.engine.management.periodic.util
+package pl.touk.nussknacker.ui.process.periodic.utils
import java.util.UUID
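
This file is only moved between packages; its body is essentially unchanged (93% similarity) and not shown here. For orientation, a minimal sketch of one way such a helper can be written - an assumption, not the actual implementation - is to derive a name-based (type 3) UUID from the long's bytes, so equal inputs always yield equal UUIDs:

    import java.util.UUID

    object DeterministicUUIDFromLongSketch {
      // nameUUIDFromBytes is a pure function of its input bytes, which
      // makes the resulting UUID deterministic for a given Long.
      def apply(value: Long): UUID =
        UUID.nameUUIDFromBytes(BigInt(value).toByteArray)
    }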
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/SchedulePropertyExtractor.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/utils/SchedulePropertyExtractorUtils.scala
similarity index 76%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/SchedulePropertyExtractor.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/utils/SchedulePropertyExtractorUtils.scala
index 989d625cb51..0e2ccb4bd55 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/SchedulePropertyExtractor.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/periodic/utils/SchedulePropertyExtractorUtils.scala
@@ -1,18 +1,13 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic.utils
import cats.instances.list._
import cats.syntax.traverse._
-import com.typesafe.scalalogging.LazyLogging
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.CronSchedulePropertyExtractor.CronPropertyDefaultName
+import pl.touk.nussknacker.ui.process.periodic.{CronScheduleProperty, MultipleScheduleProperty, ScheduleProperty, SingleScheduleProperty}
import java.time.Clock
-trait SchedulePropertyExtractor {
- def apply(canonicalProcess: CanonicalProcess): Either[String, ScheduleProperty]
-}
-
-object SchedulePropertyExtractor {
+object SchedulePropertyExtractorUtils {
def extractProperty(canonicalProcess: CanonicalProcess, name: String): Either[String, ScheduleProperty] = {
for {
@@ -75,19 +70,3 @@ object SchedulePropertyExtractor {
}
}
-
-object CronSchedulePropertyExtractor {
-
- val CronPropertyDefaultName = "cron"
-
-}
-
-case class CronSchedulePropertyExtractor(propertyName: String = CronPropertyDefaultName)
- extends SchedulePropertyExtractor
- with LazyLogging {
-
- override def apply(canonicalProcess: CanonicalProcess): Either[String, ScheduleProperty] = {
- SchedulePropertyExtractor.extractProperty(canonicalProcess, propertyName)
- }
-
-}
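
With CronSchedulePropertyExtractor removed, callers invoke the utility object directly. A minimal usage sketch, assuming a CanonicalProcess value is in scope and reusing the "cron" default property name from the deleted extractor:

    // "cron" was CronPropertyDefaultName in the removed CronSchedulePropertyExtractor.
    val scheduleProperty: Either[String, ScheduleProperty] =
      SchedulePropertyExtractorUtils.extractProperty(canonicalProcess, "cron")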
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/InvalidDeploymentManagerStub.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/InvalidDeploymentManagerStub.scala
index df7f930b571..ee66bfe9b5b 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/InvalidDeploymentManagerStub.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/InvalidDeploymentManagerStub.scala
@@ -54,5 +54,7 @@ object InvalidDeploymentManagerStub extends DeploymentManager {
override def stateQueryForAllScenariosSupport: StateQueryForAllScenariosSupport = NoStateQueryForAllScenariosSupport
+ override def schedulingSupport: SchedulingSupport = NoSchedulingSupport
+
override def close(): Unit = ()
}
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/ModelClassLoaderProvider.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/ModelClassLoaderProvider.scala
new file mode 100644
index 00000000000..b3404eb0ac1
--- /dev/null
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/ModelClassLoaderProvider.scala
@@ -0,0 +1,69 @@
+package pl.touk.nussknacker.ui.process.processingtype
+
+import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap
+import pl.touk.nussknacker.engine.util.loader.ModelClassLoader
+
+import java.nio.file.Path
+
+final case class ModelClassLoaderDependencies(classpath: List[String], workingDirectoryOpt: Option[Path]) {
+
+ def show(): String = {
+ val workingDirectoryReadable = workingDirectoryOpt match {
+ case Some(value) => value.toString
+ case None => "None (default)"
+ }
+ s"classpath: ${classpath.mkString(", ")}, workingDirectoryOpt: $workingDirectoryReadable"
+ }
+
+}
+
+class ModelClassLoaderProvider private (
+ processingTypeClassLoaders: Map[String, (ModelClassLoader, ModelClassLoaderDependencies)]
+) {
+
+ def forProcessingTypeUnsafe(processingTypeName: String): ModelClassLoader = {
+ processingTypeClassLoaders
+ .getOrElse(
+ processingTypeName,
+ throw new IllegalArgumentException(
+ s"Unknown ProcessingType: $processingTypeName, known ProcessingTypes are: ${processingTypeName.mkString(", ")}"
+ )
+ )
+ ._1
+ }
+
+ def validateReloadConsistency(
+ dependenciesFromReload: Map[String, ModelClassLoaderDependencies]
+ ): Unit = {
+ if (dependenciesFromReload.keySet != processingTypeClassLoaders.keySet) {
+ throw new IllegalStateException(
+ s"""Processing types cannot be added, removed, or renamed during processing type reload.
+ |Reloaded processing types: [${dependenciesFromReload.keySet.toList.sorted.mkString(", ")}]
+ |Current processing types: [${processingTypeClassLoaders.keySet.toList.sorted.mkString(", ")}]
+ |If you need to modify this, please restart the application with the desired config.""".stripMargin
+ )
+ }
+ dependenciesFromReload.foreach { case (processingType, reloadedConfig) =>
+ val currentConfig = processingTypeClassLoaders.mapValuesNow(_._2)(processingType)
+ if (reloadedConfig != currentConfig) {
+ throw new IllegalStateException(
+ s"Error during processing types reload. Model ClassLoader dependencies such as classpath cannot be modified during reload. " +
+ s"For processing type [$processingType], reloaded ClassLoader dependencies: [${reloadedConfig.show()}] " +
+ s"do not match current dependencies: [${currentConfig.show()}]"
+ )
+ }
+ }
+ }
+
+}
+
+object ModelClassLoaderProvider {
+
+ def apply(processingTypeConfig: Map[String, ModelClassLoaderDependencies]): ModelClassLoaderProvider = {
+ val processingTypesClassloaders = processingTypeConfig.map { case (name, deps) =>
+ name -> (ModelClassLoader(deps.classpath, deps.workingDirectoryOpt) -> deps)
+ }
+ new ModelClassLoaderProvider(processingTypesClassloaders)
+ }
+
+}
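
A usage sketch based on the code above; the processing type name and classpath entries are illustrative only. The provider is built once with the dependencies of every processing type and later hands out the matching ModelClassLoader:

    val provider = ModelClassLoaderProvider(
      Map(
        "streaming" -> ModelClassLoaderDependencies(
          classpath = List("model/defaultModel.jar", "components/flink"),
          workingDirectoryOpt = None
        )
      )
    )
    // Throws IllegalArgumentException for processing types that were not
    // registered at construction time.
    val modelClassLoader = provider.forProcessingTypeUnsafe("streaming")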
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/ProcessingTypeData.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/ProcessingTypeData.scala
index 9390503101c..00aa215ed26 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/ProcessingTypeData.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/ProcessingTypeData.scala
@@ -4,6 +4,7 @@ import com.typesafe.config.Config
import pl.touk.nussknacker.engine._
import pl.touk.nussknacker.engine.api.component.ScenarioPropertyConfig
import pl.touk.nussknacker.engine.api.deployment.cache.ScenarioStateCachingConfig
+import pl.touk.nussknacker.engine.api.deployment.{NoSchedulingSupport, SchedulingSupported}
import pl.touk.nussknacker.engine.api.process.ProcessingType
import pl.touk.nussknacker.engine.definition.component.Components.ComponentDefinitionExtractionMode
import pl.touk.nussknacker.engine.definition.component.{
@@ -13,6 +14,8 @@ import pl.touk.nussknacker.engine.definition.component.{
}
import pl.touk.nussknacker.engine.deployment.EngineSetupName
import pl.touk.nussknacker.restmodel.scenariodetails.ScenarioParameters
+import pl.touk.nussknacker.ui.db.DbRef
+import pl.touk.nussknacker.ui.process.periodic.{PeriodicDeploymentManagerDecorator, SchedulingConfig}
import pl.touk.nussknacker.ui.process.processingtype.DesignerModelData.DynamicComponentsStaticDefinitions
import scala.util.control.NonFatal
@@ -53,6 +56,7 @@ object ProcessingTypeData {
name: ProcessingType,
modelData: ModelData,
deploymentManagerProvider: DeploymentManagerProvider,
+ schedulingForProcessingType: SchedulingForProcessingType,
deploymentManagerDependencies: DeploymentManagerDependencies,
engineSetupName: EngineSetupName,
deploymentConfig: Config,
@@ -64,11 +68,12 @@ object ProcessingTypeData {
val deploymentData =
createDeploymentData(
deploymentManagerProvider,
+ schedulingForProcessingType,
deploymentManagerDependencies,
engineSetupName,
modelData,
deploymentConfig,
- metaDataInitializer
+ metaDataInitializer,
)
val designerModelData =
@@ -90,22 +95,52 @@ object ProcessingTypeData {
private def createDeploymentData(
deploymentManagerProvider: DeploymentManagerProvider,
+ schedulingForProcessingType: SchedulingForProcessingType,
deploymentManagerDependencies: DeploymentManagerDependencies,
engineSetupName: EngineSetupName,
modelData: ModelData,
deploymentConfig: Config,
- metaDataInitializer: MetaDataInitializer
+ metaDataInitializer: MetaDataInitializer,
) = {
val scenarioStateCacheTTL = ScenarioStateCachingConfig.extractScenarioStateCacheTTL(deploymentConfig)
- val validDeploymentManager =
- deploymentManagerProvider.createDeploymentManager(
+ val validDeploymentManager = for {
+ deploymentManager <- deploymentManagerProvider.createDeploymentManager(
modelData,
deploymentManagerDependencies,
deploymentConfig,
scenarioStateCacheTTL
)
- val scenarioProperties =
+ decoratedDeploymentManager = schedulingForProcessingType match {
+ case SchedulingForProcessingType.Available(dbRef) =>
+ deploymentManager.schedulingSupport match {
+ case supported: SchedulingSupported =>
+ PeriodicDeploymentManagerDecorator.decorate(
+ underlying = deploymentManager,
+ schedulingSupported = supported,
+ modelData = modelData,
+ deploymentConfig = deploymentConfig,
+ dependencies = deploymentManagerDependencies,
+ dbRef = dbRef,
+ )
+ case NoSchedulingSupport =>
+ throw new IllegalStateException(
+ s"DeploymentManager ${deploymentManagerProvider.name} does not support periodic execution"
+ )
+ }
+
+ case SchedulingForProcessingType.NotAvailable =>
+ deploymentManager
+ }
+ } yield decoratedDeploymentManager
+
+ val additionalScenarioProperties = schedulingForProcessingType match {
+ case SchedulingForProcessingType.Available(_) =>
+ PeriodicDeploymentManagerDecorator.additionalScenarioProperties
+ case SchedulingForProcessingType.NotAvailable =>
+ Map.empty[String, ScenarioPropertyConfig]
+ }
+ val scenarioProperties = additionalScenarioProperties ++
deploymentManagerProvider.scenarioPropertiesConfig(deploymentConfig) ++ modelData.modelConfig
.getOrElse[Map[ProcessingType, ScenarioPropertyConfig]]("scenarioPropertiesConfig", Map.empty)
val fragmentProperties = modelData.modelConfig
@@ -163,4 +198,14 @@ object ProcessingTypeData {
)
}
+ sealed trait SchedulingForProcessingType
+
+ object SchedulingForProcessingType {
+
+ case object NotAvailable extends SchedulingForProcessingType
+
+ final case class Available(dbRef: DbRef) extends SchedulingForProcessingType
+
+ }
+
}
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/LocalProcessingTypeDataLoader.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/LocalProcessingTypeDataLoader.scala
index 2fae13dc5c2..46f28fd3b6b 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/LocalProcessingTypeDataLoader.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/LocalProcessingTypeDataLoader.scala
@@ -5,9 +5,15 @@ import com.typesafe.config.ConfigFactory
import pl.touk.nussknacker.engine._
import pl.touk.nussknacker.engine.api.process.ProcessingType
import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap
+import pl.touk.nussknacker.ui.db.DbRef
+import pl.touk.nussknacker.ui.process.processingtype.ProcessingTypeData.SchedulingForProcessingType
import pl.touk.nussknacker.ui.process.processingtype.loader.ProcessingTypeDataLoader.toValueWithRestriction
import pl.touk.nussknacker.ui.process.processingtype.provider.ProcessingTypeDataState
-import pl.touk.nussknacker.ui.process.processingtype.{CombinedProcessingTypeData, ProcessingTypeData}
+import pl.touk.nussknacker.ui.process.processingtype.{
+ CombinedProcessingTypeData,
+ ModelClassLoaderProvider,
+ ProcessingTypeData
+}
class LocalProcessingTypeDataLoader(
modelData: Map[ProcessingType, (String, ModelData)],
@@ -16,7 +22,9 @@ class LocalProcessingTypeDataLoader(
override def loadProcessingTypeData(
getModelDependencies: ProcessingType => ModelDependencies,
- getDeploymentManagerDependencies: ProcessingType => DeploymentManagerDependencies
+ getDeploymentManagerDependencies: ProcessingType => DeploymentManagerDependencies,
+ modelClassLoaderProvider: ModelClassLoaderProvider,
+ dbRef: Option[DbRef],
): IO[ProcessingTypeDataState[ProcessingTypeData, CombinedProcessingTypeData]] = IO {
val processingTypes = modelData.map { case (processingType, (category, model)) =>
val deploymentManagerDependencies = getDeploymentManagerDependencies(processingType)
@@ -24,11 +32,12 @@ class LocalProcessingTypeDataLoader(
name = processingType,
modelData = model,
deploymentManagerProvider = deploymentManagerProvider,
+ schedulingForProcessingType = SchedulingForProcessingType.NotAvailable,
deploymentManagerDependencies = deploymentManagerDependencies,
engineSetupName = deploymentManagerProvider.defaultEngineSetupName,
deploymentConfig = ConfigFactory.empty(),
category = category,
- componentDefinitionExtractionMode = getModelDependencies(processingType).componentDefinitionExtractionMode
+ componentDefinitionExtractionMode = getModelDependencies(processingType).componentDefinitionExtractionMode,
)
processingType -> data
}
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypeDataLoader.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypeDataLoader.scala
index b2b526fc02f..277dd6b96a1 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypeDataLoader.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypeDataLoader.scala
@@ -3,9 +3,11 @@ package pl.touk.nussknacker.ui.process.processingtype.loader
import cats.effect.IO
import pl.touk.nussknacker.engine.api.process.ProcessingType
import pl.touk.nussknacker.engine.{DeploymentManagerDependencies, ModelDependencies}
+import pl.touk.nussknacker.ui.db.DbRef
import pl.touk.nussknacker.ui.process.processingtype.provider.ProcessingTypeDataState
import pl.touk.nussknacker.ui.process.processingtype.{
CombinedProcessingTypeData,
+ ModelClassLoaderProvider,
ProcessingTypeData,
ValueWithRestriction
}
@@ -15,6 +17,10 @@ trait ProcessingTypeDataLoader {
def loadProcessingTypeData(
getModelDependencies: ProcessingType => ModelDependencies,
getDeploymentManagerDependencies: ProcessingType => DeploymentManagerDependencies,
+ modelClassLoaderProvider: ModelClassLoaderProvider,
+ // should always be available; it is used by the scheduling mechanism,
+ // but in tests we sometimes do not want to bootstrap the full environment with a db
+ dbRef: Option[DbRef],
): IO[ProcessingTypeDataState[ProcessingTypeData, CombinedProcessingTypeData]]
}
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala
index de70b5c0ba2..6f7e96d6e67 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/loader/ProcessingTypesConfigBasedProcessingTypeDataLoader.scala
@@ -7,6 +7,8 @@ import pl.touk.nussknacker.engine.api.process.ProcessingType
import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap
import pl.touk.nussknacker.engine.util.loader.ScalaServiceLoader
import pl.touk.nussknacker.ui.configloader.{ProcessingTypeConfigs, ProcessingTypeConfigsLoader}
+import pl.touk.nussknacker.ui.db.DbRef
+import pl.touk.nussknacker.ui.process.processingtype.ProcessingTypeData.SchedulingForProcessingType
import pl.touk.nussknacker.ui.process.processingtype._
import pl.touk.nussknacker.ui.process.processingtype.loader.ProcessingTypeDataLoader.toValueWithRestriction
import pl.touk.nussknacker.ui.process.processingtype.provider.ProcessingTypeDataState
@@ -18,16 +20,28 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(processingTypeConfigsLo
override def loadProcessingTypeData(
getModelDependencies: ProcessingType => ModelDependencies,
getDeploymentManagerDependencies: ProcessingType => DeploymentManagerDependencies,
+ modelClassLoaderProvider: ModelClassLoaderProvider,
+ dbRef: Option[DbRef],
): IO[ProcessingTypeDataState[ProcessingTypeData, CombinedProcessingTypeData]] = {
processingTypeConfigsLoader
.loadProcessingTypeConfigs()
- .map(createProcessingTypeData(_, getModelDependencies, getDeploymentManagerDependencies))
+ .map(
+ createProcessingTypeData(
+ _,
+ getModelDependencies,
+ getDeploymentManagerDependencies,
+ modelClassLoaderProvider,
+ dbRef
+ )
+ )
}
private def createProcessingTypeData(
processingTypesConfig: ProcessingTypeConfigs,
getModelDependencies: ProcessingType => ModelDependencies,
- getDeploymentManagerDependencies: ProcessingType => DeploymentManagerDependencies
+ getDeploymentManagerDependencies: ProcessingType => DeploymentManagerDependencies,
+ modelClassLoaderProvider: ModelClassLoaderProvider,
+ dbRef: Option[DbRef],
): ProcessingTypeDataState[ProcessingTypeData, CombinedProcessingTypeData] = {
// This step of splitting DeploymentManagerProvider loading for all processing types
// from the subsequent creation of ProcessingTypeData is done because of the deduplication of deployments
@@ -41,21 +55,42 @@ class ProcessingTypesConfigBasedProcessingTypeDataLoader(processingTypeConfigsLo
)
(processingTypeConfig, provider, nameInputData)
}
+ modelClassLoaderProvider.validateReloadConsistency(providerWithNameInputData.map { case (processingType, data) =>
+ processingType -> ModelClassLoaderDependencies(
+ classpath = data._1.classPath,
+ workingDirectoryOpt = getModelDependencies(processingType).workingDirectoryOpt
+ )
+ })
+
val engineSetupNames =
ScenarioParametersDeterminer.determineEngineSetupNames(providerWithNameInputData.mapValuesNow(_._3))
val processingTypesData = providerWithNameInputData
.map { case (processingType, (processingTypeConfig, deploymentManagerProvider, _)) =>
logger.debug(s"Creating Processing Type: $processingType with config: $processingTypeConfig")
+ val schedulingForProcessingType =
+ if (processingTypeConfig.deploymentConfig.hasPath("scheduling.enabled") &&
+ processingTypeConfig.deploymentConfig.getBoolean("scheduling.enabled")) {
+ SchedulingForProcessingType.Available(
+ dbRef.getOrElse(
+ throw new RuntimeException("dbRef not present, but required for a DeploymentManager with scheduling enabled")
+ ),
+ )
+ } else {
+ SchedulingForProcessingType.NotAvailable
+ }
+
val modelDependencies = getModelDependencies(processingType)
+ val modelClassLoader = modelClassLoaderProvider.forProcessingTypeUnsafe(processingType)
val processingTypeData = ProcessingTypeData.createProcessingTypeData(
processingType,
- ModelData(processingTypeConfig, modelDependencies),
+ ModelData(processingTypeConfig, modelDependencies, modelClassLoader),
deploymentManagerProvider,
+ schedulingForProcessingType,
getDeploymentManagerDependencies(processingType),
engineSetupNames(processingType),
processingTypeConfig.deploymentConfig,
processingTypeConfig.category,
- modelDependencies.componentDefinitionExtractionMode
+ modelDependencies.componentDefinitionExtractionMode,
)
processingType -> processingTypeData
}
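
The scheduling decoration is driven purely by the deployment config of a processing type. A hedged HOCON fragment with the key path taken from the hasPath/getBoolean calls above (the surrounding deploymentConfig block name is an assumption based on this codebase's conventions):

    deploymentConfig {
      scheduling {
        # when true, the DeploymentManager is wrapped with PeriodicDeploymentManagerDecorator
        enabled: true
      }
    }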
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBFetchingProcessRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBFetchingProcessRepository.scala
index c0ff82ef570..86450941a90 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBFetchingProcessRepository.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBFetchingProcessRepository.scala
@@ -5,8 +5,10 @@ import cats.data.OptionT
import cats.instances.future._
import com.typesafe.scalalogging.LazyLogging
import db.util.DBIOActionInstances._
+import pl.touk.nussknacker.engine.api.ProcessVersion
import pl.touk.nussknacker.engine.api.deployment.{ProcessAction, ProcessActionState, ScenarioActionName}
import pl.touk.nussknacker.engine.api.process._
+import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
import pl.touk.nussknacker.ui.db.DbRef
import pl.touk.nussknacker.ui.db.entity._
import pl.touk.nussknacker.ui.process.label.ScenarioLabel
@@ -22,19 +24,20 @@ object DBFetchingProcessRepository {
def create(
dbRef: DbRef,
- actionRepository: ScenarioActionRepository,
+ actionRepository: ScenarioActionReadOnlyRepository,
scenarioLabelsRepository: ScenarioLabelsRepository
)(implicit ec: ExecutionContext) =
new DBFetchingProcessRepository[DB](dbRef, actionRepository, scenarioLabelsRepository) with DbioRepository
def createFutureRepository(
dbRef: DbRef,
- actionRepository: ScenarioActionRepository,
+ actionReadOnlyRepository: ScenarioActionReadOnlyRepository,
scenarioLabelsRepository: ScenarioLabelsRepository
)(
implicit ec: ExecutionContext
) =
- new DBFetchingProcessRepository[Future](dbRef, actionRepository, scenarioLabelsRepository) with BasicRepository
+ new DBFetchingProcessRepository[Future](dbRef, actionReadOnlyRepository, scenarioLabelsRepository)
+ with BasicRepository
}
@@ -43,7 +46,7 @@ object DBFetchingProcessRepository {
// to the resource on the services side
abstract class DBFetchingProcessRepository[F[_]: Monad](
protected val dbRef: DbRef,
- actionRepository: ScenarioActionRepository,
+ actionRepository: ScenarioActionReadOnlyRepository,
scenarioLabelsRepository: ScenarioLabelsRepository,
)(protected implicit val ec: ExecutionContext)
extends FetchingProcessRepository[F]
@@ -51,6 +54,22 @@ abstract class DBFetchingProcessRepository[F[_]: Monad](
import api._
+ override def getCanonicalProcessWithVersion(
+ processName: ProcessName,
+ versionId: VersionId
+ )(
+ implicit user: LoggedUser,
+ ): F[Option[(CanonicalProcess, ProcessVersion)]] = {
+ val result = for {
+ processId <- OptionT(fetchProcessId(processName))
+ details <- OptionT(fetchProcessDetailsForId[CanonicalProcess](processId, versionId))
+ } yield (
+ details.json,
+ details.toEngineProcessVersion,
+ )
+ result.value
+ }
+
override def fetchLatestProcessesDetails[PS: ScenarioShapeFetchStrategy](
query: ScenarioQuery
)(implicit loggedUser: LoggedUser, ec: ExecutionContext): F[List[ScenarioWithDetailsEntity[PS]]] = {
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/FetchingProcessRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/FetchingProcessRepository.scala
index db88f7dd0a3..c404f705218 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/FetchingProcessRepository.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/FetchingProcessRepository.scala
@@ -1,7 +1,9 @@
package pl.touk.nussknacker.ui.process.repository
import cats.Monad
-import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessIdWithName, ProcessName, ProcessingType, VersionId}
+import pl.touk.nussknacker.engine.api.ProcessVersion
+import pl.touk.nussknacker.engine.api.process._
+import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
import pl.touk.nussknacker.ui.process.ScenarioQuery
import pl.touk.nussknacker.ui.security.api.LoggedUser
@@ -23,6 +25,13 @@ abstract class FetchingProcessRepository[F[_]: Monad] extends ProcessDBQueryRepo
query: ScenarioQuery
)(implicit loggedUser: LoggedUser, ec: ExecutionContext): F[List[ScenarioWithDetailsEntity[PS]]]
+ def getCanonicalProcessWithVersion(
+ processName: ProcessName,
+ versionId: VersionId
+ )(
+ implicit user: LoggedUser,
+ ): F[Option[(CanonicalProcess, ProcessVersion)]]
+
def fetchProcessId(processName: ProcessName)(implicit ec: ExecutionContext): F[Option[ProcessId]]
def fetchProcessName(processId: ProcessId)(implicit ec: ExecutionContext): F[Option[ProcessName]]
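
A hedged call-site sketch for the new method, assuming the Future-based repository flavour and a LoggedUser in scope:

    val fetched: Future[Option[(CanonicalProcess, ProcessVersion)]] =
      fetchingProcessRepository.getCanonicalProcessWithVersion(processName, versionId)(loggedUser)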
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/db/PeriodicProcessesRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/PeriodicProcessesRepository.scala
similarity index 65%
rename from engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/db/PeriodicProcessesRepository.scala
rename to designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/PeriodicProcessesRepository.scala
index 24cf6c47dac..adbc64164ac 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/db/PeriodicProcessesRepository.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/PeriodicProcessesRepository.scala
@@ -1,20 +1,22 @@
-package pl.touk.nussknacker.engine.management.periodic.db
+package pl.touk.nussknacker.ui.process.repository
-import cats.Monad
import com.github.tminglei.slickpg.ExPostgresProfile
import com.typesafe.scalalogging.LazyLogging
+import db.util.DBIOActionInstances
+import db.util.DBIOActionInstances.DB
import io.circe.parser.decode
+import io.circe.syntax.EncoderOps
import pl.touk.nussknacker.engine.api.ProcessVersion
import pl.touk.nussknacker.engine.api.deployment.ProcessActionId
-import pl.touk.nussknacker.engine.api.process.ProcessName
-import pl.touk.nussknacker.engine.management.periodic._
-import pl.touk.nussknacker.engine.management.periodic.db.PeriodicProcessesRepository.createPeriodicProcessWithoutJson
-import pl.touk.nussknacker.engine.management.periodic.model.DeploymentWithJarData.{
- WithCanonicalProcess,
- WithoutCanonicalProcess
-}
-import pl.touk.nussknacker.engine.management.periodic.model.PeriodicProcessDeploymentStatus.PeriodicProcessDeploymentStatus
-import pl.touk.nussknacker.engine.management.periodic.model._
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.DeploymentWithRuntimeParams
+import pl.touk.nussknacker.engine.api.process.{ProcessName, VersionId}
+import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
+import pl.touk.nussknacker.ui.db.entity._
+import pl.touk.nussknacker.ui.process.periodic.ScheduleProperty
+import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessDeploymentStatus.PeriodicProcessDeploymentStatus
+import pl.touk.nussknacker.ui.process.periodic.model._
+import pl.touk.nussknacker.ui.process.repository.PeriodicProcessesRepository.createPeriodicProcess
+import pl.touk.nussknacker.ui.security.api.NussknackerInternalUser
import slick.dbio.{DBIOAction, Effect, NoStream}
import slick.jdbc.PostgresProfile.api._
import slick.jdbc.{JdbcBackend, JdbcProfile}
@@ -26,10 +28,10 @@ import scala.language.higherKinds
object PeriodicProcessesRepository {
def createPeriodicProcessDeployment(
- processEntity: PeriodicProcessEntityWithJson,
+ processEntity: PeriodicProcessEntity,
processDeploymentEntity: PeriodicProcessDeploymentEntity
- ): PeriodicProcessDeployment[WithCanonicalProcess] = {
- val process = createPeriodicProcessWithJson(processEntity)
+ ): PeriodicProcessDeployment = {
+ val process = createPeriodicProcess(processEntity)
PeriodicProcessDeployment(
processDeploymentEntity.id,
process,
@@ -52,36 +54,17 @@ object PeriodicProcessesRepository {
)
}
- def createPeriodicProcessWithJson(
- processEntity: PeriodicProcessEntityWithJson
- ): PeriodicProcess[WithCanonicalProcess] = {
- val processVersion = createProcessVersion(processEntity)
- val scheduleProperty = prepareScheduleProperty(processEntity)
- PeriodicProcess(
- processEntity.id,
- model.DeploymentWithJarData.WithCanonicalProcess(
- processVersion = processVersion,
- inputConfigDuringExecutionJson = processEntity.inputConfigDuringExecutionJson,
- jarFileName = processEntity.jarFileName,
- process = processEntity.processJson,
- ),
- scheduleProperty,
- processEntity.active,
- processEntity.createdAt,
- processEntity.processActionId
- )
- }
-
- def createPeriodicProcessWithoutJson(
+ def createPeriodicProcess(
processEntity: PeriodicProcessEntity
- ): PeriodicProcess[WithoutCanonicalProcess] = {
- val processVersion = createProcessVersion(processEntity)
+ ): PeriodicProcess = {
val scheduleProperty = prepareScheduleProperty(processEntity)
PeriodicProcess(
processEntity.id,
- model.DeploymentWithJarData.WithoutCanonicalProcess(
- processVersion = processVersion,
- jarFileName = processEntity.jarFileName,
+ DeploymentWithRuntimeParams(
+ processId = processEntity.processId,
+ processName = processEntity.processName,
+ versionId = processEntity.processVersionId,
+ runtimeParams = processEntity.runtimeParams,
),
scheduleProperty,
processEntity.active,
@@ -96,18 +79,12 @@ object PeriodicProcessesRepository {
scheduleProperty
}
- private def createProcessVersion(processEntity: PeriodicProcessEntity): ProcessVersion = {
- ProcessVersion.empty.copy(versionId = processEntity.processVersionId, processName = processEntity.processName)
- }
-
}
trait PeriodicProcessesRepository {
type Action[_]
- implicit def monad: Monad[Action]
-
implicit class RunOps[T](action: Action[T]) {
def run: Future[T] = PeriodicProcessesRepository.this.run(action)
}
@@ -122,42 +99,42 @@ trait PeriodicProcessesRepository {
): Action[SchedulesState]
def create(
- deploymentWithJarData: DeploymentWithJarData.WithCanonicalProcess,
+ deploymentWithRuntimeParams: DeploymentWithRuntimeParams,
+ inputConfigDuringExecutionJson: String,
+ canonicalProcess: CanonicalProcess,
scheduleProperty: ScheduleProperty,
- processActionId: ProcessActionId
- ): Action[PeriodicProcess[WithCanonicalProcess]]
+ processActionId: ProcessActionId,
+ ): Action[PeriodicProcess]
def getLatestDeploymentsForActiveSchedules(
processName: ProcessName,
- deploymentsPerScheduleMaxCount: Int
+ deploymentsPerScheduleMaxCount: Int,
): Action[SchedulesState]
def getLatestDeploymentsForActiveSchedules(
- deploymentsPerScheduleMaxCount: Int
+ deploymentsPerScheduleMaxCount: Int,
): Action[Map[ProcessName, SchedulesState]]
def getLatestDeploymentsForLatestInactiveSchedules(
processName: ProcessName,
inactiveProcessesMaxCount: Int,
- deploymentsPerScheduleMaxCount: Int
+ deploymentsPerScheduleMaxCount: Int,
): Action[SchedulesState]
def getLatestDeploymentsForLatestInactiveSchedules(
inactiveProcessesMaxCount: Int,
- deploymentsPerScheduleMaxCount: Int
+ deploymentsPerScheduleMaxCount: Int,
): Action[Map[ProcessName, SchedulesState]]
- def findToBeDeployed: Action[Seq[PeriodicProcessDeployment[WithCanonicalProcess]]]
+ def findToBeDeployed: Action[Seq[PeriodicProcessDeployment]]
- def findToBeRetried: Action[Seq[PeriodicProcessDeployment[WithCanonicalProcess]]]
+ def findToBeRetried: Action[Seq[PeriodicProcessDeployment]]
def findActiveSchedulesForProcessesHavingDeploymentWithMatchingStatus(
- expectedDeploymentStatuses: Set[PeriodicProcessDeploymentStatus]
+ expectedDeploymentStatuses: Set[PeriodicProcessDeploymentStatus],
): Action[SchedulesState]
- def findProcessData(id: PeriodicProcessDeploymentId): Action[PeriodicProcessDeployment[WithCanonicalProcess]]
-
- def findProcessData(processName: ProcessName): Action[Seq[PeriodicProcess[WithCanonicalProcess]]]
+ def findProcessData(id: PeriodicProcessDeploymentId): Action[PeriodicProcessDeployment]
def markDeployed(id: PeriodicProcessDeploymentId): Action[Unit]
@@ -177,35 +154,43 @@ trait PeriodicProcessesRepository {
scheduleName: ScheduleName,
runAt: LocalDateTime,
deployMaxRetries: Int
- ): Action[PeriodicProcessDeployment[WithCanonicalProcess]]
+ ): Action[PeriodicProcessDeployment]
+
+ def fetchInputConfigDuringExecutionJson(
+ processName: ProcessName,
+ versionId: VersionId
+ ): Action[Option[String]]
+
+ def fetchCanonicalProcessWithVersion(
+ processName: ProcessName,
+ versionId: VersionId
+ ): Future[Option[(CanonicalProcess, ProcessVersion)]]
}
class SlickPeriodicProcessesRepository(
+ processingType: String,
db: JdbcBackend.DatabaseDef,
override val profile: JdbcProfile,
clock: Clock,
- processingType: String
+ fetchingProcessRepository: FetchingProcessRepository[Future],
)(implicit ec: ExecutionContext)
extends PeriodicProcessesRepository
with PeriodicProcessesTableFactory
with PeriodicProcessDeploymentsTableFactory
with LazyLogging {
- import io.circe.syntax._
import pl.touk.nussknacker.engine.util.Implicits._
type Action[T] = DBIOActionInstances.DB[T]
- override implicit def monad: Monad[Action] = DBIOActionInstances.dbMonad
-
override def run[T](action: DBIOAction[T, NoStream, Effect.All]): Future[T] = db.run(action.transactionally)
override def getSchedulesState(
scenarioName: ProcessName,
afterOpt: Option[LocalDateTime],
): Action[SchedulesState] = {
- PeriodicProcessesWithoutJson
+ PeriodicProcessesWithoutInputConfig
.filter(_.processName === scenarioName)
.join(PeriodicProcessDeployments)
.on(_.id === _.periodicProcessId)
@@ -215,63 +200,72 @@ class SlickPeriodicProcessesRepository(
}
override def create(
- deploymentWithJarData: DeploymentWithJarData.WithCanonicalProcess,
+ deploymentWithRuntimeParams: DeploymentWithRuntimeParams,
+ inputConfigDuringExecutionJson: String,
+ canonicalProcess: CanonicalProcess,
scheduleProperty: ScheduleProperty,
- processActionId: ProcessActionId
- ): Action[PeriodicProcess[WithCanonicalProcess]] = {
- val processEntity = PeriodicProcessEntityWithJson(
+ processActionId: ProcessActionId,
+ ): Action[PeriodicProcess] = {
+ val processEntity = PeriodicProcessEntityWithInputConfigJson(
id = PeriodicProcessId(-1),
- processName = deploymentWithJarData.processVersion.processName,
- processVersionId = deploymentWithJarData.processVersion.versionId,
+ processId = deploymentWithRuntimeParams.processId,
+ processName = deploymentWithRuntimeParams.processName,
+ processVersionId = deploymentWithRuntimeParams.versionId,
processingType = processingType,
- processJson = deploymentWithJarData.process,
- inputConfigDuringExecutionJson = deploymentWithJarData.inputConfigDuringExecutionJson,
- jarFileName = deploymentWithJarData.jarFileName,
+ runtimeParams = deploymentWithRuntimeParams.runtimeParams,
scheduleProperty = scheduleProperty.asJson.noSpaces,
active = true,
createdAt = now(),
- Some(processActionId)
+ Some(processActionId),
+ inputConfigDuringExecutionJson = inputConfigDuringExecutionJson,
)
- ((PeriodicProcessesWithJson returning PeriodicProcessesWithJson into ((_, id) => id)) += processEntity)
- .map(PeriodicProcessesRepository.createPeriodicProcessWithJson)
+ ((PeriodicProcessesWithInputConfig returning PeriodicProcessesWithInputConfig into ((_, id) =>
+ id
+ )) += processEntity)
+ .map(PeriodicProcessesRepository.createPeriodicProcess)
}
private def now(): LocalDateTime = LocalDateTime.now(clock)
- override def findToBeDeployed: Action[Seq[PeriodicProcessDeployment[WithCanonicalProcess]]] =
- activePeriodicProcessWithDeploymentQuery
- .filter { case (_, d) =>
- d.runAt <= now() &&
- d.status === (PeriodicProcessDeploymentStatus.Scheduled: PeriodicProcessDeploymentStatus)
- }
- .result
- .map(_.map((PeriodicProcessesRepository.createPeriodicProcessDeployment _).tupled))
+ override def findToBeDeployed: Action[Seq[PeriodicProcessDeployment]] =
+ findProcesses(
+ activePeriodicProcessWithDeploymentQuery(processingType)
+ .filter { case (_, d) =>
+ d.runAt <= now() &&
+ d.status === (PeriodicProcessDeploymentStatus.Scheduled: PeriodicProcessDeploymentStatus)
+ }
+ )
- override def findToBeRetried: Action[Seq[PeriodicProcessDeployment[WithCanonicalProcess]]] =
- activePeriodicProcessWithDeploymentQuery
- .filter { case (_, d) =>
- d.nextRetryAt <= now() &&
- d.status === (PeriodicProcessDeploymentStatus.RetryingDeploy: PeriodicProcessDeploymentStatus)
- }
- .result
- .map(_.map((PeriodicProcessesRepository.createPeriodicProcessDeployment _).tupled))
+ override def findToBeRetried: Action[Seq[PeriodicProcessDeployment]] =
+ findProcesses(
+ activePeriodicProcessWithDeploymentQuery(processingType)
+ .filter { case (_, d) =>
+ d.nextRetryAt <= now() &&
+ d.status === (PeriodicProcessDeploymentStatus.RetryingDeploy: PeriodicProcessDeploymentStatus)
+ }
+ )
- override def findProcessData(
- id: PeriodicProcessDeploymentId
- ): Action[PeriodicProcessDeployment[WithCanonicalProcess]] = {
- (PeriodicProcessesWithJson join PeriodicProcessDeployments on (_.id === _.periodicProcessId))
- .filter { case (_, deployment) => deployment.id === id }
- .result
- .head
- .map((PeriodicProcessesRepository.createPeriodicProcessDeployment _).tupled)
+ private def findProcesses(
+ query: Query[
+ (PeriodicProcessesWithoutInputConfigJsonTable, PeriodicProcessDeploymentsTable),
+ (PeriodicProcessEntityWithoutInputConfigJson, PeriodicProcessDeploymentEntity),
+ Seq
+ ]
+ ) = {
+ query.result
+ .map(_.map { case (periodicProcess, periodicDeployment) =>
+ PeriodicProcessesRepository.createPeriodicProcessDeployment(
+ periodicProcess,
+ periodicDeployment,
+ )
+ })
}
- override def findProcessData(processName: ProcessName): Action[Seq[PeriodicProcess[WithCanonicalProcess]]] = {
- PeriodicProcessesWithJson
- .filter(p => p.active === true && p.processName === processName)
- .result
- .map(_.map(PeriodicProcessesRepository.createPeriodicProcessWithJson))
- }
+ override def findProcessData(id: PeriodicProcessDeploymentId): Action[PeriodicProcessDeployment] =
+ findProcesses(
+ (PeriodicProcessesWithoutInputConfig join PeriodicProcessDeployments on (_.id === _.periodicProcessId))
+ .filter { case (_, deployment) => deployment.id === id }
+ ).map(_.head)
override def markDeployed(id: PeriodicProcessDeploymentId): Action[Unit] = {
val q = for {
@@ -314,9 +308,9 @@ class SlickPeriodicProcessesRepository(
}
override def findActiveSchedulesForProcessesHavingDeploymentWithMatchingStatus(
- expectedDeploymentStatuses: Set[PeriodicProcessDeploymentStatus]
+ expectedDeploymentStatuses: Set[PeriodicProcessDeploymentStatus],
): Action[SchedulesState] = {
- val processesHavingDeploymentsWithMatchingStatus = PeriodicProcessesWithoutJson.filter(p =>
+ val processesHavingDeploymentsWithMatchingStatus = PeriodicProcessesWithoutInputConfig.filter(p =>
p.active &&
PeriodicProcessDeployments
.filter(d => d.periodicProcessId === p.id && d.status.inSet(expectedDeploymentStatuses))
@@ -324,7 +318,7 @@ class SlickPeriodicProcessesRepository(
)
getLatestDeploymentsForEachSchedule(
processesHavingDeploymentsWithMatchingStatus,
- deploymentsPerScheduleMaxCount = 1
+ deploymentsPerScheduleMaxCount = 1,
).map(schedulesForProcessNames =>
SchedulesState(
schedulesForProcessNames.values.map(_.schedules).foldLeft(Map.empty[ScheduleId, ScheduleData])(_ ++ _)
@@ -334,26 +328,27 @@ class SlickPeriodicProcessesRepository(
override def getLatestDeploymentsForActiveSchedules(
processName: ProcessName,
- deploymentsPerScheduleMaxCount: Int
+ deploymentsPerScheduleMaxCount: Int,
): Action[SchedulesState] = {
- val activeProcessesQuery = PeriodicProcessesWithoutJson.filter(p => p.processName === processName && p.active)
+ val activeProcessesQuery =
+ PeriodicProcessesWithoutInputConfig.filter(p => p.processName === processName && p.active)
getLatestDeploymentsForEachSchedule(activeProcessesQuery, deploymentsPerScheduleMaxCount)
.map(_.getOrElse(processName, SchedulesState(Map.empty)))
}
override def getLatestDeploymentsForActiveSchedules(
- deploymentsPerScheduleMaxCount: Int
+ deploymentsPerScheduleMaxCount: Int,
): Action[Map[ProcessName, SchedulesState]] = {
- val activeProcessesQuery = PeriodicProcessesWithoutJson.filter(_.active)
+ val activeProcessesQuery = PeriodicProcessesWithoutInputConfig.filter(_.active)
getLatestDeploymentsForEachSchedule(activeProcessesQuery, deploymentsPerScheduleMaxCount)
}
override def getLatestDeploymentsForLatestInactiveSchedules(
processName: ProcessName,
inactiveProcessesMaxCount: Int,
- deploymentsPerScheduleMaxCount: Int
+ deploymentsPerScheduleMaxCount: Int,
): Action[SchedulesState] = {
- val filteredProcessesQuery = PeriodicProcessesWithoutJson
+ val filteredProcessesQuery = PeriodicProcessesWithoutInputConfig
.filter(p => p.processName === processName && !p.active)
.sortBy(_.createdAt.desc)
.take(inactiveProcessesMaxCount)
@@ -363,9 +358,9 @@ class SlickPeriodicProcessesRepository(
override def getLatestDeploymentsForLatestInactiveSchedules(
inactiveProcessesMaxCount: Int,
- deploymentsPerScheduleMaxCount: Int
+ deploymentsPerScheduleMaxCount: Int,
): Action[Map[ProcessName, SchedulesState]] = {
- val filteredProcessesQuery = PeriodicProcessesWithoutJson
+ val filteredProcessesQuery = PeriodicProcessesWithoutInputConfig
.filter(!_.active)
.sortBy(_.createdAt.desc)
.take(inactiveProcessesMaxCount)
@@ -373,8 +368,12 @@ class SlickPeriodicProcessesRepository(
}
private def getLatestDeploymentsForEachSchedule(
- periodicProcessesQuery: Query[PeriodicProcessWithoutJson, PeriodicProcessEntityWithoutJson, Seq],
- deploymentsPerScheduleMaxCount: Int
+ periodicProcessesQuery: Query[
+ PeriodicProcessesWithoutInputConfigJsonTable,
+ PeriodicProcessEntityWithoutInputConfigJson,
+ Seq
+ ],
+ deploymentsPerScheduleMaxCount: Int,
): Action[Map[ProcessName, SchedulesState]] = {
val filteredPeriodicProcessQuery = periodicProcessesQuery.filter(p => p.processingType === processingType)
val latestDeploymentsForSchedules = profile match {
@@ -387,9 +386,13 @@ class SlickPeriodicProcessesRepository(
}
private def getLatestDeploymentsForEachSchedulePostgres(
- periodicProcessesQuery: Query[PeriodicProcessWithoutJson, PeriodicProcessEntityWithoutJson, Seq],
+ periodicProcessesQuery: Query[
+ PeriodicProcessesWithoutInputConfigJsonTable,
+ PeriodicProcessEntityWithoutInputConfigJson,
+ Seq
+ ],
deploymentsPerScheduleMaxCount: Int
- ): Action[Seq[(PeriodicProcessEntityWithoutJson, PeriodicProcessDeploymentEntity)]] = {
+ ): Action[Seq[(PeriodicProcessEntity, PeriodicProcessDeploymentEntity)]] = {
// To effectively limit deployments to a given count for each schedule in one query, we use window functions in Slick
import ExPostgresProfile.api._
import com.github.tminglei.slickpg.window.PgWindowFuncSupport.WindowFunctions._
@@ -421,9 +424,13 @@ class SlickPeriodicProcessesRepository(
// If we decide to support more databases, we should consider an optimization like extracting a periodic_schedule table
// with a foreign key to periodic_process and a schedule_name column - it would reduce the number of queries
private def getLatestDeploymentsForEachScheduleJdbcGeneric(
- periodicProcessesQuery: Query[PeriodicProcessWithoutJson, PeriodicProcessEntityWithoutJson, Seq],
+ periodicProcessesQuery: Query[
+ PeriodicProcessesWithoutInputConfigJsonTable,
+ PeriodicProcessEntityWithoutInputConfigJson,
+ Seq
+ ],
deploymentsPerScheduleMaxCount: Int
- ): Action[Seq[(PeriodicProcessEntityWithoutJson, PeriodicProcessDeploymentEntity)]] = {
+ ): Action[Seq[(PeriodicProcessEntity, PeriodicProcessDeploymentEntity)]] = {
// It is debug instead of warn to avoid bloating logs when e.g. hsqldb is used under the hood for some reason
logger.debug(
"WARN: Using not optimized version of getLatestDeploymentsForEachSchedule that not uses window functions"
@@ -463,7 +470,7 @@ class SlickPeriodicProcessesRepository(
scheduleName: ScheduleName,
runAt: LocalDateTime,
deployMaxRetries: Int
- ): Action[PeriodicProcessDeployment[WithCanonicalProcess]] = {
+ ): Action[PeriodicProcessDeployment] = {
val deploymentEntity = PeriodicProcessDeploymentEntity(
id = PeriodicProcessDeploymentId(-1),
periodicProcessId = id,
@@ -483,19 +490,26 @@ class SlickPeriodicProcessesRepository(
override def markInactive(processId: PeriodicProcessId): Action[Unit] = {
val q = for {
- p <- PeriodicProcessesWithoutJson if p.id === processId
+ p <- PeriodicProcessesWithoutInputConfig if p.id === processId
} yield p.active
val update = q.update(false)
update.map(_ => ())
}
- private def activePeriodicProcessWithDeploymentQuery = {
- (PeriodicProcessesWithJson.filter(p => p.active === true && p.processingType === processingType)
+ def fetchInputConfigDuringExecutionJson(processName: ProcessName, versionId: VersionId): Action[Option[String]] =
+ PeriodicProcessesWithInputConfig
+ .filter(p => p.processName === processName && p.processVersionId === versionId)
+ .map(_.inputConfigDuringExecutionJson)
+ .result
+ .headOption
+
+ private def activePeriodicProcessWithDeploymentQuery(processingType: String) = {
+ (PeriodicProcessesWithoutInputConfig.filter(p => p.active === true && p.processingType === processingType)
join PeriodicProcessDeployments on (_.id === _.periodicProcessId))
}
private def toSchedulesState(
- list: Seq[(PeriodicProcessEntityWithoutJson, PeriodicProcessDeploymentEntity)]
+ list: Seq[(PeriodicProcessEntity, PeriodicProcessDeploymentEntity)]
): Map[ProcessName, SchedulesState] = {
list
.groupBy(_._1.processName)
@@ -503,46 +517,42 @@ class SlickPeriodicProcessesRepository(
}
private def toSchedulesStateForSinglePeriodicProcess(
- list: Seq[(PeriodicProcessEntityWithoutJson, PeriodicProcessDeploymentEntity)]
+ list: Seq[(PeriodicProcessEntity, PeriodicProcessDeploymentEntity)]
): SchedulesState = {
SchedulesState(
list
.map { case (process, deployment) =>
val scheduleId = ScheduleId(process.id, ScheduleName(deployment.scheduleName))
val scheduleData = (scheduleId, process)
- val scheduleDeployment = ScheduleDeploymentData(deployment)
+ val scheduleDeployment = scheduleDeploymentData(deployment)
(scheduleData, scheduleDeployment)
}
.toList
.toGroupedMap
.toList
.map { case ((scheduleId, process), deployments) =>
- scheduleId -> ScheduleData(createPeriodicProcessWithoutJson(process), deployments)
+ scheduleId -> ScheduleData(createPeriodicProcess(process), deployments)
}
.toMap
)
}
-}
-
-//Copied from designer/server.
-object DBIOActionInstances {
-
- type DB[A] = DBIOAction[A, NoStream, Effect.All]
-
- implicit def dbMonad(implicit ec: ExecutionContext): Monad[DB] = new Monad[DB] {
-
- override def pure[A](x: A) = DBIO.successful(x)
-
- override def flatMap[A, B](fa: DB[A])(f: (A) => DB[B]) = fa.flatMap(f)
-
- // this is *not* tail recursive
- override def tailRecM[A, B](a: A)(f: (A) => DB[Either[A, B]]): DB[B] =
- f(a).flatMap {
- case Right(r) => pure(r)
- case Left(l) => tailRecM(l)(f)
- }
-
+ private def scheduleDeploymentData(deployment: PeriodicProcessDeploymentEntity): ScheduleDeploymentData = {
+ ScheduleDeploymentData(
+ deployment.id,
+ deployment.createdAt,
+ deployment.runAt,
+ deployment.deployedAt,
+ deployment.retriesLeft,
+ deployment.nextRetryAt,
+ PeriodicProcessesRepository.createPeriodicDeploymentState(deployment)
+ )
}
+ override def fetchCanonicalProcessWithVersion(
+ processName: ProcessName,
+ versionId: VersionId
+ ): Future[Option[(CanonicalProcess, ProcessVersion)]] =
+ fetchingProcessRepository.getCanonicalProcessWithVersion(processName, versionId)(NussknackerInternalUser.instance)
+
}
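
After this split, the heavyweight input-config JSON column lives in a separate table projection and is read only on demand. A hedged sketch using the RunOps helper from the trait above (repository wiring assumed):

    // Only the exact (processName, versionId) pair about to be deployed
    // pays the cost of loading the input config JSON.
    val inputConfig: Future[Option[String]] =
      periodicProcessesRepository
        .fetchInputConfigDuringExecutionJson(processName, versionId)
        .run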
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ScenarioActionRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ScenarioActionRepository.scala
index 68aecc26a42..bf048ea9bf4 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ScenarioActionRepository.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ScenarioActionRepository.scala
@@ -32,7 +32,7 @@ import scala.concurrent.ExecutionContext
// 2. At the moment, the old ScenarioActionRepository
// - handles those activities, which underlying operations may be long and may be in progress
// 3. Eventually, the new ScenarioActivityRepository should be aware of the state of the underlying operation, and should replace this repository
-trait ScenarioActionRepository extends LockableTable {
+trait ScenarioActionRepository extends ScenarioActionReadOnlyRepository with LockableTable {
def addInstantAction(
processId: ProcessId,
@@ -80,6 +80,10 @@ trait ScenarioActionRepository extends LockableTable {
def deleteInProgressActions(): DB[Unit]
+}
+
+trait ScenarioActionReadOnlyRepository extends LockableTable {
+
def getInProgressActionNames(processId: ProcessId): DB[Set[ScenarioActionName]]
def getInProgressActionNames(
@@ -110,10 +114,11 @@ trait ScenarioActionRepository extends LockableTable {
}
class DbScenarioActionRepository private (
- protected val dbRef: DbRef,
+ override protected val dbRef: DbRef,
buildInfos: ProcessingTypeDataProvider[Map[String, String], _]
)(override implicit val executionContext: ExecutionContext)
- extends DbioRepository
+ extends DbScenarioActionReadOnlyRepository(dbRef)
+ with DbioRepository
with NuTables
with DbLockableTable
with ScenarioActionRepository
@@ -340,6 +345,42 @@ class DbScenarioActionRepository private (
} yield updateCount == 1
}
+ override def deleteInProgressActions(): DB[Unit] = {
+ run(scenarioActivityTable.filter(_.state === ProcessActionState.InProgress).delete.map(_ => ()))
+ }
+
+ private def activityId(actionId: ProcessActionId) =
+ ScenarioActivityId(actionId.value)
+
+}
+
+object DbScenarioActionRepository {
+
+ def create(dbRef: DbRef, buildInfos: ProcessingTypeDataProvider[Map[String, String], _])(
+ implicit executionContext: ExecutionContext,
+ ): ScenarioActionRepository = {
+ new ScenarioActionRepositoryAuditLogDecorator(
+ new DbScenarioActionRepository(dbRef, buildInfos)
+ )
+ }
+
+}
+
+class DbScenarioActionReadOnlyRepository(
+ protected val dbRef: DbRef,
+)(override implicit val executionContext: ExecutionContext)
+ extends DbioRepository
+ with NuTables
+ with DbLockableTable
+ with ScenarioActionReadOnlyRepository
+ with LazyLogging {
+
+ import profile.api._
+
+ override type ENTITY = ScenarioActivityEntityFactory#ScenarioActivityEntity
+
+ override protected def table: TableQuery[ScenarioActivityEntityFactory#ScenarioActivityEntity] = scenarioActivityTable
+
override def getInProgressActionNames(processId: ProcessId): DB[Set[ScenarioActionName]] = {
val query = scenarioActivityTable
.filter(action => action.scenarioId === processId && action.state === ProcessActionState.InProgress)
@@ -391,10 +432,6 @@ class DbScenarioActionRepository private (
)
}
- override def deleteInProgressActions(): DB[Unit] = {
- run(scenarioActivityTable.filter(_.state === ProcessActionState.InProgress).delete.map(_ => ()))
- }
-
override def getLastActionPerProcess(
actionState: Set[ProcessActionState],
actionNamesOpt: Option[Set[ScenarioActionName]]
@@ -456,7 +493,7 @@ class DbScenarioActionRepository private (
)
}
- private def toFinishedProcessAction(
+ protected def toFinishedProcessAction(
activityEntity: ScenarioActivityEntityData
): Option[ProcessAction] = actionName(activityEntity.activityType).flatMap { actionName =>
(for {
@@ -486,10 +523,7 @@ class DbScenarioActionRepository private (
}.toOption
}
- private def activityId(actionId: ProcessActionId) =
- ScenarioActivityId(actionId.value)
-
- private def actionName(activityType: ScenarioActivityType): Option[ScenarioActionName] = {
+ protected def actionName(activityType: ScenarioActivityType): Option[ScenarioActionName] = {
activityType match {
case ScenarioActivityType.ScenarioCreated =>
None
@@ -553,14 +587,12 @@ class DbScenarioActionRepository private (
}
-object DbScenarioActionRepository {
+object DbScenarioActionReadOnlyRepository {
- def create(dbRef: DbRef, buildInfos: ProcessingTypeDataProvider[Map[String, String], _])(
+ def create(dbRef: DbRef)(
implicit executionContext: ExecutionContext,
- ): ScenarioActionRepository = {
- new ScenarioActionRepositoryAuditLogDecorator(
- new DbScenarioActionRepository(dbRef, buildInfos)
- )
+ ): ScenarioActionReadOnlyRepository = {
+ new DbScenarioActionReadOnlyRepository(dbRef)
}
}
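
A hedged wiring sketch showing the intended narrowing: components that only read action state can now depend on the read-only interface (an implicit ExecutionContext and the other repositories are assumed in scope):

    val readOnlyActions: ScenarioActionReadOnlyRepository =
      DbScenarioActionReadOnlyRepository.create(dbRef)

    val fetchingRepository =
      DBFetchingProcessRepository.createFutureRepository(dbRef, readOnlyActions, scenarioLabelsRepository)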
diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala
index ec0da71149b..1b3ec275c25 100644
--- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala
+++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala
@@ -66,6 +66,8 @@ import pl.touk.nussknacker.ui.process.newdeployment.synchronize.{
}
import pl.touk.nussknacker.ui.process.newdeployment.{DeploymentRepository, DeploymentService}
import pl.touk.nussknacker.ui.process.processingtype.ProcessingTypeData
+import pl.touk.nussknacker.ui.process.processingtype.ProcessingTypeData.SchedulingForProcessingType
+import pl.touk.nussknacker.ui.process.processingtype.{ModelClassLoaderProvider, ProcessingTypeData}
import pl.touk.nussknacker.ui.process.processingtype.loader.ProcessingTypeDataLoader
import pl.touk.nussknacker.ui.process.processingtype.provider.ReloadableProcessingTypeDataProvider
import pl.touk.nussknacker.ui.process.repository._
@@ -109,7 +111,8 @@ class AkkaHttpBasedRouteProvider(
sttpBackend: SttpBackend[Future, Any],
processingTypeDataLoader: ProcessingTypeDataLoader,
feStatisticsRepository: FEStatisticsRepository[Future],
- designerClock: Clock
+ designerClock: Clock,
+ modelClassLoaderProvider: ModelClassLoaderProvider
)(
implicit system: ActorSystem,
materializer: Materializer,
@@ -140,7 +143,8 @@ class AkkaHttpBasedRouteProvider(
dbioRunner,
sttpBackend,
featureTogglesConfig,
- globalNotificationRepository
+ globalNotificationRepository,
+ modelClassLoaderProvider
)
deploymentsStatusesSynchronizer = new DeploymentsStatusesSynchronizer(
@@ -716,7 +720,8 @@ class AkkaHttpBasedRouteProvider(
dbioActionRunner: DBIOActionRunner,
sttpBackend: SttpBackend[Future, Any],
featureTogglesConfig: FeatureTogglesConfig,
- globalNotificationRepository: InMemoryTimeseriesRepository[Notification]
+ globalNotificationRepository: InMemoryTimeseriesRepository[Notification],
+ modelClassLoaderProvider: ModelClassLoaderProvider
): Resource[IO, ReloadableProcessingTypeDataProvider] = {
Resource
.make(
@@ -735,6 +740,8 @@ class AkkaHttpBasedRouteProvider(
sttpBackend,
_
),
+ modelClassLoaderProvider,
+ Some(dbRef),
)
val loadAndNotifyIO = laodProcessingTypeDataIO
.map { state =>
diff --git a/designer/server/src/test/scala/db/migration/V1_057__MigrateActionsAndCommentsToScenarioActivities.scala b/designer/server/src/test/scala/db/migration/V1_057__MigrateActionsAndCommentsToScenarioActivities.scala
index 38aaa699fb2..6fc518f7cd2 100644
--- a/designer/server/src/test/scala/db/migration/V1_057__MigrateActionsAndCommentsToScenarioActivities.scala
+++ b/designer/server/src/test/scala/db/migration/V1_057__MigrateActionsAndCommentsToScenarioActivities.scala
@@ -8,7 +8,6 @@ import db.migration.V1_057__MigrateActionsAndCommentsToScenarioActivitiesDefinit
import io.circe.syntax.EncoderOps
import org.scalatest.freespec.AnyFreeSpecLike
import org.scalatest.matchers.should.Matchers
-import pl.touk.nussknacker.engine.api.deployment.ScenarioComment.WithContent
import pl.touk.nussknacker.engine.api.deployment._
import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId}
import pl.touk.nussknacker.engine.api.{MetaData, ProcessAdditionalFields, RequestResponseMetaData}
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/db/DbTesting.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/db/DbTesting.scala
index fbc1af4f450..ce16e649949 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/db/DbTesting.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/db/DbTesting.scala
@@ -93,6 +93,8 @@ trait DbTesting extends BeforeAndAfterEach with BeforeAndAfterAll {
session.prepareStatement("""delete from "environments"""").execute()
session.prepareStatement("""delete from "processes"""").execute()
session.prepareStatement("""delete from "fingerprints"""").execute()
+ session.prepareStatement("""delete from "periodic_scenarios"""").execute()
+ session.prepareStatement("""delete from "periodic_scenario_deployments"""").execute()
}
}
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala
index c2674f278f4..c662bc0c32a 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/NuResourcesTest.scala
@@ -46,6 +46,7 @@ import pl.touk.nussknacker.ui.process._
import pl.touk.nussknacker.ui.process.deployment._
import pl.touk.nussknacker.ui.process.fragment.DefaultFragmentRepository
import pl.touk.nussknacker.ui.process.marshall.CanonicalProcessConverter
+import pl.touk.nussknacker.ui.process.processingtype.ProcessingTypeData.SchedulingForProcessingType
import pl.touk.nussknacker.ui.process.processingtype._
import pl.touk.nussknacker.ui.process.processingtype.loader.ProcessingTypesConfigBasedProcessingTypeDataLoader
import pl.touk.nussknacker.ui.process.processingtype.provider.ProcessingTypeDataProvider
@@ -124,10 +125,18 @@ trait NuResourcesTest
protected val processingTypeConfig: ProcessingTypeConfig =
ProcessingTypeConfig.read(ConfigWithScalaVersion.StreamingProcessTypeConfig)
- protected val deploymentManagerProvider: DeploymentManagerProvider =
- new MockManagerProvider(deploymentManager)
+ protected val deploymentManagerProvider: DeploymentManagerProvider = new MockManagerProvider(deploymentManager)
- private val modelData = ModelData(processingTypeConfig, modelDependencies)
+ private val modelClassLoaderProvider = ModelClassLoaderProvider(
+ Map(Streaming.stringify -> ModelClassLoaderDependencies(processingTypeConfig.classPath, None))
+ )
+
+ private val modelData =
+ ModelData(
+ processingTypeConfig,
+ modelDependencies,
+ modelClassLoaderProvider.forProcessingTypeUnsafe(Streaming.stringify)
+ )
protected val testProcessingTypeDataProvider: ProcessingTypeDataProvider[ProcessingTypeData, _] =
mapProcessingTypeDataProvider(
@@ -135,6 +144,7 @@ trait NuResourcesTest
Streaming.stringify,
modelData,
deploymentManagerProvider,
+ SchedulingForProcessingType.NotAvailable,
deploymentManagerDependencies,
deploymentManagerProvider.defaultEngineSetupName,
processingTypeConfig.deploymentConfig,
@@ -151,7 +161,9 @@ trait NuResourcesTest
new ProcessingTypesConfigBasedProcessingTypeDataLoader(() => IO.pure(designerConfig.processingTypeConfigs))
.loadProcessingTypeData(
_ => modelDependencies,
- _ => deploymentManagerDependencies
+ _ => deploymentManagerDependencies,
+ modelClassLoaderProvider,
+ Some(testDbRef),
)
.unsafeRunSync()
)
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala
index e7ff26f3da9..7f2a7b58edf 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala
@@ -1,17 +1,12 @@
package pl.touk.nussknacker.test.mock
-import _root_.sttp.client3.testing.SttpBackendStub
import akka.actor.ActorSystem
import cats.data.Validated.valid
import cats.data.ValidatedNel
import com.google.common.collect.LinkedHashMultimap
import com.typesafe.config.Config
+import sttp.client3.testing.SttpBackendStub
import pl.touk.nussknacker.engine._
-import pl.touk.nussknacker.engine.api.definition.{
- NotBlankParameterValidator,
- NotNullParameterValidator,
- StringParameterEditor
-}
import pl.touk.nussknacker.engine.api.deployment._
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
import pl.touk.nussknacker.engine.api.process.ProcessName
@@ -19,6 +14,7 @@ import pl.touk.nussknacker.engine.api.{ProcessVersion, StreamMetaData}
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
import pl.touk.nussknacker.engine.deployment._
import pl.touk.nussknacker.engine.management.{FlinkDeploymentManager, FlinkStreamingDeploymentManagerProvider}
+import pl.touk.nussknacker.engine.util.loader.ModelClassLoader
import pl.touk.nussknacker.test.config.ConfigWithScalaVersion
import pl.touk.nussknacker.test.utils.domain.TestFactory
import shapeless.syntax.typeable.typeableOps
@@ -47,7 +43,8 @@ class MockDeploymentManager(
) extends FlinkDeploymentManager(
ModelData(
ProcessingTypeConfig.read(ConfigWithScalaVersion.StreamingProcessTypeConfig),
- TestFactory.modelDependencies
+ TestFactory.modelDependencies,
+ ModelClassLoader(ProcessingTypeConfig.read(ConfigWithScalaVersion.StreamingProcessTypeConfig).classPath, None)
),
DeploymentManagerDependencies(
deployedScenariosProvider,
@@ -267,6 +264,7 @@ class MockDeploymentManager(
override def stateQueryForAllScenariosSupport: StateQueryForAllScenariosSupport = NoStateQueryForAllScenariosSupport
+ override def schedulingSupport: SchedulingSupport = NoSchedulingSupport
}
class MockManagerProvider(deploymentManager: DeploymentManager = new MockDeploymentManager())
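
The added `override def schedulingSupport: SchedulingSupport = NoSchedulingSupport` follows the same capability-flag pattern as stateQueryForAllScenariosSupport: a manager advertises optional features through a sealed hierarchy that callers pattern-match on, instead of relying on runtime casts. A minimal sketch under assumed, simplified names (DeploymentManagerSketch and SchedulingCapabilityCheck are illustrative):

// Sealed capability hierarchy (simplified, assumed names).
sealed trait SchedulingSupport
case object NoSchedulingSupport extends SchedulingSupport
trait SchedulingSupported extends SchedulingSupport

trait DeploymentManagerSketch {
  // Each manager declares whether scheduling is available for it.
  def schedulingSupport: SchedulingSupport
}

object SchedulingCapabilityCheck {
  def describe(dm: DeploymentManagerSketch): String =
    dm.schedulingSupport match {
      case NoSchedulingSupport    => "scheduling not available for this manager"
      case _: SchedulingSupported => "scheduling available"
    }
}
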
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockFetchingProcessRepository.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockFetchingProcessRepository.scala
index b027c7317cf..cd11115ea55 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockFetchingProcessRepository.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockFetchingProcessRepository.scala
@@ -1,6 +1,8 @@
package pl.touk.nussknacker.test.mock
+import cats.data.OptionT
import cats.instances.future._
+import pl.touk.nussknacker.engine.api.ProcessVersion
import pl.touk.nussknacker.engine.api.deployment.ScenarioActionName
import pl.touk.nussknacker.engine.api.graph.ScenarioGraph
import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessIdWithName, ProcessName, VersionId}
@@ -43,6 +45,19 @@ class MockFetchingProcessRepository private (
extends FetchingProcessRepository[Future]
with BasicRepository {
+ override def getCanonicalProcessWithVersion(processName: ProcessName, versionId: VersionId)(
+ implicit user: LoggedUser
+ ): Future[Option[(CanonicalProcess, ProcessVersion)]] = {
+ val result = for {
+ processId <- OptionT(fetchProcessId(processName))
+ details <- OptionT(fetchProcessDetailsForId[CanonicalProcess](processId, versionId))
+ } yield (
+ details.json,
+ details.toEngineProcessVersion,
+ )
+ result.value
+ }
+
override def fetchLatestProcessesDetails[PS: ScenarioShapeFetchStrategy](
q: ScenarioQuery
)(implicit loggedUser: LoggedUser, ec: ExecutionContext): Future[List[ScenarioWithDetailsEntity[PS]]] =
@@ -91,8 +106,8 @@ class MockFetchingProcessRepository private (
val shapeStrategy: ScenarioShapeFetchStrategy[PS] = implicitly[ScenarioShapeFetchStrategy[PS]]
shapeStrategy match {
- case NotFetch => process.copy(json = ().asInstanceOf[PS])
- case FetchCanonical => process.asInstanceOf[ScenarioWithDetailsEntity[PS]]
+ case NotFetch => process.copy(json = ())
+ case FetchCanonical => process
case FetchScenarioGraph =>
process
.mapScenario(canonical => CanonicalProcessConverter.toScenarioGraph(canonical))
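
The new getCanonicalProcessWithVersion above chains two Future[Option[...]] lookups with cats' OptionT, so a missing process id or missing details short-circuits the whole computation to None. A self-contained sketch of the same pattern (fetchId and fetchDetails are hypothetical stand-ins for fetchProcessId and fetchProcessDetailsForId):

import cats.data.OptionT
import cats.instances.future._

import scala.concurrent.{ExecutionContext, Future}

object OptionTSketch {

  // Hypothetical stand-ins for the repository lookups.
  def fetchId(name: String)(implicit ec: ExecutionContext): Future[Option[Long]] =
    Future.successful(if (name.nonEmpty) Some(42L) else None)

  def fetchDetails(id: Long)(implicit ec: ExecutionContext): Future[Option[String]] =
    Future.successful(Some(s"details-of-$id"))

  // A None from either lookup short-circuits the whole chain to Future(None).
  def fetchBoth(name: String)(implicit ec: ExecutionContext): Future[Option[(Long, String)]] =
    (for {
      id      <- OptionT(fetchId(name))
      details <- OptionT(fetchDetails(id))
    } yield (id, details)).value

}
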
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/AppApiHttpServiceBusinessSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/AppApiHttpServiceBusinessSpec.scala
index 1f90931f0a6..d9f0245c76e 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/AppApiHttpServiceBusinessSpec.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/AppApiHttpServiceBusinessSpec.scala
@@ -2,6 +2,7 @@ package pl.touk.nussknacker.ui.api
import com.typesafe.config.ConfigValueFactory.fromAnyRef
import com.typesafe.config.{Config, ConfigFactory}
+import com.typesafe.scalalogging.LazyLogging
import io.restassured.RestAssured._
import io.restassured.module.scala.RestAssuredSupport.AddThenToResponse
import org.hamcrest.Matchers._
@@ -29,9 +30,10 @@ class AppApiHttpServiceBusinessSpec
with WithBusinessCaseRestAssuredUsersExtensions
with NuRestAssureMatchers
with RestAssuredVerboseLoggingIfValidationFails
- with PatientScalaFutures {
+ with PatientScalaFutures
+ with LazyLogging {
- private var simulateChangeInApplicationConfig: Boolean = false
+ private var simulatedChangeInApplicationConfig: Option[Config] = None
"The app health check endpoint should" - {
"return simple designer health check (with no scenario statuses check)" in {
@@ -279,11 +281,11 @@ class AppApiHttpServiceBusinessSpec
"The processing type data reload endpoint should" - {
"reload processing types-related model data when" - {
"'scenarioTypes' configuration is changed" in {
- val componentNamesBeforeReload = fetchSortedComponentNames()
+ val componentNamesBeforeReload = fetchComponentGroupNamesWithOccurrencesCount()
given()
.applicationState {
- simulateChangeInApplicationConfig = true
+ simulatedChangeInApplicationConfig = Some(additionalProcessingTypeCustomization)
}
.when()
.basicAuthAdmin()
@@ -291,27 +293,86 @@ class AppApiHttpServiceBusinessSpec
.Then()
.statusCode(204)
- val componentNamesAfterReload = fetchSortedComponentNames()
+ val componentNamesAfterReload = fetchComponentGroupNamesWithOccurrencesCount()
componentNamesAfterReload shouldNot be(componentNamesBeforeReload)
- componentNamesAfterReload.length should be > (componentNamesBeforeReload.length)
+ componentNamesAfterReload("someComponentGroup") shouldBe 2
+ }
+ }
+ "return error when" - {
+ "scenario type is added" in {
+ given()
+ .applicationState {
+ simulatedChangeInApplicationConfig = Some(
+ ConfigFactory.parseString(
+ s"""
+ |scenarioTypes {
+ | streaming2 {
+ | deploymentConfig {
+ | type: "development-tests"
+ | }
+ | modelConfig {
+ | classPath: []
+ | }
+ | category: "Default"
+ | }
+ |}
+ |""".stripMargin
+ )
+ )
+ }
+ .when()
+ .basicAuthAdmin()
+ .post(s"$nuDesignerHttpAddress/api/app/processingtype/reload")
+ .Then()
+ .statusCode(500)
+ .body(
+ startsWith("Processing types cannot be added, removed, or renamed during processing type reload.")
+ )
+ }
+ "classpath of a model is changed" in {
+ given()
+ .applicationState {
+ simulatedChangeInApplicationConfig = Some(
+ ConfigFactory.parseString(
+ s"""
+ |scenarioTypes {
+ | streaming {
+ | modelConfig {
+ | classPath: ["changed.jar"]
+ | }
+ | }
+ |}
+ |""".stripMargin
+ )
+ )
+ }
+ .when()
+ .basicAuthAdmin()
+ .post(s"$nuDesignerHttpAddress/api/app/processingtype/reload")
+ .Then()
+ .statusCode(500)
+ .body(
+ startsWith(
+ "Error during processing types reload. Model ClassLoader dependencies such as classpath cannot be modified during reload."
+ )
+ )
}
}
}
override def beforeEach(): Unit = {
super.beforeEach()
- if (simulateChangeInApplicationConfig) {
- simulateChangeInApplicationConfig = false
+ if (simulatedChangeInApplicationConfig.isDefined) {
+ simulatedChangeInApplicationConfig = None
forceReloadProcessingTypes()
}
}
override def designerConfig: Config = {
- if (simulateChangeInApplicationConfig) {
- additionalProcessingTypeCustomization.withFallback(originDesignerConfig)
- } else {
- originDesignerConfig
+ simulatedChangeInApplicationConfig match {
+ case Some(customization) => customization.withFallback(originDesignerConfig)
+ case None => originDesignerConfig
}
}
@@ -332,34 +393,33 @@ class AppApiHttpServiceBusinessSpec
ConfigFactory.parseString(
s"""
|scenarioTypes {
- | streaming3 {
- | deploymentConfig {
- | type: "mockable"
- | id: "3"
- | engineSetupName: "Mockable"
- | }
- | modelConfig: {
- | classPath: [
- | "engine/flink/management/dev-model/target/scala-"$${scala.major.version}"/devModel.jar",
- | "engine/flink/executor/target/scala-"$${scala.major.version}"/flinkExecutor.jar"
- | ]
- | }
- | category: "Category1"
- | }
+ | streaming {
+ | modelConfig {
+ | componentsUiConfig {
+ | sendCommunication {
+ | componentGroup: "someComponentGroup"
+ | }
+ | }
+ | }
+ | }
|}
|""".stripMargin
)
}
- private def fetchSortedComponentNames(): List[String] = {
- given()
+ private def fetchComponentGroupNamesWithOccurrencesCount(): Map[String, Int] = {
+ val body = given()
.when()
.basicAuthAdmin()
.get(s"$nuDesignerHttpAddress/api/components")
.Then()
.statusCode(200)
- .extractList("name")
- .sorted
+ body
+ .extractList("componentGroupName")
+ .groupBy(identity)
+ .view
+ .map { case (name, occurrences) => name -> occurrences.length }
+ .toMap
}
private def forceReloadProcessingTypes(): Unit = {
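
The helper above builds its group-name -> count map with groupBy plus a view. On Scala 2.13+, the same result can be computed in a single pass with groupMapReduce; a sketch with a hypothetical helper name:

object ComponentGroupCounts {
  // One-pass equivalent of groupBy(identity).view.map(...).toMap (Scala 2.13+).
  def countOccurrences(names: List[String]): Map[String, Int] =
    names.groupMapReduce(identity)(_ => 1)(_ + _)
}

// countOccurrences(List("a", "b", "a")) == Map("a" -> 2, "b" -> 1)
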
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DefinitionResourcesSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DefinitionResourcesSpec.scala
index e79e236834e..d62d72a92db 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DefinitionResourcesSpec.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DefinitionResourcesSpec.scala
@@ -10,6 +10,7 @@ import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, OptionValues}
import pl.touk.nussknacker.engine.api.CirceUtil.RichACursor
import pl.touk.nussknacker.engine.api.definition.FixedExpressionValue
import pl.touk.nussknacker.engine.api.parameter.{ParameterName, ValueInputWithFixedValuesProvided}
+import pl.touk.nussknacker.engine.api.typed.typing.{Typed, TypingResult, Unknown}
import pl.touk.nussknacker.engine.api.{FragmentSpecificData, MetaData}
import pl.touk.nussknacker.engine.canonicalgraph.canonicalnode.FlatNode
import pl.touk.nussknacker.engine.canonicalgraph.{CanonicalProcess, canonicalnode}
@@ -103,6 +104,20 @@ class DefinitionResourcesSpec
}
}
+ it("should return definition sorted data for allowed classes - skipping array because list should be uses instead") {
+ getProcessDefinitionData() ~> check {
+ status shouldBe StatusCodes.OK
+
+ val allowedClasses = responseAs[Json].hcursor.downField("classes").focus.value.asArray.value
+ val allowedClassesRefClazzNames = allowedClasses.flatMap(_.hcursor.downField("refClazzName").focus.value.asString)
+ val allowedClassesDisplay = allowedClasses.flatMap(_.hcursor.downField("display").focus.value.asString)
+
+ allowedClassesRefClazzNames should contain("java.util.List")
+ allowedClassesRefClazzNames should not contain (Array().getClass.getName)
+ allowedClassesDisplay shouldBe allowedClassesDisplay.sortBy(_.toLowerCase)
+ }
+ }
+
it("should return info about editor based on fragment parameter definition") {
val fragmentWithFixedValuesEditor = {
CanonicalProcess(
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/config/ConfigurationTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/config/ConfigurationTest.scala
index ac1e56a3885..afcb0dfe60d 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/config/ConfigurationTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/config/ConfigurationTest.scala
@@ -3,6 +3,7 @@ package pl.touk.nussknacker.ui.config
import cats.effect.unsafe.implicits.global
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
+import pl.touk.nussknacker.engine.util.loader.ModelClassLoader
import pl.touk.nussknacker.engine.{ModelData, ProcessingTypeConfig}
import pl.touk.nussknacker.test.config.ConfigWithScalaVersion
import pl.touk.nussknacker.test.utils.domain.TestFactory
@@ -17,10 +18,14 @@ class ConfigurationTest extends AnyFunSuite with Matchers {
// warning: can't be val - uses ConfigFactory.load which breaks "should preserve config overrides" test
private def globalConfig = ConfigWithScalaVersion.TestsConfig
- private def modelData: ModelData = ModelData(
- ProcessingTypeConfig.read(ConfigWithScalaVersion.StreamingProcessTypeConfig),
- TestFactory.modelDependencies
- )
+ private def modelData: ModelData = {
+ val config = ProcessingTypeConfig.read(ConfigWithScalaVersion.StreamingProcessTypeConfig)
+ ModelData(
+ config,
+ TestFactory.modelDependencies,
+ ModelClassLoader(config.classPath, None)
+ )
+ }
private lazy val modelDataConfig = modelData.modelConfig
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/definition/component/DefaultComponentServiceSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/definition/component/DefaultComponentServiceSpec.scala
index 2e9b54cd7c7..43e0f0ef173 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/definition/component/DefaultComponentServiceSpec.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/definition/component/DefaultComponentServiceSpec.scala
@@ -41,6 +41,7 @@ import pl.touk.nussknacker.ui.definition.component.ComponentTestProcessData._
import pl.touk.nussknacker.ui.definition.component.DynamicComponentProvider._
import pl.touk.nussknacker.ui.process.DBProcessService
import pl.touk.nussknacker.ui.process.fragment.DefaultFragmentRepository
+import pl.touk.nussknacker.ui.process.processingtype.ProcessingTypeData.SchedulingForProcessingType
import pl.touk.nussknacker.ui.process.processingtype.loader.ProcessingTypeDataLoader
import pl.touk.nussknacker.ui.process.processingtype.provider.ProcessingTypeDataProvider
import pl.touk.nussknacker.ui.process.processingtype.{ProcessingTypeData, ScenarioParametersService}
@@ -854,6 +855,7 @@ class DefaultComponentServiceSpec
processingType,
modelData,
new MockManagerProvider,
+ SchedulingForProcessingType.NotAvailable,
TestFactory.deploymentManagerDependencies,
EngineSetupName("Mock"),
deploymentConfig = ConfigFactory.empty(),
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/ProcessStateDefinitionServiceSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/ProcessStateDefinitionServiceSpec.scala
index ab0d8f6a72e..78ccd06c3c2 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/ProcessStateDefinitionServiceSpec.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/ProcessStateDefinitionServiceSpec.scala
@@ -15,6 +15,7 @@ import pl.touk.nussknacker.security.Permission
import pl.touk.nussknacker.test.mock.{MockDeploymentManager, MockManagerProvider}
import pl.touk.nussknacker.test.utils.domain.TestFactory
import pl.touk.nussknacker.test.utils.domain.TestFactory.modelDependencies
+import pl.touk.nussknacker.ui.process.processingtype.ProcessingTypeData.SchedulingForProcessingType
import pl.touk.nussknacker.ui.process.processingtype.provider.ProcessingTypeDataProvider
import pl.touk.nussknacker.ui.process.processingtype.{ProcessingTypeData, ValueWithRestriction}
import pl.touk.nussknacker.ui.security.api.{AdminUser, CommonUser, LoggedUser}
@@ -196,6 +197,7 @@ class ProcessStateDefinitionServiceSpec extends AnyFunSuite with Matchers {
override def processStateDefinitionManager: ProcessStateDefinitionManager = stateDefinitionManager
}
),
+ SchedulingForProcessingType.NotAvailable,
TestFactory.deploymentManagerDependencies,
deploymentConfig = ConfigFactory.empty(),
engineSetupName = EngineSetupName("mock"),
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessServiceIntegrationTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicProcessServiceIntegrationTest.scala
similarity index 75%
rename from engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessServiceIntegrationTest.scala
rename to designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicProcessServiceIntegrationTest.scala
index 12b2daa5c41..7737de6761d 100644
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessServiceIntegrationTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicProcessServiceIntegrationTest.scala
@@ -1,32 +1,49 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic
import com.cronutils.builder.CronBuilder
import com.cronutils.model.CronType
import com.cronutils.model.definition.CronDefinitionBuilder
import com.cronutils.model.field.expression.FieldExpressionFactory.{on, questionMark}
-import com.dimafeng.testcontainers.{ForAllTestContainer, PostgreSQLContainer}
import com.typesafe.config.{Config, ConfigFactory}
import com.typesafe.scalalogging.LazyLogging
+import db.util.DBIOActionInstances.DB
import org.scalatest.LoneElement._
import org.scalatest.OptionValues
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.exceptions.TestFailedException
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
-import org.testcontainers.utility.DockerImageName
import pl.touk.nussknacker.engine.api.deployment._
+import pl.touk.nussknacker.engine.api.deployment.scheduler.services.{
+ ProcessConfigEnricher,
+ ScheduledProcessEvent,
+ ScheduledProcessListener
+}
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus.ProblemStateStatus
-import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessIdWithName, ProcessName}
+import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessIdWithName, ProcessName, VersionId}
import pl.touk.nussknacker.engine.api.{MetaData, ProcessVersion, StreamMetaData}
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.PeriodicProcessService.PeriodicProcessStatus
-import pl.touk.nussknacker.engine.management.periodic.db.{DbInitializer, SlickPeriodicProcessesRepository}
-import pl.touk.nussknacker.engine.management.periodic.model._
-import pl.touk.nussknacker.engine.management.periodic.service._
import pl.touk.nussknacker.test.PatientScalaFutures
+import pl.touk.nussknacker.test.base.db.WithPostgresDbTesting
+import pl.touk.nussknacker.test.base.it.WithClock
+import pl.touk.nussknacker.test.utils.domain.TestFactory
+import pl.touk.nussknacker.test.utils.domain.TestFactory.newWriteProcessRepository
+import pl.touk.nussknacker.test.utils.scalas.DBIOActionValues
+import pl.touk.nussknacker.ui.process.periodic.PeriodicProcessService.PeriodicProcessStatus
+import pl.touk.nussknacker.ui.process.periodic.flink.{DeploymentManagerStub, ScheduledExecutionPerformerStub}
+import pl.touk.nussknacker.ui.process.periodic.legacy.db.{LegacyDbInitializer, SlickLegacyPeriodicProcessesRepository}
+import pl.touk.nussknacker.ui.process.periodic.model._
+import pl.touk.nussknacker.ui.process.repository.ProcessRepository.CreateProcessAction
+import pl.touk.nussknacker.ui.process.repository.{
+ DBIOActionRunner,
+ DBProcessRepository,
+ PeriodicProcessesRepository,
+ SlickPeriodicProcessesRepository
+}
+import pl.touk.nussknacker.ui.security.api.AdminUser
import slick.jdbc
-import slick.jdbc.{JdbcBackend, JdbcProfile}
+import slick.jdbc.JdbcProfile
import java.time._
import java.time.temporal.ChronoUnit
@@ -42,21 +59,21 @@ class PeriodicProcessServiceIntegrationTest
with OptionValues
with ScalaFutures
with PatientScalaFutures
- with ForAllTestContainer
- with LazyLogging {
-
- override val container: PostgreSQLContainer = PostgreSQLContainer(DockerImageName.parse("postgres:11.2"))
+ with WithPostgresDbTesting
+ with LazyLogging
+ with WithClock
+ with DBIOActionValues {
import scala.concurrent.ExecutionContext.Implicits.global
+ override protected def dbioRunner: DBIOActionRunner = new DBIOActionRunner(testDbRef)
+
implicit val freshnessPolicy: DataFreshnessPolicy = DataFreshnessPolicy.Fresh
private val processingType = "testProcessingType"
private val processName = ProcessName("test")
- private val processIdWithName = ProcessIdWithName(ProcessId(1), processName)
-
private val sampleProcess = CanonicalProcess(MetaData(processName.value, StreamMetaData()), Nil)
private val startTime = Instant.parse("2021-04-06T13:18:00Z")
@@ -74,6 +91,9 @@ class PeriodicProcessServiceIntegrationTest
executionConfig: PeriodicExecutionConfig = PeriodicExecutionConfig(),
maxFetchedPeriodicScenarioActivities: Option[Int] = None,
)(testCode: Fixture => Any): Unit = {
+
+ val fetchingProcessRepository = TestFactory.newFutureFetchingScenarioRepository(testDbRef)
+
val postgresConfig = ConfigFactory.parseMap(
Map(
"user" -> container.username,
@@ -92,43 +112,73 @@ class PeriodicProcessServiceIntegrationTest
).asJava
)
- def runTestCodeWithDbConfig(config: Config) = {
- val (db: jdbc.JdbcBackend.DatabaseDef, dbProfile: JdbcProfile) = DbInitializer.init(config)
+ def runWithLegacyRepository(dbConfig: Config): Unit = {
+ val (db: jdbc.JdbcBackend.DatabaseDef, dbProfile: JdbcProfile) = LegacyDbInitializer.init(dbConfig)
+ val creator = (processingType: String, currentTime: Instant) =>
+ new SlickLegacyPeriodicProcessesRepository(
+ processingType,
+ db,
+ dbProfile,
+ fixedClock(currentTime),
+ fetchingProcessRepository,
+ )
try {
testCode(
- new Fixture(db, dbProfile, deploymentRetryConfig, executionConfig, maxFetchedPeriodicScenarioActivities)
+ new Fixture(creator, deploymentRetryConfig, executionConfig, maxFetchedPeriodicScenarioActivities)
)
} finally {
db.close()
}
}
- logger.debug("Running test with hsql")
- runTestCodeWithDbConfig(hsqlConfig)
- logger.debug("Running test with postgres")
- runTestCodeWithDbConfig(postgresConfig)
+
+ def runTestCodeWithNuDb(): Unit = {
+ val creator = (processingType: String, currentTime: Instant) =>
+ new SlickPeriodicProcessesRepository(
+ processingType,
+ testDbRef.db,
+ testDbRef.profile,
+ fixedClock(currentTime),
+ fetchingProcessRepository,
+ )
+ testCode(
+ new Fixture(creator, deploymentRetryConfig, executionConfig, maxFetchedPeriodicScenarioActivities)
+ )
+ }
+
+ def testHeader(str: String) = "\n\n" + "*" * 100 + s"\n***** $str\n" + "*" * 100 + "\n"
+ logger.info(testHeader("Running test with legacy hsql-based repository"))
+ runWithLegacyRepository(hsqlConfig)
+ cleanDB()
+ logger.info(testHeader("Running test with legacy postgres-based repository"))
+ runWithLegacyRepository(postgresConfig)
+ cleanDB()
+ logger.info(testHeader("Running test with Nu database"))
+ runTestCodeWithNuDb()
+ cleanDB()
}
class Fixture(
- db: JdbcBackend.DatabaseDef,
- dbProfile: JdbcProfile,
+ periodicProcessesRepositoryCreator: (String, Instant) => PeriodicProcessesRepository,
deploymentRetryConfig: DeploymentRetryConfig,
executionConfig: PeriodicExecutionConfig,
maxFetchedPeriodicScenarioActivities: Option[Int],
) {
- val delegateDeploymentManagerStub = new DeploymentManagerStub
- val jarManagerStub = new JarManagerStub
- val events = new ArrayBuffer[PeriodicProcessEvent]()
- var failListener = false
-
- def periodicProcessService(currentTime: Instant, processingType: String = processingType) =
+ val delegateDeploymentManagerStub = new DeploymentManagerStub
+ val scheduledExecutionPerformerStub = new ScheduledExecutionPerformerStub
+ val events = new ArrayBuffer[ScheduledProcessEvent]()
+ var failListener = false
+
+ def periodicProcessService(
+ currentTime: Instant,
+ processingType: String = processingType
+ ) =
new PeriodicProcessService(
delegateDeploymentManager = delegateDeploymentManagerStub,
- jarManager = jarManagerStub,
- scheduledProcessesRepository =
- new SlickPeriodicProcessesRepository(db, dbProfile, fixedClock(currentTime), processingType),
- periodicProcessListener = new PeriodicProcessListener {
+ scheduledExecutionPerformer = scheduledExecutionPerformerStub,
+ periodicProcessesRepository = periodicProcessesRepositoryCreator(processingType, currentTime),
+ periodicProcessListener = new ScheduledProcessListener {
- override def onPeriodicProcessEvent: PartialFunction[PeriodicProcessEvent, Unit] = {
+ override def onScheduledProcessEvent: PartialFunction[ScheduledProcessEvent, Unit] = {
case k if failListener => throw new Exception(s"$k was ordered to fail")
case k => events.append(k)
}
@@ -141,8 +191,26 @@ class PeriodicProcessServiceIntegrationTest
processConfigEnricher = ProcessConfigEnricher.identity,
clock = fixedClock(currentTime),
new ProcessingTypeActionServiceStub,
- Map.empty
+ Map.empty,
+ )
+
+ def writeProcessRepository: DBProcessRepository = newWriteProcessRepository(testDbRef, clock)
+
+ def prepareProcess(processName: ProcessName): DB[ProcessIdWithName] = {
+ val canonicalProcess = CanonicalProcess(MetaData(processName.value, StreamMetaData()), Nil)
+ val action = CreateProcessAction(
+ processName = processName,
+ category = "Category1",
+ canonicalProcess = canonicalProcess,
+ processingType = "streaming",
+ isFragment = false,
+ forwardedUserName = None
)
+ writeProcessRepository
+ .saveNewProcess(action)(AdminUser("artificialTestAdmin", "artificialTestAdmin"))
+ .map(_.value.processId)
+ .map(ProcessIdWithName(_, processName))
+ }
}
@@ -159,12 +227,15 @@ class PeriodicProcessServiceIntegrationTest
def otherProcessingTypeService = f.periodicProcessService(currentTime, processingType = "other")
val otherProcessName = ProcessName("other")
+ val processIdWithName = f.prepareProcess(processName).dbioActionValues
+ val otherProcessIdWithName = f.prepareProcess(otherProcessName).dbioActionValues
+
service
.schedule(
cronEveryHour,
- ProcessVersion.empty.copy(processName = processName),
+ ProcessVersion(VersionId(1), processIdWithName.name, processIdWithName.id, List.empty, "testUser", None),
sampleProcess,
- randomProcessActionId
+ randomProcessActionId,
)
.futureValue
service
@@ -172,7 +243,7 @@ class PeriodicProcessServiceIntegrationTest
cronEvery30Minutes,
ProcessVersion.empty.copy(processName = every30MinutesProcessName),
sampleProcess,
- randomProcessActionId
+ randomProcessActionId,
)
.futureValue
service
@@ -180,15 +251,15 @@ class PeriodicProcessServiceIntegrationTest
cronEvery4Hours,
ProcessVersion.empty.copy(processName = every4HoursProcessName),
sampleProcess,
- randomProcessActionId
+ randomProcessActionId,
)
.futureValue
otherProcessingTypeService
.schedule(
cronEveryHour,
- ProcessVersion.empty.copy(processName = otherProcessName),
+ ProcessVersion.empty.copy(processName = otherProcessIdWithName.name),
sampleProcess,
- randomProcessActionId
+ randomProcessActionId,
)
.futureValue
@@ -196,22 +267,22 @@ class PeriodicProcessServiceIntegrationTest
stateAfterSchedule should have size 1
val afterSchedule = stateAfterSchedule.firstScheduleData
- afterSchedule.process.processVersion.processName shouldBe processName
+ afterSchedule.process.deploymentData.processName shouldBe processName
afterSchedule.latestDeployments.head.state shouldBe PeriodicProcessDeploymentState(
None,
None,
PeriodicProcessDeploymentStatus.Scheduled
)
afterSchedule.latestDeployments.head.runAt shouldBe localTime(expectedScheduleTime)
- service.getLatestDeploymentsForActiveSchedules(otherProcessName).futureValue shouldBe empty
+ service.getLatestDeploymentsForActiveSchedules(otherProcessIdWithName.name).futureValue shouldBe empty
currentTime = timeToTriggerCheck
val allToDeploy = service.findToBeDeployed.futureValue
allToDeploy.map(
- _.periodicProcess.processVersion.processName
+ _.periodicProcess.deploymentData.processName
) should contain only (processName, every30MinutesProcessName)
- val toDeploy = allToDeploy.find(_.periodicProcess.processVersion.processName == processName).value
+ val toDeploy = allToDeploy.find(_.periodicProcess.deploymentData.processName == processName).value
service.deploy(toDeploy).futureValue
otherProcessingTypeService.deploy(otherProcessingTypeService.findToBeDeployed.futureValue.loneElement).futureValue
@@ -237,7 +308,7 @@ class PeriodicProcessServiceIntegrationTest
// here we check that scenarios that have not fired are still on the "toDeploy" list and finished ones are not
val toDeployAfterFinish = service.findToBeDeployed.futureValue
- toDeployAfterFinish.map(_.periodicProcess.processVersion.processName) should contain only every30MinutesProcessName
+ toDeployAfterFinish.map(_.periodicProcess.deploymentData.processName) should contain only every30MinutesProcessName
service.deactivate(processName).futureValue
service.getLatestDeploymentsForActiveSchedules(processName).futureValue shouldBe empty
val inactiveStates = service
@@ -256,7 +327,7 @@ class PeriodicProcessServiceIntegrationTest
val firstActivity = activities.head.asInstanceOf[ScenarioActivity.PerformedScheduledExecution]
activities shouldBe List(
ScenarioActivity.PerformedScheduledExecution(
- scenarioId = ScenarioId(1),
+ scenarioId = ScenarioId(processIdWithName.id.value),
scenarioActivityId = firstActivity.scenarioActivityId,
user = ScenarioUser(None, UserName("Nussknacker"), None, None),
date = firstActivity.date,
@@ -272,14 +343,23 @@ class PeriodicProcessServiceIntegrationTest
}
it should "handleFinished for all finished periodic scenarios waiting for reschedule" in withFixture() { f =>
- val timeToTriggerCheck = startTime.plus(2, ChronoUnit.HOURS)
- var currentTime = startTime
- def service = f.periodicProcessService(currentTime)
+ val timeToTriggerCheck = startTime.plus(2, ChronoUnit.HOURS)
+ var currentTime = startTime
+ def service = f.periodicProcessService(currentTime)
+ val firstProcessIdWithName = f.prepareProcess(ProcessName("first")).dbioActionValues
+ val secondProcessIdWithName = f.prepareProcess(ProcessName("second")).dbioActionValues
service
.schedule(
cronEveryHour,
- ProcessVersion.empty.copy(processName = ProcessName("first")),
+ ProcessVersion(
+ VersionId(1),
+ firstProcessIdWithName.name,
+ firstProcessIdWithName.id,
+ List.empty,
+ "testUser",
+ None
+ ),
sampleProcess,
randomProcessActionId
)
@@ -287,7 +367,14 @@ class PeriodicProcessServiceIntegrationTest
service
.schedule(
cronEveryHour,
- ProcessVersion.empty.copy(processName = ProcessName("second")),
+ ProcessVersion(
+ VersionId(1),
+ secondProcessIdWithName.name,
+ secondProcessIdWithName.id,
+ List.empty,
+ "testUser",
+ None
+ ),
sampleProcess,
randomProcessActionId
)
@@ -328,15 +415,18 @@ class PeriodicProcessServiceIntegrationTest
) { f =>
val timeToTriggerCheck = startTime.plus(2, ChronoUnit.HOURS)
var currentTime = startTime
- f.jarManagerStub.deployWithJarFuture = Future.failed(new RuntimeException("Flink deploy error"))
+ f.scheduledExecutionPerformerStub.deployWithJarFuture = Future.failed(new RuntimeException("Flink deploy error"))
def service = f.periodicProcessService(currentTime)
+
+ val processIdWithName = f.prepareProcess(processName).dbioActionValues
+
service
.schedule(
cronEveryHour,
- ProcessVersion.empty.copy(processName = processName),
+ ProcessVersion(VersionId(1), processIdWithName.name, processIdWithName.id, List.empty, "testUser", None),
sampleProcess,
- randomProcessActionId
+ randomProcessActionId,
)
.futureValue
@@ -357,7 +447,7 @@ class PeriodicProcessServiceIntegrationTest
val firstActivity = activities.head.asInstanceOf[ScenarioActivity.PerformedScheduledExecution]
activities shouldBe List(
ScenarioActivity.PerformedScheduledExecution(
- scenarioId = ScenarioId(1),
+ scenarioId = ScenarioId(processIdWithName.id.value),
scenarioActivityId = firstActivity.scenarioActivityId,
user = ScenarioUser(None, UserName("Nussknacker"), None, None),
date = firstActivity.date,
@@ -382,6 +472,9 @@ class PeriodicProcessServiceIntegrationTest
val scheduleMinute5 = "scheduleMinute5"
val scheduleMinute10 = "scheduleMinute10"
+
+ val processIdWithName = f.prepareProcess(processName).dbioActionValues
+
service
.schedule(
MultipleScheduleProperty(
@@ -390,9 +483,9 @@ class PeriodicProcessServiceIntegrationTest
scheduleMinute10 -> CronScheduleProperty("0 10 * * * ?")
)
),
- ProcessVersion.empty.copy(processName = processName),
+ ProcessVersion(VersionId(1), processIdWithName.name, processIdWithName.id, List.empty, "testUser", None),
sampleProcess,
- randomProcessActionId
+ randomProcessActionId,
)
.futureValue
@@ -407,7 +500,7 @@ class PeriodicProcessServiceIntegrationTest
),
ProcessVersion.empty.copy(processName = ProcessName("other")),
sampleProcess,
- randomProcessActionId
+ randomProcessActionId,
)
.futureValue
@@ -424,7 +517,7 @@ class PeriodicProcessServiceIntegrationTest
val allToDeploy = service.findToBeDeployed.futureValue
allToDeploy should have length 4
- val toDeploy = allToDeploy.filter(_.periodicProcess.processVersion.processName == processName)
+ val toDeploy = allToDeploy.filter(_.periodicProcess.deploymentData.processName == processName)
toDeploy should have length 2
toDeploy.head.runAt shouldBe localTime(expectedScheduleTime.plus(5, ChronoUnit.MINUTES))
toDeploy.head.scheduleName.value shouldBe Some(scheduleMinute5)
@@ -447,6 +540,9 @@ class PeriodicProcessServiceIntegrationTest
val firstSchedule = "schedule1"
val secondSchedule = "schedule2"
+
+ val processIdWithName = f.prepareProcess(processName).dbioActionValues
+
service
.schedule(
MultipleScheduleProperty(
@@ -455,9 +551,9 @@ class PeriodicProcessServiceIntegrationTest
secondSchedule -> CronScheduleProperty("0 5 * * * ?")
)
),
- ProcessVersion.empty.copy(processName = processName),
+ ProcessVersion(VersionId(1), processIdWithName.name, processIdWithName.id, List.empty, "testUser", None),
sampleProcess,
- randomProcessActionId
+ randomProcessActionId,
)
.futureValue
@@ -467,7 +563,7 @@ class PeriodicProcessServiceIntegrationTest
toDeploy should have length 2
val deployment = toDeploy.find(_.scheduleName.value.contains(firstSchedule)).value
- service.deploy(deployment)
+ service.deploy(deployment).futureValue
f.delegateDeploymentManagerStub.setStateStatus(processName, SimpleStateStatus.Running, Some(deployment.id))
val toDeployAfterDeploy = service.findToBeDeployed.futureValue
@@ -487,7 +583,7 @@ class PeriodicProcessServiceIntegrationTest
}
firstActivity shouldBe
ScenarioActivity.PerformedScheduledExecution(
- scenarioId = ScenarioId(1),
+ scenarioId = ScenarioId(processIdWithName.id.value),
scenarioActivityId = firstActivity.scenarioActivityId,
user = ScenarioUser(None, UserName("Nussknacker"), None, None),
date = firstActivity.date,
@@ -503,7 +599,9 @@ class PeriodicProcessServiceIntegrationTest
it should "handle multiple one time schedules" in withFixture() { f =>
handleMultipleOneTimeSchedules(f)
- def service = f.periodicProcessService(startTime)
+ def service = f.periodicProcessService(startTime)
+ val processIdWithName = ProcessIdWithName(ProcessId(1), processName)
+
val activities = service.getScenarioActivitiesSpecificToPeriodicProcess(processIdWithName, None).futureValue
val firstActivity = activities.head.asInstanceOf[ScenarioActivity.PerformedScheduledExecution]
val secondActivity = activities(1).asInstanceOf[ScenarioActivity.PerformedScheduledExecution]
@@ -541,7 +639,9 @@ class PeriodicProcessServiceIntegrationTest
maxFetchedPeriodicScenarioActivities = Some(1)
) { f =>
handleMultipleOneTimeSchedules(f)
- def service = f.periodicProcessService(startTime)
+ def service = f.periodicProcessService(startTime)
+ val processIdWithName = ProcessIdWithName(ProcessId(1), processName)
+
val activities = service.getScenarioActivitiesSpecificToPeriodicProcess(processIdWithName, None).futureValue
val firstActivity = activities.head.asInstanceOf[ScenarioActivity.PerformedScheduledExecution]
activities shouldBe List(
@@ -578,6 +678,9 @@ class PeriodicProcessServiceIntegrationTest
val schedule1 = "schedule1"
val schedule2 = "schedule2"
+
+ val processIdWithName = f.prepareProcess(processName).dbioActionValues
+
service
.schedule(
MultipleScheduleProperty(
@@ -586,9 +689,9 @@ class PeriodicProcessServiceIntegrationTest
schedule2 -> CronScheduleProperty(convertDateToCron(localTime(timeToTriggerSchedule2)))
)
),
- ProcessVersion.empty.copy(processName = processName),
+ ProcessVersion(VersionId(1), processIdWithName.name, processIdWithName.id, List.empty, "testUser", None),
sampleProcess,
- randomProcessActionId
+ randomProcessActionId,
)
.futureValue
@@ -659,7 +762,6 @@ class PeriodicProcessServiceIntegrationTest
.futureValue
inactiveStates.latestDeploymentForSchedule(schedule1).state.status shouldBe PeriodicProcessDeploymentStatus.Finished
inactiveStates.latestDeploymentForSchedule(schedule2).state.status shouldBe PeriodicProcessDeploymentStatus.Finished
-
}
it should "handle failed event handler" in withFixture() { f =>
@@ -669,19 +771,21 @@ class PeriodicProcessServiceIntegrationTest
def service = f.periodicProcessService(currentTime)
- def tryWithFailedListener[T](action: () => Future[T]): T = {
+ def tryWithFailedListener[T](action: () => Future[T]): Unit = {
f.failListener = true
- intercept[TestFailedException](action().futureValue).getCause shouldBe a[PeriodicProcessException]
+ val exception = intercept[TestFailedException](action().futureValue)
+ exception.getCause shouldBe a[PeriodicProcessException]
f.failListener = false
- action().futureValue
}
+ val processIdWithName = f.prepareProcess(processName).dbioActionValues
+
tryWithFailedListener { () =>
service.schedule(
cronEveryHour,
- ProcessVersion.empty.copy(processName = processName),
+ ProcessVersion(VersionId(1), processIdWithName.name, processIdWithName.id, List.empty, "testUser", None),
sampleProcess,
- randomProcessActionId
+ randomProcessActionId,
)
}
@@ -703,15 +807,17 @@ class PeriodicProcessServiceIntegrationTest
val timeToTriggerCheck = startTime.plus(1, ChronoUnit.HOURS)
var currentTime = startTime
- f.jarManagerStub.deployWithJarFuture = Future.failed(new RuntimeException("Flink deploy error"))
+ f.scheduledExecutionPerformerStub.deployWithJarFuture = Future.failed(new RuntimeException("Flink deploy error"))
def service = f.periodicProcessService(currentTime)
+ val processIdWithName = f.prepareProcess(processName).dbioActionValues
+
service
.schedule(
cronEveryHour,
- ProcessVersion.empty.copy(processName = processName),
+ ProcessVersion(VersionId(1), processIdWithName.name, processIdWithName.id, List.empty, "testUser", None),
sampleProcess,
- randomProcessActionId
+ randomProcessActionId,
)
.futureValue
currentTime = timeToTriggerCheck
@@ -732,7 +838,7 @@ class PeriodicProcessServiceIntegrationTest
val firstActivity = activities.head.asInstanceOf[ScenarioActivity.PerformedScheduledExecution]
activities shouldBe List(
ScenarioActivity.PerformedScheduledExecution(
- scenarioId = ScenarioId(1),
+ scenarioId = ScenarioId(processIdWithName.id.value),
scenarioActivityId = firstActivity.scenarioActivityId,
user = ScenarioUser(None, UserName("Nussknacker"), None, None),
date = firstActivity.date,
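
The rewritten integration test runs one test body against three repository backends (legacy HSQL, legacy Postgres, and the Nu database), creating each repository from a factory and cleaning the database between runs. A stripped-down sketch of that structure, with illustrative names and the repository type simplified to AnyRef:

object MultiBackendHarness {

  // A backend is a named repository factory plus its cleanup action.
  final case class Backend(name: String, makeRepository: () => AnyRef, cleanup: () => Unit)

  def runAgainstAll(backends: List[Backend])(testCode: AnyRef => Unit): Unit =
    backends.foreach { backend =>
      println(s"***** Running test with ${backend.name}")
      try testCode(backend.makeRepository())
      finally backend.cleanup()
    }

}
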
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessesFetchingTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicProcessesFetchingTest.scala
similarity index 65%
rename from engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessesFetchingTest.scala
rename to designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicProcessesFetchingTest.scala
index a5278f94586..b296f1271ef 100644
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessesFetchingTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/PeriodicProcessesFetchingTest.scala
@@ -1,4 +1,4 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.funsuite.AnyFunSuite
@@ -9,17 +9,18 @@ import pl.touk.nussknacker.engine.api.deployment._
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
import pl.touk.nussknacker.engine.api.process.ProcessName
import pl.touk.nussknacker.engine.deployment.DeploymentData
-import pl.touk.nussknacker.engine.management.periodic.db.InMemPeriodicProcessesRepository.getLatestDeploymentQueryCount
-import pl.touk.nussknacker.engine.management.periodic.model.PeriodicProcessDeploymentStatus
-import pl.touk.nussknacker.engine.management.periodic.service.{
- DefaultAdditionalDeploymentDataProvider,
- EmptyListener,
- ProcessConfigEnricher
-}
import pl.touk.nussknacker.test.PatientScalaFutures
+import pl.touk.nussknacker.ui.process.periodic.flink.{DeploymentManagerStub, ScheduledExecutionPerformerStub}
+import pl.touk.nussknacker.ui.process.periodic.flink.db.InMemPeriodicProcessesRepository
+import pl.touk.nussknacker.ui.process.periodic.flink.db.InMemPeriodicProcessesRepository.getLatestDeploymentQueryCount
+import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessDeploymentStatus
+import pl.touk.nussknacker.engine.api.deployment.scheduler.services.{EmptyListener, ProcessConfigEnricher}
+import pl.touk.nussknacker.ui.process.periodic.cron.CronSchedulePropertyExtractor
+import pl.touk.nussknacker.ui.process.repository.{FetchingProcessRepository, PeriodicProcessesRepository}
import java.time.Clock
import java.util.UUID
+import scala.concurrent.Future
class PeriodicProcessesFetchingTest
extends AnyFunSuite
@@ -37,15 +38,16 @@ class PeriodicProcessesFetchingTest
private def processName(n: Int) = ProcessName(s"test$n")
class Fixture(executionConfig: PeriodicExecutionConfig = PeriodicExecutionConfig()) {
- val repository = new db.InMemPeriodicProcessesRepository(processingType = "testProcessingType")
- val delegateDeploymentManagerStub = new DeploymentManagerStub
- val jarManagerStub = new JarManagerStub
- val preparedDeploymentData = DeploymentData.withDeploymentId(UUID.randomUUID().toString)
+ val processingType = "testProcessingType"
+ val repository = new InMemPeriodicProcessesRepository(processingType)
+ val delegateDeploymentManagerStub = new DeploymentManagerStub
+ val scheduledExecutionPerformerStub = new ScheduledExecutionPerformerStub
+ val preparedDeploymentData = DeploymentData.withDeploymentId(UUID.randomUUID().toString)
val periodicProcessService = new PeriodicProcessService(
delegateDeploymentManager = delegateDeploymentManagerStub,
- jarManager = jarManagerStub,
- scheduledProcessesRepository = repository,
+ scheduledExecutionPerformer = scheduledExecutionPerformerStub,
+ periodicProcessesRepository = repository,
periodicProcessListener = EmptyListener,
additionalDeploymentDataProvider = DefaultAdditionalDeploymentDataProvider,
deploymentRetryConfig = DeploymentRetryConfig(),
@@ -60,7 +62,7 @@ class PeriodicProcessesFetchingTest
val periodicDeploymentManager = new PeriodicDeploymentManager(
delegate = delegateDeploymentManagerStub,
service = periodicProcessService,
- repository = repository,
+ periodicProcessesRepository = repository,
schedulePropertyExtractor = CronSchedulePropertyExtractor(),
toClose = () => ()
)
@@ -76,7 +78,8 @@ class PeriodicProcessesFetchingTest
f.delegateDeploymentManagerStub.setEmptyStateStatus()
for (i <- 1 to n) {
- val deploymentId = f.repository.addActiveProcess(processName(i), PeriodicProcessDeploymentStatus.Deployed)
+ val deploymentId =
+ f.repository.addActiveProcess(processName(i), PeriodicProcessDeploymentStatus.Deployed)
f.delegateDeploymentManagerStub.addStateStatus(processName(i), SimpleStateStatus.Running, Some(deploymentId))
}
@@ -91,14 +94,17 @@ class PeriodicProcessesFetchingTest
getLatestDeploymentQueryCount.get() shouldEqual 2 * n
}
- test("getStatusDetails - should perform 2 db queries for N periodic processes when fetching all at once") {
+ test(
+ "getAllProcessesStates - should perform 2 db queries for N periodic processes when fetching all at once"
+ ) {
val f = new Fixture
val n = 10
f.delegateDeploymentManagerStub.setEmptyStateStatus()
for (i <- 1 to n) {
- val deploymentId = f.repository.addActiveProcess(processName(i), PeriodicProcessDeploymentStatus.Deployed)
+ val deploymentId =
+ f.repository.addActiveProcess(processName(i), PeriodicProcessDeploymentStatus.Deployed)
f.delegateDeploymentManagerStub.addStateStatus(processName(i), SimpleStateStatus.Running, Some(deploymentId))
}
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/CronSchedulePropertyExtractorTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/CronSchedulePropertyExtractorTest.scala
similarity index 73%
rename from engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/CronSchedulePropertyExtractorTest.scala
rename to designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/CronSchedulePropertyExtractorTest.scala
index 5874bd1bb26..4b0c326fd2e 100644
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/CronSchedulePropertyExtractorTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/CronSchedulePropertyExtractorTest.scala
@@ -1,14 +1,16 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic.flink
import org.scalatest.Inside
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import pl.touk.nussknacker.engine.api.NodeId
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.{ScheduleProperty => ApiScheduleProperty}
import pl.touk.nussknacker.engine.api.parameter.ParameterName
import pl.touk.nussknacker.engine.build.ScenarioBuilder
import pl.touk.nussknacker.engine.graph.expression.Expression
-import pl.touk.nussknacker.engine.management.periodic.cron.CronParameterValidator
import pl.touk.nussknacker.test.{EitherValuesDetailedMessage, ValidatedValuesDetailedMessage}
+import pl.touk.nussknacker.ui.process.periodic.cron.{CronParameterValidator, CronSchedulePropertyExtractor}
+import pl.touk.nussknacker.ui.process.periodic.{CronScheduleProperty, MultipleScheduleProperty}
class CronSchedulePropertyExtractorTest
extends AnyFunSuite
@@ -39,16 +41,16 @@ class CronSchedulePropertyExtractorTest
test("should extract cron property") {
val result = extractor(PeriodicProcessGen.buildCanonicalProcess())
- inside(result) { case Right(CronScheduleProperty(_)) => }
+ inside(result) { case Right(ApiScheduleProperty.CronScheduleProperty(_)) => }
}
test("should extract MultipleScheduleProperty") {
val multipleSchedulesExpression = "{foo: '0 0 * * * ?', bar: '1 0 * * * ?'}"
val result = extractor(PeriodicProcessGen.buildCanonicalProcess(multipleSchedulesExpression))
- result.rightValue shouldEqual MultipleScheduleProperty(
+ result.rightValue shouldEqual ApiScheduleProperty.MultipleScheduleProperty(
Map(
- "foo" -> CronScheduleProperty("0 0 * * * ?"),
- "bar" -> CronScheduleProperty("1 0 * * * ?")
+ "foo" -> ApiScheduleProperty.CronScheduleProperty("0 0 * * * ?"),
+ "bar" -> ApiScheduleProperty.CronScheduleProperty("1 0 * * * ?")
)
)
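
The expected values above are Quartz-style cron expressions ("0 0 * * * ?"). For reference, a hedged sketch of parsing and evaluating such an expression with cron-utils, which the build already depends on; the Optional-returning nextExecution signature applies to recent cron-utils versions:

import com.cronutils.model.CronType
import com.cronutils.model.definition.CronDefinitionBuilder
import com.cronutils.model.time.ExecutionTime
import com.cronutils.parser.CronParser

import java.time.ZonedDateTime

object CronNextRun {

  private val parser =
    new CronParser(CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ))

  // Returns the next fire time after `from`, or None if the expression never
  // fires again; parse throws IllegalArgumentException on invalid input.
  def nextExecution(expression: String, from: ZonedDateTime): Option[ZonedDateTime] = {
    val next = ExecutionTime.forCron(parser.parse(expression)).nextExecution(from)
    if (next.isPresent) Some(next.get) else None
  }

}
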
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/CronSchedulePropertyTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/CronSchedulePropertyTest.scala
similarity index 89%
rename from engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/CronSchedulePropertyTest.scala
rename to designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/CronSchedulePropertyTest.scala
index 5fff9dcb12d..0177cd9fea3 100644
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/CronSchedulePropertyTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/CronSchedulePropertyTest.scala
@@ -1,8 +1,10 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic.flink
-import java.time.{Clock, LocalDateTime, ZoneId, ZonedDateTime}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
+import pl.touk.nussknacker.ui.process.periodic.CronScheduleProperty
+
+import java.time.{Clock, LocalDateTime, ZoneId, ZonedDateTime}
class CronSchedulePropertyTest extends AnyFunSuite with Matchers {
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/DeploymentActorTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/DeploymentActorTest.scala
similarity index 63%
rename from engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/DeploymentActorTest.scala
rename to designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/DeploymentActorTest.scala
index ac646390e38..eaeed51fe9e 100644
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/DeploymentActorTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/DeploymentActorTest.scala
@@ -1,15 +1,14 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic.flink
import akka.actor.{ActorRef, ActorSystem}
import akka.testkit.{TestKit, TestKitBase, TestProbe}
-import org.scalatest.LoneElement._
import org.scalatest.BeforeAndAfterAll
+import org.scalatest.LoneElement._
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
-import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.DeploymentActor.CheckToBeDeployed
-import pl.touk.nussknacker.engine.management.periodic.model.DeploymentWithJarData.WithCanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.model.PeriodicProcessDeployment
+import pl.touk.nussknacker.ui.process.periodic.DeploymentActor
+import pl.touk.nussknacker.ui.process.periodic.DeploymentActor.CheckToBeDeployed
+import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessDeployment
import scala.concurrent.Future
import scala.concurrent.duration._
@@ -34,11 +33,11 @@ class DeploymentActorTest extends AnyFunSuite with TestKitBase with Matchers wit
}
private def shouldFindToBeDeployedScenarios(
- result: Future[Seq[PeriodicProcessDeployment[WithCanonicalProcess]]]
+ result: Future[Seq[PeriodicProcessDeployment]]
): Unit = {
val probe = TestProbe()
var counter = 0
- def findToBeDeployed: Future[Seq[PeriodicProcessDeployment[WithCanonicalProcess]]] = {
+ def findToBeDeployed: Future[Seq[PeriodicProcessDeployment]] = {
counter += 1
probe.ref ! s"invoked $counter"
result
@@ -55,14 +54,14 @@ class DeploymentActorTest extends AnyFunSuite with TestKitBase with Matchers wit
}
test("should deploy found scenario") {
- val probe = TestProbe()
- val waitingDeployment = PeriodicProcessDeploymentGen()
- var toBeDeployed: Seq[PeriodicProcessDeployment[WithCanonicalProcess]] = Seq(waitingDeployment)
- var actor: ActorRef = null
- def findToBeDeployed: Future[Seq[PeriodicProcessDeployment[WithCanonicalProcess]]] = {
+ val probe = TestProbe()
+ val waitingDeployment = PeriodicProcessDeploymentGen()
+ var toBeDeployed: Seq[PeriodicProcessDeployment] = Seq(waitingDeployment)
+ var actor: ActorRef = null
+ def findToBeDeployed: Future[Seq[PeriodicProcessDeployment]] = {
Future.successful(toBeDeployed)
}
- def deploy(deployment: PeriodicProcessDeployment[WithCanonicalProcess]): Future[Unit] = {
+ def deploy(deployment: PeriodicProcessDeployment): Future[Unit] = {
probe.ref ! deployment
// Simulate periodic check for waiting scenarios while deploying a scenario.
actor ! CheckToBeDeployed
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/DeploymentManagerStub.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/DeploymentManagerStub.scala
similarity index 93%
rename from engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/DeploymentManagerStub.scala
rename to designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/DeploymentManagerStub.scala
index e424331f837..8e59980e28d 100644
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/DeploymentManagerStub.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/DeploymentManagerStub.scala
@@ -1,11 +1,11 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic.flink
import pl.touk.nussknacker.engine.api.deployment._
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
import pl.touk.nussknacker.engine.api.process.{ProcessIdWithName, ProcessName, VersionId}
import pl.touk.nussknacker.engine.deployment.{DeploymentId, ExternalDeploymentId}
-import pl.touk.nussknacker.engine.management.periodic.model.PeriodicProcessDeploymentId
import pl.touk.nussknacker.engine.testing.StubbingCommands
+import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessDeploymentId
import scala.concurrent.Future
@@ -82,6 +82,8 @@ class DeploymentManagerStub extends BaseDeploymentManager with StubbingCommands
override def deploymentSynchronisationSupport: DeploymentSynchronisationSupport = NoDeploymentSynchronisationSupport
+ override def schedulingSupport: SchedulingSupport = NoSchedulingSupport
+
override def stateQueryForAllScenariosSupport: StateQueryForAllScenariosSupport =
new StateQueryForAllScenariosSupported {
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/FlinkClientStub.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/FlinkClientStub.scala
similarity index 96%
rename from engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/FlinkClientStub.scala
rename to designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/FlinkClientStub.scala
index 5c386e88208..b1b260d1a7d 100644
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/FlinkClientStub.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/FlinkClientStub.scala
@@ -1,4 +1,4 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic.flink
import org.apache.flink.configuration.Configuration
import pl.touk.nussknacker.engine.api.deployment.{DataFreshnessPolicy, SavepointResult, WithDataFreshnessStatus}
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicDeploymentManagerTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/PeriodicDeploymentManagerTest.scala
similarity index 94%
rename from engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicDeploymentManagerTest.scala
rename to designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/PeriodicDeploymentManagerTest.scala
index 82f1c575b0b..eb968cee662 100644
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicDeploymentManagerTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/PeriodicDeploymentManagerTest.scala
@@ -1,4 +1,4 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic.flink
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.funsuite.AnyFunSuite
@@ -13,15 +13,14 @@ import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessIdWithName, Pro
import pl.touk.nussknacker.engine.api.{MetaData, ProcessVersion, StreamMetaData}
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
import pl.touk.nussknacker.engine.deployment.{DeploymentData, User}
-import pl.touk.nussknacker.engine.management.periodic.PeriodicProcessService.PeriodicProcessStatus
-import pl.touk.nussknacker.engine.management.periodic.PeriodicStateStatus.{ScheduledStatus, WaitingForScheduleStatus}
-import pl.touk.nussknacker.engine.management.periodic.model.PeriodicProcessDeploymentStatus
-import pl.touk.nussknacker.engine.management.periodic.service.{
- DefaultAdditionalDeploymentDataProvider,
- EmptyListener,
- ProcessConfigEnricher
-}
import pl.touk.nussknacker.test.PatientScalaFutures
+import pl.touk.nussknacker.ui.process.periodic.PeriodicProcessService.PeriodicProcessStatus
+import pl.touk.nussknacker.ui.process.periodic.PeriodicStateStatus.{ScheduledStatus, WaitingForScheduleStatus}
+import pl.touk.nussknacker.ui.process.periodic._
+import pl.touk.nussknacker.ui.process.periodic.flink.db.InMemPeriodicProcessesRepository
+import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessDeploymentStatus
+import pl.touk.nussknacker.engine.api.deployment.scheduler.services.{EmptyListener, ProcessConfigEnricher}
+import pl.touk.nussknacker.ui.process.periodic.cron.CronSchedulePropertyExtractor
import java.time.{Clock, LocalDateTime, ZoneOffset}
import java.util.UUID
@@ -59,15 +58,15 @@ class PeriodicDeploymentManagerTest
)
class Fixture(executionConfig: PeriodicExecutionConfig = PeriodicExecutionConfig()) {
- val repository = new db.InMemPeriodicProcessesRepository(processingType = "testProcessingType")
- val delegateDeploymentManagerStub = new DeploymentManagerStub
- val jarManagerStub = new JarManagerStub
- val preparedDeploymentData = DeploymentData.withDeploymentId(UUID.randomUUID().toString)
+ val repository = new InMemPeriodicProcessesRepository(processingType = "testProcessingType")
+ val delegateDeploymentManagerStub = new DeploymentManagerStub
+ val scheduledExecutionPerformerStub = new ScheduledExecutionPerformerStub
+ val preparedDeploymentData = DeploymentData.withDeploymentId(UUID.randomUUID().toString)
val periodicProcessService = new PeriodicProcessService(
delegateDeploymentManager = delegateDeploymentManagerStub,
- jarManager = jarManagerStub,
- scheduledProcessesRepository = repository,
+ scheduledExecutionPerformer = scheduledExecutionPerformerStub,
+ periodicProcessesRepository = repository,
periodicProcessListener = EmptyListener,
additionalDeploymentDataProvider = DefaultAdditionalDeploymentDataProvider,
deploymentRetryConfig = DeploymentRetryConfig(),
@@ -76,15 +75,15 @@ class PeriodicDeploymentManagerTest
processConfigEnricher = ProcessConfigEnricher.identity,
clock = Clock.systemDefaultZone(),
new ProcessingTypeActionServiceStub,
- Map.empty
+ Map.empty,
)
val periodicDeploymentManager = new PeriodicDeploymentManager(
delegate = delegateDeploymentManagerStub,
service = periodicProcessService,
- repository = repository,
+ periodicProcessesRepository = repository,
schedulePropertyExtractor = CronSchedulePropertyExtractor(),
- toClose = () => ()
+ toClose = () => (),
)
def getAllowedActions(
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessDeploymentGen.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/PeriodicProcessDeploymentGen.scala
similarity index 53%
rename from engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessDeploymentGen.scala
rename to designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/PeriodicProcessDeploymentGen.scala
index cf813b7b011..dc7ab3014bd 100644
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessDeploymentGen.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/PeriodicProcessDeploymentGen.scala
@@ -1,14 +1,6 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic.flink
-import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.model.DeploymentWithJarData.WithCanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.model.{
- PeriodicProcessDeployment,
- PeriodicProcessDeploymentId,
- PeriodicProcessDeploymentState,
- PeriodicProcessDeploymentStatus,
- ScheduleName
-}
+import pl.touk.nussknacker.ui.process.periodic.model.{
+  PeriodicProcessDeployment,
+  PeriodicProcessDeploymentId,
+  PeriodicProcessDeploymentState,
+  PeriodicProcessDeploymentStatus,
+  ScheduleName
+}
import java.time.LocalDateTime
@@ -16,7 +8,7 @@ object PeriodicProcessDeploymentGen {
val now: LocalDateTime = LocalDateTime.now()
- def apply(): PeriodicProcessDeployment[WithCanonicalProcess] = {
+ def apply(): PeriodicProcessDeployment = {
PeriodicProcessDeployment(
id = PeriodicProcessDeploymentId(42),
periodicProcess = PeriodicProcessGen(),
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/PeriodicProcessGen.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/PeriodicProcessGen.scala
new file mode 100644
index 00000000000..7111db87542
--- /dev/null
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/PeriodicProcessGen.scala
@@ -0,0 +1,39 @@
+package pl.touk.nussknacker.ui.process.periodic.flink
+
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.{DeploymentWithRuntimeParams, RuntimeParams}
+import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId}
+import pl.touk.nussknacker.engine.build.ScenarioBuilder
+import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
+import pl.touk.nussknacker.ui.process.periodic.CronScheduleProperty
+import pl.touk.nussknacker.ui.process.periodic.cron.CronSchedulePropertyExtractor.CronPropertyDefaultName
+import pl.touk.nussknacker.ui.process.periodic.model.{PeriodicProcess, PeriodicProcessId}
+
+import java.time.LocalDateTime
+
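+// Test fixture producing a canned PeriodicProcess: a Quartz-style cron expression
+// ("0 0 * * * ?", i.e. the top of every hour) attached to placeholder deployment data.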
+object PeriodicProcessGen {
+
+ def apply(): PeriodicProcess = {
+ PeriodicProcess(
+ id = PeriodicProcessId(42),
+ deploymentData = DeploymentWithRuntimeParams(
+ processId = Some(ProcessId(1)),
+ processName = ProcessName(""),
+ versionId = VersionId.initialVersionId,
+ runtimeParams = RuntimeParams(Map("jarFileName" -> "jar-file-name.jar"))
+ ),
+ scheduleProperty = CronScheduleProperty("0 0 * * * ?"),
+ active = true,
+ createdAt = LocalDateTime.now(),
+      processActionId = None
+ )
+ }
+
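+  // Minimal streaming scenario carrying only the cron property (under its default name),
+  // for tests that need a schedulable CanonicalProcess.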
+ def buildCanonicalProcess(cronProperty: String = "0 0 * * * ?"): CanonicalProcess = {
+ ScenarioBuilder
+ .streaming("test")
+ .additionalFields(properties = Map(CronPropertyDefaultName -> cronProperty))
+ .source("test", "test")
+ .emptySink("test", "test")
+ }
+
+}
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessServiceTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/PeriodicProcessServiceTest.scala
similarity index 81%
rename from engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessServiceTest.scala
rename to designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/PeriodicProcessServiceTest.scala
index a258d5c2ac4..c8c631a1e5c 100644
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessServiceTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/PeriodicProcessServiceTest.scala
@@ -1,37 +1,29 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic.flink
import com.typesafe.config.{ConfigFactory, ConfigValueFactory}
+import org.scalatest.OptionValues
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.exceptions.TestFailedException
-import org.scalatest.prop.TableDrivenPropertyChecks
-import org.scalatest.OptionValues
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
+import org.scalatest.prop.TableDrivenPropertyChecks
import pl.touk.nussknacker.engine.api.ProcessVersion
-import pl.touk.nussknacker.engine.api.deployment.{DataFreshnessPolicy, ProcessActionId, ProcessingTypeActionServiceStub}
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.ScheduledDeploymentDetails
+import pl.touk.nussknacker.engine.api.deployment.scheduler.services._
+import pl.touk.nussknacker.engine.api.deployment.scheduler.services.ProcessConfigEnricher.EnrichedProcessConfig
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus.ProblemStateStatus
-import pl.touk.nussknacker.engine.api.process.ProcessName
+import pl.touk.nussknacker.engine.api.deployment.{DataFreshnessPolicy, ProcessActionId, ProcessingTypeActionServiceStub}
+import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId}
import pl.touk.nussknacker.engine.build.ScenarioBuilder
-import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.PeriodicProcessService.PeriodicProcessStatus
-import pl.touk.nussknacker.engine.management.periodic.db.PeriodicProcessesRepository.createPeriodicProcessDeployment
-import pl.touk.nussknacker.engine.management.periodic.model.DeploymentWithJarData.WithCanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.model.PeriodicProcessDeploymentStatus.PeriodicProcessDeploymentStatus
-import pl.touk.nussknacker.engine.management.periodic.model.{PeriodicProcessDeployment, PeriodicProcessDeploymentStatus}
-import pl.touk.nussknacker.engine.management.periodic.service.ProcessConfigEnricher.EnrichedProcessConfig
-import pl.touk.nussknacker.engine.management.periodic.service.{
- AdditionalDeploymentDataProvider,
- DeployedEvent,
- FailedOnDeployEvent,
- FailedOnRunEvent,
- FinishedEvent,
- PeriodicProcessEvent,
- PeriodicProcessListener,
- ProcessConfigEnricher,
- ScheduledEvent
-}
import pl.touk.nussknacker.test.PatientScalaFutures
+import pl.touk.nussknacker.ui.process.periodic.PeriodicProcessService.PeriodicProcessStatus
+import pl.touk.nussknacker.ui.process.periodic._
+import pl.touk.nussknacker.ui.process.periodic.flink.db.InMemPeriodicProcessesRepository
+import pl.touk.nussknacker.ui.process.periodic.flink.db.InMemPeriodicProcessesRepository.createPeriodicProcessDeployment
+import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessDeploymentStatus
+import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessDeploymentStatus.PeriodicProcessDeploymentStatus
import java.time.temporal.ChronoField
import java.time.{Clock, LocalDate, LocalDateTime}
@@ -63,22 +55,31 @@ class PeriodicProcessServiceTest
.source("start", "source")
.emptySink("end", "KafkaSink")
+ private val processVersion = ProcessVersion(
+ versionId = VersionId(1),
+ processName = processName,
+ processId = ProcessId(1),
+ labels = List.empty,
+ user = "testUser",
+ modelVersion = None,
+ )
+
class Fixture {
- val repository = new db.InMemPeriodicProcessesRepository(processingType = "testProcessingType")
- val delegateDeploymentManagerStub = new DeploymentManagerStub
- val jarManagerStub = new JarManagerStub
- val events = new ArrayBuffer[PeriodicProcessEvent]()
- val additionalData = Map("testMap" -> "testValue")
+ val repository = new InMemPeriodicProcessesRepository(processingType = "testProcessingType")
+ val delegateDeploymentManagerStub = new DeploymentManagerStub
+ val scheduledExecutionPerformerStub = new ScheduledExecutionPerformerStub
+ val events = new ArrayBuffer[ScheduledProcessEvent]()
+ val additionalData = Map("testMap" -> "testValue")
val actionService: ProcessingTypeActionServiceStub = new ProcessingTypeActionServiceStub
val periodicProcessService = new PeriodicProcessService(
delegateDeploymentManager = delegateDeploymentManagerStub,
- jarManager = jarManagerStub,
- scheduledProcessesRepository = repository,
- new PeriodicProcessListener {
+ scheduledExecutionPerformer = scheduledExecutionPerformerStub,
+ periodicProcessesRepository = repository,
+ new ScheduledProcessListener {
- override def onPeriodicProcessEvent: PartialFunction[PeriodicProcessEvent, Unit] = { case k =>
+ override def onScheduledProcessEvent: PartialFunction[ScheduledProcessEvent, Unit] = { case k =>
events.append(k)
}
@@ -86,9 +87,9 @@ class PeriodicProcessServiceTest
additionalDeploymentDataProvider = new AdditionalDeploymentDataProvider {
override def prepareAdditionalData(
- runDetails: PeriodicProcessDeployment[WithCanonicalProcess]
+ runDetails: ScheduledDeploymentDetails
): Map[String, String] =
- additionalData + ("runId" -> runDetails.id.value.toString)
+ additionalData + ("runId" -> runDetails.id.toString)
},
DeploymentRetryConfig(),
@@ -103,7 +104,7 @@ class PeriodicProcessServiceTest
EnrichedProcessConfig(
initialScheduleData.inputConfigDuringExecution.withValue(
"processName",
- ConfigValueFactory.fromAnyRef(initialScheduleData.canonicalProcess.name.value)
+ ConfigValueFactory.fromAnyRef(processName.value)
)
)
)
@@ -115,7 +116,7 @@ class PeriodicProcessServiceTest
Future.successful(
EnrichedProcessConfig(
deployData.inputConfigDuringExecution
- .withValue("runAt", ConfigValueFactory.fromAnyRef(deployData.deployment.runAt.toString))
+ .withValue("runAt", ConfigValueFactory.fromAnyRef(deployData.deploymentDetails.runAt.toString))
)
)
}
@@ -123,7 +124,7 @@ class PeriodicProcessServiceTest
},
Clock.systemDefaultZone(),
actionService,
- Map.empty
+ Map.empty,
)
}
@@ -185,7 +186,10 @@ class PeriodicProcessServiceTest
val finished :: scheduled :: Nil =
f.repository.deploymentEntities.map(createPeriodicProcessDeployment(processEntity, _)).toList
- f.events.toList shouldBe List(FinishedEvent(finished, None), ScheduledEvent(scheduled, firstSchedule = false))
+ f.events.toList shouldBe List(
+ FinishedEvent(finished.toDetails, canonicalProcess, None),
+ ScheduledEvent(scheduled.toDetails, firstSchedule = false)
+ )
}
  // The Flink job may not be available in the Flink console yet if checked too quickly after submission.
@@ -238,8 +242,12 @@ class PeriodicProcessServiceTest
val finished :: scheduled :: Nil =
f.repository.deploymentEntities.map(createPeriodicProcessDeployment(processEntity, _)).toList
f.events.toList shouldBe List(
- FinishedEvent(finished, f.delegateDeploymentManagerStub.jobStatus.get(processName).flatMap(_.headOption)),
- ScheduledEvent(scheduled, firstSchedule = false)
+ FinishedEvent(
+ finished.toDetails,
+ canonicalProcess,
+ f.delegateDeploymentManagerStub.jobStatus.get(processName).flatMap(_.headOption)
+ ),
+ ScheduledEvent(scheduled.toDetails, firstSchedule = false)
)
}
@@ -280,9 +288,13 @@ class PeriodicProcessServiceTest
f.repository.deploymentEntities.loneElement.status shouldBe PeriodicProcessDeploymentStatus.Finished
// TODO: active should be false
val event =
- createPeriodicProcessDeployment(processEntity.copy(active = true), f.repository.deploymentEntities.loneElement)
+ createPeriodicProcessDeployment(
+ processEntity.copy(active = true),
+ f.repository.deploymentEntities.loneElement,
+ )
f.events.loneElement shouldBe FinishedEvent(
- event,
+ event.toDetails,
+ canonicalProcess,
f.delegateDeploymentManagerStub.jobStatus.get(processName).flatMap(_.headOption)
)
}
@@ -330,7 +342,7 @@ class PeriodicProcessServiceTest
val f = new Fixture
f.periodicProcessService
- .schedule(CronScheduleProperty("0 0 * * * ?"), ProcessVersion.empty, canonicalProcess, randomProcessActionId)
+ .schedule(CronScheduleProperty("0 0 * * * ?"), processVersion, canonicalProcess, randomProcessActionId)
.futureValue
val processEntity = f.repository.processEntities.loneElement
@@ -342,7 +354,10 @@ class PeriodicProcessServiceTest
deploymentEntity.status shouldBe PeriodicProcessDeploymentStatus.Scheduled
f.events.toList shouldBe List(
- ScheduledEvent(createPeriodicProcessDeployment(processEntity, deploymentEntity), firstSchedule = true)
+ ScheduledEvent(
+ createPeriodicProcessDeployment(processEntity, deploymentEntity).toDetails,
+ firstSchedule = true
+ )
)
}
@@ -358,10 +373,11 @@ class PeriodicProcessServiceTest
processEntity.active shouldBe true
f.repository.deploymentEntities.loneElement.status shouldBe PeriodicProcessDeploymentStatus.Failed
- val expectedDetails = createPeriodicProcessDeployment(processEntity, f.repository.deploymentEntities.head)
+ val expectedDetails =
+ createPeriodicProcessDeployment(processEntity, f.repository.deploymentEntities.head)
f.events.toList shouldBe List(
FailedOnRunEvent(
- expectedDetails,
+ expectedDetails.toDetails,
f.delegateDeploymentManagerStub.jobStatus.get(processName).flatMap(_.headOption)
)
)
@@ -372,7 +388,7 @@ class PeriodicProcessServiceTest
f.repository.addActiveProcess(processName, PeriodicProcessDeploymentStatus.Scheduled)
val toSchedule = createPeriodicProcessDeployment(
f.repository.processEntities.loneElement,
- f.repository.deploymentEntities.loneElement
+ f.repository.deploymentEntities.loneElement,
)
f.periodicProcessService.deploy(toSchedule).futureValue
@@ -380,21 +396,22 @@ class PeriodicProcessServiceTest
val deploymentEntity = f.repository.deploymentEntities.loneElement
deploymentEntity.status shouldBe PeriodicProcessDeploymentStatus.Deployed
ConfigFactory
- .parseString(f.jarManagerStub.lastDeploymentWithJarData.value.inputConfigDuringExecutionJson)
+ .parseString(f.scheduledExecutionPerformerStub.lastInputConfigDuringExecutionJson.value)
.getString("runAt") shouldBe deploymentEntity.runAt.toString
- val expectedDetails = createPeriodicProcessDeployment(f.repository.processEntities.loneElement, deploymentEntity)
- f.events.toList shouldBe List(DeployedEvent(expectedDetails, None))
+ val expectedDetails =
+ createPeriodicProcessDeployment(f.repository.processEntities.loneElement, deploymentEntity)
+ f.events.toList shouldBe List(DeployedEvent(expectedDetails.toDetails, None))
}
test("deploy - should handle failed deployment") {
val f = new Fixture
f.repository.addActiveProcess(processName, PeriodicProcessDeploymentStatus.Scheduled)
- f.jarManagerStub.deployWithJarFuture = Future.failed(new RuntimeException("Flink deploy error"))
+ f.scheduledExecutionPerformerStub.deployWithJarFuture = Future.failed(new RuntimeException("Flink deploy error"))
val toSchedule = createPeriodicProcessDeployment(
f.repository.processEntities.loneElement,
- f.repository.deploymentEntities.loneElement
+ f.repository.deploymentEntities.loneElement,
)
f.periodicProcessService.deploy(toSchedule).futureValue
@@ -403,9 +420,9 @@ class PeriodicProcessServiceTest
val expectedDetails = createPeriodicProcessDeployment(
f.repository.processEntities.loneElement,
- f.repository.deploymentEntities.loneElement
+ f.repository.deploymentEntities.loneElement,
)
- f.events.toList shouldBe List(FailedOnDeployEvent(expectedDetails, None))
+ f.events.toList shouldBe List(FailedOnDeployEvent(expectedDetails.toDetails, None))
}
test("Schedule new scenario only if at least one date in the future") {
@@ -413,7 +430,7 @@ class PeriodicProcessServiceTest
def tryToSchedule(schedule: ScheduleProperty): Unit =
f.periodicProcessService
- .schedule(schedule, ProcessVersion.empty, canonicalProcess, randomProcessActionId)
+ .schedule(schedule, processVersion, canonicalProcess, randomProcessActionId)
.futureValue
tryToSchedule(cronInFuture) shouldBe (())
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessStateDefinitionManagerTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/PeriodicProcessStateDefinitionManagerTest.scala
similarity index 86%
rename from engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessStateDefinitionManagerTest.scala
rename to designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/PeriodicProcessStateDefinitionManagerTest.scala
index 77e551054a2..49f51d166d1 100644
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessStateDefinitionManagerTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/PeriodicProcessStateDefinitionManagerTest.scala
@@ -1,4 +1,4 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic.flink
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
@@ -6,10 +6,11 @@ import pl.touk.nussknacker.engine.api.deployment.ProcessStateDefinitionManager.P
import pl.touk.nussknacker.engine.api.deployment.ScenarioActionName
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
import pl.touk.nussknacker.engine.api.process.VersionId
-import pl.touk.nussknacker.engine.management.periodic.PeriodicProcessService.{DeploymentStatus, PeriodicProcessStatus}
-import pl.touk.nussknacker.engine.management.periodic.PeriodicProcessStateDefinitionManager.statusTooltip
-import pl.touk.nussknacker.engine.management.periodic.PeriodicStateStatus.ScheduledStatus
-import pl.touk.nussknacker.engine.management.periodic.model._
+import pl.touk.nussknacker.ui.process.periodic.PeriodicProcessService.{PeriodicDeploymentStatus, PeriodicProcessStatus}
+import pl.touk.nussknacker.ui.process.periodic.PeriodicProcessStateDefinitionManager.statusTooltip
+import pl.touk.nussknacker.ui.process.periodic.PeriodicStateStatus
+import pl.touk.nussknacker.ui.process.periodic.PeriodicStateStatus.ScheduledStatus
+import pl.touk.nussknacker.ui.process.periodic.model._
import java.time.LocalDateTime
import java.util.concurrent.atomic.AtomicLong
@@ -29,7 +30,7 @@ class PeriodicProcessStateDefinitionManagerTest extends AnyFunSuite with Matcher
private val nextScheduleId = new AtomicLong()
test("display periodic deployment status for not named schedule") {
- val deploymentStatus = DeploymentStatus(
+ val deploymentStatus = PeriodicDeploymentStatus(
generateDeploymentId,
notNamedScheduleId,
fooCreatedAt,
@@ -44,7 +45,7 @@ class PeriodicProcessStateDefinitionManagerTest extends AnyFunSuite with Matcher
test("display sorted periodic deployment status for named schedules") {
val firstScheduleId = generateScheduleId
- val firstDeploymentStatus = DeploymentStatus(
+ val firstDeploymentStatus = PeriodicDeploymentStatus(
generateDeploymentId,
firstScheduleId,
fooCreatedAt.minusMinutes(1),
@@ -54,7 +55,7 @@ class PeriodicProcessStateDefinitionManagerTest extends AnyFunSuite with Matcher
None
)
val secScheduleId = generateScheduleId
- val secDeploymentStatus = DeploymentStatus(
+ val secDeploymentStatus = PeriodicDeploymentStatus(
generateDeploymentId,
secScheduleId,
fooCreatedAt,
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/RescheduleFinishedActorTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/RescheduleFinishedActorTest.scala
similarity index 91%
rename from engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/RescheduleFinishedActorTest.scala
rename to designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/RescheduleFinishedActorTest.scala
index 720364a78d4..aa05869e6ec 100644
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/RescheduleFinishedActorTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/RescheduleFinishedActorTest.scala
@@ -1,10 +1,11 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic.flink
import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestKitBase, TestProbe}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import org.scalatest.BeforeAndAfterAll
+import pl.touk.nussknacker.ui.process.periodic.RescheduleFinishedActor
import scala.concurrent.Future
import scala.concurrent.duration._
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/ScheduledExecutionPerformerStub.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/ScheduledExecutionPerformerStub.scala
new file mode 100644
index 00000000000..40a4d76ca41
--- /dev/null
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/ScheduledExecutionPerformerStub.scala
@@ -0,0 +1,49 @@
+package pl.touk.nussknacker.ui.process.periodic.flink
+
+import com.typesafe.config.ConfigFactory
+import pl.touk.nussknacker.engine.api.ProcessVersion
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.{DeploymentWithRuntimeParams, RuntimeParams}
+import pl.touk.nussknacker.engine.api.deployment.scheduler.services.ScheduledExecutionPerformer
+import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
+import pl.touk.nussknacker.engine.deployment.{DeploymentData, ExternalDeploymentId}
+import pl.touk.nussknacker.engine.modelconfig.InputConfigDuringExecution
+
+import scala.concurrent.Future
+
+class ScheduledExecutionPerformerStub extends ScheduledExecutionPerformer {
+
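+  // Mutable test knobs: deployWithJarFuture lets a test force the next deployment to succeed
+  // or fail, while the last* fields capture the arguments of the most recent deploy call.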
+ var deployWithJarFuture: Future[Option[ExternalDeploymentId]] = Future.successful(None)
+ var lastDeploymentWithRuntimeParams: Option[DeploymentWithRuntimeParams] = None
+ var lastInputConfigDuringExecutionJson: Option[String] = None
+
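+  // Builds a deployment descriptor without touching the filesystem; the empty jarFileName
+  // stands in for the jar that the real performer would have prepared.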
+ override def prepareDeploymentWithRuntimeParams(
+ processVersion: ProcessVersion,
+ ): Future[DeploymentWithRuntimeParams] = {
+ Future.successful(
+ DeploymentWithRuntimeParams(
+ processId = Some(processVersion.processId),
+ processName = processVersion.processName,
+ versionId = processVersion.versionId,
+ runtimeParams = RuntimeParams(Map("jarFileName" -> ""))
+ )
+ )
+ }
+
+ override def provideInputConfigDuringExecutionJson(): Future[InputConfigDuringExecution] =
+ Future.successful(InputConfigDuringExecution(ConfigFactory.parseString("")))
+
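+  // Records the call and completes with whatever deployWithJarFuture is currently set to.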
+ override def deployWithRuntimeParams(
+ deploymentWithJarData: DeploymentWithRuntimeParams,
+ inputConfigDuringExecutionJson: String,
+ deploymentData: DeploymentData,
+ canonicalProcess: CanonicalProcess,
+ processVersion: ProcessVersion,
+ ): Future[Option[ExternalDeploymentId]] = {
+ lastDeploymentWithRuntimeParams = Some(deploymentWithJarData)
+ lastInputConfigDuringExecutionJson = Some(inputConfigDuringExecutionJson)
+ deployWithJarFuture
+ }
+
+ override def cleanAfterDeployment(runtimeParams: RuntimeParams): Future[Unit] = Future.successful(())
+
+}
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/JarManagerTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/ScheduledExecutionPerformerTest.scala
similarity index 53%
rename from engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/JarManagerTest.scala
rename to designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/ScheduledExecutionPerformerTest.scala
index b63433a8795..2dc0e14252d 100644
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/JarManagerTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/ScheduledExecutionPerformerTest.scala
@@ -1,28 +1,26 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic.flink
import com.typesafe.config.ConfigFactory
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
+import pl.touk.nussknacker.engine.api.ProcessVersion
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.{DeploymentWithRuntimeParams, RuntimeParams}
+import pl.touk.nussknacker.engine.api.deployment.scheduler.services.ScheduledExecutionPerformer
import pl.touk.nussknacker.engine.api.process.{ProcessName, VersionId}
-import pl.touk.nussknacker.engine.api.{MetaData, ProcessVersion, StreamMetaData}
-import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.management.FlinkModelJarProvider
-import pl.touk.nussknacker.engine.management.periodic.flink.FlinkJarManager
-import pl.touk.nussknacker.engine.management.periodic.model.DeploymentWithJarData
+import pl.touk.nussknacker.engine.management.{FlinkModelJarProvider, FlinkScheduledExecutionPerformer}
import pl.touk.nussknacker.engine.modelconfig.InputConfigDuringExecution
import pl.touk.nussknacker.test.PatientScalaFutures
import java.nio.file.{Files, Path, Paths}
import scala.concurrent.Future
-class JarManagerTest extends AnyFunSuite with Matchers with ScalaFutures with PatientScalaFutures {
+class ScheduledExecutionPerformerTest extends AnyFunSuite with Matchers with ScalaFutures with PatientScalaFutures {
private val processName = "test"
private val processVersionId = 5
private val processVersion =
ProcessVersion.empty.copy(processName = ProcessName(processName), versionId = VersionId(processVersionId))
- private val process = CanonicalProcess(MetaData("foo", StreamMetaData()), Nil)
private val jarsDir = Files.createTempDirectory("jars-dir")
private val modelJarFileContent = "abc".getBytes
@@ -34,14 +32,14 @@ class JarManagerTest extends AnyFunSuite with Matchers with ScalaFutures with Pa
private val currentModelUrls = List(currentModelJarFile.toURI.toURL)
- private val jarManager = createJarManager(jarsDir = jarsDir)
+ private val scheduledExecutionPerformer = createScheduledExecutionPerformer(jarsDir = jarsDir)
- private def createJarManager(
+ private def createScheduledExecutionPerformer(
jarsDir: Path,
modelJarProvider: FlinkModelJarProvider = new FlinkModelJarProvider(currentModelUrls)
- ): JarManager = {
+ ): ScheduledExecutionPerformer = {
- new FlinkJarManager(
+ new FlinkScheduledExecutionPerformer(
flinkClient = new FlinkClientStub,
jarsDir = jarsDir,
inputConfigDuringExecution = InputConfigDuringExecution(ConfigFactory.empty()),
@@ -50,9 +48,9 @@ class JarManagerTest extends AnyFunSuite with Matchers with ScalaFutures with Pa
}
test("prepareDeploymentWithJar - should copy to local dir") {
- val result = jarManager.prepareDeploymentWithJar(processVersion, process)
+ val result = scheduledExecutionPerformer.prepareDeploymentWithRuntimeParams(processVersion)
- val copiedJarFileName = result.futureValue.jarFileName
+ val copiedJarFileName = result.futureValue.runtimeParams.params("jarFileName")
copiedJarFileName should fullyMatch regex s"^$processName-$processVersionId-\\d+\\.jar$$"
val copiedJarFile = jarsDir.resolve(copiedJarFileName)
Files.exists(copiedJarFile) shouldBe true
@@ -60,33 +58,33 @@ class JarManagerTest extends AnyFunSuite with Matchers with ScalaFutures with Pa
}
test("prepareDeploymentWithJar - should handle disappearing model JAR") {
- val modelJarProvider = new FlinkModelJarProvider(currentModelUrls)
- val jarManager = createJarManager(jarsDir, modelJarProvider)
+ val modelJarProvider = new FlinkModelJarProvider(currentModelUrls)
+ val scheduledExecutionPerformer = createScheduledExecutionPerformer(jarsDir, modelJarProvider)
- def verifyAndDeleteJar(result: Future[DeploymentWithJarData.WithCanonicalProcess]): Unit = {
- val copiedJarFile = jarsDir.resolve(result.futureValue.jarFileName)
+ def verifyAndDeleteJar(result: Future[DeploymentWithRuntimeParams]): Unit = {
+ val copiedJarFile = jarsDir.resolve(result.futureValue.runtimeParams.params("jarFileName"))
Files.exists(copiedJarFile) shouldBe true
Files.readAllBytes(copiedJarFile) shouldBe modelJarFileContent
Files.delete(copiedJarFile)
}
- verifyAndDeleteJar(jarManager.prepareDeploymentWithJar(processVersion, process))
+ verifyAndDeleteJar(scheduledExecutionPerformer.prepareDeploymentWithRuntimeParams(processVersion))
modelJarProvider.getJobJar().delete() shouldBe true
- verifyAndDeleteJar(jarManager.prepareDeploymentWithJar(processVersion, process))
+ verifyAndDeleteJar(scheduledExecutionPerformer.prepareDeploymentWithRuntimeParams(processVersion))
}
test("prepareDeploymentWithJar - should create jars dir if not exists") {
- val tmpDir = System.getProperty("java.io.tmpdir")
- val jarsDir = Paths.get(tmpDir, s"jars-dir-not-exists-${System.currentTimeMillis()}")
- val jarManager = createJarManager(jarsDir = jarsDir)
+ val tmpDir = System.getProperty("java.io.tmpdir")
+ val jarsDir = Paths.get(tmpDir, s"jars-dir-not-exists-${System.currentTimeMillis()}")
+ val scheduledExecutionPerformer = createScheduledExecutionPerformer(jarsDir = jarsDir)
Files.exists(jarsDir) shouldBe false
- val result = jarManager.prepareDeploymentWithJar(processVersion, process)
+ val result = scheduledExecutionPerformer.prepareDeploymentWithRuntimeParams(processVersion)
- val copiedJarFileName = result.futureValue.jarFileName
+ val copiedJarFileName = result.futureValue.runtimeParams.params("jarFileName")
Files.exists(jarsDir) shouldBe true
Files.exists(jarsDir.resolve(copiedJarFileName)) shouldBe true
}
@@ -96,13 +94,14 @@ class JarManagerTest extends AnyFunSuite with Matchers with ScalaFutures with Pa
val jarPath = jarsDir.resolve(jarFileName)
Files.copy(currentModelJarFile.toPath, jarPath)
- jarManager.deleteJar(jarFileName).futureValue
+ scheduledExecutionPerformer.cleanAfterDeployment(RuntimeParams(Map("jarFileName" -> jarFileName))).futureValue
Files.exists(jarPath) shouldBe false
}
test("deleteJar - should handle not existing file") {
- val result = jarManager.deleteJar("unknown.jar").futureValue
+ val result =
+ scheduledExecutionPerformer.cleanAfterDeployment(RuntimeParams(Map("jarFileName" -> "unknown.jar"))).futureValue
result shouldBe (())
}
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/UtilsSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/UtilsSpec.scala
similarity index 94%
rename from engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/UtilsSpec.scala
rename to designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/UtilsSpec.scala
index a6d53da9c16..ace8ffe0fc7 100644
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/UtilsSpec.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/UtilsSpec.scala
@@ -1,4 +1,4 @@
-package pl.touk.nussknacker.engine.management.periodic
+package pl.touk.nussknacker.ui.process.periodic.flink
import akka.actor.{Actor, ActorSystem, Props}
import akka.testkit.TestKit
@@ -6,7 +6,8 @@ import com.typesafe.scalalogging.LazyLogging
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
-import pl.touk.nussknacker.engine.management.periodic.Utils.createActorWithRetry
+import pl.touk.nussknacker.ui.process.periodic.Utils
+import pl.touk.nussknacker.ui.process.periodic.Utils.createActorWithRetry
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/db/InMemPeriodicProcessesRepository.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/db/InMemPeriodicProcessesRepository.scala
similarity index 53%
rename from engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/db/InMemPeriodicProcessesRepository.scala
rename to designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/db/InMemPeriodicProcessesRepository.scala
index 2ebb103de28..16a14532615 100644
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/db/InMemPeriodicProcessesRepository.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/periodic/flink/db/InMemPeriodicProcessesRepository.scala
@@ -1,20 +1,18 @@
-package pl.touk.nussknacker.engine.management.periodic.db
+package pl.touk.nussknacker.ui.process.periodic.flink.db
-import cats.{Id, Monad}
import io.circe.syntax.EncoderOps
+import pl.touk.nussknacker.engine.api.ProcessVersion
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.{DeploymentWithRuntimeParams, RuntimeParams}
import pl.touk.nussknacker.engine.api.deployment.ProcessActionId
-import pl.touk.nussknacker.engine.api.process.{ProcessName, VersionId}
+import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId}
import pl.touk.nussknacker.engine.build.ScenarioBuilder
-import pl.touk.nussknacker.engine.management.periodic._
-import pl.touk.nussknacker.engine.management.periodic.db.InMemPeriodicProcessesRepository.{
- DeploymentIdSequence,
- ProcessIdSequence,
- getLatestDeploymentQueryCount
-}
-import pl.touk.nussknacker.engine.management.periodic.db.PeriodicProcessesRepository.createPeriodicProcessDeployment
-import pl.touk.nussknacker.engine.management.periodic.model.DeploymentWithJarData.WithCanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.model.PeriodicProcessDeploymentStatus.PeriodicProcessDeploymentStatus
-import pl.touk.nussknacker.engine.management.periodic.model._
+import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
+import pl.touk.nussknacker.ui.process.periodic._
+import pl.touk.nussknacker.ui.process.periodic.flink.db.InMemPeriodicProcessesRepository._
+import pl.touk.nussknacker.ui.process.periodic.model.PeriodicProcessDeploymentStatus.PeriodicProcessDeploymentStatus
+import pl.touk.nussknacker.ui.process.periodic.model._
+import pl.touk.nussknacker.ui.process.repository.PeriodicProcessesRepository
import java.time.chrono.ChronoLocalDateTime
import java.time.{LocalDateTime, ZoneId}
@@ -24,25 +22,23 @@ import scala.collection.mutable.ListBuffer
import scala.concurrent.Future
import scala.util.Random
-object InMemPeriodicProcessesRepository {
- private val ProcessIdSequence = new AtomicLong(0)
- private val DeploymentIdSequence = new AtomicLong(0)
-
- val getLatestDeploymentQueryCount = new AtomicLong(0)
-}
-
class InMemPeriodicProcessesRepository(processingType: String) extends PeriodicProcessesRepository {
- var processEntities: mutable.ListBuffer[PeriodicProcessEntityWithJson] = ListBuffer.empty
- var deploymentEntities: mutable.ListBuffer[PeriodicProcessDeploymentEntity] = ListBuffer.empty
+ override type Action[T] = Future[T]
- private implicit val localDateOrdering: Ordering[LocalDateTime] = Ordering.by(identity[ChronoLocalDateTime[_]])
+ override def run[T](action: Future[T]): Future[T] = action
- override type Action[T] = Id[T]
+ var processEntities: mutable.ListBuffer[TestPeriodicProcessEntity] = ListBuffer.empty
+ var deploymentEntities: mutable.ListBuffer[TestPeriodicProcessDeploymentEntity] = ListBuffer.empty
- override implicit def monad: Monad[Id] = cats.catsInstancesForId
+ private def canonicalProcess(processName: ProcessName) = {
+ ScenarioBuilder
+ .streaming(processName.value)
+ .source("start", "source")
+ .emptySink("end", "KafkaSink")
+ }
- override def run[T](action: Id[T]): Future[T] = Future.successful(action)
+ private implicit val localDateOrdering: Ordering[LocalDateTime] = Ordering.by(identity[ChronoLocalDateTime[_]])
def addActiveProcess(
processName: ProcessName,
@@ -71,17 +67,14 @@ class InMemPeriodicProcessesRepository(processingType: String) extends PeriodicP
processActionId: Option[ProcessActionId] = None
): PeriodicProcessId = {
val id = PeriodicProcessId(ProcessIdSequence.incrementAndGet())
- val entity = PeriodicProcessEntityWithJson(
+ val entity = TestPeriodicProcessEntity(
id = id,
+ processId = None,
processName = processName,
processVersionId = VersionId.initialVersionId,
processingType = processingType,
- processJson = ScenarioBuilder
- .streaming(processName.value)
- .source("start", "source")
- .emptySink("end", "KafkaSink"),
inputConfigDuringExecutionJson = "{}",
- jarFileName = "",
+ runtimeParams = RuntimeParams(Map("jarFileName" -> "")),
scheduleProperty = scheduleProperty.asJson.noSpaces,
active = true,
createdAt = LocalDateTime.now(),
@@ -100,7 +93,7 @@ class InMemPeriodicProcessesRepository(processingType: String) extends PeriodicP
deployedAt: Option[LocalDateTime] = None,
): PeriodicProcessDeploymentId = {
val id = PeriodicProcessDeploymentId(DeploymentIdSequence.incrementAndGet())
- val entity = PeriodicProcessDeploymentEntity(
+ val entity = TestPeriodicProcessDeploymentEntity(
id = id,
periodicProcessId = periodicProcessId,
createdAt = LocalDateTime.now(),
@@ -119,46 +112,49 @@ class InMemPeriodicProcessesRepository(processingType: String) extends PeriodicP
override def getSchedulesState(
scenarioName: ProcessName,
after: Option[LocalDateTime],
- ): Action[SchedulesState] = {
+ ): Future[SchedulesState] = Future.successful {
val filteredProcesses = processEntities.filter { pe =>
pe.processName == scenarioName && deploymentEntities.exists(d => d.periodicProcessId == pe.id)
}.toSeq
getLatestDeploymentsForPeriodicProcesses(filteredProcesses, deploymentsPerScheduleMaxCount = Int.MaxValue)
}
- override def markInactive(processId: PeriodicProcessId): Unit =
+ override def markInactive(processId: PeriodicProcessId): Future[Unit] = Future.successful {
processEntities.zipWithIndex
.find { case (process, _) => process.id == processId }
.foreach { case (process, index) =>
processEntities.update(index, process.copy(active = false))
}
+ }
override def create(
- deploymentWithJarData: DeploymentWithJarData.WithCanonicalProcess,
+ deploymentWithRuntimeParams: DeploymentWithRuntimeParams,
+ inputConfigDuringExecutionJson: String,
+ canonicalProcess: CanonicalProcess,
scheduleProperty: ScheduleProperty,
processActionId: ProcessActionId,
- ): PeriodicProcess[WithCanonicalProcess] = {
+ ): Future[PeriodicProcess] = Future.successful {
val id = PeriodicProcessId(Random.nextLong())
- val periodicProcess = PeriodicProcessEntityWithJson(
+ val periodicProcess = TestPeriodicProcessEntity(
id = id,
- processName = deploymentWithJarData.processVersion.processName,
- processVersionId = deploymentWithJarData.processVersion.versionId,
+ processId = deploymentWithRuntimeParams.processId,
+ processName = deploymentWithRuntimeParams.processName,
+ processVersionId = deploymentWithRuntimeParams.versionId,
processingType = processingType,
- processJson = deploymentWithJarData.process,
- inputConfigDuringExecutionJson = deploymentWithJarData.inputConfigDuringExecutionJson,
- jarFileName = deploymentWithJarData.jarFileName,
+ inputConfigDuringExecutionJson = inputConfigDuringExecutionJson,
+ runtimeParams = deploymentWithRuntimeParams.runtimeParams,
scheduleProperty = scheduleProperty.asJson.noSpaces,
active = true,
createdAt = LocalDateTime.now(),
processActionId = Some(processActionId)
)
processEntities += periodicProcess
- PeriodicProcessesRepository.createPeriodicProcessWithJson(periodicProcess)
+ createPeriodicProcessWithJson(periodicProcess)
}
override def findActiveSchedulesForProcessesHavingDeploymentWithMatchingStatus(
- expectedDeploymentStatuses: Set[PeriodicProcessDeploymentStatus]
- ): Action[SchedulesState] = {
+ expectedDeploymentStatuses: Set[PeriodicProcessDeploymentStatus],
+ ): Future[SchedulesState] = Future.successful {
val filteredProcesses = processEntities.filter { pe =>
pe.processingType == processingType &&
deploymentEntities.exists(d => d.periodicProcessId == pe.id && expectedDeploymentStatuses.contains(d.status))
@@ -168,8 +164,8 @@ class InMemPeriodicProcessesRepository(processingType: String) extends PeriodicP
override def getLatestDeploymentsForActiveSchedules(
processName: ProcessName,
- deploymentsPerScheduleMaxCount: Int
- ): Action[SchedulesState] = {
+ deploymentsPerScheduleMaxCount: Int,
+ ): Future[SchedulesState] = Future.successful {
getLatestDeploymentQueryCount.incrementAndGet()
getLatestDeploymentsForPeriodicProcesses(
processEntities(processName).filter(_.active),
@@ -179,7 +175,7 @@ class InMemPeriodicProcessesRepository(processingType: String) extends PeriodicP
override def getLatestDeploymentsForActiveSchedules(
deploymentsPerScheduleMaxCount: Int
- ): Action[Map[ProcessName, SchedulesState]] = {
+ ): Future[Map[ProcessName, SchedulesState]] = Future.successful {
getLatestDeploymentQueryCount.incrementAndGet()
allProcessEntities.map { case (processName, list) =>
processName -> getLatestDeploymentsForPeriodicProcesses(
@@ -192,8 +188,8 @@ class InMemPeriodicProcessesRepository(processingType: String) extends PeriodicP
override def getLatestDeploymentsForLatestInactiveSchedules(
processName: ProcessName,
inactiveProcessesMaxCount: Int,
- deploymentsPerScheduleMaxCount: Int
- ): Action[SchedulesState] = {
+ deploymentsPerScheduleMaxCount: Int,
+ ): Future[SchedulesState] = Future.successful {
getLatestDeploymentQueryCount.incrementAndGet()
val filteredProcesses =
processEntities(processName).filterNot(_.active).sortBy(_.createdAt).takeRight(inactiveProcessesMaxCount)
@@ -203,7 +199,7 @@ class InMemPeriodicProcessesRepository(processingType: String) extends PeriodicP
override def getLatestDeploymentsForLatestInactiveSchedules(
inactiveProcessesMaxCount: Int,
deploymentsPerScheduleMaxCount: Int
- ): Action[Map[ProcessName, SchedulesState]] = {
+ ): Future[Map[ProcessName, SchedulesState]] = Future.successful {
getLatestDeploymentQueryCount.incrementAndGet()
allProcessEntities.map { case (processName, list) =>
processName -> getLatestDeploymentsForPeriodicProcesses(
@@ -214,9 +210,9 @@ class InMemPeriodicProcessesRepository(processingType: String) extends PeriodicP
}
private def getLatestDeploymentsForPeriodicProcesses(
- processes: Seq[PeriodicProcessEntity],
+ processes: Seq[TestPeriodicProcessEntity],
deploymentsPerScheduleMaxCount: Int
- ): SchedulesState =
+ ): SchedulesState = {
SchedulesState((for {
process <- processes
deploymentGroupedByScheduleName <- deploymentEntities
@@ -227,49 +223,48 @@ class InMemPeriodicProcessesRepository(processingType: String) extends PeriodicP
val ds = deployments
.sortBy(d => -d.runAt.atZone(ZoneId.systemDefault()).toInstant.toEpochMilli)
.take(deploymentsPerScheduleMaxCount)
- .map(ScheduleDeploymentData(_))
+ .map(scheduleDeploymentData(_))
.toList
- scheduleId -> ScheduleData(PeriodicProcessesRepository.createPeriodicProcessWithoutJson(process), ds)
+ scheduleId -> ScheduleData(createPeriodicProcessWithoutJson(process), ds)
}
} yield deploymentGroupedByScheduleName).toMap)
+ }
- override def findToBeDeployed: Seq[PeriodicProcessDeployment[WithCanonicalProcess]] = {
+ override def findToBeDeployed: Future[Seq[PeriodicProcessDeployment]] = {
val scheduled = findActive(PeriodicProcessDeploymentStatus.Scheduled)
readyToRun(scheduled)
}
- override def findToBeRetried: Action[Seq[PeriodicProcessDeployment[WithCanonicalProcess]]] = {
+ override def findToBeRetried: Future[Seq[PeriodicProcessDeployment]] = {
val toBeRetried = findActive(PeriodicProcessDeploymentStatus.FailedOnDeploy).filter(_.retriesLeft > 0)
readyToRun(toBeRetried)
}
- override def findProcessData(id: PeriodicProcessDeploymentId): PeriodicProcessDeployment[WithCanonicalProcess] =
+ override def findProcessData(
+ id: PeriodicProcessDeploymentId,
+ ): Future[PeriodicProcessDeployment] = Future.successful {
(for {
d <- deploymentEntities if d.id == id
p <- processEntities if p.id == d.periodicProcessId
} yield createPeriodicProcessDeployment(p, d)).head
+ }
- override def findProcessData(processName: ProcessName): Seq[PeriodicProcess[WithCanonicalProcess]] =
- processEntities(processName)
- .filter(_.active)
- .map(PeriodicProcessesRepository.createPeriodicProcessWithJson)
-
- private def allProcessEntities: Map[ProcessName, Seq[PeriodicProcessEntity]] =
+ private def processEntities(processName: ProcessName): Seq[TestPeriodicProcessEntity] =
processEntities
- .filter(process => process.processingType == processingType)
+ .filter(process => process.processName == processName && process.processingType == processingType)
.toSeq
- .groupBy(_.processName)
- private def processEntities(processName: ProcessName): Seq[PeriodicProcessEntityWithJson] =
+ private def allProcessEntities: Map[ProcessName, Seq[TestPeriodicProcessEntity]] =
processEntities
- .filter(process => process.processName == processName && process.processingType == processingType)
+ .filter(process => process.processingType == processingType)
.toSeq
+ .groupBy(_.processName)
- override def markDeployed(id: PeriodicProcessDeploymentId): Unit = {
+ override def markDeployed(id: PeriodicProcessDeploymentId): Future[Unit] = Future.successful {
update(id)(_.copy(status = PeriodicProcessDeploymentStatus.Deployed, deployedAt = Some(LocalDateTime.now())))
}
- override def markFinished(id: PeriodicProcessDeploymentId): Unit = {
+ override def markFinished(id: PeriodicProcessDeploymentId): Future[Unit] = Future.successful {
update(id)(_.copy(status = PeriodicProcessDeploymentStatus.Finished, completedAt = Some(LocalDateTime.now())))
}
@@ -278,7 +273,7 @@ class InMemPeriodicProcessesRepository(processingType: String) extends PeriodicP
status: PeriodicProcessDeploymentStatus,
deployRetries: Int,
retryAt: Option[LocalDateTime]
- ): Action[Unit] = {
+ ): Future[Unit] = Future.successful {
update(id)(
_.copy(
status = status,
@@ -289,7 +284,7 @@ class InMemPeriodicProcessesRepository(processingType: String) extends PeriodicP
)
}
- override def markFailed(id: PeriodicProcessDeploymentId): Unit = {
+ override def markFailed(id: PeriodicProcessDeploymentId): Future[Unit] = Future.successful {
update(id)(
_.copy(
status = PeriodicProcessDeploymentStatus.Failed,
@@ -302,9 +297,9 @@ class InMemPeriodicProcessesRepository(processingType: String) extends PeriodicP
id: PeriodicProcessId,
scheduleName: ScheduleName,
runAt: LocalDateTime,
- deployMaxRetries: Int
- ): PeriodicProcessDeployment[WithCanonicalProcess] = {
- val deploymentEntity = PeriodicProcessDeploymentEntity(
+ deployMaxRetries: Int,
+ ): Future[PeriodicProcessDeployment] = Future.successful {
+ val deploymentEntity = TestPeriodicProcessDeploymentEntity(
id = PeriodicProcessDeploymentId(Random.nextLong()),
periodicProcessId = id,
createdAt = LocalDateTime.now(),
@@ -317,12 +312,13 @@ class InMemPeriodicProcessesRepository(processingType: String) extends PeriodicP
status = PeriodicProcessDeploymentStatus.Scheduled
)
deploymentEntities += deploymentEntity
- createPeriodicProcessDeployment(processEntities.find(_.id == id).head, deploymentEntity)
+ val processEntity = processEntities.find(_.id == id).head
+ createPeriodicProcessDeployment(processEntity, deploymentEntity)
}
private def update(
id: PeriodicProcessDeploymentId
- )(action: PeriodicProcessDeploymentEntity => PeriodicProcessDeploymentEntity): Unit = {
+ )(action: TestPeriodicProcessDeploymentEntity => TestPeriodicProcessDeploymentEntity): Unit = {
deploymentEntities.zipWithIndex
.find { case (deployment, _) => deployment.id == id }
.foreach { case (deployment, index) =>
@@ -330,26 +326,157 @@ class InMemPeriodicProcessesRepository(processingType: String) extends PeriodicP
}
}
- private def findActive(
- status: PeriodicProcessDeploymentStatus
- ): Seq[PeriodicProcessDeployment[WithCanonicalProcess]] =
+ private def findActive(status: PeriodicProcessDeploymentStatus): Seq[PeriodicProcessDeployment] =
findActive(
Seq(status)
)
private def findActive(
statusList: Seq[PeriodicProcessDeploymentStatus]
- ): Seq[PeriodicProcessDeployment[WithCanonicalProcess]] =
+ ): Seq[PeriodicProcessDeployment] =
(for {
p <- processEntities if p.active && p.processingType == processingType
d <- deploymentEntities if d.periodicProcessId == p.id && statusList.contains(d.status)
} yield createPeriodicProcessDeployment(p, d)).toSeq
private def readyToRun(
- deployments: Seq[PeriodicProcessDeployment[WithCanonicalProcess]]
- ): Seq[PeriodicProcessDeployment[WithCanonicalProcess]] = {
+ deployments: Seq[PeriodicProcessDeployment]
+ ): Future[Seq[PeriodicProcessDeployment]] = {
val now = LocalDateTime.now()
- deployments.filter(d => d.runAt.isBefore(now) || d.runAt.isEqual(now))
+ Future.successful(deployments.filter(d => d.runAt.isBefore(now) || d.runAt.isEqual(now)))
+ }
+
+ override def fetchCanonicalProcessWithVersion(
+ processName: ProcessName,
+ versionId: VersionId
+ ): Future[Option[(CanonicalProcess, ProcessVersion)]] = Future.successful {
+    Some((canonicalProcess(processName), ProcessVersion.empty))
+ }
+
+ override def fetchInputConfigDuringExecutionJson(
+ processName: ProcessName,
+ versionId: VersionId
+ ): Future[Option[String]] =
+ Future.successful(Some("{}"))
+
+}
+
+object InMemPeriodicProcessesRepository {
+
+ val getLatestDeploymentQueryCount = new AtomicLong(0)
+
+ private val ProcessIdSequence = new AtomicLong(0)
+ private val DeploymentIdSequence = new AtomicLong(0)
+
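+  // In-memory stand-ins for the persistent row types, keeping just enough fields for the
+  // repository contract to be exercised without a database.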
+ final case class TestPeriodicProcessEntity(
+ id: PeriodicProcessId,
+ processId: Option[ProcessId],
+ processName: ProcessName,
+ processVersionId: VersionId,
+ processingType: String,
+ inputConfigDuringExecutionJson: String,
+ runtimeParams: RuntimeParams,
+ scheduleProperty: String,
+ active: Boolean,
+ createdAt: LocalDateTime,
+ processActionId: Option[ProcessActionId]
+ )
+
+  final case class TestPeriodicProcessDeploymentEntity(
+ id: PeriodicProcessDeploymentId,
+ periodicProcessId: PeriodicProcessId,
+ createdAt: LocalDateTime,
+ runAt: LocalDateTime,
+ scheduleName: Option[String],
+ deployedAt: Option[LocalDateTime],
+ completedAt: Option[LocalDateTime],
+ retriesLeft: Int,
+ nextRetryAt: Option[LocalDateTime],
+ status: PeriodicProcessDeploymentStatus
+ )
+
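+  // Converters from the test entities to the scheduler's domain model, mirroring what the
+  // production repository does when reading rows back.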
+ def createPeriodicProcessDeployment(
+ processEntity: TestPeriodicProcessEntity,
+ processDeploymentEntity: TestPeriodicProcessDeploymentEntity
+ ): PeriodicProcessDeployment = {
+ val process = createPeriodicProcessWithJson(processEntity)
+ PeriodicProcessDeployment(
+ processDeploymentEntity.id,
+ process,
+ processDeploymentEntity.createdAt,
+ processDeploymentEntity.runAt,
+ ScheduleName(processDeploymentEntity.scheduleName),
+ processDeploymentEntity.retriesLeft,
+ processDeploymentEntity.nextRetryAt,
+ createPeriodicDeploymentState(processDeploymentEntity)
+ )
+ }
+
+ def createPeriodicDeploymentState(
+ processDeploymentEntity: TestPeriodicProcessDeploymentEntity
+ ): PeriodicProcessDeploymentState = {
+ PeriodicProcessDeploymentState(
+ processDeploymentEntity.deployedAt,
+ processDeploymentEntity.completedAt,
+ processDeploymentEntity.status
+ )
+ }
+
+ def createPeriodicProcessWithJson(
+ processEntity: TestPeriodicProcessEntity
+ ): PeriodicProcess = {
+ val scheduleProperty = prepareScheduleProperty(processEntity)
+ PeriodicProcess(
+ processEntity.id,
+ DeploymentWithRuntimeParams(
+ processId = processEntity.processId,
+ processName = processEntity.processName,
+ versionId = processEntity.processVersionId,
+ runtimeParams = processEntity.runtimeParams,
+ ),
+ scheduleProperty,
+ processEntity.active,
+ processEntity.createdAt,
+ processEntity.processActionId
+ )
+ }
+
+ def createPeriodicProcessWithoutJson(
+ processEntity: TestPeriodicProcessEntity
+ ): PeriodicProcess = {
+ val scheduleProperty = prepareScheduleProperty(processEntity)
+ PeriodicProcess(
+ processEntity.id,
+ DeploymentWithRuntimeParams(
+ processId = processEntity.processId,
+ processName = processEntity.processName,
+ versionId = processEntity.processVersionId,
+ runtimeParams = processEntity.runtimeParams,
+ ),
+ scheduleProperty,
+ processEntity.active,
+ processEntity.createdAt,
+ processEntity.processActionId
+ )
+ }
+
+ private def prepareScheduleProperty(processEntity: TestPeriodicProcessEntity) = {
+ val scheduleProperty = io.circe.parser
+ .decode[ScheduleProperty](processEntity.scheduleProperty)
+ .fold(e => throw new IllegalArgumentException(e), identity)
+ scheduleProperty
+ }
+
+ private def scheduleDeploymentData(deployment: TestPeriodicProcessDeploymentEntity): ScheduleDeploymentData = {
+ ScheduleDeploymentData(
+ deployment.id,
+ deployment.createdAt,
+ deployment.runAt,
+ deployment.deployedAt,
+ deployment.retriesLeft,
+ deployment.nextRetryAt,
+ createPeriodicDeploymentState(deployment)
+ )
}
}
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ProcessingTypeDataProviderSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ProcessingTypeDataProviderSpec.scala
index f4a8e88d82d..e76cb1f6f9a 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ProcessingTypeDataProviderSpec.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ProcessingTypeDataProviderSpec.scala
@@ -10,6 +10,7 @@ import pl.touk.nussknacker.engine.testing.{DeploymentManagerProviderStub, LocalM
import pl.touk.nussknacker.security.Permission
import pl.touk.nussknacker.test.utils.domain.TestFactory
import pl.touk.nussknacker.ui.UnauthorizedError
+import pl.touk.nussknacker.ui.process.processingtype.ProcessingTypeData.SchedulingForProcessingType
import pl.touk.nussknacker.ui.process.processingtype.loader.LocalProcessingTypeDataLoader
import pl.touk.nussknacker.ui.process.processingtype.provider.ProcessingTypeDataProvider
import pl.touk.nussknacker.ui.security.api.RealLoggedUser
@@ -57,7 +58,9 @@ class ProcessingTypeDataProviderSpec extends AnyFunSuite with Matchers {
loader
.loadProcessingTypeData(
_ => modelDependencies,
- _ => TestFactory.deploymentManagerDependencies
+ _ => TestFactory.deploymentManagerDependencies,
+ ModelClassLoaderProvider(allProcessingTypes.map(_ -> ModelClassLoaderDependencies(List.empty, None)).toMap),
+ dbRef = None,
)
.unsafeRunSync()
}
diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ScenarioParametersServiceTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ScenarioParametersServiceTest.scala
index fe079bdeec1..4b5deae86c6 100644
--- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ScenarioParametersServiceTest.scala
+++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/processingtype/ScenarioParametersServiceTest.scala
@@ -14,11 +14,13 @@ import pl.touk.nussknacker.engine.api.component.{ComponentProvider, DesignerWide
import pl.touk.nussknacker.engine.api.process.ProcessingType
import pl.touk.nussknacker.engine.definition.component.Components.ComponentDefinitionExtractionMode
import pl.touk.nussknacker.engine.deployment.EngineSetupName
+import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap
import pl.touk.nussknacker.restmodel.scenariodetails.ScenarioParameters
import pl.touk.nussknacker.security.Permission
import pl.touk.nussknacker.test.ValidatedValuesDetailedMessage
import pl.touk.nussknacker.test.utils.domain.TestFactory
import pl.touk.nussknacker.ui.config.DesignerConfig
+import pl.touk.nussknacker.ui.process.processingtype.ProcessingTypeData.SchedulingForProcessingType
import pl.touk.nussknacker.ui.process.processingtype.loader.ProcessingTypesConfigBasedProcessingTypeDataLoader
import pl.touk.nussknacker.ui.security.api.{LoggedUser, RealLoggedUser}
@@ -296,6 +298,12 @@ class ScenarioParametersServiceTest
ComponentDefinitionExtractionMode.FinalDefinition
),
_ => TestFactory.deploymentManagerDependencies,
+ ModelClassLoaderProvider(
+ designerConfig.processingTypeConfigs.configByProcessingType.mapValuesNow(conf =>
+ ModelClassLoaderDependencies(conf.classPath, None)
+ )
+ ),
+ dbRef = None,
)
.unsafeRunSync()
val parametersService = processingTypeData.getCombined().parametersService
diff --git a/docs/Changelog.md b/docs/Changelog.md
index ef7c6979fc8..c008dee443c 100644
--- a/docs/Changelog.md
+++ b/docs/Changelog.md
@@ -55,6 +55,11 @@
* [#7446](https://github.com/TouK/nussknacker/pull/7446) Small changes regarding node errors in fragments used in scenarios:
* Fragment error node tips in scenarios are now clickable and open problematic node edit window in a new tab.
* Fragment nodes are now highlighted when they contain nodes with errors.
+* [#7364](https://github.com/TouK/nussknacker/pull/7364) PeriodicDeploymentManager is no longer a separate DM; it is now an optional capability, implemented as a decorator that can wrap any DM
+ * in order to use it, the DM must implement the `schedulingSupported` interface, which handles deployments on a specific engine
+ * an implementation is provided for the Flink DM
+* [#7443](https://github.com/TouK/nussknacker/pull/7443) Indexing on records is now more similar to indexing on maps, which lets record values be accessed dynamically. For example, the SpEL expression `{a: 5, b: 10}[#input.field]` now compiles, and its type `Integer` is inferred from the types of the record's values. A record value can therefore be selected by user input: passing `{"field": "b"}` to the scenario yields `10`, while `{"field": "c"}` yields `null`. The expression `{a: 5}["b"]` still does not compile, because it is known at compile time that the record has no property `b` (see the sketch after this list).
+* [#7324](https://github.com/TouK/nussknacker/pull/7324) Fix: Passing Flink Job Global Params
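+
+ A rough plain-Scala analogy of the new runtime behaviour (illustrative only - the names below are hypothetical, and the compile-time typing of SpEL records has no direct counterpart here):
+ ```scala
+ // {a: 5, b: 10}[#input.field] acts like a lookup that yields null for unknown keys:
+ val record: Map[String, Int] = Map("a" -> 5, "b" -> 10)
+ def lookup(field: String): Integer = record.get(field).map(Int.box).orNull
+ assert(lookup("b") == 10)   // input {"field": "b"} -> 10
+ assert(lookup("c") == null) // input {"field": "c"} -> null
+ ```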
## 1.18
diff --git a/docs/MigrationGuide.md b/docs/MigrationGuide.md
index fd2975f560e..43052d7b8f0 100644
--- a/docs/MigrationGuide.md
+++ b/docs/MigrationGuide.md
@@ -40,9 +40,43 @@ To see the biggest differences please consult the [changelog](Changelog.md).
* [#7379](https://github.com/TouK/nussknacker/pull/7379) Removed CustomAction mechanism.
If there were any custom actions defined in some custom DeploymentManager implementation,
they should be modified to use the predefined set of actions or otherwise replaced by custom links and handled outside Nussknacker.
+* [#7364](https://github.com/TouK/nussknacker/pull/7364)
+ * the PeriodicDeploymentManager is no longer a separate DM type
+ * in the `scenarioTypes` config section, the `deploymentConfig` of a periodic scenario type (so far only Flink was supported) may have looked like this:
+ ```hocon
+ deploymentConfig: {
+ type: "flinkPeriodic"
+ restUrl: "http://jobmanager:8081"
+ shouldVerifyBeforeDeploy: true
+ deploymentManager {
+ db: { },
+ processingType: streaming,
+ jarsDir: ./storage/jars
+ }
+ }
+ ```
+ * changes:
+ * the `type: "flinkPeriodic"` is no longer supported; instead, `type: "flinkStreaming"` with the additional `scheduling` section (with `enabled: true`) should be used
+ * the DB config is now optional - the scheduling mechanism may still use its own datasource, defined in the `legacyDb` section
+ * when no custom `legacyDb` section is defined, the main Nussknacker database is used
+ * after the changes, the config may look like this:
+ ```hocon
+ deploymentConfig: {
+ type: "flinkStreaming"
+ scheduling {
+ enabled: true
+ processingType: streaming,
+ jarsDir: ./storage/jars
+ legacyDb: { },
+ }
+ restUrl: "http://jobmanager:8081"
+ shouldVerifyBeforeDeploy: true
+ }
+ ```
### Code API changes
* [#7368](https://github.com/TouK/nussknacker/pull/7368) Renamed `PeriodicSourceFactory` to `SampleGeneratorSourceFactory`
+* [#7364](https://github.com/TouK/nussknacker/pull/7364) The DeploymentManager must now implement `def schedulingSupport: SchedulingSupport`. If scheduling support is not provided, `NoSchedulingSupport` should be used - see the sketch below.
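+
+ A minimal sketch of opting out of scheduling support (the manager class below is hypothetical, the override mirrors the ones added to the development managers in this PR, and the import paths assume the new `scheduler` package):
+ ```scala
+ import pl.touk.nussknacker.engine.api.deployment.DeploymentManager
+ import pl.touk.nussknacker.engine.api.deployment.scheduler.{NoSchedulingSupport, SchedulingSupport}
+
+ class MyDeploymentManager extends DeploymentManager {
+   // Opt out of periodic execution; a manager that supports it would return a SchedulingSupported implementation instead.
+   override def schedulingSupport: SchedulingSupport = NoSchedulingSupport
+   // ... remaining DeploymentManager methods elided ...
+ }
+ ```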
## In version 1.18.0
diff --git a/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/DevelopmentDeploymentManagerProvider.scala b/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/DevelopmentDeploymentManagerProvider.scala
index e7fe3ac2173..bcfd4f47e8c 100644
--- a/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/DevelopmentDeploymentManagerProvider.scala
+++ b/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/DevelopmentDeploymentManagerProvider.scala
@@ -196,6 +196,8 @@ class DevelopmentDeploymentManager(actorSystem: ActorSystem, modelData: BaseMode
override def deploymentSynchronisationSupport: DeploymentSynchronisationSupport = NoDeploymentSynchronisationSupport
override def stateQueryForAllScenariosSupport: StateQueryForAllScenariosSupport = NoStateQueryForAllScenariosSupport
+
+ override def schedulingSupport: SchedulingSupport = NoSchedulingSupport
}
class DevelopmentDeploymentManagerProvider extends DeploymentManagerProvider {
diff --git a/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/MockableDeploymentManagerProvider.scala b/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/MockableDeploymentManagerProvider.scala
index 7110bd056df..2ac56d59912 100644
--- a/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/MockableDeploymentManagerProvider.scala
+++ b/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/MockableDeploymentManagerProvider.scala
@@ -120,6 +120,8 @@ object MockableDeploymentManagerProvider {
override def stateQueryForAllScenariosSupport: StateQueryForAllScenariosSupport = NoStateQueryForAllScenariosSupport
+ override def schedulingSupport: SchedulingSupport = NoSchedulingSupport
+
override def managerSpecificScenarioActivities(
processIdWithName: ProcessIdWithName,
after: Option[Instant],
diff --git a/engine/flink/components-api/src/main/scala/pl/touk/nussknacker/engine/flink/api/NkGlobalParameters.scala b/engine/flink/components-api/src/main/scala/pl/touk/nussknacker/engine/flink/api/NkGlobalParameters.scala
index ddd7fbc38e2..ac9e0bc9960 100644
--- a/engine/flink/components-api/src/main/scala/pl/touk/nussknacker/engine/flink/api/NkGlobalParameters.scala
+++ b/engine/flink/components-api/src/main/scala/pl/touk/nussknacker/engine/flink/api/NkGlobalParameters.scala
@@ -17,6 +17,7 @@ import scala.jdk.CollectionConverters._
//Also, those configuration properties will be exposed via Flink REST API/webconsole
case class NkGlobalParameters(
buildInfo: String,
+ deploymentId: String, // TODO: pass a typed DeploymentId here instead of a String?
processVersion: ProcessVersion,
configParameters: Option[ConfigGlobalParameters],
namespaceParameters: Option[NamespaceMetricsTags],
@@ -63,13 +64,21 @@ object NkGlobalParameters {
def create(
buildInfo: String,
+ deploymentId: String, // TODO: pass a typed DeploymentId here instead of a String?
processVersion: ProcessVersion,
modelConfig: Config,
namespaceTags: Option[NamespaceMetricsTags],
additionalInformation: Map[String, String]
): NkGlobalParameters = {
val configGlobalParameters = modelConfig.getAs[ConfigGlobalParameters]("globalParameters")
- NkGlobalParameters(buildInfo, processVersion, configGlobalParameters, namespaceTags, additionalInformation)
+ NkGlobalParameters(
+ buildInfo,
+ deploymentId,
+ processVersion,
+ configGlobalParameters,
+ namespaceTags,
+ additionalInformation
+ )
}
def fromMap(jobParameters: java.util.Map[String, String]): Option[NkGlobalParameters] =
@@ -79,11 +88,12 @@ object NkGlobalParameters {
def encode(parameters: NkGlobalParameters): Map[String, String] = {
def encodeWithKeyPrefix(map: Map[String, String], prefix: String): Map[String, String] = {
- map.map { case (key, value) => s"$prefix$key" -> value }
+ map.map { case (key, value) => s"$prefix.$key" -> value }
}
val baseProperties = Map[String, String](
"buildInfo" -> parameters.buildInfo,
+ "deploymentId" -> parameters.deploymentId,
"versionId" -> parameters.processVersion.versionId.value.toString,
"processId" -> parameters.processVersion.processId.value.toString,
"modelVersion" -> parameters.processVersion.modelVersion.map(_.toString).orNull,
@@ -95,9 +105,11 @@ object NkGlobalParameters {
val configMap = parameters.configParameters
.map(ConfigGlobalParametersToMapEncoder.encode)
.getOrElse(Map.empty)
+
val namespaceTagsMap = parameters.namespaceParameters
.map(p => encodeWithKeyPrefix(p.tags, namespaceTagsMapPrefix))
.getOrElse(Map.empty)
+
val additionalInformationMap =
encodeWithKeyPrefix(parameters.additionalInformation, additionalInformationMapPrefix)
@@ -107,8 +119,8 @@ object NkGlobalParameters {
def decode(map: Map[String, String]): Option[NkGlobalParameters] = {
def decodeWithKeyPrefix(map: Map[String, String], prefix: String): Map[String, String] = {
map.view
- .filter { case (key, _) => key.startsWith(prefix) }
- .map { case (key, value) => key.stripPrefix(prefix) -> value }
+ .filter { case (key, _) => key.startsWith(s"$prefix.") }
+ .map { case (key, value) => key.stripPrefix(s"$prefix.") -> value }
.toMap
}
@@ -134,7 +146,15 @@ object NkGlobalParameters {
for {
processVersion <- processVersionOpt
buildInfo <- buildInfoOpt
- } yield NkGlobalParameters(buildInfo, processVersion, configParameters, namespaceTags, additionalInformation)
+ deploymentId <- map.get("deploymentId")
+ } yield NkGlobalParameters(
+ buildInfo,
+ deploymentId,
+ processVersion,
+ configParameters,
+ namespaceTags,
+ additionalInformation
+ )
}
private object ConfigGlobalParametersToMapEncoder {
diff --git a/engine/flink/components/base-tests/src/test/scala/pl/touk/nussknacker/engine/flink/util/transformer/aggregate/AggregatesSpec.scala b/engine/flink/components/base-tests/src/test/scala/pl/touk/nussknacker/engine/flink/util/transformer/aggregate/AggregatesSpec.scala
index c4243070a9d..d875ce51b21 100644
--- a/engine/flink/components/base-tests/src/test/scala/pl/touk/nussknacker/engine/flink/util/transformer/aggregate/AggregatesSpec.scala
+++ b/engine/flink/components/base-tests/src/test/scala/pl/touk/nussknacker/engine/flink/util/transformer/aggregate/AggregatesSpec.scala
@@ -116,7 +116,10 @@ class AggregatesSpec extends AnyFunSuite with TableDrivenPropertyChecks with Mat
test("should calculate correct results for average aggregator on BigInt") {
val agg = AverageAggregator
- addElementsAndComputeResult(List(new BigInteger("7"), new BigInteger("8")), agg) shouldEqual new java.math.BigDecimal("7.5")
+ addElementsAndComputeResult(
+ List(new BigInteger("7"), new BigInteger("8")),
+ agg
+ ) shouldEqual new java.math.BigDecimal("7.5")
}
test("should calculate correct results for average aggregator on float") {
@@ -133,20 +136,22 @@ class AggregatesSpec extends AnyFunSuite with TableDrivenPropertyChecks with Mat
}
test("some aggregators should produce null on single null input") {
- forAll (Table(
- "aggregator",
- AverageAggregator,
- SampleStandardDeviationAggregator,
- PopulationStandardDeviationAggregator,
- SampleVarianceAggregator,
- PopulationVarianceAggregator,
- MaxAggregator,
- MinAggregator,
- FirstAggregator,
- LastAggregator,
- SumAggregator,
- MedianAggregator
- )) { agg =>
+ forAll(
+ Table(
+ "aggregator",
+ AverageAggregator,
+ SampleStandardDeviationAggregator,
+ PopulationStandardDeviationAggregator,
+ SampleVarianceAggregator,
+ PopulationVarianceAggregator,
+ MaxAggregator,
+ MinAggregator,
+ FirstAggregator,
+ LastAggregator,
+ SumAggregator,
+ MedianAggregator
+ )
+ ) { agg =>
addElementsAndComputeResult(List(null), agg) shouldEqual null
}
}
@@ -154,10 +159,10 @@ class AggregatesSpec extends AnyFunSuite with TableDrivenPropertyChecks with Mat
test("should calculate correct results for standard deviation and variance on doubles") {
val table = Table(
("aggregator", "value"),
- ( SampleStandardDeviationAggregator, Math.sqrt(2.5) ),
- ( PopulationStandardDeviationAggregator, Math.sqrt(2) ),
- ( SampleVarianceAggregator, 2.5 ),
- ( PopulationVarianceAggregator, 2.0 )
+ (SampleStandardDeviationAggregator, Math.sqrt(2.5)),
+ (PopulationStandardDeviationAggregator, Math.sqrt(2)),
+ (SampleVarianceAggregator, 2.5),
+ (PopulationVarianceAggregator, 2.0)
)
forAll(table) { (agg, expectedResult) =>
@@ -182,7 +187,10 @@ class AggregatesSpec extends AnyFunSuite with TableDrivenPropertyChecks with Mat
test("should calculate correct results for median aggregator on BigInt") {
val agg = MedianAggregator
- addElementsAndComputeResult(List(new BigInteger("7"), new BigInteger("8")), agg) shouldEqual new java.math.BigDecimal("7.5")
+ addElementsAndComputeResult(
+ List(new BigInteger("7"), new BigInteger("8")),
+ agg
+ ) shouldEqual new java.math.BigDecimal("7.5")
}
test("should calculate correct results for median aggregator on floats") {
@@ -225,10 +233,10 @@ class AggregatesSpec extends AnyFunSuite with TableDrivenPropertyChecks with Mat
test("should calculate correct results for standard deviation and variance on integers") {
val table = Table(
("aggregator", "value"),
- ( SampleStandardDeviationAggregator, Math.sqrt(2.5) ),
- ( PopulationStandardDeviationAggregator, Math.sqrt(2) ),
- ( SampleVarianceAggregator, 2.5 ),
- ( PopulationVarianceAggregator, 2.0 )
+ (SampleStandardDeviationAggregator, Math.sqrt(2.5)),
+ (PopulationStandardDeviationAggregator, Math.sqrt(2)),
+ (SampleVarianceAggregator, 2.5),
+ (PopulationVarianceAggregator, 2.0)
)
forAll(table) { (agg, expectedResult) =>
@@ -240,10 +248,10 @@ class AggregatesSpec extends AnyFunSuite with TableDrivenPropertyChecks with Mat
test("should calculate correct results for standard deviation and variance on BigInt") {
val table = Table(
("aggregator", "value"),
- ( SampleStandardDeviationAggregator, BigDecimal(Math.sqrt(2.5)) ),
- ( PopulationStandardDeviationAggregator, BigDecimal(Math.sqrt(2)) ),
- ( SampleVarianceAggregator, BigDecimal(2.5) ),
- ( PopulationVarianceAggregator, BigDecimal(2.0) )
+ (SampleStandardDeviationAggregator, BigDecimal(Math.sqrt(2.5))),
+ (PopulationStandardDeviationAggregator, BigDecimal(Math.sqrt(2))),
+ (SampleVarianceAggregator, BigDecimal(2.5)),
+ (PopulationVarianceAggregator, BigDecimal(2.0))
)
forAll(table) { (agg, expectedResult) =>
@@ -258,10 +266,10 @@ class AggregatesSpec extends AnyFunSuite with TableDrivenPropertyChecks with Mat
test("should calculate correct results for standard deviation and variance on float") {
val table = Table(
("aggregator", "value"),
- ( SampleStandardDeviationAggregator, Math.sqrt(2.5) ),
- ( PopulationStandardDeviationAggregator, Math.sqrt(2) ),
- ( SampleVarianceAggregator, 2.5 ),
- ( PopulationVarianceAggregator, 2.0 )
+ (SampleStandardDeviationAggregator, Math.sqrt(2.5)),
+ (PopulationStandardDeviationAggregator, Math.sqrt(2)),
+ (SampleVarianceAggregator, 2.5),
+ (PopulationVarianceAggregator, 2.0)
)
forAll(table) { (agg, expectedResult) =>
@@ -273,10 +281,10 @@ class AggregatesSpec extends AnyFunSuite with TableDrivenPropertyChecks with Mat
test("should calculate correct results for standard deviation and variance on BigDecimals") {
val table = Table(
("aggregator", "value"),
- ( SampleStandardDeviationAggregator, BigDecimal(Math.sqrt(2.5)) ),
- ( PopulationStandardDeviationAggregator, BigDecimal(Math.sqrt(2)) ),
- ( SampleVarianceAggregator, BigDecimal(2.5) ),
- ( PopulationVarianceAggregator, BigDecimal(2.0) )
+ (SampleStandardDeviationAggregator, BigDecimal(Math.sqrt(2.5))),
+ (PopulationStandardDeviationAggregator, BigDecimal(Math.sqrt(2))),
+ (SampleVarianceAggregator, BigDecimal(2.5)),
+ (PopulationVarianceAggregator, BigDecimal(2.0))
)
forAll(table) { (agg, expectedResult) =>
@@ -297,15 +305,15 @@ class AggregatesSpec extends AnyFunSuite with TableDrivenPropertyChecks with Mat
test("some aggregators should ignore nulls ") {
val table = Table(
("aggregator", "value"),
- ( SampleStandardDeviationAggregator, Math.sqrt(2.5) ),
- ( PopulationStandardDeviationAggregator, Math.sqrt(2) ),
- ( SampleVarianceAggregator, 2.5 ),
- ( PopulationVarianceAggregator, 2.0 ),
- ( SumAggregator, 15.0),
- ( MaxAggregator, 5.0),
- ( MinAggregator, 1.0),
- ( AverageAggregator, 3.0),
- ( MedianAggregator, 3.0)
+ (SampleStandardDeviationAggregator, Math.sqrt(2.5)),
+ (PopulationStandardDeviationAggregator, Math.sqrt(2)),
+ (SampleVarianceAggregator, 2.5),
+ (PopulationVarianceAggregator, 2.0),
+ (SumAggregator, 15.0),
+ (MaxAggregator, 5.0),
+ (MinAggregator, 1.0),
+ (AverageAggregator, 3.0),
+ (MedianAggregator, 3.0)
)
forAll(table) { (agg, expectedResult) =>
@@ -315,19 +323,21 @@ class AggregatesSpec extends AnyFunSuite with TableDrivenPropertyChecks with Mat
}
test("some aggregators should produce null on empty set") {
- forAll (Table(
- "aggregator",
- AverageAggregator,
- SampleStandardDeviationAggregator,
- PopulationStandardDeviationAggregator,
- SampleVarianceAggregator,
- PopulationVarianceAggregator,
- MaxAggregator,
- MinAggregator,
- FirstAggregator,
- LastAggregator,
- SumAggregator
- )) { agg =>
+ forAll(
+ Table(
+ "aggregator",
+ AverageAggregator,
+ SampleStandardDeviationAggregator,
+ PopulationStandardDeviationAggregator,
+ SampleVarianceAggregator,
+ PopulationVarianceAggregator,
+ MaxAggregator,
+ MinAggregator,
+ FirstAggregator,
+ LastAggregator,
+ SumAggregator
+ )
+ ) { agg =>
val result = addElementsAndComputeResult(List(), agg)
result shouldBe null
}
diff --git a/engine/flink/components/base-unbounded/src/main/scala/pl/touk/nussknacker/engine/flink/util/transformer/aggregate/aggregates.scala b/engine/flink/components/base-unbounded/src/main/scala/pl/touk/nussknacker/engine/flink/util/transformer/aggregate/aggregates.scala
index 8f1ebfadcd6..7fe0b0122f6 100644
--- a/engine/flink/components/base-unbounded/src/main/scala/pl/touk/nussknacker/engine/flink/util/transformer/aggregate/aggregates.scala
+++ b/engine/flink/components/base-unbounded/src/main/scala/pl/touk/nussknacker/engine/flink/util/transformer/aggregate/aggregates.scala
@@ -5,9 +5,7 @@ import cats.data.{NonEmptyList, Validated}
import cats.instances.list._
import org.apache.flink.api.common.typeinfo.TypeInfo
import pl.touk.nussknacker.engine.api.typed.supertype.NumberTypesPromotionStrategy
-import pl.touk.nussknacker.engine.api.typed.supertype.NumberTypesPromotionStrategy.{
- ForLargeFloatingNumbersOperation,
-}
+import pl.touk.nussknacker.engine.api.typed.supertype.NumberTypesPromotionStrategy.ForLargeFloatingNumbersOperation
import pl.touk.nussknacker.engine.api.typed.typing._
import pl.touk.nussknacker.engine.api.typed.{NumberTypeUtils, typing}
import pl.touk.nussknacker.engine.flink.api.typeinfo.caseclass.CaseClassTypeInfoFactory
@@ -17,7 +15,6 @@ import pl.touk.nussknacker.engine.util.MathUtils
import pl.touk.nussknacker.engine.util.validated.ValidatedSyntax._
import java.util
-import scala.collection.mutable.ListBuffer
import scala.jdk.CollectionConverters._
/*
@@ -81,7 +78,8 @@ object aggregates {
override def zero: Aggregate = new java.util.ArrayList[Number]()
- override def addElement(el: Element, agg: Aggregate): Aggregate = if (el == null) agg else {
+ override def addElement(el: Element, agg: Aggregate): Aggregate = if (el == null) agg
+ else {
agg.add(el)
agg
}
@@ -95,7 +93,8 @@ object aggregates {
result
}
- override def result(finalAggregate: Aggregate): AnyRef = MedianHelper.calculateMedian(finalAggregate.asScala.toList).orNull
+ override def result(finalAggregate: Aggregate): AnyRef =
+ MedianHelper.calculateMedian(finalAggregate.asScala.toList).orNull
override def computeStoredType(input: TypingResult): Validated[String, TypingResult] = Valid(
Typed.genericTypeClass[java.util.ArrayList[_]](List(input))
diff --git a/engine/flink/components/base-unbounded/src/main/scala/pl/touk/nussknacker/engine/flink/util/transformer/aggregate/median/MedianHelper.scala b/engine/flink/components/base-unbounded/src/main/scala/pl/touk/nussknacker/engine/flink/util/transformer/aggregate/median/MedianHelper.scala
index a4685a3b9b5..379d07aead4 100644
--- a/engine/flink/components/base-unbounded/src/main/scala/pl/touk/nussknacker/engine/flink/util/transformer/aggregate/median/MedianHelper.scala
+++ b/engine/flink/components/base-unbounded/src/main/scala/pl/touk/nussknacker/engine/flink/util/transformer/aggregate/median/MedianHelper.scala
@@ -1,8 +1,6 @@
package pl.touk.nussknacker.engine.flink.util.transformer.aggregate.median
-import pl.touk.nussknacker.engine.api.typed.supertype.NumberTypesPromotionStrategy.{
- ForLargeFloatingNumbersOperation,
-}
+import pl.touk.nussknacker.engine.api.typed.supertype.NumberTypesPromotionStrategy.ForLargeFloatingNumbersOperation
import pl.touk.nussknacker.engine.util.MathUtils
import scala.annotation.tailrec
@@ -15,7 +13,9 @@ object MedianHelper {
if (numbers.isEmpty) {
None
} else if (numbers.size % 2 == 1) {
- Some(MathUtils.convertToPromotedType(quickSelect(numbers, (numbers.size - 1) / 2))(ForLargeFloatingNumbersOperation))
+ Some(
+ MathUtils.convertToPromotedType(quickSelect(numbers, (numbers.size - 1) / 2))(ForLargeFloatingNumbersOperation)
+ )
} else {
// it is possible to fetch both numbers with single recursion, but it would complicate code
val firstNumber = quickSelect(numbers, numbers.size / 2 - 1)
diff --git a/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/ExecutionConfigPreparer.scala b/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/ExecutionConfigPreparer.scala
index 34c9140ac1a..debf951005f 100644
--- a/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/ExecutionConfigPreparer.scala
+++ b/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/ExecutionConfigPreparer.scala
@@ -56,30 +56,19 @@ object ExecutionConfigPreparer extends LazyLogging {
config.setGlobalJobParameters(
NkGlobalParameters.create(
buildInfo,
+ deploymentData.deploymentId.value,
jobData.processVersion,
modelConfig,
namespaceTags = NamespaceMetricsTags(jobData.metaData.name.value, namingStrategy),
- prepareMap(jobData.processVersion, deploymentData)
+ prepareMap(deploymentData)
)
)
}
- private def prepareMap(processVersion: ProcessVersion, deploymentData: DeploymentData) = {
-
- val baseProperties = Map[String, String](
- "buildInfo" -> buildInfo,
- "versionId" -> processVersion.versionId.value.toString,
- "processId" -> processVersion.processId.value.toString,
- "labels" -> Encoder[List[String]].apply(processVersion.labels).noSpaces,
- "modelVersion" -> processVersion.modelVersion.map(_.toString).orNull,
- "user" -> processVersion.user,
- "deploymentId" -> deploymentData.deploymentId.value
- )
- val scenarioProperties = deploymentData.additionalDeploymentData.map { case (k, v) =>
+ private def prepareMap(deploymentData: DeploymentData) =
+ deploymentData.additionalDeploymentData.map { case (k, v) =>
s"deployment.properties.$k" -> v
}
- baseProperties ++ scenarioProperties
- }
}
diff --git a/engine/flink/management/periodic/README.md b/engine/flink/management/periodic/README.md
deleted file mode 100644
index caa2148a016..00000000000
--- a/engine/flink/management/periodic/README.md
+++ /dev/null
@@ -1,34 +0,0 @@
-# Periodic scenarios deployment manager
-
-An experimental engine running scenarios periodicly according to a schedule such as a cron expression.
-
-When the deploy button is clicked in NK GUI, then the scenario is scheduled to be run in the future. When a scenario
-should be run is described by a schedule, e.g. a cron expression set in scenario properties. During scenario scheduling,
-deployment manager only prepares data needed to deploy scenario on a target engine (e.g. on Flink cluster).
-scenario is deployed according to the schedule on the target engine. Periodic engine watches its completion. Afterwards
-scenario is scheduled to be run again according to the schedule.
-
-## Usage
-
-- Implement `DeploymentManagerProvider` using `PeriodicDeploymentManagerProvider`. Following components need to provided:
- - Underlying engine, currently only Flink is supported.
- - Optional `SchedulePropertyExtractorFactory` to determine how to construct an instance of a periodic property. By default
- a cron expression set in scenario properties is used to describe when a scenario should be run.
- - Optional `ProcessConfigEnricherFactory` if you would like to extend scenario configuration, by default nothing is done.
- - Optional `PeriodicProcessListenerFactory` to take some actions on scenario lifecycle.
- - Optional `AdditionalDeploymentDataProvider` to inject additional deployment parameters.
-- Add service provider with your `DeploymentManagerProvider` implementation.
-
-## Configuration
-
-Use `deploymentManager` with the following properties:
-
-- `db` - Nussknacker db configuration.
-- `processingType` - processing type of scenarios to be managed by this instance of the periodic engine.
-- `rescheduleCheckInterval` - frequency of checking finished scenarios to be rescheduled. Optional.
-- `deployInterval` - frequency of checking scenarios to be deployed on Flink cluster. Optional.
-- `deploymentRetry` - failed deployments configuration. By default retrying is disabled.
- - `deployMaxRetries` - maximum amount of retries for failed deployment.
- - `deployRetryPenalize` - an amount of time by which the next retry should be delayed.
-- `jarsDir` - directory for jars storage.
-- `maxFetchedPeriodicScenarioActivities` - optional, maximum number of latest ScenarioActivities that will be fetched, by default 200
diff --git a/engine/flink/management/periodic/src/main/resources/META-INF/services/pl.touk.nussknacker.engine.DeploymentManagerProvider b/engine/flink/management/periodic/src/main/resources/META-INF/services/pl.touk.nussknacker.engine.DeploymentManagerProvider
deleted file mode 100644
index e3b1553f5e4..00000000000
--- a/engine/flink/management/periodic/src/main/resources/META-INF/services/pl.touk.nussknacker.engine.DeploymentManagerProvider
+++ /dev/null
@@ -1 +0,0 @@
-pl.touk.nussknacker.engine.management.periodic.FlinkPeriodicDeploymentManagerProvider
diff --git a/engine/flink/management/periodic/src/main/resources/META-INF/services/pl.touk.nussknacker.engine.api.definition.CustomParameterValidator b/engine/flink/management/periodic/src/main/resources/META-INF/services/pl.touk.nussknacker.engine.api.definition.CustomParameterValidator
deleted file mode 100644
index 202f7c5d586..00000000000
--- a/engine/flink/management/periodic/src/main/resources/META-INF/services/pl.touk.nussknacker.engine.api.definition.CustomParameterValidator
+++ /dev/null
@@ -1 +0,0 @@
-pl.touk.nussknacker.engine.management.periodic.cron.CronParameterValidator
\ No newline at end of file
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/FlinkPeriodicDeploymentManagerProvider.scala b/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/FlinkPeriodicDeploymentManagerProvider.scala
deleted file mode 100644
index 9ce983104cc..00000000000
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/FlinkPeriodicDeploymentManagerProvider.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-package pl.touk.nussknacker.engine.management.periodic
-
-import cats.data.ValidatedNel
-import com.typesafe.config.Config
-import com.typesafe.scalalogging.LazyLogging
-import pl.touk.nussknacker.engine.api.component.ScenarioPropertyConfig
-import pl.touk.nussknacker.engine.api.definition.{MandatoryParameterValidator, StringParameterEditor}
-import pl.touk.nussknacker.engine.api.deployment.DeploymentManager
-import pl.touk.nussknacker.engine.deployment.EngineSetupName
-import pl.touk.nussknacker.engine.management.periodic.cron.CronParameterValidator
-import pl.touk.nussknacker.engine.management.{FlinkConfig, FlinkStreamingDeploymentManagerProvider}
-import pl.touk.nussknacker.engine.management.periodic.service._
-import pl.touk.nussknacker.engine.util.config.ConfigEnrichments.RichConfig
-import pl.touk.nussknacker.engine.{
- BaseModelData,
- DeploymentManagerDependencies,
- DeploymentManagerProvider,
- MetaDataInitializer
-}
-
-import scala.concurrent.duration.FiniteDuration
-
-class FlinkPeriodicDeploymentManagerProvider extends DeploymentManagerProvider with LazyLogging {
-
- private val delegate = new FlinkStreamingDeploymentManagerProvider()
-
- private val cronConfig = CronSchedulePropertyExtractor.CronPropertyDefaultName -> ScenarioPropertyConfig(
- defaultValue = None,
- editor = Some(StringParameterEditor),
- validators = Some(List(MandatoryParameterValidator, CronParameterValidator.delegate)),
- label = Some("Schedule"),
- hintText = Some("Quartz cron syntax. You can specify multiple schedulers separated by '|'.")
- )
-
- override def name: String = "flinkPeriodic"
-
- override def createDeploymentManager(
- modelData: BaseModelData,
- dependencies: DeploymentManagerDependencies,
- config: Config,
- scenarioStateCacheTTL: Option[FiniteDuration],
- ): ValidatedNel[String, DeploymentManager] = {
- logger.info("Creating FlinkPeriodic scenario manager")
- delegate.createDeploymentManagerWithCapabilities(modelData, dependencies, config, scenarioStateCacheTTL).map {
- delegateDeploymentManager =>
- import net.ceedubs.ficus.Ficus._
- import net.ceedubs.ficus.readers.ArbitraryTypeReader._
- val periodicBatchConfig = config.as[PeriodicBatchConfig]("deploymentManager")
- val flinkConfig = config.rootAs[FlinkConfig]
-
- PeriodicDeploymentManager(
- delegate = delegateDeploymentManager,
- schedulePropertyExtractorFactory = _ => CronSchedulePropertyExtractor(),
- processConfigEnricherFactory = ProcessConfigEnricherFactory.noOp,
- periodicBatchConfig = periodicBatchConfig,
- flinkConfig = flinkConfig,
- originalConfig = config,
- modelData = modelData,
- EmptyPeriodicProcessListenerFactory,
- DefaultAdditionalDeploymentDataProvider,
- dependencies
- )
- }
-
- }
-
- override def metaDataInitializer(config: Config): MetaDataInitializer =
- delegate.metaDataInitializer(config)
-
- override def scenarioPropertiesConfig(config: Config): Map[String, ScenarioPropertyConfig] =
- Map(cronConfig) ++ delegate.scenarioPropertiesConfig(config)
-
- override def defaultEngineSetupName: EngineSetupName =
- delegate.defaultEngineSetupName
-
- override def engineSetupIdentity(config: Config): Any =
- delegate.engineSetupIdentity(config)
-
-}
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/JarManager.scala b/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/JarManager.scala
deleted file mode 100644
index bf5cab7a4f7..00000000000
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/JarManager.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package pl.touk.nussknacker.engine.management.periodic
-
-import pl.touk.nussknacker.engine.api.ProcessVersion
-import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.deployment.{DeploymentData, ExternalDeploymentId}
-import pl.touk.nussknacker.engine.management.periodic.model.DeploymentWithJarData
-
-import scala.concurrent.Future
-
-private[periodic] trait JarManager {
-
- def prepareDeploymentWithJar(
- processVersion: ProcessVersion,
- canonicalProcess: CanonicalProcess
- ): Future[DeploymentWithJarData.WithCanonicalProcess]
-
- def deployWithJar(
- deploymentWithJarData: DeploymentWithJarData.WithCanonicalProcess,
- deploymentData: DeploymentData,
- ): Future[Option[ExternalDeploymentId]]
-
- def deleteJar(jarFileName: String): Future[Unit]
-}
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/SchedulePropertyExtractorFactory.scala b/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/SchedulePropertyExtractorFactory.scala
deleted file mode 100644
index 209f9adb675..00000000000
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/SchedulePropertyExtractorFactory.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package pl.touk.nussknacker.engine.management.periodic
-
-import com.typesafe.config.Config
-
-trait SchedulePropertyExtractorFactory {
- def apply(config: Config): SchedulePropertyExtractor
-}
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/flink/FlinkJarManager.scala b/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/flink/FlinkJarManager.scala
deleted file mode 100644
index 1b398dea598..00000000000
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/flink/FlinkJarManager.scala
+++ /dev/null
@@ -1,118 +0,0 @@
-package pl.touk.nussknacker.engine.management.periodic.flink
-
-import com.typesafe.scalalogging.LazyLogging
-import org.apache.flink.api.common.JobID
-import pl.touk.nussknacker.engine.api.ProcessVersion
-import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.deployment.{DeploymentData, ExternalDeploymentId}
-import pl.touk.nussknacker.engine.management.periodic.model.DeploymentWithJarData
-import pl.touk.nussknacker.engine.management.periodic.{JarManager, PeriodicBatchConfig}
-import pl.touk.nussknacker.engine.management.rest.{FlinkClient, HttpFlinkClient}
-import pl.touk.nussknacker.engine.management.{
- FlinkConfig,
- FlinkDeploymentManager,
- FlinkModelJarProvider,
- FlinkStreamingRestManager
-}
-import pl.touk.nussknacker.engine.modelconfig.InputConfigDuringExecution
-import pl.touk.nussknacker.engine.{BaseModelData, newdeployment}
-import sttp.client3.SttpBackend
-
-import java.nio.file.{Files, Path, Paths}
-import scala.concurrent.{ExecutionContext, Future}
-
-private[periodic] object FlinkJarManager {
-
- def apply(flinkConfig: FlinkConfig, periodicBatchConfig: PeriodicBatchConfig, modelData: BaseModelData)(
- implicit backend: SttpBackend[Future, Any],
- ec: ExecutionContext
- ): JarManager = {
- new FlinkJarManager(
- flinkClient = HttpFlinkClient.createUnsafe(flinkConfig),
- jarsDir = Paths.get(periodicBatchConfig.jarsDir),
- inputConfigDuringExecution = modelData.inputConfigDuringExecution,
- modelJarProvider = new FlinkModelJarProvider(modelData.modelClassLoaderUrls)
- )
- }
-
-}
-
-// Used by [[PeriodicProcessService]].
-private[periodic] class FlinkJarManager(
- flinkClient: FlinkClient,
- jarsDir: Path,
- inputConfigDuringExecution: InputConfigDuringExecution,
- modelJarProvider: FlinkModelJarProvider
-) extends JarManager
- with LazyLogging {
-
- import scala.concurrent.ExecutionContext.Implicits.global
-
- override def prepareDeploymentWithJar(
- processVersion: ProcessVersion,
- canonicalProcess: CanonicalProcess
- ): Future[DeploymentWithJarData.WithCanonicalProcess] = {
- logger.info(s"Prepare deployment for scenario: $processVersion")
- copyJarToLocalDir(processVersion).map { jarFileName =>
- DeploymentWithJarData.WithCanonicalProcess(
- processVersion = processVersion,
- process = canonicalProcess,
- inputConfigDuringExecutionJson = inputConfigDuringExecution.serialized,
- jarFileName = jarFileName
- )
- }
- }
-
- private def copyJarToLocalDir(processVersion: ProcessVersion): Future[String] = Future {
- jarsDir.toFile.mkdirs()
- val jarFileName =
- s"${processVersion.processName}-${processVersion.versionId.value}-${System.currentTimeMillis()}.jar"
- val jarPath = jarsDir.resolve(jarFileName)
- Files.copy(modelJarProvider.getJobJar().toPath, jarPath)
- logger.info(s"Copied current model jar to $jarPath")
- jarFileName
- }
-
- override def deployWithJar(
- deploymentWithJarData: DeploymentWithJarData.WithCanonicalProcess,
- deploymentData: DeploymentData
- ): Future[Option[ExternalDeploymentId]] = {
- val processVersion = deploymentWithJarData.processVersion
- logger.info(
- s"Deploying scenario ${processVersion.processName}, version id: ${processVersion.versionId} and jar: ${deploymentWithJarData.jarFileName}"
- )
- val jarFile = jarsDir.resolve(deploymentWithJarData.jarFileName).toFile
- val args = FlinkDeploymentManager.prepareProgramArgs(
- deploymentWithJarData.inputConfigDuringExecutionJson,
- processVersion,
- deploymentData,
- deploymentWithJarData.process
- )
- flinkClient.runProgram(
- jarFile,
- FlinkStreamingRestManager.MainClassName,
- args,
- None,
- deploymentData.deploymentId.toNewDeploymentIdOpt.map(toJobId)
- )
- }
-
- override def deleteJar(jarFileName: String): Future[Unit] = {
- logger.info(s"Deleting jar: $jarFileName")
- for {
- _ <- deleteLocalJar(jarFileName)
- _ <- flinkClient.deleteJarIfExists(jarFileName)
- } yield ()
- }
-
- private def deleteLocalJar(jarFileName: String): Future[Unit] = Future {
- val jarPath = jarsDir.resolve(jarFileName)
- val deleted = Files.deleteIfExists(jarPath)
- logger.info(s"Deleted: ($deleted) jar in: $jarPath")
- }
-
- private def toJobId(did: newdeployment.DeploymentId) = {
- new JobID(did.value.getLeastSignificantBits, did.value.getMostSignificantBits).toHexString
- }
-
-}
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/model/DeploymentWithJarData.scala b/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/model/DeploymentWithJarData.scala
deleted file mode 100644
index 6491b9506b8..00000000000
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/model/DeploymentWithJarData.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-package pl.touk.nussknacker.engine.management.periodic.model
-
-import pl.touk.nussknacker.engine.api.ProcessVersion
-import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-
-sealed trait DeploymentWithJarData {
- def processVersion: ProcessVersion
- def jarFileName: String
-}
-
-object DeploymentWithJarData {
-
- final case class WithCanonicalProcess(
- processVersion: ProcessVersion,
- jarFileName: String,
- process: CanonicalProcess,
- inputConfigDuringExecutionJson: String,
- ) extends DeploymentWithJarData
-
- final case class WithoutCanonicalProcess(
- processVersion: ProcessVersion,
- jarFileName: String
- ) extends DeploymentWithJarData
-
-}
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/model/PeriodicProcess.scala b/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/model/PeriodicProcess.scala
deleted file mode 100644
index e89deab2320..00000000000
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/model/PeriodicProcess.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-package pl.touk.nussknacker.engine.management.periodic.model
-
-import pl.touk.nussknacker.engine.api.ProcessVersion
-import pl.touk.nussknacker.engine.api.deployment.ProcessActionId
-import pl.touk.nussknacker.engine.management.periodic.ScheduleProperty
-import slick.lifted.MappedTo
-
-import java.time.LocalDateTime
-
-case class PeriodicProcessId(value: Long) extends MappedTo[Long]
-
-case class PeriodicProcess[DeploymentData <: DeploymentWithJarData](
- id: PeriodicProcessId,
- deploymentData: DeploymentData,
- scheduleProperty: ScheduleProperty,
- active: Boolean,
- createdAt: LocalDateTime,
- processActionId: Option[ProcessActionId]
-) {
- val processVersion: ProcessVersion = deploymentData.processVersion
-}
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/model/PeriodicProcessDeployment.scala b/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/model/PeriodicProcessDeployment.scala
deleted file mode 100644
index 88f620eff9e..00000000000
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/model/PeriodicProcessDeployment.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-package pl.touk.nussknacker.engine.management.periodic.model
-
-import pl.touk.nussknacker.engine.management.periodic.{MultipleScheduleProperty, SingleScheduleProperty}
-import pl.touk.nussknacker.engine.management.periodic.model.PeriodicProcessDeploymentStatus.PeriodicProcessDeploymentStatus
-import slick.lifted.MappedTo
-
-import java.time.{Clock, LocalDateTime}
-
-// TODO: We should separate schedules concept from deployments - fully switch to ScheduleData and ScheduleDeploymentData
-case class PeriodicProcessDeployment[DeploymentData <: DeploymentWithJarData](
- id: PeriodicProcessDeploymentId,
- periodicProcess: PeriodicProcess[DeploymentData],
- createdAt: LocalDateTime,
- runAt: LocalDateTime,
- scheduleName: ScheduleName,
- retriesLeft: Int,
- nextRetryAt: Option[LocalDateTime],
- state: PeriodicProcessDeploymentState
-) {
-
- def nextRunAt(clock: Clock): Either[String, Option[LocalDateTime]] =
- (periodicProcess.scheduleProperty, scheduleName.value) match {
- case (MultipleScheduleProperty(schedules), Some(name)) =>
- schedules.get(name).toRight(s"Failed to find schedule: $scheduleName").flatMap(_.nextRunAt(clock))
- case (e: SingleScheduleProperty, None) => e.nextRunAt(clock)
- case (schedule, name) => Left(s"Schedule name: $name mismatch with schedule: $schedule")
- }
-
- def display: String =
- s"${periodicProcess.processVersion} with scheduleName=${scheduleName.display} and deploymentId=$id"
-
-}
-
-case class PeriodicProcessDeploymentState(
- deployedAt: Option[LocalDateTime],
- completedAt: Option[LocalDateTime],
- status: PeriodicProcessDeploymentStatus
-)
-
-case class PeriodicProcessDeploymentId(value: Long) extends AnyVal with MappedTo[Long] {
- override def toString: String = value.toString
-}
-
-object PeriodicProcessDeploymentStatus extends Enumeration {
- type PeriodicProcessDeploymentStatus = Value
-
- val Scheduled, Deployed, Finished, Failed, RetryingDeploy, FailedOnDeploy = Value
-}
-
-case class ScheduleName(value: Option[String]) {
- def display: String = value.getOrElse("[default]")
-}
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/service/AdditionalDeploymentDataProvider.scala b/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/service/AdditionalDeploymentDataProvider.scala
deleted file mode 100644
index 3131908a091..00000000000
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/service/AdditionalDeploymentDataProvider.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-package pl.touk.nussknacker.engine.management.periodic.service
-
-import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.model.DeploymentWithJarData.WithCanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.model.PeriodicProcessDeployment
-
-import java.time.format.DateTimeFormatter
-
-trait AdditionalDeploymentDataProvider {
-
- def prepareAdditionalData(runDetails: PeriodicProcessDeployment[WithCanonicalProcess]): Map[String, String]
-
-}
-
-object DefaultAdditionalDeploymentDataProvider extends AdditionalDeploymentDataProvider {
-
- override def prepareAdditionalData(
- runDetails: PeriodicProcessDeployment[WithCanonicalProcess]
- ): Map[String, String] = {
- Map(
- "deploymentId" -> runDetails.id.value.toString,
- "runAt" -> runDetails.runAt.format(DateTimeFormatter.ISO_LOCAL_DATE_TIME),
- "scheduleName" -> runDetails.scheduleName.value.getOrElse("")
- )
- }
-
-}
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/service/PeriodicProcessListener.scala b/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/service/PeriodicProcessListener.scala
deleted file mode 100644
index d3068aac568..00000000000
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/service/PeriodicProcessListener.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package pl.touk.nussknacker.engine.management.periodic.service
-
-import com.typesafe.config.Config
-import pl.touk.nussknacker.engine.api.deployment.StatusDetails
-import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.deployment.ExternalDeploymentId
-import pl.touk.nussknacker.engine.management.periodic.model.DeploymentWithJarData.WithCanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.model.PeriodicProcessDeployment
-
-/*
- Listener is at-least-once. If there are problems e.g. with DB, invocation can be repeated for same event.
- Implementation should be aware of that. Listener is invoked during DB transaction, for that reason it's *synchronous*
- */
-trait PeriodicProcessListener {
-
- def onPeriodicProcessEvent: PartialFunction[PeriodicProcessEvent, Unit]
- def close(): Unit = {}
-}
-
-trait PeriodicProcessListenerFactory {
- def create(config: Config): PeriodicProcessListener
-}
-
-sealed trait PeriodicProcessEvent {
- val deployment: PeriodicProcessDeployment[WithCanonicalProcess]
-}
-
-case class DeployedEvent(
- deployment: PeriodicProcessDeployment[WithCanonicalProcess],
- externalDeploymentId: Option[ExternalDeploymentId]
-) extends PeriodicProcessEvent
-
-case class FinishedEvent(
- deployment: PeriodicProcessDeployment[WithCanonicalProcess],
- processState: Option[StatusDetails]
-) extends PeriodicProcessEvent
-
-case class FailedOnDeployEvent(
- deployment: PeriodicProcessDeployment[WithCanonicalProcess],
- processState: Option[StatusDetails]
-) extends PeriodicProcessEvent
-
-case class FailedOnRunEvent(
- deployment: PeriodicProcessDeployment[WithCanonicalProcess],
- processState: Option[StatusDetails]
-) extends PeriodicProcessEvent
-
-case class ScheduledEvent(deployment: PeriodicProcessDeployment[WithCanonicalProcess], firstSchedule: Boolean)
- extends PeriodicProcessEvent
-
-object EmptyListener extends EmptyListener
-
-trait EmptyListener extends PeriodicProcessListener {
-
- override def onPeriodicProcessEvent: PartialFunction[PeriodicProcessEvent, Unit] = Map.empty
-
-}
-
-object EmptyPeriodicProcessListenerFactory extends PeriodicProcessListenerFactory {
- override def create(config: Config): PeriodicProcessListener = EmptyListener
-}
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/JarManagerStub.scala b/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/JarManagerStub.scala
deleted file mode 100644
index 4ded31dd0d1..00000000000
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/JarManagerStub.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package pl.touk.nussknacker.engine.management.periodic
-
-import pl.touk.nussknacker.engine.api.ProcessVersion
-import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.deployment.{DeploymentData, ExternalDeploymentId}
-import pl.touk.nussknacker.engine.management.periodic.model.DeploymentWithJarData
-
-import scala.concurrent.Future
-
-class JarManagerStub extends JarManager {
-
- var deployWithJarFuture: Future[Option[ExternalDeploymentId]] = Future.successful(None)
- var lastDeploymentWithJarData: Option[DeploymentWithJarData.WithCanonicalProcess] = None
-
- override def prepareDeploymentWithJar(
- processVersion: ProcessVersion,
- canonicalProcess: CanonicalProcess
- ): Future[DeploymentWithJarData.WithCanonicalProcess] = {
- Future.successful(
- model.DeploymentWithJarData.WithCanonicalProcess(
- processVersion = processVersion,
- process = canonicalProcess,
- inputConfigDuringExecutionJson = "",
- jarFileName = ""
- )
- )
- }
-
- override def deployWithJar(
- deploymentWithJarData: DeploymentWithJarData.WithCanonicalProcess,
- deploymentData: DeploymentData,
- ): Future[Option[ExternalDeploymentId]] = {
- lastDeploymentWithJarData = Some(deploymentWithJarData)
- deployWithJarFuture
- }
-
- override def deleteJar(jarFileName: String): Future[Unit] = Future.successful(())
-}
diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessGen.scala b/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessGen.scala
deleted file mode 100644
index 5d1062d4bb6..00000000000
--- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicProcessGen.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package pl.touk.nussknacker.engine.management.periodic
-
-import pl.touk.nussknacker.engine.api.ProcessVersion
-import pl.touk.nussknacker.engine.build.ScenarioBuilder
-import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.CronSchedulePropertyExtractor.CronPropertyDefaultName
-import pl.touk.nussknacker.engine.management.periodic.model.DeploymentWithJarData.WithCanonicalProcess
-import pl.touk.nussknacker.engine.management.periodic.model.{DeploymentWithJarData, PeriodicProcess, PeriodicProcessId}
-
-import java.time.LocalDateTime
-
-object PeriodicProcessGen {
-
- def apply(): PeriodicProcess[WithCanonicalProcess] = {
- PeriodicProcess(
- id = PeriodicProcessId(42),
- deploymentData = DeploymentWithJarData.WithCanonicalProcess(
- processVersion = ProcessVersion.empty,
- process = buildCanonicalProcess(),
- inputConfigDuringExecutionJson = "{}",
- jarFileName = "jar-file-name.jar"
- ),
- scheduleProperty = CronScheduleProperty("0 0 * * * ?"),
- active = true,
- createdAt = LocalDateTime.now(),
- None
- )
- }
-
- def buildCanonicalProcess(cronProperty: String = "0 0 * * * ?"): CanonicalProcess = {
- ScenarioBuilder
- .streaming("test")
- .additionalFields(properties = Map(CronPropertyDefaultName -> cronProperty))
- .source("test", "test")
- .emptySink("test", "test")
- }
-
-}
diff --git a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerProviderHelper.scala b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerProviderHelper.scala
index 81578b4fdd4..0b9b58fd450 100644
--- a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerProviderHelper.scala
+++ b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerProviderHelper.scala
@@ -1,8 +1,8 @@
package pl.touk.nussknacker.engine.management.streaming
-import _root_.sttp.client3.asynchttpclient.future.AsyncHttpClientFutureBackend
import akka.actor.ActorSystem
import org.asynchttpclient.DefaultAsyncHttpClientConfig
+import sttp.client3.asynchttpclient.future.AsyncHttpClientFutureBackend
import pl.touk.nussknacker.engine._
import pl.touk.nussknacker.engine.api.component.DesignerWideComponentId
import pl.touk.nussknacker.engine.api.deployment.{
@@ -13,13 +13,22 @@ import pl.touk.nussknacker.engine.api.deployment.{
}
import pl.touk.nussknacker.engine.definition.component.Components.ComponentDefinitionExtractionMode
import pl.touk.nussknacker.engine.management.FlinkStreamingDeploymentManagerProvider
+import pl.touk.nussknacker.engine.util.loader.ModelClassLoader
+import pl.touk.nussknacker.engine.{
+ ConfigWithUnresolvedVersion,
+ DeploymentManagerDependencies,
+ ModelData,
+ ModelDependencies,
+ ProcessingTypeConfig
+}
object FlinkStreamingDeploymentManagerProviderHelper {
def createDeploymentManager(
processingTypeConfig: ConfigWithUnresolvedVersion,
): DeploymentManager = {
- val typeConfig = ProcessingTypeConfig.read(processingTypeConfig)
+ val typeConfig = ProcessingTypeConfig.read(processingTypeConfig)
+ val modelClassLoader = ModelClassLoader(typeConfig.classPath, None)
val modelData = ModelData(
processingTypeConfig = typeConfig,
ModelDependencies(
@@ -27,8 +36,9 @@ object FlinkStreamingDeploymentManagerProviderHelper {
determineDesignerWideId = id => DesignerWideComponentId(id.toString),
workingDirectoryOpt = None,
_ => true,
- ComponentDefinitionExtractionMode.FinalDefinition
- )
+ ComponentDefinitionExtractionMode.FinalDefinition,
+ ),
+ modelClassLoader
)
val actorSystem = ActorSystem("FlinkStreamingDeploymentManagerProviderHelper")
val backend = AsyncHttpClientFutureBackend.usingConfig(new DefaultAsyncHttpClientConfig.Builder().build())
diff --git a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerSpec.scala b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerSpec.scala
index 5872b331cb7..0916e833329 100644
--- a/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerSpec.scala
+++ b/engine/flink/management/src/it/scala/pl/touk/nussknacker/engine/management/streaming/FlinkStreamingDeploymentManagerSpec.scala
@@ -18,6 +18,7 @@ import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId}
import pl.touk.nussknacker.engine.definition.component.Components.ComponentDefinitionExtractionMode
import pl.touk.nussknacker.engine.deployment.DeploymentData
+import pl.touk.nussknacker.engine.util.loader.ModelClassLoader
import java.net.URI
import java.nio.file.{Files, Paths}
@@ -271,7 +272,8 @@ class FlinkStreamingDeploymentManagerSpec extends AnyFunSuite with Matchers with
workingDirectoryOpt = None,
_ => true,
ComponentDefinitionExtractionMode.FinalDefinition
- )
+ ),
+ ModelClassLoader(processingTypeConfig.classPath, None)
)
val definition = modelData.modelDefinition
definition.components.components.map(_.id) should contain(ComponentId(ComponentType.Service, "accountService"))
diff --git a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkRestManager.scala b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkRestManager.scala
index 3a436aed5ae..1a66968f1e1 100644
--- a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkRestManager.scala
+++ b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkRestManager.scala
@@ -1,9 +1,18 @@
package pl.touk.nussknacker.engine.management
+import com.typesafe.config.Config
import com.typesafe.scalalogging.LazyLogging
import org.apache.flink.api.common.{JobID, JobStatus}
import pl.touk.nussknacker.engine.api.ProcessVersion
import pl.touk.nussknacker.engine.api.deployment._
+import pl.touk.nussknacker.engine.api.deployment.scheduler._
+import pl.touk.nussknacker.engine.api.deployment.scheduler.services.{
+ AdditionalDeploymentDataProvider,
+ ProcessConfigEnricherFactory,
+ SchedulePropertyExtractorFactory,
+ ScheduledExecutionPerformer,
+ ScheduledProcessListenerFactory
+}
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId}
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
@@ -11,10 +20,7 @@ import pl.touk.nussknacker.engine.deployment.{DeploymentId, ExternalDeploymentId
import pl.touk.nussknacker.engine.management.FlinkRestManager.ParsedJobConfig
import pl.touk.nussknacker.engine.management.rest.FlinkClient
import pl.touk.nussknacker.engine.management.rest.flinkRestModel.{BaseJobStatusCounts, JobOverview}
-import pl.touk.nussknacker.engine.util.WithDataFreshnessStatusUtils.{
- WithDataFreshnessStatusMapOps,
- WithDataFreshnessStatusOps
-}
+import pl.touk.nussknacker.engine.util.WithDataFreshnessStatusUtils.WithDataFreshnessStatusMapOps
import pl.touk.nussknacker.engine.{BaseModelData, DeploymentManagerDependencies, newdeployment}
import scala.concurrent.Future
@@ -71,6 +77,21 @@ class FlinkRestManager(
}
+ override def schedulingSupport: SchedulingSupport = new SchedulingSupported {
+
+ override def createScheduledExecutionPerformer(
+ modelData: BaseModelData,
+ dependencies: DeploymentManagerDependencies,
+ config: Config,
+ ): ScheduledExecutionPerformer = FlinkScheduledExecutionPerformer.create(modelData, dependencies, config)
+
+ override def customSchedulePropertyExtractorFactory: Option[SchedulePropertyExtractorFactory] = None
+ override def customProcessConfigEnricherFactory: Option[ProcessConfigEnricherFactory] = None
+ override def customScheduledProcessListenerFactory: Option[ScheduledProcessListenerFactory] = None
+ override def customAdditionalDeploymentDataProvider: Option[AdditionalDeploymentDataProvider] = None
+
+ }
+
private def getAllProcessesStatesFromFlink()(
implicit freshnessPolicy: DataFreshnessPolicy
): Future[WithDataFreshnessStatus[Map[ProcessName, List[StatusDetails]]]] = {
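
The schedulingSupport hook added above replaces the separate flinkPeriodic manager type: each DeploymentManager now advertises whether it can schedule, and the periodic machinery is engaged only when it returns a SchedulingSupported instance. A minimal, self-contained sketch of this capability pattern — the traits below are simplified stand-ins that only mirror the shape of the real SchedulingSupport API, for illustration:

// Stand-in model of the capability pattern; names mirror the real API, bodies are simplified.
sealed trait SchedulingSupport
case object NoSchedulingSupport extends SchedulingSupport
trait SchedulingSupported extends SchedulingSupport {
  def createScheduledExecutionPerformer(): String // stand-in for the real factory method
}

trait Manager { def schedulingSupport: SchedulingSupport }

object CapabilityDemo extends App {
  val withScheduling: Manager = new Manager {
    override val schedulingSupport: SchedulingSupport = new SchedulingSupported {
      override def createScheduledExecutionPerformer(): String = "performer"
    }
  }
  val withoutScheduling: Manager = new Manager {
    override val schedulingSupport: SchedulingSupport = NoSchedulingSupport
  }

  // Callers pattern-match on the capability instead of configuring a separate manager type
  def describe(m: Manager): String = m.schedulingSupport match {
    case s: SchedulingSupported => s"scheduling enabled, performer: ${s.createScheduledExecutionPerformer()}"
    case NoSchedulingSupport    => "scheduling disabled"
  }

  println(describe(withScheduling))    // scheduling enabled, performer: performer
  println(describe(withoutScheduling)) // scheduling disabled
}

This is why EmbeddedDeploymentManager and K8sDeploymentManager further down only need the one-liner NoSchedulingSupport override.
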
diff --git a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkScheduledExecutionPerformer.scala b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkScheduledExecutionPerformer.scala
new file mode 100644
index 00000000000..c312c7834db
--- /dev/null
+++ b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkScheduledExecutionPerformer.scala
@@ -0,0 +1,140 @@
+package pl.touk.nussknacker.engine.management
+
+import com.typesafe.config.Config
+import com.typesafe.scalalogging.LazyLogging
+import org.apache.flink.api.common.JobID
+import pl.touk.nussknacker.engine.api.ProcessVersion
+import pl.touk.nussknacker.engine.api.deployment.scheduler.model.{DeploymentWithRuntimeParams, RuntimeParams}
+import pl.touk.nussknacker.engine.api.deployment.scheduler.services.ScheduledExecutionPerformer
+import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
+import pl.touk.nussknacker.engine.deployment.{DeploymentData, ExternalDeploymentId}
+import pl.touk.nussknacker.engine.management.FlinkScheduledExecutionPerformer.jarFileNameRuntimeParam
+import pl.touk.nussknacker.engine.management.rest.{FlinkClient, HttpFlinkClient}
+import pl.touk.nussknacker.engine.modelconfig.InputConfigDuringExecution
+import pl.touk.nussknacker.engine.util.config.ConfigEnrichments.RichConfig
+import pl.touk.nussknacker.engine.{BaseModelData, DeploymentManagerDependencies, newdeployment}
+
+import java.nio.file.{Files, Path, Paths}
+import scala.concurrent.Future
+
+object FlinkScheduledExecutionPerformer {
+
+ val jarFileNameRuntimeParam = "jarFileName"
+
+ def create(
+ modelData: BaseModelData,
+ dependencies: DeploymentManagerDependencies,
+ config: Config,
+ ): ScheduledExecutionPerformer = {
+ import dependencies._
+ import net.ceedubs.ficus.Ficus._
+ import net.ceedubs.ficus.readers.ArbitraryTypeReader._
+ val flinkConfig = config.rootAs[FlinkConfig]
+ new FlinkScheduledExecutionPerformer(
+ flinkClient = HttpFlinkClient.createUnsafe(flinkConfig),
+ jarsDir = Paths.get(config.getString("scheduling.jarsDir")),
+ inputConfigDuringExecution = modelData.inputConfigDuringExecution,
+ modelJarProvider = new FlinkModelJarProvider(modelData.modelClassLoaderUrls)
+ )
+ }
+
+}
+
+// Used by [[PeriodicProcessService]].
+class FlinkScheduledExecutionPerformer(
+ flinkClient: FlinkClient,
+ jarsDir: Path,
+ inputConfigDuringExecution: InputConfigDuringExecution,
+ modelJarProvider: FlinkModelJarProvider
+) extends ScheduledExecutionPerformer
+ with LazyLogging {
+
+ import scala.concurrent.ExecutionContext.Implicits.global
+
+ override def prepareDeploymentWithRuntimeParams(
+ processVersion: ProcessVersion,
+ ): Future[DeploymentWithRuntimeParams] = {
+ logger.info(s"Prepare deployment for scenario: $processVersion")
+ copyJarToLocalDir(processVersion).map { jarFileName =>
+ DeploymentWithRuntimeParams(
+ processId = Some(processVersion.processId),
+ processName = processVersion.processName,
+ versionId = processVersion.versionId,
+ runtimeParams = RuntimeParams(Map(jarFileNameRuntimeParam -> jarFileName))
+ )
+ }
+ }
+
+ override def provideInputConfigDuringExecutionJson(): Future[InputConfigDuringExecution] =
+ Future.successful(inputConfigDuringExecution)
+
+ private def copyJarToLocalDir(processVersion: ProcessVersion): Future[String] = Future {
+ jarsDir.toFile.mkdirs()
+ val jarFileName =
+ s"${processVersion.processName}-${processVersion.versionId.value}-${System.currentTimeMillis()}.jar"
+ val jarPath = jarsDir.resolve(jarFileName)
+ Files.copy(modelJarProvider.getJobJar().toPath, jarPath)
+ logger.info(s"Copied current model jar to $jarPath")
+ jarFileName
+ }
+
+ override def deployWithRuntimeParams(
+ deployment: DeploymentWithRuntimeParams,
+ inputConfigDuringExecutionJson: String,
+ deploymentData: DeploymentData,
+ canonicalProcess: CanonicalProcess,
+ processVersion: ProcessVersion,
+ ): Future[Option[ExternalDeploymentId]] = {
+ deployment.runtimeParams.params.get(jarFileNameRuntimeParam) match {
+ case Some(jarFileName) =>
+ logger.info(
+ s"Deploying scenario ${deployment.processName}, version id: ${deployment.versionId} and jar: $jarFileName"
+ )
+ val jarFile = jarsDir.resolve(jarFileName).toFile
+ val args = FlinkDeploymentManager.prepareProgramArgs(
+ inputConfigDuringExecutionJson,
+ processVersion,
+ deploymentData,
+ canonicalProcess,
+ )
+ flinkClient.runProgram(
+ jarFile,
+ FlinkStreamingRestManager.MainClassName,
+ args,
+ None,
+ deploymentData.deploymentId.toNewDeploymentIdOpt.map(toJobId)
+ )
+ case None =>
+ logger.error(
+ s"Cannot deploy scenario ${deployment.processName}, version id: ${deployment.versionId}: jar file name not present"
+ )
+ Future.successful(None)
+ }
+ }
+
+ override def cleanAfterDeployment(runtimeParams: RuntimeParams): Future[Unit] = {
+ runtimeParams.params.get(jarFileNameRuntimeParam) match {
+ case Some(jarFileName) =>
+ logger.info(s"Deleting jar: $jarFileName")
+ for {
+ _ <- deleteLocalJar(jarFileName)
+ _ <- flinkClient.deleteJarIfExists(jarFileName)
+ } yield ()
+ case None =>
+ logger.warn(s"Jar file name not present among runtime params: ${runtimeParams}")
+ Future.unit
+ }
+
+ }
+
+ private def deleteLocalJar(jarFileName: String): Future[Unit] = Future {
+ val jarPath = jarsDir.resolve(jarFileName)
+ val deleted = Files.deleteIfExists(jarPath)
+ logger.info(s"Deleted: ($deleted) jar in: $jarPath")
+ }
+
+ private def toJobId(did: newdeployment.DeploymentId) = {
+ new JobID(did.value.getLeastSignificantBits, did.value.getMostSignificantBits).toHexString
+ }
+
+}
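
One detail worth calling out in the new performer is toJobId: the deployment id is UUID-based, and a deterministic Flink JobID is derived from its two 64-bit halves, so the same deployment always maps to the same job id. A sketch of the conversion, assuming flink-core on the classpath (the sample UUID is arbitrary):

import java.util.UUID
import org.apache.flink.api.common.JobID

object JobIdDemo extends App {
  val deploymentId = UUID.fromString("6ba7b810-9dad-11d1-80b4-00c04fd430c8")
  // JobID(lowerPart, upperPart) - matching toJobId above: the least significant
  // bits become the lower part, the most significant bits the upper part
  val jobId = new JobID(deploymentId.getLeastSignificantBits, deploymentId.getMostSignificantBits)
  println(jobId.toHexString) // hex form, as sent to Flink's REST API
}
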
diff --git a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkStreamingDeploymentManagerProvider.scala b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkStreamingDeploymentManagerProvider.scala
index fdc3eef46ac..7081194a2cd 100644
--- a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkStreamingDeploymentManagerProvider.scala
+++ b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkStreamingDeploymentManagerProvider.scala
@@ -26,15 +26,6 @@ class FlinkStreamingDeploymentManagerProvider extends DeploymentManagerProvider
dependencies: DeploymentManagerDependencies,
deploymentConfig: Config,
scenarioStateCacheTTL: Option[FiniteDuration]
- ): ValidatedNel[String, DeploymentManager] = {
- createDeploymentManagerWithCapabilities(modelData, dependencies, deploymentConfig, scenarioStateCacheTTL)
- }
-
- def createDeploymentManagerWithCapabilities(
- modelData: BaseModelData,
- dependencies: DeploymentManagerDependencies,
- deploymentConfig: Config,
- scenarioStateCacheTTL: Option[FiniteDuration]
): ValidatedNel[String, DeploymentManager] = {
import dependencies._
val flinkConfig = deploymentConfig.rootAs[FlinkConfig]
diff --git a/engine/flink/management/src/test/scala/pl/touk/nussknacker/engine/management/FlinkRestManagerSpec.scala b/engine/flink/management/src/test/scala/pl/touk/nussknacker/engine/management/FlinkRestManagerSpec.scala
index f379f73891b..200458a48ce 100644
--- a/engine/flink/management/src/test/scala/pl/touk/nussknacker/engine/management/FlinkRestManagerSpec.scala
+++ b/engine/flink/management/src/test/scala/pl/touk/nussknacker/engine/management/FlinkRestManagerSpec.scala
@@ -19,13 +19,7 @@ import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus.Proble
import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId}
import pl.touk.nussknacker.engine.api.{MetaData, ProcessVersion, StreamMetaData}
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
-import pl.touk.nussknacker.engine.deployment.{
- AdditionalModelConfigs,
- DeploymentData,
- DeploymentId,
- ExternalDeploymentId,
- User
-}
+import pl.touk.nussknacker.engine.deployment._
import pl.touk.nussknacker.engine.management.rest.HttpFlinkClient
import pl.touk.nussknacker.engine.management.rest.flinkRestModel._
import pl.touk.nussknacker.engine.testing.LocalModelData
diff --git a/engine/flink/tests/src/test/scala/pl/touk/nussknacker/defaultmodel/NkGlobalParametersEncoderTest.scala b/engine/flink/tests/src/test/scala/pl/touk/nussknacker/defaultmodel/NkGlobalParametersEncoderTest.scala
index 7ad5e7528a1..8ebb8aa889c 100644
--- a/engine/flink/tests/src/test/scala/pl/touk/nussknacker/defaultmodel/NkGlobalParametersEncoderTest.scala
+++ b/engine/flink/tests/src/test/scala/pl/touk/nussknacker/defaultmodel/NkGlobalParametersEncoderTest.scala
@@ -12,6 +12,7 @@ class NkGlobalParametersEncoderTest extends AnyFunSuite with Matchers {
test("global parameters set and read from context are equal") {
val globalParamsWithAllOptionalValues = NkGlobalParameters(
buildInfo = "aBuildInfo",
+ deploymentId = "1",
processVersion = ProcessVersion(
VersionId.initialVersionId,
ProcessName("aProcessName"),
@@ -27,6 +28,7 @@ class NkGlobalParametersEncoderTest extends AnyFunSuite with Matchers {
val globalParamsWithNoOptionalValues = NkGlobalParameters(
buildInfo = "aBuildInfo",
+ deploymentId = "1",
processVersion = ProcessVersion(
VersionId.initialVersionId,
ProcessName("aProcessName"),
@@ -44,6 +46,7 @@ class NkGlobalParametersEncoderTest extends AnyFunSuite with Matchers {
val decodedParams = NkGlobalParameters.fromMap(params.toMap).get
decodedParams.buildInfo shouldBe params.buildInfo
+ decodedParams.deploymentId shouldBe params.deploymentId
decodedParams.processVersion shouldBe params.processVersion
decodedParams.configParameters shouldBe params.configParameters
decodedParams.namespaceParameters shouldBe params.namespaceParameters
diff --git a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManager.scala b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManager.scala
index 785d01b97b8..9603698ddf5 100644
--- a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManager.scala
+++ b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManager.scala
@@ -257,6 +257,8 @@ class EmbeddedDeploymentManager(
override def stateQueryForAllScenariosSupport: StateQueryForAllScenariosSupport = NoStateQueryForAllScenariosSupport
+ override def schedulingSupport: SchedulingSupport = NoSchedulingSupport
+
override def processStateDefinitionManager: ProcessStateDefinitionManager = EmbeddedProcessStateDefinitionManager
override def close(): Unit = {
diff --git a/engine/lite/embeddedDeploymentManager/src/test/scala/pl/touk/nussknacker/streaming/embedded/RequestResponseEmbeddedDeploymentManagerTest.scala b/engine/lite/embeddedDeploymentManager/src/test/scala/pl/touk/nussknacker/streaming/embedded/RequestResponseEmbeddedDeploymentManagerTest.scala
index 0d28fae5775..d3011301eef 100644
--- a/engine/lite/embeddedDeploymentManager/src/test/scala/pl/touk/nussknacker/streaming/embedded/RequestResponseEmbeddedDeploymentManagerTest.scala
+++ b/engine/lite/embeddedDeploymentManager/src/test/scala/pl/touk/nussknacker/streaming/embedded/RequestResponseEmbeddedDeploymentManagerTest.scala
@@ -7,20 +7,9 @@ import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import pl.touk.nussknacker.engine.api.ProcessVersion
import pl.touk.nussknacker.engine.api.deployment.DeploymentUpdateStrategy.StateRestoringStrategy
+import pl.touk.nussknacker.engine.api.deployment._
import pl.touk.nussknacker.engine.api.deployment.cache.ScenarioStateCachingConfig
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
-import pl.touk.nussknacker.engine.api.deployment.{
- DMCancelScenarioCommand,
- DMRunDeploymentCommand,
- DataFreshnessPolicy,
- DeployedScenarioData,
- DeploymentManager,
- DeploymentUpdateStrategy,
- NoOpScenarioActivityManager,
- ProcessingTypeActionServiceStub,
- ProcessingTypeDeployedScenariosProviderStub,
- ScenarioActivityManager
-}
import pl.touk.nussknacker.engine.api.process.ProcessName
import pl.touk.nussknacker.engine.build.ScenarioBuilder
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
diff --git a/engine/lite/k8sDeploymentManager/src/main/scala/pl/touk/nussknacker/k8s/manager/K8sDeploymentManager.scala b/engine/lite/k8sDeploymentManager/src/main/scala/pl/touk/nussknacker/k8s/manager/K8sDeploymentManager.scala
index 8357f2a449d..5c83b0b9aa1 100644
--- a/engine/lite/k8sDeploymentManager/src/main/scala/pl/touk/nussknacker/k8s/manager/K8sDeploymentManager.scala
+++ b/engine/lite/k8sDeploymentManager/src/main/scala/pl/touk/nussknacker/k8s/manager/K8sDeploymentManager.scala
@@ -389,6 +389,7 @@ class K8sDeploymentManager(
override def stateQueryForAllScenariosSupport: StateQueryForAllScenariosSupport = NoStateQueryForAllScenariosSupport
+ override def schedulingSupport: SchedulingSupport = NoSchedulingSupport
}
object K8sDeploymentManager {
diff --git a/engine/lite/k8sDeploymentManager/src/test/scala/pl/touk/nussknacker/k8s/manager/BaseK8sDeploymentManagerTest.scala b/engine/lite/k8sDeploymentManager/src/test/scala/pl/touk/nussknacker/k8s/manager/BaseK8sDeploymentManagerTest.scala
index 05b006824bc..b81fdaf74d3 100644
--- a/engine/lite/k8sDeploymentManager/src/test/scala/pl/touk/nussknacker/k8s/manager/BaseK8sDeploymentManagerTest.scala
+++ b/engine/lite/k8sDeploymentManager/src/test/scala/pl/touk/nussknacker/k8s/manager/BaseK8sDeploymentManagerTest.scala
@@ -9,8 +9,8 @@ import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import pl.touk.nussknacker.engine.api.ProcessVersion
import pl.touk.nussknacker.engine.api.deployment.DeploymentUpdateStrategy.StateRestoringStrategy
-import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
import pl.touk.nussknacker.engine.api.deployment._
+import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
import pl.touk.nussknacker.engine.deployment.DeploymentData
import pl.touk.nussknacker.engine.{DeploymentManagerDependencies, ModelData}
diff --git a/engine/lite/k8sDeploymentManager/src/test/scala/pl/touk/nussknacker/k8s/manager/K8sDeploymentManagerOnMocksTest.scala b/engine/lite/k8sDeploymentManager/src/test/scala/pl/touk/nussknacker/k8s/manager/K8sDeploymentManagerOnMocksTest.scala
index 16b645f8582..309a7bc25fe 100644
--- a/engine/lite/k8sDeploymentManager/src/test/scala/pl/touk/nussknacker/k8s/manager/K8sDeploymentManagerOnMocksTest.scala
+++ b/engine/lite/k8sDeploymentManager/src/test/scala/pl/touk/nussknacker/k8s/manager/K8sDeploymentManagerOnMocksTest.scala
@@ -13,9 +13,7 @@ import pl.touk.nussknacker.engine.api.deployment.{
DataFreshnessPolicy,
NoOpScenarioActivityManager,
ProcessingTypeActionServiceStub,
- ProcessingTypeDeployedScenariosProvider,
- ProcessingTypeDeployedScenariosProviderStub,
- ScenarioActivityManager
+ ProcessingTypeDeployedScenariosProviderStub
}
import pl.touk.nussknacker.engine.api.process.ProcessName
import pl.touk.nussknacker.engine.testing.LocalModelData
diff --git a/nussknacker-dist/src/universal/conf/dev-application.conf b/nussknacker-dist/src/universal/conf/dev-application.conf
index ddf3c4c27aa..f795c859b07 100644
--- a/nussknacker-dist/src/universal/conf/dev-application.conf
+++ b/nussknacker-dist/src/universal/conf/dev-application.conf
@@ -159,16 +159,15 @@ scenarioTypes {
}
"periodic-dev": {
deploymentConfig: {
- type: "flinkPeriodic"
+ type: "flinkStreaming"
+ scheduling {
+ enabled: true
+ processingType: streaming
+ jarsDir: ${storageDir}/jars
+ }
restUrl: "http://jobmanager:8081"
restUrl: ${?FLINK_REST_URL}
shouldVerifyBeforeDeploy: ${?FLINK_SHOULD_VERIFY_BEFORE_DEPLOY}
- deploymentManager {
- db: ${db},
- db.table: "periodic_flyway_schema_history"
- processingType: streaming,
- jarsDir: ${storageDir}/jars
- }
}
modelConfig: {
classPath: ["model/devModel.jar", "model/flinkExecutor.jar", "components/flink", "components/common", "flink-dropwizard-metrics-deps/"]
diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala
index 43f00f94bd9..d87f651729f 100644
--- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala
+++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/ModelData.scala
@@ -40,8 +40,11 @@ object ModelData extends LazyLogging {
Map[DesignerWideComponentId, ComponentAdditionalConfig]
) => ModelDefinition
- def apply(processingTypeConfig: ProcessingTypeConfig, dependencies: ModelDependencies): ModelData = {
- val modelClassLoader = ModelClassLoader(processingTypeConfig.classPath, dependencies.workingDirectoryOpt)
+ def apply(
+ processingTypeConfig: ProcessingTypeConfig,
+ dependencies: ModelDependencies,
+ modelClassLoader: ModelClassLoader
+ ): ModelData = {
ClassLoaderModelData(
_.resolveInputConfigDuringExecution(processingTypeConfig.modelConfig, modelClassLoader.classLoader),
modelClassLoader,
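
The signature change means ModelData no longer builds its own ModelClassLoader: the caller constructs it once and passes it in, so the same loader can be shared with the deployment manager. A call-site sketch matching the updated test helper earlier in this diff (assumes the nussknacker modules on the classpath; modelDependencies stands for a pre-built ModelDependencies value):

val typeConfig       = ProcessingTypeConfig.read(processingTypeConfig)
val modelClassLoader = ModelClassLoader(typeConfig.classPath, None) // None: no working directory override
val modelData        = ModelData(typeConfig, modelDependencies, modelClassLoader)
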
diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/spel/SpelExpressionParseError.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/spel/SpelExpressionParseError.scala
index 49b27a7f186..5182bd6bae6 100644
--- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/spel/SpelExpressionParseError.scala
+++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/spel/SpelExpressionParseError.scala
@@ -106,6 +106,10 @@ object SpelExpressionParseError {
override def message: String = s"There is no property '$property' in type: ${typ.display}"
}
+ case class NoPropertyTypeError(typ: TypingResult, propertyType: TypingResult) extends MissingObjectError {
+ override def message: String = s"There is no property of type '${propertyType.display}' in type: ${typ.display}"
+ }
+
case class UnknownMethodError(methodName: String, displayableType: String) extends MissingObjectError {
override def message: String = s"Unknown method '$methodName' in $displayableType"
}
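
Unlike NoPropertyError, which names a concrete missing property, the new error reports the type of the index expression. A tiny self-contained illustration of the message format — DisplayType is a stand-in for the real TypingResult, and the display strings are assumed values:

case class DisplayType(display: String)

case class NoPropertyTypeError(typ: DisplayType, propertyType: DisplayType) {
  def message: String = s"There is no property of type '${propertyType.display}' in type: ${typ.display}"
}

object ErrorMessageDemo extends App {
  // roughly what typing {a: 5, b: 10}[4] could surface (see the TyperSpec change below)
  val err = NoPropertyTypeError(DisplayType("Record{a: Integer, b: Integer}"), DisplayType("Integer"))
  println(err.message)
  // There is no property of type 'Integer' in type: Record{a: Integer, b: Integer}
}
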
diff --git a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/spel/Typer.scala b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/spel/Typer.scala
index 94398a81cd6..23344aab195 100644
--- a/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/spel/Typer.scala
+++ b/scenario-compiler/src/main/scala/pl/touk/nussknacker/engine/spel/Typer.scala
@@ -26,6 +26,7 @@ import pl.touk.nussknacker.engine.spel.SpelExpressionParseError.IllegalOperation
import pl.touk.nussknacker.engine.spel.SpelExpressionParseError.MissingObjectError.{
ConstructionOfUnknown,
NoPropertyError,
+ NoPropertyTypeError,
NonReferenceError,
UnresolvedReferenceError
}
@@ -199,11 +200,16 @@ private[spel] class Typer(
case _ => typeFieldNameReferenceOnRecord(indexString, record)
}
case indexKey :: Nil if indexKey.canBeConvertedTo(Typed[String]) =>
- if (dynamicPropertyAccessAllowed) valid(Unknown) else invalid(DynamicPropertyAccessError)
- case _ :: Nil =>
+ if (dynamicPropertyAccessAllowed) valid(Unknown)
+ else
+ record.runtimeObjType.params match {
+ case _ :: value :: Nil if record.runtimeObjType.klass == classOf[java.util.Map[_, _]] => valid(value)
+ case _ => valid(Unknown)
+ }
+ case e :: Nil =>
indexer.children match {
case (ref: PropertyOrFieldReference) :: Nil => typeFieldNameReferenceOnRecord(ref.getName, record)
- case _ => if (dynamicPropertyAccessAllowed) valid(Unknown) else invalid(DynamicPropertyAccessError)
+ case _ => if (dynamicPropertyAccessAllowed) valid(Unknown) else invalid(NoPropertyTypeError(record, e))
}
case _ =>
invalid(IllegalIndexingOperation)
@@ -218,7 +224,17 @@ private[spel] class Typer(
// TODO: validate indexer key - the only valid key is an integer - but its more complicated with references
withTypedChildren(_ => valid(param))
case TypedClass(clazz, keyParam :: valueParam :: Nil) if clazz.isAssignableFrom(classOf[java.util.Map[_, _]]) =>
- withTypedChildren(_ => valid(valueParam))
+ withTypedChildren {
+      // SpEL's map indexer (org.springframework.expression.spel.ast.Indexer, line 154) tries to convert the index
+      // expression to the map's key type, but that conversion can only happen when the key type is known to SpEL.
+      // The .asMap extension is currently implemented in such a way that SpEL does not know the key type of the
+      // resulting map (at evaluation time it only sees a map, without its type parameters). Changing the .asMap
+      // implementation would be hard, so we partially disable this indexer conversion by accepting, during typing,
+      // only the cases where the map key type and the index type are the same (indexing with an unknown type still
+      // has to be allowed).
+ case indexKey :: Nil if indexKey.canBeConvertedWithoutConversionTo(keyParam) => valid(valueParam)
+ case _ => invalid(IllegalIndexingOperation)
+ }
case d: TypedDict => dictTyper.typeDictValue(d, e).map(toNodeResult)
case union: TypedUnion => typeUnion(e, union)
case TypedTaggedValue(underlying, _) => typeIndexer(e, underlying)
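
The first change above is the interesting one: a record indexed with a String expression whose value is unknown at compile time is now typed as a map lookup, yielding the record's value type instead of the old DynamicPropertyAccessError. A self-contained model of that fallback, with the Typing hierarchy as a stand-in for the real TypingResult:

sealed trait Typing
case object UnknownTyping extends Typing
case class TypedClazz(klass: Class[_], params: List[Typing]) extends Typing

object DynamicIndexDemo extends App {

  // Mirrors the new branch: if the runtime type is java.util.Map[K, V], the
  // result of dynamic indexing is V; otherwise fall back to Unknown
  def typeDynamicIndex(record: TypedClazz): Typing =
    record.params match {
      case _ :: value :: Nil if record.klass == classOf[java.util.Map[_, _]] => value
      case _                                                                 => UnknownTyping
    }

  val intType    = TypedClazz(classOf[Integer], Nil)
  val stringType = TypedClazz(classOf[String], Nil)
  // a record literal like {a: 5, b: 10} is represented as Map[String, Integer]
  val record = TypedClazz(classOf[java.util.Map[_, _]], List(stringType, intType))

  println(typeDynamicIndex(record)) // TypedClazz(class java.lang.Integer, List())
}
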
diff --git a/scenario-compiler/src/test/scala/pl/touk/nussknacker/engine/spel/SpelExpressionSpec.scala b/scenario-compiler/src/test/scala/pl/touk/nussknacker/engine/spel/SpelExpressionSpec.scala
index 6291e7c8ff4..bdf1386044c 100644
--- a/scenario-compiler/src/test/scala/pl/touk/nussknacker/engine/spel/SpelExpressionSpec.scala
+++ b/scenario-compiler/src/test/scala/pl/touk/nussknacker/engine/spel/SpelExpressionSpec.scala
@@ -272,6 +272,18 @@ class SpelExpressionSpec extends AnyFunSuite with Matchers with ValidatedValuesD
private def evaluate[T: TypeTag](expr: String, context: Context = ctx): T =
parse[T](expr = expr, context = context).validExpression.evaluateSync[T](context)
+ test("should be able to dynamically index record") {
+ evaluate[Int]("{a: 5, b: 10}[#input.toString()]", Context("abc").withVariable("input", "a")) shouldBe 5
+ evaluate[Integer]("{a: 5, b: 10}[#input.toString()]", Context("abc").withVariable("input", "asdf")) shouldBe null
+ }
+
+ test("should figure out result type when dynamically indexing record") {
+ evaluate[Int](
+ "{a: {g: 5, h: 10}, b: {g: 50, h: 100}}[#input.toString()].h",
+ Context("abc").withVariable("input", "b")
+ ) shouldBe 100
+ }
+
test("parsing first selection on array") {
parse[Any]("{1,2,3,4,5,6,7,8,9,10}.^[(#this%2==0)]").validExpression
.evaluateSync[java.util.ArrayList[Int]](ctx) should equal(2)
@@ -897,6 +909,28 @@ class SpelExpressionSpec extends AnyFunSuite with Matchers with ValidatedValuesD
parse[java.math.BigDecimal]("-1.1", ctx) shouldBe Symbol("valid")
}
+ test("should not validate map indexing if index type and map key type are different") {
+ parse[Any]("""{{key: "a", value: 5}}.toMap[0]""") shouldBe Symbol("invalid")
+ parse[Any]("""{{key: 1, value: 5}}.toMap["0"]""") shouldBe Symbol("invalid")
+ parse[Any]("""{{key: 1.toLong, value: 5}}.toMap[0]""") shouldBe Symbol("invalid")
+ parse[Any]("""{{key: 1, value: 5}}.toMap[0.toLong]""") shouldBe Symbol("invalid")
+ }
+
+ test("should validate map indexing if index type and map key type are the same") {
+ parse[Any]("""{{key: 1, value: 5}}.toMap[0]""") shouldBe Symbol("valid")
+ }
+
+ test("should handle map indexing with unknown key type") {
+ val context = Context("sth").withVariables(
+ Map(
+ "unknownString" -> ContainerOfUnknown("a"),
+ )
+ )
+
+ evaluate[Int]("""{{key: "a", value: 5}}.toMap[#unknownString.value]""", context) shouldBe 5
+ evaluate[Integer]("""{{key: "b", value: 5}}.toMap[#unknownString.value]""", context) shouldBe null
+ }
+
test("validate ternary operator") {
parse[Long]("'d'? 3 : 4", ctx) should not be Symbol("valid")
parse[String]("1 > 2 ? 12 : 23", ctx) should not be Symbol("valid")
diff --git a/scenario-compiler/src/test/scala/pl/touk/nussknacker/engine/spel/TyperSpec.scala b/scenario-compiler/src/test/scala/pl/touk/nussknacker/engine/spel/TyperSpec.scala
index f69278af040..9da82310d9f 100644
--- a/scenario-compiler/src/test/scala/pl/touk/nussknacker/engine/spel/TyperSpec.scala
+++ b/scenario-compiler/src/test/scala/pl/touk/nussknacker/engine/spel/TyperSpec.scala
@@ -13,8 +13,10 @@ import pl.touk.nussknacker.engine.api.typed.typing._
import pl.touk.nussknacker.engine.definition.clazz.ClassDefinitionTestUtils
import pl.touk.nussknacker.engine.dict.{KeysDictTyper, SimpleDictRegistry}
import pl.touk.nussknacker.engine.expression.PositionRange
-import pl.touk.nussknacker.engine.spel.SpelExpressionParseError.IllegalOperationError.DynamicPropertyAccessError
-import pl.touk.nussknacker.engine.spel.SpelExpressionParseError.MissingObjectError.NoPropertyError
+import pl.touk.nussknacker.engine.spel.SpelExpressionParseError.MissingObjectError.{
+ NoPropertyError,
+ NoPropertyTypeError
+}
import pl.touk.nussknacker.engine.spel.SpelExpressionParseError.UnsupportedOperationError.MapWithExpressionKeysError
import pl.touk.nussknacker.engine.spel.Typer.TypingResultWithContext
import pl.touk.nussknacker.engine.spel.TyperSpecTestData.TestRecord._
@@ -162,10 +164,8 @@ class TyperSpec extends AnyFunSuite with Matchers with ValidatedValuesDetailedMe
typeExpression(s"$testRecordExpr[#var]", "var" -> s"$nonPresentKey").invalidValue.toList should matchPattern {
case NoPropertyError(typingResult, key) :: Nil if typingResult == testRecordTyped && key == nonPresentKey =>
}
- // TODO: this behavior is to be fixed - ideally this should behave the same as above
typeExpression(s"$testRecordExpr[$nonPresentKey]").invalidValue.toList should matchPattern {
- case NoPropertyError(typingResult, key) :: DynamicPropertyAccessError :: Nil
- if typingResult == testRecordTyped && key == nonPresentKey =>
+ case NoPropertyError(typingResult, key) :: Nil if typingResult == testRecordTyped && key == nonPresentKey =>
}
}
@@ -183,6 +183,17 @@ class TyperSpec extends AnyFunSuite with Matchers with ValidatedValuesDetailedMe
}
}
+ test("indexing on records with key which is not known at compile time treats record as map") {
+ typeExpression("{a: 5, b: 10}[#var.toString()]", "var" -> "a").validValue.finalResult.typingResult shouldBe Typed
+ .typedClass[Int]
+ }
+
+ test("indexing on records with non string key produces error") {
+ typeExpression("{a: 5, b: 10}[4]").invalidValue.toList should matchPattern {
+ case NoPropertyTypeError(_, _) :: Nil =>
+ }
+ }
+
private def buildTyper(dynamicPropertyAccessAllowed: Boolean = false) = new Typer(
dictTyper = new KeysDictTyper(new SimpleDictRegistry(Map.empty)),
strictMethodsChecking = false,