diff --git a/build.gradle b/build.gradle index 0efa56a43b..be3b3dcc49 100644 --- a/build.gradle +++ b/build.gradle @@ -245,6 +245,10 @@ subprojects { javaLauncher = javaToolchains.launcherFor { languageVersion = JavaLanguageVersion.current() } + testLogging { + exceptionFormat "full" + showStackTraces false + } reports { junitXml.required html.required diff --git a/data-prepper-api/build.gradle b/data-prepper-api/build.gradle index 045d331704..bf0f0aebd6 100644 --- a/data-prepper-api/build.gradle +++ b/data-prepper-api/build.gradle @@ -13,8 +13,8 @@ dependencies { implementation 'com.fasterxml.jackson.datatype:jackson-datatype-jsr310' implementation 'com.fasterxml.jackson.datatype:jackson-datatype-jdk8' implementation libs.parquet.common - testImplementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml' implementation libs.commons.lang3 + testImplementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml' testImplementation project(':data-prepper-test-common') testImplementation 'org.skyscreamer:jsonassert:1.5.3' testImplementation libs.commons.io diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/annotations/DataPrepperPlugin.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/annotations/DataPrepperPlugin.java index d94c0d8c19..7cfd29c1c9 100644 --- a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/annotations/DataPrepperPlugin.java +++ b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/annotations/DataPrepperPlugin.java @@ -72,4 +72,16 @@ * @since 1.2 */ Class pluginConfigurationType() default PluginSetting.class; + + /** + * Optional packages to scan for Data Prepper DI components. + * Plugins provide this list if they want to use Dependency Injection in their modules. + * Providing this value implicitly assumes and initiates a plugin-specific isolated ApplicationContext. + *

+ * The package names that spring context scans will be picked up by these marker classes. + * + * @return Array of classes to use for package scan. + * @since 2.2 + */ + Class[] packagesToScan() default {}; } diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/annotations/UsesDataPrepperPlugin.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/annotations/UsesDataPrepperPlugin.java new file mode 100644 index 0000000000..e94e2cca4c --- /dev/null +++ b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/annotations/UsesDataPrepperPlugin.java @@ -0,0 +1,23 @@ +package org.opensearch.dataprepper.model.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotates a field that uses Data Prepper plugin config as its value. + */ +@Documented +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.FIELD}) +public @interface UsesDataPrepperPlugin { + /** + * The class type for this plugin. 
+ * + * @return The Java class + * @since 1.2 + */ + Class pluginType(); +} diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/configuration/ConditionalRoute.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/configuration/ConditionalRoute.java index 0055702169..23571989d5 100644 --- a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/configuration/ConditionalRoute.java +++ b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/configuration/ConditionalRoute.java @@ -31,7 +31,7 @@ */ @JsonPropertyOrder @JsonClassDescription("The key-value pair defines routing condition, where the key is the name of a route and the " + - "value is a Data Prepper expression representing the routing condition.") + "value is an expression representing the routing condition.") @JsonSerialize(using = ConditionalRoute.ConditionalRouteSerializer.class) @JsonDeserialize(using = ConditionalRoute.ConditionalRouteDeserializer.class) public class ConditionalRoute { diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/event/DataType.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/event/DataType.java index c6e899a6f4..9a35532996 100644 --- a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/event/DataType.java +++ b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/event/DataType.java @@ -95,7 +95,7 @@ public String getTypeName() { } @JsonCreator - static DataType fromTypeName(final String option) { + public static DataType fromTypeName(final String option) { return TYPES_MAP.get(option); } diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/event/HandleFailedEventsOption.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/event/HandleFailedEventsOption.java index 6c310eb395..8b5be2de4e 100644 --- a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/event/HandleFailedEventsOption.java +++ 
b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/event/HandleFailedEventsOption.java @@ -6,6 +6,7 @@ package org.opensearch.dataprepper.model.event; import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; import java.util.Arrays; import java.util.Map; @@ -45,4 +46,9 @@ public boolean shouldLog() { static HandleFailedEventsOption fromOptionValue(final String option) { return OPTIONS_MAP.get(option.toLowerCase()); } + + @JsonValue + public String toOptionValue() { + return option; + } } diff --git a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/event/DataTypeTest.java b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/event/DataTypeTest.java index ac7a5bf613..ac6be94b7f 100644 --- a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/event/DataTypeTest.java +++ b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/event/DataTypeTest.java @@ -6,14 +6,21 @@ package org.opensearch.dataprepper.model.event; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtensionContext; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.EnumSource; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.emptyString; import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.params.provider.Arguments.arguments; import java.math.BigDecimal; import java.util.ArrayList; @@ -39,26 +46,55 @@ void test_isSameType(Object object, String type, boolean expectedResult) { 
assertThat(DataType.isSameType(object, type), equalTo(expectedResult)); } + @ParameterizedTest + @EnumSource(DataType.class) + void getTypeName_returns_non_empty_string_for_all_types(final DataType dataType) { + assertThat(dataType.getTypeName(), notNullValue()); + assertThat(dataType.getTypeName(), not(emptyString())); + } + + @ParameterizedTest + @ArgumentsSource(DataTypeToKnownString.class) + void getTypeName_returns_expected_name(final DataType dataType, final String expectedString) { + assertThat(dataType.getTypeName(), equalTo(expectedString)); + } + private static Stream getSameTypeTestData() { int[] testArray = {1,2}; List testList = new ArrayList<>(); return Stream.of( - Arguments.of(2, "integer", true), - Arguments.of("testString", "string", true), - Arguments.of(2L, "long", true), - Arguments.of(2.0, "double", true), - Arguments.of(BigDecimal.valueOf(2.34567), "big_decimal", true), - Arguments.of(true, "boolean", true), - Arguments.of(Map.of("k","v"), "map", true), - Arguments.of(testArray, "array", true), - Arguments.of(testList, "array", true), - Arguments.of(2.0, "integer", false), - Arguments.of(2, "string", false), - Arguments.of("testString", "long", false), - Arguments.of("testString", "double", false), - Arguments.of(2, "boolean", false), - Arguments.of(2L, "map", false), - Arguments.of(2, "array", false) + arguments(2, "integer", true), + arguments("testString", "string", true), + arguments(2L, "long", true), + arguments(2.0, "double", true), + arguments(BigDecimal.valueOf(2.34567), "big_decimal", true), + arguments(true, "boolean", true), + arguments(Map.of("k","v"), "map", true), + arguments(testArray, "array", true), + arguments(testList, "array", true), + arguments(2.0, "integer", false), + arguments(2, "string", false), + arguments("testString", "long", false), + arguments("testString", "double", false), + arguments(2, "boolean", false), + arguments(2L, "map", false), + arguments(2, "array", false) ); } + + static class 
DataTypeToKnownString implements ArgumentsProvider { + @Override + public Stream provideArguments(final ExtensionContext extensionContext) { + return Stream.of( + arguments(DataType.STRING, "string"), + arguments(DataType.BOOLEAN, "boolean"), + arguments(DataType.INTEGER, "integer"), + arguments(DataType.LONG, "long"), + arguments(DataType.DOUBLE, "double"), + arguments(DataType.BIG_DECIMAL, "big_decimal"), + arguments(DataType.MAP, "map"), + arguments(DataType.ARRAY, "array") + ); + } + } } diff --git a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/event/HandleFailedEventsOptionTest.java b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/event/HandleFailedEventsOptionTest.java index 90a319ad24..ca40ea28a5 100644 --- a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/event/HandleFailedEventsOptionTest.java +++ b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/event/HandleFailedEventsOptionTest.java @@ -5,29 +5,84 @@ package org.opensearch.dataprepper.model.event; -import org.hamcrest.CoreMatchers; +import org.junit.jupiter.api.extension.ExtensionContext; import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; import org.junit.jupiter.params.provider.EnumSource; +import java.util.stream.Stream; + import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.junit.jupiter.params.provider.Arguments.arguments; class HandleFailedEventsOptionTest { + @ParameterizedTest + @ArgumentsSource(EnumToShouldLogArgumentsProvider.class) + void shouldLog_returns_expected_value(final HandleFailedEventsOption option, final boolean shouldLog) { + assertThat(option.shouldLog(), equalTo(shouldLog)); + } + + @ParameterizedTest + 
@ArgumentsSource(EnumToShouldShouldDropArgumentsProvider.class) + void shouldDropEvent_returns_expected_value(final HandleFailedEventsOption option, final boolean shouldDrop) { + assertThat(option.shouldDropEvent(), equalTo(shouldDrop)); + } + + @ParameterizedTest + @ArgumentsSource(EnumToOptionValueArgumentsProvider.class) + void toOptionValue_returns_expected_value(final HandleFailedEventsOption option, final String optionValue) { + assertThat(option.toOptionValue(), equalTo(optionValue)); + } + + @ParameterizedTest + @ArgumentsSource(EnumToOptionValueArgumentsProvider.class) + void fromOptionValue_returns_expected_option(final HandleFailedEventsOption option, final String optionValue) { + assertThat(HandleFailedEventsOption.fromOptionValue(optionValue), equalTo(option)); + } + @ParameterizedTest @EnumSource(HandleFailedEventsOption.class) - void fromOptionValue(final HandleFailedEventsOption option) { - assertThat(HandleFailedEventsOption.fromOptionValue(option.name()), CoreMatchers.is(option)); + void toOptionValue_returns_non_null_for_all(final HandleFailedEventsOption option) { + assertThat(option.toOptionValue(), notNullValue()); + } - if (option == HandleFailedEventsOption.SKIP || option == HandleFailedEventsOption.SKIP_SILENTLY) { - assertThat(option.shouldDropEvent(), equalTo(false)); - } else { - assertThat(option.shouldDropEvent(), equalTo(true)); + private static class EnumToOptionValueArgumentsProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(final ExtensionContext context) { + return Stream.of( + arguments(HandleFailedEventsOption.SKIP, "skip"), + arguments(HandleFailedEventsOption.SKIP_SILENTLY, "skip_silently"), + arguments(HandleFailedEventsOption.DROP, "drop"), + arguments(HandleFailedEventsOption.DROP_SILENTLY, "drop_silently") + ); } + } + + private static class EnumToShouldShouldDropArgumentsProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(final ExtensionContext context) { 
+ return Stream.of( + arguments(HandleFailedEventsOption.SKIP, false), + arguments(HandleFailedEventsOption.SKIP_SILENTLY, false), + arguments(HandleFailedEventsOption.DROP, true), + arguments(HandleFailedEventsOption.DROP_SILENTLY, true) + ); + } + } - if (option == HandleFailedEventsOption.SKIP_SILENTLY || option == HandleFailedEventsOption.DROP_SILENTLY) { - assertThat(option.shouldLog(), equalTo(false)); - } else { - assertThat(option.shouldLog(), equalTo(true)); + private static class EnumToShouldLogArgumentsProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(final ExtensionContext context) { + return Stream.of( + arguments(HandleFailedEventsOption.SKIP, true), + arguments(HandleFailedEventsOption.DROP, true), + arguments(HandleFailedEventsOption.SKIP_SILENTLY, false), + arguments(HandleFailedEventsOption.DROP_SILENTLY, false) + ); } } } diff --git a/data-prepper-core/src/integrationTest/java/org/opensearch/dataprepper/plugin/DefaultPluginFactoryIT.java b/data-prepper-core/src/integrationTest/java/org/opensearch/dataprepper/plugin/DefaultPluginFactoryIT.java index 4c52c614d4..64c642dae6 100644 --- a/data-prepper-core/src/integrationTest/java/org/opensearch/dataprepper/plugin/DefaultPluginFactoryIT.java +++ b/data-prepper-core/src/integrationTest/java/org/opensearch/dataprepper/plugin/DefaultPluginFactoryIT.java @@ -15,7 +15,10 @@ import org.opensearch.dataprepper.model.configuration.PipelinesDataFlowModel; import org.opensearch.dataprepper.model.configuration.PluginSetting; import org.opensearch.dataprepper.model.plugin.InvalidPluginConfigurationException; +import org.opensearch.dataprepper.model.source.Source; import org.opensearch.dataprepper.plugins.TestObjectPlugin; +import org.opensearch.dataprepper.plugins.test.TestComponent; +import org.opensearch.dataprepper.plugins.test.TestDISource; import org.opensearch.dataprepper.plugins.test.TestPlugin; import org.opensearch.dataprepper.validation.LoggingPluginErrorsHandler; 
import org.opensearch.dataprepper.validation.PluginErrorCollector; @@ -30,6 +33,8 @@ import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; /** @@ -96,6 +101,23 @@ void loadPlugin_should_return_a_new_plugin_instance_with_the_expected_configurat assertThat(configuration.getOptionalString(), equalTo(optionalStringValue)); } + @Test + void loadPlugin_should_return_a_new_plugin_instance_with_DI_context_initialized() { + + final Map pluginSettingMap = new HashMap<>(); + final PluginSetting pluginSetting = new PluginSetting("test_di_source", pluginSettingMap); + pluginSetting.setPipelineName(pipelineName); + + final Source sourcePlugin = createObjectUnderTest().loadPlugin(Source.class, pluginSetting); + + assertThat(sourcePlugin, instanceOf(TestDISource.class)); + TestDISource plugin = (TestDISource) sourcePlugin; + // Testing the autowired bean with the Dependency Injection + assertNotNull(plugin.getTestComponent()); + assertInstanceOf(TestComponent.class, plugin.getTestComponent()); + assertThat(plugin.getTestComponent().getIdentifier(), equalTo("test-component")); + } + @Test void loadPlugin_should_return_a_new_plugin_instance_with_the_expected_configuration_variable_args() { diff --git a/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/model/DataPrepperConfiguration.java b/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/model/DataPrepperConfiguration.java index a8006a1aea..58cdefc167 100644 --- a/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/model/DataPrepperConfiguration.java +++ b/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/model/DataPrepperConfiguration.java @@ -11,6 +11,8 @@ import
com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSetter; import com.fasterxml.jackson.annotation.Nulls; +import org.opensearch.dataprepper.core.event.EventConfiguration; +import org.opensearch.dataprepper.core.event.EventConfigurationContainer; import org.opensearch.dataprepper.model.configuration.PipelineExtensions; import org.opensearch.dataprepper.model.configuration.PluginModel; import org.opensearch.dataprepper.parser.config.MetricTagFilter; @@ -29,7 +31,7 @@ /** * Class to hold configuration for DataPrepper, including server port and Log4j settings */ -public class DataPrepperConfiguration implements ExtensionsConfiguration { +public class DataPrepperConfiguration implements ExtensionsConfiguration, EventConfigurationContainer { static final Duration DEFAULT_SHUTDOWN_DURATION = Duration.ofSeconds(30L); private static final String DEFAULT_SOURCE_COORDINATION_STORE = "in_memory"; @@ -47,6 +49,7 @@ public class DataPrepperConfiguration implements ExtensionsConfiguration { private CircuitBreakerConfig circuitBreakerConfig; private SourceCoordinationConfig sourceCoordinationConfig; private PipelineShutdownOption pipelineShutdown; + private EventConfiguration eventConfiguration; private Map metricTags = new HashMap<>(); private List metricTagFilters = new LinkedList<>(); private PeerForwarderConfiguration peerForwarderConfiguration; @@ -92,6 +95,7 @@ public DataPrepperConfiguration( @JsonProperty("circuit_breakers") final CircuitBreakerConfig circuitBreakerConfig, @JsonProperty("source_coordination") final SourceCoordinationConfig sourceCoordinationConfig, @JsonProperty("pipeline_shutdown") final PipelineShutdownOption pipelineShutdown, + @JsonProperty("event") final EventConfiguration eventConfiguration, @JsonProperty("extensions") @JsonInclude(JsonInclude.Include.NON_NULL) @JsonSetter(nulls = Nulls.SKIP) @@ -102,6 +106,7 @@ public DataPrepperConfiguration( ? 
new SourceCoordinationConfig(new PluginModel(DEFAULT_SOURCE_COORDINATION_STORE, Collections.emptyMap()), null) : sourceCoordinationConfig; this.pipelineShutdown = pipelineShutdown != null ? pipelineShutdown : DEFAULT_PIPELINE_SHUTDOWN; + this.eventConfiguration = eventConfiguration != null ? eventConfiguration : EventConfiguration.defaultConfiguration(); setSsl(ssl); this.keyStoreFilePath = keyStoreFilePath != null ? keyStoreFilePath : ""; this.keyStorePassword = keyStorePassword != null ? keyStorePassword : ""; @@ -226,6 +231,10 @@ public PipelineShutdownOption getPipelineShutdown() { return pipelineShutdown; } + public EventConfiguration getEventConfiguration() { + return eventConfiguration; + } + @Override public PipelineExtensions getPipelineExtensions() { return pipelineExtensions; diff --git a/data-prepper-core/src/test/java/org/opensearch/dataprepper/parser/PipelineTransformerTests.java b/data-prepper-core/src/test/java/org/opensearch/dataprepper/parser/PipelineTransformerTests.java index 13b30965a6..bcddc49b94 100644 --- a/data-prepper-core/src/test/java/org/opensearch/dataprepper/parser/PipelineTransformerTests.java +++ b/data-prepper-core/src/test/java/org/opensearch/dataprepper/parser/PipelineTransformerTests.java @@ -131,6 +131,7 @@ void setUp() { @AfterEach void tearDown() { + verify(dataPrepperConfiguration).getEventConfiguration(); verifyNoMoreInteractions(dataPrepperConfiguration); } diff --git a/data-prepper-event/build.gradle b/data-prepper-event/build.gradle index 168d330579..5dc39d92a6 100644 --- a/data-prepper-event/build.gradle +++ b/data-prepper-event/build.gradle @@ -19,5 +19,7 @@ dependencies { implementation(libs.spring.context) { exclude group: 'commons-logging', module: 'commons-logging' } + implementation 'com.fasterxml.jackson.core:jackson-databind' + implementation libs.caffeine testImplementation libs.commons.lang3 } diff --git a/data-prepper-event/src/main/java/org/opensearch/dataprepper/core/event/CachingEventKeyFactory.java 
b/data-prepper-event/src/main/java/org/opensearch/dataprepper/core/event/CachingEventKeyFactory.java new file mode 100644 index 0000000000..df7e7c6255 --- /dev/null +++ b/data-prepper-event/src/main/java/org/opensearch/dataprepper/core/event/CachingEventKeyFactory.java @@ -0,0 +1,68 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.core.event; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; +import org.opensearch.dataprepper.model.event.EventKey; +import org.opensearch.dataprepper.model.event.EventKeyFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Arrays; +import java.util.Objects; + +class CachingEventKeyFactory implements EventKeyFactory { + private static final Logger log = LoggerFactory.getLogger(CachingEventKeyFactory.class); + private final EventKeyFactory delegateEventKeyFactory; + private final Cache cache; + + private static class CacheKey { + private final String key; + private final EventAction[] eventActions; + + private CacheKey(final String key, final EventAction[] eventActions) { + this.key = key; + this.eventActions = eventActions; + } + + @Override + public boolean equals(final Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + final CacheKey cacheKey = (CacheKey) o; + return Objects.equals(key, cacheKey.key) && Arrays.equals(eventActions, cacheKey.eventActions); + } + + @Override + public int hashCode() { + int result = Objects.hash(key); + result = 31 * result + Arrays.hashCode(eventActions); + return result; + } + } + + CachingEventKeyFactory(final EventKeyFactory delegateEventKeyFactory, final EventConfiguration eventConfiguration) { + Objects.requireNonNull(delegateEventKeyFactory); + Objects.requireNonNull(eventConfiguration); + + log.debug("Configured to cache a maximum of {} event keys.", 
eventConfiguration.getMaximumCachedKeys()); + + this.delegateEventKeyFactory = delegateEventKeyFactory; + cache = Caffeine.newBuilder() + .maximumSize(eventConfiguration.getMaximumCachedKeys()) + .build(); + } + + @Override + public EventKey createEventKey(final String key, final EventAction... forActions) { + return getOrCreateEventKey(new CacheKey(key, forActions)); + } + + private EventKey getOrCreateEventKey(final CacheKey cacheKey) { + return cache.asMap().computeIfAbsent(cacheKey, key -> delegateEventKeyFactory.createEventKey(key.key, key.eventActions)); + } +} diff --git a/data-prepper-event/src/main/java/org/opensearch/dataprepper/core/event/DefaultEventKeyFactory.java b/data-prepper-event/src/main/java/org/opensearch/dataprepper/core/event/DefaultEventKeyFactory.java index 605b5bcb41..c46ea75985 100644 --- a/data-prepper-event/src/main/java/org/opensearch/dataprepper/core/event/DefaultEventKeyFactory.java +++ b/data-prepper-event/src/main/java/org/opensearch/dataprepper/core/event/DefaultEventKeyFactory.java @@ -9,10 +9,7 @@ import org.opensearch.dataprepper.model.event.EventKeyFactory; import org.opensearch.dataprepper.model.event.InternalOnlyEventKeyBridge; -import javax.inject.Named; - -@Named -public class DefaultEventKeyFactory implements EventKeyFactory { +class DefaultEventKeyFactory implements EventKeyFactory { @Override public EventKey createEventKey(final String key, final EventAction... 
forActions) { return InternalOnlyEventKeyBridge.createEventKey(key, forActions); diff --git a/data-prepper-event/src/main/java/org/opensearch/dataprepper/core/event/EventConfiguration.java b/data-prepper-event/src/main/java/org/opensearch/dataprepper/core/event/EventConfiguration.java new file mode 100644 index 0000000000..754a985af3 --- /dev/null +++ b/data-prepper-event/src/main/java/org/opensearch/dataprepper/core/event/EventConfiguration.java @@ -0,0 +1,33 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.core.event; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * Data Prepper configurations for events. + */ +public class EventConfiguration { + @JsonProperty("maximum_cached_keys") + private Integer maximumCachedKeys = 512; + + public static EventConfiguration defaultConfiguration() { + return new EventConfiguration(); + } + + /** + * Gets the maximum number of cached {@link org.opensearch.dataprepper.model.event.EventKey} objects. 
+ * + * @return the cache maximum count + */ + Integer getMaximumCachedKeys() { + return maximumCachedKeys; + } + + void setMaximumCachedKeys(final Integer maximumCachedKeys) { + this.maximumCachedKeys = maximumCachedKeys; + } +} diff --git a/data-prepper-event/src/main/java/org/opensearch/dataprepper/core/event/EventConfigurationContainer.java b/data-prepper-event/src/main/java/org/opensearch/dataprepper/core/event/EventConfigurationContainer.java new file mode 100644 index 0000000000..cc7da76427 --- /dev/null +++ b/data-prepper-event/src/main/java/org/opensearch/dataprepper/core/event/EventConfigurationContainer.java @@ -0,0 +1,10 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.core.event; + +public interface EventConfigurationContainer { + EventConfiguration getEventConfiguration(); +} diff --git a/data-prepper-event/src/main/java/org/opensearch/dataprepper/core/event/EventFactoryApplicationConfiguration.java b/data-prepper-event/src/main/java/org/opensearch/dataprepper/core/event/EventFactoryApplicationConfiguration.java new file mode 100644 index 0000000000..cf33c3e8d2 --- /dev/null +++ b/data-prepper-event/src/main/java/org/opensearch/dataprepper/core/event/EventFactoryApplicationConfiguration.java @@ -0,0 +1,40 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.core.event; + +import org.opensearch.dataprepper.model.event.EventKeyFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Primary; + +import javax.inject.Named; + +@Configuration +class EventFactoryApplicationConfiguration { + @Bean + EventConfiguration eventConfiguration(@Autowired(required = false) final EventConfigurationContainer eventConfigurationContainer) { + 
if(eventConfigurationContainer == null || eventConfigurationContainer.getEventConfiguration() == null) + return EventConfiguration.defaultConfiguration(); + return eventConfigurationContainer.getEventConfiguration(); + } + + @Bean(name = "innerEventKeyFactory") + EventKeyFactory innerEventKeyFactory() { + return new DefaultEventKeyFactory(); + } + + @Primary + @Bean(name = "eventKeyFactory") + EventKeyFactory eventKeyFactory( + @Named("innerEventKeyFactory") final EventKeyFactory eventKeyFactory, + final EventConfiguration eventConfiguration) { + if(eventConfiguration.getMaximumCachedKeys() <= 0) { + return eventKeyFactory; + } + return new CachingEventKeyFactory(eventKeyFactory, eventConfiguration); + } +} diff --git a/data-prepper-event/src/test/java/org/opensearch/dataprepper/core/event/CachingEventKeyFactoryTest.java b/data-prepper-event/src/test/java/org/opensearch/dataprepper/core/event/CachingEventKeyFactoryTest.java new file mode 100644 index 0000000000..30b74183fb --- /dev/null +++ b/data-prepper-event/src/test/java/org/opensearch/dataprepper/core/event/CachingEventKeyFactoryTest.java @@ -0,0 +1,162 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.core.event; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.opensearch.dataprepper.model.event.EventKey; +import org.opensearch.dataprepper.model.event.EventKeyFactory; + +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; + +import static org.hamcrest.CoreMatchers.sameInstance; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; 
+import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +class CachingEventKeyFactoryTest { + private static final int CACHE_SIZE = 2; + @Mock + private EventKeyFactory innerEventKeyFactory; + + @Mock + private EventConfiguration eventConfiguration; + + @BeforeEach + void setUp() { + when(eventConfiguration.getMaximumCachedKeys()).thenReturn(CACHE_SIZE); + } + + private EventKeyFactory createObjectUnderTest() { + return new CachingEventKeyFactory(innerEventKeyFactory, eventConfiguration); + } + + @ParameterizedTest + @EnumSource(EventKeyFactory.EventAction.class) + void createEventKey_with_EventAction_returns_inner_createEventKey(final EventKeyFactory.EventAction eventAction) { + final String key = UUID.randomUUID().toString(); + final EventKey eventKey = mock(EventKey.class); + + when(innerEventKeyFactory.createEventKey(key, eventAction)).thenReturn(eventKey); + + final EventKey actualEventKey = createObjectUnderTest().createEventKey(key, eventAction); + assertThat(actualEventKey, sameInstance(eventKey)); + } + + @Test + void createEventKey_returns_inner_createEventKey() { + final String key = UUID.randomUUID().toString(); + final EventKey eventKey = mock(EventKey.class); + + when(innerEventKeyFactory.createEventKey(key, EventKeyFactory.EventAction.ALL)).thenReturn(eventKey); + final EventKey actualEventKey = createObjectUnderTest().createEventKey(key); + assertThat(actualEventKey, sameInstance(eventKey)); + } + + @ParameterizedTest + @EnumSource(EventKeyFactory.EventAction.class) + void createEventKey_with_EventAction_returns_same_instance_without_calling_inner_createEventKey_for_same_key(final EventKeyFactory.EventAction eventAction) { + final String key = UUID.randomUUID().toString(); + final EventKey eventKey = mock(EventKey.class); + + when(innerEventKeyFactory.createEventKey(key, 
eventAction)).thenReturn(eventKey); + + final EventKeyFactory objectUnderTest = createObjectUnderTest(); + final EventKey actualKey = objectUnderTest.createEventKey(key, eventAction); + final EventKey actualKey2 = objectUnderTest.createEventKey(key, eventAction); + + assertThat(actualKey, sameInstance(eventKey)); + assertThat(actualKey2, sameInstance(eventKey)); + + verify(innerEventKeyFactory).createEventKey(key, eventAction); + } + + @Test + void createEventKey_returns_same_instance_without_calling_inner_createEventKey_for_same_key() { + final String key = UUID.randomUUID().toString(); + final EventKey eventKey = mock(EventKey.class); + + when(innerEventKeyFactory.createEventKey(key, EventKeyFactory.EventAction.ALL)).thenReturn(eventKey); + + final EventKeyFactory objectUnderTest = createObjectUnderTest(); + final EventKey actualKey = objectUnderTest.createEventKey(key); + final EventKey actualKey2 = objectUnderTest.createEventKey(key); + + assertThat(actualKey, sameInstance(eventKey)); + assertThat(actualKey2, sameInstance(eventKey)); + + verify(innerEventKeyFactory).createEventKey(key, EventKeyFactory.EventAction.ALL); + } + + @Test + void createEventKey_with_EventAction_returns_different_values_for_different_keys() { + final String key1 = UUID.randomUUID().toString(); + final String key2 = UUID.randomUUID().toString(); + final EventKey eventKey1 = mock(EventKey.class); + final EventKey eventKey2 = mock(EventKey.class); + + when(innerEventKeyFactory.createEventKey(key1, EventKeyFactory.EventAction.ALL)).thenReturn(eventKey1); + when(innerEventKeyFactory.createEventKey(key2, EventKeyFactory.EventAction.ALL)).thenReturn(eventKey2); + + final EventKeyFactory objectUnderTest = createObjectUnderTest(); + final EventKey actualEventKey1 = objectUnderTest.createEventKey(key1, EventKeyFactory.EventAction.ALL); + assertThat(actualEventKey1, sameInstance(eventKey1)); + final EventKey actualEventKey2 = objectUnderTest.createEventKey(key2, EventKeyFactory.EventAction.ALL); 
+ assertThat(actualEventKey2, sameInstance(eventKey2)); + } + + @Test + void createEventKey_with_EventAction_returns_different_values_for_different_actions() { + final String key = UUID.randomUUID().toString(); + final EventKey eventKeyGet = mock(EventKey.class); + final EventKey eventKeyPut = mock(EventKey.class); + + when(innerEventKeyFactory.createEventKey(key, EventKeyFactory.EventAction.GET)).thenReturn(eventKeyGet); + when(innerEventKeyFactory.createEventKey(key, EventKeyFactory.EventAction.PUT)).thenReturn(eventKeyPut); + + final EventKeyFactory objectUnderTest = createObjectUnderTest(); + final EventKey actualEventKeyGet = objectUnderTest.createEventKey(key, EventKeyFactory.EventAction.GET); + assertThat(actualEventKeyGet, sameInstance(eventKeyGet)); + final EventKey actualEventKeyPut = objectUnderTest.createEventKey(key, EventKeyFactory.EventAction.PUT); + assertThat(actualEventKeyPut, sameInstance(eventKeyPut)); + } + + @Test + void createEventKey_expires_after_reaching_maximum() { + + final List keys = new ArrayList<>(CACHE_SIZE); + for (int i = 0; i < CACHE_SIZE * 2; i++) { + final String key = UUID.randomUUID().toString(); + final EventKey eventKey = mock(EventKey.class); + when(innerEventKeyFactory.createEventKey(key, EventKeyFactory.EventAction.ALL)).thenReturn(eventKey); + keys.add(key); + } + + final EventKeyFactory objectUnderTest = createObjectUnderTest(); + + final int numberOfIterations = 20; + for (int i = 0; i < numberOfIterations; i++) { + for (final String key : keys) { + objectUnderTest.createEventKey(key); + } + } + + verify(innerEventKeyFactory, atLeast(CACHE_SIZE + 1)) + .createEventKey(anyString(), eq(EventKeyFactory.EventAction.ALL)); + } +} \ No newline at end of file diff --git a/data-prepper-event/src/test/java/org/opensearch/dataprepper/core/event/EventFactoryApplicationConfigurationTest.java b/data-prepper-event/src/test/java/org/opensearch/dataprepper/core/event/EventFactoryApplicationConfigurationTest.java new file mode 100644 
index 0000000000..2d8cc7a390 --- /dev/null +++ b/data-prepper-event/src/test/java/org/opensearch/dataprepper/core/event/EventFactoryApplicationConfigurationTest.java @@ -0,0 +1,48 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.core.event; + +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; +import org.opensearch.dataprepper.model.event.EventKeyFactory; + +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.sameInstance; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +class EventFactoryApplicationConfigurationTest { + private EventFactoryApplicationConfiguration createObjectUnderTest() { + return new EventFactoryApplicationConfiguration(); + } + + @ParameterizedTest + @ValueSource(ints = {-1, 0}) + void eventKeyFactory_returns_innerEventKeyFactory_if_EventConfiguration_is_cache_disabled(final int cacheMax) { + final EventKeyFactory innerEventKeyFactory = mock(EventKeyFactory.class); + final EventConfiguration eventConfiguration = mock(EventConfiguration.class); + when(eventConfiguration.getMaximumCachedKeys()).thenReturn(cacheMax); + + final EventKeyFactory actualEventKeyFactory = createObjectUnderTest().eventKeyFactory(innerEventKeyFactory, eventConfiguration); + + assertThat(actualEventKeyFactory, sameInstance(innerEventKeyFactory)); + } + + @ParameterizedTest + @ValueSource(ints = {1, 2, 512, 1_000}) + void eventKeyFactory_returns_CachingEventKeyFactory_for_cacheable_sizes(final int cacheMax) { + final EventKeyFactory innerEventKeyFactory = mock(EventKeyFactory.class); + final EventConfiguration eventConfiguration = mock(EventConfiguration.class); + when(eventConfiguration.getMaximumCachedKeys()).thenReturn(cacheMax); + + final EventKeyFactory 
actualEventKeyFactory = createObjectUnderTest().eventKeyFactory(innerEventKeyFactory, eventConfiguration); + + assertThat(actualEventKeyFactory, not(sameInstance(innerEventKeyFactory))); + assertThat(actualEventKeyFactory, instanceOf(CachingEventKeyFactory.class)); + } +} \ No newline at end of file diff --git a/data-prepper-plugin-framework/src/main/java/org/opensearch/dataprepper/plugin/ClasspathPluginProvider.java b/data-prepper-plugin-framework/src/main/java/org/opensearch/dataprepper/plugin/ClasspathPluginProvider.java index 764c83f4db..df29915cd1 100644 --- a/data-prepper-plugin-framework/src/main/java/org/opensearch/dataprepper/plugin/ClasspathPluginProvider.java +++ b/data-prepper-plugin-framework/src/main/java/org/opensearch/dataprepper/plugin/ClasspathPluginProvider.java @@ -18,6 +18,7 @@ import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.opensearch.dataprepper.model.annotations.DataPrepperPlugin.DEFAULT_ALTERNATE_NAME; import static org.opensearch.dataprepper.model.annotations.DataPrepperPlugin.DEFAULT_DEPRECATED_NAME; @@ -60,6 +61,20 @@ public Optional> findPluginClass(final Class pluginTyp return Optional.ofNullable((Class) supportedTypesMap.get(pluginType)); } + @Override + public Set> findPluginClasses(Class pluginType) { + if (nameToSupportedTypeToPluginType == null) { + nameToSupportedTypeToPluginType = scanForPlugins(); + } + + return nameToSupportedTypeToPluginType.values().stream() + .flatMap(supportedTypeToPluginType -> + supportedTypeToPluginType.entrySet().stream() + .filter(entry -> pluginType.equals(entry.getKey())) + .flatMap(entry -> Stream.of((Class) entry.getValue()))) + .collect(Collectors.toSet()); + } + private Map, Class>> scanForPlugins() { final Set> dataPrepperPluginClasses = reflections.getTypesAnnotatedWith(DataPrepperPlugin.class); diff --git 
a/data-prepper-plugin-framework/src/main/java/org/opensearch/dataprepper/plugin/DefaultPluginFactory.java b/data-prepper-plugin-framework/src/main/java/org/opensearch/dataprepper/plugin/DefaultPluginFactory.java index 1565ce396e..a866016e27 100644 --- a/data-prepper-plugin-framework/src/main/java/org/opensearch/dataprepper/plugin/DefaultPluginFactory.java +++ b/data-prepper-plugin-framework/src/main/java/org/opensearch/dataprepper/plugin/DefaultPluginFactory.java @@ -13,6 +13,7 @@ import org.opensearch.dataprepper.model.sink.SinkContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.BeanFactory; import org.springframework.context.annotation.DependsOn; import javax.inject.Inject; @@ -115,13 +116,16 @@ private ComponentPluginArgumentsContext getConstructionContext(final PluginS final PluginConfigObservable pluginConfigObservable = pluginConfigurationObservableFactory .createDefaultPluginConfigObservable(pluginConfigurationConverter, pluginConfigurationType, pluginSetting); + Class[] markersToScan = pluginAnnotation.packagesToScan(); + BeanFactory beanFactory = pluginBeanFactoryProvider.createPluginSpecificContext(markersToScan); + return new ComponentPluginArgumentsContext.Builder() .withPluginSetting(pluginSetting) .withPipelineDescription(pluginSetting) .withPluginConfiguration(configuration) .withPluginFactory(this) .withSinkContext(sinkContext) - .withBeanFactory(pluginBeanFactoryProvider.get()) + .withBeanFactory(beanFactory) .withPluginConfigurationObservable(pluginConfigObservable) .withTypeArgumentSuppliers(applicationContextToTypedSuppliers.getArgumentsSuppliers()) .build(); diff --git a/data-prepper-plugin-framework/src/main/java/org/opensearch/dataprepper/plugin/PluginBeanFactoryProvider.java b/data-prepper-plugin-framework/src/main/java/org/opensearch/dataprepper/plugin/PluginBeanFactoryProvider.java index 66a42eb36a..6a16917f9d 100644 --- 
a/data-prepper-plugin-framework/src/main/java/org/opensearch/dataprepper/plugin/PluginBeanFactoryProvider.java +++ b/data-prepper-plugin-framework/src/main/java/org/opensearch/dataprepper/plugin/PluginBeanFactoryProvider.java @@ -7,11 +7,12 @@ import org.springframework.beans.factory.BeanFactory; import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.support.GenericApplicationContext; import javax.inject.Inject; import javax.inject.Named; -import javax.inject.Provider; +import java.util.Arrays; import java.util.Objects; /** @@ -25,7 +26,7 @@ *

publicContext is the root {@link ApplicationContext}

*/ @Named -class PluginBeanFactoryProvider implements Provider { +class PluginBeanFactoryProvider { private final GenericApplicationContext sharedPluginApplicationContext; private final GenericApplicationContext coreApplicationContext; @@ -57,8 +58,17 @@ GenericApplicationContext getCoreApplicationContext() { * instead, a new isolated {@link ApplicationContext} should be created. * @return BeanFactory A BeanFactory that inherits from {@link PluginBeanFactoryProvider#sharedPluginApplicationContext} */ - public BeanFactory get() { - final GenericApplicationContext isolatedPluginApplicationContext = new GenericApplicationContext(sharedPluginApplicationContext); + public BeanFactory createPluginSpecificContext(Class[] markersToScan) { + AnnotationConfigApplicationContext isolatedPluginApplicationContext = new AnnotationConfigApplicationContext(); + if(markersToScan !=null && markersToScan.length>0) { + // If packages to scan is provided in this plugin annotation, which indicates + // that this plugin is interested in using Dependency Injection isolated for its module + Arrays.stream(markersToScan) + .map(Class::getPackageName) + .forEach(isolatedPluginApplicationContext::scan); + isolatedPluginApplicationContext.refresh(); + } + isolatedPluginApplicationContext.setParent(sharedPluginApplicationContext); return isolatedPluginApplicationContext.getBeanFactory(); } } diff --git a/data-prepper-plugin-framework/src/main/java/org/opensearch/dataprepper/plugin/PluginProvider.java b/data-prepper-plugin-framework/src/main/java/org/opensearch/dataprepper/plugin/PluginProvider.java index dd15176569..3aff8b0e3c 100644 --- a/data-prepper-plugin-framework/src/main/java/org/opensearch/dataprepper/plugin/PluginProvider.java +++ b/data-prepper-plugin-framework/src/main/java/org/opensearch/dataprepper/plugin/PluginProvider.java @@ -5,6 +5,7 @@ package org.opensearch.dataprepper.plugin; +import java.util.Collection; import java.util.Optional; /** @@ -27,4 +28,15 @@ public interface 
PluginProvider { * @since 1.2 */ Optional> findPluginClass(Class pluginType, String pluginName); + + /** + * Finds the Java classes for a specific pluginType. + * + * @param pluginType The type of plugin which is being supported. + * e.g. {@link org.opensearch.dataprepper.model.sink.Sink}. + * @param The type + * @return An {@link Collection} of Java classes for plugins + * @since 1.2 + */ + Collection> findPluginClasses(Class pluginType); } diff --git a/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugin/ClasspathPluginProviderTest.java b/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugin/ClasspathPluginProviderTest.java index 6cda169636..56ec0f4167 100644 --- a/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugin/ClasspathPluginProviderTest.java +++ b/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugin/ClasspathPluginProviderTest.java @@ -11,6 +11,7 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin; +import org.opensearch.dataprepper.model.configuration.PluginSetting; import org.opensearch.dataprepper.model.sink.Sink; import org.opensearch.dataprepper.model.source.Source; import org.opensearch.dataprepper.plugins.test.TestSink; @@ -18,13 +19,16 @@ import org.reflections.Reflections; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Optional; +import java.util.Set; import java.util.UUID; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.MatcherAssert.assertThat; import static org.mockito.BDDMockito.given; @@ -61,6 +65,22 @@ void findPlugin_should_scan_for_plugins() { .getTypesAnnotatedWith(DataPrepperPlugin.class); 
} + @Test + void findPlugins_should_scan_for_plugins() { + final ClasspathPluginProvider objectUnderTest = createObjectUnderTest(); + + then(reflections).shouldHaveNoInteractions(); + + given(reflections.getTypesAnnotatedWith(DataPrepperPlugin.class)) + .willReturn(Collections.emptySet()); + + objectUnderTest.findPluginClasses(Sink.class); + + then(reflections) + .should() + .getTypesAnnotatedWith(DataPrepperPlugin.class); + } + @Test void findPlugin_should_scan_for_plugins_only_once() { final ClasspathPluginProvider objectUnderTest = createObjectUnderTest(); @@ -76,6 +96,21 @@ void findPlugin_should_scan_for_plugins_only_once() { .getTypesAnnotatedWith(DataPrepperPlugin.class); } + @Test + void findPlugins_should_scan_for_plugins_only_once() { + final ClasspathPluginProvider objectUnderTest = createObjectUnderTest(); + + given(reflections.getTypesAnnotatedWith(DataPrepperPlugin.class)) + .willReturn(Collections.emptySet()); + + for (int i = 0; i < 10; i++) + objectUnderTest.findPluginClasses(Sink.class); + + then(reflections) + .should() + .getTypesAnnotatedWith(DataPrepperPlugin.class); + } + @Test void findPlugin_should_return_empty_if_no_plugins_found() { given(reflections.getTypesAnnotatedWith(DataPrepperPlugin.class)) @@ -130,6 +165,17 @@ void findPlugin_should_return_plugin_if_found_for_alternate_name_and_type_using_ assertThat(optionalPlugin.get(), equalTo(TestSource.class)); } + @Test + void findPlugins_should_return_empty_if_no_plugins_found() { + given(reflections.getTypesAnnotatedWith(DataPrepperPlugin.class)) + .willReturn(Collections.emptySet()); + + final Collection> foundPlugins = createObjectUnderTest().findPluginClasses( + PluginSetting.class); + assertThat(foundPlugins, notNullValue()); + assertThat(foundPlugins.isEmpty(), is(true)); + } + @Nested class WithPredefinedPlugins { @@ -161,5 +207,13 @@ void findPlugin_should_return_plugin_if_found_for_name_and_type_using_pluginType assertThat(optionalPlugin.isPresent(), equalTo(true)); 
assertThat(optionalPlugin.get(), equalTo(TestSink.class)); } + + @Test + void findPlugins_should_return_plugins_if_plugin_found_for_specified_type() { + final Set> foundPlugins = createObjectUnderTest().findPluginClasses(Source.class); + assertThat(foundPlugins, notNullValue()); + assertThat(foundPlugins.size(), equalTo(1)); + assertThat(foundPlugins.stream().iterator().next(), equalTo(TestSource.class)); + } } } \ No newline at end of file diff --git a/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugin/DefaultPluginFactoryTest.java b/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugin/DefaultPluginFactoryTest.java index 495d003bb3..8c282152d9 100644 --- a/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugin/DefaultPluginFactoryTest.java +++ b/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugin/DefaultPluginFactoryTest.java @@ -18,6 +18,8 @@ import org.opensearch.dataprepper.model.plugin.NoPluginFoundException; import org.opensearch.dataprepper.model.plugin.PluginConfigObservable; import org.opensearch.dataprepper.model.sink.Sink; +import org.opensearch.dataprepper.model.source.Source; +import org.opensearch.dataprepper.plugins.test.TestDISource; import org.opensearch.dataprepper.plugins.test.TestSink; import org.springframework.beans.factory.BeanFactory; @@ -192,6 +194,25 @@ void setUp() { .willReturn(Optional.of(expectedPluginClass)); } + @Test + void loadPlugin_should_create_a_new_instance_of_the_plugin_with_di_initialized() { + + final TestDISource expectedInstance = mock(TestDISource.class); + final Object convertedConfiguration = mock(Object.class); + given(pluginConfigurationConverter.convert(PluginSetting.class, pluginSetting)) + .willReturn(convertedConfiguration); + given(firstPluginProvider.findPluginClass(Source.class, pluginName)) + .willReturn(Optional.of(TestDISource.class)); + given(pluginCreator.newPluginInstance(eq(TestDISource.class), 
any(ComponentPluginArgumentsContext.class), eq(pluginName))) + .willReturn(expectedInstance); + + assertThat(createObjectUnderTest().loadPlugin(Source.class, pluginSetting), + equalTo(expectedInstance)); + verify(pluginConfigurationObservableFactory).createDefaultPluginConfigObservable(eq(pluginConfigurationConverter), + eq(PluginSetting.class), eq(pluginSetting)); + verify(beanFactoryProvider).createPluginSpecificContext(new Class[]{TestDISource.class}); + } + @Test void loadPlugin_should_create_a_new_instance_of_the_first_plugin_found() { @@ -206,7 +227,7 @@ void loadPlugin_should_create_a_new_instance_of_the_first_plugin_found() { equalTo(expectedInstance)); verify(pluginConfigurationObservableFactory).createDefaultPluginConfigObservable(eq(pluginConfigurationConverter), eq(PluginSetting.class), eq(pluginSetting)); - verify(beanFactoryProvider).get(); + verify(beanFactoryProvider).createPluginSpecificContext(new Class[]{}); } @Test @@ -240,7 +261,7 @@ void loadPlugins_should_return_an_empty_list_when_the_number_of_instances_is_0() assertThat(plugins, notNullValue()); assertThat(plugins.size(), equalTo(0)); - verify(beanFactoryProvider).get(); + verify(beanFactoryProvider).createPluginSpecificContext(new Class[]{}); verifyNoInteractions(pluginCreator); } @@ -256,7 +277,7 @@ void loadPlugins_should_return_a_single_instance_when_the_the_numberOfInstances_ final List plugins = createObjectUnderTest().loadPlugins( baseClass, pluginSetting, c -> 1); - verify(beanFactoryProvider).get(); + verify(beanFactoryProvider).createPluginSpecificContext(new Class[]{}); verify(pluginConfigurationObservableFactory).createDefaultPluginConfigObservable(eq(pluginConfigurationConverter), eq(PluginSetting.class), eq(pluginSetting)); final ArgumentCaptor pluginArgumentsContextArgCapture = ArgumentCaptor.forClass(ComponentPluginArgumentsContext.class); @@ -285,7 +306,7 @@ void loadPlugin_with_varargs_should_return_a_single_instance_when_the_the_number final Object plugin = 
createObjectUnderTest().loadPlugin(baseClass, pluginSetting, object); - verify(beanFactoryProvider).get(); + verify(beanFactoryProvider).createPluginSpecificContext(new Class[]{}); verify(pluginConfigurationObservableFactory).createDefaultPluginConfigObservable(eq(pluginConfigurationConverter), eq(PluginSetting.class), eq(pluginSetting)); final ArgumentCaptor pluginArgumentsContextArgCapture = ArgumentCaptor.forClass(ComponentPluginArgumentsContext.class); @@ -320,7 +341,7 @@ void loadPlugins_should_return_an_instance_for_the_total_count() { final List plugins = createObjectUnderTest().loadPlugins( baseClass, pluginSetting, c -> 3); - verify(beanFactoryProvider).get(); + verify(beanFactoryProvider).createPluginSpecificContext(new Class[]{}); final ArgumentCaptor pluginArgumentsContextArgCapture = ArgumentCaptor.forClass(ComponentPluginArgumentsContext.class); verify(pluginCreator, times(3)).newPluginInstance(eq(expectedPluginClass), pluginArgumentsContextArgCapture.capture(), eq(pluginName)); final List actualPluginArgumentsContextList = pluginArgumentsContextArgCapture.getAllValues(); @@ -356,7 +377,7 @@ void loadPlugins_should_return_a_single_instance_with_values_from_ApplicationCon final List plugins = createObjectUnderTest().loadPlugins( baseClass, pluginSetting, c -> 1); - verify(beanFactoryProvider).get(); + verify(beanFactoryProvider).createPluginSpecificContext(new Class[]{}); final ArgumentCaptor pluginArgumentsContextArgCapture = ArgumentCaptor.forClass(ComponentPluginArgumentsContext.class); verify(pluginCreator).newPluginInstance(eq(expectedPluginClass), pluginArgumentsContextArgCapture.capture(), eq(pluginName)); final ComponentPluginArgumentsContext actualPluginArgumentsContext = pluginArgumentsContextArgCapture.getValue(); @@ -398,7 +419,7 @@ void loadPlugin_should_create_a_new_instance_of_the_first_plugin_found_with_corr assertThat(createObjectUnderTest().loadPlugin(baseClass, pluginSetting), equalTo(expectedInstance)); 
MatcherAssert.assertThat(expectedInstance.getClass().getAnnotation(DataPrepperPlugin.class).deprecatedName(), equalTo(TEST_SINK_DEPRECATED_NAME)); - verify(beanFactoryProvider).get(); + verify(beanFactoryProvider).createPluginSpecificContext(new Class[]{}); } } @@ -427,7 +448,7 @@ void loadPlugin_should_create_a_new_instance_of_the_first_plugin_found_with_corr assertThat(createObjectUnderTest().loadPlugin(baseClass, pluginSetting), equalTo(expectedInstance)); MatcherAssert.assertThat(expectedInstance.getClass().getAnnotation(DataPrepperPlugin.class).alternateNames(), equalTo(new String[]{TEST_SINK_ALTERNATE_NAME})); - verify(beanFactoryProvider).get(); + verify(beanFactoryProvider).createPluginSpecificContext(new Class[]{}); } } } diff --git a/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugin/PluginBeanFactoryProviderTest.java b/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugin/PluginBeanFactoryProviderTest.java index 14681b27eb..4545f21310 100644 --- a/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugin/PluginBeanFactoryProviderTest.java +++ b/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugin/PluginBeanFactoryProviderTest.java @@ -7,7 +7,9 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.opensearch.dataprepper.plugins.test.TestComponent; import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.context.support.GenericApplicationContext; import static org.hamcrest.CoreMatchers.equalTo; @@ -21,6 +23,7 @@ import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; class PluginBeanFactoryProviderTest { @@ -48,14 +51,14 @@ void testPluginBeanFactoryProviderUsesParentContext() { @Test void 
testPluginBeanFactoryProviderRequiresContext() { context = null; - assertThrows(NullPointerException.class, () -> createObjectUnderTest()); + assertThrows(NullPointerException.class, this::createObjectUnderTest); } @Test void testPluginBeanFactoryProviderRequiresParentContext() { context = mock(GenericApplicationContext.class); - assertThrows(NullPointerException.class, () -> createObjectUnderTest()); + assertThrows(NullPointerException.class, this::createObjectUnderTest); } @Test @@ -65,7 +68,7 @@ void testPluginBeanFactoryProviderGetReturnsBeanFactory() { final PluginBeanFactoryProvider beanFactoryProvider = createObjectUnderTest(); verify(context).getParent(); - assertThat(beanFactoryProvider.get(), is(instanceOf(BeanFactory.class))); + assertThat(beanFactoryProvider.createPluginSpecificContext(new Class[]{}), is(instanceOf(BeanFactory.class))); } @Test @@ -73,8 +76,8 @@ void testPluginBeanFactoryProviderGetReturnsUniqueBeanFactory() { doReturn(context).when(context).getParent(); final PluginBeanFactoryProvider beanFactoryProvider = createObjectUnderTest(); - final BeanFactory isolatedBeanFactoryA = beanFactoryProvider.get(); - final BeanFactory isolatedBeanFactoryB = beanFactoryProvider.get(); + final BeanFactory isolatedBeanFactoryA = beanFactoryProvider.createPluginSpecificContext(new Class[]{}); + final BeanFactory isolatedBeanFactoryB = beanFactoryProvider.createPluginSpecificContext(new Class[]{}); verify(context).getParent(); assertThat(isolatedBeanFactoryA, not(sameInstance(isolatedBeanFactoryB))); @@ -95,4 +98,22 @@ void getSharedPluginApplicationContext_called_multiple_times_returns_same_instan final PluginBeanFactoryProvider objectUnderTest = createObjectUnderTest(); assertThat(objectUnderTest.getSharedPluginApplicationContext(), sameInstance(objectUnderTest.getSharedPluginApplicationContext())); } + + @Test + void testCreatePluginSpecificContext() { + when(context.getParent()).thenReturn(context); + final PluginBeanFactoryProvider objectUnderTest = 
createObjectUnderTest(); + BeanFactory beanFactory = objectUnderTest.createPluginSpecificContext(new Class[]{TestComponent.class}); + assertThat(beanFactory, notNullValue()); + assertThat(beanFactory.getBean(TestComponent.class), notNullValue()); + } + + @Test + void testCreatePluginSpecificContext_with_empty_array() { + when(context.getParent()).thenReturn(context); + final PluginBeanFactoryProvider objectUnderTest = createObjectUnderTest(); + BeanFactory beanFactory = objectUnderTest.createPluginSpecificContext(new Class[]{}); + assertThat(beanFactory, notNullValue()); + assertThrows(NoSuchBeanDefinitionException.class, ()->beanFactory.getBean(TestComponent.class)); + } } \ No newline at end of file diff --git a/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugins/test/TestComponent.java b/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugins/test/TestComponent.java new file mode 100644 index 0000000000..1f7d21564b --- /dev/null +++ b/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugins/test/TestComponent.java @@ -0,0 +1,10 @@ +package org.opensearch.dataprepper.plugins.test; + +import javax.inject.Named; + +@Named +public class TestComponent { + public String getIdentifier() { + return "test-component"; + } +} diff --git a/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugins/test/TestDISource.java b/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugins/test/TestDISource.java new file mode 100644 index 0000000000..a0c1e73e1e --- /dev/null +++ b/data-prepper-plugin-framework/src/test/java/org/opensearch/dataprepper/plugins/test/TestDISource.java @@ -0,0 +1,39 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.test; + +import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin; +import 
org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor; +import org.opensearch.dataprepper.model.buffer.Buffer; +import org.opensearch.dataprepper.model.record.Record; +import org.opensearch.dataprepper.model.source.Source; +import org.opensearch.dataprepper.plugin.TestPluggableInterface; + +@DataPrepperPlugin(name = "test_di_source", + alternateNames = { "test_source_alternate_name1", "test_source_alternate_name2" }, + deprecatedName = "test_source_deprecated_name", + pluginType = Source.class, + packagesToScan = {TestDISource.class}) +public class TestDISource implements Source>, TestPluggableInterface { + + private final TestComponent testComponent; + + @DataPrepperPluginConstructor + public TestDISource(TestComponent testComponent) { + this.testComponent = testComponent; + } + + @Override + public void start(Buffer> buffer) { + } + + public TestComponent getTestComponent() { + return testComponent; + } + + @Override + public void stop() {} +} diff --git a/data-prepper-plugin-schema-cli/build.gradle b/data-prepper-plugin-schema-cli/build.gradle index fdcf46cf92..830a401ae5 100644 --- a/data-prepper-plugin-schema-cli/build.gradle +++ b/data-prepper-plugin-schema-cli/build.gradle @@ -9,6 +9,7 @@ application { dependencies { implementation project(':data-prepper-plugins') + implementation project(':data-prepper-plugin-framework') implementation project(':data-prepper-plugin-schema') implementation 'com.fasterxml.jackson.core:jackson-databind' implementation 'org.reflections:reflections:0.10.2' diff --git a/data-prepper-plugin-schema-cli/src/main/java/org/opensearch/dataprepper/schemas/DataPrepperPluginSchemaExecute.java b/data-prepper-plugin-schema-cli/src/main/java/org/opensearch/dataprepper/schemas/DataPrepperPluginSchemaExecute.java index 75115eb7e6..ffa8e6fa87 100644 --- a/data-prepper-plugin-schema-cli/src/main/java/org/opensearch/dataprepper/schemas/DataPrepperPluginSchemaExecute.java +++ 
b/data-prepper-plugin-schema-cli/src/main/java/org/opensearch/dataprepper/schemas/DataPrepperPluginSchemaExecute.java @@ -5,11 +5,9 @@ import com.github.victools.jsonschema.generator.SchemaVersion; import com.github.victools.jsonschema.module.jakarta.validation.JakartaValidationModule; import com.github.victools.jsonschema.module.jakarta.validation.JakartaValidationOption; +import org.opensearch.dataprepper.plugin.ClasspathPluginProvider; +import org.opensearch.dataprepper.plugin.PluginProvider; import org.opensearch.dataprepper.schemas.module.CustomJacksonModule; -import org.reflections.Reflections; -import org.reflections.scanners.Scanners; -import org.reflections.util.ClasspathHelper; -import org.reflections.util.ConfigurationBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import picocli.CommandLine; @@ -25,6 +23,7 @@ import java.util.function.Function; import java.util.stream.Collectors; +import static com.github.victools.jsonschema.module.jackson.JacksonOption.FLATTENED_ENUMS_FROM_JSONVALUE; import static com.github.victools.jsonschema.module.jackson.JacksonOption.RESPECT_JSONPROPERTY_ORDER; import static com.github.victools.jsonschema.module.jackson.JacksonOption.RESPECT_JSONPROPERTY_REQUIRED; @@ -54,15 +53,13 @@ public static void main(String[] args) { @Override public void run() { final List modules = List.of( - new CustomJacksonModule(RESPECT_JSONPROPERTY_REQUIRED, RESPECT_JSONPROPERTY_ORDER), + new CustomJacksonModule(RESPECT_JSONPROPERTY_REQUIRED, RESPECT_JSONPROPERTY_ORDER, FLATTENED_ENUMS_FROM_JSONVALUE), new JakartaValidationModule(JakartaValidationOption.NOT_NULLABLE_FIELD_IS_REQUIRED, JakartaValidationOption.INCLUDE_PATTERN_EXPRESSIONS) ); - final Reflections reflections = new Reflections(new ConfigurationBuilder() - .setUrls(ClasspathHelper.forPackage(DEFAULT_PLUGINS_CLASSPATH)) - .setScanners(Scanners.TypesAnnotated, Scanners.SubTypes)); + final PluginProvider pluginProvider = new ClasspathPluginProvider(); final 
PluginConfigsJsonSchemaConverter pluginConfigsJsonSchemaConverter = new PluginConfigsJsonSchemaConverter( - reflections, new JsonSchemaConverter(modules), siteUrl, siteBaseUrl); + pluginProvider, new JsonSchemaConverter(modules, pluginProvider), siteUrl, siteBaseUrl); final Class pluginType = pluginConfigsJsonSchemaConverter.pluginTypeNameToPluginType(pluginTypeName); final Map pluginNameToJsonSchemaMap = pluginConfigsJsonSchemaConverter.convertPluginConfigsIntoJsonSchemas( SchemaVersion.DRAFT_2020_12, OptionPreset.PLAIN_JSON, pluginType); diff --git a/data-prepper-plugin-schema/src/main/java/org/opensearch/dataprepper/schemas/JsonSchemaConverter.java b/data-prepper-plugin-schema/src/main/java/org/opensearch/dataprepper/schemas/JsonSchemaConverter.java index 0985485952..c17d0e50ee 100644 --- a/data-prepper-plugin-schema/src/main/java/org/opensearch/dataprepper/schemas/JsonSchemaConverter.java +++ b/data-prepper-plugin-schema/src/main/java/org/opensearch/dataprepper/schemas/JsonSchemaConverter.java @@ -1,5 +1,7 @@ package org.opensearch.dataprepper.schemas; +import com.fasterxml.classmate.TypeBindings; +import com.fasterxml.classmate.types.ResolvedObjectType; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -10,16 +12,29 @@ import com.github.victools.jsonschema.generator.SchemaGeneratorConfig; import com.github.victools.jsonschema.generator.SchemaGeneratorConfigBuilder; import com.github.victools.jsonschema.generator.SchemaGeneratorConfigPart; +import com.github.victools.jsonschema.generator.SchemaGeneratorGeneralConfigPart; import com.github.victools.jsonschema.generator.SchemaVersion; +import org.opensearch.dataprepper.model.event.EventKey; +import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin; +import org.opensearch.dataprepper.model.annotations.UsesDataPrepperPlugin; +import org.opensearch.dataprepper.plugin.PluginProvider; 
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.util.Collections; import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; public class JsonSchemaConverter { + private static final Logger LOG = LoggerFactory.getLogger(JsonSchemaConverter.class); static final String DEPRECATED_SINCE_KEY = "deprecated"; private final List jsonSchemaGeneratorModules; + private final PluginProvider pluginProvider; - public JsonSchemaConverter(final List jsonSchemaGeneratorModules) { + public JsonSchemaConverter(final List jsonSchemaGeneratorModules, final PluginProvider pluginProvider) { this.jsonSchemaGeneratorModules = jsonSchemaGeneratorModules; + this.pluginProvider = pluginProvider; } public ObjectNode convertIntoJsonSchema( @@ -30,7 +45,10 @@ public ObjectNode convertIntoJsonSchema( loadJsonSchemaGeneratorModules(configBuilder); final SchemaGeneratorConfigPart scopeSchemaGeneratorConfigPart = configBuilder.forFields(); overrideInstanceAttributeWithDeprecated(scopeSchemaGeneratorConfigPart); + overrideTargetTypeWithUsesDataPrepperPlugin(scopeSchemaGeneratorConfigPart); resolveDefaultValueFromJsonProperty(scopeSchemaGeneratorConfigPart); + overrideDataPrepperPluginTypeAttribute(configBuilder.forTypesInGeneral(), schemaVersion, optionPreset); + resolveDataPrepperTypes(scopeSchemaGeneratorConfigPart); final SchemaGeneratorConfig config = configBuilder.build(); final SchemaGenerator generator = new SchemaGenerator(config); @@ -52,6 +70,37 @@ private void overrideInstanceAttributeWithDeprecated( }); } + private void overrideTargetTypeWithUsesDataPrepperPlugin( + final SchemaGeneratorConfigPart scopeSchemaGeneratorConfigPart) { + scopeSchemaGeneratorConfigPart.withTargetTypeOverridesResolver(field -> Optional + .ofNullable(field.getAnnotationConsideringFieldAndGetterIfSupported(UsesDataPrepperPlugin.class)) + .map(usesDataPrepperPlugin -> + pluginProvider.findPluginClasses(usesDataPrepperPlugin.pluginType()).stream()) + .map(stream 
-> stream.map(specificSubtype -> field.getContext().resolve(specificSubtype))) + .map(stream -> stream.collect(Collectors.toList())) + .orElse(null)); + } + + private void overrideDataPrepperPluginTypeAttribute( + final SchemaGeneratorGeneralConfigPart schemaGeneratorGeneralConfigPart, + final SchemaVersion schemaVersion, final OptionPreset optionPreset) { + schemaGeneratorGeneralConfigPart.withTypeAttributeOverride((node, scope, context) -> { + final DataPrepperPlugin dataPrepperPlugin = scope.getType().getErasedType() + .getAnnotation(DataPrepperPlugin.class); + if (dataPrepperPlugin != null) { + final ObjectNode propertiesNode = node.putObject("properties"); + try { + final ObjectNode schemaNode = this.convertIntoJsonSchema( + schemaVersion, optionPreset, dataPrepperPlugin.pluginConfigurationType()); + propertiesNode.set(dataPrepperPlugin.name(), schemaNode); + } catch (JsonProcessingException e) { + LOG.error("Encountered error retrieving JSON schema for {}", dataPrepperPlugin.name(), e); + throw new RuntimeException(e); + } + } + }); + } + private void resolveDefaultValueFromJsonProperty( final SchemaGeneratorConfigPart scopeSchemaGeneratorConfigPart) { scopeSchemaGeneratorConfigPart.withDefaultResolver(field -> { @@ -59,4 +108,13 @@ private void resolveDefaultValueFromJsonProperty( return annotation == null || annotation.defaultValue().isEmpty() ? 
null : annotation.defaultValue(); }); } + + private void resolveDataPrepperTypes(final SchemaGeneratorConfigPart scopeSchemaGeneratorConfigPart) { + scopeSchemaGeneratorConfigPart.withTargetTypeOverridesResolver(field -> { + if(field.getType().getErasedType().equals(EventKey.class)) { + return Collections.singletonList(ResolvedObjectType.create(String.class, TypeBindings.emptyBindings(), null, null)); + } + return Collections.singletonList(field.getType()); + }); + } } diff --git a/data-prepper-plugin-schema/src/main/java/org/opensearch/dataprepper/schemas/PluginConfigsJsonSchemaConverter.java b/data-prepper-plugin-schema/src/main/java/org/opensearch/dataprepper/schemas/PluginConfigsJsonSchemaConverter.java index b7f4c1a531..eb9c29dd63 100644 --- a/data-prepper-plugin-schema/src/main/java/org/opensearch/dataprepper/schemas/PluginConfigsJsonSchemaConverter.java +++ b/data-prepper-plugin-schema/src/main/java/org/opensearch/dataprepper/schemas/PluginConfigsJsonSchemaConverter.java @@ -1,6 +1,5 @@ package org.opensearch.dataprepper.schemas; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.node.ObjectNode; import com.github.victools.jsonschema.generator.OptionPreset; import com.github.victools.jsonschema.generator.SchemaVersion; @@ -10,7 +9,7 @@ import org.opensearch.dataprepper.model.processor.Processor; import org.opensearch.dataprepper.model.sink.Sink; import org.opensearch.dataprepper.model.source.Source; -import org.reflections.Reflections; +import org.opensearch.dataprepper.plugin.PluginProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -51,15 +50,15 @@ public class PluginConfigsJsonSchemaConverter { private final String siteUrl; private final String siteBaseUrl; - private final Reflections reflections; + private final PluginProvider pluginProvider; private final JsonSchemaConverter jsonSchemaConverter; public PluginConfigsJsonSchemaConverter( - final Reflections reflections, + final 
PluginProvider pluginProvider, final JsonSchemaConverter jsonSchemaConverter, final String siteUrl, final String siteBaseUrl) { - this.reflections = reflections; + this.pluginProvider = pluginProvider; this.jsonSchemaConverter = jsonSchemaConverter; this.siteUrl = siteUrl == null ? SITE_URL_PLACEHOLDER : siteUrl; this.siteBaseUrl = siteBaseUrl == null ? SITE_BASE_URL_PLACEHOLDER : siteBaseUrl; @@ -90,8 +89,8 @@ public Map convertPluginConfigsIntoJsonSchemas( addPluginName(jsonSchemaNode, pluginName); addDocumentationLink(jsonSchemaNode, pluginName, pluginType); value = jsonSchemaNode.toPrettyString(); - } catch (JsonProcessingException e) { - LOG.error("Encountered error retrieving JSON schema for {}", pluginName); + } catch (final Exception e) { + LOG.error("Encountered error retrieving JSON schema for {}", pluginName, e); return Stream.empty(); } return Stream.of(Map.entry(entry.getKey(), value)); @@ -107,7 +106,7 @@ private Map> scanForPluginConfigs(final Class pluginType) { if (ConditionalRoute.class.equals(pluginType)) { return Map.of(CONDITIONAL_ROUTE_PROCESSOR_NAME, ConditionalRoute.class); } - return reflections.getTypesAnnotatedWith(DataPrepperPlugin.class).stream() + return pluginProvider.findPluginClasses(pluginType).stream() .map(clazz -> clazz.getAnnotation(DataPrepperPlugin.class)) .filter(dataPrepperPlugin -> pluginType.equals(dataPrepperPlugin.pluginType())) .collect(Collectors.toMap( diff --git a/data-prepper-plugin-schema/src/test/java/org/opensearch/dataprepper/schemas/JsonSchemaConverterIT.java b/data-prepper-plugin-schema/src/test/java/org/opensearch/dataprepper/schemas/JsonSchemaConverterIT.java new file mode 100644 index 0000000000..7a3dca5991 --- /dev/null +++ b/data-prepper-plugin-schema/src/test/java/org/opensearch/dataprepper/schemas/JsonSchemaConverterIT.java @@ -0,0 +1,72 @@ +package org.opensearch.dataprepper.schemas; + +import com.fasterxml.jackson.annotation.JsonClassDescription; +import 
com.fasterxml.jackson.annotation.JsonPropertyDescription; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.github.victools.jsonschema.generator.Module; +import com.github.victools.jsonschema.generator.OptionPreset; +import com.github.victools.jsonschema.generator.SchemaVersion; +import com.github.victools.jsonschema.module.jackson.JacksonModule; +import com.github.victools.jsonschema.module.jakarta.validation.JakartaValidationModule; +import com.github.victools.jsonschema.module.jakarta.validation.JakartaValidationOption; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.opensearch.dataprepper.model.annotations.UsesDataPrepperPlugin; +import org.opensearch.dataprepper.model.configuration.PluginModel; +import org.opensearch.dataprepper.plugin.ClasspathPluginProvider; +import org.opensearch.dataprepper.plugin.PluginProvider; +import org.opensearch.dataprepper.plugins.processor.aggregate.AggregateAction; + +import java.util.List; + +import static com.github.victools.jsonschema.module.jackson.JacksonOption.RESPECT_JSONPROPERTY_REQUIRED; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; + +public class JsonSchemaConverterIT { + static final String PROPERTIES_KEY = "properties"; + static final String ANY_OF_KEY = "anyOf"; + + private JsonSchemaConverter objectUnderTest; + + @BeforeEach + void setUp() { + final List modules = List.of( + new JacksonModule(RESPECT_JSONPROPERTY_REQUIRED), + new JakartaValidationModule(JakartaValidationOption.NOT_NULLABLE_FIELD_IS_REQUIRED, + JakartaValidationOption.INCLUDE_PATTERN_EXPRESSIONS) + ); + final PluginProvider pluginProvider = new ClasspathPluginProvider(); + objectUnderTest = new JsonSchemaConverter(modules, 
pluginProvider); + } + + @Test + void testSubTypes() throws JsonProcessingException { + final ObjectNode jsonSchemaNode = objectUnderTest.convertIntoJsonSchema( + SchemaVersion.DRAFT_2020_12, OptionPreset.PLAIN_JSON, TestConfig.class); + assertThat(jsonSchemaNode, instanceOf(ObjectNode.class)); + final JsonNode propertiesNode = jsonSchemaNode.at("/" + PROPERTIES_KEY); + assertThat(propertiesNode, instanceOf(ObjectNode.class)); + assertThat(propertiesNode.has("action"), is(true)); + final JsonNode actionNode = propertiesNode.at("/action"); + assertThat(actionNode.has(ANY_OF_KEY), is(true)); + final JsonNode anyOfNode = actionNode.at("/" + ANY_OF_KEY); + assertThat(anyOfNode, instanceOf(ArrayNode.class)); + anyOfNode.forEach(aggregateActionNode -> assertThat(aggregateActionNode.has(PROPERTIES_KEY), is(true))); + } + + @JsonClassDescription("test config") + static class TestConfig { + @JsonPropertyDescription("The aggregate action description") + @UsesDataPrepperPlugin(pluginType = AggregateAction.class) + private PluginModel action; + + public PluginModel getAction() { + return action; + } + } +} diff --git a/data-prepper-plugin-schema/src/test/java/org/opensearch/dataprepper/schemas/JsonSchemaConverterTest.java b/data-prepper-plugin-schema/src/test/java/org/opensearch/dataprepper/schemas/JsonSchemaConverterTest.java index 67cf0ac527..2b756d4698 100644 --- a/data-prepper-plugin-schema/src/test/java/org/opensearch/dataprepper/schemas/JsonSchemaConverterTest.java +++ b/data-prepper-plugin-schema/src/test/java/org/opensearch/dataprepper/schemas/JsonSchemaConverterTest.java @@ -10,6 +10,11 @@ import com.github.victools.jsonschema.generator.OptionPreset; import com.github.victools.jsonschema.generator.SchemaVersion; import org.junit.jupiter.api.Test; +import org.opensearch.dataprepper.model.event.EventKey; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import 
org.opensearch.dataprepper.plugin.PluginProvider; import org.opensearch.dataprepper.schemas.module.CustomJacksonModule; import java.util.Collections; @@ -18,17 +23,22 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.MatcherAssert.assertThat; +@ExtendWith(MockitoExtension.class) class JsonSchemaConverterTest { + @Mock + private PluginProvider pluginProvider; - public JsonSchemaConverter createObjectUnderTest(final List modules) { - return new JsonSchemaConverter(modules); + public JsonSchemaConverter createObjectUnderTest(final List modules, final PluginProvider pluginProvider) { + return new JsonSchemaConverter(modules, pluginProvider); } @Test void testConvertIntoJsonSchemaWithDefaultModules() throws JsonProcessingException { - final JsonSchemaConverter jsonSchemaConverter = createObjectUnderTest(Collections.emptyList()); + final JsonSchemaConverter jsonSchemaConverter = createObjectUnderTest( + Collections.emptyList(), pluginProvider); final ObjectNode jsonSchemaNode = jsonSchemaConverter.convertIntoJsonSchema( SchemaVersion.DRAFT_2020_12, OptionPreset.PLAIN_JSON, TestConfig.class); assertThat(jsonSchemaNode, instanceOf(ObjectNode.class)); @@ -44,7 +54,8 @@ void testConvertIntoJsonSchemaWithDefaultModules() throws JsonProcessingExceptio @Test void testConvertIntoJsonSchemaWithCustomJacksonModule() throws JsonProcessingException { final JsonSchemaConverter jsonSchemaConverter = createObjectUnderTest( - Collections.singletonList(new CustomJacksonModule())); + Collections.singletonList(new CustomJacksonModule()), + pluginProvider); final ObjectNode jsonSchemaNode = jsonSchemaConverter.convertIntoJsonSchema( SchemaVersion.DRAFT_2020_12, OptionPreset.PLAIN_JSON, TestConfig.class); assertThat(jsonSchemaNode, instanceOf(ObjectNode.class)); @@ -55,6 +66,18 @@ void 
testConvertIntoJsonSchemaWithCustomJacksonModule() throws JsonProcessingExc assertThat(propertiesNode.has("custom_test_attribute"), is(true)); } + @Test + void testConvertIntoJsonSchemaWithEventKey() throws JsonProcessingException { + final JsonSchemaConverter jsonSchemaConverter = createObjectUnderTest(Collections.emptyList(), pluginProvider); + final ObjectNode jsonSchemaNode = jsonSchemaConverter.convertIntoJsonSchema( + SchemaVersion.DRAFT_2020_12, OptionPreset.PLAIN_JSON, TestConfig.class); + final JsonNode propertiesNode = jsonSchemaNode.at("/properties"); + assertThat(propertiesNode, instanceOf(ObjectNode.class)); + assertThat(propertiesNode.has("testAttributeEventKey"), is(equalTo(true))); + assertThat(propertiesNode.get("testAttributeEventKey"), is(notNullValue())); + assertThat(propertiesNode.get("testAttributeEventKey").get("type"), is(equalTo(TextNode.valueOf("string")))); + } + @JsonClassDescription("test config") static class TestConfig { private String testAttributeWithGetter; @@ -68,5 +91,7 @@ static class TestConfig { public String getTestAttributeWithGetter() { return testAttributeWithGetter; } + + private EventKey testAttributeEventKey; } } \ No newline at end of file diff --git a/data-prepper-plugin-schema/src/test/java/org/opensearch/dataprepper/schemas/PluginConfigsJsonSchemaConverterIT.java b/data-prepper-plugin-schema/src/test/java/org/opensearch/dataprepper/schemas/PluginConfigsJsonSchemaConverterIT.java index 71e9bf5faa..d825a3472f 100644 --- a/data-prepper-plugin-schema/src/test/java/org/opensearch/dataprepper/schemas/PluginConfigsJsonSchemaConverterIT.java +++ b/data-prepper-plugin-schema/src/test/java/org/opensearch/dataprepper/schemas/PluginConfigsJsonSchemaConverterIT.java @@ -13,10 +13,8 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; -import org.reflections.Reflections; -import org.reflections.scanners.Scanners; -import 
org.reflections.util.ClasspathHelper; -import org.reflections.util.ConfigurationBuilder; +import org.opensearch.dataprepper.plugin.ClasspathPluginProvider; +import org.opensearch.dataprepper.plugin.PluginProvider; import java.util.List; import java.util.Map; @@ -46,11 +44,9 @@ void setUp() { new JakartaValidationModule(JakartaValidationOption.NOT_NULLABLE_FIELD_IS_REQUIRED, JakartaValidationOption.INCLUDE_PATTERN_EXPRESSIONS) ); - final Reflections reflections = new Reflections(new ConfigurationBuilder() - .setUrls(ClasspathHelper.forPackage(DEFAULT_PLUGINS_CLASSPATH)) - .setScanners(Scanners.TypesAnnotated, Scanners.SubTypes)); + final PluginProvider pluginProvider = new ClasspathPluginProvider(); objectUnderTest = new PluginConfigsJsonSchemaConverter( - reflections, new JsonSchemaConverter(modules), TEST_URL, TEST_BASE_URL); + pluginProvider, new JsonSchemaConverter(modules, pluginProvider), TEST_URL, TEST_BASE_URL); } @ParameterizedTest diff --git a/data-prepper-plugin-schema/src/test/java/org/opensearch/dataprepper/schemas/PluginConfigsJsonSchemaConverterTest.java b/data-prepper-plugin-schema/src/test/java/org/opensearch/dataprepper/schemas/PluginConfigsJsonSchemaConverterTest.java index 3d1c1b585a..39c5629e0c 100644 --- a/data-prepper-plugin-schema/src/test/java/org/opensearch/dataprepper/schemas/PluginConfigsJsonSchemaConverterTest.java +++ b/data-prepper-plugin-schema/src/test/java/org/opensearch/dataprepper/schemas/PluginConfigsJsonSchemaConverterTest.java @@ -12,7 +12,7 @@ import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin; -import org.reflections.Reflections; +import org.opensearch.dataprepper.plugin.PluginProvider; import java.util.Map; import java.util.Set; @@ -40,7 +40,7 @@ class PluginConfigsJsonSchemaConverterTest { private JsonSchemaConverter jsonSchemaConverter; @Mock - private Reflections reflections; + private PluginProvider pluginProvider; @InjectMocks 
private PluginConfigsJsonSchemaConverter objectUnderTest; @@ -67,7 +67,7 @@ void testPluginTypeNameToPluginTypeWithInValidInput() { @Test void testConvertPluginConfigsIntoJsonSchemasHappyPath() throws JsonProcessingException { - when(reflections.getTypesAnnotatedWith(eq(DataPrepperPlugin.class))).thenReturn(Set.of(TestPlugin.class)); + when(pluginProvider.findPluginClasses(eq(TestPluginType.class))).thenReturn(Set.of(TestPlugin.class)); final ObjectNode objectNode = OBJECT_MAPPER.createObjectNode(); when(jsonSchemaConverter.convertIntoJsonSchema( any(SchemaVersion.class), any(OptionPreset.class), eq(TestPluginConfig.class))).thenReturn(objectNode); @@ -84,7 +84,7 @@ void testConvertPluginConfigsIntoJsonSchemasHappyPath() throws JsonProcessingExc @Test void testConvertPluginConfigsIntoJsonSchemasWithError() throws JsonProcessingException { - when(reflections.getTypesAnnotatedWith(eq(DataPrepperPlugin.class))).thenReturn(Set.of(TestPlugin.class)); + when(pluginProvider.findPluginClasses(eq(TestPluginType.class))).thenReturn(Set.of(TestPlugin.class)); final JsonProcessingException jsonProcessingException = mock(JsonProcessingException.class); when(jsonSchemaConverter.convertIntoJsonSchema( any(SchemaVersion.class), any(OptionPreset.class), eq(TestPluginConfig.class))).thenThrow( @@ -96,7 +96,7 @@ void testConvertPluginConfigsIntoJsonSchemasWithError() throws JsonProcessingExc @DataPrepperPlugin( name = "test_plugin", pluginType = TestPluginType.class, pluginConfigurationType = TestPluginConfig.class) - static class TestPlugin { + static class TestPlugin extends TestPluginType { } diff --git a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/AggregateProcessorConfig.java b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/AggregateProcessorConfig.java index 40ed0f14d7..d3a038361a 100644 --- 
a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/AggregateProcessorConfig.java +++ b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/AggregateProcessorConfig.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonClassDescription; import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import org.opensearch.dataprepper.model.annotations.UsesDataPrepperPlugin; import org.opensearch.dataprepper.model.configuration.PluginModel; import com.fasterxml.jackson.annotation.JsonPropertyDescription; import com.fasterxml.jackson.annotation.JsonProperty; @@ -18,33 +19,35 @@ import java.util.List; @JsonPropertyOrder -@JsonClassDescription("The `aggregate` processor groups events based on the values of identification_keys. " + +@JsonClassDescription("The aggregate processor groups events based on the values of identification_keys. " + "Then, the processor performs an action on each group, helping reduce unnecessary log volume and " + "creating aggregated logs over time.") public class AggregateProcessorConfig { - static int DEFAULT_GROUP_DURATION_SECONDS = 180; - @JsonPropertyDescription("An unordered list by which to group events. Events with the same values as these keys are put into the same group. If an event does not contain one of the identification_keys, then the value of that key is considered to be equal to null. At least one identification_key is required (for example, [\"sourceIp\", \"destinationIp\", \"port\"].") + @JsonPropertyDescription("An unordered list by which to group events. Events with the same values as these keys are put into the same group. " + + "If an event does not contain one of the identification_keys, then the value of that key is considered to be equal to null. " + + "At least one identification_key is required. 
An example configuration is [\"sourceIp\", \"destinationIp\", \"port\"].") @JsonProperty("identification_keys") @NotEmpty private List identificationKeys; - @JsonPropertyDescription("The amount of time that a group should exist before it is concluded automatically. Supports ISO_8601 notation strings (\"PT20.345S\", \"PT15M\", etc.) as well as simple notation for seconds (\"60s\") and milliseconds (\"1500ms\"). Default value is 180s.") - @JsonProperty("group_duration") - private Duration groupDuration = Duration.ofSeconds(DEFAULT_GROUP_DURATION_SECONDS); - @JsonPropertyDescription("The action to be performed on each group. One of the available aggregate actions must be provided.") @JsonProperty("action") @NotNull + @UsesDataPrepperPlugin(pluginType = AggregateAction.class) private PluginModel aggregateAction; - @JsonPropertyDescription("When local_mode is set to true, the aggregation is performed locally on each Data Prepper node instead of forwarding events to a specific node based on the identification_keys using a hash function. Default is false.") + @JsonPropertyDescription("The amount of time that a group should exist before it is concluded automatically. Supports ISO_8601 notation strings (\"PT20.345S\", \"PT15M\", etc.) as well as simple notation for seconds (\"60s\") and milliseconds (\"1500ms\"). Default value is 180s.") + @JsonProperty("group_duration") + private Duration groupDuration = Duration.ofSeconds(DEFAULT_GROUP_DURATION_SECONDS); + + @JsonPropertyDescription("When local_mode is set to true, the aggregation is performed locally on each node instead of forwarding events to a specific node based on the identification_keys using a hash function. 
Default is false.") @JsonProperty("local_mode") @NotNull private Boolean localMode = false; - @JsonPropertyDescription("A boolean indicating if the unaggregated events should be forwarded to the next processor/sink in the chain.") + @JsonPropertyDescription("A boolean indicating if the unaggregated events should be forwarded to the next processor or sink in the chain.") @JsonProperty("output_unaggregated_events") private Boolean outputUnaggregatedEvents = false; @@ -52,7 +55,7 @@ public class AggregateProcessorConfig { @JsonProperty("aggregated_events_tag") private String aggregatedEventsTag; - @JsonPropertyDescription("A Data Prepper [conditional expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/), such as '/some-key == \"test\"', that will be evaluated to determine whether the processor will be run on the event.") + @JsonPropertyDescription("A conditional expression, such as '/some-key == \"test\"', that will be evaluated to determine whether the processor will be run on the event.") @JsonProperty("aggregate_when") private String whenCondition; diff --git a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/AppendAggregateActionConfig.java b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/AppendAggregateActionConfig.java index 529ef0bde3..03067608cb 100644 --- a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/AppendAggregateActionConfig.java +++ b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/AppendAggregateActionConfig.java @@ -7,9 +7,13 @@ import java.util.List; +import com.fasterxml.jackson.annotation.JsonClassDescription; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyDescription; +import 
com.fasterxml.jackson.annotation.JsonPropertyOrder; +@JsonPropertyOrder +@JsonClassDescription("Appends multiple events into a single event.") public class AppendAggregateActionConfig { @JsonProperty("keys_to_append") diff --git a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/CountAggregateAction.java b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/CountAggregateAction.java index c8fd772336..8b67ca64cd 100644 --- a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/CountAggregateAction.java +++ b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/CountAggregateAction.java @@ -51,7 +51,7 @@ public class CountAggregateAction implements AggregateAction { public final String countKey; public final String startTimeKey; public final String endTimeKey; - public final String outputFormat; + public final OutputFormat outputFormat; private long startTimeNanos; private final String metricName; private final IdentificationKeysHasher uniqueKeysHasher; @@ -141,7 +141,7 @@ public AggregateActionOutput concludeGroup(final AggregateActionInput aggregateA Instant endTime = (Instant)groupState.get(endTimeKey); groupState.remove(endTimeKey); groupState.remove(UNIQUE_KEYS_SETKEY); - if (outputFormat.equals(OutputFormat.RAW.toString())) { + if (outputFormat == OutputFormat.RAW) { groupState.put(startTimeKey, startTime.atZone(ZoneId.of(ZoneId.systemDefault().toString())).format(DateTimeFormatter.ofPattern(DATE_FORMAT))); event = JacksonEvent.builder() .withEventType(EVENT_TYPE) diff --git a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/CountAggregateActionConfig.java 
b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/CountAggregateActionConfig.java index a0325ee3a9..d1011f4145 100644 --- a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/CountAggregateActionConfig.java +++ b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/CountAggregateActionConfig.java @@ -5,42 +5,49 @@ package org.opensearch.dataprepper.plugins.processor.aggregate.actions; -import java.util.HashSet; import java.util.List; -import java.util.Set; + +import com.fasterxml.jackson.annotation.JsonClassDescription; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyDescription; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +@JsonPropertyOrder +@JsonClassDescription("The count action counts events that belong to the same group and " + + "generates a new event with values of the identification_keys and the count, which indicates the number of new events.") public class CountAggregateActionConfig { static final String SUM_METRIC_NAME = "count"; public static final String DEFAULT_COUNT_KEY = "aggr._count"; public static final String DEFAULT_START_TIME_KEY = "aggr._start_time"; public static final String DEFAULT_END_TIME_KEY = "aggr._end_time"; - public static final Set validOutputFormats = new HashSet<>(Set.of(OutputFormat.OTEL_METRICS.toString(), OutputFormat.RAW.toString())); - @JsonPropertyDescription("Key used for storing the count. Default name is aggr._count.") - @JsonProperty("count_key") - String countKey = DEFAULT_COUNT_KEY; + @JsonPropertyDescription("Format of the aggregated event. Specifying otel_metrics outputs aggregate events in OTel metrics SUM type with count as value. 
" + + "Specifying raw outputs aggregate events with the count_key field as a count value and includes the start_time_key and end_time_key keys.") + @JsonProperty("output_format") + OutputFormat outputFormat = OutputFormat.OTEL_METRICS; - @JsonPropertyDescription("Metric name to be used when otel format is used.") + @JsonPropertyDescription("Metric name to be used when the OTel metrics format is used. The default value is count.") @JsonProperty("metric_name") String metricName = SUM_METRIC_NAME; - @JsonPropertyDescription("List of unique keys to count.") - @JsonProperty("unique_keys") - List uniqueKeys = null; + @JsonPropertyDescription("The key in the aggregate event that will have the count value. " + + "This is the count of events in the aggregation. Default name is aggr._count.") + @JsonProperty("count_key") + String countKey = DEFAULT_COUNT_KEY; - @JsonPropertyDescription("Key used for storing the start time. Default name is aggr._start_time.") + @JsonPropertyDescription("The key in the aggregate event that will have the start time of the aggregation. " + + "Default name is aggr._start_time.") @JsonProperty("start_time_key") String startTimeKey = DEFAULT_START_TIME_KEY; - @JsonPropertyDescription("Key used for storing the end time. Default name is aggr._end_time.") + @JsonPropertyDescription("The key in the aggregate event that will have the end time of the aggregation. " + + "Default name is aggr._end_time.") @JsonProperty("end_time_key") String endTimeKey = DEFAULT_END_TIME_KEY; - @JsonPropertyDescription("Format of the aggregated event. otel_metrics is the default output format which outputs in OTel metrics SUM type with count as value. 
Other options is - raw - which generates a JSON object with the count_key field as a count value and the start_time_key field with aggregation start time as value.") - @JsonProperty("output_format") - String outputFormat = OutputFormat.OTEL_METRICS.toString(); + @JsonPropertyDescription("List of unique keys to count.") + @JsonProperty("unique_keys") + List uniqueKeys = null; public String getMetricName() { return metricName; @@ -62,10 +69,7 @@ public String getStartTimeKey() { return startTimeKey; } - public String getOutputFormat() { - if (!validOutputFormats.contains(outputFormat)) { - throw new IllegalArgumentException("Unknown output format " + outputFormat); - } + public OutputFormat getOutputFormat() { return outputFormat; } } diff --git a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/HistogramAggregateAction.java b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/HistogramAggregateAction.java index 0bded67d75..22cfa7efb7 100644 --- a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/HistogramAggregateAction.java +++ b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/HistogramAggregateAction.java @@ -49,7 +49,7 @@ public class HistogramAggregateAction implements AggregateAction { private final String bucketsKey; private final String startTimeKey; private final String endTimeKey; - private final String outputFormat; + private final OutputFormat outputFormat; private final String sumKey; private final String maxKey; private final String minKey; @@ -217,7 +217,7 @@ public AggregateActionOutput concludeGroup(final AggregateActionInput aggregateA List exemplarList = new ArrayList<>(); exemplarList.add(createExemplar("min", minEvent, minValue)); exemplarList.add(createExemplar("max", 
maxEvent, maxValue)); - if (outputFormat.equals(OutputFormat.RAW.toString())) { + if (outputFormat == OutputFormat.RAW) { groupState.put(histogramKey, key); groupState.put(durationKey, endTimeNanos-startTimeNanos); groupState.put(bucketsKey, Arrays.copyOfRange(this.buckets, 1, this.buckets.length-1)); diff --git a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/HistogramAggregateActionConfig.java b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/HistogramAggregateActionConfig.java index 6d89a1bd8f..11dafd7626 100644 --- a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/HistogramAggregateActionConfig.java +++ b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/HistogramAggregateActionConfig.java @@ -5,13 +5,21 @@ package org.opensearch.dataprepper.plugins.processor.aggregate.actions; -import java.util.Set; import java.util.List; -import java.util.HashSet; + +import com.fasterxml.jackson.annotation.JsonClassDescription; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyDescription; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; import jakarta.validation.constraints.NotNull; +@JsonPropertyOrder +@JsonClassDescription("The histogram action aggregates events belonging to the same " + + "group and generates a new event with values of the identification_keys " + + "and histogram of the aggregated events based on a configured key. " + + "The histogram contains the number of events, sum, buckets, bucket counts, and optionally " + + "min and max of the values corresponding to the key. 
The action drops all events " + + "that make up the combined event.") public class HistogramAggregateActionConfig { public static final String HISTOGRAM_METRIC_NAME = "histogram"; public static final String DEFAULT_GENERATED_KEY_PREFIX = "aggr._"; @@ -24,13 +32,16 @@ public class HistogramAggregateActionConfig { public static final String START_TIME_KEY = "startTime"; public static final String END_TIME_KEY = "endTime"; public static final String DURATION_KEY = "duration"; - public static final Set validOutputFormats = new HashSet<>(Set.of(OutputFormat.OTEL_METRICS.toString(), OutputFormat.RAW.toString())); @JsonPropertyDescription("Name of the field in the events the histogram generates.") @JsonProperty("key") @NotNull String key; + @JsonPropertyDescription("Format of the aggregated event. otel_metrics is the default output format which outputs in OTel metrics SUM type with count as value. The other option is raw, which generates a JSON object with the count_key field as a count value and the start_time_key field with aggregation start time as value.") + @JsonProperty("output_format") + OutputFormat outputFormat = OutputFormat.OTEL_METRICS; + @JsonPropertyDescription("The name of units for the values in the key. For example, bytes, traces, etc.") @JsonProperty("units") @NotNull @@ -49,10 +60,6 @@ public class HistogramAggregateActionConfig { @NotNull List buckets; - @JsonPropertyDescription("Format of the aggregated event. otel_metrics is the default output format which outputs in OTel metrics SUM type with count as value. 
Other options is - raw - which generates a JSON object with the count_key field as a count value and the start_time_key field with aggregation start time as value.") - @JsonProperty("output_format") - String outputFormat = OutputFormat.OTEL_METRICS.toString(); - @JsonPropertyDescription("A Boolean value indicating whether the histogram should include the min and max of the values in the aggregation.") @JsonProperty("record_minmax") boolean recordMinMax = false; @@ -120,10 +127,7 @@ public List getBuckets() { return buckets; } - public String getOutputFormat() { - if (!validOutputFormats.contains(outputFormat)) { - throw new IllegalArgumentException("Unknown output format " + outputFormat); - } + public OutputFormat getOutputFormat() { return outputFormat; } } diff --git a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/OutputFormat.java b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/OutputFormat.java index d465355e4b..1fb77bd385 100644 --- a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/OutputFormat.java +++ b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/OutputFormat.java @@ -6,6 +6,7 @@ package org.opensearch.dataprepper.plugins.processor.aggregate.actions; import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; import java.util.Arrays; import java.util.Map; @@ -37,4 +38,8 @@ static OutputFormat fromOptionValue(final String option) { return ACTIONS_MAP.get(option.toLowerCase()); } + @JsonValue + public String getOptionValue() { + return name; + } } diff --git a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/PercentSamplerAggregateActionConfig.java 
b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/PercentSamplerAggregateActionConfig.java index 0a17e37c43..51d68a32b1 100644 --- a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/PercentSamplerAggregateActionConfig.java +++ b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/PercentSamplerAggregateActionConfig.java @@ -5,14 +5,18 @@ package org.opensearch.dataprepper.plugins.processor.aggregate.actions; +import com.fasterxml.jackson.annotation.JsonClassDescription; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyDescription; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; import jakarta.validation.constraints.NotNull; -import com.fasterxml.jackson.annotation.JsonPropertyDescription; import jakarta.validation.constraints.AssertTrue; +@JsonPropertyOrder +@JsonClassDescription("The percent_sampler action controls the number of events aggregated based " + + "on a percentage of events. The action drops any events not included in the percentage.") public class PercentSamplerAggregateActionConfig { - @JsonPropertyDescription("The percentage of events to be processed during a one second interval. Must be greater than 0.0 and less than 100.0") + @JsonPropertyDescription("The percentage of events to be processed during a one second interval. 
Must be greater than 0.0 and less than 100.0.") @JsonProperty("percent") @NotNull private double percent; diff --git a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/PutAllAggregateAction.java b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/PutAllAggregateAction.java index 0f96584bd5..78debabb35 100644 --- a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/PutAllAggregateAction.java +++ b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/PutAllAggregateAction.java @@ -5,6 +5,8 @@ package org.opensearch.dataprepper.plugins.processor.aggregate.actions; +import com.fasterxml.jackson.annotation.JsonClassDescription; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin; import org.opensearch.dataprepper.model.event.Event; import org.opensearch.dataprepper.model.event.JacksonEvent; @@ -22,7 +24,7 @@ * most recently handled Event. * @since 1.3 */ -@DataPrepperPlugin(name = "put_all", pluginType = AggregateAction.class) +@DataPrepperPlugin(name = "put_all", pluginType = AggregateAction.class, pluginConfigurationType = PutAllAggregateAction.PutAllAggregateActionConfig.class) public class PutAllAggregateAction implements AggregateAction { static final String EVENT_TYPE = "event"; @@ -43,4 +45,10 @@ public AggregateActionOutput concludeGroup(final AggregateActionInput aggregateA return new AggregateActionOutput(List.of(event)); } + + @JsonPropertyOrder + @JsonClassDescription("The put_all action combines events belonging to the same group by overwriting existing keys and adding new keys, similarly to the Java `Map.putAll`. 
" + + "The action drops all events that make up the combined event.") + static class PutAllAggregateActionConfig { + } } diff --git a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterAggregateAction.java b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterAggregateAction.java index ecdb1b4438..3ea0d0b8af 100644 --- a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterAggregateAction.java +++ b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterAggregateAction.java @@ -22,7 +22,7 @@ @DataPrepperPlugin(name = "rate_limiter", pluginType = AggregateAction.class, pluginConfigurationType = RateLimiterAggregateActionConfig.class) public class RateLimiterAggregateAction implements AggregateAction { private final RateLimiter rateLimiter; - private final String rateLimiterMode; + private final RateLimiterMode rateLimiterMode; @DataPrepperPluginConstructor public RateLimiterAggregateAction(final RateLimiterAggregateActionConfig ratelimiterAggregateActionConfig) { @@ -33,7 +33,7 @@ public RateLimiterAggregateAction(final RateLimiterAggregateActionConfig ratelim @Override public AggregateActionResponse handleEvent(final Event event, final AggregateActionInput aggregateActionInput) { - if (rateLimiterMode.equals(RateLimiterMode.DROP.toString())) { + if (rateLimiterMode == RateLimiterMode.DROP) { if (!rateLimiter.tryAcquire()) { return AggregateActionResponse.nullEventResponse(); } diff --git a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterAggregateActionConfig.java 
b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterAggregateActionConfig.java index 158f3b1ac6..21eca83090 100644 --- a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterAggregateActionConfig.java +++ b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterAggregateActionConfig.java @@ -5,32 +5,32 @@ package org.opensearch.dataprepper.plugins.processor.aggregate.actions; -import java.util.Set; -import java.util.HashSet; +import com.fasterxml.jackson.annotation.JsonClassDescription; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyDescription; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; import jakarta.validation.constraints.NotNull; +@JsonPropertyOrder +@JsonClassDescription("The rate_limiter action controls the number of events aggregated per second. " + + "By default, rate_limiter blocks the aggregate processor from running if it receives more events than the configured number allowed. " + + "You can overwrite the number of events that triggers the rate_limiter by using the when_exceeds configuration option.") public class RateLimiterAggregateActionConfig { - public static final Set validRateLimiterModes = new HashSet<>(Set.of(RateLimiterMode.BLOCK.toString(), RateLimiterMode.DROP.toString())); - @JsonPropertyDescription("The number of events allowed per second.") @JsonProperty("events_per_second") @NotNull int eventsPerSecond; - @JsonPropertyDescription("Indicates what action the rate_limiter takes when the number of events received is greater than the number of events allowed per second. Default value is block, which blocks the processor from running after the maximum number of events allowed per second is reached until the next second. 
Alternatively, the drop option drops the excess events received in that second. Default is block") + @JsonPropertyDescription("Indicates what action the rate_limiter takes when the number of events received is greater than the number of events allowed per second. " + + "Default value is block, which blocks the processor from running after the maximum number of events allowed per second is reached until the next second. Alternatively, the drop option drops the excess events received in that second. Default is block") @JsonProperty("when_exceeds") - String whenExceedsMode = RateLimiterMode.BLOCK.toString(); + RateLimiterMode whenExceedsMode = RateLimiterMode.BLOCK; public int getEventsPerSecond() { return eventsPerSecond; } - public String getWhenExceeds() { - if (!validRateLimiterModes.contains(whenExceedsMode)) { - throw new IllegalArgumentException("Unknown rate limiter mode " + whenExceedsMode); - } + public RateLimiterMode getWhenExceeds() { return whenExceedsMode; } } diff --git a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterMode.java b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterMode.java index bf7e5bba76..dd12d71de2 100644 --- a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterMode.java +++ b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterMode.java @@ -6,6 +6,7 @@ package org.opensearch.dataprepper.plugins.processor.aggregate.actions; import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; import java.util.Arrays; import java.util.Map; @@ -37,4 +38,9 @@ static RateLimiterMode fromOptionValue(final String option) { return ACTIONS_MAP.get(option.toLowerCase()); } + @JsonValue + public 
String getOptionValue() { + return name; + } + } diff --git a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RemoveDuplicatesAggregateAction.java b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RemoveDuplicatesAggregateAction.java index 3d364eb0f2..7fc98d0bf8 100644 --- a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RemoveDuplicatesAggregateAction.java +++ b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RemoveDuplicatesAggregateAction.java @@ -5,6 +5,8 @@ package org.opensearch.dataprepper.plugins.processor.aggregate.actions; +import com.fasterxml.jackson.annotation.JsonClassDescription; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin; import org.opensearch.dataprepper.model.event.Event; import org.opensearch.dataprepper.plugins.processor.aggregate.AggregateAction; @@ -17,7 +19,8 @@ * that have a non-empty groupState associated with them * @since 1.3 */ -@DataPrepperPlugin(name = "remove_duplicates", pluginType = AggregateAction.class) +@DataPrepperPlugin(name = "remove_duplicates", pluginType = AggregateAction.class, + pluginConfigurationType = RemoveDuplicatesAggregateAction.RemoveDuplicatesAggregateActionConfig.class) public class RemoveDuplicatesAggregateAction implements AggregateAction { static final String GROUP_STATE_HAS_EVENT = "GROUP_STATE_HAS_EVENT"; @@ -31,4 +34,9 @@ public AggregateActionResponse handleEvent(final Event event, final AggregateAct return AggregateActionResponse.nullEventResponse(); } + + @JsonPropertyOrder + @JsonClassDescription("The remove_duplicates action processes the first event for a group immediately and drops any events that duplicate the first event from the 
source.") + static class RemoveDuplicatesAggregateActionConfig { + } } diff --git a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/TailSamplerAggregateActionConfig.java b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/TailSamplerAggregateActionConfig.java index 85ce0b1135..1c5ad1c393 100644 --- a/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/TailSamplerAggregateActionConfig.java +++ b/data-prepper-plugins/aggregate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/TailSamplerAggregateActionConfig.java @@ -5,25 +5,29 @@ package org.opensearch.dataprepper.plugins.processor.aggregate.actions; +import com.fasterxml.jackson.annotation.JsonClassDescription; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyDescription; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; import jakarta.validation.constraints.NotNull; import jakarta.validation.constraints.AssertTrue; import java.time.Duration; +@JsonPropertyOrder +@JsonClassDescription("The tail_sampler action samples OpenTelemetry traces after collecting spans for a trace.") public class TailSamplerAggregateActionConfig { @JsonPropertyDescription("Period to wait before considering that a trace event is complete") @JsonProperty("wait_period") @NotNull private Duration waitPeriod; - @JsonPropertyDescription("Percent value to use for sampling non error events. 0.0 < percent < 100.0") + @JsonPropertyDescription("Percent value to use for sampling non error events. 
Must be greater than 0.0 and less than 100.0") @JsonProperty("percent") @NotNull private Integer percent; - @JsonPropertyDescription("A Data Prepper [conditional expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/), such as '/some-key == \"test\"', that will be evaluated to determine whether the event is an error event or not") + @JsonPropertyDescription("A conditional expression, such as '/some-key == \"test\"', that will be evaluated to determine whether the event is an error event or not") @JsonProperty("condition") private String condition; diff --git a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/AggregateProcessorIT.java b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/AggregateProcessorIT.java index 46ec0a996e..a7608decec 100644 --- a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/AggregateProcessorIT.java +++ b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/AggregateProcessorIT.java @@ -297,7 +297,7 @@ void aggregateWithPercentSamplerAction(double testPercent) throws InterruptedExc void aggregateWithRateLimiterAction() throws InterruptedException { final int eventsPerSecond = 500; lenient().when(rateLimiterAggregateActionConfig.getEventsPerSecond()).thenReturn(eventsPerSecond); - lenient().when(rateLimiterAggregateActionConfig.getWhenExceeds()).thenReturn(RateLimiterMode.DROP.toString()); + lenient().when(rateLimiterAggregateActionConfig.getWhenExceeds()).thenReturn(RateLimiterMode.DROP); aggregateAction = new RateLimiterAggregateAction(rateLimiterAggregateActionConfig); when(pluginFactory.loadPlugin(eq(AggregateAction.class), any(PluginSetting.class))) @@ -367,7 +367,7 @@ void aggregateWithRateLimiterActionNoDrops() throws InterruptedException { @RepeatedTest(value = 2) 
void aggregateWithCountAggregateAction() throws InterruptedException, NoSuchFieldException, IllegalAccessException { CountAggregateActionConfig countAggregateActionConfig = new CountAggregateActionConfig(); - setField(CountAggregateActionConfig.class, countAggregateActionConfig, "outputFormat", OutputFormat.RAW.toString()); + setField(CountAggregateActionConfig.class, countAggregateActionConfig, "outputFormat", OutputFormat.RAW); aggregateAction = new CountAggregateAction(countAggregateActionConfig); when(pluginFactory.loadPlugin(eq(AggregateAction.class), any(PluginSetting.class))) .thenReturn(aggregateAction); @@ -404,7 +404,7 @@ void aggregateWithCountAggregateAction() throws InterruptedException, NoSuchFiel @RepeatedTest(value = 2) void aggregateWithCountAggregateActionWithCondition() throws InterruptedException, NoSuchFieldException, IllegalAccessException { CountAggregateActionConfig countAggregateActionConfig = new CountAggregateActionConfig(); - setField(CountAggregateActionConfig.class, countAggregateActionConfig, "outputFormat", OutputFormat.RAW.toString()); + setField(CountAggregateActionConfig.class, countAggregateActionConfig, "outputFormat", OutputFormat.RAW); aggregateAction = new CountAggregateAction(countAggregateActionConfig); when(pluginFactory.loadPlugin(eq(AggregateAction.class), any(PluginSetting.class))) .thenReturn(aggregateAction); @@ -454,7 +454,7 @@ void aggregateWithCountAggregateActionWithUnaggregatedEvents() throws Interrupte String tag = UUID.randomUUID().toString(); when(aggregateProcessorConfig.getAggregatedEventsTag()).thenReturn(tag); CountAggregateActionConfig countAggregateActionConfig = new CountAggregateActionConfig(); - setField(CountAggregateActionConfig.class, countAggregateActionConfig, "outputFormat", OutputFormat.RAW.toString()); + setField(CountAggregateActionConfig.class, countAggregateActionConfig, "outputFormat", OutputFormat.RAW); aggregateAction = new CountAggregateAction(countAggregateActionConfig); 
when(pluginFactory.loadPlugin(eq(AggregateAction.class), any(PluginSetting.class))) .thenReturn(aggregateAction); @@ -496,7 +496,7 @@ void aggregateWithCountAggregateActionWithUnaggregatedEvents() throws Interrupte void aggregateWithHistogramAggregateAction() throws InterruptedException, NoSuchFieldException, IllegalAccessException { HistogramAggregateActionConfig histogramAggregateActionConfig = new HistogramAggregateActionConfig(); - setField(HistogramAggregateActionConfig.class, histogramAggregateActionConfig, "outputFormat", OutputFormat.RAW.toString()); + setField(HistogramAggregateActionConfig.class, histogramAggregateActionConfig, "outputFormat", OutputFormat.RAW); final String testKey = RandomStringUtils.randomAlphabetic(5); setField(HistogramAggregateActionConfig.class, histogramAggregateActionConfig, "key", testKey); final String testKeyPrefix = RandomStringUtils.randomAlphabetic(4)+"_"; diff --git a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/CountAggregateActionConfigTests.java b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/CountAggregateActionConfigTests.java index 1975918e37..3581c27347 100644 --- a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/CountAggregateActionConfigTests.java +++ b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/CountAggregateActionConfigTests.java @@ -5,7 +5,6 @@ package org.opensearch.dataprepper.plugins.processor.aggregate.actions; -import static org.junit.jupiter.api.Assertions.assertThrows; import static org.opensearch.dataprepper.test.helper.ReflectivelySetField.setField; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.junit.jupiter.MockitoExtension; @@ -39,7 +38,7 @@ void setup() { void testDefault() { 
assertThat(countAggregateActionConfig.getCountKey(), equalTo(DEFAULT_COUNT_KEY)); assertThat(countAggregateActionConfig.getStartTimeKey(), equalTo(DEFAULT_START_TIME_KEY)); - assertThat(countAggregateActionConfig.getOutputFormat(), equalTo(OutputFormat.OTEL_METRICS.toString())); + assertThat(countAggregateActionConfig.getOutputFormat(), equalTo(OutputFormat.OTEL_METRICS)); assertThat(countAggregateActionConfig.getMetricName(), equalTo(CountAggregateActionConfig.SUM_METRIC_NAME)); assertThat(countAggregateActionConfig.getUniqueKeys(), equalTo(null)); } @@ -52,9 +51,9 @@ void testValidConfig() throws NoSuchFieldException, IllegalAccessException { final String testStartTimeKey = UUID.randomUUID().toString(); setField(CountAggregateActionConfig.class, countAggregateActionConfig, "startTimeKey", testStartTimeKey); assertThat(countAggregateActionConfig.getStartTimeKey(), equalTo(testStartTimeKey)); - final String testOutputFormat = OutputFormat.OTEL_METRICS.toString(); + final OutputFormat testOutputFormat = OutputFormat.OTEL_METRICS; setField(CountAggregateActionConfig.class, countAggregateActionConfig, "outputFormat", testOutputFormat); - assertThat(countAggregateActionConfig.getOutputFormat(), equalTo(OutputFormat.OTEL_METRICS.toString())); + assertThat(countAggregateActionConfig.getOutputFormat(), equalTo(OutputFormat.OTEL_METRICS)); final String testName = UUID.randomUUID().toString(); setField(CountAggregateActionConfig.class, countAggregateActionConfig, "metricName", testName); assertThat(countAggregateActionConfig.getMetricName(), equalTo(testName)); @@ -64,10 +63,4 @@ void testValidConfig() throws NoSuchFieldException, IllegalAccessException { setField(CountAggregateActionConfig.class, countAggregateActionConfig, "uniqueKeys", uniqueKeys); assertThat(countAggregateActionConfig.getUniqueKeys(), equalTo(uniqueKeys)); } - - @Test - void testInvalidConfig() throws NoSuchFieldException, IllegalAccessException { - setField(CountAggregateActionConfig.class, 
countAggregateActionConfig, "outputFormat", UUID.randomUUID().toString()); - assertThrows(IllegalArgumentException.class, () -> countAggregateActionConfig.getOutputFormat()); - } } diff --git a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/CountAggregateActionTest.java b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/CountAggregateActionTest.java index af81ca001f..c9ed55797b 100644 --- a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/CountAggregateActionTest.java +++ b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/CountAggregateActionTest.java @@ -52,7 +52,7 @@ private AggregateAction createObjectUnderTest(CountAggregateActionConfig config) void testCountAggregate(int testCount) throws NoSuchFieldException, IllegalAccessException { final String testName = UUID.randomUUID().toString(); CountAggregateActionConfig countAggregateActionConfig = new CountAggregateActionConfig(); - setField(CountAggregateActionConfig.class, countAggregateActionConfig, "outputFormat", OutputFormat.RAW.toString()); + setField(CountAggregateActionConfig.class, countAggregateActionConfig, "outputFormat", OutputFormat.RAW); countAggregateAction = createObjectUnderTest(countAggregateActionConfig); final String key = UUID.randomUUID().toString(); final String value = UUID.randomUUID().toString(); @@ -160,7 +160,7 @@ void testCountAggregateOTelFormatWithStartAndEndTimesInTheEvent(int testCount) { String endTimeKey = UUID.randomUUID().toString(); when(mockConfig.getStartTimeKey()).thenReturn(startTimeKey); when(mockConfig.getEndTimeKey()).thenReturn(endTimeKey); - when(mockConfig.getOutputFormat()).thenReturn(OutputFormat.OTEL_METRICS.toString()); + 
when(mockConfig.getOutputFormat()).thenReturn(OutputFormat.OTEL_METRICS); countAggregateAction = createObjectUnderTest(mockConfig); final String key1 = "key-"+UUID.randomUUID().toString(); final String value1 = UUID.randomUUID().toString(); diff --git a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/HistogramAggregateActionConfigTests.java b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/HistogramAggregateActionConfigTests.java index 60ba8dc202..de0be19bf2 100644 --- a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/HistogramAggregateActionConfigTests.java +++ b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/HistogramAggregateActionConfigTests.java @@ -42,7 +42,7 @@ void setup() { void testDefault() { assertThat(histogramAggregateActionConfig.getGeneratedKeyPrefix(), equalTo(DEFAULT_GENERATED_KEY_PREFIX)); assertThat(histogramAggregateActionConfig.getRecordMinMax(), equalTo(false)); - assertThat(histogramAggregateActionConfig.getOutputFormat(), equalTo(OutputFormat.OTEL_METRICS.toString())); + assertThat(histogramAggregateActionConfig.getOutputFormat(), equalTo(OutputFormat.OTEL_METRICS)); assertThat(histogramAggregateActionConfig.getMetricName(), equalTo(HistogramAggregateActionConfig.HISTOGRAM_METRIC_NAME)); } @@ -53,9 +53,9 @@ void testValidConfig() throws NoSuchFieldException, IllegalAccessException { assertThat(histogramAggregateActionConfig.getGeneratedKeyPrefix(), equalTo(testGeneratedKeyPrefix)); setField(HistogramAggregateActionConfig.class, histogramAggregateActionConfig, "recordMinMax", true); assertThat(histogramAggregateActionConfig.getRecordMinMax(), equalTo(true)); - final String testOutputFormat = OutputFormat.OTEL_METRICS.toString(); + final OutputFormat testOutputFormat 
= OutputFormat.OTEL_METRICS; setField(HistogramAggregateActionConfig.class, histogramAggregateActionConfig, "outputFormat", testOutputFormat); - assertThat(histogramAggregateActionConfig.getOutputFormat(), equalTo(OutputFormat.OTEL_METRICS.toString())); + assertThat(histogramAggregateActionConfig.getOutputFormat(), equalTo(OutputFormat.OTEL_METRICS)); final String testKey = RandomStringUtils.randomAlphabetic(10); setField(HistogramAggregateActionConfig.class, histogramAggregateActionConfig, "key", testKey); assertThat(histogramAggregateActionConfig.getKey(), equalTo(testKey)); @@ -114,12 +114,6 @@ void testValidConfig() throws NoSuchFieldException, IllegalAccessException { assertThat(histogramAggregateActionConfig.getMetricName(), equalTo(testName)); } - @Test - void testInvalidOutputFormatConfig() throws NoSuchFieldException, IllegalAccessException { - setField(HistogramAggregateActionConfig.class, histogramAggregateActionConfig, "outputFormat", RandomStringUtils.randomAlphabetic(10)); - assertThrows(IllegalArgumentException.class, () -> histogramAggregateActionConfig.getOutputFormat()); - } - @Test void testInvalidBucketsConfig() throws NoSuchFieldException, IllegalAccessException { setField(HistogramAggregateActionConfig.class, histogramAggregateActionConfig, "buckets", new ArrayList()); diff --git a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/HistogramAggregateActionTests.java b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/HistogramAggregateActionTests.java index 27e8f8d801..facd090a64 100644 --- a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/HistogramAggregateActionTests.java +++ b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/HistogramAggregateActionTests.java @@ -61,7 
+61,7 @@ private AggregateAction createObjectUnderTest() { @ParameterizedTest @ValueSource(ints = {10, 20, 50, 100}) void testHistogramAggregate(final int testCount) throws NoSuchFieldException, IllegalAccessException { - setField(HistogramAggregateActionConfig.class, histogramAggregateActionConfig, "outputFormat", OutputFormat.RAW.toString()); + setField(HistogramAggregateActionConfig.class, histogramAggregateActionConfig, "outputFormat", OutputFormat.RAW); final String testKeyPrefix = RandomStringUtils.randomAlphabetic(5)+"_"; setField(HistogramAggregateActionConfig.class, histogramAggregateActionConfig, "generatedKeyPrefix", testKeyPrefix); setField(HistogramAggregateActionConfig.class, histogramAggregateActionConfig, "units", "ms"); @@ -255,7 +255,7 @@ void testHistogramAggregateOTelFormatWithStartAndEndTimesInTheEvent(final int te when(histogramAggregateActionConfig.getEndTimeKey()).thenReturn(endTimeKey); final String testName = UUID.randomUUID().toString(); when(histogramAggregateActionConfig.getMetricName()).thenReturn(testName); - when(histogramAggregateActionConfig.getOutputFormat()).thenReturn(OutputFormat.OTEL_METRICS.toString()); + when(histogramAggregateActionConfig.getOutputFormat()).thenReturn(OutputFormat.OTEL_METRICS); String keyPrefix = UUID.randomUUID().toString(); final String testUnits = "ms"; when(histogramAggregateActionConfig.getUnits()).thenReturn(testUnits); @@ -381,7 +381,7 @@ void testHistogramAggregateOTelFormat_with_startTime_before_currentTime_and_all_ when(histogramAggregateActionConfig.getEndTimeKey()).thenReturn(endTimeKey); final String testName = UUID.randomUUID().toString(); when(histogramAggregateActionConfig.getMetricName()).thenReturn(testName); - when(histogramAggregateActionConfig.getOutputFormat()).thenReturn(OutputFormat.OTEL_METRICS.toString()); + when(histogramAggregateActionConfig.getOutputFormat()).thenReturn(OutputFormat.OTEL_METRICS); final String testUnits = "ms"; 
when(histogramAggregateActionConfig.getUnits()).thenReturn(testUnits); when(histogramAggregateActionConfig.getRecordMinMax()).thenReturn(true); diff --git a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/OutputFormatTest.java b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/OutputFormatTest.java index 9adaf228f3..53615a563e 100644 --- a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/OutputFormatTest.java +++ b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/OutputFormatTest.java @@ -5,15 +5,25 @@ package org.opensearch.dataprepper.plugins.processor.aggregate.actions; +import org.junit.jupiter.api.extension.ExtensionContext; import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; import org.junit.jupiter.params.provider.EnumSource; +import java.util.stream.Stream; + +import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.emptyString; +import static org.junit.jupiter.params.provider.Arguments.arguments; public class OutputFormatTest { - @ParameterizedTest @EnumSource(OutputFormat.class) void fromOptionValue(final OutputFormat value) { @@ -21,4 +31,32 @@ void fromOptionValue(final OutputFormat value) { assertThat(value, instanceOf(OutputFormat.class)); } + @ParameterizedTest + @ArgumentsSource(OutputFormatToKnownName.class) + void 
fromOptionValue_returns_expected_value(final OutputFormat outputFormat, final String knownString) { + assertThat(OutputFormat.fromOptionValue(knownString), equalTo(outputFormat)); + } + + @ParameterizedTest + @EnumSource(OutputFormat.class) + void getOptionValue_returns_non_empty_string_for_all_types(final OutputFormat outputFormat) { + assertThat(outputFormat.getOptionValue(), notNullValue()); + assertThat(outputFormat.getOptionValue(), not(emptyString())); + } + + @ParameterizedTest + @ArgumentsSource(OutputFormatToKnownName.class) + void getOptionValue_returns_expected_name(final OutputFormat outputFormat, final String expectedString) { + assertThat(outputFormat.getOptionValue(), equalTo(expectedString)); + } + + static class OutputFormatToKnownName implements ArgumentsProvider { + @Override + public Stream provideArguments(final ExtensionContext extensionContext) { + return Stream.of( + arguments(OutputFormat.OTEL_METRICS, "otel_metrics"), + arguments(OutputFormat.RAW, "raw") + ); + } + } } diff --git a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterAggregateActionConfigTests.java b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterAggregateActionConfigTests.java index e54b4a230d..5796d48609 100644 --- a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterAggregateActionConfigTests.java +++ b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterAggregateActionConfigTests.java @@ -8,7 +8,6 @@ import static org.opensearch.dataprepper.test.helper.ReflectivelySetField.setField; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.junit.jupiter.MockitoExtension; -import org.apache.commons.lang3.RandomStringUtils; import 
org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -16,7 +15,6 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertThrows; @ExtendWith(MockitoExtension.class) public class RateLimiterAggregateActionConfigTests { @@ -33,21 +31,15 @@ void setup() { @Test void testDefault() { - assertThat(rateLimiterAggregateActionConfig.getWhenExceeds(), equalTo(RateLimiterMode.BLOCK.toString())); - } - - @Test - void testInvalidConfig() throws NoSuchFieldException, IllegalAccessException { - setField(RateLimiterAggregateActionConfig.class, rateLimiterAggregateActionConfig, "whenExceedsMode", RandomStringUtils.randomAlphabetic(4)); - assertThrows(IllegalArgumentException.class, () -> rateLimiterAggregateActionConfig.getWhenExceeds()); + assertThat(rateLimiterAggregateActionConfig.getWhenExceeds(), equalTo(RateLimiterMode.BLOCK)); } @Test void testValidConfig() throws NoSuchFieldException, IllegalAccessException { final int testEventsPerSecond = ThreadLocalRandom.current().nextInt(); setField(RateLimiterAggregateActionConfig.class, rateLimiterAggregateActionConfig, "eventsPerSecond", testEventsPerSecond); - setField(RateLimiterAggregateActionConfig.class, rateLimiterAggregateActionConfig, "whenExceedsMode", "drop"); + setField(RateLimiterAggregateActionConfig.class, rateLimiterAggregateActionConfig, "whenExceedsMode", RateLimiterMode.fromOptionValue("drop")); assertThat(rateLimiterAggregateActionConfig.getEventsPerSecond(), equalTo(testEventsPerSecond)); - assertThat(rateLimiterAggregateActionConfig.getWhenExceeds(), equalTo(RateLimiterMode.DROP.toString())); + assertThat(rateLimiterAggregateActionConfig.getWhenExceeds(), equalTo(RateLimiterMode.DROP)); } } diff --git a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterAggregateActionTests.java 
b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterAggregateActionTests.java index 202452bfce..b642a983f4 100644 --- a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterAggregateActionTests.java +++ b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterAggregateActionTests.java @@ -46,7 +46,7 @@ private AggregateAction createObjectUnderTest(RateLimiterAggregateActionConfig c @ValueSource(ints = {1, 2, 100, 1000}) void testRateLimiterAggregateSmoothTraffic(int testEventsPerSecond) throws InterruptedException { when(rateLimiterAggregateActionConfig.getEventsPerSecond()).thenReturn(testEventsPerSecond); - when(rateLimiterAggregateActionConfig.getWhenExceeds()).thenReturn(RateLimiterMode.DROP.toString()); + when(rateLimiterAggregateActionConfig.getWhenExceeds()).thenReturn(RateLimiterMode.DROP); rateLimiterAggregateAction = createObjectUnderTest(rateLimiterAggregateActionConfig); final String key = UUID.randomUUID().toString(); final String value = UUID.randomUUID().toString(); @@ -72,7 +72,7 @@ void testRateLimiterAggregateSmoothTraffic(int testEventsPerSecond) throws Inter @ValueSource(ints = {100, 200, 500, 1000}) void testRateLimiterInDropMode(int testEventsPerSecond) throws InterruptedException { when(rateLimiterAggregateActionConfig.getEventsPerSecond()).thenReturn(testEventsPerSecond); - when(rateLimiterAggregateActionConfig.getWhenExceeds()).thenReturn(RateLimiterMode.DROP.toString()); + when(rateLimiterAggregateActionConfig.getWhenExceeds()).thenReturn(RateLimiterMode.DROP); rateLimiterAggregateAction = createObjectUnderTest(rateLimiterAggregateActionConfig); final String key = UUID.randomUUID().toString(); final String value = UUID.randomUUID().toString(); @@ -101,7 +101,7 @@ void testRateLimiterInDropMode(int 
testEventsPerSecond) throws InterruptedExcept @ValueSource(ints = {100, 200, 500, 1000}) void testRateLimiterInBlockMode(int testEventsPerSecond) throws InterruptedException { when(rateLimiterAggregateActionConfig.getEventsPerSecond()).thenReturn(testEventsPerSecond); - when(rateLimiterAggregateActionConfig.getWhenExceeds()).thenReturn(RateLimiterMode.BLOCK.toString()); + when(rateLimiterAggregateActionConfig.getWhenExceeds()).thenReturn(RateLimiterMode.BLOCK); rateLimiterAggregateAction = createObjectUnderTest(rateLimiterAggregateActionConfig); final String key = UUID.randomUUID().toString(); final String value = UUID.randomUUID().toString(); diff --git a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterModeTest.java b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterModeTest.java index 3fccff55ba..79de57a44d 100644 --- a/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterModeTest.java +++ b/data-prepper-plugins/aggregate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/aggregate/actions/RateLimiterModeTest.java @@ -5,12 +5,23 @@ package org.opensearch.dataprepper.plugins.processor.aggregate.actions; +import org.junit.jupiter.api.extension.ExtensionContext; import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; import org.junit.jupiter.params.provider.EnumSource; +import java.util.stream.Stream; + +import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.notNullValue; 
import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.emptyString; +import static org.junit.jupiter.params.provider.Arguments.arguments; public class RateLimiterModeTest { @@ -20,4 +31,33 @@ void fromOptionValue(final RateLimiterMode value) { assertThat(RateLimiterMode.fromOptionValue(value.name()), is(value)); assertThat(value, instanceOf(RateLimiterMode.class)); } + + @ParameterizedTest + @ArgumentsSource(RateLimiterModeToKnownName.class) + void fromOptionValue_returns_expected_value(final RateLimiterMode rateLimiterMode, final String knownString) { + assertThat(RateLimiterMode.fromOptionValue(knownString), equalTo(rateLimiterMode)); + } + + @ParameterizedTest + @EnumSource(RateLimiterMode.class) + void getOptionValue_returns_non_empty_string_for_all_types(final RateLimiterMode rateLimiterMode) { + assertThat(rateLimiterMode.getOptionValue(), notNullValue()); + assertThat(rateLimiterMode.getOptionValue(), not(emptyString())); + } + + @ParameterizedTest + @ArgumentsSource(RateLimiterModeToKnownName.class) + void getOptionValue_returns_expected_name(final RateLimiterMode rateLimiterMode, final String expectedString) { + assertThat(rateLimiterMode.getOptionValue(), equalTo(expectedString)); + } + + static class RateLimiterModeToKnownName implements ArgumentsProvider { + @Override + public Stream provideArguments(final ExtensionContext extensionContext) { + return Stream.of( + arguments(RateLimiterMode.DROP, "drop"), + arguments(RateLimiterMode.BLOCK, "block") + ); + } + } } diff --git a/data-prepper-plugins/anomaly-detector-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/anomalydetector/AnomalyDetectorProcessorConfig.java b/data-prepper-plugins/anomaly-detector-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/anomalydetector/AnomalyDetectorProcessorConfig.java index 0eb59edc58..2122a0f318 100644 --- 
a/data-prepper-plugins/anomaly-detector-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/anomalydetector/AnomalyDetectorProcessorConfig.java +++ b/data-prepper-plugins/anomaly-detector-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/anomalydetector/AnomalyDetectorProcessorConfig.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonClassDescription; import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import org.opensearch.dataprepper.model.annotations.UsesDataPrepperPlugin; import org.opensearch.dataprepper.model.configuration.PluginModel; import com.fasterxml.jackson.annotation.JsonPropertyDescription; import com.fasterxml.jackson.annotation.JsonProperty; @@ -23,6 +24,7 @@ public class AnomalyDetectorProcessorConfig { @JsonPropertyDescription("The ML algorithm (or model) used to detect anomalies. You must provide a mode. See random_cut_forest mode.") @JsonProperty("mode") @NotNull + @UsesDataPrepperPlugin(pluginType = AnomalyDetectorMode.class) private PluginModel detectorMode; @JsonPropertyDescription("A non-ordered List that is used as input to the ML algorithm to detect anomalies in the values of the keys in the list. At least one key is required.") diff --git a/data-prepper-plugins/common/src/main/java/org/opensearch/dataprepper/plugins/processor/DelayProcessor.java b/data-prepper-plugins/common/src/main/java/org/opensearch/dataprepper/plugins/processor/DelayProcessor.java index 74be383972..00429e9ac0 100644 --- a/data-prepper-plugins/common/src/main/java/org/opensearch/dataprepper/plugins/processor/DelayProcessor.java +++ b/data-prepper-plugins/common/src/main/java/org/opensearch/dataprepper/plugins/processor/DelayProcessor.java @@ -61,7 +61,7 @@ public void shutdown() { "Typically, you should use this only for testing, experimenting, and debugging.") public static class Configuration { @JsonProperty("for") - @JsonPropertyDescription("The duration of time to delay. 
Defaults to `1s`.") + @JsonPropertyDescription("The duration of time to delay. Defaults to 1s.") private Duration delayFor = Duration.ofSeconds(1); public Duration getDelayFor() { diff --git a/data-prepper-plugins/common/src/main/java/org/opensearch/dataprepper/plugins/processor/StringProcessor.java b/data-prepper-plugins/common/src/main/java/org/opensearch/dataprepper/plugins/processor/StringProcessor.java index ff18a1e629..0502951137 100644 --- a/data-prepper-plugins/common/src/main/java/org/opensearch/dataprepper/plugins/processor/StringProcessor.java +++ b/data-prepper-plugins/common/src/main/java/org/opensearch/dataprepper/plugins/processor/StringProcessor.java @@ -43,9 +43,9 @@ public class StringProcessor implements Processor, Record> private final boolean upperCase; @JsonPropertyOrder - @JsonClassDescription("The `string_converter` processor converts a string to uppercase or lowercase.") + @JsonClassDescription("The string_converter processor converts a string to uppercase or lowercase.") public static class Configuration { - @JsonPropertyDescription("Whether to convert to uppercase (`true`) or lowercase (`false`).") + @JsonPropertyDescription("Whether to convert to uppercase (true) or lowercase (false).") private boolean upperCase = true; public boolean getUpperCase() { diff --git a/data-prepper-plugins/csv-processor/src/main/java/org/opensearch/dataprepper/plugins/codec/csv/CsvOutputCodecConfig.java b/data-prepper-plugins/csv-processor/src/main/java/org/opensearch/dataprepper/plugins/codec/csv/CsvOutputCodecConfig.java index 133e11cad9..b27cdfd3eb 100644 --- a/data-prepper-plugins/csv-processor/src/main/java/org/opensearch/dataprepper/plugins/codec/csv/CsvOutputCodecConfig.java +++ b/data-prepper-plugins/csv-processor/src/main/java/org/opensearch/dataprepper/plugins/codec/csv/CsvOutputCodecConfig.java @@ -6,6 +6,7 @@ import com.fasterxml.jackson.annotation.JsonClassDescription; import com.fasterxml.jackson.annotation.JsonProperty; +import 
com.fasterxml.jackson.annotation.JsonPropertyDescription; import com.fasterxml.jackson.annotation.JsonPropertyOrder; import jakarta.validation.Valid; import jakarta.validation.constraints.Size; @@ -13,14 +14,16 @@ import java.util.List; @JsonPropertyOrder -@JsonClassDescription("The `csv` processor parses comma-separated values (CSVs) from the event into columns.") +@JsonClassDescription("The csv codec parses comma-separated values (CSVs) content into events from that content.") public class CsvOutputCodecConfig { static final String DEFAULT_DELIMITER = ","; @JsonProperty("delimiter") + @JsonPropertyDescription("The character separating each column. Default value is ,.") private String delimiter = DEFAULT_DELIMITER; @JsonProperty("header") + @JsonPropertyDescription("User-specified names for the CSV columns.") private List header; @Valid @@ -32,10 +35,12 @@ public class CsvOutputCodecConfig { @Size(max = 0, message = "Header from file is not supported.") @JsonProperty("region") private String region; + @Valid @Size(max = 0, message = "Header from file is not supported.") @JsonProperty("bucket_name") private String bucketName; + @Valid @Size(max = 0, message = "Header from file is not supported.") @JsonProperty("fileKey") diff --git a/data-prepper-plugins/csv-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/csv/CsvProcessorConfig.java b/data-prepper-plugins/csv-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/csv/CsvProcessorConfig.java index e21968ebdf..07cfe5798f 100644 --- a/data-prepper-plugins/csv-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/csv/CsvProcessorConfig.java +++ b/data-prepper-plugins/csv-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/csv/CsvProcessorConfig.java @@ -5,6 +5,7 @@ package org.opensearch.dataprepper.plugins.processor.csv; +import com.fasterxml.jackson.annotation.JsonClassDescription; import com.fasterxml.jackson.annotation.JsonProperty; import 
com.fasterxml.jackson.annotation.JsonPropertyDescription; import jakarta.validation.constraints.AssertTrue; @@ -14,6 +15,7 @@ /** * Configuration class for {@link CsvProcessor}. */ +@JsonClassDescription("The csv processor parses comma-separated values (CSVs) strings into structured data.") public class CsvProcessorConfig { static final String DEFAULT_SOURCE = "message"; static final String DEFAULT_DELIMITER = ","; @@ -21,45 +23,44 @@ public class CsvProcessorConfig { static final Boolean DEFAULT_DELETE_HEADERS = true; @JsonProperty("source") - @JsonPropertyDescription("The field in the event that will be parsed. Default value is `message`.") + @JsonPropertyDescription("The field in the event that will be parsed. Default value is message.") private String source = DEFAULT_SOURCE; @JsonProperty("delimiter") - @JsonPropertyDescription("The character separating each column. Default value is `,`.") + @JsonPropertyDescription("The character separating each column. Default value is ,.") private String delimiter = DEFAULT_DELIMITER; @JsonProperty("delete_header") - @JsonPropertyDescription("If specified, the event header (`column_names_source_key`) is deleted after the event " + + @JsonPropertyDescription("If specified, the event header (column_names_source_key) is deleted after the event " + "is parsed. If there is no event header, no action is taken. Default value is true.") private Boolean deleteHeader = DEFAULT_DELETE_HEADERS; @JsonProperty("quote_character") @JsonPropertyDescription("The character used as a text qualifier for a single column of data. " + - "Default value is `\"`.") + "Default value is \".") private String quoteCharacter = DEFAULT_QUOTE_CHARACTER; @JsonProperty("column_names_source_key") @JsonPropertyDescription("The field in the event that specifies the CSV column names, which will be " + - "automatically detected. If there need to be extra column names, the column names are automatically " + - "generated according to their index. 
If `column_names` is also defined, the header in " + - "`column_names_source_key` can also be used to generate the event fields. " + + "automatically detected. If there are additional columns in the source, the column names are automatically " + + "generated according to column index. If column_names is also defined, the header in " + + "column_names_source_key can also be used to generate the event fields. " + "If too few columns are specified in this field, the remaining column names are automatically generated. " + "If too many column names are specified in this field, the CSV processor omits the extra column names.") private String columnNamesSourceKey; @JsonProperty("column_names") @JsonPropertyDescription("User-specified names for the CSV columns. " + - "Default value is `[column1, column2, ..., columnN]` if there are no columns of data in the CSV " + - "record and `column_names_source_key` is not defined. If `column_names_source_key` is defined, " + - "the header in `column_names_source_key` generates the event fields. If too few columns are specified " + + "Default value is [column1, column2, ..., columnN] if there are no columns of data in the CSV " + + "record and column_names_source_key is not defined. If column_names_source_key is defined, " + + "the header in column_names_source_key generates the event fields. If too few columns are specified " + "in this field, the remaining column names are automatically generated. 
" + "If too many column names are specified in this field, the CSV processor omits the extra column names.") private List columnNames; @JsonProperty("csv_when") - @JsonPropertyDescription("Allows you to specify a Data Prepper [conditional expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/), " + - "such as `/some-key == \"test\"`, that will be evaluated to determine whether " + - "the processor should be applied to the event.") + @JsonPropertyDescription("A conditional expression such as /some_key == \"test\". " + + "If specified, the csv processor will only run on events when the expression evaluates to true. ") private String csvWhen; @JsonPropertyDescription("If true, the configured source field will be deleted after the CSV data is parsed into separate fields.") diff --git a/data-prepper-plugins/date-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/date/DateProcessorConfig.java b/data-prepper-plugins/date-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/date/DateProcessorConfig.java index c6a4c471b7..bf2a45756c 100644 --- a/data-prepper-plugins/date-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/date/DateProcessorConfig.java +++ b/data-prepper-plugins/date-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/date/DateProcessorConfig.java @@ -18,7 +18,7 @@ import java.time.format.DateTimeFormatter; @JsonPropertyOrder -@JsonClassDescription("The `date` processor adds a default timestamp to an event, parses timestamp fields, " + +@JsonClassDescription("The date processor adds a default timestamp to an event, parses timestamp fields, " + "and converts timestamp information to the International Organization for Standardization (ISO) 8601 format. 
" + "This timestamp information can be used as an event timestamp.") public class DateProcessorConfig { @@ -32,13 +32,16 @@ public class DateProcessorConfig { public static class DateMatch { @JsonProperty("key") @JsonPropertyDescription("Represents the event key against which to match patterns. " + - "Required if `match` is configured. ") + "Required if match is configured.") private String key; + @JsonProperty("patterns") @JsonPropertyDescription("A list of possible patterns that the timestamp value of the key can have. The patterns " + - "are based on a sequence of letters and symbols. The `patterns` support all the patterns listed in the " + + "are based on a sequence of letters and symbols. The patterns support all the patterns listed in the " + "Java DateTimeFormatter (https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html) reference. " + - "The timestamp value also supports `epoch_second`, `epoch_milli`, and `epoch_nano` values, " + + "To match ISO 8601 formatted strings, use, yyyy-MM-dd'T'HH:mm:ss.SSSXXX. " + + "To match Apache Common Log Format, use dd/MMM/yyyy:HH:mm:ss Z. " + + "The timestamp value also supports epoch_second, epoch_milli, and epoch_nano values, " + "which represent the timestamp as the number of seconds, milliseconds, and nanoseconds since the epoch. " + "Epoch values always use the UTC time zone.") private List patterns; @@ -98,31 +101,33 @@ public static boolean isValidPattern(final String pattern) { } @JsonProperty("from_time_received") - @JsonPropertyDescription("When `true`, the timestamp from the event metadata, " + + @JsonPropertyDescription("When true, the timestamp from the event metadata, " + "which is the time at which the source receives the event, is added to the event data. " + - "This option cannot be defined at the same time as `match`. Default is `false`.") + "This option cannot be defined at the same time as match. 
Default is false.") private Boolean fromTimeReceived = DEFAULT_FROM_TIME_RECEIVED; - @JsonProperty("to_origination_metadata") - @JsonPropertyDescription("When `true`, the matched time is also added to the event's metadata as an instance of " + - "`Instant`. Default is `false`.") - private Boolean toOriginationMetadata = DEFAULT_TO_ORIGINATION_METADATA; - @JsonProperty("match") @JsonPropertyDescription("The date match configuration. " + - "This option cannot be defined at the same time as `from_time_received`. There is no default value.") + "This option cannot be defined at the same time as from_time_received. " + + "The date processor will use the first pattern that matches each event's timestamp field. " + + "You must provide at least one pattern unless you have from_time_received.") private List match; @JsonProperty("destination") @JsonPropertyDescription("The field used to store the timestamp parsed by the date processor. " + - "Can be used with both `match` and `from_time_received`. Default is `@timestamp`.") + "Can be used with both match and from_time_received. Default is @timestamp.") private String destination = DEFAULT_DESTINATION; @JsonProperty("output_format") @JsonPropertyDescription("Determines the format of the timestamp added to an event. " + - "Default is `yyyy-MM-dd'T'HH:mm:ss.SSSXXX`.") + "Default is yyyy-MM-dd'T'HH:mm:ss.SSSXXX.") private String outputFormat = DEFAULT_OUTPUT_FORMAT; + @JsonProperty("to_origination_metadata") + @JsonPropertyDescription("When true, the matched time is also added to the event's metadata as an instance of " + + "Instant. Default is false.") + private Boolean toOriginationMetadata = DEFAULT_TO_ORIGINATION_METADATA; + @JsonProperty("source_timezone") @JsonPropertyDescription("The time zone used to parse dates, including when the zone or offset cannot be extracted " + "from the value. If the zone or offset are part of the value, then the time zone is ignored. 
" + @@ -131,23 +136,23 @@ public static boolean isValidPattern(final String pattern) { private String sourceTimezone = DEFAULT_SOURCE_TIMEZONE; @JsonProperty("destination_timezone") - @JsonPropertyDescription("The time zone used for storing the timestamp in the `destination` field. " + + @JsonPropertyDescription("The time zone used for storing the timestamp in the destination field. " + "A list of all the available time zones is contained in the TZ database name column of " + "(https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List).") private String destinationTimezone = DEFAULT_DESTINATION_TIMEZONE; @JsonProperty("locale") - @JsonPropertyDescription("The location used for parsing dates. Commonly used for parsing month names (`MMM`). " + - "The value can contain language, country, or variant fields in IETF BCP 47, such as `en-US`, " + + @JsonPropertyDescription("The location used for parsing dates. Commonly used for parsing month names (MMM). " + + "The value can contain language, country, or variant fields in IETF BCP 47, such as en-US, " + "or a string representation of the " + - "locale (https://docs.oracle.com/javase/8/docs/api/java/util/Locale.html) object, such as `en_US`. " + + "locale (https://docs.oracle.com/javase/8/docs/api/java/util/Locale.html) object, such as en_US. " + "A full list of locale fields, including language, country, and variant, can be found in " + "(https://www.iana.org/assignments/language-subtag-registry/language-subtag-registry). " + - "Default is `Locale.ROOT`.") + "Default is Locale.ROOT.") private String locale; @JsonProperty("date_when") - @JsonPropertyDescription("Specifies under what condition the `date` processor should perform matching. " + + @JsonPropertyDescription("Specifies under what condition the date processor should perform matching. 
" + "Default is no condition.") private String dateWhen; diff --git a/data-prepper-plugins/decompress-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/decompress/DecompressProcessorConfig.java b/data-prepper-plugins/decompress-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/decompress/DecompressProcessorConfig.java index be1238885c..406654c9da 100644 --- a/data-prepper-plugins/decompress-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/decompress/DecompressProcessorConfig.java +++ b/data-prepper-plugins/decompress-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/decompress/DecompressProcessorConfig.java @@ -18,7 +18,7 @@ import java.util.List; @JsonPropertyOrder -@JsonClassDescription("The `decompress` processor decompresses any Base64-encoded " + +@JsonClassDescription("The decompress processor decompresses any Base64-encoded " + "compressed fields inside of an event.") public class DecompressProcessorConfig { @@ -28,19 +28,19 @@ public class DecompressProcessorConfig { @NotNull private List keys; - @JsonPropertyDescription("The type of decompression to use for the keys in the event. Only gzip is supported.") + @JsonPropertyDescription("The type of decompression to use for the keys in the event. Only gzip is supported.") @JsonProperty("type") @NotNull private DecompressionType decompressionType; - @JsonPropertyDescription("A conditional expression that determines when the decompress processor will run on certain events.") - @JsonProperty("decompress_when") - private String decompressWhen; - - @JsonPropertyDescription("A list of strings with which to tag events when the processor fails to decompress the keys inside an event. Defaults to _decompression_failure.") + @JsonPropertyDescription("A list of strings with which to tag events when the processor fails to decompress the keys inside an event. 
Defaults to _decompression_failure.") @JsonProperty("tags_on_failure") private List tagsOnFailure = List.of("_decompression_failure"); + @JsonPropertyDescription("A conditional expression, such as '/is_compressed == true', that determines when the decompress processor will run on certain events.") + @JsonProperty("decompress_when") + private String decompressWhen; + @JsonIgnore private final EncodingType encodingType = EncodingType.BASE64; diff --git a/data-prepper-plugins/decompress-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/decompress/DecompressionType.java b/data-prepper-plugins/decompress-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/decompress/DecompressionType.java index 88f64a52e1..1b7c51d39a 100644 --- a/data-prepper-plugins/decompress-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/decompress/DecompressionType.java +++ b/data-prepper-plugins/decompress-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/decompress/DecompressionType.java @@ -6,6 +6,7 @@ package org.opensearch.dataprepper.plugins.processor.decompress; import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; import org.opensearch.dataprepper.model.codec.DecompressionEngine; import org.opensearch.dataprepper.plugins.codec.GZipDecompressionEngine; @@ -37,6 +38,11 @@ static DecompressionType fromOptionValue(final String option) { return OPTIONS_MAP.get(option); } + @JsonValue + public String getOptionValue() { + return option; + } + @Override public DecompressionEngine getDecompressionEngine() { return DECOMPRESSION_ENGINE_MAP.get(this.option); diff --git a/data-prepper-plugins/decompress-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/decompress/DecompressionTypeTest.java b/data-prepper-plugins/decompress-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/decompress/DecompressionTypeTest.java index 287e0cdb1d..7eccd3dbf8 
100644 --- a/data-prepper-plugins/decompress-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/decompress/DecompressionTypeTest.java +++ b/data-prepper-plugins/decompress-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/decompress/DecompressionTypeTest.java @@ -34,6 +34,12 @@ void getDecompressionEngine_returns_expected_DecompressionEngine(final Decompres assertThat(enumValue.getDecompressionEngine(), instanceOf(decompressionEngineClass)); } + @ParameterizedTest + @ArgumentsSource(EnumToStringNameArgumentsProvider.class) + void getOptionValue_returns_data_type_name(final DecompressionType decompressionType, final String optionValue) { + assertThat(decompressionType.getOptionValue(), equalTo(optionValue)); + } + private static class EnumToStringNameArgumentsProvider implements ArgumentsProvider { @Override public Stream provideArguments(final ExtensionContext context) { diff --git a/data-prepper-plugins/dissect-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/dissect/DissectProcessorConfig.java b/data-prepper-plugins/dissect-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/dissect/DissectProcessorConfig.java index f1e7ce0686..e7d2b2a489 100644 --- a/data-prepper-plugins/dissect-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/dissect/DissectProcessorConfig.java +++ b/data-prepper-plugins/dissect-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/dissect/DissectProcessorConfig.java @@ -10,23 +10,31 @@ import java.util.Map; @JsonPropertyOrder -@JsonClassDescription("The `dissect` processor extracts values from an event and maps them to individual fields " + - "based on user-defined `dissect` patterns. The processor is well suited for field extraction from log " + +@JsonClassDescription("The dissect processor extracts values from an event and maps them to individual fields " + + "based on user-defined dissect patterns. 
The processor is well suited for field extraction from log " + "messages with a known structure.") public class DissectProcessorConfig { @NotNull @JsonProperty("map") - @JsonPropertyDescription("Defines the `dissect` patterns for specific keys. For details on how to define fields " + - "in the `dissect` pattern, see (https://opensearch.org/docs/latest/data-prepper/pipelines/configuration/processors/dissect/#field-notations).") + @JsonPropertyDescription("Defines the dissect patterns for specific keys. " + + "Each key is a field name, and the value is the dissect pattern to use for dissecting it. " + + "For details on how to define fields " + + "in the dissect pattern, see (https://opensearch.org/docs/latest/data-prepper/pipelines/configuration/processors/dissect/#field-notations). " + + "An example dissect pattern is %{Date} %{Time} %{Log_Type}: %{Message}, which will dissect into four fields.") private Map map; + @JsonProperty("target_types") - @JsonPropertyDescription("Specifies the data types for extract fields. Valid options are `integer`, " + - "`double`, `string`, `long`, `big_decimal`, and `boolean`. By default, all fields are of the `string` type.") + @JsonPropertyDescription("Specifies the data types for extract fields. " + + "Each key is a field name, and the value is the data type to use for that field. " + + "Valid data types are integer, double, string, long, big_decimal, and boolean. " + + "By default, all fields are treated as string.") private Map targetTypes; + @JsonProperty("dissect_when") - @JsonPropertyDescription("Specifies a condition for performing the `dissect` operation using a Data Prepper [conditional expression]" + - "(https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/). " + - "If specified, the `dissect` operation will only run when the expression evaluates to true.") + @JsonPropertyDescription("Specifies a condition for performing the dissect operation using a " + + "conditional expression. 
" + + "If specified, the dissect operation will only run when the expression evaluates to true. " + + "For example, '/some_value == \"log\"'.") private String dissectWhen; public String getDissectWhen(){ diff --git a/data-prepper-plugins/drop-events-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/drop/DropEventProcessorConfig.java b/data-prepper-plugins/drop-events-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/drop/DropEventProcessorConfig.java index 4f79c6575f..7674675e8b 100644 --- a/data-prepper-plugins/drop-events-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/drop/DropEventProcessorConfig.java +++ b/data-prepper-plugins/drop-events-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/drop/DropEventProcessorConfig.java @@ -13,15 +13,16 @@ import org.opensearch.dataprepper.model.event.HandleFailedEventsOption; @JsonPropertyOrder -@JsonClassDescription("The `drop_events` processor drops all the events that are passed into it.") +@JsonClassDescription("The drop_events processor conditionally drops events.") public class DropEventProcessorConfig { - @JsonPropertyDescription("Accepts a Data Prepper conditional expression string following the [Data Prepper Expression Syntax](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/). Configuring drop_events with drop_when: true drops all the events received.") + @JsonPropertyDescription("A conditional expression such as '/log_type == \"DEBUG\"'. " + + "The drop_when processor will drop all events where the condition evaluates to true. Those events will not go to any further processors or sinks.") @JsonProperty("drop_when") @NotEmpty private String dropWhen; - @JsonPropertyDescription("Specifies how exceptions are handled when an exception occurs while evaluating an event. Default value is 'drop', which drops the event so that it is not sent to OpenSearch. 
Available options are 'drop', 'drop_silently', 'skip', and 'skip_silently'.") + @JsonPropertyDescription("Specifies how exceptions are handled when an exception occurs while evaluating an event. Default value is skip, which drops the event so that it is not sent to further processors or sinks.") @JsonProperty("handle_failed_events") private HandleFailedEventsOption handleFailedEventsOption = HandleFailedEventsOption.SKIP; diff --git a/data-prepper-plugins/flatten-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/flatten/FlattenProcessorConfig.java b/data-prepper-plugins/flatten-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/flatten/FlattenProcessorConfig.java index f3fc6326ee..b7541f65a1 100644 --- a/data-prepper-plugins/flatten-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/flatten/FlattenProcessorConfig.java +++ b/data-prepper-plugins/flatten-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/flatten/FlattenProcessorConfig.java @@ -17,52 +17,52 @@ import java.util.List; @JsonPropertyOrder -@JsonClassDescription("The `flatten` processor transforms nested objects inside of events into flattened structures.") +@JsonClassDescription("The flatten processor transforms nested objects inside of events into flattened structures.") public class FlattenProcessorConfig { private static final List DEFAULT_EXCLUDE_KEYS = new ArrayList<>(); @NotNull @JsonProperty("source") - @JsonPropertyDescription("The source key on which to perform the operation. If set to an empty string (`\"\"`), " + + @JsonPropertyDescription("The source key on which to perform the operation. If set to an empty string (\"\"), " + "then the processor uses the root of the event as the source.") private String source; @NotNull @JsonProperty("target") - @JsonPropertyDescription("The target key to put into the flattened fields. 
If set to an empty string (`\"\"`), " + + @JsonPropertyDescription("The target key to put into the flattened fields. If set to an empty string (\"\") " + "then the processor uses the root of the event as the target.") private String target; @JsonProperty("remove_processed_fields") - @JsonPropertyDescription("When `true`, the processor removes all processed fields from the source. Default is `false`.") + @JsonPropertyDescription("When true, the processor removes all processed fields from the source. " + + "The default is false which leaves the source fields.") private boolean removeProcessedFields = false; @JsonProperty("remove_list_indices") - @JsonPropertyDescription("When `true`, the processor converts the fields from the source map into lists and " + - "puts the lists into the target field. Default is `false`.") + @JsonPropertyDescription("When true, the processor converts the fields from the source map into lists and " + + "puts the lists into the target field. Default is false.") private boolean removeListIndices = false; @JsonProperty("remove_brackets") - @JsonPropertyDescription("When `true`, the processor also removes brackets around the indices. Can only be " + - "set to `true` when `remove_list_indices` is `true`.") + @JsonPropertyDescription("When true, the processor also removes brackets around the indices. Can only be " + + "set to true when remove_list_indices is true.") private boolean removeBrackets = false; @JsonProperty("exclude_keys") @JsonPropertyDescription("The keys from the source field that should be excluded from processing. 
" + - "Default is an empty list (`[]`).") + "By default no keys are excluded.") private List excludeKeys = DEFAULT_EXCLUDE_KEYS; - @JsonProperty("flatten_when") - @JsonPropertyDescription("A Data Prepper [conditional expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/), " + - "such as `/some-key == \"test\"'`, that determines whether the `flatten` processor will be run on the " + - "event. Default is `null`, which means that all events will be processed unless otherwise stated.") - private String flattenWhen; - @JsonProperty("tags_on_failure") @JsonPropertyDescription("A list of tags to add to the event metadata when the event fails to process.") private List tagsOnFailure; + @JsonProperty("flatten_when") + @JsonPropertyDescription("A conditional expression such as /some_key == \"test\". " + + "If specified, the flatten processor will only run on events when the expression evaluates to true. ") + private String flattenWhen; + public String getSource() { return source; } diff --git a/data-prepper-plugins/geoip-processor/build.gradle b/data-prepper-plugins/geoip-processor/build.gradle index d1ebc0c971..10398d6cc1 100644 --- a/data-prepper-plugins/geoip-processor/build.gradle +++ b/data-prepper-plugins/geoip-processor/build.gradle @@ -27,6 +27,7 @@ dependencies { implementation libs.armeria.core testImplementation project(':data-prepper-core') + testImplementation project(':data-prepper-event') testImplementation project(':data-prepper-plugin-framework') testImplementation project(':data-prepper-pipeline-parser') testImplementation 'com.fasterxml.jackson.datatype:jackson-datatype-jsr310' diff --git a/data-prepper-plugins/geoip-processor/src/main/java/org/opensearch/dataprepper/plugins/geoip/processor/EntryConfig.java b/data-prepper-plugins/geoip-processor/src/main/java/org/opensearch/dataprepper/plugins/geoip/processor/EntryConfig.java index b425ce3bbb..d6763eb512 100644 --- 
a/data-prepper-plugins/geoip-processor/src/main/java/org/opensearch/dataprepper/plugins/geoip/processor/EntryConfig.java +++ b/data-prepper-plugins/geoip-processor/src/main/java/org/opensearch/dataprepper/plugins/geoip/processor/EntryConfig.java @@ -5,8 +5,10 @@ package org.opensearch.dataprepper.plugins.geoip.processor; +import com.fasterxml.jackson.annotation.JsonClassDescription; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyDescription; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; import jakarta.validation.constraints.AssertTrue; import jakarta.validation.constraints.NotEmpty; import org.opensearch.dataprepper.plugins.geoip.GeoIPField; @@ -15,6 +17,8 @@ import java.util.EnumSet; import java.util.List; +@JsonPropertyOrder +@JsonClassDescription("Defines a single entry for geolocation.") public class EntryConfig { static final String DEFAULT_TARGET = "geo"; @@ -23,15 +27,17 @@ public class EntryConfig { @NotEmpty private String source; - @JsonPropertyDescription("The key of the target field in which to save the geolocation data. Default is geo.") + @JsonPropertyDescription("The key of the target field in which to set the geolocation data. Default is geo.") @JsonProperty("target") private String target = DEFAULT_TARGET; - @JsonPropertyDescription("The list of geolocation fields to include in the target object. By default, this is all the fields provided by the configured databases.") + @JsonPropertyDescription("The list of geolocation fields to include in the target object. By default, this is all the fields provided by the configured databases. " + + "For example, if you wish to only obtain the actual location, you can specify location.") @JsonProperty("include_fields") private List includeFields; - @JsonPropertyDescription("The list of geolocation fields to exclude from the target object.") + @JsonPropertyDescription("The list of geolocation fields to exclude from the target object. 
" + + "For example, you can exclude ASN fields by including asn, asn_organization, network, ip.") @JsonProperty("exclude_fields") private List excludeFields; diff --git a/data-prepper-plugins/geoip-processor/src/main/java/org/opensearch/dataprepper/plugins/geoip/processor/GeoIPProcessorConfig.java b/data-prepper-plugins/geoip-processor/src/main/java/org/opensearch/dataprepper/plugins/geoip/processor/GeoIPProcessorConfig.java index 2d48fd8fda..f42d1f7d3c 100644 --- a/data-prepper-plugins/geoip-processor/src/main/java/org/opensearch/dataprepper/plugins/geoip/processor/GeoIPProcessorConfig.java +++ b/data-prepper-plugins/geoip-processor/src/main/java/org/opensearch/dataprepper/plugins/geoip/processor/GeoIPProcessorConfig.java @@ -19,7 +19,7 @@ * An implementation class of GeoIP Processor configuration */ @JsonPropertyOrder -@JsonClassDescription("The `geoip` processor enriches events with geographic information extracted from IP addresses " + +@JsonClassDescription("The geoip processor enriches events with geographic information extracted from IP addresses " + "contained in the events.") public class GeoIPProcessorConfig { @@ -27,25 +27,24 @@ public class GeoIPProcessorConfig { @NotNull @Size(min = 1) @JsonProperty("entries") - @JsonPropertyDescription("The list of entries marked for enrichment.") + @JsonPropertyDescription("The list of entries for enrichment. 
Each entry provides a source field with an IP address along with a target for the enriched geolocation data.") private List entries; @JsonProperty("tags_on_engine_failure") - @JsonPropertyDescription("The tags to add to the event metadata if the geoip processor is unable to enrich an event due to an engine failure.") + @JsonPropertyDescription("The tags to add to the event metadata if the geoip processor is unable to enrich an event due to an engine failure.") private List tagsOnEngineFailure; @JsonProperty("tags_on_ip_not_found") - @JsonPropertyDescription("The tags to add to the event metadata if the geoip processor is unable to find a location for the IP address.") + @JsonPropertyDescription("The tags to add to the event metadata if the geoip processor is unable to find a location for a valid IP address.") private List tagsOnIPNotFound; @JsonProperty("tags_on_no_valid_ip") - @JsonPropertyDescription("The tags to add to the event metadata if the source field is not a valid IP address. This includes the localhost IP address.") + @JsonPropertyDescription("The tags to add to the event metadata if the source field is not a valid IP address. A source field may not be valid because it is incorrectly formatted or is the loopback/localhost IP address.") private List tagsOnNoValidIp; @JsonProperty("geoip_when") - @JsonPropertyDescription("Specifies a condition for including Events in the `geoip` processor using a Data Prepper [conditional expression]" + - "(https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/)." + - " If specified, the `geoip` processor will only run when the expression evaluates to true.") + @JsonPropertyDescription("A conditional expression such as '/srcaddr != \"8.8.8.8\"'. " + + "If specified, the geoip processor will only run on events when the expression evaluates to true. 
") private String whenCondition; /** diff --git a/data-prepper-plugins/grok-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/grok/GrokProcessorConfig.java b/data-prepper-plugins/grok-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/grok/GrokProcessorConfig.java index 7fe746ad8e..9a505316a6 100644 --- a/data-prepper-plugins/grok-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/grok/GrokProcessorConfig.java +++ b/data-prepper-plugins/grok-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/grok/GrokProcessorConfig.java @@ -9,13 +9,15 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyDescription; import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import jakarta.validation.constraints.NotEmpty; +import jakarta.validation.constraints.NotNull; import java.util.Collections; import java.util.List; import java.util.Map; @JsonPropertyOrder -@JsonClassDescription("The `grok` processor uses pattern matching to structure and extract important keys from " + +@JsonClassDescription("The grok processor uses pattern matching to structure and extract important keys from " + "unstructured data.") public class GrokProcessorConfig { @@ -46,56 +48,75 @@ public class GrokProcessorConfig { static final int DEFAULT_TIMEOUT_MILLIS = 30000; static final String DEFAULT_TARGET_KEY = null; + @JsonProperty(MATCH) + @NotEmpty + @NotNull + @JsonPropertyDescription("Specifies which keys should match specific patterns. " + + "Each key is a source field. The value is a list of possible grok patterns to match on. " + + "The grok processor will extract values from the first match for each field. " + + "Default is an empty response body.") + private Map> match = Collections.emptyMap(); + + @JsonProperty(TARGET_KEY) + @JsonPropertyDescription("Specifies a parent-level key used to store all captures. 
Default value is null which will write captures into the root of the event.") + private String targetKey = DEFAULT_TARGET_KEY; + @JsonProperty(BREAK_ON_MATCH) - @JsonPropertyDescription("Specifies whether to match all patterns (`false`) or stop once the first successful " + - "match is found (`true`). Default is `true`.") + @JsonPropertyDescription("Specifies whether to match all patterns (false) or stop once the first successful " + + "match is found (true). Default is true.") private boolean breakOnMatch = DEFAULT_BREAK_ON_MATCH; + @JsonProperty(KEEP_EMPTY_CAPTURES) - @JsonPropertyDescription("Enables the preservation of `null` captures from the processed output. Default is `false`.") + @JsonPropertyDescription("Enables the preservation of null captures from the processed output. Default is false.") private boolean keepEmptyCaptures = DEFAULT_KEEP_EMPTY_CAPTURES; - @JsonProperty(MATCH) - @JsonPropertyDescription("Specifies which keys should match specific patterns. Default is an empty response body.") - private Map> match = Collections.emptyMap(); + @JsonProperty(NAMED_CAPTURES_ONLY) - @JsonPropertyDescription("Specifies whether to keep only named captures. Default is `true`.") + @JsonPropertyDescription("Specifies whether to keep only named captures. Default is true.") private boolean namedCapturesOnly = DEFAULT_NAMED_CAPTURES_ONLY; + @JsonProperty(KEYS_TO_OVERWRITE) @JsonPropertyDescription("Specifies which existing keys will be overwritten if there is a capture with the same key value. " + - "Default is `[]`.") + "Default is an empty list.") private List keysToOverwrite = Collections.emptyList(); + + @JsonProperty(PATTERN_DEFINITIONS) + @JsonPropertyDescription("Allows for a custom pattern that can be used inline inside the response body. 
" + + "Default is an empty response body.") + private Map patternDefinitions = Collections.emptyMap(); + @JsonProperty(PATTERNS_DIRECTORIES) @JsonPropertyDescription("Specifies which directory paths contain the custom pattern files. Default is an empty list.") private List patternsDirectories = Collections.emptyList(); + @JsonProperty(PATTERNS_FILES_GLOB) @JsonPropertyDescription("Specifies which pattern files to use from the directories specified for " + - "`pattern_directories`. Default is `*`.") + "pattern_directories. Default is *.") private String patternsFilesGlob = DEFAULT_PATTERNS_FILES_GLOB; - @JsonProperty(PATTERN_DEFINITIONS) - @JsonPropertyDescription("Allows for a custom pattern that can be used inline inside the response body. " + - "Default is an empty response body.") - private Map patternDefinitions = Collections.emptyMap(); + @JsonProperty(TIMEOUT_MILLIS) @JsonPropertyDescription("The maximum amount of time during which matching occurs. " + - "Setting to `0` prevents any matching from occurring. Default is `30,000`.") + "Setting to 0 prevents any matching from occurring. Default is 30000.") private int timeoutMillis = DEFAULT_TIMEOUT_MILLIS; - @JsonProperty(TARGET_KEY) - @JsonPropertyDescription("Specifies a parent-level key used to store all captures. Default value is `null`.") - private String targetKey = DEFAULT_TARGET_KEY; - @JsonProperty(GROK_WHEN) - @JsonPropertyDescription("Specifies under what condition the `grok` processor should perform matching. " + - "Default is no condition.") - private String grokWhen; + @JsonProperty(TAGS_ON_MATCH_FAILURE) - @JsonPropertyDescription("A `List` of `String`s that specifies the tags to be set in the event when grok fails to " + + @JsonPropertyDescription("A List of Strings that specifies the tags to be set in the event when grok fails to " + "match or an unknown exception occurs while matching. 
This tag may be used in conditional expressions in " + "other parts of the configuration") private List tagsOnMatchFailure = Collections.emptyList(); + @JsonProperty(TAGS_ON_TIMEOUT) - @JsonPropertyDescription("A `List` of `String`s that specifies the tags to be set in the event when grok match times out.") + @JsonPropertyDescription("The tags to add to the event metadata if the grok match times out.") private List tagsOnTimeout = Collections.emptyList(); + + @JsonProperty(GROK_WHEN) + @JsonPropertyDescription("A conditional expression such as '/test != false'. " + + "If specified, the grok processor will only run on events when the expression evaluates to true. ") + private String grokWhen; + @JsonProperty(INCLUDE_PERFORMANCE_METADATA) - @JsonPropertyDescription("A `Boolean` on whether to include performance metadata into event metadata, " + - "e.g. _total_grok_patterns_attempted, _total_grok_processing_time.") + @JsonPropertyDescription("A boolean value to determine whether to include performance metadata into event metadata. " + + "If set to true, the events coming out of grok will have new fields such as _total_grok_patterns_attempted and _total_grok_processing_time." + + "You can use this metadata to perform performance testing and tuning of your grok patterns. 
By default, it is not included.") private boolean includePerformanceMetadata = false; public boolean isBreakOnMatch() { diff --git a/data-prepper-plugins/kafka-plugins/build.gradle b/data-prepper-plugins/kafka-plugins/build.gradle index 23ad2d05c1..0ccb88b395 100644 --- a/data-prepper-plugins/kafka-plugins/build.gradle +++ b/data-prepper-plugins/kafka-plugins/build.gradle @@ -59,6 +59,7 @@ dependencies { testImplementation project(':data-prepper-test-common') testImplementation project(':data-prepper-plugins:blocking-buffer') testImplementation project(':data-prepper-core') + testImplementation project(':data-prepper-event') testImplementation project(':data-prepper-plugin-framework') testImplementation project(':data-prepper-pipeline-parser') testImplementation 'org.apache.kafka:kafka_2.13:3.6.1' diff --git a/data-prepper-plugins/key-value-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/keyvalue/KeyValueProcessor.java b/data-prepper-plugins/key-value-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/keyvalue/KeyValueProcessor.java index c42e015829..8c7cfeb28d 100644 --- a/data-prepper-plugins/key-value-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/keyvalue/KeyValueProcessor.java +++ b/data-prepper-plugins/key-value-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/keyvalue/KeyValueProcessor.java @@ -55,10 +55,6 @@ public class KeyValueProcessor extends AbstractProcessor, Record excludeKeysSet = new HashSet(); private final HashMap defaultValuesMap = new HashMap<>(); private final Set defaultValuesSet = new HashSet(); - private final String lowercaseKey = "lowercase"; - private final String uppercaseKey = "uppercase"; - private final String capitalizeKey = "capitalize"; - private final Set validTransformOptionSet = Set.of("", lowercaseKey, uppercaseKey, capitalizeKey); private final String whitespaceStrict = "strict"; private final String whitespaceLenient = "lenient"; private final 
Set validWhitespaceSet = Set.of(whitespaceLenient, whitespaceStrict); @@ -167,14 +163,6 @@ public KeyValueProcessor(final PluginMetrics pluginMetrics, validateKeySets(includeKeysSet, excludeKeysSet, defaultValuesSet); - if (!validTransformOptionSet.contains(keyValueProcessorConfig.getTransformKey())) { - throw new IllegalArgumentException(String.format("The transform_key value: %s is not a valid option", keyValueProcessorConfig.getTransformKey())); - } - - if (!(validWhitespaceSet.contains(keyValueProcessorConfig.getWhitespace()))) { - throw new IllegalArgumentException(String.format("The whitespace value: %s is not a valid option", keyValueProcessorConfig.getWhitespace())); - } - final Pattern boolCheck = Pattern.compile("true|false", Pattern.CASE_INSENSITIVE); final Matcher duplicateValueBoolMatch = boolCheck.matcher(String.valueOf(keyValueProcessorConfig.getSkipDuplicateValues())); final Matcher removeBracketsBoolMatch = boolCheck.matcher(String.valueOf(keyValueProcessorConfig.getRemoveBrackets())); @@ -596,14 +584,14 @@ private Map executeConfigs(Map map) { value = ((String)value).replaceAll(keyValueProcessorConfig.getDeleteValueRegex(), ""); } - if (keyValueProcessorConfig.getWhitespace().equals(whitespaceStrict)) { + if (keyValueProcessorConfig.getWhitespace() == WhitespaceOption.STRICT) { String[] whitespace_arr = trimWhitespace(key, value); key = whitespace_arr[0]; value = whitespace_arr[1]; } if (keyValueProcessorConfig.getTransformKey() != null - && !keyValueProcessorConfig.getTransformKey().isEmpty()) { + && keyValueProcessorConfig.getTransformKey() != TransformOption.NONE) { key = transformKey(key); } @@ -636,14 +624,7 @@ private String[] trimWhitespace(String key, Object value) { } private String transformKey(String key) { - if (keyValueProcessorConfig.getTransformKey().equals(lowercaseKey)) { - key = key.toLowerCase(); - } else if (keyValueProcessorConfig.getTransformKey().equals(capitalizeKey)) { - key = key.substring(0, 1).toUpperCase() + 
key.substring(1); - } else if (keyValueProcessorConfig.getTransformKey().equals(uppercaseKey)) { - key = key.toUpperCase(); - } - return key; + return keyValueProcessorConfig.getTransformKey().getTransformFunction().apply(key); } private boolean validKeyAndValue(String key, Object value) { diff --git a/data-prepper-plugins/key-value-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/keyvalue/KeyValueProcessorConfig.java b/data-prepper-plugins/key-value-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/keyvalue/KeyValueProcessorConfig.java index 34fd2c805b..945b8f7faf 100644 --- a/data-prepper-plugins/key-value-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/keyvalue/KeyValueProcessorConfig.java +++ b/data-prepper-plugins/key-value-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/keyvalue/KeyValueProcessorConfig.java @@ -19,7 +19,7 @@ import java.util.Map; @JsonPropertyOrder -@JsonClassDescription("You can use the `key_value` processor to parse the specified field into key-value pairs.") +@JsonClassDescription("You can use the key_value processor to create structured data by parsing key-value pairs from strings.") public class KeyValueProcessorConfig { static final String DEFAULT_SOURCE = "message"; static final String DEFAULT_DESTINATION = "parsed_message"; @@ -32,174 +32,178 @@ public class KeyValueProcessorConfig { static final String DEFAULT_PREFIX = ""; static final String DEFAULT_DELETE_KEY_REGEX = ""; static final String DEFAULT_DELETE_VALUE_REGEX = ""; - static final String DEFAULT_TRANSFORM_KEY = ""; - static final String DEFAULT_WHITESPACE = "lenient"; + static final WhitespaceOption DEFAULT_WHITESPACE = WhitespaceOption.LENIENT; static final boolean DEFAULT_SKIP_DUPLICATE_VALUES = false; static final boolean DEFAULT_REMOVE_BRACKETS = false; static final boolean DEFAULT_VALUE_GROUPING = false; static final boolean DEFAULT_RECURSIVE = false; @NotEmpty - 
@JsonPropertyDescription("The message field to be parsed. Optional. Default value is `message`.") + @JsonPropertyDescription("The source field to parse for key-value pairs. The default value is message.") private String source = DEFAULT_SOURCE; - @JsonPropertyDescription("The destination field for the parsed source. The parsed source overwrites the " + - "preexisting data for that key. Optional. If `destination` is set to `null`, the parsed fields will be " + - "written to the root of the event. Default value is `parsed_message`.") + @JsonPropertyDescription("The destination field for the structured data. The destination will be a structured map with the key value pairs extracted from the source. " + + "If destination is set to null, the parsed fields will be written to the root of the event. " + + "The default value is parsed_message.") private String destination = DEFAULT_DESTINATION; - @JsonProperty("field_delimiter_regex") - @JsonPropertyDescription("A regular expression specifying the delimiter that separates key-value pairs. " + - "Special regular expression characters such as `[` and `]` must be escaped with `\\\\`. " + - "Cannot be defined at the same time as `field_split_characters`. Optional. " + - "If this option is not defined, `field_split_characters` is used.") - private String fieldDelimiterRegex; - @JsonProperty("field_split_characters") @JsonPropertyDescription("A string of characters specifying the delimiter that separates key-value pairs. " + - "Special regular expression characters such as `[` and `]` must be escaped with `\\\\`. " + - "Cannot be defined at the same time as `field_delimiter_regex`. Optional. Default value is `&`.") + "Special regular expression characters such as [ and ] must be escaped with \\\\. " + + "This field cannot be defined along with field_delimiter_regex. 
" + + "The default value is &.") private String fieldSplitCharacters = DEFAULT_FIELD_SPLIT_CHARACTERS; - @JsonProperty("include_keys") - @JsonPropertyDescription("An array specifying the keys that should be added for parsing. " + - "By default, all keys will be added.") - @NotNull - private List includeKeys = DEFAULT_INCLUDE_KEYS; + @JsonProperty("field_delimiter_regex") + @JsonPropertyDescription("A regular expression specifying the delimiter that separates key-value pairs. " + + "Special regular expression characters such as [ and ] must be escaped with \\\\. " + + "This field cannot be defined along with field_split_characters. " + + "If this option is not defined, the key_value processor will parse the source using field_split_characters.") + private String fieldDelimiterRegex; - @JsonProperty("exclude_keys") - @JsonPropertyDescription("An array specifying the parsed keys that should not be added to the event. " + - "By default, no keys will be excluded.") - @NotNull - private List excludeKeys = DEFAULT_EXCLUDE_KEYS; + @JsonProperty("value_split_characters") + @JsonPropertyDescription("A string of characters specifying the delimiter that separates keys from their values within a key-value pair. " + + "Special regular expression characters such as [ and ] must be escaped with \\\\. " + + "This field cannot be defined along with key_value_delimiter_regex. " + + "The default value is =.") + private String valueSplitCharacters = DEFAULT_VALUE_SPLIT_CHARACTERS; + + @JsonProperty("key_value_delimiter_regex") + @JsonPropertyDescription("A regular expression specifying the delimiter that separates keys from their values within a key-value pair. " + + "Special regular expression characters such as [ and ] must be escaped with \\\\. " + + "This field cannot be defined along with value_split_characters. 
" + + "If this option is not defined, the key_value processor will parse the source using value_split_characters.") + private String keyValueDelimiterRegex; @JsonProperty("default_values") @JsonPropertyDescription("A map specifying the default keys and their values that should be added " + "to the event in case these keys do not exist in the source field being parsed. " + - "If the default key already exists in the message, the value is not changed. " + - "The `include_keys` filter will be applied to the message before `default_values`.") + "If the key was parsed from the source field that value will remain and the default value is not used. " + + "If the default values includes keys which are not part of include_keys those keys and value will be added to the event.") @NotNull private Map defaultValues = DEFAULT_DEFAULT_VALUES; - @JsonProperty("key_value_delimiter_regex") - @JsonPropertyDescription("A regular expression specifying the delimiter that separates the key and value " + - "within a key-value pair. Special regular expression characters such as `[` and `]` must be escaped with " + - "`\\\\`. This option cannot be defined at the same time as `value_split_characters`. Optional. " + - "If this option is not defined, `value_split_characters` is used.") - private String keyValueDelimiterRegex; - - @JsonProperty("value_split_characters") - @JsonPropertyDescription("A string of characters specifying the delimiter that separates the key and value within " + - "a key-value pair. Special regular expression characters such as `[` and `]` must be escaped with `\\\\`. " + - "Cannot be defined at the same time as `key_value_delimiter_regex`. Optional. Default value is `=`.") - private String valueSplitCharacters = DEFAULT_VALUE_SPLIT_CHARACTERS; - @JsonProperty("non_match_value") - @JsonPropertyDescription("When a key-value pair cannot be successfully split, the key-value pair is " + - "placed in the `key` field, and the specified value is placed in the `value` field. 
" + - "Optional. Default value is `null`.") + @JsonPropertyDescription("Configures a value to use when the processor cannot split a key-value pair. " + + "The value specified in this configuration is the value used in destination map. " + + "The default behavior is to drop the key-value pair.") private Object nonMatchValue = DEFAULT_NON_MATCH_VALUE; - @JsonPropertyDescription("A prefix to append before all keys. Optional. Default value is an empty string.") + @JsonProperty("include_keys") + @JsonPropertyDescription("An array specifying the keys that should be included in the destination field. " + + "By default, all keys will be added.") + @NotNull + private List includeKeys = DEFAULT_INCLUDE_KEYS; + + @JsonProperty("exclude_keys") + @JsonPropertyDescription("An array specifying the parsed keys that should be excluded from the destination field. " + + "By default, no keys will be excluded.") + @NotNull + private List excludeKeys = DEFAULT_EXCLUDE_KEYS; + + @JsonPropertyDescription("A prefix to append before all keys. By default no prefix is added.") @NotNull private String prefix = DEFAULT_PREFIX; @JsonProperty("delete_key_regex") - @JsonPropertyDescription("A regular expression specifying the characters to delete from the key. " + - "Special regular expression characters such as `[` and `]` must be escaped with `\\\\`. Cannot be an " + - "empty string. Optional. No default value.") + @JsonPropertyDescription("A regular expression specifying characters to delete from the key. " + + "Special regular expression characters such as [ and ] must be escaped with \\\\. " + + "Cannot be an empty string. " + + "By default, no characters are deleted from the key.") @NotNull private String deleteKeyRegex = DEFAULT_DELETE_KEY_REGEX; @JsonProperty("delete_value_regex") - @JsonPropertyDescription("A regular expression specifying the characters to delete from the value. " + - "Special regular expression characters such as `[` and `]` must be escaped with `\\\\`. 
" + - "Cannot be an empty string. Optional. No default value.") + @JsonPropertyDescription("A regular expression specifying characters to delete from the value. " + + "Special regular expression characters such as [ and ] must be escaped with \\\\. " + + "Cannot be an empty string. " + + "By default, no characters are deleted from the value.") @NotNull private String deleteValueRegex = DEFAULT_DELETE_VALUE_REGEX; @JsonProperty("transform_key") - @JsonPropertyDescription("When to lowercase, uppercase, or capitalize keys.") + @JsonPropertyDescription("Allows transforming the key's name such as making the name all lowercase.") @NotNull - private String transformKey = DEFAULT_TRANSFORM_KEY; + private TransformOption transformKey = TransformOption.NONE; @JsonProperty("whitespace") @JsonPropertyDescription("Specifies whether to be lenient or strict with the acceptance of " + - "unnecessary white space surrounding the configured value-split sequence. Default is `lenient`.") + "unnecessary white space surrounding the configured value-split sequence. " + + "In this case, strict means that whitespace is trimmed and lenient means it is retained in the key name and in the value." + + "Default is lenient.") @NotNull - private String whitespace = DEFAULT_WHITESPACE; + private WhitespaceOption whitespace = DEFAULT_WHITESPACE; @JsonProperty("skip_duplicate_values") - @JsonPropertyDescription("A Boolean option for removing duplicate key-value pairs. When set to `true`, " + - "only one unique key-value pair will be preserved. Default is `false`.") + @JsonPropertyDescription("A Boolean option for removing duplicate key-value pairs. When set to true, " + + "only one unique key-value pair will be preserved. 
Default is false.") @NotNull private boolean skipDuplicateValues = DEFAULT_SKIP_DUPLICATE_VALUES; @JsonProperty("remove_brackets") - @JsonPropertyDescription("Specifies whether to treat square brackets, angle brackets, and parentheses " + - "as value “wrappers” that should be removed from the value. Default is `false`.") + @JsonPropertyDescription("Specifies whether to treat certain grouping characters as wrapping text that should be removed from values." + + "When set to true, the following grouping characters will be removed: square brackets, angle brackets, and parentheses. " + + "The default configuration is false which retains those grouping characters.") @NotNull private boolean removeBrackets = DEFAULT_REMOVE_BRACKETS; @JsonProperty("value_grouping") - @JsonPropertyDescription("Specifies whether to group values using predefined value grouping delimiters: " + - "`{...}`, `[...]`, `<...>`, `(...)`, `\"...\"`, `'...'`, `http://... (space)`, and `https:// (space)`. " + + @JsonPropertyDescription("Specifies whether to group values using predefined grouping delimiters. " + "If this flag is enabled, then the content between the delimiters is considered to be one entity and " + - "is not parsed for key-value pairs. Default is `false`. If `value_grouping` is `true`, then " + - "`{\"key1=[a=b,c=d]&key2=value2\"}` parses to `{\"key1\": \"[a=b,c=d]\", \"key2\": \"value2\"}`.") + "they are not parsed as key-value pairs. The following characters are used a group delimiters: " + + "{...}, [...], <...>, (...), \"...\", '...', http://... (space), and https:// (space). " + + "Default is false. For example, if value_grouping is true, then " + + "{\"key1=[a=b,c=d]&key2=value2\"} parses to {\"key1\": \"[a=b,c=d]\", \"key2\": \"value2\"}.") private boolean valueGrouping = DEFAULT_VALUE_GROUPING; @JsonProperty("recursive") @JsonPropertyDescription("Specifies whether to recursively obtain additional key-value pairs from values. 
" + - "The extra key-value pairs will be stored as sub-keys of the root key. Default is `false`. " + + "The extra key-value pairs will be stored as nested objects within the destination object. Default is false. " + "The levels of recursive parsing must be defined by different brackets for each level: " + - "`[]`, `()`, and `<>`, in this order. Any other configurations specified will only be applied " + - "to the outmost keys.\n" + - "When `recursive` is `true`:\n" + - "`remove_brackets` cannot also be `true`;\n" + - "`skip_duplicate_values` will always be `true`;\n" + - "`whitespace` will always be `\"strict\"`.") + "[], (), and <>, in this order. Any other configurations specified will only be applied " + + "to the outermost keys.\n" + + "When recursive is true:\n" + + "remove_brackets cannot also be true;\n" + + "skip_duplicate_values will always be true;\n" + + "whitespace will always be \"strict\".") @NotNull private boolean recursive = DEFAULT_RECURSIVE; - - @JsonProperty("tags_on_failure") - @JsonPropertyDescription("When a `kv` operation causes a runtime exception within the processor, " + - "the operation is safely stopped without crashing the processor, and the event is tagged " + - "with the provided tags.") - private List tagsOnFailure; - + @JsonProperty("overwrite_if_destination_exists") @JsonPropertyDescription("Specifies whether to overwrite existing fields if there are key conflicts " + - "when writing parsed fields to the event. Default is `true`.") + "when writing parsed fields to the event. Default is true.") private boolean overwriteIfDestinationExists = true; @JsonProperty("drop_keys_with_no_value") - @JsonPropertyDescription("Specifies whether keys should be dropped if they have a null value. Default is `false`. " + - "If `drop_keys_with_no_value` is set to `true`, " + - "then `{\"key1=value1&key2\"}` parses to `{\"key1\": \"value1\"}`.") + @JsonPropertyDescription("Specifies whether keys should be dropped if they have a null value. 
Default is false. " + + "For example, if drop_keys_with_no_value is set to true, " + + "then {\"key1=value1&key2\"} parses to {\"key1\": \"value1\"}.") private boolean dropKeysWithNoValue = false; - @JsonProperty("key_value_when") - @JsonPropertyDescription("Allows you to specify a Data Prepper [conditional expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/), " + - "such as `/some-key == \"test\"`, that will be evaluated to determine whether " + - "the processor should be applied to the event.") - private String keyValueWhen; - @JsonProperty("strict_grouping") @JsonPropertyDescription("When enabled, groups with unmatched end characters yield errors. " + "The event is ignored after the errors are logged. " + - "Specifies whether strict grouping should be enabled when the `value_grouping` " + - "or `string_literal_character` options are used. Default is `false`.") + "Specifies whether strict grouping should be enabled when the value_grouping" + + "or string_literal_character options are used. Default is false.") private boolean strictGrouping = false; @JsonProperty("string_literal_character") - @JsonPropertyDescription("When this option is used, any text contained within the specified quotation " + - "mark character will be ignored and excluded from key-value parsing. " + - "Can be set to either a single quotation mark (`'`) or a double quotation mark (`\"`). " + - "Default is `null`.") + @JsonPropertyDescription("When this option is used, any text contained within the specified literal " + + "character will be ignored and excluded from key-value parsing. " + + "Can be set to either a single quotation mark (') or a double quotation mark (\"). 
" + + "Default is null.") @Size(min = 0, max = 1, message = "string_literal_character may only have character") private String stringLiteralCharacter = null; + @JsonProperty("tags_on_failure") + @JsonPropertyDescription("The tags to add to the event metadata if the key_value processor fails to parse the source string.") + private List tagsOnFailure; + + @JsonProperty("key_value_when") + @JsonPropertyDescription("A conditional expression such as /some_key == \"test\". " + + "If specified, the key_value processor will only run on events when the expression evaluates to true. ") + private String keyValueWhen; + @AssertTrue(message = "Invalid Configuration. value_grouping option and field_delimiter_regex are mutually exclusive") boolean isValidValueGroupingAndFieldDelimiterRegex() { return (!valueGrouping || fieldDelimiterRegex == null); @@ -284,11 +288,11 @@ public String getDeleteValueRegex() { return deleteValueRegex; } - public String getTransformKey() { + public TransformOption getTransformKey() { return transformKey; } - public String getWhitespace() { + public WhitespaceOption getWhitespace() { return whitespace; } diff --git a/data-prepper-plugins/key-value-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/keyvalue/TransformOption.java b/data-prepper-plugins/key-value-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/keyvalue/TransformOption.java new file mode 100644 index 0000000000..bf12807666 --- /dev/null +++ b/data-prepper-plugins/key-value-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/keyvalue/TransformOption.java @@ -0,0 +1,49 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.processor.keyvalue; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; + +import java.util.Arrays; +import java.util.Map; +import java.util.function.Function; +import 
java.util.stream.Collectors; + +public enum TransformOption { + NONE("", key -> key), + LOWERCASE("lowercase", String::toLowerCase), + UPPERCASE("uppercase", String::toUpperCase), + CAPITALIZE("capitalize", key -> key.substring(0, 1).toUpperCase() + key.substring(1)); + + private static final Map NAMES_MAP = Arrays.stream(TransformOption.values()) + .collect(Collectors.toMap( + value -> value.transformName, + value -> value + )); + + private final String transformName; + private final Function transformFunction; + + TransformOption(final String transformName, final Function transformFunction) { + this.transformName = transformName; + this.transformFunction = transformFunction; + } + + @JsonValue + public String getTransformName() { + return transformName; + } + + Function getTransformFunction() { + return transformFunction; + } + + @JsonCreator + public static TransformOption fromTransformName(final String transformName) { + return NAMES_MAP.get(transformName); + } +} diff --git a/data-prepper-plugins/key-value-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/keyvalue/WhitespaceOption.java b/data-prepper-plugins/key-value-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/keyvalue/WhitespaceOption.java new file mode 100644 index 0000000000..171a1bf05c --- /dev/null +++ b/data-prepper-plugins/key-value-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/keyvalue/WhitespaceOption.java @@ -0,0 +1,40 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.processor.keyvalue; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; + +import java.util.Arrays; +import java.util.Map; +import java.util.stream.Collectors; + +public enum WhitespaceOption { + LENIENT("lenient"), + STRICT("strict"); + + private static final Map NAMES_MAP = Arrays.stream(WhitespaceOption.values()) + 
.collect(Collectors.toMap( + value -> value.optionName, + value -> value + )); + + private final String optionName; + + WhitespaceOption(final String optionName) { + this.optionName = optionName; + } + + @JsonValue + public String getWhitespaceName() { + return optionName; + } + + @JsonCreator + public static WhitespaceOption fromWhitespaceName(final String optionName) { + return NAMES_MAP.get(optionName); + } +} diff --git a/data-prepper-plugins/key-value-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/keyvalue/KeyValueProcessorTests.java b/data-prepper-plugins/key-value-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/keyvalue/KeyValueProcessorTests.java index 505e236ea9..c8fa597697 100644 --- a/data-prepper-plugins/key-value-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/keyvalue/KeyValueProcessorTests.java +++ b/data-prepper-plugins/key-value-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/keyvalue/KeyValueProcessorTests.java @@ -778,7 +778,7 @@ void testDeleteValueAndKeyRegexKvProcessor() { @Test void testLowercaseTransformKvProcessor() { - when(mockConfig.getTransformKey()).thenReturn("lowercase"); + when(mockConfig.getTransformKey()).thenReturn(TransformOption.LOWERCASE); final Record record = getMessage("Key1=value1"); final List> editedRecords = (List>) keyValueProcessor.doExecute(Collections.singletonList(record)); @@ -790,7 +790,7 @@ void testLowercaseTransformKvProcessor() { @Test void testUppercaseTransformKvProcessor() { - when(mockConfig.getTransformKey()).thenReturn("uppercase"); + when(mockConfig.getTransformKey()).thenReturn(TransformOption.UPPERCASE); final Record record = getMessage("key1=value1"); final List> editedRecords = (List>) keyValueProcessor.doExecute(Collections.singletonList(record)); @@ -802,7 +802,7 @@ void testUppercaseTransformKvProcessor() { @Test void testCapitalizeTransformKvProcessor() { - 
when(mockConfig.getTransformKey()).thenReturn("capitalize"); + when(mockConfig.getTransformKey()).thenReturn(TransformOption.CAPITALIZE); final Record record = getMessage("key1=value1"); final List> editedRecords = (List>) keyValueProcessor.doExecute(Collections.singletonList(record)); @@ -814,7 +814,7 @@ void testCapitalizeTransformKvProcessor() { @Test void testStrictWhitespaceKvProcessor() { - when(mockConfig.getWhitespace()).thenReturn("strict"); + when(mockConfig.getWhitespace()).thenReturn(WhitespaceOption.STRICT); final Record record = getMessage("key1 = value1"); final List> editedRecords = (List>) keyValueProcessor.doExecute(Collections.singletonList(record)); @@ -934,7 +934,7 @@ void testMultiRecursiveKvProcessor() { @Test void testTransformKeyRecursiveKvProcessor() { when(mockConfig.getRecursive()).thenReturn(true); - when(mockConfig.getTransformKey()).thenReturn("capitalize"); + when(mockConfig.getTransformKey()).thenReturn(TransformOption.CAPITALIZE); final Record record = getMessage("item1=[item1-subitem1=item1-subitem1-value&item1-subitem2=item1-subitem2-value]&item2=item2-value"); final List> editedRecords = (List>) keyValueProcessor.doExecute(Collections.singletonList(record)); diff --git a/data-prepper-plugins/key-value-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/keyvalue/TransformOptionTest.java b/data-prepper-plugins/key-value-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/keyvalue/TransformOptionTest.java new file mode 100644 index 0000000000..53d3b13164 --- /dev/null +++ b/data-prepper-plugins/key-value-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/keyvalue/TransformOptionTest.java @@ -0,0 +1,85 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.processor.keyvalue; + +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import 
org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; +import org.junit.jupiter.params.provider.EnumSource; + +import java.util.stream.Stream; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.emptyString; +import static org.junit.jupiter.params.provider.Arguments.arguments; + +class TransformOptionTest { + @ParameterizedTest + @EnumSource(TransformOption.class) + void fromTransformName_returns_expected_value(final TransformOption transformOption) { + assertThat(TransformOption.fromTransformName(transformOption.getTransformName()), equalTo(transformOption)); + } + + @ParameterizedTest + @EnumSource(TransformOption.class) + void getTransformName_returns_non_empty_null_for_all_types(final TransformOption transformOption) { + assertThat(transformOption.getTransformName(), notNullValue()); + } + + @ParameterizedTest + @EnumSource(value = TransformOption.class, mode = EnumSource.Mode.EXCLUDE, names = {"NONE"}) + void getTransformName_returns_non_empty_string_for_all_types_except_none(final TransformOption transformOption) { + assertThat(transformOption.getTransformName(), notNullValue()); + assertThat(transformOption.getTransformName(), not(emptyString())); + } + + @ParameterizedTest + @ArgumentsSource(TransformOptionToKnownName.class) + void getTransformName_returns_expected_name(final TransformOption transformOption, final String expectedString) { + assertThat(transformOption.getTransformName(), equalTo(expectedString)); + } + + static class TransformOptionToKnownName implements ArgumentsProvider { + @Override + public Stream provideArguments(final ExtensionContext extensionContext) { + return Stream.of( + arguments(TransformOption.NONE, ""), + 
arguments(TransformOption.UPPERCASE, "uppercase"), + arguments(TransformOption.LOWERCASE, "lowercase"), + arguments(TransformOption.CAPITALIZE, "capitalize") + ); + } + } + + @ParameterizedTest + @ArgumentsSource(TransformationArguments.class) + void getTransformFunction_performs_expected_transformation(final TransformOption transformOption, final String inputString, final String outputString) { + assertThat(transformOption.getTransformFunction().apply(inputString), equalTo(outputString)); + } + + static class TransformationArguments implements ArgumentsProvider { + @Override + public Stream provideArguments(final ExtensionContext extensionContext) { + return Stream.of( + arguments(TransformOption.NONE, "hello", "hello"), + arguments(TransformOption.NONE, "Hello", "Hello"), + arguments(TransformOption.NONE, "hello world", "hello world"), + arguments(TransformOption.UPPERCASE, "hello", "HELLO"), + arguments(TransformOption.UPPERCASE, "Hello", "HELLO"), + arguments(TransformOption.LOWERCASE, "hello", "hello"), + arguments(TransformOption.LOWERCASE, "Hello", "hello"), + arguments(TransformOption.LOWERCASE, "HELLO", "hello"), + arguments(TransformOption.CAPITALIZE, "hello", "Hello"), + arguments(TransformOption.CAPITALIZE, "hello world", "Hello world") + ); + } + } +} \ No newline at end of file diff --git a/data-prepper-plugins/key-value-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/keyvalue/WhitespaceOptionTest.java b/data-prepper-plugins/key-value-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/keyvalue/WhitespaceOptionTest.java new file mode 100644 index 0000000000..665826f1ce --- /dev/null +++ b/data-prepper-plugins/key-value-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/keyvalue/WhitespaceOptionTest.java @@ -0,0 +1,53 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.processor.keyvalue; + +import 
org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; +import org.junit.jupiter.params.provider.EnumSource; + +import java.util.stream.Stream; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.emptyString; +import static org.junit.jupiter.params.provider.Arguments.arguments; + +class WhitespaceOptionTest { + @ParameterizedTest + @EnumSource(WhitespaceOption.class) + void fromWhitespaceName_returns_expected_value(final WhitespaceOption whitespaceOption) { + assertThat(WhitespaceOption.fromWhitespaceName(whitespaceOption.getWhitespaceName()), equalTo(whitespaceOption)); + } + + @ParameterizedTest + @EnumSource(WhitespaceOption.class) + void getWhitespaceName_returns_non_empty_string_for_all_types(final WhitespaceOption whitespaceOption) { + assertThat(whitespaceOption.getWhitespaceName(), notNullValue()); + assertThat(whitespaceOption.getWhitespaceName(), not(emptyString())); + } + + @ParameterizedTest + @ArgumentsSource(WhitespaceOptionToKnownName.class) + void getWhitespaceName_returns_expected_name(final WhitespaceOption whitespaceOption, final String expectedString) { + assertThat(whitespaceOption.getWhitespaceName(), equalTo(expectedString)); + } + + static class WhitespaceOptionToKnownName implements ArgumentsProvider { + @Override + public Stream provideArguments(final ExtensionContext extensionContext) { + return Stream.of( + arguments(WhitespaceOption.LENIENT, "lenient"), + arguments(WhitespaceOption.STRICT, "strict") + ); + } + } +} \ No newline at end of file diff --git a/data-prepper-plugins/kinesis-source/build.gradle 
b/data-prepper-plugins/kinesis-source/build.gradle index 5214c9c8ab..87fb22f3a0 100644 --- a/data-prepper-plugins/kinesis-source/build.gradle +++ b/data-prepper-plugins/kinesis-source/build.gradle @@ -42,6 +42,7 @@ dependencies { testImplementation project(':data-prepper-test-common') testImplementation project(':data-prepper-test-event') testImplementation project(':data-prepper-core') + testImplementation project(':data-prepper-event') testImplementation project(':data-prepper-plugin-framework') testImplementation project(':data-prepper-pipeline-parser') testImplementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml' diff --git a/data-prepper-plugins/kinesis-source/src/main/java/org/opensearch/dataprepper/plugins/kinesis/source/converter/KinesisRecordConverter.java b/data-prepper-plugins/kinesis-source/src/main/java/org/opensearch/dataprepper/plugins/kinesis/source/converter/KinesisRecordConverter.java index c21dec4c21..9cfcceb088 100644 --- a/data-prepper-plugins/kinesis-source/src/main/java/org/opensearch/dataprepper/plugins/kinesis/source/converter/KinesisRecordConverter.java +++ b/data-prepper-plugins/kinesis-source/src/main/java/org/opensearch/dataprepper/plugins/kinesis/source/converter/KinesisRecordConverter.java @@ -42,6 +42,8 @@ public List> convert(List kinesisClientRecord eventMetadata.setAttribute(MetadataKeyAttributes.KINESIS_STREAM_NAME_METADATA_ATTRIBUTE, streamName.toLowerCase()); eventMetadata.setAttribute(MetadataKeyAttributes.KINESIS_PARTITION_KEY_METADATA_ATTRIBUTE, kinesisClientRecord.partitionKey()); + eventMetadata.setAttribute(MetadataKeyAttributes.KINESIS_SEQUENCE_NUMBER_METADATA_ATTRIBUTE, kinesisClientRecord.sequenceNumber()); + eventMetadata.setAttribute(MetadataKeyAttributes.KINESIS_SUB_SEQUENCE_NUMBER_METADATA_ATTRIBUTE, kinesisClientRecord.subSequenceNumber()); final Instant externalOriginationTime = kinesisClientRecord.approximateArrivalTimestamp(); 
event.getEventHandle().setExternalOriginationTime(externalOriginationTime); event.getMetadata().setExternalOriginationTime(externalOriginationTime); diff --git a/data-prepper-plugins/kinesis-source/src/main/java/org/opensearch/dataprepper/plugins/kinesis/source/converter/MetadataKeyAttributes.java b/data-prepper-plugins/kinesis-source/src/main/java/org/opensearch/dataprepper/plugins/kinesis/source/converter/MetadataKeyAttributes.java index 6ef99ddcd3..4335193cfb 100644 --- a/data-prepper-plugins/kinesis-source/src/main/java/org/opensearch/dataprepper/plugins/kinesis/source/converter/MetadataKeyAttributes.java +++ b/data-prepper-plugins/kinesis-source/src/main/java/org/opensearch/dataprepper/plugins/kinesis/source/converter/MetadataKeyAttributes.java @@ -13,4 +13,6 @@ public class MetadataKeyAttributes { public static final String KINESIS_STREAM_NAME_METADATA_ATTRIBUTE = "stream_name"; public static final String KINESIS_PARTITION_KEY_METADATA_ATTRIBUTE = "partition_key"; + public static final String KINESIS_SEQUENCE_NUMBER_METADATA_ATTRIBUTE = "sequence_number"; + public static final String KINESIS_SUB_SEQUENCE_NUMBER_METADATA_ATTRIBUTE = "sub_sequence_number"; } diff --git a/data-prepper-plugins/kinesis-source/src/test/java/org/opensearch/dataprepper/plugins/kinesis/source/converter/KinesisRecordConverterTest.java b/data-prepper-plugins/kinesis-source/src/test/java/org/opensearch/dataprepper/plugins/kinesis/source/converter/KinesisRecordConverterTest.java index 2598745b2f..caff23dd92 100644 --- a/data-prepper-plugins/kinesis-source/src/test/java/org/opensearch/dataprepper/plugins/kinesis/source/converter/KinesisRecordConverterTest.java +++ b/data-prepper-plugins/kinesis-source/src/test/java/org/opensearch/dataprepper/plugins/kinesis/source/converter/KinesisRecordConverterTest.java @@ -28,6 +28,7 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Random; import java.util.UUID; import java.util.function.Consumer; import 
java.util.stream.Collectors; @@ -44,7 +45,7 @@ public class KinesisRecordConverterTest { private static final String streamId = "stream-1"; @Test - void setup() throws IOException { + void testRecordConverter() throws IOException { InputCodec codec = mock(InputCodec.class); KinesisRecordConverter kinesisRecordConverter = new KinesisRecordConverter(codec); doNothing().when(codec).parse(any(InputStream.class), any(Consumer.class)); @@ -77,12 +78,25 @@ public void testRecordConverterWithNdJsonInputCodec() throws IOException { KinesisRecordConverter kinesisRecordConverter = new KinesisRecordConverter( new NdjsonInputCodec(new NdjsonInputConfig(), TestEventFactory.getTestEventFactory())); + final String partitionKey = UUID.randomUUID().toString(); + final String sequenceNumber = UUID.randomUUID().toString(); + final Random random = new Random(); + final long subsequenceNumber = random.nextLong(); + KinesisClientRecord kinesisClientRecord = KinesisClientRecord.builder() .data(ByteBuffer.wrap(writer.toString().getBytes())) + .sequenceNumber(sequenceNumber) + .subSequenceNumber(subsequenceNumber) + .partitionKey(partitionKey) .build(); List> events = kinesisRecordConverter.convert(List.of(kinesisClientRecord), streamId); assertEquals(events.size(), numRecords); + events.forEach(eventRecord -> { + assertEquals(eventRecord.getData().getMetadata().getAttribute(MetadataKeyAttributes.KINESIS_PARTITION_KEY_METADATA_ATTRIBUTE), partitionKey); + assertEquals(eventRecord.getData().getMetadata().getAttribute(MetadataKeyAttributes.KINESIS_SEQUENCE_NUMBER_METADATA_ATTRIBUTE), sequenceNumber); + assertEquals(eventRecord.getData().getMetadata().getAttribute(MetadataKeyAttributes.KINESIS_SUB_SEQUENCE_NUMBER_METADATA_ATTRIBUTE), subsequenceNumber); + }); } private static Map generateJson() { diff --git a/data-prepper-plugins/mongodb/src/main/resources/org/opensearch/dataprepper/transforms/templates/documentdb-template.yaml 
b/data-prepper-plugins/mongodb/src/main/resources/org/opensearch/dataprepper/transforms/templates/documentdb-template.yaml index 38bb70d8ca..80e179ff93 100644 --- a/data-prepper-plugins/mongodb/src/main/resources/org/opensearch/dataprepper/transforms/templates/documentdb-template.yaml +++ b/data-prepper-plugins/mongodb/src/main/resources/org/opensearch/dataprepper/transforms/templates/documentdb-template.yaml @@ -66,6 +66,7 @@ delete_s3_objects_on_read: true disable_s3_metadata_in_event: true scan: + acknowledgment_timeout: "PT10M" folder_partitions: depth: "<>.source.documentdb.s3_prefix>>" max_objects_per_ownership: 50 @@ -75,7 +76,7 @@ filter: include_prefix: ["<>.source.documentdb.s3_prefix>>"] scheduling: - interval: "60s" + interval: "20s" processor: "<<$.<>.processor>>" sink: "<<$.<>.sink>>" routes: "<<$.<>.routes>>" # In placeholder, routes or route (defined as alias) will be transformed to route in json as route will be primarily picked in pipelineModel. \ No newline at end of file diff --git a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/AddEntryProcessorConfig.java b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/AddEntryProcessorConfig.java index 0c0f80be47..c3034bd50c 100644 --- a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/AddEntryProcessorConfig.java +++ b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/AddEntryProcessorConfig.java @@ -18,18 +18,18 @@ import java.util.stream.Stream; @JsonPropertyOrder -@JsonClassDescription("The `add_entries` processor adds entries to an event.") +@JsonClassDescription("The add_entries processor adds entries to an event.") public class AddEntryProcessorConfig { + @JsonPropertyOrder public static class Entry { - - @JsonPropertyDescription("The 
key of the new entry to be added. Some examples of keys include `my_key`, " + - "`myKey`, and `object/sub_Key`. The key can also be a format expression, for example, `${/key1}` to " + - "use the value of field `key1` as the key.") + @JsonPropertyDescription("The key of the new entry to be added. Some examples of keys include my_key, " + + "myKey, and object/sub_Key. The key can also be a format expression, for example, ${/key1} to " + + "use the value of field key1 as the key.") private String key; @JsonProperty("metadata_key") @JsonPropertyDescription("The key for the new metadata attribute. The argument must be a literal string key " + - "and not a JSON Pointer. Either one string key or `metadata_key` is required.") + "and not a JSON Pointer. Either one of key or metadata_key is required.") private String metadataKey; @JsonPropertyDescription("The value of the new entry to be added, which can be used with any of the " + @@ -37,33 +37,33 @@ public static class Entry { private Object value; @JsonPropertyDescription("A format string to use as the value of the new entry, for example, " + - "`${key1}-${key2}`, where `key1` and `key2` are existing keys in the event. Required if neither " + - "`value` nor `value_expression` is specified.") + "${key1}-${key2}, where key1 and key2 are existing keys in the event. Required if neither" + + "value nor value_expression is specified.") private String format; @JsonProperty("value_expression") - @JsonPropertyDescription("An expression string to use as the value of the new entry. For example, `/key` " + + @JsonPropertyDescription("An expression string to use as the value of the new entry. For example, /key " + "is an existing key in the event with a type of either a number, a string, or a Boolean. " + "Expressions can also contain functions returning number/string/integer. For example, " + - "`length(/key)` will return the length of the key in the event when the key is a string. 
For more " + - "information about keys, see [Expression syntax](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/).") + "length(/key) will return the length of the key in the event when the key is a string. For more " + + "information about keys, see Expression syntax.") private String valueExpression; - @JsonProperty("add_when") - @JsonPropertyDescription("A [conditional expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/), " + - "such as `/some-key == \"test\"'`, that will be evaluated to determine whether the processor will be run on the event.") - private String addWhen; - @JsonProperty("overwrite_if_key_exists") - @JsonPropertyDescription("When set to `true`, the existing value is overwritten if `key` already exists " + - "in the event. The default value is `false`.") + @JsonPropertyDescription("When set to true, the existing value is overwritten if key already exists " + + "in the event. The default value is false.") private boolean overwriteIfKeyExists = false; @JsonProperty("append_if_key_exists") - @JsonPropertyDescription("When set to `true`, the existing value will be appended if a `key` already " + - "exists in the event. An array will be created if the existing value is not an array. Default is `false`.") + @JsonPropertyDescription("When set to true, the existing value will be appended if a key already " + + "exists in the event. An array will be created if the existing value is not an array. 
Default is false.") private boolean appendIfKeyExists = false; + @JsonProperty("add_when") + @JsonPropertyDescription("A conditional expression, " + + "such as /some-key == \"test\"', that will be evaluated to determine whether the processor will be run on the event.") + private String addWhen; + public String getKey() { return key; } diff --git a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/ConvertEntryTypeProcessorConfig.java b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/ConvertEntryTypeProcessorConfig.java index b2b09ccfca..784a101fcd 100644 --- a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/ConvertEntryTypeProcessorConfig.java +++ b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/ConvertEntryTypeProcessorConfig.java @@ -15,40 +15,40 @@ import java.util.Optional; @JsonPropertyOrder -@JsonClassDescription("The `convert_entry_type` processor converts a value type associated with the specified key in " + - "a event to the specified type. It is a casting processor that changes the types of some fields in events.") +@JsonClassDescription("The convert_entry_type processor converts a value associated with the specified key in " + + "a event to the specified type. 
It is a casting processor that changes the types of specified fields in events.") public class ConvertEntryTypeProcessorConfig implements ConverterArguments { @JsonProperty("key") @JsonPropertyDescription("Key whose value needs to be converted to a different type.") private String key; @JsonProperty("keys") - @JsonPropertyDescription("List of keys whose value needs to be converted to a different type.") + @JsonPropertyDescription("List of keys whose values needs to be converted to a different type.") private List keys; @JsonProperty("type") - @JsonPropertyDescription("Target type for the key-value pair. Possible values are integer, long, double, big_decimal, string, and boolean. Default value is integer.") + @JsonPropertyDescription("Target type for the values. Default value is integer.") private TargetType type = TargetType.INTEGER; + @JsonProperty("null_values") + @JsonPropertyDescription("String representation of what constitutes a null value. If the field value equals one of these strings, then the value is considered null and is converted to null.") + private List nullValues; + /** * Optional scale value used only in the case of BigDecimal converter */ @JsonProperty("scale") - @JsonPropertyDescription("Modifies the scale of the big_decimal when converting to a big_decimal. The default value is 0.") + @JsonPropertyDescription("Modifies the scale of the big_decimal when converting to a big_decimal. The default value is 0.") private int scale = 0; - @JsonProperty("convert_when") - @JsonPropertyDescription("Specifies a condition using a [Data Prepper expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/) for performing the convert_entry_type operation. If specified, the convert_entry_type operation runs only when the expression evaluates to true.") - private String convertWhen; - - @JsonProperty("null_values") - @JsonPropertyDescription("String representation of what constitutes a null value. 
If the field value equals one of these strings, then the value is considered null and is converted to null.") - private List nullValues; - @JsonProperty("tags_on_failure") @JsonPropertyDescription("A list of tags to be added to the event metadata when the event fails to convert.") private List tagsOnFailure; + @JsonProperty("convert_when") + @JsonPropertyDescription("Specifies a condition using a conditional expression for performing the convert_entry_type operation. If specified, the convert_entry_type operation runs only when the expression evaluates to true. Example: /mykey != \"---\"") + private String convertWhen; + public String getKey() { return key; } diff --git a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/CopyValueProcessorConfig.java b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/CopyValueProcessorConfig.java index 1b3afeed0c..651faeadfd 100644 --- a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/CopyValueProcessorConfig.java +++ b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/CopyValueProcessorConfig.java @@ -17,9 +17,9 @@ import java.util.List; @JsonPropertyOrder -@JsonClassDescription("The `copy_values` processor copies values within an event and is a [mutate event]" + - "(https://opensearch.org/docs/latest/data-prepper/pipelines/configuration/processors/mutate-event/) processor.") +@JsonClassDescription("The copy_values processor copies values within an event to other fields within the event.") public class CopyValueProcessorConfig { + @JsonPropertyOrder public static class Entry { @NotEmpty @NotNull @@ -33,16 +33,16 @@ public static class Entry { @JsonPropertyDescription("The key of the new entry to be added.") private String toKey; - @JsonProperty("copy_when") - 
@JsonPropertyDescription("A [conditional expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/), " + - "such as `/some-key == \"test\"'`, that will be evaluated to determine whether the processor will be run on the event.") - private String copyWhen; - @JsonProperty("overwrite_if_to_key_exists") - @JsonPropertyDescription("When set to `true`, the existing value is overwritten if `key` already exists in " + - "the event. The default value is `false`.") + @JsonPropertyDescription("When set to true, the existing value is overwritten if key already exists in " + + "the event. The default value is false.") private boolean overwriteIfToKeyExists = false; + @JsonProperty("copy_when") + @JsonPropertyDescription("A conditional expression, " + + "such as /some-key == \"test\"', that will be evaluated to determine whether the processor will be run on the event.") + private String copyWhen; + public String getFromKey() { return fromKey; } @@ -84,7 +84,7 @@ public Entry() { private String toList; @JsonProperty("overwrite_if_to_list_exists") - @JsonPropertyDescription("When set to `true`, the existing value is overwritten if `key` already exists in the event. The default value is `false`.") + @JsonPropertyDescription("When set to true, the existing value is overwritten if key already exists in the event. 
The default value is false.") private boolean overwriteIfToListExists = false; @AssertTrue(message = "Both from_list and to_list should be specified when copying entries between lists.") diff --git a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/DeleteEntryProcessorConfig.java b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/DeleteEntryProcessorConfig.java index 2cfc3c5aba..6ebc7ae106 100644 --- a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/DeleteEntryProcessorConfig.java +++ b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/DeleteEntryProcessorConfig.java @@ -18,20 +18,20 @@ import java.util.List; @JsonPropertyOrder -@JsonClassDescription("The `delete_entries` processor deletes entries, such as key-value pairs, from an event. " + - "You can define the keys you want to delete in the `with-keys` field following `delete_entries` in the YAML " + - "configuration file. Those keys and their values are deleted.") +@JsonClassDescription("The delete_entries processor deletes fields from events. " + + "You can define the keys you want to delete in the with_keys configuration." + + "Those keys and their values are deleted from events.") public class DeleteEntryProcessorConfig { @NotEmpty @NotNull @JsonProperty("with_keys") @EventKeyConfiguration(EventKeyFactory.EventAction.DELETE) - @JsonPropertyDescription("An array of keys for the entries to be deleted.") + @JsonPropertyDescription("A list of keys to be deleted.") private List<@NotNull @NotEmpty EventKey> withKeys; @JsonProperty("delete_when") - @JsonPropertyDescription("Specifies under what condition the `delete_entries` processor should perform deletion. 
" + - "Default is no condition.") + @JsonPropertyDescription("Specifies under what condition the delete_entries processor should perform deletion. " + + "By default, keys are always deleted. Example: /mykey == \"---\"") private String deleteWhen; public List getWithKeys() { diff --git a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/ListToMapProcessorConfig.java b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/ListToMapProcessorConfig.java index 7185576084..4bcb2067a5 100644 --- a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/ListToMapProcessorConfig.java +++ b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/ListToMapProcessorConfig.java @@ -10,6 +10,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyDescription; import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import com.fasterxml.jackson.annotation.JsonValue; import jakarta.validation.constraints.NotEmpty; import jakarta.validation.constraints.NotNull; @@ -19,10 +20,10 @@ import java.util.stream.Collectors; @JsonPropertyOrder -@JsonClassDescription("The `list_to_map` processor converts a list of objects from an event, " + - "where each object contains a `key` field, into a map of target keys.") +@JsonClassDescription("The list_to_map processor converts a list of objects from an event, " + + "where each object contains a key field, into a map of target keys.") public class ListToMapProcessorConfig { - enum FlattenedElement { + public enum FlattenedElement { FIRST("first"), LAST("last"); @@ -42,12 +43,17 @@ enum FlattenedElement { static FlattenedElement fromOptionValue(final String option) { return ACTIONS_MAP.get(option); } + + @JsonValue + public String getOptionValue() { 
+ return name; + } } @NotEmpty @NotNull @JsonProperty("source") - @JsonPropertyDescription("The list of objects with `key` fields to be converted into keys for the generated map.") + @JsonPropertyDescription("The list of objects with key fields to be converted into keys for the generated map.") private String source; @JsonProperty("target") @@ -57,47 +63,47 @@ static FlattenedElement fromOptionValue(final String option) { @JsonProperty("key") @JsonPropertyDescription("The key of the fields to be extracted as keys in the generated mappings. Must be " + - "specified if `use_source_key` is `false`.") + "specified if use_source_key is false.") private String key; @JsonProperty("value_key") - @JsonPropertyDescription("When specified, values given a `value_key` in objects contained in the source list " + + @JsonPropertyDescription("When specified, values given a value_key in objects contained in the source list " + "will be extracted and converted into the value specified by this option based on the generated map. " + "When not specified, objects contained in the source list retain their original value when mapped.") private String valueKey = null; @JsonProperty("use_source_key") - @JsonPropertyDescription("When `true`, keys in the generated map will use original keys from the source. " + - "Default is `false`.") + @JsonPropertyDescription("When true, keys in the generated map will use original keys from the source. " + + "Default is false.") private boolean useSourceKey = false; @JsonProperty("extract_value") - @JsonPropertyDescription("When `true`, object values from the source list will be extracted and added to " + - "the generated map. When `false`, object values from the source list are added to the generated map " + - "as they appear in the source list. Default is `false`") + @JsonPropertyDescription("When true, object values from the source list will be extracted and added to " + + "the generated map. 
When false, object values from the source list are added to the generated map " + + "as they appear in the source list. Default is false") private boolean extractValue = false; @NotNull @JsonProperty("flatten") - @JsonPropertyDescription("When `true`, values in the generated map output flatten into single items based on " + - "the `flattened_element`. Otherwise, objects mapped to values from the generated map appear as lists.") + @JsonPropertyDescription("When true, values in the generated map output flatten into single items based on " + + "the flattened_element. Otherwise, objects mapped to values from the generated map appear as lists.") private boolean flatten = false; @NotNull @JsonProperty("flattened_element") - @JsonPropertyDescription("The element to keep, either `first` or `last`, when `flatten` is set to `true`.") + @JsonPropertyDescription("The element to keep, either first or last, when flatten is set to true.") private FlattenedElement flattenedElement = FlattenedElement.FIRST; - @JsonProperty("list_to_map_when") - @JsonPropertyDescription("A Data Prepper [conditional expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/), " + - "such as `/some-key == \"test\"'`, that will be evaluated to determine whether the processor will be " + - "run on the event. Default is `null`. All events will be processed unless otherwise stated.") - private String listToMapWhen; - @JsonProperty("tags_on_failure") @JsonPropertyDescription("A list of tags to add to the event metadata when the event fails to process.") private List tagsOnFailure; + @JsonProperty("list_to_map_when") + @JsonPropertyDescription("A conditional expression, " + + "such as /some-key == \"test\"', that will be evaluated to determine whether the processor will be " + + "run on the event. 
By default, all events will be processed unless otherwise stated.") + private String listToMapWhen; + public String getSource() { return source; } diff --git a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/MapToListProcessorConfig.java b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/MapToListProcessorConfig.java index d914d987fc..70f1ae4f3c 100644 --- a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/MapToListProcessorConfig.java +++ b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/MapToListProcessorConfig.java @@ -16,7 +16,7 @@ import java.util.List; @JsonPropertyOrder -@JsonClassDescription("The `map_to_list` processor converts a map of key-value pairs to a list of objects. " + +@JsonClassDescription("The map_to_list processor converts a map of key-value pairs to a list of objects. " + "Each object contains the key and value in separate fields.") public class MapToListProcessorConfig { private static final String DEFAULT_KEY_NAME = "key"; @@ -27,7 +27,7 @@ public class MapToListProcessorConfig { @NotNull @JsonProperty("source") @JsonPropertyDescription("The source map used to perform the mapping operation. When set to an empty " + - "string (`\"\"`), it will use the root of the event as the `source`.") + "string (\"\"), it will use the root of the event as the source.") private String source; @NotEmpty @@ -37,38 +37,38 @@ public class MapToListProcessorConfig { private String target; @JsonProperty("key_name") - @JsonPropertyDescription("The name of the field in which to store the original key. Default is `key`.") + @JsonPropertyDescription("The name of the field in which to store the original key. 
Default is key.") private String keyName = DEFAULT_KEY_NAME; @JsonProperty("value_name") - @JsonPropertyDescription("The name of the field in which to store the original value. Default is `value`.") + @JsonPropertyDescription("The name of the field in which to store the original value. Default is value.") private String valueName = DEFAULT_VALUE_NAME; - @JsonProperty("map_to_list_when") - @JsonPropertyDescription("A Data Prepper [conditional expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/), " + - "such as `/some-key == \"test\"'`, that will be evaluated to determine whether the processor will " + - "be run on the event. Default is `null`. All events will be processed unless otherwise stated.") - private String mapToListWhen; - - @JsonProperty("exclude_keys") - @JsonPropertyDescription("The keys in the source map that will be excluded from processing. Default is an " + - "empty list (`[]`).") - private List excludeKeys = DEFAULT_EXCLUDE_KEYS; - @JsonProperty("remove_processed_fields") - @JsonPropertyDescription("When `true`, the processor will remove the processed fields from the source map. " + - "Default is `false`.") + @JsonPropertyDescription("When true, the processor will remove the processed fields from the source map. " + + "Default is false.") private boolean removeProcessedFields = DEFAULT_REMOVE_PROCESSED_FIELDS; @JsonProperty("convert_field_to_list") - @JsonPropertyDescription("If `true`, the processor will convert the fields from the source map into lists and " + - "place them in fields in the target list. Default is `false`.") + @JsonPropertyDescription("If true, the processor will convert the fields from the source map into lists and " + + "place them in fields in the target list. Default is false.") private boolean convertFieldToList = false; + @JsonProperty("exclude_keys") + @JsonPropertyDescription("The keys in the source map that will be excluded from processing. 
Default is an " + + "empty list ([]).") + private List excludeKeys = DEFAULT_EXCLUDE_KEYS; + @JsonProperty("tags_on_failure") @JsonPropertyDescription("A list of tags to add to the event metadata when the event fails to process.") private List tagsOnFailure; + @JsonProperty("map_to_list_when") + @JsonPropertyDescription("A conditional expression, " + + "such as /some-key == \"test\"', that will be evaluated to determine whether the processor will " + + "be run on the event. By default, all events will be processed unless otherwise stated.") + private String mapToListWhen; + public String getSource() { return source; } diff --git a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/RenameKeyProcessorConfig.java b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/RenameKeyProcessorConfig.java index 99cfad5a23..ee967f32cc 100644 --- a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/RenameKeyProcessorConfig.java +++ b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/RenameKeyProcessorConfig.java @@ -19,8 +19,9 @@ import java.util.List; @JsonPropertyOrder -@JsonClassDescription("The `rename_keys` processor renames keys in an event.") +@JsonClassDescription("The rename_keys processor renames keys in an event.") public class RenameKeyProcessorConfig { + @JsonPropertyOrder public static class Entry { @NotEmpty @NotNull @@ -41,9 +42,9 @@ public static class Entry { private boolean overwriteIfToKeyExists = false; @JsonProperty("rename_when") - @JsonPropertyDescription("A Data Prepper [conditional expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/), " + - "such as `/some-key == \"test\"'`, that will be evaluated to determine whether the processor will be " + - "run on the 
event. Default is `null`. All events will be processed unless otherwise stated.") + @JsonPropertyDescription("A conditional expression, " + + "such as /some-key == \"test\"', that will be evaluated to determine whether the processor will be " + + "run on the event. By default, all events will be processed unless otherwise stated.") private String renameWhen; public EventKey getFromKey() { diff --git a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/SelectEntriesProcessorConfig.java b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/SelectEntriesProcessorConfig.java index b0e6a98f26..7fec6ae032 100644 --- a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/SelectEntriesProcessorConfig.java +++ b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/SelectEntriesProcessorConfig.java @@ -15,7 +15,7 @@ import java.util.List; @JsonPropertyOrder -@JsonClassDescription("The `select_entries` processor selects entries from a Data Prepper event.") +@JsonClassDescription("The select_entries processor selects entries from an event.") public class SelectEntriesProcessorConfig { @NotEmpty @NotNull @@ -24,9 +24,9 @@ public class SelectEntriesProcessorConfig { private List includeKeys; @JsonProperty("select_when") - @JsonPropertyDescription("A Data Prepper [conditional expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/), " + - "such as `/some-key == \"test\"'`, that will be evaluated to determine whether the processor will be " + - "run on the event. Default is `null`. 
All events will be processed unless otherwise stated.") + @JsonPropertyDescription("A conditional expression, " + + "such as /some-key == \"test\"', that will be evaluated to determine whether the processor will be " + + "run on the event. Default is null. All events will be processed unless otherwise stated.") private String selectWhen; public List getIncludeKeys() { diff --git a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/TargetType.java b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/TargetType.java index acf67e8702..32998e89ff 100644 --- a/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/TargetType.java +++ b/data-prepper-plugins/mutate-event-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutateevent/TargetType.java @@ -6,6 +6,7 @@ package org.opensearch.dataprepper.plugins.processor.mutateevent; import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; import org.opensearch.dataprepper.model.event.DataType; import org.opensearch.dataprepper.typeconverter.BigDecimalConverter; import org.opensearch.dataprepper.typeconverter.BooleanConverter; @@ -27,9 +28,9 @@ public enum TargetType { LONG(DataType.LONG, new LongConverter()), BIG_DECIMAL(DataType.BIG_DECIMAL, new BigDecimalConverter()); - private static final Map OPTIONS_MAP = Arrays.stream(TargetType.values()) + private static final Map OPTIONS_MAP = Arrays.stream(TargetType.values()) .collect(Collectors.toMap( - value -> value.dataType.getTypeName(), + value -> value.dataType, value -> value )); @@ -51,6 +52,11 @@ DataType getDataType() { @JsonCreator static TargetType fromOptionValue(final String option) { - return OPTIONS_MAP.get(option.toLowerCase()); + return OPTIONS_MAP.get(DataType.fromTypeName(option)); + } + + @JsonValue + public 
String getOptionValue() { + return dataType.getTypeName(); } } diff --git a/data-prepper-plugins/mutate-event-processors/src/test/java/org/opensearch/dataprepper/plugins/processor/mutateevent/ListToMapProcessorConfig_FlattenedElementTest.java b/data-prepper-plugins/mutate-event-processors/src/test/java/org/opensearch/dataprepper/plugins/processor/mutateevent/ListToMapProcessorConfig_FlattenedElementTest.java new file mode 100644 index 0000000000..c5413c49fd --- /dev/null +++ b/data-prepper-plugins/mutate-event-processors/src/test/java/org/opensearch/dataprepper/plugins/processor/mutateevent/ListToMapProcessorConfig_FlattenedElementTest.java @@ -0,0 +1,54 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.processor.mutateevent; + +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; +import org.junit.jupiter.params.provider.EnumSource; + +import java.util.stream.Stream; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.emptyString; +import static org.junit.jupiter.params.provider.Arguments.arguments; + +class ListToMapProcessorConfig_FlattenedElementTest { + + @ParameterizedTest + @EnumSource(ListToMapProcessorConfig.FlattenedElement.class) + void fromOptionValue_returns_expected_value(final ListToMapProcessorConfig.FlattenedElement flattenedElement) { + assertThat(ListToMapProcessorConfig.FlattenedElement.fromOptionValue(flattenedElement.getOptionValue()), equalTo(flattenedElement)); + } + + @ParameterizedTest + 
@EnumSource(ListToMapProcessorConfig.FlattenedElement.class) + void getOptionValue_returns_non_empty_string_for_all_types(final ListToMapProcessorConfig.FlattenedElement flattenedElement) { + assertThat(flattenedElement.getOptionValue(), notNullValue()); + assertThat(flattenedElement.getOptionValue(), not(emptyString())); + } + + @ParameterizedTest + @ArgumentsSource(FlattenedElementToKnownName.class) + void getOptionValue_returns_expected_name(final ListToMapProcessorConfig.FlattenedElement flattenedElement, final String expectedString) { + assertThat(flattenedElement.getOptionValue(), equalTo(expectedString)); + } + + static class FlattenedElementToKnownName implements ArgumentsProvider { + @Override + public Stream provideArguments(final ExtensionContext extensionContext) { + return Stream.of( + arguments(ListToMapProcessorConfig.FlattenedElement.FIRST, "first"), + arguments(ListToMapProcessorConfig.FlattenedElement.LAST, "last") + ); + } + } +} \ No newline at end of file diff --git a/data-prepper-plugins/mutate-event-processors/src/test/java/org/opensearch/dataprepper/plugins/processor/mutateevent/TargetTypeTest.java b/data-prepper-plugins/mutate-event-processors/src/test/java/org/opensearch/dataprepper/plugins/processor/mutateevent/TargetTypeTest.java index 0b653fc766..b7d497fe66 100644 --- a/data-prepper-plugins/mutate-event-processors/src/test/java/org/opensearch/dataprepper/plugins/processor/mutateevent/TargetTypeTest.java +++ b/data-prepper-plugins/mutate-event-processors/src/test/java/org/opensearch/dataprepper/plugins/processor/mutateevent/TargetTypeTest.java @@ -5,6 +5,7 @@ package org.opensearch.dataprepper.plugins.processor.mutateevent; +import com.fasterxml.jackson.core.JsonProcessingException; import org.junit.jupiter.api.extension.ExtensionContext; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; @@ -25,12 +26,20 @@ class TargetTypeTest { void fromTypeName_returns_expected_value(final 
TargetType targetType) { assertThat(TargetType.fromOptionValue(targetType.getDataType().getTypeName()), equalTo(targetType)); } + @ParameterizedTest @ArgumentsSource(DataTypeToTargetTypeArgumentsProvider.class) void fromTypeName_returns_expected_value_based_on_DataType(final String typeName, final TargetType targetType) { assertThat(TargetType.fromOptionValue(typeName), equalTo(targetType)); } + + @ParameterizedTest + @ArgumentsSource(DataTypeToTargetTypeArgumentsProvider.class) + void getOptionValue_returns_data_type_name(final String typeName, final TargetType targetType) throws JsonProcessingException { + assertThat(targetType.getOptionValue(), equalTo(typeName)); + } + static class DataTypeToTargetTypeArgumentsProvider implements ArgumentsProvider { @Override public Stream provideArguments(final ExtensionContext extensionContext) { diff --git a/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/LowercaseStringProcessor.java b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/LowercaseStringProcessor.java index c2c2071e95..b1f4d54d8f 100644 --- a/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/LowercaseStringProcessor.java +++ b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/LowercaseStringProcessor.java @@ -18,7 +18,7 @@ * This processor takes in a key and changes its value to a lowercase string. If the value is not a string, * no action is performed. 
*/ -@DataPrepperPlugin(name = "lowercase_string", pluginType = Processor.class, pluginConfigurationType = WithKeysConfig.class) +@DataPrepperPlugin(name = "lowercase_string", pluginType = Processor.class, pluginConfigurationType = LowercaseStringProcessorConfig.class) public class LowercaseStringProcessor extends AbstractStringProcessor { @DataPrepperPluginConstructor public LowercaseStringProcessor(final PluginMetrics pluginMetrics, final WithKeysConfig config) { diff --git a/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/LowercaseStringProcessorConfig.java b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/LowercaseStringProcessorConfig.java new file mode 100644 index 0000000000..d6c599420c --- /dev/null +++ b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/LowercaseStringProcessorConfig.java @@ -0,0 +1,14 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.processor.mutatestring; + +import com.fasterxml.jackson.annotation.JsonClassDescription; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; + +@JsonPropertyOrder +@JsonClassDescription("Makes all characters in strings lowercase.") +public class LowercaseStringProcessorConfig extends WithKeysConfig { +} diff --git a/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/ReplaceStringProcessorConfig.java b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/ReplaceStringProcessorConfig.java index 6d049910fb..28f7762278 100644 --- a/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/ReplaceStringProcessorConfig.java +++ 
b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/ReplaceStringProcessorConfig.java @@ -14,21 +14,22 @@ import java.util.List; @JsonPropertyOrder -@JsonClassDescription("The `replace_string` processor replaces all occurrence of substring in key’s value with a " + +@JsonClassDescription("The replace_string processor replaces all occurrence of substring in key’s value with a " + "replacement string.") public class ReplaceStringProcessorConfig implements StringProcessorConfig { + @JsonPropertyOrder public static class Entry { - @JsonPropertyDescription("The key to modify.") + @JsonPropertyDescription("The key of the field to modify.") private EventKey source; @JsonPropertyDescription("The substring to be replaced in the source.") private String from; - @JsonPropertyDescription("The string to be replaced for each match of `from`.") + @JsonPropertyDescription("The string to be replaced for each match of from.") private String to; @JsonProperty("replace_when") - @JsonPropertyDescription("A Data Prepper [conditional expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/), " + - "such as `/some-key == \"test\"'`, that will be evaluated to determine whether the processor will be " + - "run on the event. Default is `null`. All events will be processed unless otherwise stated.") + @JsonPropertyDescription("A conditional expression, " + + "such as /some-key == \"test\"', that will be evaluated to determine whether the processor will be " + + "run on the event. By default, all events will be processed unless otherwise stated.") private String replaceWhen; public EventKey getSource() { @@ -55,7 +56,7 @@ public Entry(final EventKey source, final String from, final String to, final St public Entry() {} } - @JsonPropertyDescription("List of entries. Valid values are `source`, `from`, and `to`, and `substitute_when`.") + @JsonPropertyDescription("List of entries. 
Each entry defines a replacement.") private List entries; public List getEntries() { diff --git a/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/SplitStringProcessorConfig.java b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/SplitStringProcessorConfig.java index 7995c8e63e..79bb04fe4b 100644 --- a/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/SplitStringProcessorConfig.java +++ b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/SplitStringProcessorConfig.java @@ -19,29 +19,28 @@ import java.util.List; @JsonPropertyOrder -@JsonClassDescription("The `split_string` processor splits a field into an array using a delimiting character and is a " + - "[mutate string](https://github.com/opensearch-project/data-prepper/tree/main/data-prepper-plugins/mutate-string-processors#mutate-string-processors) processor.") +@JsonClassDescription("The split_string processor splits a field into an array using a delimiting character.") public class SplitStringProcessorConfig implements StringProcessorConfig { + @JsonPropertyOrder public static class Entry { - @NotEmpty @NotNull - @JsonPropertyDescription("The key to split.") + @JsonPropertyDescription("The key name of the field to split.") private EventKey source; - @JsonProperty("delimiter_regex") - @JsonPropertyDescription("The regex string responsible for the split. Cannot be defined at the same time as `delimiter`. " + - "At least `delimiter` or `delimiter_regex` must be defined.") - private String delimiterRegex; - @Size(min = 1, max = 1) @JsonPropertyDescription("The separator character responsible for the split. " + - "Cannot be defined at the same time as `delimiter_regex`. 
" + - "At least `delimiter` or `delimiter_regex` must be defined.") + "Cannot be defined at the same time as delimiter_regex. " + + "At least delimiter or delimiter_regex must be defined.") private String delimiter; + @JsonProperty("delimiter_regex") + @JsonPropertyDescription("The regex string responsible for the split. Cannot be defined at the same time as delimiter. " + + "At least delimiter or delimiter_regex must be defined.") + private String delimiterRegex; + @JsonProperty("split_when") - @JsonPropertyDescription("Specifies under what condition the `split_string` processor should perform splitting. " + + @JsonPropertyDescription("Specifies under what condition the split_string processor should perform splitting. " + "Default is no condition.") private String splitWhen; @@ -75,7 +74,7 @@ public List getIterativeConfig() { return entries; } - @JsonPropertyDescription("List of entries. Valid values are `source`, `delimiter`, and `delimiter_regex`.") + @JsonPropertyDescription("List of entries. 
Each entry defines a split.") @NotNull private List<@Valid Entry> entries; diff --git a/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/SubstituteStringProcessorConfig.java b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/SubstituteStringProcessorConfig.java index b9f09e21a8..07d22d72f8 100644 --- a/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/SubstituteStringProcessorConfig.java +++ b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/SubstituteStringProcessorConfig.java @@ -15,24 +15,27 @@ import java.util.List; @JsonPropertyOrder -@JsonClassDescription("The `substitute_string` processor matches a key’s value against a regular expression and " + +@JsonClassDescription("The substitute_string processor matches a key’s value against a regular expression and " + "replaces all matches with a replacement string.") public class SubstituteStringProcessorConfig implements StringProcessorConfig { + @JsonPropertyOrder public static class Entry { - @JsonPropertyDescription("The key to modify.") + @JsonPropertyDescription("The key of the field to modify.") private EventKey source; - @JsonPropertyDescription("The Regex String to be replaced. Special regex characters such as `[` and `]` must " + - "be escaped using `\\\\` when using double quotes and `\\ ` when using single quotes. " + - "See [Java Patterns](https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/util/regex/Pattern.html) " + + + @JsonPropertyDescription("The regular expression to match on for replacement. Special regex characters such as [ and ] must " + + "be escaped using \\\\ when using double quotes and \\ when using single quotes. 
" + + "See Java Patterns" + "for more information.") private String from; - @JsonPropertyDescription("The String to be substituted for each match of `from`.") + + @JsonPropertyDescription("The string to be substituted for each match of from.") private String to; @JsonProperty("substitute_when") - @JsonPropertyDescription("A Data Prepper [conditional expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/), " + - "such as `/some-key == \"test\"'`, that will be evaluated to determine whether the processor will be " + - "run on the event. Default is `null`. All events will be processed unless otherwise stated.") + @JsonPropertyDescription("A conditional expression, " + + "such as /some-key == \"test\"', that will be evaluated to determine whether the processor will be " + + "run on the event. By default, all events will be processed unless otherwise stated.") private String substituteWhen; public EventKey getSource() { @@ -59,7 +62,7 @@ public Entry(final EventKey source, final String from, final String to, final St public Entry() {} } - @JsonPropertyDescription("List of entries. Valid values are `source`, `from`, and `to`, and `substitute_when`.") + @JsonPropertyDescription("List of entries. 
Each entry defines a substitution.") @NotNull private List entries; diff --git a/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/TrimStringProcessor.java b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/TrimStringProcessor.java index 2a1213f30f..5f8aa87bb1 100644 --- a/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/TrimStringProcessor.java +++ b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/TrimStringProcessor.java @@ -16,7 +16,7 @@ * This processor takes in a key and changes its value to a string with the leading and trailing spaces trimmed. * If the value is not a string, no action is performed. */ -@DataPrepperPlugin(name = "trim_string", pluginType = Processor.class, pluginConfigurationType = WithKeysConfig.class) +@DataPrepperPlugin(name = "trim_string", pluginType = Processor.class, pluginConfigurationType = TrimStringProcessorConfig.class) public class TrimStringProcessor extends AbstractStringProcessor { @DataPrepperPluginConstructor public TrimStringProcessor(final PluginMetrics pluginMetrics, final WithKeysConfig config) { diff --git a/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/TrimStringProcessorConfig.java b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/TrimStringProcessorConfig.java new file mode 100644 index 0000000000..9200b98ad9 --- /dev/null +++ b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/TrimStringProcessorConfig.java @@ -0,0 +1,14 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package 
org.opensearch.dataprepper.plugins.processor.mutatestring; + +import com.fasterxml.jackson.annotation.JsonClassDescription; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; + +@JsonPropertyOrder +@JsonClassDescription("Trims strings by removing whitespace from the beginning and end of the strings.") +public class TrimStringProcessorConfig extends WithKeysConfig { +} diff --git a/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/UppercaseStringProcessorConfig.java b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/UppercaseStringProcessorConfig.java new file mode 100644 index 0000000000..975c90a444 --- /dev/null +++ b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/UppercaseStringProcessorConfig.java @@ -0,0 +1,14 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.processor.mutatestring; + +import com.fasterxml.jackson.annotation.JsonClassDescription; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; + +@JsonPropertyOrder +@JsonClassDescription("Makes all characters in strings uppercase.") +public class UppercaseStringProcessorConfig extends WithKeysConfig { +} diff --git a/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/WithKeysConfig.java b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/WithKeysConfig.java index a8087954d1..c9a7e6b52c 100644 --- a/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/WithKeysConfig.java +++ 
b/data-prepper-plugins/mutate-string-processors/src/main/java/org/opensearch/dataprepper/plugins/processor/mutatestring/WithKeysConfig.java @@ -5,33 +5,24 @@ package org.opensearch.dataprepper.plugins.processor.mutatestring; -import com.fasterxml.jackson.annotation.JsonClassDescription; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyDescription; -import com.fasterxml.jackson.annotation.JsonPropertyOrder; import jakarta.validation.constraints.NotEmpty; import jakarta.validation.constraints.NotNull; import org.opensearch.dataprepper.model.event.EventKey; import java.util.List; -@JsonPropertyOrder -@JsonClassDescription("This processor is a [mutate string]" + - "(https://github.com/opensearch-project/data-prepper/tree/main/data-prepper-plugins/mutate-string-processors#mutate-string-processors) processor.") -public class WithKeysConfig implements StringProcessorConfig { +public abstract class WithKeysConfig implements StringProcessorConfig { @NotNull @NotEmpty @JsonProperty("with_keys") - @JsonPropertyDescription("A list of keys to trim the white space from.") + @JsonPropertyDescription("A list of keys to modify.") private List withKeys; @Override public List getIterativeConfig() { return withKeys; } - - public List getWithKeys() { - return withKeys; - } } diff --git a/data-prepper-plugins/obfuscate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/obfuscation/ObfuscationProcessorConfig.java b/data-prepper-plugins/obfuscate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/obfuscation/ObfuscationProcessorConfig.java index fa4df27ce1..faa22913cc 100644 --- a/data-prepper-plugins/obfuscate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/obfuscation/ObfuscationProcessorConfig.java +++ b/data-prepper-plugins/obfuscate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/obfuscation/ObfuscationProcessorConfig.java @@ -12,13 +12,15 @@ import 
jakarta.validation.constraints.NotEmpty; import jakarta.validation.constraints.NotNull; import org.opensearch.dataprepper.expression.ExpressionEvaluator; +import org.opensearch.dataprepper.model.annotations.UsesDataPrepperPlugin; import org.opensearch.dataprepper.model.configuration.PluginModel; import org.opensearch.dataprepper.model.plugin.InvalidPluginConfigurationException; +import org.opensearch.dataprepper.plugins.processor.obfuscation.action.ObfuscationAction; import java.util.List; @JsonPropertyOrder -@JsonClassDescription("The `obfuscate` process enables obfuscation of fields inside your documents in order to " + +@JsonClassDescription("The obfuscate process enables obfuscation of fields inside your documents in order to " + "protect sensitive data.") public class ObfuscationProcessorConfig { @@ -28,35 +30,35 @@ public class ObfuscationProcessorConfig { @NotNull private String source; - @JsonProperty("patterns") - @JsonPropertyDescription("A list of regex patterns that allow you to obfuscate specific parts of a field. Only parts that match the regex pattern will obfuscate. When not provided, the processor obfuscates the whole field.") - private List patterns; - @JsonProperty("target") @JsonPropertyDescription("The new field in which to store the obfuscated value. " + "This leaves the original source field unchanged. " + - "When no `target` is provided, the source field updates with the obfuscated value.") + "When no target is provided, the source field updates with the obfuscated value.") private String target; + @JsonProperty("patterns") + @JsonPropertyDescription("A list of regex patterns that allow you to obfuscate specific parts of a field. Only parts that match the regex pattern will obfuscate. When not provided, the processor obfuscates the whole field.") + private List patterns; + @JsonProperty("action") - @JsonPropertyDescription("The obfuscation action. 
Available actions include 'hash' and 'mask'.") + @JsonPropertyDescription("The obfuscation action. Available actions include hash and mask.") + @UsesDataPrepperPlugin(pluginType = ObfuscationAction.class) private PluginModel action; - @JsonProperty("obfuscate_when") - @JsonPropertyDescription("Specifies under what condition the Obfuscate processor should perform matching. " + - "Default is no condition.") - private String obfuscateWhen; + @JsonProperty("single_word_only") + @JsonPropertyDescription("When set to true, a word boundary \b is added to the pattern, " + + "which causes obfuscation to be applied only to words that are standalone in the input text. " + + "By default, it is false, meaning obfuscation patterns are applied to all occurrences.") + private boolean singleWordOnly = false; @JsonProperty("tags_on_match_failure") - @JsonPropertyDescription("The tag to add to an event if the obfuscate processor fails to match the pattern.") + @JsonPropertyDescription("The tag to add to an event if the obfuscate processor fails to match the pattern.") private List tagsOnMatchFailure; - @JsonProperty("single_word_only") - @JsonPropertyDescription("When set to `true`, a word boundary `\b` is added to the pattern, " + - "which causes obfuscation to be applied only to words that are standalone in the input text. " + - "By default, it is false, meaning obfuscation patterns are applied to all occurrences. " + - "Can be used for Data Prepper 2.8 or greater.") - private boolean singleWordOnly = false; + @JsonProperty("obfuscate_when") + @JsonPropertyDescription("A conditional expression such as '/is_testing_data == true'. " + + "If specified, the obfuscate processor will only run on events when the expression evaluates to true. 
") + private String obfuscateWhen; public ObfuscationProcessorConfig() { } diff --git a/data-prepper-plugins/obfuscate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/obfuscation/action/MaskActionConfig.java b/data-prepper-plugins/obfuscate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/obfuscation/action/MaskActionConfig.java index 01e5fd01a3..809f28397e 100644 --- a/data-prepper-plugins/obfuscate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/obfuscation/action/MaskActionConfig.java +++ b/data-prepper-plugins/obfuscate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/obfuscation/action/MaskActionConfig.java @@ -5,20 +5,27 @@ package org.opensearch.dataprepper.plugins.processor.obfuscation.action; +import com.fasterxml.jackson.annotation.JsonClassDescription; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyDescription; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; import jakarta.validation.constraints.Max; import jakarta.validation.constraints.Min; import jakarta.validation.constraints.Pattern; +@JsonClassDescription("Obfuscates data by masking data. Without any configuration this will replace text with ***") +@JsonPropertyOrder public class MaskActionConfig { @JsonProperty("mask_character") @Pattern(regexp = "[*#!%&@]", message = "Valid characters are *, #, $, %, &, ! and @") + @JsonPropertyDescription("The character to use to mask text. By default, this is *") private String maskCharacter = "*"; @JsonProperty("mask_character_length") @Min(1) @Max(10) + @JsonPropertyDescription("The length of the character mask to apply. 
By default, this is three characters.") private int maskCharacterLength = 3; public MaskActionConfig() { diff --git a/data-prepper-plugins/obfuscate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/obfuscation/action/OneWayHashActionConfig.java b/data-prepper-plugins/obfuscate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/obfuscation/action/OneWayHashActionConfig.java index 3e3ab622c5..9e2fd64988 100644 --- a/data-prepper-plugins/obfuscate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/obfuscation/action/OneWayHashActionConfig.java +++ b/data-prepper-plugins/obfuscate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/obfuscation/action/OneWayHashActionConfig.java @@ -5,6 +5,8 @@ package org.opensearch.dataprepper.plugins.processor.obfuscation.action; +import com.fasterxml.jackson.annotation.JsonClassDescription; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; import org.opensearch.dataprepper.model.event.EventKeyConfiguration; import org.opensearch.dataprepper.model.event.EventKeyFactory; @@ -15,10 +17,9 @@ import jakarta.validation.constraints.Size; import org.opensearch.dataprepper.model.event.EventKey; -import org.opensearch.dataprepper.model.event.EventKeyConfiguration; -import org.opensearch.dataprepper.model.event.EventKeyFactory; - +@JsonClassDescription("Obfuscates data by performing a one-way hash.") +@JsonPropertyOrder public class OneWayHashActionConfig { @JsonProperty("salt") @@ -27,17 +28,17 @@ public class OneWayHashActionConfig { @Size(max = 64, message = "Maximum size of salt string is 64") private String salt; - @JsonProperty("format") - @Pattern(regexp = "SHA-512", message = "Valid values: SHA-512") - @JsonPropertyDescription("Format of one way hash to generate. Default to SHA-512.") - private String format = "SHA-512"; - @JsonProperty("salt_key") - @JsonPropertyDescription("A key to compute salt based on a value provided as part of a record." 
+ - "If key or value was not found in the record(s), a salt defined in the pipeline configuration will be used instead.") + @JsonPropertyDescription("A key to compute salt based on a value provided as part of a record. " + + "If key or value was not found in the event, a salt defined in the pipeline configuration will be used instead.") @EventKeyConfiguration(EventKeyFactory.EventAction.GET) private EventKey saltKey; - + + @JsonProperty("format") + @Pattern(regexp = "SHA-512", message = "Valid values: SHA-512") + @JsonPropertyDescription("Format of one way hash to generate. Default to SHA-512.") + private String format = "SHA-512"; + public OneWayHashActionConfig(){ } diff --git a/data-prepper-plugins/otel-metrics-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OtelMetricsRawProcessorConfig.java b/data-prepper-plugins/otel-metrics-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OtelMetricsRawProcessorConfig.java index 496df187e6..a481f0a0ab 100644 --- a/data-prepper-plugins/otel-metrics-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OtelMetricsRawProcessorConfig.java +++ b/data-prepper-plugins/otel-metrics-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/otelmetrics/OtelMetricsRawProcessorConfig.java @@ -13,12 +13,12 @@ import com.fasterxml.jackson.annotation.JsonPropertyOrder; @JsonPropertyOrder -@JsonClassDescription("The `otel_metrics` processor serializes a collection of `ExportMetricsServiceRequest` records " + - "sent from the [OTel metrics source](https://opensearch.org/docs/latest/data-prepper/pipelines/configuration/sources/otel-metrics-source/) into a collection of string records.") +@JsonClassDescription("The otel_metrics processor serializes a collection of ExportMetricsServiceRequest records " + + "sent from the OTel metrics source into a collection of string records.") public class OtelMetricsRawProcessorConfig { 
@JsonProperty("flatten_attributes") - @JsonPropertyDescription("Whether or not to flatten the `attributes` field in the JSON data.") + @JsonPropertyDescription("Whether or not to flatten the attributes field in the JSON data.") boolean flattenAttributesFlag = true; @JsonPropertyDescription("Whether or not to calculate histogram buckets.") @@ -27,7 +27,7 @@ public class OtelMetricsRawProcessorConfig { @JsonPropertyDescription("Whether or not to calculate exponential histogram buckets.") private Boolean calculateExponentialHistogramBuckets = true; - @JsonPropertyDescription("Maximum allowed scale in exponential histogram calculation.") + @JsonPropertyDescription("Maximum allowed scale in exponential histogram calculation. By default, the maximum allowed scale is 10.") private Integer exponentialHistogramMaxAllowedScale = DEFAULT_EXPONENTIAL_HISTOGRAM_MAX_ALLOWED_SCALE; public Boolean getCalculateExponentialHistogramBuckets() { diff --git a/data-prepper-plugins/otel-trace-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/oteltrace/OtelTraceRawProcessorConfig.java b/data-prepper-plugins/otel-trace-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/oteltrace/OtelTraceRawProcessorConfig.java index 16b3017a81..ddbad4b4d3 100644 --- a/data-prepper-plugins/otel-trace-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/oteltrace/OtelTraceRawProcessorConfig.java +++ b/data-prepper-plugins/otel-trace-raw-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/oteltrace/OtelTraceRawProcessorConfig.java @@ -13,24 +13,27 @@ import java.time.Duration; @JsonPropertyOrder -@JsonClassDescription("The `otel_trace` processor completes trace-group-related fields in all incoming Data Prepper " + - "span records by state caching the root span information for each `traceId`.") +@JsonClassDescription("The otel_traces processor completes trace-group-related fields in all incoming " + + "span records by state 
caching the root span information for each traceId.") public class OtelTraceRawProcessorConfig { static final long DEFAULT_TG_FLUSH_INTERVAL_SEC = 180L; static final Duration DEFAULT_TRACE_ID_TTL = Duration.ofSeconds(15L); static final long MAX_TRACE_ID_CACHE_SIZE = 1_000_000L; + @JsonProperty("trace_flush_interval") @JsonPropertyDescription("Represents the time interval in seconds to flush all the descendant spans without any " + - "root span. Default is 180.") + "root span. Default is 180.") private long traceFlushInterval = DEFAULT_TG_FLUSH_INTERVAL_SEC; @JsonProperty("trace_group_cache_ttl") - @JsonPropertyDescription("Represents the time-to-live to cache a trace group details. Default is 15 seconds.") + @JsonPropertyDescription("Represents the time-to-live to cache a trace group details. " + + "The value may be an ISO 8601 notation such as PT1M30S or a duration and unit such as 45s. " + + "Default is 15 seconds.") private Duration traceGroupCacheTimeToLive = DEFAULT_TRACE_ID_TTL; @JsonProperty("trace_group_cache_max_size") @JsonPropertyDescription("Represents the maximum size of the cache to store the trace group details from root spans. 
" + - "Default is 1000000.") + "Default is 1000000.") private long traceGroupCacheMaxSize = MAX_TRACE_ID_CACHE_SIZE; public long getTraceFlushIntervalSeconds() { diff --git a/data-prepper-plugins/parse-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/parse/ion/ParseIonProcessorConfig.java b/data-prepper-plugins/parse-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/parse/ion/ParseIonProcessorConfig.java index 38301dfb31..4c6b9915b4 100644 --- a/data-prepper-plugins/parse-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/parse/ion/ParseIonProcessorConfig.java +++ b/data-prepper-plugins/parse-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/parse/ion/ParseIonProcessorConfig.java @@ -21,48 +21,50 @@ import java.util.Objects; @JsonPropertyOrder -@JsonClassDescription("The `parse_ion` processor parses [Amazon Ion](https://amazon-ion.github.io/ion-docs/) data.") +@JsonClassDescription("The parse_ion processor parses Amazon Ion data.") public class ParseIonProcessorConfig implements CommonParseConfig { static final String DEFAULT_SOURCE = "message"; @NotBlank @JsonProperty("source") - @JsonPropertyDescription("The field in the event that will be parsed. Default value is message.") + @JsonPropertyDescription("The field in the event that will be parsed. The default value is message.") private String source = DEFAULT_SOURCE; @JsonProperty("destination") - @JsonPropertyDescription("The destination field of the parsed JSON. Defaults to the root of the event. Cannot be an empty string, /, or any white-space-only string because these are not valid event fields.") + @JsonPropertyDescription("The destination field of the structured object from the parsed ION. Defaults to the root of the event. 
Cannot be an empty string, /, or any whitespace-only string because these are not valid event fields.") private String destination; @JsonProperty("pointer") - @JsonPropertyDescription("A JSON pointer to the field to be parsed. There is no pointer by default, meaning the entire source is parsed. The pointer can access JSON array indexes as well. If the JSON pointer is invalid then the entire source data is parsed into the outgoing event. If the key that is pointed to already exists in the event and the destination is the root, then the pointer uses the entire path of the key.") + @JsonPropertyDescription("A JSON pointer to the field to be parsed. There is no pointer by default, meaning the entire source is parsed. The pointer can access JSON array indexes as well. " + + "If the JSON pointer is invalid then the entire source data is parsed into the outgoing event. If the key that is pointed to already exists in the event and the destination is the root, then the pointer uses the entire path of the key.") private String pointer; - @JsonProperty("parse_when") - @JsonPropertyDescription("A Data Prepper [conditional expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/), such as '/some-key == \"test\"', that will be evaluated to determine whether the processor will be run on the event.") - private String parseWhen; - @JsonProperty("depth") @Min(0) @Max(10) @JsonPropertyDescription("Indicates the depth at which the nested values of the event are not parsed any more. Default is 0, which means all levels of nested values are parsed. 
If the depth is 1, only the top level keys are parsed and all its nested values are represented as strings") private int depth = 0; - @JsonProperty("tags_on_failure") - @JsonPropertyDescription("A list of strings specifying the tags to be set in the event that the processor fails or an unknown exception occurs during parsing.") - private List tagsOnFailure; - @JsonProperty("overwrite_if_destination_exists") @JsonPropertyDescription("Overwrites the destination if set to true. Set to false to prevent changing a destination value that exists. Defaults to true.") private boolean overwriteIfDestinationExists = true; @JsonProperty - @JsonPropertyDescription("If true, the configured source field will be deleted after the JSON data is parsed into separate fields.") + @JsonPropertyDescription("If true, the configured source field will be deleted after the ION data is parsed into separate fields.") private boolean deleteSource = false; + @JsonProperty("tags_on_failure") + @JsonPropertyDescription("A list of strings specifying the tags to be set in the event when the processor fails or an unknown exception occurs during parsing.") + private List tagsOnFailure; + + @JsonProperty("parse_when") + @JsonPropertyDescription("A conditional expression such as /some_key == \"test\". " + + "If specified, the parse_ion processor will only run on events when the expression evaluates to true. ") + private String parseWhen; + @JsonProperty("handle_failed_events") @JsonPropertyDescription("Determines how to handle events with ION processing errors. Options include 'skip', " + - "which will log the error and send the Event downstream to the next processor, and 'skip_silently', " + + "which will log the error and send the event downstream to the next processor, and 'skip_silently', " + "which will send the Event downstream to the next processor without logging the error. 
" + "Default is 'skip'.") @NotNull diff --git a/data-prepper-plugins/parse-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/parse/json/ParseJsonProcessorConfig.java b/data-prepper-plugins/parse-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/parse/json/ParseJsonProcessorConfig.java index b424aa6d87..f3049da854 100644 --- a/data-prepper-plugins/parse-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/parse/json/ParseJsonProcessorConfig.java +++ b/data-prepper-plugins/parse-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/parse/json/ParseJsonProcessorConfig.java @@ -21,18 +21,17 @@ import java.util.List; @JsonPropertyOrder -@JsonClassDescription("The `parse_json` processor parses JSON data for an event, including any nested fields. " + - "The processor extracts the JSON pointer data and adds the input event to the extracted fields.") +@JsonClassDescription("The parse_json processor parses JSON data from fields in events.") public class ParseJsonProcessorConfig implements CommonParseConfig { static final String DEFAULT_SOURCE = "message"; @NotBlank @JsonProperty("source") - @JsonPropertyDescription("The field in the event that will be parsed. Default value is message.") + @JsonPropertyDescription("The field in the event that will be parsed. The default value is message.") private String source = DEFAULT_SOURCE; @JsonProperty("destination") - @JsonPropertyDescription("The destination field of the parsed JSON. Defaults to the root of the event. Cannot be an empty string, /, or any white-space-only string because these are not valid event fields.") + @JsonPropertyDescription("The destination field of the structured object from the parsed JSON. Defaults to the root of the event. 
Cannot be an empty string, /, or any whitespace-only string because these are not valid event fields.") private String destination; @JsonProperty("depth") @@ -42,28 +41,30 @@ public class ParseJsonProcessorConfig implements CommonParseConfig { private int depth = 0; @JsonProperty("pointer") - @JsonPropertyDescription("A JSON pointer to the field to be parsed. There is no pointer by default, meaning the entire source is parsed. The pointer can access JSON array indexes as well. If the JSON pointer is invalid then the entire source data is parsed into the outgoing event. If the key that is pointed to already exists in the event and the destination is the root, then the pointer uses the entire path of the key.") + @JsonPropertyDescription("A JSON pointer to the field to be parsed. There is no pointer by default, meaning the entire source is parsed. The pointer can access JSON array indexes as well. " + + "If the JSON pointer is invalid then the entire source data is parsed into the outgoing event. If the key that is pointed to already exists in the event and the destination is the root, then the pointer uses the entire path of the key.") private String pointer; - @JsonProperty("parse_when") - @JsonPropertyDescription("A Data Prepper [conditional expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/), such as '/some-key == \"test\"', that will be evaluated to determine whether the processor will be run on the event.") - private String parseWhen; - - @JsonProperty("tags_on_failure") - @JsonPropertyDescription("A list of strings specifying the tags to be set in the event that the processor fails or an unknown exception occurs during parsing.") - private List tagsOnFailure; - @JsonProperty("overwrite_if_destination_exists") @JsonPropertyDescription("Overwrites the destination if set to true. Set to false to prevent changing a destination value that exists. 
Defaults to true.") private boolean overwriteIfDestinationExists = true; @JsonProperty - @JsonPropertyDescription("If true, the configured source field will be deleted after the JSON data is parsed into separate fields.") + @JsonPropertyDescription("If true, the configured source field will be deleted after the JSON data is parsed into separate fields.") private boolean deleteSource = false; + @JsonProperty("tags_on_failure") + @JsonPropertyDescription("A list of strings specifying the tags to be set in the event when the processor fails or an unknown exception occurs during parsing.") + private List tagsOnFailure; + + @JsonProperty("parse_when") + @JsonPropertyDescription("A conditional expression such as /some_key == \"test\". " + + "If specified, the parse_json processor will only run on events when the expression evaluates to true. ") + private String parseWhen; + @JsonProperty("handle_failed_events") @JsonPropertyDescription("Determines how to handle events with JSON processing errors. Options include 'skip', " + - "which will log the error and send the Event downstream to the next processor, and 'skip_silently', " + + "which will log the error and send the event downstream to the next processor, and 'skip_silently', " + "which will send the Event downstream to the next processor without logging the error. 
" + "Default is 'skip'.") @NotNull diff --git a/data-prepper-plugins/parse-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/parse/xml/ParseXmlProcessorConfig.java b/data-prepper-plugins/parse-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/parse/xml/ParseXmlProcessorConfig.java index 907c219806..de0375d05d 100644 --- a/data-prepper-plugins/parse-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/parse/xml/ParseXmlProcessorConfig.java +++ b/data-prepper-plugins/parse-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/parse/xml/ParseXmlProcessorConfig.java @@ -14,42 +14,44 @@ import java.util.Objects; @JsonPropertyOrder -@JsonClassDescription("The `parse_xml` processor parses XML data for an event.") +@JsonClassDescription("The parse_xml processor parses XML data for an event.") public class ParseXmlProcessorConfig implements CommonParseConfig { static final String DEFAULT_SOURCE = "message"; @NotBlank @JsonProperty("source") - @JsonPropertyDescription("The field in the event that will be parsed. Default value is message.") + @JsonPropertyDescription("The field in the event that will be parsed. The default value is message.") private String source = DEFAULT_SOURCE; @JsonProperty("destination") - @JsonPropertyDescription("The destination field of the parsed JSON. Defaults to the root of the event. Cannot be an empty string, /, or any white-space-only string because these are not valid event fields.") + @JsonPropertyDescription("The destination field of the structured object from the parsed XML. Defaults to the root of the event. Cannot be an empty string, /, or any whitespace-only string because these are not valid event fields.") private String destination; @JsonProperty("pointer") - @JsonPropertyDescription("A JSON pointer to the field to be parsed. There is no pointer by default, meaning the entire source is parsed. The pointer can access JSON array indexes as well. 
If the JSON pointer is invalid then the entire source data is parsed into the outgoing event. If the key that is pointed to already exists in the event and the destination is the root, then the pointer uses the entire path of the key.") + @JsonPropertyDescription("A JSON pointer to the field to be parsed. There is no pointer by default, meaning the entire source is parsed. The pointer can access JSON array indexes as well. " + + "If the JSON pointer is invalid then the entire source data is parsed into the outgoing event. If the key that is pointed to already exists in the event and the destination is the root, then the pointer uses the entire path of the key.") private String pointer; - @JsonProperty("parse_when") - @JsonPropertyDescription("A Data Prepper [conditional expression](https://opensearch.org/docs/latest/data-prepper/pipelines/expression-syntax/), such as '/some-key == \"test\"', that will be evaluated to determine whether the processor will be run on the event.") - private String parseWhen; - - @JsonProperty("tags_on_failure") - @JsonPropertyDescription("A list of strings specifying the tags to be set in the event that the processor fails or an unknown exception occurs during parsing.") - private List tagsOnFailure; - @JsonProperty("overwrite_if_destination_exists") @JsonPropertyDescription("Overwrites the destination if set to true. Set to false to prevent changing a destination value that exists. Defaults to true.") private boolean overwriteIfDestinationExists = true; @JsonProperty - @JsonPropertyDescription("If true, the configured source field will be deleted after the JSON data is parsed into separate fields.") + @JsonPropertyDescription("If true, the configured source field will be deleted after the XML data is parsed into separate fields.") private boolean deleteSource = false; + @JsonProperty("parse_when") + @JsonPropertyDescription("A conditional expression such as /some_key == \"test\". 
" + + "If specified, the parse_xml processor will only run on events when the expression evaluates to true. ") + private String parseWhen; + + @JsonProperty("tags_on_failure") + @JsonPropertyDescription("A list of strings specifying the tags to be set in the event when the processor fails or an unknown exception occurs during parsing.") + private List tagsOnFailure; + @JsonProperty("handle_failed_events") @JsonPropertyDescription("Determines how to handle events with XML processing errors. Options include 'skip', " + - "which will log the error and send the Event downstream to the next processor, and 'skip_silently', " + + "which will log the error and send the event downstream to the next processor, and 'skip_silently', " + "which will send the Event downstream to the next processor without logging the error. " + "Default is 'skip'.") @NotNull diff --git a/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/JsonOutputScenario.java b/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/JsonOutputScenario.java new file mode 100644 index 0000000000..cf62024081 --- /dev/null +++ b/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/JsonOutputScenario.java @@ -0,0 +1,56 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.sink.s3; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.opensearch.dataprepper.model.codec.OutputCodec; +import org.opensearch.dataprepper.plugins.codec.json.JsonOutputCodec; +import org.opensearch.dataprepper.plugins.codec.json.JsonOutputCodecConfig; + +import java.io.File; +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; + +public class 
JsonOutputScenario implements OutputScenario { + + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + @Override + public OutputCodec getCodec() { + return new JsonOutputCodec(new JsonOutputCodecConfig()); + } + + @Override + public void validate(int expectedRecords, List> sampleEventData, File actualContentFile, CompressionScenario compressionScenario) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public void validateDynamicPartition(int expectedRecords, int partitionNumber, File actualContentFile, CompressionScenario compressionScenario) throws IOException { + JsonNode jsonArray = OBJECT_MAPPER.readTree(actualContentFile).get("events"); + + int count = 0; + for (final JsonNode eventNode : jsonArray) { + final Integer sequence = eventNode.get("sequence").intValue(); + assertThat(sequence, equalTo(partitionNumber)); + count++; + } + + if (expectedRecords != -1) { + assertThat(count, equalTo(expectedRecords)); + } + } + + @Override + public String toString() { + return "JSON"; + } +} diff --git a/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/NdjsonOutputScenario.java b/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/NdjsonOutputScenario.java index 7604945c9e..eb1bf22440 100644 --- a/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/NdjsonOutputScenario.java +++ b/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/NdjsonOutputScenario.java @@ -56,6 +56,7 @@ public void validate(int expectedRecords, final List> sample assertThat(sampledData, equalTo(sampleEventData.size())); } + @Override public void validateDynamicPartition(int expectedRecords, int partitionNumber, final File actualContentFile, final CompressionScenario compressionScenario) throws IOException { final InputStream inputStream = new 
BufferedInputStream(new FileInputStream(actualContentFile), 64 * 1024); diff --git a/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/OutputScenario.java b/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/OutputScenario.java index e60fce299d..8a7de5e6d9 100644 --- a/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/OutputScenario.java +++ b/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/OutputScenario.java @@ -50,4 +50,6 @@ default Set getIncompatibleBufferTypes() { * @throws IOException Some IOException */ void validate(int expectedRecords, List> sampleEventData, File actualContentFile, CompressionScenario compressionScenario) throws IOException; + + void validateDynamicPartition(int expectedRecords, int partitionNumber, File actualContentFile, CompressionScenario compressionScenario) throws IOException; } diff --git a/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/ParquetOutputScenario.java b/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/ParquetOutputScenario.java index e01c61fe09..8c895d530a 100644 --- a/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/ParquetOutputScenario.java +++ b/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/ParquetOutputScenario.java @@ -101,6 +101,11 @@ public void validate(int expectedRecords, final List> sample assertThat("Not all the sample data was validated.", validatedRecords, equalTo(sampleEventData.size())); } + @Override + public void validateDynamicPartition(int expectedRecords, int partitionNumber, File actualContentFile, CompressionScenario compressionScenario) throws IOException { + throw new UnsupportedOperationException(); + } + private 
static void validateParquetStructure(int expectedRecords, final List> allEventData, final InputFile inputFile, CompressionCodecName expectedCompressionCodec) throws IOException { // This test assumes that the data all has the same keys. final Map sampleEvent = allEventData.iterator().next(); diff --git a/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/S3SinkIT.java b/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/S3SinkIT.java index f61d53c4a0..7d078dbd16 100644 --- a/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/S3SinkIT.java +++ b/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/s3/S3SinkIT.java @@ -249,11 +249,11 @@ void test(final OutputScenario outputScenario, outputScenario.validate(expectedTotalSize, sampleEventData, actualContentFile, compressionScenario); } - @Test - void testWithDynamicGroups() throws IOException { + @ParameterizedTest + @ArgumentsSource(OutputScenarioArguments.class) + void testWithDynamicGroups(final OutputScenario outputScenario) throws IOException { final BufferScenario bufferScenario = new InMemoryBufferScenario(); final CompressionScenario compressionScenario = new NoneCompressionScenario(); - final NdjsonOutputScenario outputScenario = new NdjsonOutputScenario(); final SizeCombination sizeCombination = SizeCombination.MEDIUM_SMALLER; BufferTypeOptions bufferTypeOptions = bufferScenario.getBufferType(); @@ -275,7 +275,7 @@ void testWithDynamicGroups() throws IOException { when(expressionEvaluator.extractDynamicExpressionsFromFormatExpression(objectKeyOptions.getNamePattern())) .thenReturn(Collections.emptyList()); - when(pluginFactory.loadPlugin(eq(OutputCodec.class), any())).thenReturn(outputScenario.getCodec()); + when(pluginFactory.loadPlugin(eq(OutputCodec.class), any())).thenAnswer(invocation -> outputScenario.getCodec()); 
when(s3SinkConfig.getBufferType()).thenReturn(bufferTypeOptions); when(s3SinkConfig.getCompression()).thenReturn(compressionScenario.getCompressionOption()); int expectedTotalSize = sizeCombination.getTotalSize(); @@ -361,7 +361,7 @@ void testWithDynamicGroupsAndAggregateThreshold() throws IOException { when(expressionEvaluator.extractDynamicExpressionsFromFormatExpression(objectKeyOptions.getNamePattern())) .thenReturn(Collections.emptyList()); - when(pluginFactory.loadPlugin(eq(OutputCodec.class), any())).thenReturn(outputScenario.getCodec()); + when(pluginFactory.loadPlugin(eq(OutputCodec.class), any())).thenAnswer(invocation -> outputScenario.getCodec()); when(s3SinkConfig.getBufferType()).thenReturn(bufferTypeOptions); when(s3SinkConfig.getCompression()).thenReturn(compressionScenario.getCompressionOption()); int expectedTotalSize = sizeCombination.getTotalSize(); @@ -542,6 +542,16 @@ public Stream provideArguments(final ExtensionContext conte } } + static class OutputScenarioArguments implements ArgumentsProvider { + + @Override + public Stream provideArguments(ExtensionContext extensionContext) throws Exception { + return Stream.of( + arguments(new NdjsonOutputScenario()), + arguments(new JsonOutputScenario())); + } + } + private static Stream generateCombinedArguments( final List bufferScenarios, final List outputScenarios, diff --git a/data-prepper-plugins/s3-source/build.gradle b/data-prepper-plugins/s3-source/build.gradle index 06818d8eaa..abc89a71aa 100644 --- a/data-prepper-plugins/s3-source/build.gradle +++ b/data-prepper-plugins/s3-source/build.gradle @@ -42,6 +42,7 @@ dependencies { testImplementation project(':data-prepper-plugins:avro-codecs') testImplementation project(':data-prepper-plugins:in-memory-source-coordination-store') testImplementation project(':data-prepper-core') + testImplementation project(':data-prepper-event') testImplementation project(':data-prepper-plugins:parquet-codecs') testImplementation 
project(':data-prepper-test-event') testImplementation libs.avro.core diff --git a/data-prepper-plugins/s3-source/src/main/java/org/opensearch/dataprepper/plugins/source/s3/ScanObjectWorker.java b/data-prepper-plugins/s3-source/src/main/java/org/opensearch/dataprepper/plugins/source/s3/ScanObjectWorker.java index cb9f17115f..4ecd17c584 100644 --- a/data-prepper-plugins/s3-source/src/main/java/org/opensearch/dataprepper/plugins/source/s3/ScanObjectWorker.java +++ b/data-prepper-plugins/s3-source/src/main/java/org/opensearch/dataprepper/plugins/source/s3/ScanObjectWorker.java @@ -56,8 +56,6 @@ public class ScanObjectWorker implements Runnable { static final Duration NO_OBJECTS_FOUND_BEFORE_PARTITION_DELETION_DURATION = Duration.ofHours(1); private static final int RETRY_BACKOFF_ON_EXCEPTION_MILLIS = 5_000; - - static final Duration ACKNOWLEDGEMENT_SET_TIMEOUT = Duration.ofHours(2); static final String ACKNOWLEDGEMENT_SET_CALLBACK_METRIC_NAME = "acknowledgementSetCallbackCounter"; static final String NO_OBJECTS_FOUND_FOR_FOLDER_PARTITION = "folderPartitionNoObjectsFound"; @@ -99,6 +97,8 @@ public class ScanObjectWorker implements Runnable { private final Map acknowledgmentsRemainingForPartitions; + private final Duration acknowledgmentSetTimeout; + public ScanObjectWorker(final S3Client s3Client, final List scanOptionsBuilderList, final S3ObjectHandler s3ObjectHandler, @@ -127,6 +127,7 @@ public ScanObjectWorker(final S3Client s3Client, this.sourceCoordinator.initialize(); this.partitionKeys = new ArrayList<>(); this.folderPartitioningOptions = s3SourceConfig.getS3ScanScanOptions().getPartitioningOptions(); + this.acknowledgmentSetTimeout = s3SourceConfig.getS3ScanScanOptions().getAcknowledgmentTimeout(); this.partitionCreationSupplier = new S3ScanPartitionCreationSupplier(s3Client, bucketOwnerProvider, scanOptionsBuilderList, s3ScanSchedulingOptions, s3SourceConfig.getS3ScanScanOptions().getPartitioningOptions()); this.acknowledgmentsRemainingForPartitions = new 
ConcurrentHashMap<>(); @@ -214,7 +215,7 @@ private void startProcessingObject(final long waitTimeMillis) { sourceCoordinator.giveUpPartition(objectToProcess.get().getPartitionKey()); } partitionKeys.remove(objectToProcess.get().getPartitionKey()); - }, ACKNOWLEDGEMENT_SET_TIMEOUT); + }, acknowledgmentSetTimeout); addProgressCheck(acknowledgementSet, objectToProcess.get()); } @@ -226,7 +227,7 @@ private void startProcessingObject(final long waitTimeMillis) { if (endToEndAcknowledgementsEnabled) { deleteObjectRequest.ifPresent(deleteRequest -> objectsToDeleteForAcknowledgmentSets.put(objectToProcess.get().getPartitionKey(), Set.of(deleteRequest))); try { - sourceCoordinator.updatePartitionForAcknowledgmentWait(objectToProcess.get().getPartitionKey(), ACKNOWLEDGEMENT_SET_TIMEOUT); + sourceCoordinator.updatePartitionForAcknowledgmentWait(objectToProcess.get().getPartitionKey(), acknowledgmentSetTimeout); } catch (final PartitionUpdateException e) { LOG.debug("Failed to update the partition for the acknowledgment wait."); } @@ -375,7 +376,7 @@ private void processObjectsForFolderPartition(final List obje objectIndex++; } - sourceCoordinator.updatePartitionForAcknowledgmentWait(folderPartition.getPartitionKey(), ACKNOWLEDGEMENT_SET_TIMEOUT); + sourceCoordinator.updatePartitionForAcknowledgmentWait(folderPartition.getPartitionKey(), acknowledgmentSetTimeout); if (acknowledgementSet != null) { acknowledgementSet.complete(); @@ -402,7 +403,7 @@ private AcknowledgementSet createAcknowledgmentSetForFolderPartition(final Sourc LOG.info("Received all acknowledgments for folder partition {}, giving up this partition", folderPartition.getPartitionKey()); sourceCoordinator.giveUpPartition(folderPartition.getPartitionKey(), Instant.now()); } - }, ACKNOWLEDGEMENT_SET_TIMEOUT); + }, acknowledgmentSetTimeout); } private void addProgressCheck(final AcknowledgementSet acknowledgementSet, final SourcePartition objectToProcess) { diff --git 
a/data-prepper-plugins/s3-source/src/main/java/org/opensearch/dataprepper/plugins/source/s3/configuration/S3ScanScanOptions.java b/data-prepper-plugins/s3-source/src/main/java/org/opensearch/dataprepper/plugins/source/s3/configuration/S3ScanScanOptions.java index c0a5d50711..e43e798ea9 100644 --- a/data-prepper-plugins/s3-source/src/main/java/org/opensearch/dataprepper/plugins/source/s3/configuration/S3ScanScanOptions.java +++ b/data-prepper-plugins/s3-source/src/main/java/org/opensearch/dataprepper/plugins/source/s3/configuration/S3ScanScanOptions.java @@ -22,6 +22,9 @@ */ public class S3ScanScanOptions { + @JsonProperty("acknowledgment_timeout") + private Duration acknowledgmentTimeout = Duration.ofHours(2); + @JsonProperty("folder_partitions") @Valid private FolderPartitioningOptions folderPartitioningOptions; @@ -84,4 +87,6 @@ public S3ScanSchedulingOptions getSchedulingOptions() { } public FolderPartitioningOptions getPartitioningOptions() { return folderPartitioningOptions; } + + public Duration getAcknowledgmentTimeout() { return acknowledgmentTimeout; } } diff --git a/data-prepper-plugins/s3-source/src/test/java/org/opensearch/dataprepper/plugins/source/s3/S3ScanObjectWorkerTest.java b/data-prepper-plugins/s3-source/src/test/java/org/opensearch/dataprepper/plugins/source/s3/S3ScanObjectWorkerTest.java index 0a81795647..ddfa74023f 100644 --- a/data-prepper-plugins/s3-source/src/test/java/org/opensearch/dataprepper/plugins/source/s3/S3ScanObjectWorkerTest.java +++ b/data-prepper-plugins/s3-source/src/test/java/org/opensearch/dataprepper/plugins/source/s3/S3ScanObjectWorkerTest.java @@ -69,7 +69,6 @@ import static org.mockito.Mockito.when; import static org.opensearch.dataprepper.model.source.s3.S3ScanEnvironmentVariables.STOP_S3_SCAN_PROCESSING_PROPERTY; import static org.opensearch.dataprepper.plugins.source.s3.ScanObjectWorker.ACKNOWLEDGEMENT_SET_CALLBACK_METRIC_NAME; -import static 
org.opensearch.dataprepper.plugins.source.s3.ScanObjectWorker.ACKNOWLEDGEMENT_SET_TIMEOUT; import static org.opensearch.dataprepper.plugins.source.s3.ScanObjectWorker.CHECKPOINT_OWNERSHIP_INTERVAL; import static org.opensearch.dataprepper.plugins.source.s3.ScanObjectWorker.NO_OBJECTS_FOUND_BEFORE_PARTITION_DELETION_DURATION; import static org.opensearch.dataprepper.plugins.source.s3.ScanObjectWorker.NO_OBJECTS_FOUND_FOR_FOLDER_PARTITION; @@ -125,10 +124,14 @@ class S3ScanObjectWorkerTest { private List scanOptionsList; + @Mock + private Duration acknowledgmentSetTimeout; + @BeforeEach void setup() { scanOptionsList = new ArrayList<>(); when(s3ScanScanOptions.getPartitioningOptions()).thenReturn(null); + when(s3ScanScanOptions.getAcknowledgmentTimeout()).thenReturn(acknowledgmentSetTimeout); } private ScanObjectWorker createObjectUnderTest() { @@ -232,7 +235,7 @@ void buildDeleteObjectRequest_should_be_invoked_after_processing_when_deleteS3Ob final InOrder inOrder = inOrder(sourceCoordinator, acknowledgementSet, s3ObjectDeleteWorker); inOrder.verify(s3ObjectDeleteWorker).buildDeleteObjectRequest(bucket, objectKey); - inOrder.verify(sourceCoordinator).updatePartitionForAcknowledgmentWait(partitionKey, ACKNOWLEDGEMENT_SET_TIMEOUT); + inOrder.verify(sourceCoordinator).updatePartitionForAcknowledgmentWait(partitionKey, acknowledgmentSetTimeout); inOrder.verify(acknowledgementSet).complete(); inOrder.verify(sourceCoordinator).renewPartitionOwnership(partitionKey); inOrder.verify(sourceCoordinator).completePartition(partitionKey, true); @@ -289,7 +292,7 @@ void acknowledgment_progress_check_increments_ownership_error_metric_when_partit final InOrder inOrder = inOrder(sourceCoordinator, acknowledgementSet, s3ObjectDeleteWorker); inOrder.verify(s3ObjectDeleteWorker).buildDeleteObjectRequest(bucket, objectKey); - inOrder.verify(sourceCoordinator).updatePartitionForAcknowledgmentWait(partitionKey, ACKNOWLEDGEMENT_SET_TIMEOUT); + 
inOrder.verify(sourceCoordinator).updatePartitionForAcknowledgmentWait(partitionKey, acknowledgmentSetTimeout); inOrder.verify(acknowledgementSet).complete(); inOrder.verify(sourceCoordinator).renewPartitionOwnership(partitionKey); inOrder.verify(sourceCoordinator).completePartition(partitionKey, true); @@ -536,7 +539,7 @@ void processing_with_folder_partition_processes_objects_in_folder_and_deletes_th inOrder.verify(s3ObjectDeleteWorker).buildDeleteObjectRequest(bucket, firstObject.key()); inOrder.verify(acknowledgementSet1).complete(); inOrder.verify(s3ObjectDeleteWorker).buildDeleteObjectRequest(bucket, secondObject.key()); - inOrder.verify(sourceCoordinator).updatePartitionForAcknowledgmentWait(partitionKey, ACKNOWLEDGEMENT_SET_TIMEOUT); + inOrder.verify(sourceCoordinator).updatePartitionForAcknowledgmentWait(partitionKey, acknowledgmentSetTimeout); inOrder.verify(acknowledgementSet2).complete(); final Consumer firstAckCallback = ackCallbacks.get(0); @@ -616,7 +619,7 @@ void processing_with_folder_partition_processes_objects_in_folder_until_max_obje final InOrder inOrder = inOrder(sourceCoordinator, acknowledgementSet1, s3ObjectDeleteWorker); inOrder.verify(s3ObjectDeleteWorker).buildDeleteObjectRequest(bucket, firstObject.key()); - inOrder.verify(sourceCoordinator).updatePartitionForAcknowledgmentWait(partitionKey, ACKNOWLEDGEMENT_SET_TIMEOUT); + inOrder.verify(sourceCoordinator).updatePartitionForAcknowledgmentWait(partitionKey, acknowledgmentSetTimeout); inOrder.verify(acknowledgementSet1).complete(); final Consumer ackCallback = consumerArgumentCaptor.getValue(); diff --git a/data-prepper-plugins/service-map-stateful/src/main/java/org/opensearch/dataprepper/plugins/processor/ServiceMapProcessorConfig.java b/data-prepper-plugins/service-map-stateful/src/main/java/org/opensearch/dataprepper/plugins/processor/ServiceMapProcessorConfig.java index faf98b2133..49aae094d9 100644 --- 
a/data-prepper-plugins/service-map-stateful/src/main/java/org/opensearch/dataprepper/plugins/processor/ServiceMapProcessorConfig.java +++ b/data-prepper-plugins/service-map-stateful/src/main/java/org/opensearch/dataprepper/plugins/processor/ServiceMapProcessorConfig.java @@ -9,21 +9,33 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyDescription; import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import jakarta.validation.constraints.NotEmpty; @JsonPropertyOrder -@JsonClassDescription("The `service_map` processor uses OpenTelemetry data to create a distributed service map for " + +@JsonClassDescription("The service_map processor uses OpenTelemetry data to create a distributed service map for " + "visualization in OpenSearch Dashboards.") public class ServiceMapProcessorConfig { private static final String WINDOW_DURATION = "window_duration"; static final int DEFAULT_WINDOW_DURATION = 180; static final String DEFAULT_DB_PATH = "data/service-map/"; + static final String DB_PATH = "db_path"; @JsonProperty(WINDOW_DURATION) @JsonPropertyDescription("Represents the fixed time window, in seconds, " + - "during which service map relationships are evaluated. Default value is 180.") + "during which service map relationships are evaluated. Default value is 180.") private int windowDuration = DEFAULT_WINDOW_DURATION; + @NotEmpty + @JsonProperty(DB_PATH) + @JsonPropertyDescription("Represents folder path for creating database files storing transient data off heap memory " + + "when processing service-map data. 
Default value is data/service-map/") + private String dbPath = DEFAULT_DB_PATH; + public int getWindowDuration() { return windowDuration; } + + public String getDbPath() { + return dbPath; + } } diff --git a/data-prepper-plugins/service-map-stateful/src/main/java/org/opensearch/dataprepper/plugins/processor/ServiceMapStatefulProcessor.java b/data-prepper-plugins/service-map-stateful/src/main/java/org/opensearch/dataprepper/plugins/processor/ServiceMapStatefulProcessor.java index 667b8ea882..a5be56724f 100644 --- a/data-prepper-plugins/service-map-stateful/src/main/java/org/opensearch/dataprepper/plugins/processor/ServiceMapStatefulProcessor.java +++ b/data-prepper-plugins/service-map-stateful/src/main/java/org/opensearch/dataprepper/plugins/processor/ServiceMapStatefulProcessor.java @@ -84,7 +84,7 @@ public ServiceMapStatefulProcessor( final PluginMetrics pluginMetrics, final PipelineDescription pipelineDescription) { this((long) serviceMapProcessorConfig.getWindowDuration() * TO_MILLIS, - new File(ServiceMapProcessorConfig.DEFAULT_DB_PATH), + new File(serviceMapProcessorConfig.getDbPath()), Clock.systemUTC(), pipelineDescription.getNumberOfProcessWorkers(), pluginMetrics); diff --git a/data-prepper-plugins/service-map-stateful/src/test/java/org/opensearch/dataprepper/plugins/processor/ServiceMapProcessorConfigTest.java b/data-prepper-plugins/service-map-stateful/src/test/java/org/opensearch/dataprepper/plugins/processor/ServiceMapProcessorConfigTest.java index 35ef3b0c07..cc6d5b3c8a 100644 --- a/data-prepper-plugins/service-map-stateful/src/test/java/org/opensearch/dataprepper/plugins/processor/ServiceMapProcessorConfigTest.java +++ b/data-prepper-plugins/service-map-stateful/src/test/java/org/opensearch/dataprepper/plugins/processor/ServiceMapProcessorConfigTest.java @@ -5,6 +5,7 @@ import org.opensearch.dataprepper.test.helper.ReflectivelySetField; import java.util.Random; +import java.util.UUID; import static org.hamcrest.CoreMatchers.equalTo; import static 
org.hamcrest.MatcherAssert.assertThat; @@ -23,6 +24,7 @@ void setUp() { @Test void testDefaultConfig() { assertThat(serviceMapProcessorConfig.getWindowDuration(), equalTo(DEFAULT_WINDOW_DURATION)); + assertThat(serviceMapProcessorConfig.getDbPath(), equalTo(ServiceMapProcessorConfig.DEFAULT_DB_PATH)); } @Test @@ -33,6 +35,12 @@ void testGetter() throws NoSuchFieldException, IllegalAccessException { serviceMapProcessorConfig, "windowDuration", windowDuration); - assertThat(serviceMapProcessorConfig.getWindowDuration(), equalTo(windowDuration)); + final String testDbPath = UUID.randomUUID().toString(); + ReflectivelySetField.setField( + ServiceMapProcessorConfig.class, + serviceMapProcessorConfig, + "dbPath", + testDbPath); + assertThat(serviceMapProcessorConfig.getDbPath(), equalTo(testDbPath)); } } \ No newline at end of file diff --git a/data-prepper-plugins/split-event-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/splitevent/SplitEventProcessorConfig.java b/data-prepper-plugins/split-event-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/splitevent/SplitEventProcessorConfig.java index 140e280710..2c9ce2d030 100644 --- a/data-prepper-plugins/split-event-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/splitevent/SplitEventProcessorConfig.java +++ b/data-prepper-plugins/split-event-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/splitevent/SplitEventProcessorConfig.java @@ -19,23 +19,23 @@ import jakarta.validation.constraints.Size; @JsonPropertyOrder -@JsonClassDescription("The `split_event` processor is used to split events based on a delimiter and " + +@JsonClassDescription("The split_event processor is used to split events based on a delimiter and " + "generates multiple events from a user-specified field.") public class SplitEventProcessorConfig { @NotEmpty @NotNull @JsonProperty("field") - @JsonPropertyDescription("The event field to be split") + @JsonPropertyDescription("The 
event field to be split.") private String field; - @JsonProperty("delimiter_regex") - @JsonPropertyDescription("The regular expression used as the delimiter for splitting the field") - private String delimiterRegex; - @Size(min = 1, max = 1) - @JsonPropertyDescription("The delimiter used for splitting the field. If not specified, the default delimiter is used") + @JsonPropertyDescription("The delimiter character used for splitting the field. You must provide either the delimiter or the delimiter_regex.") private String delimiter; + @JsonProperty("delimiter_regex") + @JsonPropertyDescription("The regular expression used as the delimiter for splitting the field. You must provide either the delimiter or the delimiter_regex.") + private String delimiterRegex; + public String getField() { return field; } diff --git a/data-prepper-plugins/trace-peer-forwarder-processor/README.md b/data-prepper-plugins/trace-peer-forwarder-processor/README.md index 94d0479507..b7dce43c35 100644 --- a/data-prepper-plugins/trace-peer-forwarder-processor/README.md +++ b/data-prepper-plugins/trace-peer-forwarder-processor/README.md @@ -1,7 +1,7 @@ # Trace Peer Forwarder Processor This processor is used to reduce the number of Events that will be forwarded in a Trace Analytics pipeline by half when using [Peer Forwarder](https://github.com/opensearch-project/data-prepper/blob/main/docs/peer_forwarder.md). -It groups the events based on `trace_id` similar to `service_map_stateful` and `otel_trace_raw ` processors. +It groups the events based on `trace_id` similar to `service_map` and `otel_traces ` processors. In [Trace Analytics pipeline](https://github.com/opensearch-project/data-prepper/blob/main/docs/trace_analytics.md#trace-analytics-pipeline) each event is duplicated, when it is sent from `otel-trace-pipeline` to `raw-pipeline` and `service-map-pipeline`. So, the event will be forwarded once in each pipeline. 
Using this processor event will be forwarded only once in `otel-trace-pipeline` to correct peer. @@ -26,7 +26,7 @@ raw-pipeline: pipeline: name: "entry-pipeline" processor: - - otel_trace_raw: + - otel_traces: sink: - opensearch: service-map-pipeline: @@ -35,7 +35,7 @@ service-map-pipeline: pipeline: name: "entry-pipeline" processor: - - service_map_stateful: + - service_map: sink: - opensearch: ``` diff --git a/data-prepper-plugins/trace-peer-forwarder-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/TracePeerForwarderProcessorConfig.java b/data-prepper-plugins/trace-peer-forwarder-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/TracePeerForwarderProcessorConfig.java index 2c53383606..b4b75ccae1 100644 --- a/data-prepper-plugins/trace-peer-forwarder-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/TracePeerForwarderProcessorConfig.java +++ b/data-prepper-plugins/trace-peer-forwarder-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/TracePeerForwarderProcessorConfig.java @@ -3,6 +3,6 @@ import com.fasterxml.jackson.annotation.JsonClassDescription; @JsonClassDescription("The trace_peer_forwarder processor is used with peer forwarder to reduce by half " + - "the number of events forwarded in a Trace Analytics pipeline. 
") + "the number of events forwarded in a Trace Analytics pipeline.") public class TracePeerForwarderProcessorConfig { } diff --git a/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/FileParameterConfig.java b/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/FileParameterConfig.java index 83dc30952e..06b488745e 100644 --- a/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/FileParameterConfig.java +++ b/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/FileParameterConfig.java @@ -19,7 +19,7 @@ public class FileParameterConfig { private String fileName; @JsonProperty("aws") - @JsonPropertyDescription("The AWS configuration when the file is an S3 object. ") + @JsonPropertyDescription("The AWS configuration when the file is an S3 object.") @Valid private S3ObjectConfig awsConfig; diff --git a/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/TargetsParameterConfig.java b/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/TargetsParameterConfig.java index 18e796b593..2c6d30271f 100644 --- a/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/TargetsParameterConfig.java +++ b/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/TargetsParameterConfig.java @@ -30,15 +30,15 @@ public class TargetsParameterConfig { @JsonProperty("map") @JsonPropertyDescription("A list of key-value pairs that define the translations. Each key represents a possible " + "value in the source field, and the corresponding value represents what it should be translated to. " + - "For examples, see [map option](#map-option). 
At least one of `map` and `regex` should be configured.") + "At least one of map and regex should be configured.") private Map map; @JsonProperty("translate_when") - @JsonPropertyDescription("Uses a [Data Prepper expression]({{site.url}}{{site.baseurl}}/data-prepper/pipelines/expression-syntax/) " + + @JsonPropertyDescription("Uses a conditional expression " + "to specify a condition for performing the translation. When specified, the expression will only translate when the condition is met.") private String translateWhen; @JsonProperty("regex") - @JsonPropertyDescription("A map of keys that defines the translation map. For more options, see [regex option](#regex-option). " + - "At least one of `map` and `regex` should be configured.") + @JsonPropertyDescription("A map of keys that defines the translation map. " + + "At least one of map and regex should be configured.") private RegexParameterConfiguration regexParameterConfig; @JsonProperty("default") @JsonPropertyDescription("The default value to use when no match is found during translation.") diff --git a/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessorConfig.java b/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessorConfig.java index b4a9df2f85..9a519f76f9 100644 --- a/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessorConfig.java +++ b/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessorConfig.java @@ -19,16 +19,16 @@ import java.util.Objects; @JsonPropertyOrder -@JsonClassDescription("The `translate` processor transforms values in events into preconfigured values.") +@JsonClassDescription("The translate processor transforms values in events into preconfigured values.") public class TranslateProcessorConfig 
{ @JsonProperty("file") - @JsonPropertyDescription("Points to the file that contains mapping configurations. For more information, see [file](#file).") + @JsonPropertyDescription("Points to the file that contains mapping configurations.") @Valid private FileParameterConfig fileParameterConfig; @JsonProperty("mappings") - @JsonPropertyDescription("Defines inline mappings. For more information, see [mappings](#mappings).") + @JsonPropertyDescription("Defines inline mappings.") @Valid private List mappingsParameterConfigs = new ArrayList<>(); diff --git a/data-prepper-plugins/truncate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/truncate/TruncateProcessorConfig.java b/data-prepper-plugins/truncate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/truncate/TruncateProcessorConfig.java index ce713d061e..bb4631dc3d 100644 --- a/data-prepper-plugins/truncate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/truncate/TruncateProcessorConfig.java +++ b/data-prepper-plugins/truncate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/truncate/TruncateProcessorConfig.java @@ -17,7 +17,7 @@ import java.util.List; @JsonPropertyOrder -@JsonClassDescription("The `truncate` processor truncates a key’s value at the beginning, the end, " + +@JsonClassDescription("The truncate processor truncates a key's value at the beginning, the end, " + "or on both sides of the value string, based on the processor’s configuration.") public class TruncateProcessorConfig { public static class Entry { @@ -27,8 +27,8 @@ public static class Entry { private List sourceKeys; @JsonProperty("start_at") - @JsonPropertyDescription("Where in the string value to start truncation. " + - "Default is `0`, which specifies to start truncation at the beginning of each key's value.") + @JsonPropertyDescription("The index into the string value to start truncation. 
" + + "Default is 0, which specifies to start truncation at the beginning of each key's value.") private Integer startAt; @JsonProperty("length") @@ -37,11 +37,12 @@ public static class Entry { private Integer length; @JsonProperty("recursive") - @JsonPropertyDescription("Recursively truncates the fields. If the value of a field is a map (json object), then it recursively applies truncate operation on the fields in the map.") + @JsonPropertyDescription("Recursively truncates the fields. If the value of a field is a map, then it recursively applies truncate operation on the fields in the map.") private Boolean recurse = false; @JsonProperty("truncate_when") - @JsonPropertyDescription("A condition that, when met, determines when the truncate operation is performed.") + @JsonPropertyDescription("A conditional expression such as '/test != false'. " + + "If specified, the truncate processor will only run on events when the expression evaluates to true. ") private String truncateWhen; public Entry(final List sourceKeys, final Integer startAt, final Integer length, final String truncateWhen, final Boolean recurse) { diff --git a/data-prepper-plugins/user-agent-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/useragent/UserAgentProcessorConfig.java b/data-prepper-plugins/user-agent-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/useragent/UserAgentProcessorConfig.java index df343d9b0a..b985d585e6 100644 --- a/data-prepper-plugins/user-agent-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/useragent/UserAgentProcessorConfig.java +++ b/data-prepper-plugins/user-agent-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/useragent/UserAgentProcessorConfig.java @@ -18,8 +18,8 @@ import java.util.List; @JsonPropertyOrder -@JsonClassDescription("The `user_agent` processor parses any user agent (UA) string in an event and then adds the " + - "parsing results to the event’s write data.") 
+@JsonClassDescription("The user_agent processor parses any user agent (UA) string in an event and then adds the " + + "parsed results to the event.") public class UserAgentProcessorConfig { private static final int DEFAULT_CACHE_SIZE = 1000; @@ -33,20 +33,20 @@ public class UserAgentProcessorConfig { @NotNull @JsonProperty("target") - @JsonPropertyDescription("The field to which the parsed event will write. Default is `user_agent`.") + @JsonPropertyDescription("The field to which the parsed event will write. Default is user_agent.") private String target = "user_agent"; @NotNull @JsonProperty("exclude_original") - @JsonPropertyDescription("Determines whether to exclude the original UA string from the parsing result. Defaults to `false`. ") + @JsonPropertyDescription("Determines whether to exclude the original UA string from the parsing result. Defaults to false.") private boolean excludeOriginal = false; @JsonProperty("cache_size") - @JsonPropertyDescription("The cache size of the parser in megabytes. Defaults to `1000`.") + @JsonPropertyDescription("The cache size of the parser in megabytes. 
Defaults to 1000.") private int cacheSize = DEFAULT_CACHE_SIZE; @JsonProperty("tags_on_parse_failure") - @JsonPropertyDescription("The tag to add to an event if the `user_agent` processor fails to parse the UA string.") + @JsonPropertyDescription("The tag to add to an event if the user_agent processor fails to parse the UA string.") private List tagsOnParseFailure; public EventKey getSource() { diff --git a/data-prepper-plugins/write-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/write_json/WriteJsonProcessorConfig.java b/data-prepper-plugins/write-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/write_json/WriteJsonProcessorConfig.java index f93e53bc24..157a9ac074 100644 --- a/data-prepper-plugins/write-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/write_json/WriteJsonProcessorConfig.java +++ b/data-prepper-plugins/write-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/write_json/WriteJsonProcessorConfig.java @@ -12,7 +12,7 @@ import jakarta.validation.constraints.NotNull; @JsonPropertyOrder -@JsonClassDescription("The `write_json` processor converts an object in an event into a JSON string.") +@JsonClassDescription("The write_json processor converts an object in an event into a JSON string.") public class WriteJsonProcessorConfig { @JsonProperty("source") @JsonPropertyDescription("Specifies the name of the field in the event containing the message or object to be parsed.") diff --git a/data-prepper-test-event/src/main/java/org/opensearch/dataprepper/core/event/TestEventConfigurationContainer.java b/data-prepper-test-event/src/main/java/org/opensearch/dataprepper/core/event/TestEventConfigurationContainer.java new file mode 100644 index 0000000000..cb28168146 --- /dev/null +++ b/data-prepper-test-event/src/main/java/org/opensearch/dataprepper/core/event/TestEventConfigurationContainer.java @@ -0,0 +1,14 @@ +/* + * Copyright OpenSearch Contributors + * 
SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.core.event; + +public class TestEventConfigurationContainer { + public static EventConfiguration testEventConfiguration() { + final EventConfiguration eventConfiguration = new EventConfiguration(); + eventConfiguration.setMaximumCachedKeys(0); + return eventConfiguration; + } +} diff --git a/data-prepper-test-event/src/main/java/org/opensearch/dataprepper/event/TestEventContext.java b/data-prepper-test-event/src/main/java/org/opensearch/dataprepper/event/TestEventContext.java index 6c5b001129..12ffeadadb 100644 --- a/data-prepper-test-event/src/main/java/org/opensearch/dataprepper/event/TestEventContext.java +++ b/data-prepper-test-event/src/main/java/org/opensearch/dataprepper/event/TestEventContext.java @@ -5,7 +5,9 @@ package org.opensearch.dataprepper.event; +import org.opensearch.dataprepper.core.event.EventConfigurationContainer; import org.opensearch.dataprepper.core.event.EventFactoryApplicationContextMarker; +import org.opensearch.dataprepper.core.event.TestEventConfigurationContainer; import org.springframework.context.annotation.AnnotationConfigApplicationContext; class TestEventContext { @@ -16,6 +18,7 @@ private TestEventContext() {} static T getFromContext(final Class targetClass) { if(APPLICATION_CONTEXT == null) { APPLICATION_CONTEXT = new AnnotationConfigApplicationContext(); + APPLICATION_CONTEXT.registerBean(EventConfigurationContainer.class, () -> TestEventConfigurationContainer::testEventConfiguration); APPLICATION_CONTEXT.scan(EventFactoryApplicationContextMarker.class.getPackageName()); APPLICATION_CONTEXT.refresh(); } diff --git a/docs/configuration.md b/docs/configuration.md index 9a3a58bdde..2a3035bc7f 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -52,7 +52,7 @@ raw-pipeline: pipeline: name: "entry-pipeline" processor: - - otel_trace_raw: + - otel_traces: sink: - stdout: service-map-pipeline: @@ -62,7 +62,7 @@ service-map-pipeline: pipeline: 
name: "entry-pipeline" processor: - - service_map_stateful: + - service_map: sink: - stdout: ``` diff --git a/docs/core_apis.md b/docs/core_apis.md index d4acc03caa..10ee66c905 100644 --- a/docs/core_apis.md +++ b/docs/core_apis.md @@ -95,7 +95,7 @@ authentication: ### Peer Forwarder Peer forwarder can be configured to enable stateful aggregation across multiple Data Prepper nodes. For more information on configuring Peer Forwarder, see [Peer Forwarder Configuration](https://github.com/opensearch-project/data-prepper/blob/main/docs/peer_forwarder.md). -It is supported by `service_map_stateful`, `otel_trace_raw` and `aggregate` processors. +It is supported by `service_map`, `otel_traces` and `aggregate` processors. ### Shutdown Timeouts When the DataPrepper `shutdown` API is invoked, the sink and processor `ExecutorService`'s are given time to gracefully shutdown and clear any in-flight data. The default graceful shutdown timeout for these `ExecutorService`'s is 10 seconds. This can be configured with the following optional parameters: diff --git a/docs/peer_forwarder.md b/docs/peer_forwarder.md index 6fa5fd2ca8..e1d4ee33ac 100644 --- a/docs/peer_forwarder.md +++ b/docs/peer_forwarder.md @@ -1,9 +1,9 @@ ## Peer Forwarder -An HTTP service which performs peer forwarding of `Event` between Data Prepper nodes for aggregation. Currently, supported by `aggregate`, `service_map_stateful`, `otel_trace_raw` processors. +An HTTP service which performs peer forwarding of `Event` between Data Prepper nodes for aggregation. Currently, supported by `aggregate`, `service_map`, `otel_traces` processors. Peer Forwarder groups events based on the identification keys provided the processors. -For `service_map_stateful` and `otel_trace_raw` it's `traceId` by default and can not be configured. +For `service_map` and `otel_traces` it's `traceId` by default and can not be configured. It's configurable for `aggregate` processor using `identification_keys` configuration option. 
You can find more information about identification keys [here](https://github.com/opensearch-project/data-prepper/tree/main/data-prepper-plugins/aggregate-processor#identification_keys). --- diff --git a/docs/trace_analytics.md b/docs/trace_analytics.md index a9a667a0be..a7cfa5df16 100644 --- a/docs/trace_analytics.md +++ b/docs/trace_analytics.md @@ -33,9 +33,9 @@ The [OpenTelemetry source](../data-prepper-plugins/otel-trace-source/README.md) ### Processor We have two processor for the Trace Analytics feature, -* *otel_trace_raw* - This is a processor that receives collection of [Span](../data-prepper-api/src/main/java/org/opensearch/dataprepper/model/trace/Span.java) records sent from [otel-trace-source](../data-prepper-plugins/otel-trace-source/README.md), does stateful processing on extracting and filling-in trace group related fields. +* *otel_traces* - This is a processor that receives collection of [Span](../data-prepper-api/src/main/java/org/opensearch/dataprepper/model/trace/Span.java) records sent from [otel-trace-source](../data-prepper-plugins/otel-trace-source/README.md), does stateful processing on extracting and filling-in trace group related fields. * *otel_trace_group* - This is a processor that fills in the missing trace group related fields in the collection of [Span](../data-prepper-api/src/main/java/org/opensearch/dataprepper/model/trace/Span.java) records by looking up the opensearch backend. -* *service_map_stateful* - This processor performs the required preprocessing on the trace data and build metadata to display the service-map OpenSearch Dashboards dashboards. +* *service_map* - This processor performs the required preprocessing on the trace data and build metadata to display the service-map OpenSearch Dashboards dashboards. ### OpenSearch sink @@ -118,7 +118,7 @@ raw-pipeline: # The raw processor does bulk request to your OpenSearch sink, so configure the batch_size higher. 
batch_size: 3200 processor: - - otel_trace_raw: + - otel_traces: - otel_trace_group: hosts: [ "https://localhost:9200" ] # Change to your credentials @@ -152,7 +152,7 @@ service-map-pipeline: pipeline: name: "otel-trace-pipeline" processor: - - service_map_stateful: + - service_map: # The window duration is the maximum length of time the data prepper stores the most recent trace data to evaluvate service-map relationships. # The default is 3 minutes, this means we can detect relationships between services from spans reported in last 3 minutes. # Set higher value if your applications have higher latency. @@ -250,7 +250,7 @@ pipeline authors the ability to configure other processors to modify spans or tr To provide a migration path, Data Prepper 1.4 introduced the following changes. * The `otel_trace_source` has an optional parameter `record_type` which can be set to `event`. When configured, it will output event objects. -* The `otel_trace_raw` replaces `otel_trace_raw_prepper` for event-based spans. +* The `otel_traces` replaces `otel_trace_raw_prepper` for event-based spans. * The `otel_trace_group` replaces `otel_trace_group_prepper` for event-based spans. In Data Prepper 2.0, the `otel_trace_source` will only output Events. 
Data Prepper 2.0 also removes diff --git a/examples/config/example-pipelines.yaml b/examples/config/example-pipelines.yaml index 40ea9df0c9..f965e87415 100644 --- a/examples/config/example-pipelines.yaml +++ b/examples/config/example-pipelines.yaml @@ -13,7 +13,7 @@ raw-pipeline: pipeline: name: "entry-pipeline" processor: - - otel_trace_raw: + - otel_traces: sink: - opensearch: hosts: [ "https://..es.amazonaws.com" ] @@ -26,7 +26,7 @@ service-map-pipeline: pipeline: name: "entry-pipeline" processor: - - service_map_stateful: + - service_map: sink: - opensearch: hosts: [ "https://..es.amazonaws.com" ] diff --git a/examples/dev/data-prepper-emf-monitoring/data-prepper-emf-demo-cfn.yaml b/examples/dev/data-prepper-emf-monitoring/data-prepper-emf-demo-cfn.yaml index dc1a82c471..78255de1d7 100644 --- a/examples/dev/data-prepper-emf-monitoring/data-prepper-emf-demo-cfn.yaml +++ b/examples/dev/data-prepper-emf-monitoring/data-prepper-emf-demo-cfn.yaml @@ -52,7 +52,7 @@ Resources: pipeline: name: "entry-pipeline" processor: - - otel_trace_raw: + - otel_traces: sink: - stdout: - Name: DATA_PREPPER_CONFIG_YAML diff --git a/examples/dev/data-prepper-emf-monitoring/pipelines-raw-trace-stdout.yaml b/examples/dev/data-prepper-emf-monitoring/pipelines-raw-trace-stdout.yaml index e71252a71c..948ddf532d 100644 --- a/examples/dev/data-prepper-emf-monitoring/pipelines-raw-trace-stdout.yaml +++ b/examples/dev/data-prepper-emf-monitoring/pipelines-raw-trace-stdout.yaml @@ -11,7 +11,7 @@ raw-pipeline: pipeline: name: "entry-pipeline" processor: - - otel_trace_raw: + - otel_traces: # trace_flush_interval: 6 sink: - stdout: diff --git a/examples/dev/k8s/data-prepper.yaml b/examples/dev/k8s/data-prepper.yaml index 3b90587aef..323ae29592 100644 --- a/examples/dev/k8s/data-prepper.yaml +++ b/examples/dev/k8s/data-prepper.yaml @@ -26,7 +26,7 @@ data: pipeline: name: "entry-pipeline" processor: - - otel_trace_raw: + - otel_traces: sink: - opensearch: hosts: [ "https://opensearch:9200" ] @@ 
-40,7 +40,7 @@ data: pipeline: name: "entry-pipeline" processor: - - service_map_stateful: + - service_map: sink: - opensearch: hosts: ["https://opensearch:9200"] diff --git a/examples/dev/trace-analytics-sample-app/resources/pipelines.yaml b/examples/dev/trace-analytics-sample-app/resources/pipelines.yaml index 535419167f..3c61358af4 100644 --- a/examples/dev/trace-analytics-sample-app/resources/pipelines.yaml +++ b/examples/dev/trace-analytics-sample-app/resources/pipelines.yaml @@ -16,7 +16,7 @@ raw-pipeline: pipeline: name: "entry-pipeline" processor: - - otel_trace_raw: + - otel_traces: sink: - opensearch: hosts: [ "https://node-0.example.com:9200" ] @@ -30,7 +30,7 @@ service-map-pipeline: pipeline: name: "entry-pipeline" processor: - - service_map_stateful: + - service_map: sink: - opensearch: hosts: ["https://node-0.example.com:9200"] diff --git a/examples/jaeger-hotrod/pipelines.yaml b/examples/jaeger-hotrod/pipelines.yaml index ef9ec67dba..8d628a6406 100644 --- a/examples/jaeger-hotrod/pipelines.yaml +++ b/examples/jaeger-hotrod/pipelines.yaml @@ -13,7 +13,7 @@ raw-pipeline: pipeline: name: "entry-pipeline" processor: - - otel_trace_raw: + - otel_traces: sink: - opensearch: hosts: [ "https://node-0.example.com:9200" ] @@ -27,7 +27,7 @@ service-map-pipeline: pipeline: name: "entry-pipeline" processor: - - service_map_stateful: + - service_map: sink: - opensearch: hosts: ["https://node-0.example.com:9200"] diff --git a/examples/metrics-ingestion-otel/README.md b/examples/metrics-ingestion-otel/README.md new file mode 100644 index 0000000000..87a99ff314 --- /dev/null +++ b/examples/metrics-ingestion-otel/README.md @@ -0,0 +1,26 @@ +# DataPrepper Metrics Ingestion from OpenTelemetry Collector + +This is an example of using the OpenTelemetry Collector to send metrics data to Data Prepper and then to OpenSearch. +The Data Prepper OTLP/gRPC endpoint is exposed at port 21891. 
+The same protocol can be used with the OpenTelemetry Collector, which listens at the OTLP default port 4317. +This setup allows you to compare both endpoints. +The Collector will forward any data to Data Prepper for indexing in OpenSearch. + +To generate some demo data, the OpenTelemetry Collector uses its host metrics receiver to acquire cpu and memory metrics on the machine it is running on. +Additionally, it scrapes the Prometheus metrics endpoint of the Data Prepper instance. +This also lets you investigate the Data Prepper metrics in OpenSearch. + +To run: + +1. Run `docker compose up` +2. Wait for everything to come up. +3. Log into OpenSearch Dashboards at http://localhost:5601 using username `admin` and password `Developer@123`. +4. Create an Index Pattern for index `otel_metrics` choosing `time` as the time field. +5. Inspect the data in the Discovery plugin. + +Useful changes and additions: + +1. The OpenTelemetry Collector has its [Logging Exporter](https://github.com/open-telemetry/opentelemetry-collector/blob/main/exporter/loggingexporter/README.md) in use. Changing the `loglevel` to `debug` or setting the `verbosity` to `detailed` will log all data to stdout. This is useful for troubleshooting. +2. The OpenTelemetry Collector can push its own metrics to Data Prepper. Follow its documentation in [Internal telemetry](https://opentelemetry.io/docs/collector/internal-telemetry/#use-internal-telemetry-to-monitor-the-collector) for details. These metrics allow comparing the event counts between the Collector and Data Prepper. +3. The OpenTelemetry Collector can be configured to translate between OTLP/HTTP and OTLP/gRPC. It can be used to proxy between sources only capable of OTLP/HTTP and Data Prepper, which only supports OTLP/gRPC. +4. The OpenTelemetry Collector can receive data from the Docker host. It can attach metadata describing the containers. Unfortunately, the required processor does not work with macOS, so this config was not provided in this example.
diff --git a/examples/metrics-ingestion-otel/docker-compose.yaml b/examples/metrics-ingestion-otel/docker-compose.yaml new file mode 100644 index 0000000000..ac639fe615 --- /dev/null +++ b/examples/metrics-ingestion-otel/docker-compose.yaml @@ -0,0 +1,69 @@ +version: '3' +services: + data-prepper: + image: opensearchproject/data-prepper + container_name: data-prepper + volumes: + - ./metric_pipeline.yaml:/usr/share/data-prepper/pipelines/metric_pipeline.yaml + - ../data-prepper-config.yaml:/usr/share/data-prepper/config/data-prepper-config.yaml + ports: + - 2021:2021 + - 21891:21891 + - 4900:4900 + expose: + - "2021" + - "4900" + - "21891" + networks: + - opensearch-net + depends_on: + - opensearch + opensearch: + container_name: opensearch + image: docker.io/opensearchproject/opensearch:latest + environment: + - discovery.type=single-node + - bootstrap.memory_lock=true # along with the memlock settings below, disables swapping + - "OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m" # minimum and maximum Java heap size, recommend setting both to 50% of system RAM + - "OPENSEARCH_INITIAL_ADMIN_PASSWORD=Developer@123" + ulimits: + memlock: + soft: -1 + hard: -1 + nofile: + soft: 65536 # maximum number of open files for the OpenSearch user, set to at least 65536 on modern systems + hard: 65536 + ports: + - 9200:9200 + - 9600:9600 # required for Performance Analyzer + networks: + - opensearch-net + dashboards: + image: docker.io/opensearchproject/opensearch-dashboards:latest + container_name: opensearch-dashboards + ports: + - 5601:5601 + expose: + - "5601" + environment: + OPENSEARCH_HOSTS: '["https://opensearch:9200"]' + depends_on: + - opensearch + networks: + - opensearch-net + otel-collector: + image: otel/opentelemetry-collector-contrib + container_name: otel-collector + command: ["--config=/etc/otel-collector-config.yml"] + volumes: + - ./otel-collector-config.yml:/etc/otel-collector-config.yml + environment: + OTEL_RESOURCE_ATTRIBUTES: service.name=otel-collector + ports: 
+ - 4317:4317 + depends_on: + - data-prepper + networks: + - opensearch-net +networks: + opensearch-net: diff --git a/examples/metrics-ingestion-otel/metric_pipeline.yaml b/examples/metrics-ingestion-otel/metric_pipeline.yaml new file mode 100644 index 0000000000..613227522f --- /dev/null +++ b/examples/metrics-ingestion-otel/metric_pipeline.yaml @@ -0,0 +1,13 @@ +metric-pipeline: + source: + otel_metrics_source: + ssl: false + processor: + - otel_metrics: + sink: + - opensearch: + hosts: [ "https://opensearch:9200" ] + insecure: true + username: admin + password: Developer@123 + index: otel_metrics diff --git a/examples/metrics-ingestion-otel/otel-collector-config.yml b/examples/metrics-ingestion-otel/otel-collector-config.yml new file mode 100644 index 0000000000..5b6cb2ee2b --- /dev/null +++ b/examples/metrics-ingestion-otel/otel-collector-config.yml @@ -0,0 +1,37 @@ +receivers: + hostmetrics: + collection_interval: 60s + scrapers: + cpu: + memory: + prometheus: + config: + scrape_configs: + - job_name: data-prepper + metrics_path: /metrics/sys + scrape_interval: 60s + static_configs: + - targets: ['data-prepper:4900'] + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 +exporters: + otlp/metrics: + endpoint: data-prepper:21891 + tls: + insecure: true + otlphttp/metrics: + metrics_endpoint: http://data-prepper:21891/opentelemetry.proto.collector.metrics.v1.MetricsService/Export + logging: +processors: + resourcedetection/env: + detectors: [env] + timeout: 2s + override: false +service: + pipelines: + metrics: + receivers: [otlp,hostmetrics,prometheus] + processors: [resourcedetection/env] + exporters: [logging, otlp/metrics] diff --git a/examples/trace_analytics.yml b/examples/trace_analytics.yml index b3f9be8b31..f5b9110409 100644 --- a/examples/trace_analytics.yml +++ b/examples/trace_analytics.yml @@ -15,7 +15,7 @@ raw-pipeline: pipeline: name: "entry-pipeline" processor: - - otel_trace_raw: + - otel_traces: sink: - opensearch: hosts: [ 
"https://node-0.example.com:9200" ] @@ -29,7 +29,7 @@ service-map-pipeline: pipeline: name: "entry-pipeline" processor: - - service_map_stateful: + - service_map: sink: - opensearch: hosts: ["https://node-0.example.com:9200"] diff --git a/examples/trace_analytics_no_ssl.yml b/examples/trace_analytics_no_ssl.yml index c82928b0d8..b7e11343f8 100644 --- a/examples/trace_analytics_no_ssl.yml +++ b/examples/trace_analytics_no_ssl.yml @@ -14,7 +14,7 @@ raw-pipeline: pipeline: name: "entry-pipeline" processor: - - otel_trace_raw: + - otel_traces: sink: - opensearch: hosts: [ "https://node-0.example.com:9200" ] @@ -28,7 +28,7 @@ service-map-pipeline: pipeline: name: "entry-pipeline" processor: - - service_map_stateful: + - service_map: sink: - opensearch: hosts: ["https://node-0.example.com:9200"] diff --git a/examples/trace_analytics_no_ssl_2x.yml b/examples/trace_analytics_no_ssl_2x.yml index c6b813cffd..a1ce76575a 100644 --- a/examples/trace_analytics_no_ssl_2x.yml +++ b/examples/trace_analytics_no_ssl_2x.yml @@ -13,7 +13,7 @@ raw-pipeline: pipeline: name: "entry-pipeline" processor: - - otel_trace_raw: + - otel_traces: sink: - opensearch: hosts: [ "https://node-0.example.com:9200" ] @@ -27,7 +27,7 @@ service-map-pipeline: pipeline: name: "entry-pipeline" processor: - - service_map_stateful: + - service_map: sink: - opensearch: hosts: ["https://node-0.example.com:9200"] diff --git a/settings.gradle b/settings.gradle index 4328fa9aac..ec5a86f255 100644 --- a/settings.gradle +++ b/settings.gradle @@ -54,7 +54,7 @@ dependencyResolutionManagement { library('bouncycastle-bcpkix', 'org.bouncycastle', 'bcpkix-jdk18on').versionRef('bouncycastle') version('guava', '32.1.2-jre') library('guava-core', 'com.google.guava', 'guava').versionRef('guava') - version('reflections', '0.9.12') + version('reflections', '0.10.2') library('reflections-core', 'org.reflections', 'reflections').versionRef('reflections') library('commons-lang3', 'org.apache.commons', 
'commons-lang3').version('3.14.0') library('commons-io', 'commons-io', 'commons-io').version('2.15.1')