From ba9273f8fae8b9c29811d96cdf6dd0dcdb8bdc0b Mon Sep 17 00:00:00 2001 From: dan-s1 Date: Fri, 6 Dec 2024 17:56:02 -0500 Subject: [PATCH] NIFI-14051 Removed unnecessary generic type declarations (#9562) Signed-off-by: David Handermann --- .../language/compile/ExpressionCompiler.java | 2 +- .../org/apache/nifi/hl7/query/HL7Query.java | 2 +- .../nifi/metrics/jvm/JmxJvmMetrics.java | 2 +- .../apache/nifi/util/NiFiPropertiesTest.java | 94 +++-- .../serialization/record/RecordFieldType.java | 2 +- .../record/TestDataTypeUtils.java | 74 ++-- .../stream/io/LeakyBucketStreamThrottler.java | 4 +- .../util/search/ahocorasick/SearchState.java | 2 +- .../stream/io/TestLeakyBucketThrottler.java | 2 +- .../nifi/util/timebuffer/TestRingBuffer.java | 8 +- .../nifi/wali/TestLengthDelimitedJournal.java | 18 +- .../nifi/amqp/processors/TestChannel.java | 2 +- .../utils/GenericAsanaObjectFetcher.java | 2 +- .../StandardRecordModelIteratorProvider.java | 2 +- ...TestJASN1RecordReaderWithComplexTypes.java | 113 +++--- .../TestJASN1RecordReaderWithSimpleTypes.java | 46 +-- .../processors/avro/ExtractAvroMetadata.java | 2 +- .../nifi/processors/avro/SplitAvro.java | 6 +- .../nifi/processors/avro/TestSplitAvro.java | 10 +- ...SCredentialsProviderControllerService.java | 2 +- .../apache/nifi/processors/aws/s3/ListS3.java | 12 +- .../StandardS3EncryptionService.java | 16 +- .../document/PutAzureCosmosDBRecordTest.java | 8 +- ...reLogAnalyticsProvenanceReportingTask.java | 4 +- .../elasticsearch/ElasticsearchException.java | 9 +- .../ElasticSearchLookupService.java | 43 +-- .../AbstractJsonQueryElasticsearch.java | 17 +- .../elasticsearch/ConsumeElasticsearch.java | 2 +- .../ElasticsearchRestProcessor.java | 6 +- .../AbstractJsonQueryElasticsearchTest.java | 3 +- .../util/bin/BinProcessingResult.java | 2 +- .../AbstractListenEventBatchingProcessor.java | 2 +- .../util/file/transfer/GetFileTransfer.java | 4 +- .../processors/hadoop/CompressionType.java | 2 +- .../util/list/AbstractListProcessor.java | 8 +- .../apache/nifi/avro/TestAvroTypeUtil.java | 21 +- .../bigquery/AbstractBigQueryProcessor.java | 2 +- .../factory/CredentialsFactory.java | 4 +- .../AbstractBooleanCredentialsStrategy.java | 2 +- .../gcp/drive/FetchGoogleDriveIT.java | 8 +- .../graph/ExecuteGraphQueryRecordTest.java | 8 +- .../graph/MockCypherClientService.java | 10 +- .../graph/TestExecuteGraphQuery.java | 5 +- .../graph/util/InMemoryGraphClient.java | 2 +- .../hadoop/TestGetHDFSFileInfo.java | 2 +- .../processors/hl7/ExtractHL7Attributes.java | 2 +- .../jms/cf/JMSConnectionFactoryHandler.java | 2 +- .../nifi/jms/processors/ConsumeJMSIT.java | 2 +- .../processors/JMSPublisherConsumerIT.java | 6 +- .../nifi/jms/processors/PublishJMSIT.java | 5 +- .../jolt/TestJoltTransformRecord.java | 52 +-- .../transformjson/TransformJSONResource.java | 2 +- .../processors/mongodb/GetMongoRecord.java | 5 +- .../nifi/processors/mongodb/GetMongoIT.java | 8 +- .../processors/mongodb/PutMongoRecordIT.java | 247 ++++--------- .../mongodb/gridfs/FetchGridFSIT.java | 4 +- .../mongodb/gridfs/PutGridFSIT.java | 10 +- .../nifi/mongodb/StubSchemaRegistry.java | 5 +- .../parquet/CalculateParquetOffsets.java | 10 +- .../CalculateParquetRowGroupOffsets.java | 13 +- .../apache/nifi/parquet/ParquetTestUtils.java | 9 +- .../nifi/parquet/TestParquetReader.java | 38 +- .../parquet/CalculateParquetOffsetsTest.java | 83 +++-- .../CalculateParquetRowGroupOffsetsTest.java | 34 +- .../processors/script/TestExecuteClojure.java | 7 +- .../processors/script/TestExecuteGroovy.java | 7 +- 
.../SiteToSiteStatusReportingTask.java | 2 +- .../TestSiteToSiteStatusReportingTask.java | 39 +-- .../processors/slack/TestConsumeSlack.java | 2 +- .../nifi/processors/smb/GetSmbFile.java | 4 +- .../nifi/processors/smb/PutSmbFile.java | 41 +-- .../apache/nifi/snmp/utils/UsmJsonParser.java | 2 +- .../TestDatabaseParameterProvider.java | 80 ++--- .../processors/standard/GenerateFlowFile.java | 2 +- .../processors/standard/GetFileResource.java | 2 +- .../nifi/processors/standard/ListFile.java | 6 +- .../nifi/processors/standard/ListenTCP.java | 2 +- .../nifi/processors/standard/ValidateCsv.java | 16 +- .../standard/sql/DefaultAvroSqlWriter.java | 2 +- .../standard/TestAttributesToCSV.java | 260 +++++++------- .../processors/standard/TestDebugFlow.java | 2 +- .../standard/TestDuplicateFlowFile.java | 6 +- .../processors/standard/TestExecuteSQL.java | 104 +++--- .../processors/standard/TestForkRecord.java | 2 +- .../standard/TestGenerateTableFetch.java | 209 +++++------ .../processors/standard/TestMergeContent.java | 324 ++++++++---------- .../processors/standard/TestModifyBytes.java | 53 ++- .../nifi/processors/standard/TestPutFile.java | 2 +- .../processors/standard/TestSplitJson.java | 8 +- .../processors/standard/TestSplitXml.java | 26 +- .../standard/TestUpdateCounter.java | 2 +- .../cache/server/set/PersistentSetCache.java | 2 +- .../TestDistributedMapCacheLookupService.java | 2 +- .../EmbeddedAvroSchemaAccessStrategy.java | 2 +- .../org/apache/nifi/grok/TestGrokReader.java | 47 +-- ...tFreeFormTextRecordSetWriterProcessor.java | 10 +- .../nifi/xml/TestWriteXMLResultUtils.java | 100 +++--- .../nifi/xml/TestXMLReaderProcessor.java | 3 +- .../apache/nifi/xml/TestXMLRecordReader.java | 101 +++--- .../xml/TestXMLRecordSetWriterProcessor.java | 5 +- .../TestAbstractPolicyBasedAuthorizer.java | 10 +- .../nifi/provenance/IndexConfiguration.java | 2 +- ...tandardAuthorizerConfigurationContext.java | 2 +- .../endpoints/CountersEndpointMerger.java | 2 +- .../SystemDiagnosticsEndpointMerger.java | 2 +- .../PropertyDescriptorDtoMergerTest.java | 33 +- .../controller/StandardProcessorNode.java | 2 +- .../repository/StandardProcessSession.java | 4 +- .../events/VolatileBulletinRepository.java | 2 +- .../org/apache/nifi/util/ReflectionUtils.java | 2 +- .../client/async/nio/TestPeerChannel.java | 4 +- .../repository/TestFileSystemRepository.java | 14 +- .../TestFlowConfigurationArchiveManager.java | 2 +- .../status/history/NodeStatusDescriptor.java | 4 +- .../nifi/audit/RemoteProcessGroupAuditor.java | 4 +- .../apache/nifi/web/api/dto/DtoFactory.java | 4 +- .../web/util/LocalComponentLifecycle.java | 2 +- .../apache/nifi/web/api/TestFlowResource.java | 3 +- .../revision/TestNaiveRevisionManager.java | 6 +- .../x509/X509AuthenticationProviderTest.java | 2 +- .../StandardComponentManifestBuilder.java | 10 +- .../nifi/util/MockBulletinRepository.java | 2 +- .../apache/nifi/util/MockPropertyValue.java | 2 +- .../util/StandardProcessorTestRunner.java | 8 +- ...tandardAuthorizerConfigurationContext.java | 2 +- .../ldap/tenants/LdapUserGroupProvider.java | 4 +- .../jackson/JacksonExtensionSerializer.java | 3 +- .../jackson/JacksonFlowContentSerializer.java | 3 +- .../jackson/JacksonSerializer.java | 3 +- ...acksonVersionedProcessGroupSerializer.java | 3 +- .../command/nifi/pg/PGChangeAllVersions.java | 4 +- .../command/nifi/pg/PGListProcessors.java | 5 +- .../processors/ChangeVersionProcessor.java | 2 +- .../cli/impl/client/TestClientTimeout.java | 2 +- 134 files changed, 1117 insertions(+), 1660 deletions(-) 
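The hunks below repeat a small set of mechanical rewrites across the codebase, the two most common being the diamond operator in place of repeated constructor type arguments and Map.of in place of double-brace HashMap initialization. A minimal, self-contained sketch of both patterns follows; the class and variable names are hypothetical and not taken from the patch.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Illustrative only: the before/after shapes this patch applies across the codebase.
public class DiamondOperatorExample {

    public static void main(String[] args) {
        // Before: the type arguments on the right-hand side repeat what the
        // declaration already states.
        List<Map<String, Integer>> explicit = new ArrayList<Map<String, Integer>>();

        // After: the diamond operator (Java 7+) lets the compiler infer the
        // same type arguments, with no change in behavior.
        List<Map<String, Integer>> inferred = new ArrayList<>();

        // Before: double-brace initialization creates an anonymous HashMap
        // subclass per call site (an extra class and, inside instance methods,
        // a captured reference to the enclosing object).
        Map<String, String> doubleBrace = new HashMap<String, String>() {{
            put("host", "localhost");
            put("port", "8443");
        }};

        // After: Map.of (Java 9+) builds an immutable map directly.
        Map<String, String> immutable = Map.of("host", "localhost", "port", "8443");

        System.out.println(explicit.equals(inferred));    // true: both empty
        System.out.println(doubleBrace.equals(immutable)); // true: same entries
    }
}

Map.of is not a drop-in replacement everywhere: it rejects null keys and values, rejects duplicate keys, caps this overload at ten entries, and returns an immutable map, which is worth checking before applying the rewrite mechanically; the patch accordingly keeps new HashMap<>() at sites that start from an empty map.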
diff --git a/nifi-commons/nifi-expression-language/src/main/java/org/apache/nifi/attribute/expression/language/compile/ExpressionCompiler.java b/nifi-commons/nifi-expression-language/src/main/java/org/apache/nifi/attribute/expression/language/compile/ExpressionCompiler.java index 9ed86ada8319..aec7166de2c9 100644 --- a/nifi-commons/nifi-expression-language/src/main/java/org/apache/nifi/attribute/expression/language/compile/ExpressionCompiler.java +++ b/nifi-commons/nifi-expression-language/src/main/java/org/apache/nifi/attribute/expression/language/compile/ExpressionCompiler.java @@ -815,7 +815,7 @@ private Evaluator<?> buildFunctionEvaluator(final Tree tree, final Evaluator<?> toStringEvaluator(argEvaluators.get(0), "first argument to contains")), "contains"); } case IN: { - List<Evaluator<String>> list = new ArrayList<Evaluator<String>>(); + List<Evaluator<String>> list = new ArrayList<>(); for (int i = 0; i < argEvaluators.size(); i++) { list.add(toStringEvaluator(argEvaluators.get(i), i + "th argument to in")); }
diff --git a/nifi-commons/nifi-hl7-query-language/src/main/java/org/apache/nifi/hl7/query/HL7Query.java b/nifi-commons/nifi-hl7-query-language/src/main/java/org/apache/nifi/hl7/query/HL7Query.java index 3734b4fe0844..874a428dd3e4 100644 --- a/nifi-commons/nifi-hl7-query-language/src/main/java/org/apache/nifi/hl7/query/HL7Query.java +++ b/nifi-commons/nifi-hl7-query-language/src/main/java/org/apache/nifi/hl7/query/HL7Query.java @@ -304,7 +304,7 @@ public QueryResult evaluate(final HL7Message message) { if (value instanceof List) { possibleValues = (List<Object>) value; } else if (value instanceof Collection) { - possibleValues = new ArrayList<Object>((Collection<Object>) value); + possibleValues = new ArrayList<>((Collection<Object>) value); } else { possibleValues = new ArrayList<>(1); possibleValues.add(value);
diff --git a/nifi-commons/nifi-metrics/src/main/java/org/apache/nifi/metrics/jvm/JmxJvmMetrics.java b/nifi-commons/nifi-metrics/src/main/java/org/apache/nifi/metrics/jvm/JmxJvmMetrics.java index ed2c3269f1ea..bbc9d14cbf34 100644 --- a/nifi-commons/nifi-metrics/src/main/java/org/apache/nifi/metrics/jvm/JmxJvmMetrics.java +++ b/nifi-commons/nifi-metrics/src/main/java/org/apache/nifi/metrics/jvm/JmxJvmMetrics.java @@ -210,7 +210,7 @@ public Set<String> deadlockedThreads() { @Override public Map<Thread.State, Double> threadStatePercentages() { int totalThreadCount = (Integer) getMetric(THREADS_COUNT); - final Map<Thread.State, Double> threadStatePercentages = new HashMap<Thread.State, Double>(); + final Map<Thread.State, Double> threadStatePercentages = new HashMap<>(); for (Thread.State state : Thread.State.values()) { threadStatePercentages.put(state, (Integer) getMetric(REGISTRY_METRICSET_THREADS + "."
+ state.name().toLowerCase() + ".count") / (double) totalThreadCount); }
diff --git a/nifi-commons/nifi-properties/src/test/java/org/apache/nifi/util/NiFiPropertiesTest.java b/nifi-commons/nifi-properties/src/test/java/org/apache/nifi/util/NiFiPropertiesTest.java index ebfa318cbe50..ef1898c19cc4 100644 --- a/nifi-commons/nifi-properties/src/test/java/org/apache/nifi/util/NiFiPropertiesTest.java +++ b/nifi-commons/nifi-properties/src/test/java/org/apache/nifi/util/NiFiPropertiesTest.java @@ -243,17 +243,14 @@ public void testShouldVerifyExceptionThrownWhenPortValueIsZero() { @Test public void testShouldHaveReasonableMaxContentLengthValues() { // Arrange with default values: - NiFiProperties properties = NiFiProperties.createBasicNiFiProperties(null, new HashMap<String, String>() {{ - }}); + NiFiProperties properties = NiFiProperties.createBasicNiFiProperties(null, new HashMap<>()); // Assert defaults match expectations: assertNull(properties.getWebMaxContentSize()); // Re-arrange with specific values: final String size = "size value"; - properties = NiFiProperties.createBasicNiFiProperties(null, new HashMap<String, String>() {{ - put(NiFiProperties.WEB_MAX_CONTENT_SIZE, size); - }}); + properties = NiFiProperties.createBasicNiFiProperties(null, Map.of(NiFiProperties.WEB_MAX_CONTENT_SIZE, size)); // Assert specific values are used: assertEquals(properties.getWebMaxContentSize(), size); @@ -261,15 +258,14 @@ public void testShouldHaveReasonableMaxContentLengthValues() { @Test public void testIsZooKeeperTlsConfigurationPresent() { - NiFiProperties properties = NiFiProperties.createBasicNiFiProperties(null, new HashMap<String, String>() {{ - put(NiFiProperties.ZOOKEEPER_CLIENT_SECURE, "true"); - put(NiFiProperties.ZOOKEEPER_SECURITY_KEYSTORE, "/a/keystore/filepath/keystore.jks"); - put(NiFiProperties.ZOOKEEPER_SECURITY_KEYSTORE_PASSWD, "password"); - put(NiFiProperties.ZOOKEEPER_SECURITY_KEYSTORE_TYPE, "JKS"); - put(NiFiProperties.ZOOKEEPER_SECURITY_TRUSTSTORE, "/a/truststore/filepath/truststore.jks"); - put(NiFiProperties.ZOOKEEPER_SECURITY_TRUSTSTORE_PASSWD, "password"); - put(NiFiProperties.ZOOKEEPER_SECURITY_TRUSTSTORE_TYPE, "JKS"); - }}); + NiFiProperties properties = NiFiProperties.createBasicNiFiProperties(null, Map.of( + NiFiProperties.ZOOKEEPER_CLIENT_SECURE, "true", + NiFiProperties.ZOOKEEPER_SECURITY_KEYSTORE, "/a/keystore/filepath/keystore.jks", + NiFiProperties.ZOOKEEPER_SECURITY_KEYSTORE_PASSWD, "password", + NiFiProperties.ZOOKEEPER_SECURITY_KEYSTORE_TYPE, "JKS", + NiFiProperties.ZOOKEEPER_SECURITY_TRUSTSTORE, "/a/truststore/filepath/truststore.jks", + NiFiProperties.ZOOKEEPER_SECURITY_TRUSTSTORE_PASSWD, "password", + NiFiProperties.ZOOKEEPER_SECURITY_TRUSTSTORE_TYPE, "JKS")); assertTrue(properties.isZooKeeperClientSecure()); assertTrue(properties.isZooKeeperTlsConfigurationPresent()); @@ -277,13 +273,12 @@ public void testIsZooKeeperTlsConfigurationPresent() { @Test public void testSomeZooKeeperTlsConfigurationIsMissing() { - NiFiProperties properties = NiFiProperties.createBasicNiFiProperties(null, new HashMap<String, String>() {{ - put(NiFiProperties.ZOOKEEPER_CLIENT_SECURE, "true"); - put(NiFiProperties.ZOOKEEPER_SECURITY_KEYSTORE_PASSWD, "password"); - put(NiFiProperties.ZOOKEEPER_SECURITY_KEYSTORE_TYPE, "JKS"); - put(NiFiProperties.ZOOKEEPER_SECURITY_TRUSTSTORE, "/a/truststore/filepath/truststore.jks"); - put(NiFiProperties.ZOOKEEPER_SECURITY_TRUSTSTORE_TYPE, "JKS"); - }}); + NiFiProperties properties = NiFiProperties.createBasicNiFiProperties(null, Map.of( + NiFiProperties.ZOOKEEPER_CLIENT_SECURE, "true", +
NiFiProperties.ZOOKEEPER_SECURITY_KEYSTORE_PASSWD, "password", + NiFiProperties.ZOOKEEPER_SECURITY_KEYSTORE_TYPE, "JKS", + NiFiProperties.ZOOKEEPER_SECURITY_TRUSTSTORE, "/a/truststore/filepath/truststore.jks", + NiFiProperties.ZOOKEEPER_SECURITY_TRUSTSTORE_TYPE, "JKS")); assertTrue(properties.isZooKeeperClientSecure()); assertFalse(properties.isZooKeeperTlsConfigurationPresent()); @@ -291,15 +286,14 @@ public void testSomeZooKeeperTlsConfigurationIsMissing() { @Test public void testZooKeeperTlsPasswordsBlank() { - NiFiProperties properties = NiFiProperties.createBasicNiFiProperties(null, new HashMap<String, String>() {{ - put(NiFiProperties.ZOOKEEPER_CLIENT_SECURE, "true"); - put(NiFiProperties.ZOOKEEPER_SECURITY_KEYSTORE, "/a/keystore/filepath/keystore.jks"); - put(NiFiProperties.ZOOKEEPER_SECURITY_KEYSTORE_PASSWD, ""); - put(NiFiProperties.ZOOKEEPER_SECURITY_KEYSTORE_TYPE, "JKS"); - put(NiFiProperties.ZOOKEEPER_SECURITY_TRUSTSTORE, "/a/truststore/filepath/truststore.jks"); - put(NiFiProperties.ZOOKEEPER_SECURITY_TRUSTSTORE_PASSWD, ""); - put(NiFiProperties.ZOOKEEPER_SECURITY_TRUSTSTORE_TYPE, "JKS"); - }}); + NiFiProperties properties = NiFiProperties.createBasicNiFiProperties(null, Map.of( + NiFiProperties.ZOOKEEPER_CLIENT_SECURE, "true", + NiFiProperties.ZOOKEEPER_SECURITY_KEYSTORE, "/a/keystore/filepath/keystore.jks", + NiFiProperties.ZOOKEEPER_SECURITY_KEYSTORE_PASSWD, "", + NiFiProperties.ZOOKEEPER_SECURITY_KEYSTORE_TYPE, "JKS", + NiFiProperties.ZOOKEEPER_SECURITY_TRUSTSTORE, "/a/truststore/filepath/truststore.jks", + NiFiProperties.ZOOKEEPER_SECURITY_TRUSTSTORE_PASSWD, "", + NiFiProperties.ZOOKEEPER_SECURITY_TRUSTSTORE_TYPE, "JKS")); assertTrue(properties.isZooKeeperClientSecure()); assertTrue(properties.isZooKeeperTlsConfigurationPresent()); @@ -307,46 +301,42 @@ public void testZooKeeperTlsPasswordsBlank() { @Test public void testKeystorePasswordIsMissing() { - NiFiProperties properties = NiFiProperties.createBasicNiFiProperties(null, new HashMap<String, String>() {{ - put(NiFiProperties.SECURITY_KEYSTORE, "/a/keystore/filepath/keystore.jks"); - put(NiFiProperties.SECURITY_KEYSTORE_TYPE, "JKS"); - put(NiFiProperties.SECURITY_TRUSTSTORE, "/a/truststore/filepath/truststore.jks"); - put(NiFiProperties.SECURITY_TRUSTSTORE_PASSWD, ""); - put(NiFiProperties.SECURITY_TRUSTSTORE_TYPE, "JKS"); - }}); + NiFiProperties properties = NiFiProperties.createBasicNiFiProperties(null, Map.of( + NiFiProperties.SECURITY_KEYSTORE, "/a/keystore/filepath/keystore.jks", + NiFiProperties.SECURITY_KEYSTORE_TYPE, "JKS", + NiFiProperties.SECURITY_TRUSTSTORE, "/a/truststore/filepath/truststore.jks", + NiFiProperties.SECURITY_TRUSTSTORE_PASSWD, "", + NiFiProperties.SECURITY_TRUSTSTORE_TYPE, "JKS")); assertFalse(properties.isTlsConfigurationPresent()); } @Test public void testTlsConfigurationIsPresentWithEmptyPasswords() { - NiFiProperties properties = NiFiProperties.createBasicNiFiProperties(null, new HashMap<String, String>() {{ - put(NiFiProperties.SECURITY_KEYSTORE, "/a/keystore/filepath/keystore.jks"); - put(NiFiProperties.SECURITY_KEYSTORE_PASSWD, ""); - put(NiFiProperties.SECURITY_KEYSTORE_TYPE, "JKS"); - put(NiFiProperties.SECURITY_TRUSTSTORE, "/a/truststore/filepath/truststore.jks"); - put(NiFiProperties.SECURITY_TRUSTSTORE_PASSWD, ""); - put(NiFiProperties.SECURITY_TRUSTSTORE_TYPE, "JKS"); - }}); + NiFiProperties properties = NiFiProperties.createBasicNiFiProperties(null, Map.of( + NiFiProperties.SECURITY_KEYSTORE, "/a/keystore/filepath/keystore.jks", + NiFiProperties.SECURITY_KEYSTORE_PASSWD, "", + NiFiProperties.SECURITY_KEYSTORE_TYPE, "JKS", +
NiFiProperties.SECURITY_TRUSTSTORE, "/a/truststore/filepath/truststore.jks", + NiFiProperties.SECURITY_TRUSTSTORE_PASSWD, "", + NiFiProperties.SECURITY_TRUSTSTORE_TYPE, "JKS")); assertTrue(properties.isTlsConfigurationPresent()); } @Test public void testTlsConfigurationIsNotPresentWithPropertiesMissing() { - NiFiProperties properties = NiFiProperties.createBasicNiFiProperties(null, new HashMap<String, String>() {{ - put(NiFiProperties.SECURITY_KEYSTORE_PASSWD, "password"); - put(NiFiProperties.SECURITY_KEYSTORE_TYPE, "JKS"); - put(NiFiProperties.SECURITY_TRUSTSTORE, "/a/truststore/filepath/truststore.jks"); - }}); + NiFiProperties properties = NiFiProperties.createBasicNiFiProperties(null, Map.of( + NiFiProperties.SECURITY_KEYSTORE_PASSWD, "password", + NiFiProperties.SECURITY_KEYSTORE_TYPE, "JKS", + NiFiProperties.SECURITY_TRUSTSTORE, "/a/truststore/filepath/truststore.jks")); assertFalse(properties.isTlsConfigurationPresent()); } @Test public void testTlsConfigurationIsNotPresentWithNoProperties() { - NiFiProperties properties = NiFiProperties.createBasicNiFiProperties(null, new HashMap<String, String>() {{ - }}); + NiFiProperties properties = NiFiProperties.createBasicNiFiProperties(null, new HashMap<>()); assertFalse(properties.isTlsConfigurationPresent()); }
diff --git a/nifi-commons/nifi-record/src/main/java/org/apache/nifi/serialization/record/RecordFieldType.java b/nifi-commons/nifi-record/src/main/java/org/apache/nifi/serialization/record/RecordFieldType.java index beceead60454..8c9b84b462e5 100644 --- a/nifi-commons/nifi-record/src/main/java/org/apache/nifi/serialization/record/RecordFieldType.java +++ b/nifi-commons/nifi-record/src/main/java/org/apache/nifi/serialization/record/RecordFieldType.java @@ -225,7 +225,7 @@ public enum RecordFieldType { MAP("map", null, new MapDataType(null)); - private static final Map<String, RecordFieldType> SIMPLE_NAME_MAP = new HashMap<String, RecordFieldType>(); + private static final Map<String, RecordFieldType> SIMPLE_NAME_MAP = new HashMap<>(); static { for (RecordFieldType value : values()) {
diff --git a/nifi-commons/nifi-record/src/test/java/org/apache/nifi/serialization/record/TestDataTypeUtils.java b/nifi-commons/nifi-record/src/test/java/org/apache/nifi/serialization/record/TestDataTypeUtils.java index db0177368b6c..0a383eb9c3f6 100644 --- a/nifi-commons/nifi-record/src/test/java/org/apache/nifi/serialization/record/TestDataTypeUtils.java +++ b/nifi-commons/nifi-record/src/test/java/org/apache/nifi/serialization/record/TestDataTypeUtils.java @@ -48,12 +48,12 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.DoubleAdder; import java.util.function.Function; -import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -251,18 +251,14 @@ public void testConvertRecordMapToJavaMap() { int[] intArray = {3, 2, 1}; - Map<String, Object> inputMap = new HashMap<String, Object>() {{ - put("field1", "hello"); - put("field2", 1); - put("field3", intArray); - }}; + Map<String, Object> inputMap = Map.of("field1", "hello", "field2", 1, "field3", intArray); resultMap = DataTypeUtils.convertRecordMapToJavaMap(inputMap, RecordFieldType.STRING.getDataType()); assertNotNull(resultMap); assertFalse(resultMap.isEmpty()); assertEquals("hello", resultMap.get("field1"));
assertEquals(1, resultMap.get("field2")); - assertTrue(resultMap.get("field3") instanceof int[]); + assertInstanceOf(int[].class, resultMap.get("field3")); assertNull(resultMap.get("field4")); } @@ -273,7 +269,7 @@ public void testUUIDStringToUUIDObject() { String uuidString = generated.toString(); Object result = DataTypeUtils.convertType(uuidString, RecordFieldType.UUID.getDataType(), "uuid_test"); - assertTrue(result instanceof UUID); + assertInstanceOf(UUID.class, result); assertEquals(generated, result); } @@ -283,7 +279,7 @@ public void testUUIDObjectToUUIDString() { String uuid = generated.toString(); Object result = DataTypeUtils.convertType(generated, RecordFieldType.STRING.getDataType(), "uuid_test"); - assertTrue(result instanceof String); + assertInstanceOf(String.class, result); assertEquals(uuid, result); } @@ -296,7 +292,7 @@ public void testUUIDToByteArray() { byte[] expected = buffer.array(); Object result = DataTypeUtils.convertType(expected, RecordFieldType.UUID.getDataType(), "uuid_test"); - assertTrue(result instanceof UUID); + assertInstanceOf(UUID.class, result); assertEquals(generated, result); } @@ -309,7 +305,7 @@ public void testByteArrayToUUID() { byte[] expected = buffer.array(); Object result = DataTypeUtils.convertType(expected, RecordFieldType.ARRAY.getDataType(), "uuid_test"); - assertTrue(result instanceof Byte[]); + assertInstanceOf(Byte[].class, result); assertEquals( 16, ((Byte[]) result).length); Byte[] bytes = (Byte[]) result; for (int x = 0; x < bytes.length; x++) { @@ -326,7 +322,7 @@ public void testConvertRecordArrayToJavaArray() { Object[] resultArray = DataTypeUtils.convertRecordArrayToJavaArray(stringArray, RecordFieldType.STRING.getDataType()); assertNotNull(resultArray); for (Object o : resultArray) { - assertTrue(o instanceof String); + assertInstanceOf(String.class, o); } } @@ -351,10 +347,10 @@ public void testConvertArrayOfRecordsToJavaArray() { Object[] recordArray = {inputRecord1, inputRecord2}; Object resultObj = DataTypeUtils.convertRecordFieldtoObject(recordArray, RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.RECORD.getRecordDataType(schema))); assertNotNull(resultObj); - assertTrue(resultObj instanceof Object[]); + assertInstanceOf(Object[].class, resultObj); Object[] resultArray = (Object[]) resultObj; for (Object o : resultArray) { - assertTrue(o instanceof Map); + assertInstanceOf(Map.class, o); } } @@ -367,17 +363,17 @@ void testConvertRecordFieldToObjectWithNestedRecord() { }}, ""); final Object obj = DataTypeUtils.convertRecordFieldtoObject(record, RecordFieldType.RECORD.getDataType()); - assertTrue(obj instanceof Map); + assertInstanceOf(Map.class, obj); final Map map = (Map) obj; assertEquals("John", map.get("firstName")); assertEquals(30, map.get("age")); - assertTrue(map.get("addresses") instanceof Object[]); + assertInstanceOf(Object[].class, map.get("addresses")); final Object[] objArray = (Object[]) map.get("addresses"); assertEquals(2, objArray.length); assertEquals("some string", objArray[0]); - assertTrue(objArray[1] instanceof Map); + assertInstanceOf(Map.class, objArray[1]); final Map addressMap = (Map) objArray[1]; assertEquals("123 Fake Street", addressMap.get("address_1")); } @@ -433,53 +429,53 @@ public void testConvertRecordFieldToObject() { final Record inputRecord = new MapRecord(schema, values); Object o = DataTypeUtils.convertRecordFieldtoObject(inputRecord, RecordFieldType.RECORD.getRecordDataType(schema)); - assertTrue(o instanceof Map); + assertInstanceOf(Map.class, o); final Map outputMap = 
(Map) o; assertEquals("hello", outputMap.get("defaultOfHello")); assertEquals("world", outputMap.get("noDefault")); o = outputMap.get("intField"); assertEquals(5, o); o = outputMap.get("intArray"); - assertTrue(o instanceof Integer[]); + assertInstanceOf(Integer[].class, o); final Integer[] intArray = (Integer[]) o; assertEquals(3, intArray.length); assertEquals((Integer) 3, intArray[0]); o = outputMap.get("objArray"); - assertTrue(o instanceof Object[]); + assertInstanceOf(Object[].class, o); final Object[] objArray = (Object[]) o; assertEquals(4, objArray.length); assertEquals(3, objArray[0]); assertEquals("2", objArray[1]); o = outputMap.get("choiceArray"); - assertTrue(o instanceof Object[]); + assertInstanceOf(Object[].class, o); final Object[] choiceArray = (Object[]) o; assertEquals(2, choiceArray.length); assertEquals("foo", choiceArray[0]); - assertTrue(choiceArray[1] instanceof Object[]); + assertInstanceOf(Object[].class, choiceArray[1]); final Object[] strArray = (Object[]) choiceArray[1]; assertEquals(2, strArray.length); assertEquals("bar", strArray[0]); assertEquals("baz", strArray[1]); o = outputMap.get("complex"); - assertTrue(o instanceof Map); + assertInstanceOf(Map.class, o); final Map nestedOutputMap = (Map) o; o = nestedOutputMap.get("complex1"); - assertTrue(o instanceof Map); + assertInstanceOf(Map.class, o); final Map complex1 = (Map) o; o = complex1.get("a"); - assertTrue(o instanceof Integer[]); + assertInstanceOf(Integer[].class, o); assertEquals((Integer) 2, ((Integer[]) o)[1]); o = complex1.get("b"); - assertTrue(o instanceof Integer[]); + assertInstanceOf(Integer[].class, o); assertEquals((Integer) 3, ((Integer[]) o)[2]); o = nestedOutputMap.get("complex2"); - assertTrue(o instanceof Map); + assertInstanceOf(Map.class, o); final Map complex2 = (Map) o; o = complex2.get("a"); - assertTrue(o instanceof String[]); + assertInstanceOf(String[].class, o); assertEquals("hello", ((String[]) o)[0]); o = complex2.get("b"); - assertTrue(o instanceof String[]); + assertInstanceOf(String[].class, o); assertEquals("4", ((String[]) o)[1]); } @@ -498,7 +494,7 @@ public void testToArray() { @Test public void testStringToBytes() { Object bytes = DataTypeUtils.convertType("Hello", RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.BYTE.getDataType()), null, StandardCharsets.UTF_8); - assertTrue(bytes instanceof Byte[]); + assertInstanceOf(Byte[].class, bytes); assertNotNull(bytes); Byte[] b = (Byte[]) bytes; assertEquals((long) 72, (long) b[0], "Conversion from String to byte[] failed"); // H @@ -512,7 +508,7 @@ public void testStringToBytes() { public void testBytesToString() { Object s = DataTypeUtils.convertType("Hello".getBytes(StandardCharsets.UTF_16), RecordFieldType.STRING.getDataType(), null, StandardCharsets.UTF_16); assertNotNull(s); - assertTrue(s instanceof String); + assertInstanceOf(String.class, s); assertEquals("Hello", s, "Conversion from byte[] to String failed"); } @@ -520,7 +516,7 @@ public void testBytesToString() { public void testBytesToBytes() { Object b = DataTypeUtils.convertType("Hello".getBytes(StandardCharsets.UTF_16), RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.BYTE.getDataType()), null, StandardCharsets.UTF_16); assertNotNull(b); - assertTrue(b instanceof Byte[]); + assertInstanceOf(Byte[].class, b); assertEquals((Object) "Hello".getBytes(StandardCharsets.UTF_16)[0], ((Byte[]) b)[0], "Conversion from byte[] to String failed at char 0"); } @@ -827,7 +823,7 @@ public void 
testChooseDataTypeWhenHasChoiceThenShouldReturnSingleMatchingFromCho testChooseDataTypeAlsoReverseTypes(value, dataTypes, expected); } - private void testChooseDataTypeAlsoReverseTypes(Object value, List dataTypes, DataType expected) { + private void testChooseDataTypeAlsoReverseTypes(Object value, List dataTypes, DataType expected) { testChooseDataType(dataTypes, value, expected); Collections.reverse(dataTypes); testChooseDataType(dataTypes, value, expected); @@ -881,12 +877,12 @@ public void testFindMostSuitableTypeWithBoolean() { @Test public void testFindMostSuitableTypeWithByte() { - testFindMostSuitableType(Byte.valueOf((byte) 123), RecordFieldType.BYTE.getDataType()); + testFindMostSuitableType((byte) 123, RecordFieldType.BYTE.getDataType()); } @Test public void testFindMostSuitableTypeWithShort() { - testFindMostSuitableType(Short.valueOf((short) 123), RecordFieldType.SHORT.getDataType()); + testFindMostSuitableType((short) 123, RecordFieldType.SHORT.getDataType()); } @Test @@ -955,7 +951,7 @@ public void testFindMostSuitableTypeWithArray() { } private void testFindMostSuitableType(Object value, DataType expected, DataType... filtered) { - List filteredOutDataTypes = Arrays.stream(filtered).collect(Collectors.toList()); + List filteredOutDataTypes = Arrays.stream(filtered).toList(); // GIVEN List unexpectedTypes = Arrays.stream(RecordFieldType.values()) @@ -972,7 +968,7 @@ private void testFindMostSuitableType(Object value, DataType expected, DataType. }) .filter(dataType -> !dataType.equals(expected)) .filter(dataType -> !filteredOutDataTypes.contains(dataType)) - .collect(Collectors.toList()); + .toList(); IntStream.rangeClosed(0, unexpectedTypes.size()).forEach(insertIndex -> { List allTypes = new LinkedList<>(unexpectedTypes); @@ -1186,7 +1182,7 @@ public void testConvertDateToUTC() { @Test public void testConvertTypeStringToDateDefaultTimeZoneFormat() { final Object converted = DataTypeUtils.convertType(ISO_8601_YEAR_MONTH_DAY, RecordFieldType.DATE.getDataType(), DATE_FIELD); - assertTrue(converted instanceof java.sql.Date, "Converted value is not java.sql.Date"); + assertInstanceOf(Date.class, converted, "Converted value is not java.sql.Date"); assertEquals(ISO_8601_YEAR_MONTH_DAY, converted.toString()); } @@ -1198,7 +1194,7 @@ public void testConvertTypeStringToDateConfiguredSystemDefaultTimeZoneFormat() { final Object converted = DataTypeUtils.convertType( CUSTOM_MONTH_DAY_YEAR, RecordFieldType.DATE.getDataType(), Optional.of(CUSTOM_MONTH_DAY_YEAR_PATTERN), Optional.empty(), Optional.empty(), "date" ); - assertTrue(converted instanceof java.sql.Date, "Converted value is not java.sql.Date"); + assertInstanceOf(Date.class, converted, "Converted value is not java.sql.Date"); assertEquals(ISO_8601_YEAR_MONTH_DAY, converted.toString()); } diff --git a/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/LeakyBucketStreamThrottler.java b/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/LeakyBucketStreamThrottler.java index 729ac8794d35..5dc4bbe1a1f3 100644 --- a/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/LeakyBucketStreamThrottler.java +++ b/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/stream/io/LeakyBucketStreamThrottler.java @@ -31,7 +31,7 @@ public class LeakyBucketStreamThrottler implements StreamThrottler { private final int maxBytesPerSecond; - private final BlockingQueue requestQueue = new LinkedBlockingQueue(); + private final BlockingQueue requestQueue = new LinkedBlockingQueue<>(); private final 
ScheduledExecutorService executorService; private final AtomicBoolean shutdown = new AtomicBoolean(false); @@ -304,7 +304,7 @@ public Request(final InputStream in, final OutputStream out, final long maxBytes this.out = out; this.in = in; this.maxBytesToCopy = maxBytesToCopy; - this.responseQueue = new LinkedBlockingQueue<Response>(1); + this.responseQueue = new LinkedBlockingQueue<>(1); } public BlockingQueue<Response> getResponseQueue() {
diff --git a/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/search/ahocorasick/SearchState.java b/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/search/ahocorasick/SearchState.java index 6d36ad0f4f1b..55fa1faf5b37 100644 --- a/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/search/ahocorasick/SearchState.java +++ b/nifi-commons/nifi-utils/src/main/java/org/apache/nifi/util/search/ahocorasick/SearchState.java @@ -52,7 +52,7 @@ public Map<SearchTerm<T>, List<Long>> getResults() { } void addResult(final SearchTerm<T> matchingTerm) { - final List<Long> indexes = (resultMap.containsKey(matchingTerm)) ? resultMap.get(matchingTerm) : new ArrayList<Long>(5); + final List<Long> indexes = (resultMap.containsKey(matchingTerm)) ? resultMap.get(matchingTerm) : new ArrayList<>(5); indexes.add(bytesRead); resultMap.put(matchingTerm, indexes); }
diff --git a/nifi-commons/nifi-utils/src/test/java/org/apache/nifi/stream/io/TestLeakyBucketThrottler.java b/nifi-commons/nifi-utils/src/test/java/org/apache/nifi/stream/io/TestLeakyBucketThrottler.java index fcf2f221acc6..a28c92e1d4cd 100644 --- a/nifi-commons/nifi-utils/src/test/java/org/apache/nifi/stream/io/TestLeakyBucketThrottler.java +++ b/nifi-commons/nifi-utils/src/test/java/org/apache/nifi/stream/io/TestLeakyBucketThrottler.java @@ -91,7 +91,7 @@ public void testDirectInterface() throws IOException, InterruptedException { try (final LeakyBucketStreamThrottler throttler = new LeakyBucketStreamThrottler(1024 * 1024); final ByteArrayOutputStream baos = new ByteArrayOutputStream()) { // create 3 threads, each sending ~2 MB - final List<Thread> threads = new ArrayList<Thread>(); + final List<Thread> threads = new ArrayList<>(); for (int i = 0; i < 3; i++) { final Thread t = new WriterThread(i, throttler, baos); threads.add(t);
diff --git a/nifi-commons/nifi-utils/src/test/java/org/apache/nifi/util/timebuffer/TestRingBuffer.java b/nifi-commons/nifi-utils/src/test/java/org/apache/nifi/util/timebuffer/TestRingBuffer.java index 66934f584119..afa57e490e38 100644 --- a/nifi-commons/nifi-utils/src/test/java/org/apache/nifi/util/timebuffer/TestRingBuffer.java +++ b/nifi-commons/nifi-utils/src/test/java/org/apache/nifi/util/timebuffer/TestRingBuffer.java @@ -81,7 +81,7 @@ public void testIterateForwards() { } final AtomicInteger countHolder = new AtomicInteger(0); - ringBuffer.forEach(new ForEachEvaluator<Integer>() { + ringBuffer.forEach(new ForEachEvaluator<>() { int counter = 0; @Override @@ -110,7 +110,7 @@ public void testIterateForwardsAfterFull() { ringBuffer.add(v); } - ringBuffer.forEach(new ForEachEvaluator<Integer>() { + ringBuffer.forEach(new ForEachEvaluator<>() { int counter = 0; @Override @@ -139,7 +139,7 @@ public void testIterateBackwards() { } final AtomicInteger countHolder = new AtomicInteger(0); - ringBuffer.forEach(new ForEachEvaluator<Integer>() { + ringBuffer.forEach(new ForEachEvaluator<>() { int counter = 0; @Override @@ -171,7 +171,7 @@ public void testIterateBackwardsAfterFull() { ringBuffer.add(v); } - ringBuffer.forEach(new ForEachEvaluator<Integer>() { + ringBuffer.forEach(new ForEachEvaluator<>() { int counter = 0; @Override
diff --git
a/nifi-commons/nifi-write-ahead-log/src/test/java/org/apache/nifi/wali/TestLengthDelimitedJournal.java b/nifi-commons/nifi-write-ahead-log/src/test/java/org/apache/nifi/wali/TestLengthDelimitedJournal.java index b0bead3b6129..2e283fbdc7da 100644 --- a/nifi-commons/nifi-write-ahead-log/src/test/java/org/apache/nifi/wali/TestLengthDelimitedJournal.java +++ b/nifi-commons/nifi-write-ahead-log/src/test/java/org/apache/nifi/wali/TestLengthDelimitedJournal.java @@ -224,7 +224,7 @@ public void testPoisonedJournalNotWritableAfterIOE() throws IOException { final Collection records = Collections.singleton(thirdRecord); for (int i = 0; i < 10; i++) { assertThrows(IOException.class, () -> journal.update(records, lookup)); - assertThrows(IOException.class, () -> journal.fsync()); + assertThrows(IOException.class, journal::fsync); } } } @@ -251,7 +251,7 @@ public void testPoisonedJournalNotWritableAfterOOME() throws IOException { final Collection records = Collections.singleton(thirdRecord); for (int i = 0; i < 10; i++) { assertThrows(IOException.class, () -> journal.update(records, lookup)); - assertThrows(IOException.class, () -> journal.fsync()); + assertThrows(IOException.class, journal::fsync); } } } @@ -289,7 +289,7 @@ public void testSuccessfulRoundTrip() throws IOException { @Test public void testMultipleThreadsCreatingOverflowDirectory() throws IOException, InterruptedException { - final LengthDelimitedJournal journal = new LengthDelimitedJournal(journalFile, serdeFactory, streamPool, 3820L, 100) { + final LengthDelimitedJournal journal = new LengthDelimitedJournal<>(journalFile, serdeFactory, streamPool, 3820L, 100) { @Override protected void createOverflowDirectory(final Path path) throws IOException { // Create the overflow directory. @@ -475,7 +475,7 @@ public DataOutputStream getDataOutputStream() { }; - final Supplier badosSupplier = new Supplier() { + final Supplier badosSupplier = new Supplier<>() { private final AtomicInteger count = new AtomicInteger(0); @Override @@ -497,15 +497,15 @@ public ByteArrayDataOutputStream get() { final Thread[] threads = new Thread[2]; - final LengthDelimitedJournal journal = new LengthDelimitedJournal(journalFile, serdeFactory, corruptingStreamPool, 0L) { + final LengthDelimitedJournal journal = new LengthDelimitedJournal<>(journalFile, serdeFactory, corruptingStreamPool, 0L) { private final AtomicInteger count = new AtomicInteger(0); @Override - protected void poison(final Throwable t) { + protected void poison(final Throwable t) { if (count.getAndIncrement() == 0) { // it is only important that we sleep the first time. If we sleep every time, it just slows the test down. 
try { Thread.sleep(3000L); - } catch (InterruptedException e) { + } catch (InterruptedException ignore) { } } @@ -524,7 +524,7 @@ protected void poison(final Throwable t) { final Thread t1 = new Thread(() -> { try { journal.update(Collections.singleton(firstRecord), key -> null); - } catch (final IOException ioe) { + } catch (final IOException ignore) { } }); @@ -532,7 +532,7 @@ protected void poison(final Throwable t) { final Thread t2 = new Thread(() -> { try { journal.update(Collections.singleton(secondRecord), key -> firstRecord); - } catch (final IOException ioe) { + } catch (final IOException ignore) { } }); diff --git a/nifi-extension-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/test/java/org/apache/nifi/amqp/processors/TestChannel.java b/nifi-extension-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/test/java/org/apache/nifi/amqp/processors/TestChannel.java index 22b4ed74a2e5..94041c15d86e 100644 --- a/nifi-extension-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/test/java/org/apache/nifi/amqp/processors/TestChannel.java +++ b/nifi-extension-bundles/nifi-amqp-bundle/nifi-amqp-processors/src/test/java/org/apache/nifi/amqp/processors/TestChannel.java @@ -86,7 +86,7 @@ public TestChannel(Map exchangeToRoutingKeyMappings, if (this.routingKeyToQueueMappings != null) { for (List queues : routingKeyToQueueMappings.values()) { for (String queue : queues) { - this.enqueuedMessages.put(queue, new ArrayBlockingQueue(100)); + this.enqueuedMessages.put(queue, new ArrayBlockingQueue<>(100)); } } } diff --git a/nifi-extension-bundles/nifi-asana-bundle/nifi-asana-processors/src/main/java/org/apache/nifi/processors/asana/utils/GenericAsanaObjectFetcher.java b/nifi-extension-bundles/nifi-asana-bundle/nifi-asana-processors/src/main/java/org/apache/nifi/processors/asana/utils/GenericAsanaObjectFetcher.java index df8d072d6a1a..559ae987b864 100644 --- a/nifi-extension-bundles/nifi-asana-bundle/nifi-asana-processors/src/main/java/org/apache/nifi/processors/asana/utils/GenericAsanaObjectFetcher.java +++ b/nifi-extension-bundles/nifi-asana-bundle/nifi-asana-processors/src/main/java/org/apache/nifi/processors/asana/utils/GenericAsanaObjectFetcher.java @@ -108,7 +108,7 @@ protected Iterator fetch() { }); return new FilterIterator<>( - new Iterator() { + new Iterator<>() { Iterator it = currentObjects.iterator(); Set unseenIds = new HashSet<>(lastFingerprints.keySet()); // copy all previously seen ids. 
diff --git a/nifi-extension-bundles/nifi-asn1-bundle/nifi-asn1-services/src/main/java/org/apache/nifi/jasn1/StandardRecordModelIteratorProvider.java b/nifi-extension-bundles/nifi-asn1-bundle/nifi-asn1-services/src/main/java/org/apache/nifi/jasn1/StandardRecordModelIteratorProvider.java index c222d0c134cf..802a5e7a8fc9 100644 --- a/nifi-extension-bundles/nifi-asn1-bundle/nifi-asn1-services/src/main/java/org/apache/nifi/jasn1/StandardRecordModelIteratorProvider.java +++ b/nifi-extension-bundles/nifi-asn1-bundle/nifi-asn1-services/src/main/java/org/apache/nifi/jasn1/StandardRecordModelIteratorProvider.java @@ -36,7 +36,7 @@ public class StandardRecordModelIteratorProvider implements RecordModelIteratorP @SuppressWarnings("unchecked") public Iterator iterator(InputStream inputStream, ComponentLog logger, Class rootClass, String recordField, Field seqOfField) { if (StringUtils.isEmpty(recordField)) { - return new Iterator() { + return new Iterator<>() { @Override public boolean hasNext() { boolean hasNext; diff --git a/nifi-extension-bundles/nifi-asn1-bundle/nifi-asn1-services/src/test/java/org/apache/nifi/jasn1/TestJASN1RecordReaderWithComplexTypes.java b/nifi-extension-bundles/nifi-asn1-bundle/nifi-asn1-services/src/test/java/org/apache/nifi/jasn1/TestJASN1RecordReaderWithComplexTypes.java index 73739d790006..b3ec5b71d92d 100644 --- a/nifi-extension-bundles/nifi-asn1-bundle/nifi-asn1-services/src/test/java/org/apache/nifi/jasn1/TestJASN1RecordReaderWithComplexTypes.java +++ b/nifi-extension-bundles/nifi-asn1-bundle/nifi-asn1-services/src/test/java/org/apache/nifi/jasn1/TestJASN1RecordReaderWithComplexTypes.java @@ -43,7 +43,6 @@ import java.math.BigInteger; import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.Function; @@ -63,9 +62,8 @@ public void testSequenceOfInteger() throws Exception { SequenceOfIntegerWrapper berValue = new SequenceOfIntegerWrapper(); berValue.setValue(value); - Map expectedValues = new HashMap() {{ - put("value", new BigInteger[]{BigInteger.valueOf(1234), BigInteger.valueOf(567)}); - }}; + Map expectedValues = + Map.of("value", new BigInteger[]{BigInteger.valueOf(1234), BigInteger.valueOf(567)}); RecordSchema expectedSchema = new SimpleRecordSchema(Arrays.asList( new RecordField("value", RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.BIGINT.getDataType()))) @@ -85,13 +83,12 @@ public void testBasicTypes() throws Exception { basicTypes.setUtf8Str(new BerUTF8String("Some UTF-8 String. こんにちは世界。")); basicTypes.setBitStr(new BerBitString(new boolean[] {true, false, true, true})); - Map expectedValues = new HashMap() {{ - put("b", true); - put("i", BigInteger.valueOf(789)); - put("octStr", "0102030405"); - put("utf8Str", "Some UTF-8 String. こんにちは世界。"); - put("bitStr", "1011"); - }}; + Map expectedValues = + Map.of("b", true, + "i", BigInteger.valueOf(789), + "octStr", "0102030405", + "utf8Str", "Some UTF-8 String. 
こんにちは世界。", + "bitStr", "1011"); RecordSchema expectedSchema = new SimpleRecordSchema(Arrays.asList( new RecordField("b", RecordFieldType.BOOLEAN.getDataType()), @@ -189,57 +186,37 @@ public void testComposite() throws Exception { return expectedSchema; }; - Function> expectedValuesProvider = __ -> new HashMap() {{ - put( + Function> expectedValuesProvider = __ -> Map.of( "child", - new MapRecord(expectedChildSchema, new HashMap() {{ - put("b", true); - put("i", BigInteger.valueOf(789)); - put("octStr", "0102030405"); - }}) - ); - put( + new MapRecord(expectedChildSchema, Map.of("b", true, + "i", BigInteger.valueOf(789), + "octStr", "0102030405")), "children", new MapRecord[]{ - new MapRecord(expectedChildSchema, new HashMap() {{ - put("b", true); - put("i", BigInteger.valueOf(0)); - put("octStr", "000000"); - }}), - new MapRecord(expectedChildSchema, new HashMap() {{ - put("b", false); - put("i", BigInteger.valueOf(1)); - put("octStr", "010101"); - }}), - new MapRecord(expectedChildSchema, new HashMap() {{ - put("b", true); - put("i", BigInteger.valueOf(2)); - put("octStr", "020202"); - }}) - } - ); - put( + new MapRecord(expectedChildSchema, Map.of("b", true, + "i", BigInteger.valueOf(0), + "octStr", "000000")), + new MapRecord(expectedChildSchema, Map.of("b", false, + "i", BigInteger.valueOf(1), + "octStr", "010101")), + new MapRecord(expectedChildSchema, Map.of("b", true, + "i", BigInteger.valueOf(2), + "octStr", "020202")) + }, "unordered", new MapRecord[]{ - new MapRecord(expectedChildSchema, new HashMap() {{ - put("b", true); - put("i", BigInteger.valueOf(0)); - put("octStr", "000000"); - }}), - new MapRecord(expectedChildSchema, new HashMap() {{ - put("b", false); - put("i", BigInteger.valueOf(1)); - put("octStr", "010101"); - }}) - } - ); - put( + new MapRecord(expectedChildSchema, Map.of("b", true, + "i", BigInteger.valueOf(0), + "octStr", "000000")), + new MapRecord(expectedChildSchema, Map.of("b", false, + "i", BigInteger.valueOf(1), + "octStr", "010101")) + }, "numbers", new BigInteger[]{ BigInteger.valueOf(0), BigInteger.valueOf(1), BigInteger.valueOf(2), BigInteger.valueOf(3), } ); - }}; testReadRecord(dataFile, composite, expectedValuesProvider, expectedSchemaProvider); } @@ -288,24 +265,16 @@ public void testRecursive() throws Exception { expectedSchema.setSchemaName("Recursive"); expectedSchema.setSchemaNamespace("org.apache.nifi.jasn1.example"); - Map expectedValues = new HashMap() {{ - put("name", "name"); - put("children", new MapRecord[]{ - new MapRecord(expectedSchema, new HashMap() {{ - put("name", "childName1"); - put("children", new MapRecord[]{ - new MapRecord(expectedSchema, new HashMap() {{ - put("name", "grandChildName11"); - put("children", new MapRecord[0]); - }}) - }); - }}), - new MapRecord(expectedSchema, new HashMap() {{ - put("name", "childName2"); - put("children", new MapRecord[0]); - }}), + Map expectedValues = Map.of("name", "name", + "children", new MapRecord[]{ + new MapRecord(expectedSchema, Map.of("name", "childName1", + "children", new MapRecord[]{ + new MapRecord(expectedSchema, Map.of("name", "grandChildName11", + "children", new MapRecord[0])) + })), + new MapRecord(expectedSchema, Map.of("name", "childName2", + "children", new MapRecord[0])), }); - }}; testReadRecord(dataFile, recursive, expectedValues, expectedSchema); } @@ -323,10 +292,8 @@ public void testInheritance() throws Exception { new RecordField("str", RecordFieldType.STRING.getDataType()) )); - Map expectedValues = new HashMap() {{ - put("i", BigInteger.valueOf(53286L)); - 
put("str", "Some UTF-8 String. こんにちは世界。"); - }}; + Map expectedValues = Map.of("i", BigInteger.valueOf(53286L), + "str", "Some UTF-8 String. こんにちは世界。"); testReadRecord(dataFile, berValue, expectedValues, expectedSchema); } diff --git a/nifi-extension-bundles/nifi-asn1-bundle/nifi-asn1-services/src/test/java/org/apache/nifi/jasn1/TestJASN1RecordReaderWithSimpleTypes.java b/nifi-extension-bundles/nifi-asn1-bundle/nifi-asn1-services/src/test/java/org/apache/nifi/jasn1/TestJASN1RecordReaderWithSimpleTypes.java index a23862429cb6..b9257291a7d1 100644 --- a/nifi-extension-bundles/nifi-asn1-bundle/nifi-asn1-services/src/test/java/org/apache/nifi/jasn1/TestJASN1RecordReaderWithSimpleTypes.java +++ b/nifi-extension-bundles/nifi-asn1-bundle/nifi-asn1-services/src/test/java/org/apache/nifi/jasn1/TestJASN1RecordReaderWithSimpleTypes.java @@ -51,7 +51,6 @@ import java.time.LocalDateTime; import java.time.LocalTime; import java.util.Arrays; -import java.util.HashMap; import java.util.Map; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -68,9 +67,7 @@ public void testBoolean() throws Exception { BooleanWrapper berValue = new BooleanWrapper(); berValue.setValue(new BerBoolean(true)); - Map expectedValues = new HashMap() {{ - put("value", true); - }}; + Map expectedValues = Map.of("value", true); RecordSchema expectedSchema = new SimpleRecordSchema(Arrays.asList( new RecordField("value", RecordFieldType.BOOLEAN.getDataType())) @@ -86,9 +83,7 @@ public void testInteger() throws Exception { IntegerWrapper berValue = new IntegerWrapper(); berValue.setValue(new BerInteger(4321234)); - Map expectedValues = new HashMap() {{ - put("value", BigInteger.valueOf(4321234)); - }}; + Map expectedValues = Map.of("value", BigInteger.valueOf(4321234)); RecordSchema expectedSchema = new SimpleRecordSchema(Arrays.asList( new RecordField("value", RecordFieldType.BIGINT.getDataType())) @@ -104,9 +99,7 @@ public void testBitString() throws Exception { BitStringWrapper berValue = new BitStringWrapper(); berValue.setValue(new BerBitString(new boolean[]{false, true, false, false, true, true, true, true, false, true, false, false})); - Map expectedValues = new HashMap() {{ - put("value", "010011110100"); - }}; + Map expectedValues = Map.of("value", "010011110100"); RecordSchema expectedSchema = new SimpleRecordSchema(Arrays.asList( new RecordField("value", RecordFieldType.STRING.getDataType())) @@ -122,9 +115,8 @@ public void testOctetString() throws Exception { OctetStringWrapper berValue = new OctetStringWrapper(); berValue.setValue(new BerOctetString("0123456789ABCDEFGHIJKLMNopqrstuvwxyz".getBytes())); - Map expectedValues = new HashMap() {{ - put("value", octetStringExpectedValueConverter("0123456789ABCDEFGHIJKLMNopqrstuvwxyz".getBytes())); - }}; + Map expectedValues = + Map.of("value", octetStringExpectedValueConverter("0123456789ABCDEFGHIJKLMNopqrstuvwxyz".getBytes())); RecordSchema expectedSchema = new SimpleRecordSchema(Arrays.asList( new RecordField("value", RecordFieldType.STRING.getDataType())) @@ -140,9 +132,7 @@ public void testUTF8StringString() throws Exception { UTF8StringWrapper berValue = new UTF8StringWrapper(); berValue.setValue(new BerUTF8String("Some UTF-8 String. こんにちは世界。")); - Map expectedValues = new HashMap() {{ - put("value", "Some UTF-8 String. こんにちは世界。"); - }}; + Map expectedValues = Map.of("value", "Some UTF-8 String. 
こんにちは世界。"); RecordSchema expectedSchema = new SimpleRecordSchema(Arrays.asList( new RecordField("value", RecordFieldType.STRING.getDataType())) @@ -158,9 +148,7 @@ public void testBMPString() throws Exception { BMPStringWrapper berValue = new BMPStringWrapper(); berValue.setValue(new BerBMPString("Some UTF-8 String. こんにちは世界。".getBytes(StandardCharsets.UTF_8))); - Map expectedValues = new HashMap() {{ - put("value", "Some UTF-8 String. こんにちは世界。"); - }}; + Map expectedValues = Map.of("value", "Some UTF-8 String. こんにちは世界。"); RecordSchema expectedSchema = new SimpleRecordSchema(Arrays.asList( new RecordField("value", RecordFieldType.STRING.getDataType())) @@ -176,9 +164,7 @@ public void testDate() throws Exception { DateWrapper berValue = new DateWrapper(); berValue.setValue(new BerDate("2019-10-16")); - Map expectedValues = new HashMap() {{ - put("value", LocalDate.parse("2019-10-16")); - }}; + Map expectedValues = Map.of("value", LocalDate.parse("2019-10-16")); RecordSchema expectedSchema = new SimpleRecordSchema(Arrays.asList( new RecordField("value", RecordFieldType.DATE.getDataType())) @@ -209,9 +195,7 @@ public void testTimeOfDay() throws Exception { TimeOfDayWrapper berValue = new TimeOfDayWrapper(); berValue.setValue(new BerTimeOfDay("16:13:12")); - Map expectedValues = new HashMap() {{ - put("value", LocalTime.parse("16:13:12")); - }}; + Map expectedValues = Map.of("value", LocalTime.parse("16:13:12")); RecordSchema expectedSchema = new SimpleRecordSchema(Arrays.asList( new RecordField("value", RecordFieldType.TIME.getDataType())) @@ -242,9 +226,7 @@ public void testDateTime() throws Exception { DateTimeWrapper berValue = new DateTimeWrapper(); berValue.setValue(new BerDateTime("2019-10-16T16:18:20")); - Map expectedValues = new HashMap() {{ - put("value", LocalDateTime.parse("2019-10-16T16:18:20")); - }}; + Map expectedValues = Map.of("value", LocalDateTime.parse("2019-10-16T16:18:20")); RecordSchema expectedSchema = new SimpleRecordSchema(Arrays.asList( new RecordField("value", RecordFieldType.TIMESTAMP.getDataType("yyyy-MM-ddTHH:mm:ss"))) @@ -275,9 +257,7 @@ public void testReal() throws Exception { RealWrapper berValue = new RealWrapper(); berValue.setValue(new BerReal(176.34D)); - Map expectedValues = new HashMap() {{ - put("value", 176.34D); - }}; + Map expectedValues = Map.of("value", 176.34D); RecordSchema expectedSchema = new SimpleRecordSchema(Arrays.asList( new RecordField("value", RecordFieldType.DOUBLE.getDataType())) @@ -293,9 +273,7 @@ public void testEnumerated() throws Exception { EnumeratedWrapper berValue = new EnumeratedWrapper(); berValue.setValue(new BerEnum(0)); - Map expectedValues = new HashMap() {{ - put("value", 0); - }}; + Map expectedValues = Map.of("value", 0); RecordSchema expectedSchema = new SimpleRecordSchema(Arrays.asList( new RecordField("value", RecordFieldType.INT.getDataType())) diff --git a/nifi-extension-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java b/nifi-extension-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java index ee27087d18bf..af1490ec491c 100644 --- a/nifi-extension-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java +++ b/nifi-extension-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/ExtractAvroMetadata.java @@ -172,7 +172,7 @@ public void onTrigger(final ProcessContext context, final 
ProcessSession session @Override public void process(InputStream rawIn) throws IOException { try (final InputStream in = new BufferedInputStream(rawIn); - final DataFileStream reader = new DataFileStream<>(in, new GenericDatumReader())) { + final DataFileStream reader = new DataFileStream<>(in, new GenericDatumReader<>())) { final Schema schema = reader.getSchema(); if (schema == null) { diff --git a/nifi-extension-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java b/nifi-extension-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java index 239c127e816c..7e3821b3dc9c 100644 --- a/nifi-extension-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java +++ b/nifi-extension-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java @@ -270,7 +270,7 @@ public List split(final ProcessSession session, final FlowFile origina @Override public void process(InputStream rawIn) throws IOException { try (final InputStream in = new BufferedInputStream(rawIn); - final DataFileStream reader = new DataFileStream<>(in, new GenericDatumReader())) { + final DataFileStream reader = new DataFileStream<>(in, new GenericDatumReader<>())) { final AtomicReference codec = new AtomicReference<>(reader.getMetaString(DataFileConstants.CODEC)); if (codec.get() == null) { @@ -278,7 +278,7 @@ public void process(InputStream rawIn) throws IOException { } // while records are left, start a new split by spawning a FlowFile - final AtomicReference hasNextHolder = new AtomicReference(reader.hasNext()); + final AtomicReference hasNextHolder = new AtomicReference<>(reader.hasNext()); while (hasNextHolder.get()) { FlowFile childFlowFile = session.create(originalFlowFile); childFlowFile = session.write(childFlowFile, new OutputStreamCallback() { @@ -347,7 +347,7 @@ public DatafileSplitWriter(final boolean transferMetadata) { @Override public void init(final DataFileStream reader, final String codec, final OutputStream out) throws IOException { - writer = new DataFileWriter<>(new GenericDatumWriter()); + writer = new DataFileWriter<>(new GenericDatumWriter<>()); if (transferMetadata) { for (String metaKey : reader.getMetaKeys()) { diff --git a/nifi-extension-bundles/nifi-avro-bundle/nifi-avro-processors/src/test/java/org/apache/nifi/processors/avro/TestSplitAvro.java b/nifi-extension-bundles/nifi-avro-bundle/nifi-avro-processors/src/test/java/org/apache/nifi/processors/avro/TestSplitAvro.java index e1f91fb4e65b..076bee835bb7 100644 --- a/nifi-extension-bundles/nifi-avro-bundle/nifi-avro-processors/src/test/java/org/apache/nifi/processors/avro/TestSplitAvro.java +++ b/nifi-extension-bundles/nifi-avro-bundle/nifi-avro-processors/src/test/java/org/apache/nifi/processors/avro/TestSplitAvro.java @@ -79,7 +79,7 @@ void createUsers(final int numUsers, final ByteArrayOutputStream users) throws I userList.add(user); } - try (final DataFileWriter dataFileWriter = new DataFileWriter<>(new GenericDatumWriter(schema))) { + try (final DataFileWriter dataFileWriter = new DataFileWriter<>(new GenericDatumWriter<>(schema))) { dataFileWriter.setMeta(META_KEY1, META_VALUE1); dataFileWriter.setMeta(META_KEY2, META_VALUE2); dataFileWriter.setMeta(META_KEY3, META_VALUE3.getBytes("UTF-8")); @@ -112,7 +112,7 @@ public void testRecordSplitDatafileOutputWithSingleRecords() throws IOException final TestRunner runner = TestRunners.newTestRunner(new 
SplitAvro()); final String filename = "users.avro"; - runner.enqueue(users.toByteArray(), new HashMap() {{ + runner.enqueue(users.toByteArray(), new HashMap<>() {{ put(CoreAttributes.FILENAME.key(), filename); }}); runner.run(); @@ -187,7 +187,7 @@ public void testRecordSplitDatafileOutputWithoutMetadata() throws IOException { for (final MockFlowFile flowFile : flowFiles) { try (final ByteArrayInputStream in = new ByteArrayInputStream(flowFile.toByteArray()); - final DataFileStream reader = new DataFileStream<>(in, new GenericDatumReader())) { + final DataFileStream reader = new DataFileStream<>(in, new GenericDatumReader<>())) { assertFalse(reader.getMetaKeys().contains(META_KEY1)); assertFalse(reader.getMetaKeys().contains(META_KEY2)); assertFalse(reader.getMetaKeys().contains(META_KEY3)); @@ -303,7 +303,7 @@ record = reader.read(record, decoder); private void checkDataFileSplitSize(List flowFiles, int expectedRecordsPerSplit, boolean checkMetadata) throws IOException { for (final MockFlowFile flowFile : flowFiles) { try (final ByteArrayInputStream in = new ByteArrayInputStream(flowFile.toByteArray()); - final DataFileStream reader = new DataFileStream<>(in, new GenericDatumReader())) { + final DataFileStream reader = new DataFileStream<>(in, new GenericDatumReader<>())) { int count = 0; GenericRecord record = null; @@ -328,7 +328,7 @@ private void checkDataFileTotalSize(List flowFiles, int expectedTo int count = 0; for (final MockFlowFile flowFile : flowFiles) { try (final ByteArrayInputStream in = new ByteArrayInputStream(flowFile.toByteArray()); - final DataFileStream reader = new DataFileStream<>(in, new GenericDatumReader())) { + final DataFileStream reader = new DataFileStream<>(in, new GenericDatumReader<>())) { GenericRecord record = null; while (reader.hasNext()) { diff --git a/nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/credentials/provider/service/AWSCredentialsProviderControllerService.java b/nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/credentials/provider/service/AWSCredentialsProviderControllerService.java index 2ddb45aeaf94..0f18aadab68c 100644 --- a/nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/credentials/provider/service/AWSCredentialsProviderControllerService.java +++ b/nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/credentials/provider/service/AWSCredentialsProviderControllerService.java @@ -346,7 +346,7 @@ private CredentialsStrategy selectPrimaryStrategy(final PropertyContext property @Override protected Collection customValidate(final ValidationContext validationContext) { final CredentialsStrategy selectedStrategy = selectPrimaryStrategy(validationContext); - final ArrayList validationFailureResults = new ArrayList(); + final ArrayList validationFailureResults = new ArrayList<>(); for (CredentialsStrategy strategy : strategies) { final Collection strategyValidationFailures = strategy.validate(validationContext, diff --git a/nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java b/nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java index 938a3fc05e0c..c9e681fb34c4 100644 --- a/nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java +++ 
diff --git a/nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java b/nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
index 938a3fc05e0c..c9e681fb34c4 100644
--- a/nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
+++ b/nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
@@ -695,12 +695,12 @@ private void listByTrackingEntities(ProcessContext context, ProcessSession session)
     .stream()
     .filter(s3VersionSummary -> s3VersionSummary.getLastModified().getTime() >= minTimestampToList
             && includeObjectInListing(s3VersionSummary, currentTime))
-    .map(s3VersionSummary -> new ListableEntityWrapper(
-            s3VersionSummary,
-            S3VersionSummary::getKey,
-            summary -> summary.getKey() + "_" + summary.getVersionId(),
-            summary -> summary.getLastModified().getTime(),
-            S3VersionSummary::getSize
+    .map(s3VersionSummary -> new ListableEntityWrapper<>(
+            s3VersionSummary,
+            S3VersionSummary::getKey,
+            summary -> summary.getKey() + "_" + summary.getVersionId(),
+            summary -> summary.getLastModified().getTime(),
+            S3VersionSummary::getSize
     ))
     .collect(Collectors.toList());
 }, null);
diff --git a/nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/encryption/StandardS3EncryptionService.java b/nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/encryption/StandardS3EncryptionService.java
index 7ecd0d3b0763..66ad6bbd70c4 100644
--- a/nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/encryption/StandardS3EncryptionService.java
+++ b/nifi-extension-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/encryption/StandardS3EncryptionService.java
@@ -46,7 +46,6 @@
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.function.Consumer;
@@ -73,14 +72,13 @@ STRATEGY_NAME_CSE_C, new ClientSideCEncryptionStrategy()
 private static final AllowableValue CSE_KMS = new AllowableValue(STRATEGY_NAME_CSE_KMS, "Client-side KMS", "Use client-side, KMS key to perform encryption.");
 private static final AllowableValue CSE_C = new AllowableValue(STRATEGY_NAME_CSE_C, "Client-side Customer Key", "Use client-side, customer-supplied key to perform encryption.");
-    public static final Map ENCRYPTION_STRATEGY_ALLOWABLE_VALUES = new HashMap() {{
-        put(STRATEGY_NAME_NONE, NONE);
-        put(STRATEGY_NAME_SSE_S3, SSE_S3);
-        put(STRATEGY_NAME_SSE_KMS, SSE_KMS);
-        put(STRATEGY_NAME_SSE_C, SSE_C);
-        put(STRATEGY_NAME_CSE_KMS, CSE_KMS);
-        put(STRATEGY_NAME_CSE_C, CSE_C);
-    }};
+    public static final Map ENCRYPTION_STRATEGY_ALLOWABLE_VALUES =
+            Map.of(STRATEGY_NAME_NONE, NONE,
+                    STRATEGY_NAME_SSE_S3, SSE_S3,
+                    STRATEGY_NAME_SSE_KMS, SSE_KMS,
+                    STRATEGY_NAME_SSE_C, SSE_C,
+                    STRATEGY_NAME_CSE_KMS, CSE_KMS,
+                    STRATEGY_NAME_CSE_C, CSE_C);
 public static final PropertyDescriptor ENCRYPTION_STRATEGY = new PropertyDescriptor.Builder()
     .name("encryption-strategy")
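Map.of, which replaces the double-brace HashMap above, returns an unmodifiable map that rejects null keys, null values, and duplicate keys, and makes no guarantee about iteration order; order-sensitive sites therefore keep LinkedHashMap, as in the reporting-task test further below. A minimal standalone sketch of the behavioral differences:

    import java.util.HashMap;
    import java.util.Map;

    class MapFactoryDemo {
        public static void main(String[] args) {
            Map<String, String> immutable = Map.of("a", "1", "b", "2");
            try {
                immutable.put("c", "3"); // Map.of results cannot be mutated
            } catch (UnsupportedOperationException expected) {
                System.out.println("Map.of is unmodifiable");
            }
            // Copy into a HashMap when mutation is still required.
            Map<String, String> mutable = new HashMap<>(immutable);
            mutable.put("c", "3");
            System.out.println(mutable.size()); // 3
        }
    }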
diff --git a/nifi-extension-bundles/nifi-azure-bundle/nifi-azure-processors/src/test/java/org/apache/nifi/processors/azure/cosmos/document/PutAzureCosmosDBRecordTest.java b/nifi-extension-bundles/nifi-azure-bundle/nifi-azure-processors/src/test/java/org/apache/nifi/processors/azure/cosmos/document/PutAzureCosmosDBRecordTest.java
index c749bc0f3989..c169c651c9df 100644
--- a/nifi-extension-bundles/nifi-azure-bundle/nifi-azure-processors/src/test/java/org/apache/nifi/processors/azure/cosmos/document/PutAzureCosmosDBRecordTest.java
+++ b/nifi-extension-bundles/nifi-azure-bundle/nifi-azure-processors/src/test/java/org/apache/nifi/processors/azure/cosmos/document/PutAzureCosmosDBRecordTest.java
@@ -142,7 +142,7 @@ public void testOnTriggerWithNestedRecords() throws Exception {
     final RecordSchema personSchema = new SimpleRecordSchema(personFields);
     recordReader.addSchemaField("person", RecordFieldType.RECORD);
-    recordReader.addRecord("1", "A", new MapRecord(personSchema, new HashMap() {
+    recordReader.addRecord("1", "A", new MapRecord(personSchema, new HashMap<>() {
         private static final long serialVersionUID = -3185956498135742190L;
         {
             put("name", "John Doe");
@@ -150,7 +150,7 @@ public void testOnTriggerWithNestedRecords() throws Exception {
             put("sport", "Soccer");
         }
     }));
-    recordReader.addRecord("2", "B", new MapRecord(personSchema, new HashMap() {
+    recordReader.addRecord("2", "B", new MapRecord(personSchema, new HashMap<>() {
         private static final long serialVersionUID = 1L;
         {
             put("name", "Jane Doe");
@@ -158,7 +158,7 @@ public void testOnTriggerWithNestedRecords() throws Exception {
             put("sport", "Tennis");
         }
     }));
-    recordReader.addRecord("3", "A", new MapRecord(personSchema, new HashMap() {
+    recordReader.addRecord("3", "A", new MapRecord(personSchema, new HashMap<>() {
         private static final long serialVersionUID = -1329194249439570573L;
         {
             put("name", "Sally Doe");
@@ -166,7 +166,7 @@ public void testOnTriggerWithNestedRecords() throws Exception {
             put("sport", "Curling");
         }
     }));
-    recordReader.addRecord("4", "C", new MapRecord(personSchema, new HashMap() {
+    recordReader.addRecord("4", "C", new MapRecord(personSchema, new HashMap<>() {
         private static final long serialVersionUID = -1329194249439570574L;
         {
             put("name", "Jimmy Doe");
diff --git a/nifi-extension-bundles/nifi-azure-bundle/nifi-azure-reporting-task/src/test/java/org/apache/nifi/reporting/azure/loganalytics/TestAzureLogAnalyticsProvenanceReportingTask.java b/nifi-extension-bundles/nifi-azure-bundle/nifi-azure-reporting-task/src/test/java/org/apache/nifi/reporting/azure/loganalytics/TestAzureLogAnalyticsProvenanceReportingTask.java
index 88515332ab9e..5f3bc6f15fef 100644
--- a/nifi-extension-bundles/nifi-azure-bundle/nifi-azure-reporting-task/src/test/java/org/apache/nifi/reporting/azure/loganalytics/TestAzureLogAnalyticsProvenanceReportingTask.java
+++ b/nifi-extension-bundles/nifi-azure-bundle/nifi-azure-reporting-task/src/test/java/org/apache/nifi/reporting/azure/loganalytics/TestAzureLogAnalyticsProvenanceReportingTask.java
@@ -64,7 +64,7 @@ public void testAddField2() {
     final Map config = Collections.emptyMap();
     final JsonBuilderFactory factory = Json.createBuilderFactory(config);
     final JsonObjectBuilder builder = factory.createObjectBuilder();
-    Map values = new LinkedHashMap();
+    Map values = new LinkedHashMap<>();
     values.put("TestKeyString1", "StringValue1");
     values.put("TestKeyString2", "StringValue2");
     AzureLogAnalyticsProvenanceReportingTask.addField(builder, factory, "TestKeyString", values, true);
@@ -80,7 +80,7 @@ public void testAddField3() {
     final Map config = Collections.emptyMap();
     final JsonBuilderFactory factory = Json.createBuilderFactory(config);
     final JsonObjectBuilder builder = factory.createObjectBuilder();
-    Collection values = new ArrayList();
+    Collection values = new ArrayList<>();
     values.add("TestValueString1");
     values.add("TestValueString2");
     AzureLogAnalyticsProvenanceReportingTask.addField(builder, factory, "TestKeyString", values, true);
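The Cosmos DB test above keeps the double-brace initializer but adds the diamond, which is legal since Java 9 for anonymous classes whose inferred type is denotable. It is worth remembering what the idiom creates: a fresh anonymous HashMap subclass whose instance initializer runs after the constructor, which is why the serializable subclasses declare serialVersionUID. A small sketch:

    import java.util.HashMap;
    import java.util.Map;

    class DoubleBraceDemo {
        // Diamond plus double-brace: allowed since Java 9.
        static final Map<String, Object> PERSON = new HashMap<>() {
            { // instance initializer, runs after HashMap's constructor
                put("name", "John Doe");
                put("age", 48);
            }
        };

        public static void main(String[] args) {
            // PERSON is an instance of an anonymous HashMap subclass.
            System.out.println(PERSON.getClass().getSuperclass()); // class java.util.HashMap
        }
    }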
diff --git a/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-client-service-api/src/main/java/org/apache/nifi/elasticsearch/ElasticsearchException.java b/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-client-service-api/src/main/java/org/apache/nifi/elasticsearch/ElasticsearchException.java
index 5919efae40b1..79c8b5c16984 100644
--- a/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-client-service-api/src/main/java/org/apache/nifi/elasticsearch/ElasticsearchException.java
+++ b/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-client-service-api/src/main/java/org/apache/nifi/elasticsearch/ElasticsearchException.java
@@ -17,7 +17,6 @@
 package org.apache.nifi.elasticsearch;
-import java.util.HashSet;
 import java.util.Set;
 public class ElasticsearchException extends RuntimeException {
@@ -25,12 +24,8 @@ public class ElasticsearchException extends RuntimeException {
  * These are names of common Elasticsearch exceptions where it is safe to assume
  * that it's OK to retry the operation instead of just sending it to an error relationship.
  */
-    public static final Set ELASTIC_ERROR_NAMES = new HashSet() {{
-        add("NoNodeAvailableException");
-        add("ElasticsearchTimeoutException");
-        add("ReceiveTimeoutTransportException");
-        add("NodeClosedException");
-    }};
+    public static final Set ELASTIC_ERROR_NAMES = Set.of("NoNodeAvailableException",
+        "ElasticsearchTimeoutException", "ReceiveTimeoutTransportException", "NodeClosedException");
 protected boolean elastic;
diff --git a/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-client-service/src/main/java/org/apache/nifi/elasticsearch/ElasticSearchLookupService.java b/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-client-service/src/main/java/org/apache/nifi/elasticsearch/ElasticSearchLookupService.java
index f25feb005afb..38b39975628b 100644
--- a/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-client-service/src/main/java/org/apache/nifi/elasticsearch/ElasticSearchLookupService.java
+++ b/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-client-service/src/main/java/org/apache/nifi/elasticsearch/ElasticSearchLookupService.java
@@ -196,13 +196,8 @@ private void validateCoordinates(final Map coordinates) throws LookupFailureException
 @SuppressWarnings("unchecked")
 private Record getById(final String _id, final Map context) throws IOException, LookupFailureException, SchemaNotFoundException {
-    final Map query = new HashMap() {{
-        put("query", new HashMap() {{
-            put("match", new HashMap() {{
-                put("_id", _id);
-            }});
-        }});
-    }};
+    final Map query = Map.of(
+        "query", Map.of("match", Map.of("_id", _id)));
     final String json = mapper.writeValueAsString(query);
@@ -229,37 +224,27 @@ record = applyMappings(record, source);
 Map getNested(final String key, final Object value) {
     final String path = key.substring(0, key.lastIndexOf("."));
-    return new HashMap() {{
-        put("path", path);
-        put("query", new HashMap() {{
-            put("match", new HashMap() {{
-                put(key, value);
-            }});
-        }});
-    }};
+    return Map.of("path", path, "query", Map.of("match", Map.of(key, value)));
 }
 private Map buildQuery(final Map coordinates) {
-    final Map query = new HashMap() {{
+    final Map query = new HashMap<>() {{
         put("bool", new HashMap() {{
             put("must", coordinates.entrySet().stream()
-            .map(e -> new HashMap() {{
-                if (e.getKey().contains(".")) {
-                    put("nested", getNested(e.getKey(), e.getValue()));
-                } else {
-                    put("match", new HashMap() {{
-                        put(e.getKey(), e.getValue());
-                    }});
-                }
-            }}).collect(Collectors.toList())
+                .map(e -> new HashMap() {{
+                    if (e.getKey().contains(".")) {
+                        put("nested", getNested(e.getKey(), e.getValue()));
+                    } else {
+                        put("match", new HashMap() {{
+                            put(e.getKey(), e.getValue());
+                        }});
+                    }
+                }}).collect(Collectors.toList())
             );
         }});
     }};
-    return new HashMap() {{
-        put("size", 1);
-        put("query", query);
-    }};
+    return Map.of("size", 1, "query", query);
 }
 @SuppressWarnings("unchecked")
diff --git a/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractJsonQueryElasticsearch.java b/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractJsonQueryElasticsearch.java
index 3694f5e24395..bd7243b038b6 100644
--- a/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractJsonQueryElasticsearch.java
+++ b/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/AbstractJsonQueryElasticsearch.java
@@ -260,16 +260,15 @@ private FlowFile writeAggregationFlowFileContents(final String name, final Integer number,
                                                   final ProcessSession session, final FlowFile aggFlowFile, final Map attributes) {
     FlowFile ff = session.write(aggFlowFile, out -> out.write(json.getBytes()));
-    ff = session.putAllAttributes(ff, new HashMap() {{
-        if (name != null) {
-            put("aggregation.name", name);
-        }
-        if (number != null) {
-            put("aggregation.number", number.toString());
-        }
-    }});
+    Map latestAttributes = new HashMap<>(attributes);
+    if (name != null) {
+        latestAttributes.put("aggregation.name", name);
+    }
+    if (number != null) {
+        latestAttributes.put("aggregation.number", number.toString());
+    }
-    return session.putAllAttributes(ff, attributes);
+    return session.putAllAttributes(ff, latestAttributes);
 }
 private void handleAggregations(final Map aggregations, final ProcessSession session,
diff --git a/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/ConsumeElasticsearch.java b/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/ConsumeElasticsearch.java
index 773b58f63bc6..23c3f56aa843 100644
--- a/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/ConsumeElasticsearch.java
+++ b/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/ConsumeElasticsearch.java
@@ -279,7 +279,7 @@ public void addQueryClause(final Map query, final Map attributes)
     filters.addAll(mapper.convertValue(additionalFilters, new TypeReference>>() { }));
 } else {
-    filters.add(mapper.convertValue(additionalFilters, new TypeReference>() {
+    filters.add(mapper.convertValue(additionalFilters, new TypeReference<>() {
     }));
 }
 }
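The Jackson call sites above rely on a detail of the Java 9 diamond-with-anonymous-classes feature: the inferred type argument is recorded as the anonymous subclass's generic superclass, so TypeReference can still recover the full target type reflectively at runtime. A sketch of the pattern in isolation:

    import com.fasterxml.jackson.core.type.TypeReference;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.Map;

    class TypeReferenceDemo {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            // The diamond infers Map<String, Object> from the assignment
            // target; the anonymous subclass preserves it for Jackson.
            Map<String, Object> parsed = mapper.readValue("{\"size\":1}", new TypeReference<>() { });
            System.out.println(parsed.get("size")); // 1
        }
    }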
diff --git a/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/ElasticsearchRestProcessor.java b/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/ElasticsearchRestProcessor.java
index 557a32e0d37b..18fe8080be8c 100644
--- a/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/ElasticsearchRestProcessor.java
+++ b/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/main/java/org/apache/nifi/processors/elasticsearch/ElasticsearchRestProcessor.java
@@ -311,9 +311,11 @@ default void addSortClause(final Map query, final Map attributes)
     final List> sortList;
     final JsonNode sort = mapper.readTree(context.getProperty(SORT).evaluateAttributeExpressions(attributes).getValue());
     if (sort.isArray()) {
-        sortList = mapper.convertValue(sort, new TypeReference>>() { });
+        sortList = mapper.convertValue(sort, new TypeReference<>() {
+        });
     } else {
-        sortList = Collections.singletonList(mapper.convertValue(sort, new TypeReference>() { }));
+        sortList = Collections.singletonList(mapper.convertValue(sort, new TypeReference<>() {
+        }));
     }
     query.put("sort", new ArrayList<>(sortList));
 }
diff --git a/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/test/java/org/apache/nifi/processors/elasticsearch/AbstractJsonQueryElasticsearchTest.java b/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/test/java/org/apache/nifi/processors/elasticsearch/AbstractJsonQueryElasticsearchTest.java
index d41637ca83d7..375bb5d82d73 100644
--- a/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/test/java/org/apache/nifi/processors/elasticsearch/AbstractJsonQueryElasticsearchTest.java
+++ b/nifi-extension-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/src/test/java/org/apache/nifi/processors/elasticsearch/AbstractJsonQueryElasticsearchTest.java
@@ -586,7 +586,8 @@ void setQuery(final TestRunner runner, final String query) throws JsonProcessingException
 }
 if (QueryDefinitionType.BUILD_QUERY.getValue().equals(runner.getProcessContext().getProperty(ElasticsearchRestProcessor.QUERY_DEFINITION_STYLE).getValue())) {
-    final Map queryMap = TEST_MAPPER.readValue(query, new TypeReference>() { });
+    final Map queryMap = TEST_MAPPER.readValue(query, new TypeReference<>() {
+    });
     if (queryMap.containsKey("query")) {
         if (runner.getProcessor() instanceof ConsumeElasticsearch) {
             runner.setProperty(ConsumeElasticsearch.RANGE_FIELD, RANGE_FIELD_NAME);
diff --git a/nifi-extension-bundles/nifi-extension-utils/nifi-bin-manager/src/main/java/org/apache/nifi/processor/util/bin/BinProcessingResult.java b/nifi-extension-bundles/nifi-extension-utils/nifi-bin-manager/src/main/java/org/apache/nifi/processor/util/bin/BinProcessingResult.java
index eb36b0ccde37..9c4db7de2bbf 100644
--- a/nifi-extension-bundles/nifi-extension-utils/nifi-bin-manager/src/main/java/org/apache/nifi/processor/util/bin/BinProcessingResult.java
+++ b/nifi-extension-bundles/nifi-extension-utils/nifi-bin-manager/src/main/java/org/apache/nifi/processor/util/bin/BinProcessingResult.java
@@ -38,7 +38,7 @@ public class BinProcessingResult {
 public BinProcessingResult(boolean isCommitted) {
     this.setCommitted(isCommitted);
-    this.setAttributes(new HashMap());
+    this.setAttributes(new HashMap<>());
 }
 public BinProcessingResult(boolean isCommitted, Map attributes) {
diff --git a/nifi-extension-bundles/nifi-extension-utils/nifi-event-listen/src/main/java/org/apache/nifi/processor/util/listen/AbstractListenEventBatchingProcessor.java b/nifi-extension-bundles/nifi-extension-utils/nifi-event-listen/src/main/java/org/apache/nifi/processor/util/listen/AbstractListenEventBatchingProcessor.java
index 88b8c742abaf..bdba71623762 100644
--- a/nifi-extension-bundles/nifi-extension-utils/nifi-event-listen/src/main/java/org/apache/nifi/processor/util/listen/AbstractListenEventBatchingProcessor.java
+++ b/nifi-extension-bundles/nifi-extension-utils/nifi-event-listen/src/main/java/org/apache/nifi/processor/util/listen/AbstractListenEventBatchingProcessor.java
@@ -174,7 +174,7 @@ protected Map getBatches(final ProcessSession session)
 // if we don't have a batch for this key then create a new one
 if (batch == null) {
-    batch = new FlowFileEventBatch(session.create(), new ArrayList());
+    batch = new FlowFileEventBatch(session.create(), new ArrayList<>());
     batches.put(batchKey, batch);
 }
diff --git a/nifi-extension-bundles/nifi-extension-utils/nifi-file-transfer/src/main/java/org/apache/nifi/processor/util/file/transfer/GetFileTransfer.java b/nifi-extension-bundles/nifi-extension-utils/nifi-file-transfer/src/main/java/org/apache/nifi/processor/util/file/transfer/GetFileTransfer.java
index b4af92b0acb4..88c8e9f18c8e 100644
--- a/nifi-extension-bundles/nifi-extension-utils/nifi-file-transfer/src/main/java/org/apache/nifi/processor/util/file/transfer/GetFileTransfer.java
+++ b/nifi-extension-bundles/nifi-extension-utils/nifi-file-transfer/src/main/java/org/apache/nifi/processor/util/file/transfer/GetFileTransfer.java
@@ -72,7 +72,7 @@ public abstract class GetFileTransfer extends AbstractProcessor {
 private final AtomicLong lastPollTime = new AtomicLong(-1L);
 private final Lock listingLock = new ReentrantLock();
 private final AtomicReference> fileQueueRef = new AtomicReference<>();
-    private final Set processing = Collections.synchronizedSet(new HashSet());
+    private final Set processing = Collections.synchronizedSet(new HashSet<>());
 // Used when transferring filenames from the File Queue to the processing queue; multiple threads can do this
 // simultaneously using the sharableTransferLock; however, in order to check if either has a given file, the
@@ -288,7 +288,7 @@ private void fetchListing(final ProcessContext context, final ProcessSession session)
 BlockingQueue queue = fileQueueRef.get();
 if (queue == null) {
     final boolean useNaturalOrdering = context.getProperty(FileTransfer.USE_NATURAL_ORDERING).asBoolean();
-    queue = useNaturalOrdering ? new PriorityBlockingQueue(25000) : new LinkedBlockingQueue(25000);
+    queue = useNaturalOrdering ? new PriorityBlockingQueue<>(25000) : new LinkedBlockingQueue<>(25000);
     fileQueueRef.set(queue);
 }
diff --git a/nifi-extension-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/CompressionType.java b/nifi-extension-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/CompressionType.java
index bf0eb088f981..b5a789451183 100644
--- a/nifi-extension-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/CompressionType.java
+++ b/nifi-extension-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/CompressionType.java
@@ -66,7 +66,7 @@ public String toString() {
 }
 public static AllowableValue[] allowableValues() {
-    List values = new ArrayList();
+    List values = new ArrayList<>();
     for (CompressionType type : CompressionType.values()) {
         values.add(new AllowableValue(type.name(), type.name(), type.getDescription()));
     }
diff --git a/nifi-extension-bundles/nifi-extension-utils/nifi-listed-entity/src/main/java/org/apache/nifi/processor/util/list/AbstractListProcessor.java b/nifi-extension-bundles/nifi-extension-utils/nifi-listed-entity/src/main/java/org/apache/nifi/processor/util/list/AbstractListProcessor.java
index 1115ade962e0..f845169a861e 100644
--- a/nifi-extension-bundles/nifi-extension-utils/nifi-listed-entity/src/main/java/org/apache/nifi/processor/util/list/AbstractListProcessor.java
+++ b/nifi-extension-bundles/nifi-extension-utils/nifi-listed-entity/src/main/java/org/apache/nifi/processor/util/list/AbstractListProcessor.java
@@ -471,7 +471,7 @@ public void listByNoTracking(final ProcessContext context, final ProcessSession session)
 final TreeMap> orderedEntries = new TreeMap<>();
 for (final T entity : entityList) {
-    List entitiesForTimestamp = orderedEntries.computeIfAbsent(entity.getTimestamp(), k -> new ArrayList());
+    List entitiesForTimestamp = orderedEntries.computeIfAbsent(entity.getTimestamp(), k -> new ArrayList<>());
     entitiesForTimestamp.add(entity);
 }
@@ -678,11 +678,7 @@ public void listByTrackingTimestamps(final ProcessContext context, final ProcessSession session)
 final boolean newEntry = minTimestampToListMillis == null
         || entityTimestampMillis >= minTimestampToListMillis && entityTimestampMillis >= lastProcessedLatestEntryTimestampMillis;
 if (newEntry) {
-    List entitiesForTimestamp = orderedEntries.get(entity.getTimestamp());
-    if (entitiesForTimestamp == null) {
-        entitiesForTimestamp = new ArrayList();
-        orderedEntries.put(entity.getTimestamp(), entitiesForTimestamp);
-    }
+    List entitiesForTimestamp = orderedEntries.computeIfAbsent(entity.getTimestamp(), k -> new ArrayList<>());
     entitiesForTimestamp.add(entity);
 }
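The listByTrackingTimestamps hunk above also collapses the get/null-check/put sequence into Map.computeIfAbsent, which returns the existing value or stores and returns the freshly computed one in a single call. A minimal sketch:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;

    class ComputeIfAbsentDemo {
        public static void main(String[] args) {
            Map<Long, List<String>> byTimestamp = new TreeMap<>();
            // First call creates and stores the list; the second reuses it.
            byTimestamp.computeIfAbsent(42L, k -> new ArrayList<>()).add("first");
            byTimestamp.computeIfAbsent(42L, k -> new ArrayList<>()).add("second");
            System.out.println(byTimestamp.get(42L)); // [first, second]
        }
    }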
diff --git a/nifi-extension-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/java/org/apache/nifi/avro/TestAvroTypeUtil.java b/nifi-extension-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/java/org/apache/nifi/avro/TestAvroTypeUtil.java
index 70e7a20103e1..54efa0f5a81b 100644
--- a/nifi-extension-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/java/org/apache/nifi/avro/TestAvroTypeUtil.java
+++ b/nifi-extension-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/java/org/apache/nifi/avro/TestAvroTypeUtil.java
@@ -791,9 +791,7 @@ public void testListAndMapConversion() {
 Map obj = new HashMap<>();
 List> list = new ArrayList<>();
 for (int x = 0; x < 10; x++) {
-    list.add(new HashMap() {{
-        put("Message", UUID.randomUUID().toString());
-    }});
+    list.add(Map.of("Message", UUID.randomUUID().toString()));
 }
 obj.put("List", list);
@@ -863,15 +861,8 @@ private void testConvertToAvroObjectAlsoReverseSchemaList(Object expected, Object
 @Test
 public void testConvertAvroMap() {
     // GIVEN
-    Map expected = new HashMap() {{
-        put(
-            "nullableMapField",
-            new HashMap() {{
-                put("key1", "value1");
-                put("key2", "value2");
-            }}
-        );
-    }};
+    Map expected = Map.of(
+        "nullableMapField", Map.of("key1", "value1", "key2", "value2"));
     Schema nullableMapFieldAvroSchema = Schema.createUnion(
         Schema.create(Type.NULL),
@@ -886,11 +877,7 @@ public void testConvertAvroMap() {
         )
     );
-    Map value = new HashMap() {{
-        put(new Utf8("key1"), "value1");
-        put(new Utf8("key2"), "value2");
-    }};
-
+    Map value = Map.of(new Utf8("key1"), "value1", new Utf8("key2"), "value2");
     Record avroRecord = new GenericRecordBuilder(avroRecordSchema)
         .set("nullableMapField", value)
         .build();
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/AbstractBigQueryProcessor.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/AbstractBigQueryProcessor.java
index 3c0e4702069f..2477abf537e8 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/AbstractBigQueryProcessor.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/AbstractBigQueryProcessor.java
@@ -151,7 +151,7 @@ public List verify(final ProcessContext context, final
 @Override
 protected final Collection customValidate(ValidationContext validationContext) {
-    final Collection results = new ArrayList(super.customValidate(validationContext));
+    final Collection results = new ArrayList<>(super.customValidate(validationContext));
     ProxyConfiguration.validateProxySpec(validationContext, results, ProxyAwareTransportFactory.PROXY_SPECS);
     final boolean projectId = validationContext.getProperty(PROJECT_ID).isSet();
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/credentials/factory/CredentialsFactory.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/credentials/factory/CredentialsFactory.java
index f4d54c22f479..31a9ffb8c6f2 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/credentials/factory/CredentialsFactory.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/credentials/factory/CredentialsFactory.java
@@ -45,7 +45,7 @@
  */
 public class CredentialsFactory {
-    private final List strategies = new ArrayList();
+    private final List strategies = new ArrayList<>();
 public CredentialsFactory() {
     // Primary Credential Strategies
@@ -78,7 +78,7 @@ public CredentialsStrategy selectPrimaryStrategy(final ValidationContext validationContext)
  */
 public Collection validate(final ValidationContext validationContext) {
     final CredentialsStrategy selectedStrategy = selectPrimaryStrategy(validationContext);
-    final ArrayList validationFailureResults = new ArrayList();
+    final ArrayList validationFailureResults = new ArrayList<>();
     for (CredentialsStrategy strategy : strategies) {
         final Collection strategyValidationFailures = strategy.validate(validationContext,
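One practical limit on these Map.of conversions: the factory is overloaded for at most ten key-value pairs. Larger literals, or pairs built from constants as in several files in this patch, can use Map.ofEntries instead, which takes varargs of Map.entry and has no fixed arity. An illustrative sketch:

    import java.util.Map;
    import static java.util.Map.entry;

    class MapOfEntriesDemo {
        static final Map<String, Integer> WORD_LENGTHS = Map.ofEntries(
                entry("alpha", 5),
                entry("beta", 4),
                entry("gamma", 5));

        public static void main(String[] args) {
            System.out.println(WORD_LENGTHS.get("beta")); // 4
        }
    }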
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/credentials/factory/strategies/AbstractBooleanCredentialsStrategy.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/credentials/factory/strategies/AbstractBooleanCredentialsStrategy.java
index 0f94a2affb22..7ed3eb33a669 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/credentials/factory/strategies/AbstractBooleanCredentialsStrategy.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/credentials/factory/strategies/AbstractBooleanCredentialsStrategy.java
@@ -54,7 +54,7 @@ public Collection validate(final ValidationContext validationContext)
 Boolean useStrategy = validationContext.getProperty(strategyProperty).asBoolean();
 if (!thisIsSelectedStrategy && (useStrategy == null ? false : useStrategy)) {
     String failureFormat = "property %1$s cannot be used with %2$s";
-    Collection validationFailureResults = new ArrayList();
+    Collection validationFailureResults = new ArrayList<>();
     String message = String.format(failureFormat, strategyProperty.getDisplayName(),
             primaryStrategy.getName());
     validationFailureResults.add(new ValidationResult.Builder()
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/test/java/org/apache/nifi/processors/gcp/drive/FetchGoogleDriveIT.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/test/java/org/apache/nifi/processors/gcp/drive/FetchGoogleDriveIT.java
index 9220b4d02e24..cb750744fead 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/test/java/org/apache/nifi/processors/gcp/drive/FetchGoogleDriveIT.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/test/java/org/apache/nifi/processors/gcp/drive/FetchGoogleDriveIT.java
@@ -69,11 +69,9 @@ void testInputFlowFileReferencesMissingFile() {
 inputFlowFileAttributes.put(GoogleDriveAttributes.FILENAME, "missing_filename");
 Set> expectedFailureAttributes = new HashSet<>(singletonList(
-    new HashMap() {{
-        put(GoogleDriveAttributes.ID, "missing");
-        put(GoogleDriveAttributes.FILENAME, "missing_filename");
-        put(GoogleDriveAttributes.ERROR_CODE, "404");
-    }}
+    Map.of(GoogleDriveAttributes.ID, "missing",
+        GoogleDriveAttributes.FILENAME, "missing_filename",
+        GoogleDriveAttributes.ERROR_CODE, "404")
 ));
 testRunner.enqueue("unimportant_data", inputFlowFileAttributes);
diff --git a/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/ExecuteGraphQueryRecordTest.java b/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/ExecuteGraphQueryRecordTest.java
index e888b0022434..06793468fd19 100644
--- a/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/ExecuteGraphQueryRecordTest.java
+++ b/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/ExecuteGraphQueryRecordTest.java
@@ -88,13 +88,7 @@ public void testFlowFileList() throws Exception {
 setupGraphClient(false);
 List> test = new ArrayList<>();
 Map tempMap = new HashMap<>();
-    tempMap.put("M", new ArrayList() {
-        {
-            add(1);
-            add(2);
-            add(3);
-        }
-    });
+    tempMap.put("M", List.of(1, 2, 3));
 test.add(tempMap);
 byte[] json = JsonOutput.toJson(test).getBytes();
diff --git a/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/MockCypherClientService.java b/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/MockCypherClientService.java
index a4e47222d14c..0b55ef12bd93 100644
--- a/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/MockCypherClientService.java
+++ b/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/MockCypherClientService.java
@@ -28,14 +28,8 @@ public class MockCypherClientService extends AbstractControllerService implements
 @Override
 public Map executeQuery(String query, Map parameters, GraphQueryResultCallback handler) {
-    handler.process(new HashMap() {{
-        put("name", "John Smith");
-        put("age", 40);
-    }}, true);
-    handler.process(new HashMap() {{
-        put("name", "John Smith");
-        put("age", 40);
-    }}, false);
+    handler.process(Map.of("name", "John Smith", "age", 40), true);
+    handler.process(Map.of("name", "John Smith", "age", 40), false);
     Map resultAttributes = new HashMap<>();
     resultAttributes.put(NODES_CREATED, String.valueOf(1));
diff --git a/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/TestExecuteGraphQuery.java b/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/TestExecuteGraphQuery.java
index f90ccd920215..cfc65b77a9bb 100644
--- a/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/TestExecuteGraphQuery.java
+++ b/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/TestExecuteGraphQuery.java
@@ -24,7 +24,6 @@
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -63,9 +62,7 @@ public void testExecuteFromBody() throws Exception {
 @Test
 public void testExecuteFromParameterWithEL() throws Exception {
     runner.setProperty(AbstractGraphExecutor.QUERY, "${query}");
-    runner.enqueue("test-data", new HashMap() {{
-        put("query", "MATCH (p:person) RETURN p");
-    }});
+    runner.enqueue("test-data", Map.of("query", "MATCH (p:person) RETURN p"));
     testExecute(1, 0, 1);
 }
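The InMemoryGraphClient hunk below applies the same diamond conversion to a type with two parameters; AbstractMap.SimpleEntry is a plain mutable Map.Entry implementation, and both arguments are inferred at once. A standalone sketch (names illustrative):

    import java.util.AbstractMap.SimpleEntry;

    class SimpleEntryDemo {
        public static void main(String[] args) {
            // Both type arguments (String, Integer) are inferred from the target type.
            SimpleEntry<String, Integer> entry = new SimpleEntry<>("answer", 42);
            System.out.println(entry.getKey() + "=" + entry.getValue()); // answer=42
        }
    }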
diff --git a/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/util/InMemoryGraphClient.java b/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/util/InMemoryGraphClient.java
index d8cc163443e6..7130c8af672f 100644
--- a/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/util/InMemoryGraphClient.java
+++ b/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/test/java/org/apache/nifi/processors/graph/util/InMemoryGraphClient.java
@@ -94,7 +94,7 @@ public Map executeQuery(String query, Map parameters)
 Map.Entry tempResult = (Map.Entry) resultSet.next();
 Map tempRetObject = new HashMap<>();
 tempRetObject.put(tempResult.getKey(), tempResult.getValue());
-    SimpleEntry returnObject = new SimpleEntry(tempResult.getKey(), tempRetObject);
+    SimpleEntry returnObject = new SimpleEntry<>(tempResult.getKey(), tempRetObject);
 Map resultReturnMap = new HashMap<>();
 resultReturnMap.put(innerResultSet.getKey(), returnObject);
 if (getLogger().isDebugEnabled()) {
diff --git a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/TestGetHDFSFileInfo.java b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/TestGetHDFSFileInfo.java
index 0d0b9ff5f843..2fd33009d08a 100644
--- a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/TestGetHDFSFileInfo.java
+++ b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/TestGetHDFSFileInfo.java
@@ -172,7 +172,7 @@ public void testValidELFunction() throws InterruptedException {
 runner.setProperty(GetHDFSFileInfo.IGNORE_DOTTED_DIRS, "true");
 runner.setProperty(GetHDFSFileInfo.IGNORE_DOTTED_FILES, "true");
 runner.setProperty(GetHDFSFileInfo.DESTINATION, GetHDFSFileInfo.DESTINATION_CONTENT);
-    runner.enqueue("foo", new HashMap());
+    runner.enqueue("foo", new HashMap<>());
 runner.run();
diff --git a/nifi-extension-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java b/nifi-extension-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
index a4f5042dae93..e0e91d36fcbc 100644
--- a/nifi-extension-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
+++ b/nifi-extension-bundles/nifi-hl7-bundle/nifi-hl7-processors/src/main/java/org/apache/nifi/processors/hl7/ExtractHL7Attributes.java
@@ -242,7 +242,7 @@ public static Map getAttributes(final Group group, final boolean
 private static Map getAllSegments(final Group group) throws HL7Exception {
     final Map segments = new TreeMap<>();
-    addSegments(group, segments, new HashMap());
+    addSegments(group, segments, new HashMap<>());
     return Collections.unmodifiableMap(segments);
 }
diff --git a/nifi-extension-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JMSConnectionFactoryHandler.java b/nifi-extension-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JMSConnectionFactoryHandler.java
index b55dc76d0f06..2b36bd4ce831 100644
--- a/nifi-extension-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JMSConnectionFactoryHandler.java
+++ b/nifi-extension-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/cf/JMSConnectionFactoryHandler.java
@@ -133,7 +133,7 @@ void setConnectionFactoryProperties(ConnectionFactory connectionFactory) {
 } else {
     String[] brokerList = brokerValue.split(",");
     if (connectionFactoryValue.startsWith("com.ibm.mq.jms")) {
-        List ibmConList = new ArrayList();
+        List ibmConList = new ArrayList<>();
         for (String broker : brokerList) {
             String[] hostPort = broker.split(":");
             if (hostPort.length == 2) {
diff --git a/nifi-extension-bundles/nifi-jms-bundle/nifi-jms-processors/src/test/java/org/apache/nifi/jms/processors/ConsumeJMSIT.java b/nifi-extension-bundles/nifi-jms-bundle/nifi-jms-processors/src/test/java/org/apache/nifi/jms/processors/ConsumeJMSIT.java
index 03032477ceea..327d13971353 100644
--- a/nifi-extension-bundles/nifi-jms-bundle/nifi-jms-processors/src/test/java/org/apache/nifi/jms/processors/ConsumeJMSIT.java
+++ b/nifi-extension-bundles/nifi-jms-bundle/nifi-jms-processors/src/test/java/org/apache/nifi/jms/processors/ConsumeJMSIT.java
@@ -376,7 +376,7 @@ protected void rendezvousWithJms(ProcessContext context, ProcessSession processSession)
 ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory("validateNIFI7034://127.0.0.1:" + port);
 final String destinationName = "nifi7034";
-    final AtomicReference tcpTransport = new AtomicReference();
+    final AtomicReference tcpTransport = new AtomicReference<>();
 TcpTransportFactory.registerTransportFactory("validateNIFI7034", new TcpTransportFactory() {
     @Override
     protected TcpTransport createTcpTransport(WireFormat wf, SocketFactory socketFactory, URI location, URI localLocation) throws UnknownHostException, IOException {
diff --git a/nifi-extension-bundles/nifi-jms-bundle/nifi-jms-processors/src/test/java/org/apache/nifi/jms/processors/JMSPublisherConsumerIT.java b/nifi-extension-bundles/nifi-jms-bundle/nifi-jms-processors/src/test/java/org/apache/nifi/jms/processors/JMSPublisherConsumerIT.java
index d41ba89dc701..650eba37ab52 100644
--- a/nifi-extension-bundles/nifi-jms-bundle/nifi-jms-processors/src/test/java/org/apache/nifi/jms/processors/JMSPublisherConsumerIT.java
+++ b/nifi-extension-bundles/nifi-jms-bundle/nifi-jms-processors/src/test/java/org/apache/nifi/jms/processors/JMSPublisherConsumerIT.java
@@ -178,8 +178,10 @@ private void testMapMessage(String destinationName, MessageCreator messageCreator)
 ObjectMapper objectMapper = new ObjectMapper();
 try {
-    Map actual = objectMapper.readValue(response.getMessageBody(), new TypeReference>() { });
-    Map expected = objectMapper.readValue(expectedJson.getBytes(), new TypeReference>() { });
+    Map actual = objectMapper.readValue(response.getMessageBody(), new TypeReference<>() {
+    });
+    Map expected = objectMapper.readValue(expectedJson.getBytes(), new TypeReference<>() {
+    });
     assertEquals(expected, actual);
 } catch (IOException e) {
diff --git a/nifi-extension-bundles/nifi-jms-bundle/nifi-jms-processors/src/test/java/org/apache/nifi/jms/processors/PublishJMSIT.java b/nifi-extension-bundles/nifi-jms-bundle/nifi-jms-processors/src/test/java/org/apache/nifi/jms/processors/PublishJMSIT.java
index 164a63aa0532..060445171c53 100644
--- a/nifi-extension-bundles/nifi-jms-bundle/nifi-jms-processors/src/test/java/org/apache/nifi/jms/processors/PublishJMSIT.java
+++ b/nifi-extension-bundles/nifi-jms-bundle/nifi-jms-processors/src/test/java/org/apache/nifi/jms/processors/PublishJMSIT.java
@@ -53,7 +53,6 @@
 import java.io.IOException;
 import java.lang.reflect.Proxy;
 import java.net.URI;
-import java.net.UnknownHostException;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -392,10 +391,10 @@ protected void rendezvousWithJms(ProcessContext context, ProcessSession processSession)
 ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory("validateNIFI7034://127.0.0.1:" + port);
 final String destinationName = "nifi7034";
-    final AtomicReference tcpTransport = new AtomicReference();
+    final AtomicReference tcpTransport = new AtomicReference<>();
 TcpTransportFactory.registerTransportFactory("validateNIFI7034", new TcpTransportFactory() {
     @Override
-    protected TcpTransport createTcpTransport(WireFormat wf, SocketFactory socketFactory, URI location, URI localLocation) throws UnknownHostException, IOException {
+    protected TcpTransport createTcpTransport(WireFormat wf, SocketFactory socketFactory, URI location, URI localLocation) throws IOException {
        TcpTransport transport = super.createTcpTransport(wf, socketFactory, location, localLocation);
        tcpTransport.set(transport);
        return transport;
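The PublishJMSIT change is safe because java.net.UnknownHostException is a subclass of java.io.IOException, so listing both was redundant, and an overriding method may always declare fewer or narrower checked exceptions than the method it overrides. A minimal sketch:

    import java.io.IOException;
    import java.net.UnknownHostException;

    class ThrowsNarrowingDemo {
        static class Base {
            void connect() throws UnknownHostException, IOException {
                // UnknownHostException is already covered by IOException.
            }
        }

        static class Derived extends Base {
            @Override
            void connect() throws IOException {
                // Legal: overrides may drop or narrow declared checked exceptions.
            }
        }
    }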
diff --git a/nifi-extension-bundles/nifi-jolt-bundle/nifi-jolt-processors/src/test/java/org/apache/nifi/processors/jolt/TestJoltTransformRecord.java b/nifi-extension-bundles/nifi-jolt-bundle/nifi-jolt-processors/src/test/java/org/apache/nifi/processors/jolt/TestJoltTransformRecord.java
index 89b140f7a2f1..7cb2b8e401de 100644
--- a/nifi-extension-bundles/nifi-jolt-bundle/nifi-jolt-processors/src/test/java/org/apache/nifi/processors/jolt/TestJoltTransformRecord.java
+++ b/nifi-extension-bundles/nifi-jolt-bundle/nifi-jolt-processors/src/test/java/org/apache/nifi/processors/jolt/TestJoltTransformRecord.java
@@ -363,34 +363,14 @@ public void testTransformInputWithShiftrMultipleOutputRecords() throws IOException
 RecordField xRecord = new RecordField("x", RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.RECORD.getRecordDataType(xSchema)));
 parser.addSchemaField(xRecord);
-    final Record record1 = new MapRecord(xSchema, new HashMap() {{
-        put("a", 1);
-        put("b", 2);
-        put("c", 3);
-    }});
-    final Record record2 = new MapRecord(xSchema, new HashMap() {{
-        put("a", 11);
-        put("b", 21);
-        put("c", 31);
-    }});
-    final Record record3 = new MapRecord(xSchema, new HashMap() {{
-        put("a", 21);
-        put("b", 2);
-        put("c", 3);
-    }});
+    final Record record1 = new MapRecord(xSchema, Map.of("a", 1, "b", 2, "c", 3));
+    final Record record2 = new MapRecord(xSchema, Map.of("a", 11, "b", 21, "c", 31));
+    final Record record3 = new MapRecord(xSchema, Map.of("a", 21, "b", 2, "c", 3));
     final Object[] recordArray1 = new Object[]{record1, record2, record3};
     parser.addRecord((Object) recordArray1);
-    final Record record4 = new MapRecord(xSchema, new HashMap() {{
-        put("a", 100);
-        put("b", 200);
-        put("c", 300);
-    }});
-    final Record record5 = new MapRecord(xSchema, new HashMap() {{
-        put("a", 101);
-        put("b", 201);
-        put("c", 301);
-    }});
+    final Record record4 = new MapRecord(xSchema, Map.of("a", 100, "b", 200, "c", 300));
+    final Record record5 = new MapRecord(xSchema, Map.of("a", 101, "b", 201, "c", 301));
     final Object[] recordArray2 = new Object[]{record4, record5};
     parser.addRecord((Object) recordArray2);
@@ -759,22 +739,12 @@ private void generateTestData(int numRecords, final BiFunction
-    final Record primaryRecord = new MapRecord(primarySchema, new HashMap() {{
-        put("value", (10 * index) + 3);
-    }});
-    final Record seriesRecord = new MapRecord(seriesSchema, new HashMap() {{
-        put("value", new Integer[]{(10 * index) + 5, (10 * index) + 4});
-    }});
-    final Record qualityRecord = new MapRecord(qualitySchema, new HashMap() {{
-        put("value", 3);
-    }});
-
-
-    Record ratingRecord = new MapRecord(ratingSchema, new HashMap() {{
-        put("primary", primaryRecord);
-        put("series", seriesRecord);
-        put("quality", qualityRecord);
-    }});
+    final Record primaryRecord = new MapRecord(primarySchema, Map.of("value", (10 * index) + 3));
+    final Record seriesRecord = new MapRecord(seriesSchema, Map.of("value", new Integer[]{(10 * index) + 5, (10 * index) + 4}));
+    final Record qualityRecord = new MapRecord(qualitySchema, Map.of("value", 3));
+
+    Record ratingRecord = new MapRecord(ratingSchema, Map.of("primary", primaryRecord,
+        "series", seriesRecord, "quality", qualityRecord));
     parser.addRecord(ratingRecord);
 }
diff --git a/nifi-extension-bundles/nifi-jolt-bundle/nifi-jolt-transform-json-ui/src/main/java/org/apache/nifi/web/standard/api/transformjson/TransformJSONResource.java b/nifi-extension-bundles/nifi-jolt-bundle/nifi-jolt-transform-json-ui/src/main/java/org/apache/nifi/web/standard/api/transformjson/TransformJSONResource.java
index 663b799a99ad..2ae56eedd33f 100644
--- a/nifi-extension-bundles/nifi-jolt-bundle/nifi-jolt-transform-json-ui/src/main/java/org/apache/nifi/web/standard/api/transformjson/TransformJSONResource.java
+++ b/nifi-extension-bundles/nifi-jolt-bundle/nifi-jolt-transform-json-ui/src/main/java/org/apache/nifi/web/standard/api/transformjson/TransformJSONResource.java
@@ -54,7 +54,7 @@ protected Object getSpecificationJsonObject(JoltSpecificationDTO specificationDTO
 if (evaluateAttributes) {
     PreparedQuery preparedQuery = Query.prepare(specificationDTO.getSpecification());
-    Map attributes = specificationDTO.getExpressionLanguageAttributes() == null ? Collections.unmodifiableMap(new HashMap())
+    Map attributes = specificationDTO.getExpressionLanguageAttributes() == null ? Collections.unmodifiableMap(new HashMap<>())
             : specificationDTO.getExpressionLanguageAttributes();
     specification = preparedQuery.evaluateExpressions(new StandardEvaluationContext(attributes), null);
 } else {
diff --git a/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/GetMongoRecord.java b/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/GetMongoRecord.java
index 28f862817684..f2d3814898ac 100644
--- a/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/GetMongoRecord.java
+++ b/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/GetMongoRecord.java
@@ -49,7 +49,6 @@
 import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -161,9 +160,7 @@ public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException
 try {
     final Map attributes = getAttributes(context, input, query, mongoCollection);
     try (OutputStream out = session.write(output)) {
-        Map attrs = inputPtr != null ? inputPtr.getAttributes() : new HashMap() {{
-            put("schema.name", schemaName);
-        }};
+        Map attrs = inputPtr != null ? inputPtr.getAttributes() : Map.of("schema.name", schemaName);
         RecordSchema schema = writerFactory.getSchema(attrs, null);
         RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out, attrs);
         long count = 0L;
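TransformJSONResource keeps Collections.unmodifiableMap(new HashMap<>()) as its empty default; Collections.emptyMap() or Map.of() would express the same contract without allocating a new map and wrapper per call. A sketch of the equivalent options (purely illustrative, not part of this commit):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    class EmptyMapDemo {
        public static void main(String[] args) {
            // The first variant allocates a HashMap plus a wrapper; the other
            // two return shared immutable instances.
            Map<String, String> wrapped = Collections.unmodifiableMap(new HashMap<>());
            Map<String, String> empty = Collections.emptyMap();
            Map<String, String> factory = Map.of();
            System.out.println(wrapped.equals(empty) && empty.equals(factory)); // true
        }
    }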
diff --git a/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/test/java/org/apache/nifi/processors/mongodb/GetMongoIT.java b/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/test/java/org/apache/nifi/processors/mongodb/GetMongoIT.java
index f2a5693947ec..e0e1c3d074c7 100644
--- a/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/test/java/org/apache/nifi/processors/mongodb/GetMongoIT.java
+++ b/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/test/java/org/apache/nifi/processors/mongodb/GetMongoIT.java
@@ -553,9 +553,7 @@ public void testInvalidQueryGoesToFailure() {
 //Test a bad flowfile attribute
 runner.setIncomingConnection(true);
 runner.setProperty(GetMongo.QUERY, "${badfromff}");
-    runner.enqueue("<>", new HashMap() {{
-        put("badfromff", "{\"prop\":}");
-    }});
+    runner.enqueue("<>", Map.of("badfromff", "{\"prop\":}"));
 runner.run();
 runner.assertTransferCount(GetMongo.REL_FAILURE, 1);
 runner.assertTransferCount(GetMongo.REL_SUCCESS, 0);
@@ -566,9 +564,7 @@ public void testInvalidQueryGoesToFailure() {
 //Test for regression on a good query from a flowfile attribute
 runner.setIncomingConnection(true);
 runner.setProperty(GetMongo.QUERY, "${badfromff}");
-    runner.enqueue("<>", new HashMap() {{
-        put("badfromff", "{}");
-    }});
+    runner.enqueue("<>", Map.of("badfromff", "{}"));
 runner.run();
 runner.assertTransferCount(GetMongo.REL_FAILURE, 0);
 runner.assertTransferCount(GetMongo.REL_SUCCESS, 3);
diff --git a/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/test/java/org/apache/nifi/processors/mongodb/PutMongoRecordIT.java b/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/test/java/org/apache/nifi/processors/mongodb/PutMongoRecordIT.java
index 1942807c1d36..749fa9786e46 100644
--- a/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/test/java/org/apache/nifi/processors/mongodb/PutMongoRecordIT.java
+++ b/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/test/java/org/apache/nifi/processors/mongodb/PutMongoRecordIT.java
@@ -160,26 +160,17 @@ public void testInsertNestedRecords() throws Exception {
 personFields.add(sportField);
 final RecordSchema personSchema = new SimpleRecordSchema(personFields);
 recordReader.addSchemaField("person", RecordFieldType.RECORD);
-    recordReader.addRecord(1, new MapRecord(personSchema, new HashMap() {{
-        put("name", "John Doe");
-        put("age", 48);
-        put("sport", "Soccer");
-    }}));
-    recordReader.addRecord(2, new MapRecord(personSchema, new HashMap() {{
-        put("name", "Jane Doe");
-        put("age", 47);
-        put("sport", "Tennis");
-    }}));
-    recordReader.addRecord(3, new MapRecord(personSchema, new HashMap() {{
-        put("name", "Sally Doe");
-        put("age", 47);
-        put("sport", "Curling");
-    }}));
-    recordReader.addRecord(4, new MapRecord(personSchema, new HashMap() {{
-        put("name", "Jimmy Doe");
-        put("age", 14);
-        put("sport", null);
-    }}));
+    recordReader.addRecord(1, new MapRecord(personSchema, Map.of("name", "John Doe",
+        "age", 48, "sport", "Soccer")));
+    recordReader.addRecord(2, new MapRecord(personSchema, Map.of("name", "Jane Doe", "age",
+        47, "sport", "Tennis")));
+    recordReader.addRecord(3, new MapRecord(personSchema, Map.of("name", "Sally Doe",
+        "age", 47, "sport", "Curling")));
+    Map mapWithNullValue = new HashMap<>();
+    mapWithNullValue.put("name", "Jimmy Doe");
+    mapWithNullValue.put("age", 14);
+    mapWithNullValue.put("sport", null);
+    recordReader.addRecord(4, new MapRecord(personSchema, mapWithNullValue));
 runner.enqueue("");
 runner.run();
@@ -230,35 +221,16 @@ void testUpsertAsInsert() throws Exception {
     new RecordField("age", RecordFieldType.INT.getDataType())
 ));
-    List> inputs = Arrays.asList(
-        Arrays.asList(
-            new Object[]{1, new MapRecord(personSchema, new HashMap() {{
-                put("name", "name1");
-                put("age", 21);
-            }})},
-            new Object[]{2, new MapRecord(personSchema, new HashMap() {{
-                put("name", "name2");
-                put("age", 22);
-            }})}
-        )
+    List> inputs = List.of(
+        List.of(
+            new Object[]{1, new MapRecord(personSchema, Map.of("name", "name1", "age", 21))},
+            new Object[]{2, new MapRecord(personSchema, Map.of("name", "name2", "age", 22))}
+        )
     );
-    Set> expected = new HashSet<>(Arrays.asList(
-        new HashMap() {{
-            put("id", 1);
-            put("person", new Document(new HashMap() {{
-                put("name", "name1");
-                put("age", 21);
-            }}));
-        }},
-        new HashMap() {{
-            put("id", 2);
-            put("person", new Document(new HashMap() {{
-                put("name", "name2");
-                put("age", 22);
-            }}));
-        }}
-    ));
+    Set> expected = Set.of(
+        Map.of("id", 1, "person", new Document(Map.of("name", "name1", "age", 21))),
+        Map.of("id", 2, "person", new Document(Map.of("name", "name2", "age", 22))));
     testUpsertSuccess(runner, inputs, expected);
 }
@@ -279,43 +251,18 @@ void testUpsertAsUpdate() throws Exception {
 List> inputs = Arrays.asList(
     Arrays.asList(
-        new Object[]{1, new MapRecord(personSchema, new HashMap() {{
-            put("name", "updating_name1");
-            put("age", "age1".length());
-        }})},
-        new Object[]{2, new MapRecord(personSchema, new HashMap() {{
-            put("name", "name2");
-            put("age", "updating_age2".length());
-        }})}
+        new Object[]{1, new MapRecord(personSchema, Map.of("name", "updating_name1", "age", "age1".length()))},
+        new Object[]{2, new MapRecord(personSchema, Map.of("name", "name2", "age", "updating_age2".length()))}
     ),
     Arrays.asList(
-        new Object[]{1, new MapRecord(personSchema, new HashMap() {{
-            put("name", "updated_name1");
-            put("age", "age1".length());
-        }})},
-        new Object[]{2, new MapRecord(personSchema, new HashMap() {{
-            put("name", "name2");
-            put("age", "updated_age2".length());
-        }})}
+        new Object[]{1, new MapRecord(personSchema, Map.of("name", "updated_name1", "age", "age1".length()))},
+        new Object[]{2, new MapRecord(personSchema, Map.of("name", "name2", "age", "updated_age2".length()))}
     )
 );
 Set> expected = new HashSet<>(Arrays.asList(
-    new HashMap() {{
-        put("id", 1);
-        put("person", new Document(new HashMap() {{
-            put("name", "updated_name1");
-            put("age", "age1".length());
-        }}));
-    }},
-    new HashMap() {{
-        put("id", 2);
-        put("person", new Document(new HashMap() {{
-            put("name", "name2");
-            put("age", "updated_age2".length());
-        }}));
-    }}
-    ));
+    Map.of("id", 1, "person", new Document(Map.of("name", "updated_name1", "age", "age1".length()))),
+    Map.of("id", 2, "person", new Document(Map.of("name", "name2", "age", "updated_age2".length())))));
 testUpsertSuccess(runner, inputs, expected);
 }
@@ -336,39 +283,17 @@ void testUpsertAsInsertAndUpdate() throws Exception {
 List> inputs = Arrays.asList(
     Collections.singletonList(
-        new Object[]{1, new MapRecord(personSchema, new HashMap() {{
-            put("name", "updating_name1");
-            put("age", "updating_age1".length());
-        }})}
+        new Object[]{1, new MapRecord(personSchema, Map.of("name", "updating_name1", "age", "updating_age1".length()))}
     ),
     Arrays.asList(
-        new Object[]{1, new MapRecord(personSchema, new HashMap() {{
-            put("name", "updated_name1");
-            put("age", "updated_age1".length());
-        }})},
-        new Object[]{2, new MapRecord(personSchema, new HashMap() {{
-            put("name", "inserted_name2");
-            put("age", "inserted_age2".length());
-        }})}
+        new Object[]{1, new MapRecord(personSchema, Map.of("name", "updated_name1", "age", "updated_age1".length()))},
+        new Object[]{2, new MapRecord(personSchema, Map.of("name", "inserted_name2", "age", "inserted_age2".length()))}
     )
 );
-    Set> expected = new HashSet<>(Arrays.asList(
-        new HashMap() {{
-            put("id", 1);
-            put("person", new Document(new HashMap() {{
-                put("name", "updated_name1");
-                put("age", "updated_age1".length());
-            }}));
-        }},
-        new HashMap() {{
-            put("id", 2);
-            put("person", new Document(new HashMap() {{
-                put("name", "inserted_name2");
-                put("age", "inserted_age2".length());
-            }}));
-        }}
-    ));
+    Set> expected = Set.of(
+        Map.of("id", 1, "person", new Document(Map.of("name", "updated_name1", "age", "updated_age1".length()))),
+        Map.of("id", 2, "person", new Document(Map.of("name", "inserted_name2", "age", "inserted_age2".length()))));
     testUpsertSuccess(runner, inputs, expected);
 }
@@ -387,13 +312,10 @@ void testRouteToFailureWhenKeyFieldDoesNotExist() throws Exception {
     new RecordField("age", RecordFieldType.INT.getDataType())
 ));
-    List> inputs = Arrays.asList(
-        Collections.singletonList(
-            new Object[]{1, new MapRecord(personSchema, new HashMap() {{
-                put("name", "unimportant");
-                put("age", "unimportant".length());
-            }})}
-        )
+    List> inputs = List.of(
+        Collections.singletonList(
+            new Object[]{1, new MapRecord(personSchema, Map.of("name", "unimportant", "age", "unimportant".length()))}
+        )
     );
     testUpsertFailure(runner, inputs);
@@ -429,35 +351,18 @@ void testUpdateMany() throws Exception {
 recordReader.addSchemaField("team", RecordFieldType.STRING);
 recordReader.addSchemaField("color", RecordFieldType.STRING);
-    List> inputs = Arrays.asList(
-        Arrays.asList(
-            new Object[]{"A", "yellow"},
-            new Object[]{"B", "red"}
-        )
+    List> inputs = List.of(
+        Arrays.asList(
+            new Object[]{"A", "yellow"},
+            new Object[]{"B", "red"}
+        )
     );
-    Set> expected = new HashSet<>(Arrays.asList(
-        new HashMap() {{
-            put("name", "Joe");
-            put("team", "A");
-            put("color", "yellow");
-        }},
-        new HashMap() {{
-            put("name", "Jane");
-            put("team", "A");
-            put("color", "yellow");
-        }},
-        new HashMap() {{
-            put("name", "Jeff");
-            put("team", "B");
-            put("color", "red");
-        }},
-        new HashMap() {{
-            put("name", "Janet");
-            put("team", "B");
-            put("color", "red");
-        }}
-    ));
+    Set> expected = Set.of(
+        Map.of("name", "Joe", "team", "A", "color", "yellow"),
+        Map.of("name", "Jane", "team", "A", "color", "yellow"),
+        Map.of("name", "Jeff", "team", "B", "color", "red"),
+        Map.of("name", "Janet", "team", "B", "color", "red"));
     testUpsertSuccess(updateRunner, inputs, expected);
 }
@@ -492,43 +397,24 @@ void testUpdateModeFFAttributeSetToMany() throws Exception {
 recordReader.addSchemaField("team", RecordFieldType.STRING);
 recordReader.addSchemaField("color", RecordFieldType.STRING);
-    List> inputs = Arrays.asList(
-        Arrays.asList(
-            new Object[]{"A", "yellow"},
-            new Object[]{"B", "red"}
-        )
+    List> inputs = List.of(
+        Arrays.asList(
+            new Object[]{"A", "yellow"},
+            new Object[]{"B", "red"}
+        )
     );
-    Set> expected = new HashSet<>(Arrays.asList(
-        new HashMap() {{
-            put("name", "Joe");
-            put("team", "A");
-            put("color", "yellow");
-        }},
-        new HashMap() {{
-            put("name", "Jane");
-            put("team", "A");
-            put("color", "yellow");
-        }},
-        new HashMap() {{
-            put("name", "Jeff");
-            put("team", "B");
-            put("color", "red");
-        }},
-        new HashMap() {{
-            put("name", "Janet");
-            put("team", "B");
-            put("color", "red");
-        }}
-    ));
+    Set> expected = Set.of(
+        Map.of("name", "Joe", "team", "A", "color", "yellow"),
+        Map.of("name", "Jane", "team", "A", "color", "yellow"),
+        Map.of("name", "Jeff", "team", "B", "color", "red"),
+        Map.of("name", "Janet", "team", "B", "color", "red"));
 inputs.forEach(input -> {
     input.forEach(recordReader::addRecord);
     MockFlowFile flowFile = new MockFlowFile(1);
-    flowFile.putAttributes(new HashMap() {{
-        put(AbstractMongoProcessor.ATTRIBUTE_MONGODB_UPDATE_MODE, "many");
-    }});
+    flowFile.putAttributes(Map.of(AbstractMongoProcessor.ATTRIBUTE_MONGODB_UPDATE_MODE, "many"));
     updateRunner.enqueue(flowFile);
     updateRunner.run();
 });
@@ -557,11 +443,11 @@ void testRouteToFailureWhenUpdateModeFFAttributeSetToInvalid() throws Exception
 recordReader.addSchemaField("team", RecordFieldType.STRING);
 recordReader.addSchemaField("color", RecordFieldType.STRING);
-    List> inputs = Arrays.asList(
-        Arrays.asList(
-            new Object[]{"A", "yellow"},
-            new Object[]{"B", "red"}
-        )
+    List> inputs = List.of(
+        Arrays.asList(
+            new Object[]{"A", "yellow"},
+            new Object[]{"B", "red"}
+        )
     );
     testUpsertFailure(runner, inputs);
@@ -581,14 +467,9 @@ void testRouteToFailureWhenKeyFieldReferencesNonEmbeddedDocument() throws Exception
     new RecordField("age", RecordFieldType.INT.getDataType())
 ));
-    List> inputs = Arrays.asList(
-        Collections.singletonList(
-            new Object[]{1, new MapRecord(personSchema, new HashMap() {{
-                put("name", "unimportant");
-                put("age", "unimportant".length());
-            }})}
-        )
-    );
+    List> inputs = List.of(
+        Collections.singletonList(
+            new Object[]{1, new MapRecord(personSchema, Map.of("name", "unimportant", "age", "unimportant".length()))}));
     testUpsertFailure(runner, inputs);
 }
diff --git a/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/test/java/org/apache/nifi/processors/mongodb/gridfs/FetchGridFSIT.java b/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/test/java/org/apache/nifi/processors/mongodb/gridfs/FetchGridFSIT.java
index f8157fe253e3..840b6e7821bf 100644
--- a/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/test/java/org/apache/nifi/processors/mongodb/gridfs/FetchGridFSIT.java
+++ b/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/test/java/org/apache/nifi/processors/mongodb/gridfs/FetchGridFSIT.java
@@ -134,9 +134,7 @@ public void testQueryAttribute() {
 runner.clearTransferState();
-    id = writeTestFile(fileName, content, BUCKET, new HashMap() {{
-        put("lookupKey", "xyz");
-    }});
+    id = writeTestFile(fileName, content, BUCKET, Map.of("lookupKey", "xyz"));
 assertNotNull(id);
 String query = "{ \"metadata\": { \"lookupKey\": \"xyz\" }}";
diff --git a/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/test/java/org/apache/nifi/processors/mongodb/gridfs/PutGridFSIT.java b/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/test/java/org/apache/nifi/processors/mongodb/gridfs/PutGridFSIT.java
index 5868a32badd4..83c764206c09 100644
--- a/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/test/java/org/apache/nifi/processors/mongodb/gridfs/PutGridFSIT.java
+++ b/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/test/java/org/apache/nifi/processors/mongodb/gridfs/PutGridFSIT.java
@@ -81,12 +81,10 @@ public void testWithProperties() {
         runner.run();
 
         runner.assertAllFlowFilesTransferred(PutGridFS.REL_SUCCESS);
-        attrs = new HashMap<String, String>() {{
-            put("created_by", "john.smith");
-            put("created_for", "jane.doe");
-            put("restrictions", "PHI&PII");
-            put("department", "Accounting");
-        }};
+        attrs = Map.of("created_by", "john.smith",
+                "created_for", "jane.doe",
+                "restrictions", "PHI&PII",
+                "department", "Accounting");
 
         assertTrue(fileExists(fileName, BUCKET), "File does not exist");
         assertTrue(fileHasProperties(fileName, BUCKET, attrs), "File is missing PARENT_PROPERTIES");
diff --git a/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-services/src/test/java/org/apache/nifi/mongodb/StubSchemaRegistry.java b/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-services/src/test/java/org/apache/nifi/mongodb/StubSchemaRegistry.java
index 5e1725801fd5..80576ce12535 100644
--- a/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-services/src/test/java/org/apache/nifi/mongodb/StubSchemaRegistry.java
+++ b/nifi-extension-bundles/nifi-mongodb-bundle/nifi-mongodb-services/src/test/java/org/apache/nifi/mongodb/StubSchemaRegistry.java
@@ -27,7 +27,6 @@ import org.apache.nifi.serialization.record.SchemaIdentifier;
 
 import java.util.ArrayList;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
@@ -42,8 +41,6 @@ public RecordSchema retrieveSchema(SchemaIdentifier schemaIdentifier) {
 
     @Override
     public Set<SchemaField> getSuppliedSchemaFields() {
-        return new HashSet<SchemaField>() {{
-            add(SchemaField.SCHEMA_NAME);
-        }};
+        return Set.of(SchemaField.SCHEMA_NAME);
     }
 }
diff --git a/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/main/java/org/apache/nifi/processors/parquet/CalculateParquetOffsets.java b/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/main/java/org/apache/nifi/processors/parquet/CalculateParquetOffsets.java
index 21c71caad6fd..276c3e6ea45b 100644
--- a/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/main/java/org/apache/nifi/processors/parquet/CalculateParquetOffsets.java
+++ b/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/main/java/org/apache/nifi/processors/parquet/CalculateParquetOffsets.java
@@ -23,9 +23,9 @@ import java.io.InputStream;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
 import org.apache.nifi.annotation.behavior.InputRequirement;
@@ -203,12 +203,8 @@ private List<FlowFile> getPartitions(
             results.add(
                     session.putAllAttributes(
                             outputFlowFile,
-                            new HashMap<String, String>() {
-                                {
-                                    put(ParquetAttribute.RECORD_OFFSET, Long.toString(recordOffset + addedOffset));
-                                    put(ParquetAttribute.RECORD_COUNT, Long.toString(Math.min(partitionSize, recordCount - addedOffset)));
-                                }
-                            }
+                            Map.of(ParquetAttribute.RECORD_OFFSET, Long.toString(recordOffset + addedOffset),
+                                    ParquetAttribute.RECORD_COUNT, Long.toString(Math.min(partitionSize, recordCount - addedOffset)))
                     )
             );
         }
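The maps handed to session.putAllAttributes above are now unmodifiable, which is safe because the framework copies attribute values onto the FlowFile rather than retaining the caller's map. A small sketch of the immutability contract, using placeholder attribute names rather than the real ParquetAttribute constants:

    import java.util.Map;

    public class ImmutableFactories {
        public static void main(String[] args) {
            Map<String, String> attrs = Map.of("record.offset", "0", "record.count", "10");
            // Collections from the of() factories reject mutators.
            try {
                attrs.put("extra", "x");
            } catch (UnsupportedOperationException e) {
                System.out.println("Map.of is immutable");
            }
            System.out.println(attrs.size()); // 2
        }
    }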
diff --git a/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/main/java/org/apache/nifi/processors/parquet/CalculateParquetRowGroupOffsets.java b/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/main/java/org/apache/nifi/processors/parquet/CalculateParquetRowGroupOffsets.java
index 39c24eb0bec0..526ed4f4201c 100644
--- a/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/main/java/org/apache/nifi/processors/parquet/CalculateParquetRowGroupOffsets.java
+++ b/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/main/java/org/apache/nifi/processors/parquet/CalculateParquetRowGroupOffsets.java
@@ -22,9 +22,9 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
@@ -154,14 +154,9 @@ private List<FlowFile> getPartitions(
             results.add(
                     session.putAllAttributes(
                             outputFlowFile,
-                            new HashMap<String, String>() {
-                                {
-                                    put(ParquetAttribute.FILE_RANGE_START_OFFSET, String.valueOf(currentBlockStartOffset));
-                                    put(ParquetAttribute.FILE_RANGE_END_OFFSET, String.valueOf(currentBlockEndOffset));
-                                    put(ParquetAttribute.RECORD_COUNT, String.valueOf(currentBlock.getRowCount()));
-                                }
-                            }
-                    )
+                            Map.of(ParquetAttribute.FILE_RANGE_START_OFFSET, String.valueOf(currentBlockStartOffset),
+                                    ParquetAttribute.FILE_RANGE_END_OFFSET, String.valueOf(currentBlockEndOffset),
+                                    ParquetAttribute.RECORD_COUNT, String.valueOf(currentBlock.getRowCount())))
             );
         }
diff --git a/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/test/java/org/apache/nifi/parquet/ParquetTestUtils.java b/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/test/java/org/apache/nifi/parquet/ParquetTestUtils.java
index eee97b89401e..283bc88656fa 100644
--- a/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/test/java/org/apache/nifi/parquet/ParquetTestUtils.java
+++ b/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/test/java/org/apache/nifi/parquet/ParquetTestUtils.java
@@ -23,7 +23,6 @@ import java.io.InputStream;
 import java.nio.charset.StandardCharsets;
 import java.util.Collection;
-import java.util.HashMap;
 import java.util.Map;
 import java.util.stream.IntStream;
 import org.apache.avro.Schema;
@@ -47,13 +46,7 @@ public static File createUsersParquetFile(int numUsers) throws IOException {
     }
 
     public static Map<String, Object> createUser(int i) {
-        return new HashMap<String, Object>() {
-            {
-                put("name", "Bob" + i);
-                put("favorite_number", i);
-                put("favorite_color", "blue" + i);
-            }
-        };
+        return Map.of("name", "Bob" + i, "favorite_number", i, "favorite_color", "blue" + i);
    }
 
     private static File createUsersParquetFile(Collection<Map<String, Object>> users) throws IOException {
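One caveat when converting double-brace maps wholesale, relevant to helpers like createUser above: Map.of has fixed-arity overloads only up to ten key-value pairs. Past that, Map.ofEntries is the usual substitute. A brief sketch:

    import java.util.Map;
    import static java.util.Map.entry;

    public class MapOfEntries {
        public static void main(String[] args) {
            // Map.of stops at ten pairs; Map.ofEntries takes any number.
            Map<String, String> user = Map.ofEntries(
                    entry("name", "Bob0"),
                    entry("favorite_color", "blue0"));
            System.out.println(user);
        }
    }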
diff --git a/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/test/java/org/apache/nifi/parquet/TestParquetReader.java b/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/test/java/org/apache/nifi/parquet/TestParquetReader.java
index 2a22f75345a1..c96bdf19e0e9 100644
--- a/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/test/java/org/apache/nifi/parquet/TestParquetReader.java
+++ b/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/test/java/org/apache/nifi/parquet/TestParquetReader.java
@@ -83,12 +83,8 @@ public void testReadUsersPartiallyWithOffsetAndLimitedRecordCount() throws IOExc
         final int numUsers = 1000025; // intentionally so large, to test input with many record groups
         final int expectedRecords = 2;
         final File parquetFile = ParquetTestUtils.createUsersParquetFile(numUsers);
-        final List<Record> results = getRecords(parquetFile, new HashMap<String, String>() {
-            {
-                put(ParquetAttribute.RECORD_OFFSET, "1000020");
-                put(ParquetAttribute.RECORD_COUNT, "2");
-            }
-        });
+        final List<Record> results = getRecords(parquetFile, Map.of(ParquetAttribute.RECORD_OFFSET, "1000020",
+                ParquetAttribute.RECORD_COUNT, "2"));
 
         assertEquals(expectedRecords, results.size());
         IntStream.range(0, expectedRecords)
@@ -102,13 +98,9 @@ public void testReadUsersPartiallyWithOffsetWithinFileRange() throws IOException
         final File parquetFile = ParquetTestUtils.createUsersParquetFile(numUsers);
         final List<Record> results = getRecords(
                 parquetFile,
-                new HashMap<String, String>() {
-                    {
-                        put(ParquetAttribute.RECORD_OFFSET, "321");
-                        put(ParquetAttribute.FILE_RANGE_START_OFFSET, "16543");
-                        put(ParquetAttribute.FILE_RANGE_END_OFFSET, "24784");
-                    }
-                }
+                Map.of(ParquetAttribute.RECORD_OFFSET, "321",
+                        ParquetAttribute.FILE_RANGE_START_OFFSET, "16543",
+                        ParquetAttribute.FILE_RANGE_END_OFFSET, "24784")
         );
 
         assertEquals(expectedRecords, results.size());
@@ -124,14 +116,10 @@ public void testReadUsersPartiallyWithOffsetAndLimitedRecordCountWithinFileRange
         final File parquetFile = ParquetTestUtils.createUsersParquetFile(numUsers);
         final List<Record> results = getRecords(
                 parquetFile,
-                new HashMap<String, String>() {
-                    {
-                        put(ParquetAttribute.RECORD_OFFSET, "321");
-                        put(ParquetAttribute.RECORD_COUNT, "2");
-                        put(ParquetAttribute.FILE_RANGE_START_OFFSET, "16543");
-                        put(ParquetAttribute.FILE_RANGE_END_OFFSET, "24784");
-                    }
-                }
+                Map.of(ParquetAttribute.RECORD_OFFSET, "321",
+                        ParquetAttribute.RECORD_COUNT, "2",
+                        ParquetAttribute.FILE_RANGE_START_OFFSET, "16543",
+                        ParquetAttribute.FILE_RANGE_END_OFFSET, "24784")
         );
 
         assertEquals(expectedRecords, results.size());
@@ -174,12 +162,8 @@ public void testPartialReaderWithOffsetAndLimitedRecordCount() throws Initializa
         runner.addControllerService("reader", parquetReader);
         runner.enableControllerService(parquetReader);
 
-        runner.enqueue(Paths.get(PARQUET_PATH), new HashMap<String, String>() {
-            {
-                put(ParquetAttribute.RECORD_OFFSET, "6");
-                put(ParquetAttribute.RECORD_COUNT, "2");
-            }
-        });
+        runner.enqueue(Paths.get(PARQUET_PATH), Map.of(ParquetAttribute.RECORD_OFFSET, "6",
+                ParquetAttribute.RECORD_COUNT, "2"));
 
         runner.setProperty(TestParquetProcessor.READER, "reader");
diff --git a/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/test/java/org/apache/nifi/processors/parquet/CalculateParquetOffsetsTest.java b/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/test/java/org/apache/nifi/processors/parquet/CalculateParquetOffsetsTest.java
index 9811d03762e6..00180427feab 100644
--- a/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/test/java/org/apache/nifi/processors/parquet/CalculateParquetOffsetsTest.java
+++ b/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/test/java/org/apache/nifi/processors/parquet/CalculateParquetOffsetsTest.java
@@ -42,12 +42,7 @@ public class CalculateParquetOffsetsTest {
     private static final Path PARQUET_PATH = Paths.get("src/test/resources/TestParquetReader.parquet");
     private static final Path NOT_PARQUET_PATH = Paths.get("src/test/resources/core-site.xml");
 
-    private static final Map<String, String> PRESERVED_ATTRIBUTES = new HashMap<String, String>() {
-        {
-            put("foo", "bar");
-            put("example", "value");
-        }
-    };
+    private static final Map<String, String> PRESERVED_ATTRIBUTES = Map.of("foo", "bar", "example", "value");
 
     private TestRunner runner;
 
@@ -65,11 +60,11 @@ public void testSinglePartition() throws Exception {
 
         final List<MockFlowFile> results = runner.getFlowFilesForRelationship(REL_SUCCESS);
 
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "10");
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "0");
-        results.get(0).assertContentEquals(PARQUET_PATH);
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "10");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "0");
+        results.getFirst().assertContentEquals(PARQUET_PATH);
 
-        PRESERVED_ATTRIBUTES.forEach(results.get(0)::assertAttributeEquals);
+        PRESERVED_ATTRIBUTES.forEach(results.getFirst()::assertAttributeEquals);
     }
 
     @Test
@@ -99,9 +94,9 @@ public void testHalfPartitions() throws Exception {
 
         final List<MockFlowFile> results = runner.getFlowFilesForRelationship(REL_SUCCESS);
 
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "5");
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "0");
-        results.get(0).assertContentEquals(PARQUET_PATH);
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "5");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "0");
+        results.getFirst().assertContentEquals(PARQUET_PATH);
 
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "5");
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "5");
@@ -119,9 +114,9 @@ public void testAsymmetricPartitions() throws Exception {
 
         final List<MockFlowFile> results = runner.getFlowFilesForRelationship(REL_SUCCESS);
 
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "8");
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "0");
-        results.get(0).assertContentEquals(PARQUET_PATH);
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "8");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "0");
+        results.getFirst().assertContentEquals(PARQUET_PATH);
 
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "2");
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "8");
@@ -133,7 +128,7 @@ public void testAsymmetricPartitions() throws Exception {
     @Test
     public void testSubPartitioningWithCountAndOffset() throws Exception {
         runner.setProperty(PROP_RECORDS_PER_SPLIT, "3");
-        runner.enqueue(PARQUET_PATH, createAttributes(new HashMap<String, String>() {
+        runner.enqueue(PARQUET_PATH, createAttributes(new HashMap<>() {
             {
                 put(ParquetAttribute.RECORD_COUNT, "7");
                 put(ParquetAttribute.RECORD_OFFSET, "2");
@@ -144,9 +139,9 @@ public void testSubPartitioningWithCountAndOffset() throws Exception {
 
         final List<MockFlowFile> results = runner.getFlowFilesForRelationship(REL_SUCCESS);
 
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "3");
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "2");
-        results.get(0).assertContentEquals(PARQUET_PATH);
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "3");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "2");
+        results.getFirst().assertContentEquals(PARQUET_PATH);
 
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "3");
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "5");
@@ -168,9 +163,9 @@ public void testSubPartitioningWithoutOffset() throws Exception {
 
         final List<MockFlowFile> results = runner.getFlowFilesForRelationship(REL_SUCCESS);
 
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "2");
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "0");
-        results.get(0).assertContentEquals(PARQUET_PATH);
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "2");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "0");
+        results.getFirst().assertContentEquals(PARQUET_PATH);
 
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "1");
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "2");
@@ -188,9 +183,9 @@ public void testSubPartitioningWithoutCount() throws Exception {
 
         final List<MockFlowFile> results = runner.getFlowFilesForRelationship(REL_SUCCESS);
 
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "5");
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "3");
-        results.get(0).assertContentEquals(PARQUET_PATH);
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "5");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "3");
+        results.getFirst().assertContentEquals(PARQUET_PATH);
 
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "2");
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "8");
@@ -209,9 +204,9 @@ public void testZeroContentOutput() throws Exception {
 
         final List<MockFlowFile> results = runner.getFlowFilesForRelationship(REL_SUCCESS);
 
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "8");
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "0");
-        results.get(0).assertContentEquals("");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "8");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "0");
+        results.getFirst().assertContentEquals("");
 
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "2");
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "8");
@@ -247,9 +242,9 @@ public void testEmptyInputWithCountAttribute() {
 
         final List<MockFlowFile> results = runner.getFlowFilesForRelationship(REL_SUCCESS);
 
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "3");
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "0");
-        results.get(0).assertContentEquals("");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "3");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "0");
+        results.getFirst().assertContentEquals("");
 
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "1");
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "3");
@@ -261,7 +256,7 @@ public void testEmptyInputWithCountAttribute() {
     @Test
     public void testEmptyInputWithOffsetAndCountAttributes() {
         runner.setProperty(PROP_RECORDS_PER_SPLIT, "3");
-        runner.enqueue("", createAttributes(new HashMap<String, String>() {
+        runner.enqueue("", createAttributes(new HashMap<>() {
             {
                 put(ParquetAttribute.RECORD_OFFSET, "2");
                 put(ParquetAttribute.RECORD_COUNT, "4");
@@ -272,9 +267,9 @@ public void testEmptyInputWithOffsetAndCountAttributes() {
 
         final List<MockFlowFile> results = runner.getFlowFilesForRelationship(REL_SUCCESS);
 
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "3");
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "2");
-        results.get(0).assertContentEquals("");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "3");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "2");
+        results.getFirst().assertContentEquals("");
 
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "1");
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "5");
@@ -310,9 +305,9 @@ public void testUnrecognizedInputWithCountAttribute() throws IOException {
 
         final List<MockFlowFile> results = runner.getFlowFilesForRelationship(REL_SUCCESS);
 
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "3");
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "0");
-        results.get(0).assertContentEquals(NOT_PARQUET_PATH);
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "3");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "0");
+        results.getFirst().assertContentEquals(NOT_PARQUET_PATH);
 
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "1");
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "3");
@@ -324,7 +319,7 @@ public void testUnrecognizedInputWithCountAttribute() throws IOException {
     @Test
     public void testUnrecognizedInputWithOffsetAndCountAttributes() throws IOException {
         runner.setProperty(PROP_RECORDS_PER_SPLIT, "3");
-        runner.enqueue(NOT_PARQUET_PATH, createAttributes(new HashMap<String, String>() {
+        runner.enqueue(NOT_PARQUET_PATH, createAttributes(new HashMap<>() {
             {
                 put(ParquetAttribute.RECORD_OFFSET, "2");
                 put(ParquetAttribute.RECORD_COUNT, "4");
@@ -335,9 +330,9 @@ public void testUnrecognizedInputWithOffsetAndCountAttributes() throws IOExcepti
 
         final List<MockFlowFile> results = runner.getFlowFilesForRelationship(REL_SUCCESS);
 
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "3");
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "2");
-        results.get(0).assertContentEquals(NOT_PARQUET_PATH);
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "3");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "2");
+        results.getFirst().assertContentEquals(NOT_PARQUET_PATH);
 
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "1");
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_OFFSET, "5");
@@ -347,7 +342,7 @@ public void testUnrecognizedInputWithOffsetAndCountAttributes() throws IOExcepti
     }
 
     private HashMap<String, String> createAttributes(Map<String, String> additionalAttributes) {
-        return new HashMap<String, String>(PRESERVED_ATTRIBUTES) {{
+        return new HashMap<>(PRESERVED_ATTRIBUTES) {{
             putAll(additionalAttributes);
         }};
     }
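The get(0) to getFirst() conversions in this test class and the next rely on the SequencedCollection interface that Java 21 added to List, which the NiFi 2.x line builds against. A minimal illustration:

    import java.util.List;

    public class SequencedAccess {
        public static void main(String[] args) {
            List<String> results = List.of("first", "second", "third");
            // Java 21's SequencedCollection adds getFirst()/getLast(),
            // replacing the index-based get(0) used in the old test code.
            System.out.println(results.getFirst()); // first
            System.out.println(results.getLast());  // third
        }
    }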
diff --git a/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/test/java/org/apache/nifi/processors/parquet/CalculateParquetRowGroupOffsetsTest.java b/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/test/java/org/apache/nifi/processors/parquet/CalculateParquetRowGroupOffsetsTest.java
index dfd4dbacf48e..ef79489b34c1 100644
--- a/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/test/java/org/apache/nifi/processors/parquet/CalculateParquetRowGroupOffsetsTest.java
+++ b/nifi-extension-bundles/nifi-parquet-bundle/nifi-parquet-processors/src/test/java/org/apache/nifi/processors/parquet/CalculateParquetRowGroupOffsetsTest.java
@@ -26,7 +26,6 @@ import java.io.IOException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import org.apache.nifi.parquet.ParquetTestUtils;
@@ -44,12 +43,7 @@ public class CalculateParquetRowGroupOffsetsTest {
 
     private static final Path NOT_PARQUET_PATH = Paths.get("src/test/resources/core-site.xml");
 
-    private static final Map<String, String> PRESERVED_ATTRIBUTES = new HashMap<String, String>() {
-        {
-            put("foo", "bar");
-            put("example", "value");
-        }
-    };
+    private static final Map<String, String> PRESERVED_ATTRIBUTES = Map.of("foo", "bar", "example", "value");
 
     private TestRunner runner;
 
@@ -67,12 +61,12 @@ public void testSinglePartition() throws Exception {
 
         final List<MockFlowFile> results = runner.getFlowFilesForRelationship(REL_SUCCESS);
 
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "10");
-        results.get(0).assertAttributeEquals(ParquetAttribute.FILE_RANGE_START_OFFSET, "4");
-        results.get(0).assertAttributeEquals(ParquetAttribute.FILE_RANGE_END_OFFSET, "298");
-        results.get(0).assertContentEquals(parquetFile.toPath());
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "10");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.FILE_RANGE_START_OFFSET, "4");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.FILE_RANGE_END_OFFSET, "298");
+        results.getFirst().assertContentEquals(parquetFile.toPath());
 
-        PRESERVED_ATTRIBUTES.forEach(results.get(0)::assertAttributeEquals);
+        PRESERVED_ATTRIBUTES.forEach(results.getFirst()::assertAttributeEquals);
     }
 
     @Test
@@ -84,10 +78,10 @@ public void testEachRowGroupGoesToSeparatePartition() throws Exception {
 
         final List<MockFlowFile> results = runner.getFlowFilesForRelationship(REL_SUCCESS);
 
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "337");
-        results.get(0).assertAttributeEquals(ParquetAttribute.FILE_RANGE_START_OFFSET, "4");
-        results.get(0).assertAttributeEquals(ParquetAttribute.FILE_RANGE_END_OFFSET, "8301");
-        results.get(0).assertContentEquals(parquetFile.toPath());
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "337");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.FILE_RANGE_START_OFFSET, "4");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.FILE_RANGE_END_OFFSET, "8301");
+        results.getFirst().assertContentEquals(parquetFile.toPath());
 
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "326");
         results.get(1).assertAttributeEquals(ParquetAttribute.FILE_RANGE_START_OFFSET, "8301");
@@ -117,10 +111,10 @@ public void testZeroContentOutput() throws Exception {
 
         final List<MockFlowFile> results = runner.getFlowFilesForRelationship(REL_SUCCESS);
 
-        results.get(0).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "337");
-        results.get(0).assertAttributeEquals(ParquetAttribute.FILE_RANGE_START_OFFSET, "4");
-        results.get(0).assertAttributeEquals(ParquetAttribute.FILE_RANGE_END_OFFSET, "8301");
-        results.get(0).assertContentEquals("");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "337");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.FILE_RANGE_START_OFFSET, "4");
+        results.getFirst().assertAttributeEquals(ParquetAttribute.FILE_RANGE_END_OFFSET, "8301");
+        results.getFirst().assertContentEquals("");
 
         results.get(1).assertAttributeEquals(ParquetAttribute.RECORD_COUNT, "163");
         results.get(1).assertAttributeEquals(ParquetAttribute.FILE_RANGE_START_OFFSET, "8301");
diff --git a/nifi-extension-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/test/java/org/apache/nifi/processors/script/TestExecuteClojure.java b/nifi-extension-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/test/java/org/apache/nifi/processors/script/TestExecuteClojure.java
index b99ba4516c56..98a53c96f716 100644
--- a/nifi-extension-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/test/java/org/apache/nifi/processors/script/TestExecuteClojure.java
+++ b/nifi-extension-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/test/java/org/apache/nifi/processors/script/TestExecuteClojure.java
@@ -22,8 +22,8 @@ import org.junit.jupiter.api.Test;
 
 import java.nio.charset.StandardCharsets;
-import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
@@ -115,10 +115,7 @@ public void testDynamicProperties() throws Exception {
         runner.setProperty("myProp", "${myAttr}");
         runner.assertValid();
 
-        runner.enqueue(TEST_CSV_DATA.getBytes(StandardCharsets.UTF_8),
-                new HashMap<String, String>(1) {{
-                    put("myAttr", "testValue");
-                }});
+        runner.enqueue(TEST_CSV_DATA.getBytes(StandardCharsets.UTF_8), Map.of("myAttr", "testValue"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ExecuteScript.REL_SUCCESS, 1);
diff --git a/nifi-extension-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/test/java/org/apache/nifi/processors/script/TestExecuteGroovy.java b/nifi-extension-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/test/java/org/apache/nifi/processors/script/TestExecuteGroovy.java
index f91dad1e1497..d9acea6a2777 100644
--- a/nifi-extension-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/test/java/org/apache/nifi/processors/script/TestExecuteGroovy.java
+++ b/nifi-extension-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/test/java/org/apache/nifi/processors/script/TestExecuteGroovy.java
@@ -23,8 +23,8 @@ import org.junit.jupiter.api.Test;
 
 import java.nio.charset.StandardCharsets;
-import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertThrows;
@@ -149,10 +149,7 @@ public void testDynamicProperties() {
         runner.setProperty("myProp", "${myAttr}");
         runner.assertValid();
 
-        runner.enqueue(TEST_CSV_DATA.getBytes(StandardCharsets.UTF_8),
-                new HashMap<String, String>(1) {{
-                    put("myAttr", "testValue");
-                }});
+        runner.enqueue(TEST_CSV_DATA.getBytes(StandardCharsets.UTF_8), Map.of("myAttr", "testValue"));
         runner.run();
 
         runner.assertAllFlowFilesTransferred(ExecuteScript.REL_SUCCESS, 1);
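The enqueue changes in the two scripting tests above retire double-brace initialization. Beyond verbosity, that idiom allocates a fresh anonymous HashMap subclass per use site and, in instance context, captures a reference to the enclosing object, which can prolong its lifetime. A self-contained sketch of both effects:

    import java.util.HashMap;
    import java.util.Map;

    public class DoubleBrace {
        // Double-brace initialization: an anonymous HashMap subclass whose
        // instance initializer runs put(). In instance context it also keeps
        // a synthetic reference to the enclosing DoubleBrace object.
        Map<String, String> captured = new HashMap<>() {{
            put("myAttr", "testValue");
        }};

        public static void main(String[] args) {
            System.out.println(new DoubleBrace().captured.getClass().isAnonymousClass()); // true
            System.out.println(Map.of("myAttr", "testValue")); // no extra class, no capture
        }
    }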
diff --git a/nifi-extension-bundles/nifi-site-to-site-reporting-bundle/nifi-site-to-site-reporting-task/src/main/java/org/apache/nifi/reporting/SiteToSiteStatusReportingTask.java b/nifi-extension-bundles/nifi-site-to-site-reporting-bundle/nifi-site-to-site-reporting-task/src/main/java/org/apache/nifi/reporting/SiteToSiteStatusReportingTask.java
index 55f0190dd42f..456ab9e5ef7b 100644
--- a/nifi-extension-bundles/nifi-site-to-site-reporting-bundle/nifi-site-to-site-reporting-task/src/main/java/org/apache/nifi/reporting/SiteToSiteStatusReportingTask.java
+++ b/nifi-extension-bundles/nifi-site-to-site-reporting-bundle/nifi-site-to-site-reporting-task/src/main/java/org/apache/nifi/reporting/SiteToSiteStatusReportingTask.java
@@ -122,7 +122,7 @@ public void onTrigger(final ReportingContext context) {
             componentNameFilter = Pattern.compile(context.getProperty(COMPONENT_NAME_FILTER_REGEX).evaluateAttributeExpressions().getValue());
 
             // initialize the map
-            processGroupIDToPath = new HashMap<String, String>();
+            processGroupIDToPath = new HashMap<>();
 
             final ProcessGroupStatus procGroupStatus = context.getEventAccess().getControllerStatus();
             final String rootGroupName = procGroupStatus == null ? null : procGroupStatus.getName();
diff --git a/nifi-extension-bundles/nifi-site-to-site-reporting-bundle/nifi-site-to-site-reporting-task/src/test/java/org/apache/nifi/reporting/TestSiteToSiteStatusReportingTask.java b/nifi-extension-bundles/nifi-site-to-site-reporting-bundle/nifi-site-to-site-reporting-task/src/test/java/org/apache/nifi/reporting/TestSiteToSiteStatusReportingTask.java
index 79eab2464fa0..b3bc0c819e7c 100644
--- a/nifi-extension-bundles/nifi-site-to-site-reporting-bundle/nifi-site-to-site-reporting-task/src/test/java/org/apache/nifi/reporting/TestSiteToSiteStatusReportingTask.java
+++ b/nifi-extension-bundles/nifi-site-to-site-reporting-bundle/nifi-site-to-site-reporting-task/src/test/java/org/apache/nifi/reporting/TestSiteToSiteStatusReportingTask.java
@@ -112,7 +112,7 @@ public void testSerializedForm() throws IOException, InitializationException {
         task.onTrigger(context);
 
         assertEquals(16, task.dataSent.size());
-        final String msg = new String(task.dataSent.get(0), StandardCharsets.UTF_8);
+        final String msg = new String(task.dataSent.getFirst(), StandardCharsets.UTF_8);
         JsonReader jsonReader = Json.createReader(new ByteArrayInputStream(msg.getBytes()));
         JsonObject firstElement = jsonReader.readArray().getJsonObject(0);
         JsonString componentId = firstElement.getJsonString("componentId");
@@ -136,7 +136,7 @@ public void testComponentTypeFilter() throws IOException, InitializationExceptio
         task.onTrigger(context);
 
         assertEquals(1, task.dataSent.size());  // Only root pg and 3 child pgs
-        final String msg = new String(task.dataSent.get(0), StandardCharsets.UTF_8);
+        final String msg = new String(task.dataSent.getFirst(), StandardCharsets.UTF_8);
         JsonReader jsonReader = Json.createReader(new ByteArrayInputStream(msg.getBytes()));
         JsonString componentId = jsonReader.readArray().getJsonObject(0).getJsonString("componentId");
         assertEquals(pgStatus.getId(), componentId.getString());
@@ -154,7 +154,7 @@ public void testConnectionStatus() throws IOException, InitializationException {
         MockSiteToSiteStatusReportingTask task = initTask(properties, pgStatus);
         task.onTrigger(context);
 
-        final String msg = new String(task.dataSent.get(0), StandardCharsets.UTF_8);
+        final String msg = new String(task.dataSent.getFirst(), StandardCharsets.UTF_8);
         JsonReader jsonReader = Json.createReader(new ByteArrayInputStream(msg.getBytes()));
         JsonObject object = jsonReader.readArray().getJsonObject(0);
         JsonString backpressure = object.getJsonString("isBackPressureEnabled");
@@ -181,7 +181,7 @@ public void testConnectionStatusWithNullValues() throws IOException, Initializat
         MockSiteToSiteStatusReportingTask task = initTask(properties, pgStatus);
         task.onTrigger(context);
 
-        final String msg = new String(task.dataSent.get(0), StandardCharsets.UTF_8);
+        final String msg = new String(task.dataSent.getFirst(), StandardCharsets.UTF_8);
         JsonReader jsonReader = Json.createReader(new ByteArrayInputStream(msg.getBytes()));
         JsonObject object = jsonReader.readArray().getJsonObject(0);
         JsonValue destination = object.get("destinationName");
@@ -202,7 +202,7 @@ public void testComponentNameFilter() throws IOException, InitializationExceptio
         task.onTrigger(context);
 
         assertEquals(3, task.dataSent.size());  // 3 processors for each of 4 groups
-        final String msg = new String(task.dataSent.get(0), StandardCharsets.UTF_8);
+        final String msg = new String(task.dataSent.getFirst(), StandardCharsets.UTF_8);
         JsonReader jsonReader = Json.createReader(new ByteArrayInputStream(msg.getBytes()));
         JsonString componentId = jsonReader.readArray().getJsonObject(0).getJsonString("componentId");
         assertEquals("root.1.processor.1", componentId.getString());
@@ -221,7 +221,7 @@ public void testComponentNameFilter_nested() throws IOException, InitializationE
         task.onTrigger(context);
 
         assertEquals(10, task.dataSent.size());  // 3 + (3 * 3) + (3 * 3 * 3) = 39, or 10 batches of 4
-        final String msg = new String(task.dataSent.get(0), StandardCharsets.UTF_8);
+        final String msg = new String(task.dataSent.getFirst(), StandardCharsets.UTF_8);
         JsonReader jsonReader = Json.createReader(new ByteArrayInputStream(msg.getBytes()));
         JsonString componentId = jsonReader.readArray().getJsonObject(0).getJsonString("componentId");
         assertEquals("root.1.1.processor.1", componentId.getString());
@@ -240,7 +240,7 @@ public void testPortStatus() throws IOException, InitializationException {
         MockSiteToSiteStatusReportingTask task = initTask(properties, pgStatus);
         task.onTrigger(context);
 
-        final String msg = new String(task.dataSent.get(0), StandardCharsets.UTF_8);
+        final String msg = new String(task.dataSent.getFirst(), StandardCharsets.UTF_8);
         JsonReader jsonReader = Json.createReader(new ByteArrayInputStream(msg.getBytes()));
         JsonObject object = jsonReader.readArray().getJsonObject(0);
         JsonString runStatus = object.getJsonString("runStatus");
@@ -264,7 +264,7 @@ public void testPortStatusWithNullValues() throws IOException, InitializationExc
         MockSiteToSiteStatusReportingTask task = initTask(properties, pgStatus);
         task.onTrigger(context);
 
-        final String msg = new String(task.dataSent.get(0), StandardCharsets.UTF_8);
+        final String msg = new String(task.dataSent.getFirst(), StandardCharsets.UTF_8);
         JsonReader jsonReader = Json.createReader(new ByteArrayInputStream(msg.getBytes()));
         JsonObject object = jsonReader.readArray().getJsonObject(0);
         JsonValue activeThreadCount = object.get("activeThreadCount");
@@ -284,7 +284,7 @@ public void testRemoteProcessGroupStatus() throws IOException, InitializationExc
         task.onTrigger(context);
 
         assertEquals(3, task.dataSent.size());
-        final String msg = new String(task.dataSent.get(0), StandardCharsets.UTF_8);
+        final String msg = new String(task.dataSent.getFirst(), StandardCharsets.UTF_8);
         JsonReader jsonReader = Json.createReader(new ByteArrayInputStream(msg.getBytes()));
         JsonObject firstElement = jsonReader.readArray().getJsonObject(0);
         JsonNumber activeThreadCount = firstElement.getJsonNumber("activeThreadCount");
@@ -307,7 +307,7 @@ public void testRemoteProcessGroupStatusWithNullValues() throws IOException, Ini
         task.onTrigger(context);
 
         assertEquals(3, task.dataSent.size());
-        final String msg = new String(task.dataSent.get(0), StandardCharsets.UTF_8);
+        final String msg = new String(task.dataSent.getFirst(), StandardCharsets.UTF_8);
         JsonReader jsonReader = Json.createReader(new ByteArrayInputStream(msg.getBytes()));
         JsonObject firstElement = jsonReader.readArray().getJsonObject(0);
         JsonValue targetURI = firstElement.get("targetURI");
@@ -326,7 +326,7 @@ public void testProcessorStatus() throws IOException, InitializationException {
         MockSiteToSiteStatusReportingTask task = initTask(properties, pgStatus);
         task.onTrigger(context);
 
-        final String msg = new String(task.dataSent.get(0), StandardCharsets.UTF_8);
+        final String msg = new String(task.dataSent.getFirst(), StandardCharsets.UTF_8);
         JsonReader jsonReader = Json.createReader(new ByteArrayInputStream(msg.getBytes()));
         JsonObject object = jsonReader.readArray().getJsonObject(0);
         JsonString parentName = object.getJsonString("parentName");
@@ -357,7 +357,7 @@ public void testProcessorStatusWithNullValues() throws IOException, Initializati
         MockSiteToSiteStatusReportingTask task = initTask(properties, pgStatus);
         task.onTrigger(context);
 
-        final String msg = new String(task.dataSent.get(0), StandardCharsets.UTF_8);
+        final String msg = new String(task.dataSent.getFirst(), StandardCharsets.UTF_8);
         JsonReader jsonReader = Json.createReader(new ByteArrayInputStream(msg.getBytes()));
         JsonObject object = jsonReader.readArray().getJsonObject(0);
         JsonValue type = object.get("processorType");
@@ -401,7 +401,7 @@ public static ProcessGroupStatus generateProcessGroupStatus(String id, String na
 
         ProcessGroupStatus pgStatus = new ProcessGroupStatus();
         pgStatus.setId(id);
-        pgStatus.setName(namePrefix + "-" + UUID.randomUUID().toString());
+        pgStatus.setName(namePrefix + "-" + UUID.randomUUID());
         pgStatus.setInputPortStatus(ipStatus);
         pgStatus.setOutputPortStatus(opStatus);
         pgStatus.setProcessGroupStatus(childPgStatus);
@@ -432,7 +432,7 @@ public static ProcessGroupStatus generateProcessGroupStatus(String id, String na
     public static PortStatus generatePortStatus(String id, String namePrefix) {
         PortStatus pStatus = new PortStatus();
         pStatus.setId(id);
-        pStatus.setName(namePrefix + "-" + UUID.randomUUID().toString());
+        pStatus.setName(namePrefix + "-" + UUID.randomUUID());
         pStatus.setActiveThreadCount(null);
         pStatus.setBytesReceived(1l);
         pStatus.setBytesSent(2l);
@@ -451,7 +451,7 @@ public static PortStatus generatePortStatus(String id, String namePrefix) {
     public static ProcessorStatus generateProcessorStatus(String id, String namePrefix) {
         ProcessorStatus pStatus = new ProcessorStatus();
         pStatus.setId(id);
-        pStatus.setName(namePrefix + "-" + UUID.randomUUID().toString());
+        pStatus.setName(namePrefix + "-" + UUID.randomUUID());
         pStatus.setActiveThreadCount(0);
         pStatus.setAverageLineageDuration(1l);
         pStatus.setBytesRead(2l);
@@ -470,10 +470,7 @@ public static ProcessorStatus generateProcessorStatus(String id, String namePref
         pStatus.setType(null);
         pStatus.setTerminatedThreadCount(1);
         pStatus.setRunStatus(RunStatus.Running);
-        pStatus.setCounters(new HashMap<String, Long>() {{
-            put("counter1", 10L);
-            put("counter2", 5L);
-        }});
+        pStatus.setCounters(Map.of("counter1", 10L, "counter2", 5L));
 
         return pStatus;
     }
@@ -481,7 +478,7 @@ public static ProcessorStatus generateProcessorStatus(String id, String namePref
     public static RemoteProcessGroupStatus generateRemoteProcessGroupStatus(String id, String namePrefix) {
         RemoteProcessGroupStatus rpgStatus = new RemoteProcessGroupStatus();
         rpgStatus.setId(id);
-        rpgStatus.setName(namePrefix + "-" + UUID.randomUUID().toString());
+        rpgStatus.setName(namePrefix + "-" + UUID.randomUUID());
         rpgStatus.setActiveRemotePortCount(0);
         rpgStatus.setActiveThreadCount(1);
         rpgStatus.setAverageLineageDuration(2l);
@@ -499,7 +496,7 @@ public static RemoteProcessGroupStatus generateRemoteProcessGroupStatus(String i
     public static ConnectionStatus generateConnectionStatus(String id, String namePrefix) {
         ConnectionStatus cStatus = new ConnectionStatus();
         cStatus.setId(id);
-        cStatus.setName(namePrefix + "-" + UUID.randomUUID().toString());
+        cStatus.setName(namePrefix + "-" + UUID.randomUUID());
         cStatus.setBackPressureDataSizeThreshold("1 KB");  // sets backPressureBytesThreshold too
         cStatus.setBackPressureObjectThreshold(1l);
         cStatus.setInputBytes(2l);
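The UUID.randomUUID().toString() cleanups in the generator methods above work because string concatenation already applies String.valueOf, which calls toString() on non-null operands, so the explicit call is redundant. A one-screen illustration:

    import java.util.UUID;

    public class UuidConcat {
        public static void main(String[] args) {
            UUID id = UUID.randomUUID();
            // Concatenation invokes toString() implicitly on the operand.
            String a = "prefix-" + id;
            String b = "prefix-" + id.toString();
            System.out.println(a.equals(b)); // true
        }
    }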
diff --git a/nifi-extension-bundles/nifi-slack-bundle/nifi-slack-processors/src/test/java/org/apache/nifi/processors/slack/TestConsumeSlack.java b/nifi-extension-bundles/nifi-slack-bundle/nifi-slack-processors/src/test/java/org/apache/nifi/processors/slack/TestConsumeSlack.java
index 2d6222feef22..20f659c9e936 100644
--- a/nifi-extension-bundles/nifi-slack-bundle/nifi-slack-processors/src/test/java/org/apache/nifi/processors/slack/TestConsumeSlack.java
+++ b/nifi-extension-bundles/nifi-slack-bundle/nifi-slack-processors/src/test/java/org/apache/nifi/processors/slack/TestConsumeSlack.java
@@ -583,7 +583,7 @@ public void addUserMapping(final String userId, final String username) {
 
         @Override
         public Map<String, String> fetchChannelIds() {
-            final Map<String, String> nameIdMapping = new HashMap<String, String>();
+            final Map<String, String> nameIdMapping = new HashMap<>();
             nameIdMapping.put("#cname1", "cid1");
             nameIdMapping.put("#cname2", "cid2");
             return nameIdMapping;
diff --git a/nifi-extension-bundles/nifi-smb-bundle/nifi-smb-processors/src/main/java/org/apache/nifi/processors/smb/GetSmbFile.java b/nifi-extension-bundles/nifi-smb-bundle/nifi-smb-processors/src/main/java/org/apache/nifi/processors/smb/GetSmbFile.java
index 4ba2bb120276..5f90b2c6d651 100644
--- a/nifi-extension-bundles/nifi-smb-bundle/nifi-smb-processors/src/main/java/org/apache/nifi/processors/smb/GetSmbFile.java
+++ b/nifi-extension-bundles/nifi-smb-bundle/nifi-smb-processors/src/main/java/org/apache/nifi/processors/smb/GetSmbFile.java
@@ -241,7 +241,7 @@ public class GetSmbFile extends AbstractProcessor {
 
     @Override
     protected void init(final ProcessorInitializationContext context) {
-        final List<PropertyDescriptor> descriptors = new ArrayList<PropertyDescriptor>();
+        final List<PropertyDescriptor> descriptors = new ArrayList<>();
         descriptors.add(HOSTNAME);
         descriptors.add(SHARE);
         descriptors.add(DIRECTORY);
@@ -262,7 +262,7 @@ protected void init(final ProcessorInitializationContext context) {
         descriptors.add(TIMEOUT);
         this.descriptors = Collections.unmodifiableList(descriptors);
 
-        final Set<Relationship> relationships = new HashSet<Relationship>();
+        final Set<Relationship> relationships = new HashSet<>();
         relationships.add(REL_SUCCESS);
         this.relationships = Collections.unmodifiableSet(relationships);
     }
diff --git a/nifi-extension-bundles/nifi-smb-bundle/nifi-smb-processors/src/main/java/org/apache/nifi/processors/smb/PutSmbFile.java b/nifi-extension-bundles/nifi-smb-bundle/nifi-smb-processors/src/main/java/org/apache/nifi/processors/smb/PutSmbFile.java
index a57225beb55e..56014caad0c1 100644
--- a/nifi-extension-bundles/nifi-smb-bundle/nifi-smb-processors/src/main/java/org/apache/nifi/processors/smb/PutSmbFile.java
+++ b/nifi-extension-bundles/nifi-smb-bundle/nifi-smb-processors/src/main/java/org/apache/nifi/processors/smb/PutSmbFile.java
@@ -59,7 +59,6 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.EnumSet;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
@@ -181,28 +180,24 @@ public class PutSmbFile extends AbstractProcessor {
 
     @Override
     protected void init(final ProcessorInitializationContext context) {
-        final List<PropertyDescriptor> descriptors = new ArrayList<PropertyDescriptor>();
-        descriptors.add(HOSTNAME);
-        descriptors.add(SHARE);
-        descriptors.add(DIRECTORY);
-        descriptors.add(DOMAIN);
-        descriptors.add(USERNAME);
-        descriptors.add(PASSWORD);
-        descriptors.add(CREATE_DIRS);
-        descriptors.add(SHARE_ACCESS);
-        descriptors.add(CONFLICT_RESOLUTION);
-        descriptors.add(BATCH_SIZE);
-        descriptors.add(RENAME_SUFFIX);
-        descriptors.add(SMB_DIALECT);
-        descriptors.add(USE_ENCRYPTION);
-        descriptors.add(ENABLE_DFS);
-        descriptors.add(TIMEOUT);
-        this.descriptors = Collections.unmodifiableList(descriptors);
-
-        final Set<Relationship> relationships = new HashSet<Relationship>();
-        relationships.add(REL_SUCCESS);
-        relationships.add(REL_FAILURE);
-        this.relationships = Collections.unmodifiableSet(relationships);
+        this.descriptors = List.of(
+                HOSTNAME,
+                SHARE,
+                DIRECTORY,
+                DOMAIN,
+                USERNAME,
+                PASSWORD,
+                CREATE_DIRS,
+                SHARE_ACCESS,
+                CONFLICT_RESOLUTION,
+                BATCH_SIZE,
+                RENAME_SUFFIX,
+                SMB_DIALECT,
+                USE_ENCRYPTION,
+                ENABLE_DFS,
+                TIMEOUT);
+
+        this.relationships = Set.of(REL_SUCCESS, REL_FAILURE);
     }
 
     @Override
diff --git a/nifi-extension-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/utils/UsmJsonParser.java b/nifi-extension-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/utils/UsmJsonParser.java
index 3b6c98442cec..3a1a03203cef 100644
--- a/nifi-extension-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/utils/UsmJsonParser.java
+++ b/nifi-extension-bundles/nifi-snmp-bundle/nifi-snmp-processors/src/main/java/org/apache/nifi/snmp/utils/UsmJsonParser.java
@@ -41,7 +41,7 @@ private UsmJsonParser() {
     }
 
     static List<UsmUser> parse(final String json) throws JsonProcessingException {
-        return MAPPER.readValue(json, new TypeReference<List<UsmUser>>() {
+        return MAPPER.readValue(json, new TypeReference<>() {
         });
     }
 }
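The UsmJsonParser change above leans on a detail of anonymous classes: even with the diamond, the compiler records the inferred supertype in the class file, so Jackson's TypeReference can still recover the full generic type at runtime. A hedged sketch, assuming jackson-databind is on the classpath; UserAlias is a made-up payload type, not a NiFi or SNMP4J class:

    import com.fasterxml.jackson.core.type.TypeReference;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.List;

    public class TypeRefDemo {
        public static class UserAlias {  // hypothetical payload type
            public String name;
        }

        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            // The diamond works with anonymous classes since Java 9; the
            // element type is inferred from the assignment target and baked
            // into the anonymous class's generic superclass signature.
            List<UserAlias> users = mapper.readValue(
                    "[{\"name\":\"a\"}]", new TypeReference<>() { });
            System.out.println(users.size()); // 1
        }
    }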
diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-parameter-providers/src/test/java/org/apache/nifi/parameter/TestDatabaseParameterProvider.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-parameter-providers/src/test/java/org/apache/nifi/parameter/TestDatabaseParameterProvider.java
index ac880cfbb752..490007faa8ba 100644
--- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-parameter-providers/src/test/java/org/apache/nifi/parameter/TestDatabaseParameterProvider.java
+++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-parameter-providers/src/test/java/org/apache/nifi/parameter/TestDatabaseParameterProvider.java
@@ -111,22 +111,12 @@ public void testColumnStrategiesWithExtraProperties() throws SQLException {
     }
 
     private void runColumnStrategiesTest(final Map<PropertyDescriptor, String> properties) throws SQLException {
-        final List<Map<String, Object>> rows = Arrays.asList(
-                new HashMap<String, Object>() { {
-                    put("group", "Kafka"); put("name", "brokers"); put("value", "my-brokers"); put("unrelated_column", "unrelated_value");
-                } },
-                new HashMap<String, Object>() { {
-                    put("group", "Kafka"); put("name", "topic"); put("value", "my-topic"); put("unrelated_column", "unrelated_value");
-                } },
-                new HashMap<String, Object>() { {
-                    put("group", "Kafka"); put("name", "password"); put("value", "my-password"); put("unrelated_column", "unrelated_value");
-                } },
-                new HashMap<String, Object>() { {
-                    put("group", "S3"); put("name", "bucket"); put("value", "my-bucket"); put("unrelated_column", "unrelated_value");
-                } },
-                new HashMap<String, Object>() { {
-                    put("group", "S3"); put("name", "s3-password"); put("value", "my-s3-password"); put("unrelated_column", "unrelated_value");
-                } }
+        final List<Map<String, Object>> rows = List.of(
+                Map.of("group", "Kafka", "name", "brokers", "value", "my-brokers", "unrelated_column", "unrelated_value"),
+                Map.of("group", "Kafka", "name", "topic", "value", "my-topic", "unrelated_column", "unrelated_value"),
+                Map.of("group", "Kafka", "name", "password", "value", "my-password", "unrelated_column", "unrelated_value"),
+                Map.of("group", "S3", "name", "bucket", "value", "my-bucket", "unrelated_column", "unrelated_value"),
+                Map.of("group", "S3", "name", "s3-password", "value", "my-s3-password", "unrelated_column", "unrelated_value")
         );
 
         mockTableResults(new MockTable(TABLE_NAME, rows));
@@ -158,36 +148,18 @@ public void testNonColumnStrategiesWithExtraProperties() throws SQLException {
     }
 
     private void runNonColumnStrategyTest(final Map<PropertyDescriptor, String> properties) throws SQLException {
-        final List<Map<String, Object>> kafkaRows = Arrays.asList(
-                new HashMap<String, Object>() { {
-                    put("name", "nifi_brokers"); put("value", "my-brokers");
-                } },
-                new HashMap<String, Object>() { {
-                    put("name", "nifi_topic"); put("value", "my-topic");
-                } },
-                new HashMap<String, Object>() { {
-                    put("name", "unrelated_field"); put("value", "my-value");
-                } },
-                new HashMap<String, Object>() { {
-                    put("name", "kafka_password"); put("value", "my-password");
-                } },
-                new HashMap<String, Object>() { {
-                    put("name", "nifi_password"); put("value", "my-nifi-password");
-                } }
+        final List<Map<String, Object>> kafkaRows = List.of(
+                Map.of("name", "nifi_brokers", "value", "my-brokers"),
+                Map.of("name", "nifi_topic", "value", "my-topic"),
+                Map.of("name", "unrelated_field", "value", "my-value"),
+                Map.of("name", "kafka_password", "value", "my-password"),
+                Map.of("name", "nifi_password", "value", "my-nifi-password")
        );
-        final List<Map<String, Object>> s3Rows = Arrays.asList(
-                new HashMap<String, Object>() { {
-                    put("name", "nifi_s3_bucket"); put("value", "my-bucket");
-                } },
-                new HashMap<String, Object>() { {
-                    put("name", "s3_password"); put("value", "my-password");
-                } },
-                new HashMap<String, Object>() { {
-                    put("name", "nifi_other_field"); put("value", "my-field");
-                } },
-                new HashMap<String, Object>() { {
-                    put("name", "other_password"); put("value", "my-password");
-                } }
+        final List<Map<String, Object>> s3Rows = List.of(
+                Map.of("name", "nifi_s3_bucket", "value", "my-bucket"),
+                Map.of("name", "s3_password", "value", "my-password"),
+                Map.of("name", "nifi_other_field", "value", "my-field"),
+                Map.of("name", "other_password", "value", "my-password")
        );
 
         mockTableResults(new MockTable("KAFKA", kafkaRows), new MockTable("S3", s3Rows));
@@ -216,22 +188,28 @@ private void runNonColumnStrategyTest(final Map<PropertyDescriptor, String> prop
 
     @Test
     public void testNullNameColumn() throws SQLException {
-        mockTableResults(new MockTable(TABLE_NAME,
-                Arrays.asList(new HashMap<String, Object>() { { put("name", null); } })));
+        final Map<String, Object> mapWithNullValue = new HashMap<>();
+        mapWithNullValue.put("name", null);
+        mockTableResults(new MockTable(TABLE_NAME, List.of(mapWithNullValue)));
         runTestWithExpectedFailure(columnBasedProperties);
     }
 
     @Test
     public void testNullGroupNameColumn() throws SQLException {
-        mockTableResults(new MockTable(TABLE_NAME,
-                Arrays.asList(new HashMap<String, Object>() { { put("name", "param"); put("value", "value"); put("group", null); } })));
+        final Map<String, Object> mapWithNullGroupNameColumn = new HashMap<>();
+        mapWithNullGroupNameColumn.put("name", "param");
+        mapWithNullGroupNameColumn.put("value", "value");
+        mapWithNullGroupNameColumn.put("group", null);
+        mockTableResults(new MockTable(TABLE_NAME, List.of(mapWithNullGroupNameColumn)));
         runTestWithExpectedFailure(columnBasedProperties);
     }
 
     @Test
     public void testNullValueColumn() throws SQLException {
-        mockTableResults(new MockTable(TABLE_NAME,
-                Arrays.asList(new HashMap<String, Object>() { { put("name", "param"); put("value", null); } })));
+        final Map<String, Object> mapWithNullValueColumn = new HashMap<>();
+        mapWithNullValueColumn.put("name", "param");
+        mapWithNullValueColumn.put("value", null);
+        mockTableResults(new MockTable(TABLE_NAME, List.of(mapWithNullValueColumn)));
         runTestWithExpectedFailure(columnBasedProperties);
     }
diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
index f7c7e9093707..eaa9b3993548 100644
--- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
+++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java
@@ -226,7 +226,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session
         }
 
         Map<PropertyDescriptor, String> processorProperties = context.getProperties();
-        Map<String, String> generatedAttributes = new HashMap<String, String>();
+        Map<String, String> generatedAttributes = new HashMap<>();
         for (final Map.Entry<PropertyDescriptor, String> entry : processorProperties.entrySet()) {
             PropertyDescriptor property = entry.getKey();
             if (property.isDynamic() && property.isExpressionLanguageSupported()) {
diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFileResource.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFileResource.java
index e04029621edb..7aa1471fff4d 100644
--- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFileResource.java
+++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFileResource.java
@@ -122,7 +122,7 @@ public Set<Relationship> getRelationships() {
     @Override
     public void onTrigger(final ProcessContext context, final ProcessSession session) {
         final Map<PropertyDescriptor, String> processorProperties = context.getProperties();
-        final Map<String, String> generatedAttributes = new HashMap<String, String>();
+        final Map<String, String> generatedAttributes = new HashMap<>();
         for (final Map.Entry<PropertyDescriptor, String> entry : processorProperties.entrySet()) {
             final PropertyDescriptor property = entry.getKey();
             if (property.isDynamic()) {
diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java
index a21fb25bba24..c242671d2d6e 100644
--- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java
+++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java
@@ -528,7 +528,7 @@ private List<FileInfo> performListing(final ProcessContext context, final Long m
 
         int maxDepth = recurse ? Integer.MAX_VALUE : 1;
 
-        final BiPredicate<Path, BasicFileAttributes> matcher = new BiPredicate<Path, BasicFileAttributes>() {
+        final BiPredicate<Path, BasicFileAttributes> matcher = new BiPredicate<>() {
             private long lastTimestamp = System.currentTimeMillis();
 
             @Override
@@ -579,7 +579,7 @@ public boolean test(final Path path, final BasicFileAttributes attributes) {
         final long start = System.currentTimeMillis();
         final List<FileInfo> result = new LinkedList<>();
 
-        Files.walkFileTree(basePath, Set.of(FileVisitOption.FOLLOW_LINKS), maxDepth, new FileVisitor<Path>() {
+        Files.walkFileTree(basePath, Set.of(FileVisitOption.FOLLOW_LINKS), maxDepth, new FileVisitor<>() {
             @Override
             public FileVisitResult preVisitDirectory(final Path dir, final BasicFileAttributes attributes) {
                 if (Files.isReadable(dir)) {
@@ -841,7 +841,7 @@ public RollingMetricPerformanceTracker(final ComponentLog logger, final long max
             this.logger = logger;
             this.maxDiskOperationMillis = maxDiskOperationMillis;
 
-            directoryToTimingInfo = new LinkedHashMap<Tuple<String, Integer>, TimingInfo>() {
+            directoryToTimingInfo = new LinkedHashMap<>() {
                 @Override
                 protected boolean removeEldestEntry(final Map.Entry<Tuple<String, Integer>, TimingInfo> eldest) {
                     return size() > maxEntries;
diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenTCP.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenTCP.java
index b72c4dbc5061..f899e713dcbc 100644
--- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenTCP.java
+++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenTCP.java
@@ -285,7 +285,7 @@ private String getMessageDemarcator(final ProcessContext context) {
 
     private EventBatcher<ByteArrayMessage> getEventBatcher() {
         if (eventBatcher == null) {
-            eventBatcher = new EventBatcher<ByteArrayMessage>(getLogger(), events, errorEvents) {
+            eventBatcher = new EventBatcher<>(getLogger(), events, errorEvents) {
                 @Override
                 protected String getBatchKey(ByteArrayMessage event) {
                     return event.getSender();
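The BiPredicate, FileVisitor, EventBatcher, and AtomicReference changes in the surrounding files all use Java 9's extension of the diamond operator to anonymous classes, which is the core pattern of this patch: spell the type arguments once on the declaration and let the compiler infer them at the creation site. A minimal example:

    import java.util.function.BiPredicate;

    public class DiamondAnonymous {
        public static void main(String[] args) {
            // Before Java 9 the type arguments had to be repeated when
            // subclassing anonymously; the diamond now infers them.
            BiPredicate<String, Integer> longerThan = new BiPredicate<>() {
                @Override
                public boolean test(String s, Integer n) {
                    return s.length() > n;
                }
            };
            System.out.println(longerThan.test("nifi", 3)); // true
        }
    }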
AtomicReference(null); + final AtomicReference valid = new AtomicReference<>(true); + final AtomicReference isFirstLineValid = new AtomicReference<>(true); + final AtomicReference isFirstLineInvalid = new AtomicReference<>(true); + final AtomicReference okCount = new AtomicReference<>(0); + final AtomicReference totalCount = new AtomicReference<>(0); + final AtomicReference invalidFF = new AtomicReference<>(null); + final AtomicReference validFF = new AtomicReference<>(null); + final AtomicReference validationError = new AtomicReference<>(null); if (!isWholeFFValidation) { invalidFF.set(session.create(flowFile)); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/sql/DefaultAvroSqlWriter.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/sql/DefaultAvroSqlWriter.java index 0e73e39ee997..198768858dc9 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/sql/DefaultAvroSqlWriter.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/sql/DefaultAvroSqlWriter.java @@ -35,7 +35,7 @@ public class DefaultAvroSqlWriter implements SqlWriter { private final AvroConversionOptions options; - private final Map attributesToAdd = new HashMap() {{ + private final Map attributesToAdd = new HashMap<>() {{ put(CoreAttributes.MIME_TYPE.key(), JdbcCommon.MIME_TYPE_AVRO_BINARY); }}; diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestAttributesToCSV.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestAttributesToCSV.java index 3aade85e51dc..ea2a193082cd 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestAttributesToCSV.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestAttributesToCSV.java @@ -23,17 +23,17 @@ import org.apache.nifi.util.TestRunners; import org.junit.jupiter.api.Test; -import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.Map; -import java.util.HashMap; import java.util.Set; import java.util.HashSet; import java.util.List; import java.util.regex.Pattern; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -45,7 +45,7 @@ public class TestAttributesToCSV { private static final String OUTPUT_SEPARATOR = ","; private static final String OUTPUT_MIME_TYPE = "text/csv"; private static final String SPLIT_REGEX = OUTPUT_SEPARATOR + "(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"; - private static final String newline = System.getProperty("line.separator"); + private static final String newline = System.lineSeparator(); @Test public void testAttrListNoCoreNullOffNewAttrToAttribute() { @@ -59,13 +59,13 @@ public void testAttrListNoCoreNullOffNewAttrToAttribute() { testRunner.enqueue(new byte[0]); testRunner.run(); - testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).get(0) + 
testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).getFirst() .assertAttributeExists("CSVData"); testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS) - .get(0).assertAttributeEquals("CSVData", ""); + .getFirst().assertAttributeEquals("CSVData", ""); } @Test @@ -76,19 +76,19 @@ public void testAttrListNoCoreNullOffNewAttrToContent() { testRunner.setProperty(AttributesToCSV.INCLUDE_CORE_ATTRIBUTES, "false"); testRunner.setProperty(AttributesToCSV.NULL_VALUE_FOR_EMPTY_STRING, "false"); - //use only one attribute, which does not exists, as the list of attributes to convert to csv + //use only one attribute, which does not exist, as the list of attributes to convert to csv final String NON_PRESENT_ATTRIBUTE_KEY = "beach-type"; testRunner.setProperty(AttributesToCSV.ATTRIBUTES_LIST, NON_PRESENT_ATTRIBUTE_KEY); testRunner.enqueue(new byte[0]); testRunner.run(); - testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).getFirst() .assertAttributeExists("CSVData"); testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS) - .get(0).assertAttributeEquals("CSVData", ""); + .getFirst().assertAttributeEquals("CSVData", ""); } @Test @@ -103,13 +103,13 @@ public void testAttrListNoCoreNullOffTwoNewAttrToAttribute() { testRunner.enqueue(new byte[0]); testRunner.run(); - testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).getFirst() .assertAttributeExists("CSVData"); testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS) - .get(0).assertAttributeEquals("CSVData", ","); + .getFirst().assertAttributeEquals("CSVData", ","); } @Test @@ -124,13 +124,13 @@ public void testAttrListNoCoreNullTwoNewAttrToAttribute() { testRunner.enqueue(new byte[0]); testRunner.run(); - testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).getFirst() .assertAttributeExists("CSVData"); testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS) - .get(0).assertAttributeEquals("CSVData", "null,null"); + .getFirst().assertAttributeEquals("CSVData", "null,null"); } @Test @@ -143,13 +143,13 @@ public void testNoAttrListNoCoreNullOffToAttribute() { testRunner.enqueue(new byte[0]); testRunner.run(); - testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).getFirst() .assertAttributeExists("CSVData"); testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS) - .get(0).assertAttributeEquals("CSVData", ""); + .getFirst().assertAttributeEquals("CSVData", ""); } @Test @@ -161,28 +161,25 @@ public void testNoAttrListNoCoreNullToAttribute() { testRunner.enqueue(new byte[0]); 
testRunner.run(); - testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).getFirst() .assertAttributeExists("CSVData"); testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS) - .get(0).assertAttributeEquals("CSVData", ""); + .getFirst().assertAttributeEquals("CSVData", ""); } @Test - public void testNoAttrListCoreNullOffToContent() throws IOException { + public void testNoAttrListCoreNullOffToContent() { final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToCSV()); testRunner.setProperty(AttributesToCSV.DESTINATION, OUTPUT_OVERWRITE_CONTENT); testRunner.setProperty(AttributesToCSV.INCLUDE_CORE_ATTRIBUTES, "true"); testRunner.setProperty(AttributesToCSV.NULL_VALUE_FOR_EMPTY_STRING, "false"); - final Map attrs = new HashMap() {{ - put("beach-name", "Malibu Beach"); - put("beach-location", "California, US"); - put("beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim"); - }}; + final Map attrs = Map.of("beach-name", "Malibu Beach", "beach-location", "California, US", + "beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim"); testRunner.enqueue(new byte[0], attrs); testRunner.run(); @@ -192,13 +189,13 @@ public void testNoAttrListCoreNullOffToContent() throws IOException { testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); - MockFlowFile flowFile = flowFilesForRelationship.get(0); + MockFlowFile flowFile = flowFilesForRelationship.getFirst(); assertEquals(OUTPUT_MIME_TYPE, flowFile.getAttribute(CoreAttributes.MIME_TYPE.key())); final byte[] contentData = testRunner.getContentAsByteArray(flowFile); - final String contentDataString = new String(contentData, "UTF-8"); + final String contentDataString = new String(contentData, StandardCharsets.UTF_8); Set contentValues = new HashSet<>(getStrings(contentDataString)); @@ -218,11 +215,8 @@ public void testNoAttrListCoreNullOffToAttribute() { testRunner.setProperty(AttributesToCSV.INCLUDE_CORE_ATTRIBUTES, "true"); testRunner.setProperty(AttributesToCSV.NULL_VALUE_FOR_EMPTY_STRING, "false"); - Map attrs = new HashMap() {{ - put("beach-name", "Malibu Beach"); - put("beach-location", "California, US"); - put("beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim"); - }}; + Map attrs = Map.of("beach-name", "Malibu Beach", "beach-location", "California, US", + "beach-endorsement", "This is our family's favorite beach. We highly recommend it. 
\n\nThanks, Jim"); testRunner.enqueue(new byte[0], attrs); testRunner.run(); @@ -232,7 +226,7 @@ public void testNoAttrListCoreNullOffToAttribute() { testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); - MockFlowFile flowFile = flowFilesForRelationship.get(0); + MockFlowFile flowFile = flowFilesForRelationship.getFirst(); assertNull(flowFile.getAttribute(CoreAttributes.MIME_TYPE.key())); @@ -252,17 +246,14 @@ public void testNoAttrListCoreNullOffToAttribute() { } @Test - public void testNoAttrListNoCoreNullOffToContent() throws IOException { + public void testNoAttrListNoCoreNullOffToContent() { final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToCSV()); testRunner.setProperty(AttributesToCSV.DESTINATION, OUTPUT_OVERWRITE_CONTENT); testRunner.setProperty(AttributesToCSV.INCLUDE_CORE_ATTRIBUTES, "false"); testRunner.setProperty(AttributesToCSV.NULL_VALUE_FOR_EMPTY_STRING, "false"); - Map attrs = new HashMap() {{ - put("beach-name", "Malibu Beach"); - put("beach-location", "California, US"); - put("beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim"); - }}; + Map attrs = Map.of("beach-name", "Malibu Beach", "beach-location", "California, US", + "beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim"); testRunner.enqueue(new byte[0], attrs); testRunner.run(); @@ -272,13 +263,13 @@ public void testNoAttrListNoCoreNullOffToContent() throws IOException { testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); - MockFlowFile flowFile = flowFilesForRelationship.get(0); + MockFlowFile flowFile = flowFilesForRelationship.getFirst(); assertEquals(OUTPUT_MIME_TYPE, flowFile.getAttribute(CoreAttributes.MIME_TYPE.key())); final byte[] contentData = testRunner.getContentAsByteArray(flowFile); - final String contentDataString = new String(contentData, "UTF-8"); + final String contentDataString = new String(contentData, StandardCharsets.UTF_8); Set contentValues = new HashSet<>(getStrings(contentDataString)); assertEquals(3, contentValues.size()); @@ -298,12 +289,9 @@ public void testAttrListNoCoreNullOffToAttribute() { testRunner.setProperty(AttributesToCSV.ATTRIBUTES_LIST, "beach-name,beach-location,beach-endorsement"); testRunner.setProperty(AttributesToCSV.NULL_VALUE_FOR_EMPTY_STRING, "false"); - Map attrs = new HashMap() {{ - put("beach-name", "Malibu Beach"); - put("beach-location", "California, US"); - put("beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim"); - put("attribute-should-be-eliminated", "This should not be in CSVAttribute!"); - }}; + Map attrs = Map.of("beach-name", "Malibu Beach", "beach-location", "California, US", + "beach-endorsement", "This is our family's favorite beach. We highly recommend it. 
\n\nThanks, Jim", + "attribute-should-be-eliminated", "This should not be in CSVAttribute!"); testRunner.enqueue(new byte[0], attrs); testRunner.run(); @@ -313,7 +301,7 @@ public void testAttrListNoCoreNullOffToAttribute() { testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); - MockFlowFile flowFile = flowFilesForRelationship.get(0); + MockFlowFile flowFile = flowFilesForRelationship.getFirst(); assertNull(flowFile.getAttribute(CoreAttributes.MIME_TYPE.key())); @@ -337,12 +325,10 @@ public void testAttrListCoreNullOffToAttribute() { testRunner.setProperty(AttributesToCSV.ATTRIBUTES_LIST, "beach-name,beach-location,beach-endorsement"); testRunner.setProperty(AttributesToCSV.NULL_VALUE_FOR_EMPTY_STRING, "false"); - Map attrs = new HashMap() {{ - put("beach-name", "Malibu Beach"); - put("beach-location", "California, US"); - put("beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim"); - put("attribute-should-be-eliminated", "This should not be in CSVData!"); - }}; + Map attrs = Map.of("beach-name", "Malibu Beach", + "beach-location", "California, US", + "beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim", + "attribute-should-be-eliminated", "This should not be in CSVData!"); testRunner.enqueue(new byte[0], attrs); testRunner.run(); @@ -352,7 +338,7 @@ public void testAttrListCoreNullOffToAttribute() { testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); - MockFlowFile flowFile = flowFilesForRelationship.get(0); + MockFlowFile flowFile = flowFilesForRelationship.getFirst(); assertNull(flowFile.getAttribute(CoreAttributes.MIME_TYPE.key())); @@ -379,12 +365,10 @@ public void testAttrListNoCoreNullOffOverrideCoreByAttrListToAttribute() { testRunner.setProperty(AttributesToCSV.ATTRIBUTES_LIST, "beach-name,beach-location,beach-endorsement,uuid"); testRunner.setProperty(AttributesToCSV.NULL_VALUE_FOR_EMPTY_STRING, "false"); - Map attrs = new HashMap() {{ - put("beach-name", "Malibu Beach"); - put("beach-location", "California, US"); - put("beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim"); - put("attribute-should-be-eliminated", "This should not be in CSVData!"); - }}; + Map attrs = Map.of("beach-name", "Malibu Beach", + "beach-location", "California, US", + "beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim", + "attribute-should-be-eliminated", "This should not be in CSVData!"); testRunner.enqueue(new byte[0], attrs); testRunner.run(); @@ -394,7 +378,7 @@ public void testAttrListNoCoreNullOffOverrideCoreByAttrListToAttribute() { testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); - MockFlowFile flowFile = flowFilesForRelationship.get(0); + MockFlowFile flowFile = flowFilesForRelationship.getFirst(); assertNull(flowFile.getAttribute(CoreAttributes.MIME_TYPE.key())); @@ -409,8 +393,8 @@ public void testAttrListNoCoreNullOffOverrideCoreByAttrListToAttribute() { assertTrue(CSVDataValues.contains("\"This is our family's favorite beach. We highly recommend it. 
\n\nThanks, Jim\"")); - assertTrue(!CSVDataValues.contains(flowFile.getAttribute("filename"))); - assertTrue(!CSVDataValues.contains(flowFile.getAttribute("path"))); + assertFalse(CSVDataValues.contains(flowFile.getAttribute("filename"))); + assertFalse(CSVDataValues.contains(flowFile.getAttribute("path"))); assertTrue(CSVDataValues.contains(flowFile.getAttribute("uuid"))); } @@ -422,13 +406,11 @@ public void testAttrListFromExpCoreNullOffToAttribute() { testRunner.setProperty(AttributesToCSV.ATTRIBUTES_LIST, "${myAttribs}"); testRunner.setProperty(AttributesToCSV.NULL_VALUE_FOR_EMPTY_STRING, "false"); - Map attrs = new HashMap() {{ - put("beach-name", "Malibu Beach"); - put("beach-location", "California, US"); - put("beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim"); - put("attribute-should-be-eliminated", "This should not be in CSVData!"); - put("myAttribs", "beach-name,beach-location,beach-endorsement"); - }}; + Map attrs = Map.of("beach-name", "Malibu Beach", + "beach-location", "California, US", + "beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim", + "attribute-should-be-eliminated", "This should not be in CSVData!", + "myAttribs", "beach-name,beach-location,beach-endorsement"); testRunner.enqueue(new byte[0], attrs); testRunner.run(); @@ -439,7 +421,7 @@ public void testAttrListFromExpCoreNullOffToAttribute() { testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); //Test flow file 0 with ATTRIBUTE_LIST populated from expression language - MockFlowFile flowFile = flowFilesForRelationship.get(0); + MockFlowFile flowFile = flowFilesForRelationship.getFirst(); assertNull(flowFile.getAttribute(CoreAttributes.MIME_TYPE.key())); @@ -458,7 +440,7 @@ public void testAttrListFromExpCoreNullOffToAttribute() { assertTrue(CSVDataValues.contains(flowFile.getAttribute("uuid"))); //Test flow file 1 with ATTRIBUTE_LIST populated from expression language containing commas (output should be he same) - flowFile = flowFilesForRelationship.get(0); + flowFile = flowFilesForRelationship.getFirst(); assertNull(flowFile.getAttribute(CoreAttributes.MIME_TYPE.key())); @@ -487,13 +469,11 @@ public void testAttrListWithCommasInNameFromExpCoreNullOffToAttribute() { testRunner.setProperty(AttributesToCSV.NULL_VALUE_FOR_EMPTY_STRING, "false"); - Map attrsCommaInName = new HashMap() {{ - put("beach,name", "Malibu Beach"); - put("beach,location", "California, US"); - put("beach,endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim"); - put("attribute-should-be-eliminated", "This should not be in CSVData!"); - put("myAttribs", "\"beach,name\",\"beach,location\",\"beach,endorsement\""); - }}; + Map attrsCommaInName = Map.of("beach,name", "Malibu Beach", + "beach,location", "California, US", + "beach,endorsement", "This is our family's favorite beach. We highly recommend it. 
\n\nThanks, Jim", + "attribute-should-be-eliminated", "This should not be in CSVData!", + "myAttribs", "\"beach,name\",\"beach,location\",\"beach,endorsement\""); testRunner.enqueue(new byte[0], attrsCommaInName); testRunner.run(); @@ -504,7 +484,7 @@ public void testAttrListWithCommasInNameFromExpCoreNullOffToAttribute() { testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); //Test flow file 0 with ATTRIBUTE_LIST populated from expression language - MockFlowFile flowFile = flowFilesForRelationship.get(0); + MockFlowFile flowFile = flowFilesForRelationship.getFirst(); assertNull(flowFile.getAttribute(CoreAttributes.MIME_TYPE.key())); @@ -523,7 +503,7 @@ public void testAttrListWithCommasInNameFromExpCoreNullOffToAttribute() { assertTrue(CSVDataValues.contains(flowFile.getAttribute("uuid"))); //Test flow file 1 with ATTRIBUTE_LIST populated from expression language containing commas (output should be he same) - flowFile = flowFilesForRelationship.get(0); + flowFile = flowFilesForRelationship.getFirst(); assertNull(flowFile.getAttribute(CoreAttributes.MIME_TYPE.key())); @@ -552,13 +532,11 @@ public void testAttrListFromExpNoCoreNullOffOverrideCoreByAttrListToAttribute() testRunner.setProperty(AttributesToCSV.ATTRIBUTES_LIST, "${myAttribs}"); testRunner.setProperty(AttributesToCSV.NULL_VALUE_FOR_EMPTY_STRING, "false"); - Map attrs = new HashMap() {{ - put("beach-name", "Malibu Beach"); - put("beach-location", "California, US"); - put("beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim"); - put("attribute-should-be-eliminated", "This should not be in CSVData!"); - put("myAttribs", "beach-name,beach-location,beach-endorsement"); - }}; + Map attrs = Map.of("beach-name", "Malibu Beach", + "beach-location", "California, US", + "beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim", + "attribute-should-be-eliminated", "This should not be in CSVData!", + "myAttribs", "beach-name,beach-location,beach-endorsement"); testRunner.enqueue(new byte[0], attrs); testRunner.run(); @@ -568,7 +546,7 @@ public void testAttrListFromExpNoCoreNullOffOverrideCoreByAttrListToAttribute() testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); - MockFlowFile flowFile = flowFilesForRelationship.get(0); + MockFlowFile flowFile = flowFilesForRelationship.getFirst(); assertNull(flowFile.getAttribute(CoreAttributes.MIME_TYPE.key())); @@ -583,9 +561,9 @@ public void testAttrListFromExpNoCoreNullOffOverrideCoreByAttrListToAttribute() assertTrue(CSVDataValues.contains("\"This is our family's favorite beach. We highly recommend it. 
\n\nThanks, Jim\"")); - assertTrue(!CSVDataValues.contains(flowFile.getAttribute("filename"))); - assertTrue(!CSVDataValues.contains(flowFile.getAttribute("path"))); - assertTrue(!CSVDataValues.contains(flowFile.getAttribute("uuid"))); + assertFalse(CSVDataValues.contains(flowFile.getAttribute("filename"))); + assertFalse(CSVDataValues.contains(flowFile.getAttribute("path"))); + assertFalse(CSVDataValues.contains(flowFile.getAttribute("uuid"))); } @Test @@ -596,13 +574,11 @@ public void testAttributesRegex() { testRunner.setProperty(AttributesToCSV.ATTRIBUTES_REGEX, "${myRegEx}"); testRunner.setProperty(AttributesToCSV.NULL_VALUE_FOR_EMPTY_STRING, "false"); - Map attrs = new HashMap() {{ - put("beach-name", "Malibu Beach"); - put("beach-location", "California, US"); - put("beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim"); - put("attribute-should-be-eliminated", "This should not be in CSVData!"); - put("myRegEx", "beach-.*"); - }}; + Map attrs = Map.of("beach-name", "Malibu Beach", + "beach-location", "California, US", + "beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim", + "attribute-should-be-eliminated", "This should not be in CSVData!", + "myRegEx", "beach-.*"); testRunner.enqueue(new byte[0], attrs); testRunner.run(); @@ -612,7 +588,7 @@ public void testAttributesRegex() { testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); - MockFlowFile flowFile = flowFilesForRelationship.get(0); + MockFlowFile flowFile = flowFilesForRelationship.getFirst(); assertNull(flowFile.getAttribute(CoreAttributes.MIME_TYPE.key())); @@ -627,9 +603,9 @@ public void testAttributesRegex() { assertTrue(CSVDataValues.contains("\"This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim\"")); - assertTrue(!CSVDataValues.contains(flowFile.getAttribute("filename"))); - assertTrue(!CSVDataValues.contains(flowFile.getAttribute("path"))); - assertTrue(!CSVDataValues.contains(flowFile.getAttribute("uuid"))); + assertFalse(CSVDataValues.contains(flowFile.getAttribute("filename"))); + assertFalse(CSVDataValues.contains(flowFile.getAttribute("path"))); + assertFalse(CSVDataValues.contains(flowFile.getAttribute("uuid"))); } @Test @@ -641,15 +617,13 @@ public void testAttributesRegexAndList() { testRunner.setProperty(AttributesToCSV.ATTRIBUTES_LIST, "moreInfo1,moreInfo2"); testRunner.setProperty(AttributesToCSV.NULL_VALUE_FOR_EMPTY_STRING, "false"); - Map attrs = new HashMap() {{ - put("beach-name", "Malibu Beach"); - put("beach-location", "California, US"); - put("beach-endorsement", "This is our family's favorite beach. We highly recommend it. \n\nThanks, Jim"); - put("attribute-should-be-eliminated", "This should not be in CSVData!"); - put("myRegEx", "beach-.*"); - put("moreInfo1", "A+ Rating"); - put("moreInfo2", "Avg Temp: 61f"); - }}; + Map attrs = Map.of("beach-name", "Malibu Beach", + "beach-location", "California, US", + "beach-endorsement", "This is our family's favorite beach. We highly recommend it. 
\n\nThanks, Jim", + "attribute-should-be-eliminated", "This should not be in CSVData!", + "myRegEx", "beach-.*", + "moreInfo1", "A+ Rating", + "moreInfo2", "Avg Temp: 61f"); testRunner.enqueue(new byte[0], attrs); testRunner.run(); @@ -659,7 +633,7 @@ public void testAttributesRegexAndList() { testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); - MockFlowFile flowFile = flowFilesForRelationship.get(0); + MockFlowFile flowFile = flowFilesForRelationship.getFirst(); assertNull(flowFile.getAttribute(CoreAttributes.MIME_TYPE.key())); @@ -675,9 +649,9 @@ public void testAttributesRegexAndList() { assertTrue(CSVDataValues.contains("A+ Rating")); assertTrue(CSVDataValues.contains("Avg Temp: 61f")); - assertTrue(!CSVDataValues.contains(flowFile.getAttribute("filename"))); - assertTrue(!CSVDataValues.contains(flowFile.getAttribute("path"))); - assertTrue(!CSVDataValues.contains(flowFile.getAttribute("uuid"))); + assertFalse(CSVDataValues.contains(flowFile.getAttribute("filename"))); + assertFalse(CSVDataValues.contains(flowFile.getAttribute("path"))); + assertFalse(CSVDataValues.contains(flowFile.getAttribute("uuid"))); } @@ -690,30 +664,29 @@ public void testSchemaToAttribute() { testRunner.setProperty(AttributesToCSV.INCLUDE_SCHEMA, "true"); testRunner.setProperty(AttributesToCSV.ATTRIBUTES_REGEX, "beach-.*"); - Map attrs = new LinkedHashMap() {{ - put("beach-name", "Malibu Beach"); - put("beach-location", "California, US"); - put("attribute-should-be-eliminated", "This should not be in CSVData!"); - }}; + Map attrs = new LinkedHashMap<>(); + attrs.put("beach-name", "Malibu Beach"); + attrs.put("beach-location", "California, US"); + attrs.put("attribute-should-be-eliminated", "This should not be in CSVData!"); testRunner.enqueue(new byte[0], attrs); testRunner.run(); - testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).getFirst() .assertAttributeExists("CSVData"); - testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).getFirst() .assertAttributeExists("CSVSchema"); testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS) - .get(0).assertAttributeEquals("CSVData", "Malibu Beach,\"California, US\""); + .getFirst().assertAttributeEquals("CSVData", "Malibu Beach,\"California, US\""); testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS) - .get(0).assertAttributeEquals("CSVSchema", "beach-name,beach-location"); + .getFirst().assertAttributeEquals("CSVSchema", "beach-name,beach-location"); } @Test - public void testSchemaToContent() throws Exception { + public void testSchemaToContent() { final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToCSV()); //set the destination of the csv string to be an attribute testRunner.setProperty(AttributesToCSV.DESTINATION, OUTPUT_OVERWRITE_CONTENT); @@ -722,11 +695,10 @@ public void testSchemaToContent() throws Exception { testRunner.setProperty(AttributesToCSV.INCLUDE_SCHEMA, "true"); testRunner.setProperty(AttributesToCSV.ATTRIBUTES_REGEX, "beach-.*"); - Map attrs = new LinkedHashMap() {{ - put("beach-name", "Malibu Beach"); - put("beach-location", "California, US"); - put("attribute-should-be-eliminated", "This should not be in CSVData!"); 
- }}; + Map attrs = new LinkedHashMap<>(); + attrs.put("beach-name", "Malibu Beach"); + attrs.put("beach-location", "California, US"); + attrs.put("attribute-should-be-eliminated", "This should not be in CSVData!"); testRunner.enqueue(new byte[0], attrs); testRunner.run(); @@ -734,13 +706,13 @@ public void testSchemaToContent() throws Exception { testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); - MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).get(0); + MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).getFirst(); flowFile.assertAttributeNotExists("CSVData"); flowFile.assertAttributeNotExists("CSVSchema"); final byte[] contentData = testRunner.getContentAsByteArray(flowFile); - final String contentDataString = new String(contentData, "UTF-8"); + final String contentDataString = new String(contentData, StandardCharsets.UTF_8); assertEquals(contentDataString.split(newline)[0], "beach-name,beach-location"); assertEquals(contentDataString.split(newline)[1], "Malibu Beach,\"California, US\""); } @@ -755,11 +727,10 @@ public void testSchemaWithCoreAttribuesToAttribute() { testRunner.setProperty(AttributesToCSV.INCLUDE_SCHEMA, "true"); testRunner.setProperty(AttributesToCSV.ATTRIBUTES_REGEX, "beach-.*"); - Map attrs = new LinkedHashMap() {{ - put("beach-name", "Malibu Beach"); - put("beach-location", "California, US"); - put("attribute-should-be-eliminated", "This should not be in CSVData!"); - }}; + Map attrs = new LinkedHashMap<>(); + attrs.put("beach-name", "Malibu Beach"); + attrs.put("beach-location", "California, US"); + attrs.put("attribute-should-be-eliminated", "This should not be in CSVData!"); testRunner.enqueue(new byte[0], attrs); testRunner.run(); @@ -767,7 +738,7 @@ public void testSchemaWithCoreAttribuesToAttribute() { testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); - MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).get(0); + MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).getFirst(); flowFile.assertAttributeExists("CSVData"); flowFile.assertAttributeExists("CSVSchema"); @@ -780,7 +751,7 @@ public void testSchemaWithCoreAttribuesToAttribute() { } @Test - public void testSchemaWithCoreAttribuesToContent() throws Exception { + public void testSchemaWithCoreAttribuesToContent() { final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToCSV()); //set the destination of the csv string to be an attribute testRunner.setProperty(AttributesToCSV.DESTINATION, OUTPUT_OVERWRITE_CONTENT); @@ -789,11 +760,10 @@ public void testSchemaWithCoreAttribuesToContent() throws Exception { testRunner.setProperty(AttributesToCSV.INCLUDE_SCHEMA, "true"); testRunner.setProperty(AttributesToCSV.ATTRIBUTES_REGEX, "beach-.*"); - Map attrs = new LinkedHashMap() {{ - put("beach-name", "Malibu Beach"); - put("beach-location", "California, US"); - put("attribute-should-be-eliminated", "This should not be in CSVData!"); - }}; + Map attrs = new LinkedHashMap<>(); + attrs.put("beach-name", "Malibu Beach"); + attrs.put("beach-location", "California, US"); + attrs.put("attribute-should-be-eliminated", "This should not be in CSVData!"); testRunner.enqueue(new byte[0], attrs); testRunner.run(); @@ -801,7 +771,7 @@ public void testSchemaWithCoreAttribuesToContent() throws Exception 
{ testRunner.assertTransferCount(AttributesToCSV.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToCSV.REL_FAILURE, 0); - MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).get(0); + MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(AttributesToCSV.REL_SUCCESS).getFirst(); flowFile.assertAttributeNotExists("CSVData"); flowFile.assertAttributeNotExists("CSVSchema"); @@ -811,7 +781,7 @@ public void testSchemaWithCoreAttribuesToContent() throws Exception { final byte[] contentData = testRunner.getContentAsByteArray(flowFile); - final String contentDataString = new String(contentData, "UTF-8"); + final String contentDataString = new String(contentData, StandardCharsets.UTF_8); assertEquals(contentDataString.split(newline)[0], "beach-name,beach-location,path,filename,uuid"); assertEquals(contentDataString.split(newline)[1], "Malibu Beach,\"California, US\"," + path + "," + filename + "," + uuid); } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDebugFlow.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDebugFlow.java index b209724a8a83..9ead535fcf22 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDebugFlow.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDebugFlow.java @@ -50,7 +50,7 @@ public void setup() throws IOException { String filename = "testFile" + (n + 1) + ".txt"; String content = "Hello World " + (n + 1) + "!"; contents.put(n, content); - attribs.put(n, new HashMap()); + attribs.put(n, new HashMap<>()); attribs.get(n).put(CoreAttributes.FILENAME.key(), filename); attribs.get(n).put(CoreAttributes.UUID.key(), "TESTING-FILE-" + (n + 1) + "-TESTING"); namesToContent.put(filename, content); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDuplicateFlowFile.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDuplicateFlowFile.java index 963e1ae8af82..b4f63034cdc2 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDuplicateFlowFile.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestDuplicateFlowFile.java @@ -21,8 +21,8 @@ import org.apache.nifi.util.TestRunners; import org.junit.jupiter.api.Test; -import java.util.HashMap; import java.util.List; +import java.util.Map; import static org.apache.nifi.processors.standard.DuplicateFlowFile.COPY_INDEX_ATTRIBUTE; @@ -51,9 +51,7 @@ public void testNumberOfCopiesEL() { final TestRunner runner = TestRunners.newTestRunner(DuplicateFlowFile.class); runner.setProperty(DuplicateFlowFile.NUM_COPIES, "${num.copies}"); - runner.enqueue("hello".getBytes(), new HashMap() {{ - put("num.copies", "100"); - }}); + runner.enqueue("hello".getBytes(), Map.of("num.copies", "100")); runner.run(); runner.assertAllFlowFilesTransferred(DuplicateFlowFile.REL_SUCCESS, 101); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExecuteSQL.java 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExecuteSQL.java index 5e5624b73616..2ee9c1ddea3e 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExecuteSQL.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExecuteSQL.java @@ -21,7 +21,6 @@ import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.generic.GenericRecord; import org.apache.avro.io.DatumReader; -import org.apache.commons.compress.compressors.CompressorException; import org.apache.nifi.controller.AbstractControllerService; import org.apache.nifi.dbcp.DBCPService; import org.apache.nifi.flowfile.attributes.CoreAttributes; @@ -116,7 +115,7 @@ public void setup() throws InitializationException { } @Test - public void testIncomingConnectionWithNoFlowFile() throws InitializationException { + public void testIncomingConnectionWithNoFlowFile() { runner.setIncomingConnection(true); runner.setProperty(ExecuteSQL.SQL_SELECT_QUERY, "SELECT * FROM persons"); runner.run(); @@ -125,7 +124,7 @@ public void testIncomingConnectionWithNoFlowFile() throws InitializationExceptio } @Test - public void testIncomingConnectionWithNoFlowFileAndNoQuery() throws InitializationException { + public void testIncomingConnectionWithNoFlowFileAndNoQuery() { runner.setIncomingConnection(true); runner.run(); runner.assertTransferCount(ExecuteSQL.REL_SUCCESS, 0); @@ -133,55 +132,52 @@ public void testIncomingConnectionWithNoFlowFileAndNoQuery() throws Initializati } @Test - public void testNoIncomingConnectionAndNoQuery() throws InitializationException { + public void testNoIncomingConnectionAndNoQuery() { runner.setIncomingConnection(false); - assertThrows(AssertionError.class, () -> { - runner.run(); - }); + assertThrows(AssertionError.class, () -> runner.run()); } @Test - public void testNoIncomingConnection() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testNoIncomingConnection() throws SQLException, IOException { runner.setIncomingConnection(false); invokeOnTrigger(null, QUERY_WITHOUT_EL, false, null, true); } @Test - public void testNoTimeLimit() throws InitializationException, ClassNotFoundException, SQLException, IOException { + public void testNoTimeLimit() throws SQLException, IOException { invokeOnTrigger(null, QUERY_WITH_EL, true, null, true); } @Test - public void testSelectQueryInFlowFile() throws InitializationException, ClassNotFoundException, SQLException, IOException { + public void testSelectQueryInFlowFile() throws SQLException, IOException { invokeOnTrigger(null, QUERY_WITHOUT_EL, true, null, false); } @Test - public void testSelectQueryInFlowFileWithParameters() throws InitializationException, ClassNotFoundException, SQLException, IOException { - Map sqlParams = new HashMap() {{ - put("sql.args.1.type", "4"); - put("sql.args.1.value", "20"); - put("sql.args.2.type", "4"); - put("sql.args.2.value", "5"); - }}; + public void testSelectQueryInFlowFileWithParameters() throws SQLException, IOException { + Map sqlParams = new HashMap<>(); + sqlParams.put("sql.args.1.type", "4"); + sqlParams.put("sql.args.1.value", "20"); + sqlParams.put("sql.args.2.type", "4"); + sqlParams.put("sql.args.2.value", "5"); invokeOnTrigger(null, QUERY_WITHOUT_EL_WITH_PARAMS, true, sqlParams, false); } @Test - public void testQueryTimeout() throws 
InitializationException, ClassNotFoundException, SQLException, IOException { + public void testQueryTimeout() throws SQLException, IOException { // Does not seem to have any effect when using embedded Derby invokeOnTrigger(1, QUERY_WITH_EL, true, null, true); // 1 second max time } @Test - public void testAutoCommitFalse() throws InitializationException, ClassNotFoundException, SQLException, IOException { + public void testAutoCommitFalse() throws SQLException, IOException { runner.setProperty(ExecuteSQL.AUTO_COMMIT, "false"); invokeOnTrigger(null, QUERY_WITHOUT_EL, true, null, false); } @Test - public void testAutoCommitTrue() throws InitializationException, ClassNotFoundException, SQLException, IOException { + public void testAutoCommitTrue() throws SQLException, IOException { runner.setProperty(ExecuteSQL.AUTO_COMMIT, "true"); invokeOnTrigger(null, QUERY_WITHOUT_EL, true, null, false); } @@ -198,7 +194,7 @@ public void testWithNullIntColumn() throws SQLException { try { stmt.execute("drop table TEST_NULL_INT"); - } catch (final SQLException sqle) { + } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); @@ -211,12 +207,12 @@ public void testWithNullIntColumn() throws SQLException { runner.run(); runner.assertAllFlowFilesTransferred(ExecuteSQL.REL_SUCCESS, 1); - runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).get(0).assertAttributeEquals(ExecuteSQL.RESULT_ROW_COUNT, "2"); - runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).get(0).assertAttributeEquals(ExecuteSQL.RESULTSET_INDEX, "0"); + runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).getFirst().assertAttributeEquals(ExecuteSQL.RESULT_ROW_COUNT, "2"); + runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).getFirst().assertAttributeEquals(ExecuteSQL.RESULTSET_INDEX, "0"); } @Test - public void testCompression() throws SQLException, CompressorException, IOException { + public void testCompression() throws SQLException, IOException { // remove previous test database, if any final File dbLocation = new File(DB_LOCATION); dbLocation.delete(); @@ -227,7 +223,7 @@ public void testCompression() throws SQLException, CompressorException, IOExcept try { stmt.execute("drop table TEST_NULL_INT"); - } catch (final SQLException sqle) { + } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); @@ -242,9 +238,9 @@ public void testCompression() throws SQLException, CompressorException, IOExcept runner.assertAllFlowFilesTransferred(ExecuteSQL.REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).getFirst(); - try (DataFileStream dfs = new DataFileStream<>(new ByteArrayInputStream(flowFile.toByteArray()), new GenericDatumReader())) { + try (DataFileStream dfs = new DataFileStream<>(new ByteArrayInputStream(flowFile.toByteArray()), new GenericDatumReader<>())) { assertEquals(AvroUtil.CodecType.BZIP2.name().toLowerCase(), dfs.getMetaString(DataFileConstants.CODEC).toLowerCase()); } } @@ -261,7 +257,7 @@ public void testWithOutputBatching() throws SQLException { try { stmt.execute("drop table TEST_NULL_INT"); - } catch (final SQLException sqle) { + } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1
integer, val2 integer, constraint my_pk primary key (id))"); @@ -280,7 +276,7 @@ public void testWithOutputBatching() throws SQLException { runner.assertAllFlowFilesContainAttribute(ExecuteSQL.REL_SUCCESS, FragmentAttributes.FRAGMENT_INDEX.key()); runner.assertAllFlowFilesContainAttribute(ExecuteSQL.REL_SUCCESS, FragmentAttributes.FRAGMENT_ID.key()); - MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).get(0); + MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).getFirst(); firstFlowFile.assertAttributeEquals(ExecuteSQL.RESULT_ROW_COUNT, "5"); firstFlowFile.assertAttributeNotExists(FragmentAttributes.FRAGMENT_COUNT.key()); @@ -306,7 +302,7 @@ public void testWithOutputBatchingAndIncomingFlowFile() throws SQLException { try { stmt.execute("drop table TEST_NULL_INT"); - } catch (final SQLException sqle) { + } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); @@ -332,7 +328,7 @@ public void testWithOutputBatchingAndIncomingFlowFile() throws SQLException { runner.assertAllFlowFilesContainAttribute(ExecuteSQL.REL_SUCCESS, FragmentAttributes.FRAGMENT_INDEX.key()); runner.assertAllFlowFilesContainAttribute(ExecuteSQL.REL_SUCCESS, FragmentAttributes.FRAGMENT_ID.key()); - MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).get(0); + MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).getFirst(); firstFlowFile.assertAttributeEquals(ExecuteSQL.RESULT_ROW_COUNT, "5"); firstFlowFile.assertAttributeNotExists(FragmentAttributes.FRAGMENT_COUNT.key()); @@ -362,7 +358,7 @@ public void testMaxRowsPerFlowFile() throws SQLException { try { stmt.execute("drop table TEST_NULL_INT"); - } catch (final SQLException sqle) { + } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); @@ -383,7 +379,7 @@ public void testMaxRowsPerFlowFile() throws SQLException { runner.assertAllFlowFilesContainAttribute(ExecuteSQL.REL_SUCCESS, FragmentAttributes.FRAGMENT_ID.key()); runner.assertAllFlowFilesContainAttribute(ExecuteSQL.REL_SUCCESS, FragmentAttributes.FRAGMENT_COUNT.key()); - MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).get(0); + MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).getFirst(); firstFlowFile.assertAttributeEquals(ExecuteSQL.RESULT_ROW_COUNT, "5"); firstFlowFile.assertAttributeEquals(FragmentAttributes.FRAGMENT_INDEX.key(), "0"); @@ -408,7 +404,7 @@ public void testInsertStatementCreatesFlowFile() throws SQLException { try { stmt.execute("drop table TEST_NULL_INT"); - } catch (final SQLException sqle) { + } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); @@ -418,7 +414,7 @@ public void testInsertStatementCreatesFlowFile() throws SQLException { runner.run(); runner.assertAllFlowFilesTransferred(ExecuteSQL.REL_SUCCESS, 1); - runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).get(0).assertAttributeEquals(ExecuteSQL.RESULT_ROW_COUNT, "0"); + runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).getFirst().assertAttributeEquals(ExecuteSQL.RESULT_ROW_COUNT, "0"); } @Test @@ -433,7 +429,7 @@ public void 
testNoRowsStatementCreatesEmptyFlowFile() throws Exception { try { stmt.execute("drop table TEST_NULL_INT"); - } catch (final SQLException sqle) { + } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); @@ -444,7 +440,7 @@ public void testNoRowsStatementCreatesEmptyFlowFile() throws Exception { runner.run(); runner.assertAllFlowFilesTransferred(ExecuteSQL.REL_SUCCESS, 1); - MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).get(0); + MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).getFirst(); firstFlowFile.assertAttributeEquals(ExecuteSQL.RESULT_ROW_COUNT, "0"); final InputStream in = new ByteArrayInputStream(firstFlowFile.toByteArray()); final DatumReader datumReader = new GenericDatumReader<>(); @@ -476,7 +472,7 @@ public void testWithDuplicateColumns() throws SQLException { try { stmt.execute("drop table host1"); stmt.execute("drop table host2"); - } catch (final SQLException sqle) { + } catch (final SQLException ignored) { } stmt.execute("create table host1 (id integer not null, host varchar(45))"); @@ -489,7 +485,7 @@ public void testWithDuplicateColumns() throws SQLException { runner.run(); runner.assertAllFlowFilesTransferred(ExecuteSQL.REL_SUCCESS, 1); - runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).get(0).assertAttributeEquals(ExecuteSQL.RESULT_ROW_COUNT, "1"); + runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).getFirst().assertAttributeEquals(ExecuteSQL.RESULT_ROW_COUNT, "1"); } @Test @@ -504,7 +500,7 @@ public void testWithSqlException() throws SQLException { try { stmt.execute("drop table TEST_NO_ROWS"); - } catch (final SQLException sqle) { + } catch (final SQLException ignored) { } stmt.execute("create table TEST_NO_ROWS (id integer)"); @@ -547,15 +543,15 @@ public void testWithSqlExceptionErrorProcessingResultSet() throws Exception { runner.assertTransferCount(ExecuteSQL.REL_SUCCESS, 0); // Assert exception message has been put to flow file attribute - MockFlowFile failedFlowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_FAILURE).get(0); + MockFlowFile failedFlowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_FAILURE).getFirst(); assertEquals("java.sql.SQLException: test execute statement failed", failedFlowFile.getAttribute(ExecuteSQL.RESULT_ERROR_MESSAGE)); } public void invokeOnTrigger(final Integer queryTimeout, final String query, final boolean incomingFlowFile, final Map attrs, final boolean setQueryProperty) - throws InitializationException, ClassNotFoundException, SQLException, IOException { + throws SQLException, IOException { if (queryTimeout != null) { - runner.setProperty(ExecuteSQL.QUERY_TIMEOUT, queryTimeout.toString() + " secs"); + runner.setProperty(ExecuteSQL.QUERY_TIMEOUT, queryTimeout + " secs"); } // remove previous test database, if any @@ -599,13 +595,13 @@ public void invokeOnTrigger(final Integer queryTimeout, final String query, fina runner.assertAllFlowFilesContainAttribute(ExecuteSQL.REL_SUCCESS, ExecuteSQL.RESULT_ROW_COUNT); final List flowfiles = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS); - final long executionTime = Long.parseLong(flowfiles.get(0).getAttribute(ExecuteSQL.RESULT_QUERY_EXECUTION_TIME)); - final long fetchTime = Long.parseLong(flowfiles.get(0).getAttribute(ExecuteSQL.RESULT_QUERY_FETCH_TIME)); - final long durationTime = 
Long.parseLong(flowfiles.get(0).getAttribute(ExecuteSQL.RESULT_QUERY_DURATION)); + final long executionTime = Long.parseLong(flowfiles.getFirst().getAttribute(ExecuteSQL.RESULT_QUERY_EXECUTION_TIME)); + final long fetchTime = Long.parseLong(flowfiles.getFirst().getAttribute(ExecuteSQL.RESULT_QUERY_FETCH_TIME)); + final long durationTime = Long.parseLong(flowfiles.getFirst().getAttribute(ExecuteSQL.RESULT_QUERY_DURATION)); assertEquals(durationTime, fetchTime + executionTime); - final InputStream in = new ByteArrayInputStream(flowfiles.get(0).toByteArray()); + final InputStream in = new ByteArrayInputStream(flowfiles.getFirst().toByteArray()); final DatumReader datumReader = new GenericDatumReader<>(); try (DataFileStream dataFileReader = new DataFileStream<>(in, datumReader)) { GenericRecord record = null; @@ -635,7 +631,7 @@ public void testPreQuery() throws Exception { try { stmt.execute("drop table TEST_NULL_INT"); - } catch (final SQLException sqle) { + } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); @@ -648,7 +644,7 @@ public void testPreQuery() throws Exception { runner.run(); runner.assertAllFlowFilesTransferred(ExecuteSQL.REL_SUCCESS, 1); - MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).get(0); + MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).getFirst(); firstFlowFile.assertAttributeEquals(ExecuteSQL.RESULT_ROW_COUNT, "1"); final InputStream in = new ByteArrayInputStream(firstFlowFile.toByteArray()); @@ -680,7 +676,7 @@ public void testPostQuery() throws Exception { try { stmt.execute("drop table TEST_NULL_INT"); - } catch (final SQLException sqle) { + } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); @@ -694,7 +690,7 @@ public void testPostQuery() throws Exception { runner.run(); runner.assertAllFlowFilesTransferred(ExecuteSQL.REL_SUCCESS, 1); - MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).get(0); + MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).getFirst(); firstFlowFile.assertAttributeEquals(ExecuteSQL.RESULT_ROW_COUNT, "1"); final InputStream in = new ByteArrayInputStream(firstFlowFile.toByteArray()); @@ -726,7 +722,7 @@ public void testPreQueryFail() throws Exception { try { stmt.execute("drop table TEST_NULL_INT"); - } catch (final SQLException sqle) { + } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); @@ -753,7 +749,7 @@ public void testPostQueryFail() throws Exception { try { stmt.execute("drop table TEST_NULL_INT"); - } catch (final SQLException sqle) { + } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); @@ -767,7 +763,7 @@ public void testPostQueryFail() throws Exception { runner.run(); runner.assertAllFlowFilesTransferred(ExecuteSQL.REL_FAILURE, 1); - MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_FAILURE).get(0); + MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQL.REL_FAILURE).getFirst(); firstFlowFile.assertContentEquals("test"); } diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestForkRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestForkRecord.java index 650dc48c4788..4c10b7d52906 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestForkRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestForkRecord.java @@ -298,7 +298,7 @@ public void testForkExtractComplexWithoutParentFields() throws IOException, Malf fields.add(new RecordField("accounts", accountsType)); final RecordSchema schema = new SimpleRecordSchema(fields); - final List fieldsWrite = new ArrayList(); + final List fieldsWrite = new ArrayList<>(); fieldsWrite.add(new RecordField("id", RecordFieldType.INT.getDataType())); fieldsWrite.add(new RecordField("amount", RecordFieldType.DOUBLE.getDataType())); final RecordSchema schemaWrite = new SimpleRecordSchema(fieldsWrite); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGenerateTableFetch.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGenerateTableFetch.java index bbdf72d731cb..f30fdba60a8f 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGenerateTableFetch.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGenerateTableFetch.java @@ -23,7 +23,6 @@ import org.apache.nifi.dbcp.DBCPService; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processors.standard.db.impl.DerbyDatabaseAdapter; -import org.apache.nifi.reporting.InitializationException; import org.apache.nifi.util.MockFlowFile; import org.apache.nifi.util.MockProcessSession; import org.apache.nifi.util.MockSessionFactory; @@ -156,7 +155,7 @@ public void testAddedRows() throws SQLException, IOException { // Verify the expected FlowFile runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); String query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID <= 2 ORDER BY ID FETCH NEXT 10000 ROWS ONLY", query); flowFile.assertAttributeEquals(FRAGMENT_INDEX, "0"); @@ -192,7 +191,7 @@ public void testAddedRows() throws SQLException, IOException { assertEquals(ff2.getAttribute(FRAGMENT_COUNT), "2"); // Verify first flow file's contents - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID > 2 AND ID <= 5 ORDER BY ID FETCH NEXT 2 ROWS ONLY", query); resultSet = stmt.executeQuery(query); @@ -215,7 +214,7 @@ public void testAddedRows() throws SQLException, IOException { stmt.execute("insert into TEST_QUERY_DB_TABLE (id, name, scale, created_on) VALUES (6, 'Mr. 
NiFi', 1.0, '2012-01-01 03:23:34.234')"); runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID > 5 AND ID <= 6 ORDER BY ID FETCH NEXT 2 ROWS ONLY", query); resultSet = stmt.executeQuery(query); @@ -249,7 +248,7 @@ public void testAddedRows() throws SQLException, IOException { } @Test - public void testAddedRowsTwoTables() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testAddedRowsTwoTables() throws SQLException { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); @@ -278,7 +277,7 @@ public void testAddedRowsTwoTables() throws ClassNotFoundException, SQLException runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); String query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID <= 2 ORDER BY ID FETCH NEXT 10000 ROWS ONLY", query); ResultSet resultSet = stmt.executeQuery(query); @@ -304,7 +303,7 @@ public void testAddedRowsTwoTables() throws ClassNotFoundException, SQLException runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE2 WHERE ID <= 2 ORDER BY ID FETCH NEXT 10000 ROWS ONLY", query); resultSet = stmt.executeQuery(query); @@ -324,7 +323,7 @@ public void testAddedRowsTwoTables() throws ClassNotFoundException, SQLException runner.assertAllFlowFilesTransferred(REL_SUCCESS, 2); // Verify first flow file's contents - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE2 WHERE ID > 2 AND ID <= 5 ORDER BY ID FETCH NEXT 2 ROWS ONLY", query); resultSet = stmt.executeQuery(query); @@ -345,7 +344,7 @@ public void testAddedRowsTwoTables() throws ClassNotFoundException, SQLException } @Test - public void testAddedRowsRightBounded() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testAddedRowsRightBounded() throws SQLException, IOException { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); @@ -368,7 +367,7 @@ public void testAddedRowsRightBounded() throws ClassNotFoundException, SQLExcept runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); String query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID <= 2 ORDER BY ID FETCH NEXT 10000 ROWS ONLY", query); ResultSet resultSet = stmt.executeQuery(query); @@ -393,7 +392,7 @@ public void testAddedRowsRightBounded() throws ClassNotFoundException, SQLExcept 
runner.assertAllFlowFilesTransferred(REL_SUCCESS, 2); // Verify first flow file's contents - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID > 2 AND ID <= 5 ORDER BY ID FETCH NEXT 2 ROWS ONLY", query); resultSet = stmt.executeQuery(query); @@ -416,7 +415,7 @@ public void testAddedRowsRightBounded() throws ClassNotFoundException, SQLExcept stmt.execute("insert into TEST_QUERY_DB_TABLE (id, name, scale, created_on) VALUES (6, 'Mr. NiFi', 1.0, '2012-01-01 03:23:34.234')"); runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID > 5 AND ID <= 6 ORDER BY ID FETCH NEXT 2 ROWS ONLY", query); resultSet = stmt.executeQuery(query); @@ -443,7 +442,7 @@ public void testAddedRowsRightBounded() throws ClassNotFoundException, SQLExcept } @Test - public void testAddedRowsTimestampRightBounded() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testAddedRowsTimestampRightBounded() throws SQLException { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); @@ -466,7 +465,7 @@ public void testAddedRowsTimestampRightBounded() throws ClassNotFoundException, runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); String query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE created_on <= '2010-01-01 00:00:00.0' ORDER BY created_on FETCH NEXT 10000 ROWS ONLY", query); ResultSet resultSet = stmt.executeQuery(query); @@ -493,7 +492,7 @@ public void testAddedRowsTimestampRightBounded() throws ClassNotFoundException, runner.assertAllFlowFilesTransferred(REL_SUCCESS, 2); // Verify first flow file's contents - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE created_on > '2010-01-01 00:00:00.0' AND " + "created_on <= '2011-01-01 04:23:34.236' ORDER BY created_on FETCH NEXT 2 ROWS ONLY", query); @@ -518,7 +517,7 @@ public void testAddedRowsTimestampRightBounded() throws ClassNotFoundException, stmt.execute("insert into TEST_QUERY_DB_TABLE (id, name, scale, created_on) VALUES (8, 'Mr. 
NiFi', 1.0, '2012-01-01 03:23:34.234')"); runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE created_on > '2011-01-01 04:23:34.236' AND created_on <= '2012-01-01 03:23:34.234' ORDER BY created_on FETCH NEXT 2 ROWS ONLY", query); resultSet = stmt.executeQuery(query); @@ -529,7 +528,7 @@ public void testAddedRowsTimestampRightBounded() throws ClassNotFoundException, } @Test - public void testOnePartition() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testOnePartition() throws SQLException { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); @@ -554,7 +553,7 @@ public void testOnePartition() throws ClassNotFoundException, SQLException, Init runner.run(); runner.assertAllFlowFilesTransferred(GenerateTableFetch.REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(GenerateTableFetch.REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(GenerateTableFetch.REL_SUCCESS).getFirst(); flowFile.assertContentEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID <= 2"); flowFile.assertAttributeExists("generatetablefetch.limit"); flowFile.assertAttributeEquals("generatetablefetch.limit", null); @@ -591,7 +590,7 @@ public void testFlowFileGeneratedOnZeroResults() throws SQLException { runner.setProperty(GenerateTableFetch.OUTPUT_EMPTY_FLOWFILE_ON_ZERO_RESULTS, "true"); runner.run(); runner.assertAllFlowFilesTransferred(GenerateTableFetch.REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(GenerateTableFetch.REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(GenerateTableFetch.REL_SUCCESS).getFirst(); assertEquals("TEST_QUERY_DB_TABLE", flowFile.getAttribute("generatetablefetch.tableName")); assertEquals("ID,BUCKET", flowFile.getAttribute("generatetablefetch.columnNames")); assertEquals("1=1", flowFile.getAttribute("generatetablefetch.whereClause")); @@ -603,7 +602,7 @@ public void testFlowFileGeneratedOnZeroResults() throws SQLException { } @Test - public void testMultiplePartitions() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testMultiplePartitions() throws SQLException { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); @@ -654,7 +653,7 @@ public void testMultiplePartitions() throws ClassNotFoundException, SQLException } @Test - public void testMultiplePartitionsIncomingFlowFiles() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testMultiplePartitionsIncomingFlowFiles() throws SQLException { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); @@ -684,21 +683,12 @@ public void testMultiplePartitionsIncomingFlowFiles() throws ClassNotFoundExcept runner.setIncomingConnection(true); runner.setProperty(GenerateTableFetch.PARTITION_SIZE, "${partSize}"); - runner.enqueue("".getBytes(), new HashMap() {{ - put("tableName", "TEST_QUERY_DB_TABLE1"); - put("partSize", "1"); - }}); + runner.enqueue("".getBytes(), Map.of("tableName", "TEST_QUERY_DB_TABLE1", "partSize", "1")); - 
runner.enqueue("".getBytes(), new HashMap() {{ - put("tableName", "TEST_QUERY_DB_TABLE2"); - put("partSize", "2"); - }}); + runner.enqueue("".getBytes(), Map.of("tableName", "TEST_QUERY_DB_TABLE2", "partSize", "2")); // The table does not exist, expect the original flow file to be routed to failure - runner.enqueue("".getBytes(), new HashMap() {{ - put("tableName", "TEST_QUERY_DB_TABLE3"); - put("partSize", "1"); - }}); + runner.enqueue("".getBytes(), Map.of("tableName", "TEST_QUERY_DB_TABLE3", "partSize", "1")); runner.run(3); runner.assertTransferCount(AbstractDatabaseFetchProcessor.REL_SUCCESS, 3); @@ -747,15 +737,11 @@ public void testBackwardsCompatibilityStateKeyStaticTableDynamicMaxValues() thro runner.setProperty(GenerateTableFetch.TABLE_NAME, "TEST_QUERY_DB_TABLE"); runner.setIncomingConnection(true); runner.setProperty(GenerateTableFetch.MAX_VALUE_COLUMN_NAMES, "${maxValueCol}"); - runner.enqueue("".getBytes(), new HashMap() {{ - put("maxValueCol", "id"); - }}); + runner.enqueue("".getBytes(), Map.of("maxValueCol", "id")); // Pre-populate the state with a key for column name (not fully-qualified) StateManager stateManager = runner.getStateManager(); - stateManager.setState(new HashMap() {{ - put("id", "0"); - }}, Scope.CLUSTER); + stateManager.setState(Map.of("id", "0"), Scope.CLUSTER); // Pre-populate the column type map with an entry for id (not fully-qualified) processor.columnTypeMap.put("id", 4); @@ -763,7 +749,7 @@ public void testBackwardsCompatibilityStateKeyStaticTableDynamicMaxValues() thro runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE id > 0 AND id <= 1 ORDER BY id FETCH NEXT 10000 ROWS ONLY", new String(flowFile.toByteArray())); } @@ -786,16 +772,11 @@ public void testBackwardsCompatibilityStateKeyDynamicTableDynamicMaxValues() thr runner.setProperty(GenerateTableFetch.TABLE_NAME, "${tableName}"); runner.setIncomingConnection(true); runner.setProperty(GenerateTableFetch.MAX_VALUE_COLUMN_NAMES, "${maxValueCol}"); - runner.enqueue("".getBytes(), new HashMap() {{ - put("tableName", "TEST_QUERY_DB_TABLE"); - put("maxValueCol", "id"); - }}); + runner.enqueue("".getBytes(), Map.of("tableName", "TEST_QUERY_DB_TABLE", "maxValueCol", "id")); // Pre-populate the state with a key for column name (not fully-qualified) StateManager stateManager = runner.getStateManager(); - stateManager.setState(new HashMap() {{ - put("id", "0"); - }}, Scope.CLUSTER); + stateManager.setState(Map.of("id", "0"), Scope.CLUSTER); // Pre-populate the column type map with an entry for id (not fully-qualified) processor.columnTypeMap.put("id", 4); @@ -803,11 +784,11 @@ public void testBackwardsCompatibilityStateKeyDynamicTableDynamicMaxValues() thr runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); // Note there is no WHERE clause here. 
Because we are using dynamic tables, the old state key/value is not retrieved assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE id <= 1 ORDER BY id FETCH NEXT 10000 ROWS ONLY", new String(flowFile.toByteArray())); assertEquals("TEST_QUERY_DB_TABLE", flowFile.getAttribute("generatetablefetch.tableName")); - assertEquals(null, flowFile.getAttribute("generatetablefetch.columnNames")); + assertNull(flowFile.getAttribute("generatetablefetch.columnNames")); assertEquals("id <= 1", flowFile.getAttribute("generatetablefetch.whereClause")); assertEquals("id", flowFile.getAttribute("generatetablefetch.maxColumnNames")); assertEquals("10000", flowFile.getAttribute("generatetablefetch.limit")); @@ -816,17 +797,14 @@ public void testBackwardsCompatibilityStateKeyDynamicTableDynamicMaxValues() thr runner.clearTransferState(); stmt.execute("insert into TEST_QUERY_DB_TABLE (id, bucket) VALUES (2, 0)"); - runner.enqueue("".getBytes(), new HashMap() {{ - put("tableName", "TEST_QUERY_DB_TABLE"); - put("maxValueCol", "id"); - }}); + runner.enqueue("".getBytes(), Map.of("tableName", "TEST_QUERY_DB_TABLE", "maxValueCol", "id")); runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE id > 1 AND id <= 2 ORDER BY id FETCH NEXT 10000 ROWS ONLY", new String(flowFile.toByteArray())); assertEquals("TEST_QUERY_DB_TABLE", flowFile.getAttribute("generatetablefetch.tableName")); - assertEquals(null, flowFile.getAttribute("generatetablefetch.columnNames")); + assertNull(flowFile.getAttribute("generatetablefetch.columnNames")); assertEquals("id > 1 AND id <= 2", flowFile.getAttribute("generatetablefetch.whereClause")); assertEquals("id", flowFile.getAttribute("generatetablefetch.maxColumnNames")); assertEquals("10000", flowFile.getAttribute("generatetablefetch.limit")); @@ -852,15 +830,11 @@ public void testBackwardsCompatibilityStateKeyDynamicTableStaticMaxValues() thro runner.setProperty(GenerateTableFetch.TABLE_NAME, "${tableName}"); runner.setIncomingConnection(true); runner.setProperty(GenerateTableFetch.MAX_VALUE_COLUMN_NAMES, "id"); - runner.enqueue("".getBytes(), new HashMap() {{ - put("tableName", "TEST_QUERY_DB_TABLE"); - }}); + runner.enqueue("".getBytes(), Map.of("tableName", "TEST_QUERY_DB_TABLE")); // Pre-populate the state with a key for column name (not fully-qualified) StateManager stateManager = runner.getStateManager(); - stateManager.setState(new HashMap() {{ - put("id", "0"); - }}, Scope.CLUSTER); + stateManager.setState(Map.of("id", "0"), Scope.CLUSTER); // Pre-populate the column type map with an entry for id (not fully-qualified) processor.columnTypeMap.put("id", 4); @@ -868,21 +842,18 @@ public void testBackwardsCompatibilityStateKeyDynamicTableStaticMaxValues() thro runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); // Note there is no WHERE clause here. 
Because we are using dynamic tables, the old state key/value is not retrieved assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE id <= 1 ORDER BY id FETCH NEXT 10000 ROWS ONLY", new String(flowFile.toByteArray())); runner.clearTransferState(); stmt.execute("insert into TEST_QUERY_DB_TABLE (id, bucket) VALUES (2, 0)"); - runner.enqueue("".getBytes(), new HashMap() {{ - put("tableName", "TEST_QUERY_DB_TABLE"); - put("maxValueCol", "id"); - }}); + runner.enqueue("".getBytes(), Map.of("tableName", "TEST_QUERY_DB_TABLE", "maxValueCol", "id")); runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE id > 1 AND id <= 2 ORDER BY id FETCH NEXT 10000 ROWS ONLY", new String(flowFile.toByteArray())); } @@ -911,24 +882,21 @@ public void testBackwardsCompatibilityStateKeyVariableRegistry() throws Exceptio // Pre-populate the state with a key for column name (not fully-qualified) StateManager stateManager = runner.getStateManager(); - stateManager.setState(new HashMap() {{ - put("id", "0"); - }}, Scope.CLUSTER); - + stateManager.setState(Map.of("id", "0"), Scope.CLUSTER); // Pre-populate the column type map with an entry for id (not fully-qualified) processor.columnTypeMap.put("id", 4); runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); // Note there is no WHERE clause here. Because we are using dynamic tables (i.e. Expression Language, // even when not referring to flow file attributes), the old state key/value is not retrieved assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE id <= 1 ORDER BY id FETCH NEXT 10000 ROWS ONLY", new String(flowFile.toByteArray())); } @Test - public void testRidiculousRowCount() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testRidiculousRowCount() throws SQLException { long rowCount = Long.parseLong(Integer.toString(Integer.MAX_VALUE)) + 100; int partitionSize = 1000000; int expectedFileCount = (int) (rowCount / partitionSize) + 1; @@ -962,14 +930,14 @@ public void testRidiculousRowCount() throws ClassNotFoundException, SQLException runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, expectedFileCount); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); String query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE 1=1 ORDER BY ID FETCH NEXT 1000000 ROWS ONLY", query); runner.clearTransferState(); } @Test - public void testInitialMaxValue() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testInitialMaxValue() throws SQLException { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); @@ -993,7 +961,7 @@ public void testInitialMaxValue() throws ClassNotFoundException, SQLException, I runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); String query = new 
String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID > 1 AND ID <= 2 ORDER BY ID FETCH NEXT 10000 ROWS ONLY", query); ResultSet resultSet = stmt.executeQuery(query); @@ -1017,7 +985,7 @@ public void testInitialMaxValue() throws ClassNotFoundException, SQLException, I runner.assertAllFlowFilesTransferred(REL_SUCCESS, 2); // Verify first flow file's contents - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID > 2 AND ID <= 5 ORDER BY ID FETCH NEXT 2 ROWS ONLY", query); resultSet = stmt.executeQuery(query); @@ -1038,7 +1006,7 @@ public void testInitialMaxValue() throws ClassNotFoundException, SQLException, I } @Test - public void testInitialMaxValueWithEL() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testInitialMaxValueWithEL() throws SQLException { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); @@ -1063,7 +1031,7 @@ public void testInitialMaxValueWithEL() throws ClassNotFoundException, SQLExcept runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); String query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID > 1 AND ID <= 2 ORDER BY ID FETCH NEXT 10000 ROWS ONLY", query); ResultSet resultSet = stmt.executeQuery(query); @@ -1079,7 +1047,7 @@ public void testInitialMaxValueWithEL() throws ClassNotFoundException, SQLExcept } @Test - public void testInitialMaxValueWithELAndIncoming() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testInitialMaxValueWithELAndIncoming() throws SQLException { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); @@ -1099,14 +1067,12 @@ public void testInitialMaxValueWithELAndIncoming() throws ClassNotFoundException runner.setProperty(GenerateTableFetch.TABLE_NAME, "TEST_QUERY_DB_TABLE"); runner.setProperty(GenerateTableFetch.MAX_VALUE_COLUMN_NAMES, "ID"); runner.setProperty("initial.maxvalue.ID", "${maxval.id}"); - Map attrs = new HashMap() {{ - put("maxval.id", "1"); - }}; + Map attrs = Map.of("maxval.id", "1"); runner.setIncomingConnection(true); runner.enqueue(new byte[0], attrs); runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); String query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID > 1 AND ID <= 2 ORDER BY ID FETCH NEXT 10000 ROWS ONLY", query); ResultSet resultSet = stmt.executeQuery(query); @@ -1123,7 +1089,7 @@ public void testInitialMaxValueWithELAndIncoming() throws ClassNotFoundException } @Test - public void testInitialMaxValueWithELAndMultipleTables() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testInitialMaxValueWithELAndMultipleTables() throws SQLException { // load test data to database final Connection con = ((DBCPService) 
runner.getControllerService("dbcp")).getConnection(); @@ -1143,15 +1109,13 @@ public void testInitialMaxValueWithELAndMultipleTables() throws ClassNotFoundExc runner.setProperty(GenerateTableFetch.TABLE_NAME, "${table.name}"); runner.setProperty(GenerateTableFetch.MAX_VALUE_COLUMN_NAMES, "ID"); runner.setProperty("initial.maxvalue.ID", "${maxval.id}"); - Map attrs = new HashMap() {{ - put("maxval.id", "1"); - put("table.name", "TEST_QUERY_DB_TABLE"); - }}; - runner.setIncomingConnection(true); + Map attrs = new HashMap<>(); + attrs.put("maxval.id", "1"); + attrs.put("table.name", "TEST_QUERY_DB_TABLE"); runner.setIncomingConnection(true); runner.enqueue(new byte[0], attrs); runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); String query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID > 1 AND ID <= 2 ORDER BY ID FETCH NEXT 10000 ROWS ONLY", query); ResultSet resultSet = stmt.executeQuery(query); @@ -1182,7 +1146,7 @@ public void testInitialMaxValueWithELAndMultipleTables() throws ClassNotFoundExc runner.enqueue(new byte[0], attrs); runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE2 WHERE ID > 1 AND ID <= 2 ORDER BY ID FETCH NEXT 10000 ROWS ONLY", query); resultSet = stmt.executeQuery(query); @@ -1199,7 +1163,7 @@ public void testInitialMaxValueWithELAndMultipleTables() throws ClassNotFoundExc } @Test - public void testNoDuplicateWithRightBounded() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testNoDuplicateWithRightBounded() throws SQLException { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); @@ -1222,7 +1186,7 @@ public void testNoDuplicateWithRightBounded() throws ClassNotFoundException, SQL runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); String query = new String(flowFile.toByteArray()); // we now insert a row before the query issued by GFT is actually executed by, let's say, ExecuteSQL processor @@ -1240,7 +1204,7 @@ public void testNoDuplicateWithRightBounded() throws ClassNotFoundException, SQL // Run again runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); query = new String(flowFile.toByteArray()); resultSet = stmt.executeQuery(query); @@ -1256,7 +1220,7 @@ public void testNoDuplicateWithRightBounded() throws ClassNotFoundException, SQL } @Test - public void testAddedRowsWithCustomWhereClause() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testAddedRowsWithCustomWhereClause() throws SQLException, IOException { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); @@ -1280,7 +1244,7 @@ public void 
testAddedRowsWithCustomWhereClause() throws ClassNotFoundException, runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); String query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE (type = 'male' OR type IS NULL)" + " AND ID <= 2 ORDER BY ID FETCH NEXT 10000 ROWS ONLY", query); @@ -1305,7 +1269,7 @@ public void testAddedRowsWithCustomWhereClause() throws ClassNotFoundException, runner.assertAllFlowFilesTransferred(REL_SUCCESS, 2); // Verify first flow file's contents - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID > 2 AND (type = 'male' OR type IS NULL)" + " AND ID <= 5 ORDER BY ID FETCH NEXT 1 ROWS ONLY", query); @@ -1329,7 +1293,7 @@ public void testAddedRowsWithCustomWhereClause() throws ClassNotFoundException, stmt.execute("insert into TEST_QUERY_DB_TABLE (id, type, name, scale, created_on) VALUES (6, 'male', 'Mr. NiFi', 1.0, '2012-01-01 03:23:34.234')"); runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID > 5 AND (type = 'male' OR type IS NULL)" + " AND ID <= 6 ORDER BY ID FETCH NEXT 1 ROWS ONLY", query); @@ -1372,7 +1336,7 @@ public void testAddedRowsWithCustomWhereClause() throws ClassNotFoundException, } @Test - public void testColumnTypeMissing() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testColumnTypeMissing() throws SQLException { // Load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); Statement stmt = con.createStatement(); @@ -1393,13 +1357,10 @@ public void testColumnTypeMissing() throws ClassNotFoundException, SQLException, runner.setProperty(GenerateTableFetch.TABLE_NAME, "${tableName}"); runner.setIncomingConnection(true); runner.setProperty(GenerateTableFetch.MAX_VALUE_COLUMN_NAMES, "${maxValueCol}"); - runner.enqueue("".getBytes(), new HashMap() {{ - put("tableName", "TEST_QUERY_DB_TABLE"); - put("maxValueCol", "id"); - }}); + runner.enqueue("".getBytes(), Map.of("tableName", "TEST_QUERY_DB_TABLE", "maxValueCol", "id")); runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); String query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE id <= 1 ORDER BY id FETCH NEXT 10000 ROWS ONLY", query); runner.clearTransferState(); @@ -1412,22 +1373,19 @@ public void testColumnTypeMissing() throws ClassNotFoundException, SQLException, stmt.execute("insert into TEST_QUERY_DB_TABLE (id, bucket) VALUES (2, 0)"); // Re-launch FlowFile to se if re-cache column type works - runner.enqueue("".getBytes(), new HashMap() {{ - put("tableName", "TEST_QUERY_DB_TABLE"); - put("maxValueCol", "id"); - }}); + runner.enqueue("".getBytes(), Map.of("tableName", "TEST_QUERY_DB_TABLE", 
"maxValueCol", "id")); // It should re-cache column type runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1); - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE id > 1 AND id <= 2 ORDER BY id FETCH NEXT 10000 ROWS ONLY", query); runner.clearTransferState(); } @Test - public void testMultipleColumnTypeMissing() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testMultipleColumnTypeMissing() throws SQLException { // Load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); @@ -1450,15 +1408,9 @@ public void testMultipleColumnTypeMissing() throws ClassNotFoundException, SQLEx runner.setIncomingConnection(true); runner.setProperty(GenerateTableFetch.MAX_VALUE_COLUMN_NAMES, "${maxValueCol}"); - runner.enqueue("".getBytes(), new HashMap() {{ - put("tableName", "TEST_QUERY_DB_TABLE"); - put("maxValueCol", "id"); - }}); + runner.enqueue("".getBytes(), Map.of("tableName", "TEST_QUERY_DB_TABLE", "maxValueCol", "id")); - runner.enqueue("".getBytes(), new HashMap() {{ - put("tableName", "TEST_QUERY_DB_TABLE_2"); - put("maxValueCol", "id"); - }}); + runner.enqueue("".getBytes(), Map.of("tableName", "TEST_QUERY_DB_TABLE_2", "maxValueCol", "id")); runner.run(2); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 2); @@ -1473,10 +1425,7 @@ public void testMultipleColumnTypeMissing() throws ClassNotFoundException, SQLEx stmt.execute("insert into TEST_QUERY_DB_TABLE (id, bucket) VALUES (2, 0)"); // Re-launch FlowFile to se if re-cache column type works - runner.enqueue("".getBytes(), new HashMap() {{ - put("tableName", "TEST_QUERY_DB_TABLE"); - put("maxValueCol", "id"); - }}); + runner.enqueue("".getBytes(), Map.of("tableName", "TEST_QUERY_DB_TABLE", "maxValueCol", "id")); // It should re-cache column type runner.run(); @@ -1486,7 +1435,7 @@ public void testMultipleColumnTypeMissing() throws ClassNotFoundException, SQLEx } @Test - public void testUseColumnValuesForPartitioning() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testUseColumnValuesForPartitioning() throws SQLException { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); @@ -1512,7 +1461,7 @@ public void testUseColumnValuesForPartitioning() throws ClassNotFoundException, runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 2); // First flow file - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); String query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID <= 12 AND ID >= 10 AND ID < 12", query); ResultSet resultSet = stmt.executeQuery(query); @@ -1543,7 +1492,7 @@ public void testUseColumnValuesForPartitioning() throws ClassNotFoundException, runner.assertAllFlowFilesTransferred(REL_SUCCESS, 3); // Verify first flow file's contents - flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID > 12 AND ID <= 24 AND ID >= 20 AND ID < 22", query); resultSet = 
stmt.executeQuery(query); @@ -1572,7 +1521,7 @@ public void testUseColumnValuesForPartitioning() throws ClassNotFoundException, } @Test - public void testUseColumnValuesForPartitioningNoMaxValueColumn() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testUseColumnValuesForPartitioningNoMaxValueColumn() throws SQLException { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); @@ -1597,7 +1546,7 @@ public void testUseColumnValuesForPartitioningNoMaxValueColumn() throws ClassNot runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 2); // First flow file - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); String query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE 1=1 AND ID >= 10 AND ID < 12", query); ResultSet resultSet = stmt.executeQuery(query); @@ -1622,7 +1571,7 @@ public void testUseColumnValuesForPartitioningNoMaxValueColumn() throws ClassNot } @Test - public void testCustomOrderByColumn() throws SQLException, IOException { + public void testCustomOrderByColumn() throws SQLException { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); @@ -1646,7 +1595,7 @@ public void testCustomOrderByColumn() throws SQLException, IOException { runner.run(); runner.assertAllFlowFilesTransferred(REL_SUCCESS, 2); - MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0); + MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).getFirst(); String query = new String(flowFile.toByteArray()); assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE 1=1 ORDER BY SCALE FETCH NEXT 2 ROWS ONLY", query); flowFile.assertAttributeEquals(FRAGMENT_INDEX, "0"); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestMergeContent.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestMergeContent.java index af9f5af29082..6f3d2ae2957a 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestMergeContent.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestMergeContent.java @@ -45,6 +45,7 @@ import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.nio.file.Paths; import java.util.Collection; import java.util.HashMap; @@ -81,10 +82,10 @@ public void testFlowFileLargerThanBin() { runner.assertTransferCount(MergeContent.REL_ORIGINAL, 1); runner.assertTransferCount(MergeContent.REL_MERGED, 1); runner.assertTransferCount(MergeContent.REL_FAILURE, 0); - assertEquals(runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0).getAttribute(CoreAttributes.UUID.key()), - runner.getFlowFilesForRelationship(MergeContent.REL_ORIGINAL).get(0).getAttribute(MergeContent.MERGE_UUID_ATTRIBUTE)); + assertEquals(runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst().getAttribute(CoreAttributes.UUID.key()), + runner.getFlowFilesForRelationship(MergeContent.REL_ORIGINAL).getFirst().getAttribute(MergeContent.MERGE_UUID_ATTRIBUTE)); - final 
MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); + final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); assertEquals(1024 * 6, bundle.getSize()); // Queue should not be empty because the first FlowFile will be transferred back to the input queue @@ -130,7 +131,7 @@ public void testSimpleAvroConcat() throws IOException { runner.assertTransferCount(MergeContent.REL_FAILURE, 0); runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3); - final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); + final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/avro-binary"); // create a reader for the merged content @@ -180,7 +181,7 @@ public void testAvroConcatWithDifferentSchemas() throws IOException { runner.assertTransferCount(MergeContent.REL_FAILURE, 1); runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3); - final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); + final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/avro-binary"); final byte[] data = runner.getContentAsByteArray(bundle); @@ -189,7 +190,7 @@ public void testAvroConcatWithDifferentSchemas() throws IOException { assertTrue(users.containsKey("Alyssa")); assertTrue(users.containsKey("John")); - final MockFlowFile failure = runner.getFlowFilesForRelationship(MergeContent.REL_FAILURE).get(0); + final MockFlowFile failure = runner.getFlowFilesForRelationship(MergeContent.REL_FAILURE).getFirst(); final byte[] failureData = runner.getContentAsByteArray(failure); final Map places = getGenericRecordMap(failureData, schema2, "name"); assertEquals(1, places.size()); @@ -209,26 +210,19 @@ public void testAvroConcatWithDifferentMetadataDoNotMerge() throws IOException { final GenericRecord user1 = new GenericData.Record(schema); user1.put("name", "Alyssa"); user1.put("favorite_number", 256); - final Map userMeta1 = new HashMap() {{ - put("test_metadata1", "Test 1"); - }}; + final Map userMeta1 = Map.of("test_metadata1", "Test 1"); final GenericRecord user2 = new GenericData.Record(schema); user2.put("name", "Ben"); user2.put("favorite_number", 7); user2.put("favorite_color", "red"); - final Map userMeta2 = new HashMap() {{ - put("test_metadata1", "Test 2"); // Test non-matching values - }}; + final Map userMeta2 = Map.of("test_metadata1", "Test 2"); // Test non-matching values final GenericRecord user3 = new GenericData.Record(schema); user3.put("name", "John"); user3.put("favorite_number", 5); user3.put("favorite_color", "blue"); - final Map userMeta3 = new HashMap() {{ - put("test_metadata1", "Test 1"); - put("test_metadata2", "Test"); // Test unique - }}; + final Map userMeta3 = Map.of("test_metadata1", "Test 1", "test_metadata2", "Test"); // Test unique final DatumWriter datumWriter = new GenericDatumWriter<>(schema); final ByteArrayOutputStream out1 = serializeAvroRecord(schema, user1, datumWriter, userMeta1); @@ -245,7 +239,7 @@ public void testAvroConcatWithDifferentMetadataDoNotMerge() throws IOException { runner.assertTransferCount(MergeContent.REL_FAILURE, 2); runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3); - final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); + final 
MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/avro-binary"); // create a reader for the merged content @@ -269,26 +263,19 @@ public void testAvroConcatWithDifferentMetadataIgnore() throws IOException { final GenericRecord user1 = new GenericData.Record(schema); user1.put("name", "Alyssa"); user1.put("favorite_number", 256); - final Map userMeta1 = new HashMap() {{ - put("test_metadata1", "Test 1"); - }}; + final Map userMeta1 = Map.of("test_metadata1", "Test 1"); final GenericRecord user2 = new GenericData.Record(schema); user2.put("name", "Ben"); user2.put("favorite_number", 7); user2.put("favorite_color", "red"); - final Map userMeta2 = new HashMap() {{ - put("test_metadata1", "Test 2"); // Test non-matching values - }}; + final Map userMeta2 = Map.of("test_metadata1", "Test 2"); // Test non-matching values final GenericRecord user3 = new GenericData.Record(schema); user3.put("name", "John"); user3.put("favorite_number", 5); user3.put("favorite_color", "blue"); - final Map userMeta3 = new HashMap() {{ - put("test_metadata1", "Test 1"); - put("test_metadata2", "Test"); // Test unique - }}; + final Map userMeta3 = Map.of("test_metadata1", "Test 1", "test_metadata2", "Test"); // Test unique final DatumWriter datumWriter = new GenericDatumWriter<>(schema); final ByteArrayOutputStream out1 = serializeAvroRecord(schema, user1, datumWriter, userMeta1); @@ -305,7 +292,7 @@ public void testAvroConcatWithDifferentMetadataIgnore() throws IOException { runner.assertTransferCount(MergeContent.REL_FAILURE, 0); runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3); - final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); + final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/avro-binary"); // create a reader for the merged content @@ -331,26 +318,19 @@ public void testAvroConcatWithDifferentMetadataUseFirst() throws IOException { final GenericRecord user1 = new GenericData.Record(schema); user1.put("name", "Alyssa"); user1.put("favorite_number", 256); - final Map userMeta1 = new HashMap() {{ - put("test_metadata1", "Test 1"); - }}; + final Map userMeta1 = Map.of("test_metadata1", "Test 1"); final GenericRecord user2 = new GenericData.Record(schema); user2.put("name", "Ben"); user2.put("favorite_number", 7); user2.put("favorite_color", "red"); - final Map userMeta2 = new HashMap() {{ - put("test_metadata1", "Test 2"); // Test non-matching values - }}; + final Map userMeta2 = Map.of("test_metadata1", "Test 2"); // Test non-matching values final GenericRecord user3 = new GenericData.Record(schema); user3.put("name", "John"); user3.put("favorite_number", 5); user3.put("favorite_color", "blue"); - final Map userMeta3 = new HashMap() {{ - put("test_metadata1", "Test 1"); - put("test_metadata2", "Test"); // Test unique - }}; + final Map userMeta3 = Map.of("test_metadata1", "Test 1", "test_metadata2", "Test"); // Test unique final DatumWriter datumWriter = new GenericDatumWriter<>(schema); final ByteArrayOutputStream out1 = serializeAvroRecord(schema, user1, datumWriter, userMeta1); @@ -367,7 +347,7 @@ public void testAvroConcatWithDifferentMetadataUseFirst() throws IOException { runner.assertTransferCount(MergeContent.REL_FAILURE, 0); runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3); - final MockFlowFile bundle = 
runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); + final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/avro-binary"); // create a reader for the merged content @@ -393,26 +373,19 @@ public void testAvroConcatWithDifferentMetadataKeepCommon() throws IOException { final GenericRecord user1 = new GenericData.Record(schema); user1.put("name", "Alyssa"); user1.put("favorite_number", 256); - final Map userMeta1 = new HashMap() {{ - put("test_metadata1", "Test 1"); - }}; + final Map userMeta1 = Map.of("test_metadata1", "Test 1"); final GenericRecord user2 = new GenericData.Record(schema); user2.put("name", "Ben"); user2.put("favorite_number", 7); user2.put("favorite_color", "red"); - final Map userMeta2 = new HashMap() {{ - put("test_metadata1", "Test 2"); // Test non-matching values - }}; + final Map userMeta2 = Map.of("test_metadata1", "Test 2"); // Test non-matching values final GenericRecord user3 = new GenericData.Record(schema); user3.put("name", "John"); user3.put("favorite_number", 5); user3.put("favorite_color", "blue"); - final Map userMeta3 = new HashMap() {{ - put("test_metadata1", "Test 1"); - put("test_metadata2", "Test"); // Test unique - }}; + final Map userMeta3 = Map.of("test_metadata1", "Test 1", "test_metadata2", "Test"); // Test unique final DatumWriter datumWriter = new GenericDatumWriter<>(schema); final ByteArrayOutputStream out1 = serializeAvroRecord(schema, user1, datumWriter, userMeta1); @@ -429,7 +402,7 @@ public void testAvroConcatWithDifferentMetadataKeepCommon() throws IOException { runner.assertTransferCount(MergeContent.REL_FAILURE, 1); runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3); - final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); + final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/avro-binary"); // create a reader for the merged content @@ -486,11 +459,11 @@ public void testSimpleBinaryConcat() throws IOException { runner.assertTransferCount(MergeContent.REL_FAILURE, 0); runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3); - final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); - bundle.assertContentEquals("Hello, World!".getBytes("UTF-8")); + final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); + bundle.assertContentEquals("Hello, World!".getBytes(StandardCharsets.UTF_8)); bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/plain-text"); - runner.getFlowFilesForRelationship(MergeContent.REL_ORIGINAL).stream().forEach( + runner.getFlowFilesForRelationship(MergeContent.REL_ORIGINAL).forEach( ff -> assertEquals(bundle.getAttribute(CoreAttributes.UUID.key()), ff.getAttribute(MergeContent.MERGE_UUID_ATTRIBUTE))); } @@ -509,7 +482,7 @@ public void testSimpleBinaryConcatSingleBin() { runner.assertTransferCount(MergeContent.REL_FAILURE, 0); runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3); - final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); + final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); bundle.assertContentEquals("Hello, World!"); bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/plain-text"); } @@ -532,8 
+505,8 @@ public void testSimpleBinaryConcatWithTextDelimiters() throws IOException { runner.assertTransferCount(MergeContent.REL_FAILURE, 0); runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3); - final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); - bundle.assertContentEquals("@Hello#, #World!$".getBytes("UTF-8")); + final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); + bundle.assertContentEquals("@Hello#, #World!$".getBytes(StandardCharsets.UTF_8)); bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/plain-text"); } @@ -553,13 +526,13 @@ public void testSimpleBinaryConcatWithTextDelimitersHeaderOnly() throws IOExcept runner.assertTransferCount(MergeContent.REL_FAILURE, 0); runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3); - final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); - bundle.assertContentEquals("@Hello, World!".getBytes("UTF-8")); + final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); + bundle.assertContentEquals("@Hello, World!".getBytes(StandardCharsets.UTF_8)); bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/plain-text"); } @Test - public void testSimpleBinaryConcatWithFileDelimiters() throws IOException, InterruptedException { + public void testSimpleBinaryConcatWithFileDelimiters() throws IOException { final TestRunner runner = TestRunners.newTestRunner(new MergeContent()); runner.setProperty(MergeContent.MAX_BIN_AGE, "1 sec"); runner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_CONCAT); @@ -575,9 +548,9 @@ public void testSimpleBinaryConcatWithFileDelimiters() throws IOException, Inter attributes.put("demarcator", "src/test/resources/TestMergeContent/demarcate"); attributes.put("footer", "src/test/resources/TestMergeContent/foot"); - runner.enqueue("Hello".getBytes("UTF-8"), attributes); - runner.enqueue(", ".getBytes("UTF-8"), attributes); - runner.enqueue("World!".getBytes("UTF-8"), attributes); + runner.enqueue("Hello".getBytes(StandardCharsets.UTF_8), attributes); + runner.enqueue(", ".getBytes(StandardCharsets.UTF_8), attributes); + runner.enqueue("World!".getBytes(StandardCharsets.UTF_8), attributes); runner.run(2); runner.assertQueueEmpty(); @@ -585,8 +558,8 @@ public void testSimpleBinaryConcatWithFileDelimiters() throws IOException, Inter runner.assertTransferCount(MergeContent.REL_FAILURE, 0); runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3); - final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); - bundle.assertContentEquals("(|)Hello***, ***World!___".getBytes("UTF-8")); + final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); + bundle.assertContentEquals("(|)Hello***, ***World!___".getBytes(StandardCharsets.UTF_8)); bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/plain-text"); } @@ -631,8 +604,7 @@ public void testTextDelimitersValidation() { Collection results = new HashSet<>(); ProcessContext context = runner.getProcessContext(); - if (context instanceof MockProcessContext) { - MockProcessContext mockContext = (MockProcessContext) context; + if (context instanceof MockProcessContext mockContext) { results = mockContext.validate(); } @@ -662,7 +634,7 @@ public void testFileDelimitersValidation() { assertEquals(3, results.size()); for (ValidationResult vr : results) { - 
assertTrue(vr.toString().contains("is invalid because File " + new File(doesNotExistFile).toString() + " does not exist")); + assertTrue(vr.toString().contains("is invalid because File " + new File(doesNotExistFile) + " does not exist")); } } @@ -684,8 +656,8 @@ public void testMimeTypeIsOctetStreamIfConflictingWithBinaryConcat() throws IOEx runner.assertTransferCount(MergeContent.REL_FAILURE, 0); runner.assertTransferCount(MergeContent.REL_ORIGINAL, 4); - final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); - bundle.assertContentEquals("Hello, World!".getBytes("UTF-8")); + final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); + bundle.assertContentEquals("Hello, World!".getBytes(StandardCharsets.UTF_8)); bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/octet-stream"); } @@ -768,25 +740,25 @@ public void testZip() throws IOException { runner.assertTransferCount(MergeContent.REL_FAILURE, 0); runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3); - final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); + final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); try (final InputStream rawIn = new ByteArrayInputStream(runner.getContentAsByteArray(bundle)); final ZipInputStream in = new ZipInputStream(rawIn)) { assertNotNull(in.getNextEntry()); final byte[] part1 = IOUtils.toByteArray(in); - assertArrayEquals("Hello".getBytes("UTF-8"), part1); + assertArrayEquals("Hello".getBytes(StandardCharsets.UTF_8), part1); in.getNextEntry(); final byte[] part2 = IOUtils.toByteArray(in); - assertArrayEquals(", ".getBytes("UTF-8"), part2); + assertArrayEquals(", ".getBytes(StandardCharsets.UTF_8), part2); in.getNextEntry(); final byte[] part3 = IOUtils.toByteArray(in); - assertArrayEquals("World!".getBytes("UTF-8"), part3); + assertArrayEquals("World!".getBytes(StandardCharsets.UTF_8), part3); } bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/zip"); } @Test - public void testZipException() throws IOException { + public void testZipException() { final TestRunner runner = TestRunners.newTestRunner(new MergeContent()); runner.setProperty(MergeContent.MAX_BIN_AGE, "1 sec"); runner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_ZIP); @@ -795,9 +767,9 @@ public void testZipException() throws IOException { attributes.put(CoreAttributes.MIME_TYPE.key(), "application/plain-text"); attributes.put("filename", "duplicate-filename.txt"); - runner.enqueue("Hello".getBytes("UTF-8"), attributes); - runner.enqueue(", ".getBytes("UTF-8"), attributes); - runner.enqueue("World!".getBytes("UTF-8"), attributes); + runner.enqueue("Hello".getBytes(StandardCharsets.UTF_8), attributes); + runner.enqueue(", ".getBytes(StandardCharsets.UTF_8), attributes); + runner.enqueue("World!".getBytes(StandardCharsets.UTF_8), attributes); runner.run(2); runner.assertQueueEmpty(); @@ -816,11 +788,11 @@ public void testTar() throws IOException { attributes.put(CoreAttributes.MIME_TYPE.key(), "application/plain-text"); attributes.put(CoreAttributes.FILENAME.key(), "AShortFileName"); - runner.enqueue("Hello".getBytes("UTF-8"), attributes); + runner.enqueue("Hello".getBytes(StandardCharsets.UTF_8), attributes); attributes.put(CoreAttributes.FILENAME.key(), "ALongerrrFileName"); - runner.enqueue(", ".getBytes("UTF-8"), attributes); + runner.enqueue(", ".getBytes(StandardCharsets.UTF_8), attributes); 
attributes.put(CoreAttributes.FILENAME.key(), "AReallyLongggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggFileName"); - runner.enqueue("World!".getBytes("UTF-8"), attributes); + runner.enqueue("World!".getBytes(StandardCharsets.UTF_8), attributes); runner.run(2); runner.assertQueueEmpty(); @@ -828,23 +800,23 @@ public void testTar() throws IOException { runner.assertTransferCount(MergeContent.REL_FAILURE, 0); runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3); - final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); + final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); try (final InputStream rawIn = new ByteArrayInputStream(runner.getContentAsByteArray(bundle)); final TarArchiveInputStream in = new TarArchiveInputStream(rawIn)) { ArchiveEntry entry = in.getNextEntry(); assertNotNull(entry); assertEquals("AShortFileName", entry.getName()); final byte[] part1 = IOUtils.toByteArray(in); - assertArrayEquals("Hello".getBytes("UTF-8"), part1); + assertArrayEquals("Hello".getBytes(StandardCharsets.UTF_8), part1); entry = in.getNextEntry(); assertEquals("ALongerrrFileName", entry.getName()); final byte[] part2 = IOUtils.toByteArray(in); - assertArrayEquals(", ".getBytes("UTF-8"), part2); + assertArrayEquals(", ".getBytes(StandardCharsets.UTF_8), part2); entry = in.getNextEntry(); assertEquals("AReallyLongggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggFileName", entry.getName()); final byte[] part3 = IOUtils.toByteArray(in); - assertArrayEquals("World!".getBytes("UTF-8"), part3); + assertArrayEquals("World!".getBytes(StandardCharsets.UTF_8), part3); } bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/tar"); } @@ -867,7 +839,7 @@ public void testFlowFileStream() throws IOException { runner.assertTransferCount(MergeContent.REL_FAILURE, 0); runner.assertTransferCount(MergeContent.REL_ORIGINAL, 2); - final MockFlowFile merged = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); + final MockFlowFile merged = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); merged.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), StandardFlowFileMediaType.VERSION_3.getMediaType()); } @@ -882,19 +854,19 @@ public void testDefragment() throws IOException { attributes.put(MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "4"); attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "1"); - runner.enqueue("A Man ".getBytes("UTF-8"), attributes); + runner.enqueue("A Man ".getBytes(StandardCharsets.UTF_8), attributes); attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "2"); - runner.enqueue("A Plan ".getBytes("UTF-8"), attributes); + runner.enqueue("A Plan ".getBytes(StandardCharsets.UTF_8), attributes); attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "3"); - runner.enqueue("A Canal ".getBytes("UTF-8"), attributes); + runner.enqueue("A Canal ".getBytes(StandardCharsets.UTF_8), attributes); attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "4"); - runner.enqueue("Panama".getBytes("UTF-8"), attributes); + runner.enqueue("Panama".getBytes(StandardCharsets.UTF_8), attributes); runner.run(); runner.assertTransferCount(MergeContent.REL_MERGED, 1); - final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); - assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes("UTF-8")); + final MockFlowFile assembled = 
runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); + assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes(StandardCharsets.UTF_8)); } @Test @@ -907,23 +879,23 @@ public void testDefragmentWithFragmentCountOnLastFragmentOnly() throws IOExcepti attributes.put(MergeContent.FRAGMENT_ID_ATTRIBUTE, "1"); attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "1"); - runner.enqueue("A Man ".getBytes("UTF-8"), attributes); + runner.enqueue("A Man ".getBytes(StandardCharsets.UTF_8), attributes); attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "2"); - runner.enqueue("A Plan ".getBytes("UTF-8"), attributes); + runner.enqueue("A Plan ".getBytes(StandardCharsets.UTF_8), attributes); attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "3"); - runner.enqueue("A Canal ".getBytes("UTF-8"), attributes); + runner.enqueue("A Canal ".getBytes(StandardCharsets.UTF_8), attributes); attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "4"); attributes.put(MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "4"); - runner.enqueue("Panama".getBytes("UTF-8"), attributes); + runner.enqueue("Panama".getBytes(StandardCharsets.UTF_8), attributes); runner.run(); runner.assertTransferCount(MergeContent.REL_MERGED, 1); - final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); - assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes("UTF-8")); + final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); + assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes(StandardCharsets.UTF_8)); } @Test @@ -934,53 +906,37 @@ public void testDefragmentWithFragmentCountOnMiddleFragment() throws IOException final String fragmentId = "Fragment Id"; - runner.enqueue("Fragment 1 without count ".getBytes("UTF-8"), new HashMap() {{ - put(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId); - put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "1"); - }}); + runner.enqueue("Fragment 1 without count ".getBytes(StandardCharsets.UTF_8), Map.of(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId, + MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "1")); - runner.enqueue("Fragment 2 with count ".getBytes("UTF-8"), new HashMap() {{ - put(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId); - put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "2"); - put(MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "3"); - }}); + runner.enqueue("Fragment 2 with count ".getBytes(StandardCharsets.UTF_8), Map.of(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId, + MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "2", MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "3")); - runner.enqueue("Fragment 3 without count".getBytes("UTF-8"), new HashMap() {{ - put(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId); - put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "3"); - }}); + runner.enqueue("Fragment 3 without count".getBytes(StandardCharsets.UTF_8), Map.of(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId, MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "3")); runner.run(); runner.assertTransferCount(MergeContent.REL_MERGED, 1); - final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); - assembled.assertContentEquals("Fragment 1 without count Fragment 2 with count Fragment 3 without count".getBytes("UTF-8")); + final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); + assembled.assertContentEquals("Fragment 1 without count Fragment 2 with count Fragment 3 without count".getBytes(StandardCharsets.UTF_8)); } @Test - public void 
@@ -934,53 +906,37 @@ public void testDefragmentWithFragmentCountOnMiddleFragment() throws IOException
         final String fragmentId = "Fragment Id";

-        runner.enqueue("Fragment 1 without count ".getBytes("UTF-8"), new HashMap<String, String>() {{
-            put(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId);
-            put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "1");
-        }});
+        runner.enqueue("Fragment 1 without count ".getBytes(StandardCharsets.UTF_8), Map.of(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId,
+                MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "1"));

-        runner.enqueue("Fragment 2 with count ".getBytes("UTF-8"), new HashMap<String, String>() {{
-            put(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId);
-            put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "2");
-            put(MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "3");
-        }});
+        runner.enqueue("Fragment 2 with count ".getBytes(StandardCharsets.UTF_8), Map.of(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId,
+                MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "2", MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "3"));

-        runner.enqueue("Fragment 3 without count".getBytes("UTF-8"), new HashMap<String, String>() {{
-            put(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId);
-            put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "3");
-        }});
+        runner.enqueue("Fragment 3 without count".getBytes(StandardCharsets.UTF_8), Map.of(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId, MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "3"));

         runner.run();

         runner.assertTransferCount(MergeContent.REL_MERGED, 1);

-        final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
-        assembled.assertContentEquals("Fragment 1 without count Fragment 2 with count Fragment 3 without count".getBytes("UTF-8"));
+        final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst();
+        assembled.assertContentEquals("Fragment 1 without count Fragment 2 with count Fragment 3 without count".getBytes(StandardCharsets.UTF_8));
     }

     @Test
-    public void testDefragmentWithDifferentFragmentCounts() throws IOException {
+    public void testDefragmentWithDifferentFragmentCounts() {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
         runner.setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
         runner.setProperty(MergeContent.MAX_BIN_AGE, "1 min");

         final String fragmentId = "Fragment Id";

-        runner.enqueue("Fragment 1 with count ".getBytes("UTF-8"), new HashMap<String, String>() {{
-            put(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId);
-            put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "1");
-            put(MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "2");
-        }});
+        runner.enqueue("Fragment 1 with count ".getBytes(StandardCharsets.UTF_8), Map.of(MergeContent.FRAGMENT_ID_ATTRIBUTE,
+                fragmentId, MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "1", MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "2"));

-        runner.enqueue("Fragment 2 with count ".getBytes("UTF-8"), new HashMap<String, String>() {{
-            put(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId);
-            put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "2");
-            put(MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "3");
-        }});
+        runner.enqueue("Fragment 2 with count ".getBytes(StandardCharsets.UTF_8), Map.of(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId,
+                MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "2", MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "3"));

-        runner.enqueue("Fragment 3 without count".getBytes("UTF-8"), new HashMap<String, String>() {{
-            put(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId);
-            put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "3");
-        }});
+        runner.enqueue("Fragment 3 without count".getBytes(StandardCharsets.UTF_8), Map.of(MergeContent.FRAGMENT_ID_ATTRIBUTE, fragmentId,
+                MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "3"));

         runner.run();
@@ -1000,22 +956,22 @@ public void testDefragmentDuplicateFragment() throws IOException, InterruptedExc
         attributes.put(MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "4");

         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "1");
-        runner.enqueue("A Man ".getBytes("UTF-8"), attributes);
+        runner.enqueue("A Man ".getBytes(StandardCharsets.UTF_8), attributes);
         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "2");
-        runner.enqueue("A Plan ".getBytes("UTF-8"), attributes);
+        runner.enqueue("A Plan ".getBytes(StandardCharsets.UTF_8), attributes);
         // enqueue a duplicate fragment
-        runner.enqueue("A Plan ".getBytes("UTF-8"), attributes);
+        runner.enqueue("A Plan ".getBytes(StandardCharsets.UTF_8), attributes);
         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "3");
-        runner.enqueue("A Canal ".getBytes("UTF-8"), attributes);
+        runner.enqueue("A Canal ".getBytes(StandardCharsets.UTF_8), attributes);
         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "4");
-        runner.enqueue("Panama".getBytes("UTF-8"), attributes);
+        runner.enqueue("Panama".getBytes(StandardCharsets.UTF_8), attributes);

         runner.run(1, false);

         runner.assertTransferCount(MergeContent.REL_FAILURE, 0);
         runner.assertTransferCount(MergeContent.REL_MERGED, 1);
-        final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
-        assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes("UTF-8"));
+        final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst();
+        assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes(StandardCharsets.UTF_8));

         runner.clearTransferState();
         Thread.sleep(1_100L);
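Where these hunks replace double-brace maps, the trade-off is worth spelling out: new HashMap<String, String>() {{ ... }} compiles to a fresh anonymous subclass per use site and, in an instance context, pins a reference to the enclosing object, while Map.of (Java 9+) allocates no extra class and returns an unmodifiable map that rejects null keys, null values, and duplicate keys, with fixed-arity overloads up to ten entries. A small sketch under those assumptions (class and method names are illustrative; "fragment.identifier" and "fragment.index" are the NiFi attribute names seen in the hunks):

    import java.util.HashMap;
    import java.util.Map;

    class FragmentAttributes {
        // Old style: double-brace initialization creates an anonymous HashMap
        // subclass, and the resulting map stays mutable.
        static Map<String, String> doubleBrace(String fragmentId) {
            return new HashMap<String, String>() {{
                put("fragment.identifier", fragmentId);
                put("fragment.index", "1");
            }};
        }

        // New style: compact, unmodifiable, and null-hostile.
        static Map<String, String> mapOf(String fragmentId) {
            return Map.of("fragment.identifier", fragmentId, "fragment.index", "1");
        }
    }

One consequence to keep in mind: a map built with Map.of throws UnsupportedOperationException on put, so it only suits attribute maps that are never mutated after enqueueing; the tests above that keep re-putting into a shared attributes map still use HashMap.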
@@ -1035,24 +991,24 @@ public void testDefragmentWithTooManyFragments() throws IOException {
         attributes.put(MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "4");

         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "1");
-        runner.enqueue("A Man ".getBytes("UTF-8"), attributes);
+        runner.enqueue("A Man ".getBytes(StandardCharsets.UTF_8), attributes);
         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "2");
-        runner.enqueue("A Plan ".getBytes("UTF-8"), attributes);
+        runner.enqueue("A Plan ".getBytes(StandardCharsets.UTF_8), attributes);
         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "3");
-        runner.enqueue("A Canal ".getBytes("UTF-8"), attributes);
+        runner.enqueue("A Canal ".getBytes(StandardCharsets.UTF_8), attributes);
         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "4");
-        runner.enqueue("Panama".getBytes("UTF-8"), attributes);
+        runner.enqueue("Panama".getBytes(StandardCharsets.UTF_8), attributes);

         runner.run();

         runner.assertTransferCount(MergeContent.REL_FAILURE, 0);
         runner.assertTransferCount(MergeContent.REL_MERGED, 1);

-        final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
-        assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes("UTF-8"));
+        final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst();
+        assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes(StandardCharsets.UTF_8));
     }

     @Test
-    public void testDefragmentWithTooFewFragments() throws IOException {
+    public void testDefragmentWithTooFewFragments() {
         final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
         runner.setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_DEFRAGMENT);
         runner.setProperty(MergeContent.MAX_BIN_AGE, "2 secs");
@@ -1062,13 +1018,13 @@ public void testDefragmentWithTooFewFragments() throws IOException {
         attributes.put(MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "5");

         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "1");
-        runner.enqueue("A Man ".getBytes("UTF-8"), attributes);
+        runner.enqueue("A Man ".getBytes(StandardCharsets.UTF_8), attributes);
         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "2");
-        runner.enqueue("A Plan ".getBytes("UTF-8"), attributes);
+        runner.enqueue("A Plan ".getBytes(StandardCharsets.UTF_8), attributes);
         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "3");
-        runner.enqueue("A Canal ".getBytes("UTF-8"), attributes);
+        runner.enqueue("A Canal ".getBytes(StandardCharsets.UTF_8), attributes);
         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "4");
-        runner.enqueue("Panama".getBytes("UTF-8"), attributes);
+        runner.enqueue("Panama".getBytes(StandardCharsets.UTF_8), attributes);

         runner.run(1, false);
@@ -1076,7 +1032,7 @@ public void testDefragmentWithTooFewFragments() throws IOException {
             try {
                 Thread.sleep(3000L);
                 break;
-            } catch (final InterruptedException ie) {
+            } catch (final InterruptedException ignore) {
             }
         }
         runner.run(1);
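One small rename in the hunk above: the deliberately swallowed exception variable changes from ie to ignore, documenting the intent at the catch site. A sketch of the pattern (class and method names are ours, not from the patch):

    class BinAgeWait {
        static void sleepQuietly(long millis) {
            try {
                Thread.sleep(millis);
            } catch (final InterruptedException ignore) {
                // Swallowing is acceptable in a test-only retry loop like the one
                // above; production code would normally re-assert the flag with
                // Thread.currentThread().interrupt().
            }
        }
    }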
@@ -1095,21 +1051,21 @@ public void testDefragmentOutOfOrder() throws IOException {
         attributes.put(MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "4");

         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "1");
-        runner.enqueue("A Man ".getBytes("UTF-8"), attributes);
+        runner.enqueue("A Man ".getBytes(StandardCharsets.UTF_8), attributes);
         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "3");
-        runner.enqueue("A Canal ".getBytes("UTF-8"), attributes);
+        runner.enqueue("A Canal ".getBytes(StandardCharsets.UTF_8), attributes);
         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "4");
-        runner.enqueue("Panama".getBytes("UTF-8"), attributes);
+        runner.enqueue("Panama".getBytes(StandardCharsets.UTF_8), attributes);
         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "2");
-        runner.enqueue("A Plan ".getBytes("UTF-8"), attributes);
+        runner.enqueue("A Plan ".getBytes(StandardCharsets.UTF_8), attributes);

         runner.run();

         runner.assertTransferCount(MergeContent.REL_MERGED, 1);

-        final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
-        assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes("UTF-8"));
+        final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst();
+        assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes(StandardCharsets.UTF_8));

-        runner.getFlowFilesForRelationship(MergeContent.REL_ORIGINAL).stream().forEach(
+        runner.getFlowFilesForRelationship(MergeContent.REL_ORIGINAL).forEach(
                 ff -> assertEquals(assembled.getAttribute(CoreAttributes.UUID.key()), ff.getAttribute(MergeContent.MERGE_UUID_ATTRIBUTE)));
     }

@@ -1128,27 +1084,27 @@ public void testDefragmentMultipleMingledSegments() throws IOException {
         secondAttrs.put(MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "3");

         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "1");
-        runner.enqueue("A Man ".getBytes("UTF-8"), attributes);
+        runner.enqueue("A Man ".getBytes(StandardCharsets.UTF_8), attributes);
         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "2");
-        runner.enqueue("A Plan ".getBytes("UTF-8"), attributes);
+        runner.enqueue("A Plan ".getBytes(StandardCharsets.UTF_8), attributes);
         secondAttrs.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "1");
-        runner.enqueue("No x ".getBytes("UTF-8"), secondAttrs);
+        runner.enqueue("No x ".getBytes(StandardCharsets.UTF_8), secondAttrs);
         secondAttrs.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "2");
-        runner.enqueue("in ".getBytes("UTF-8"), secondAttrs);
+        runner.enqueue("in ".getBytes(StandardCharsets.UTF_8), secondAttrs);
         secondAttrs.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "3");
-        runner.enqueue("Nixon".getBytes("UTF-8"), secondAttrs);
+        runner.enqueue("Nixon".getBytes(StandardCharsets.UTF_8), secondAttrs);
         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "3");
-        runner.enqueue("A Canal ".getBytes("UTF-8"), attributes);
+        runner.enqueue("A Canal ".getBytes(StandardCharsets.UTF_8), attributes);
         attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "4");
-        runner.enqueue("Panama".getBytes("UTF-8"), attributes);
+        runner.enqueue("Panama".getBytes(StandardCharsets.UTF_8), attributes);

         runner.run(1);

         runner.assertTransferCount(MergeContent.REL_MERGED, 2);

         final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
-        assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes("UTF-8"));
+        assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes(StandardCharsets.UTF_8));
         final MockFlowFile assembledTwo = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(1);
-        assembledTwo.assertContentEquals("No x in Nixon".getBytes("UTF-8"));
+        assembledTwo.assertContentEquals("No x in Nixon".getBytes(StandardCharsets.UTF_8));
     }

     @Test
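The .stream().forEach(...) to .forEach(...) change a couple of hunks up is behavior-preserving: for a List, Iterable.forEach (Java 8+) performs the same in-order iteration without constructing an intermediate Stream. A minimal sketch (method and variable names are ours):

    import java.util.List;

    class ForEachStyle {
        static void checkMergeUuids(List<String> uuids, String expected) {
            // No Stream pipeline is needed for a plain side-effecting loop;
            // forEach on the collection itself is shorter and equivalent here.
            uuids.forEach(uuid -> {
                if (!expected.equals(uuid)) {
                    throw new AssertionError("unexpected merge UUID: " + uuid);
                }
            });
        }
    }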
attributes.put("fragment.index", "3"); - runner.enqueue("A Canal ".getBytes("UTF-8"), attributes); + runner.enqueue("A Canal ".getBytes(StandardCharsets.UTF_8), attributes); attributes.put("fragment.index", "4"); - runner.enqueue("Panama".getBytes("UTF-8"), attributes); + runner.enqueue("Panama".getBytes(StandardCharsets.UTF_8), attributes); runner.run(); runner.assertTransferCount(MergeContent.REL_MERGED, 1); - final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); - assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes("UTF-8")); + final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); + assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes(StandardCharsets.UTF_8)); assembled.assertAttributeEquals(CoreAttributes.FILENAME.key(), "originalfilename"); } @@ -1188,28 +1144,28 @@ public void testDefragmentMultipleOnTriggers() throws IOException { attributes.put(MergeContent.FRAGMENT_ID_ATTRIBUTE, "1"); attributes.put(MergeContent.FRAGMENT_COUNT_ATTRIBUTE, "4"); attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "1"); - runner.enqueue("A Man ".getBytes("UTF-8"), attributes); + runner.enqueue("A Man ".getBytes(StandardCharsets.UTF_8), attributes); runner.run(); attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "2"); - runner.enqueue("A Plan ".getBytes("UTF-8"), attributes); + runner.enqueue("A Plan ".getBytes(StandardCharsets.UTF_8), attributes); runner.run(); attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "3"); - runner.enqueue("A Canal ".getBytes("UTF-8"), attributes); + runner.enqueue("A Canal ".getBytes(StandardCharsets.UTF_8), attributes); runner.run(); attributes.put(MergeContent.FRAGMENT_INDEX_ATTRIBUTE, "4"); - runner.enqueue("Panama".getBytes("UTF-8"), attributes); + runner.enqueue("Panama".getBytes(StandardCharsets.UTF_8), attributes); runner.run(); runner.assertTransferCount(MergeContent.REL_MERGED, 1); - final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0); - assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes("UTF-8")); + final MockFlowFile assembled = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst(); + assembled.assertContentEquals("A Man A Plan A Canal Panama".getBytes(StandardCharsets.UTF_8)); } @Test - public void testMergeBasedOnCorrelation() throws IOException, InterruptedException { + public void testMergeBasedOnCorrelation() { final TestRunner runner = TestRunners.newTestRunner(new MergeContent()); runner.setProperty(MergeContent.MERGE_STRATEGY, MergeContent.MERGE_STRATEGY_BIN_PACK); runner.setProperty(MergeContent.MAX_BIN_AGE, "1 min"); @@ -1219,14 +1175,14 @@ public void testMergeBasedOnCorrelation() throws IOException, InterruptedExcepti final Map attributes = new HashMap<>(); attributes.put("attr", "b"); - runner.enqueue("A Man ".getBytes("UTF-8"), attributes); - runner.enqueue("A Plan ".getBytes("UTF-8"), attributes); + runner.enqueue("A Man ".getBytes(StandardCharsets.UTF_8), attributes); + runner.enqueue("A Plan ".getBytes(StandardCharsets.UTF_8), attributes); attributes.put("attr", "c"); - runner.enqueue("A Canal ".getBytes("UTF-8"), attributes); + runner.enqueue("A Canal ".getBytes(StandardCharsets.UTF_8), attributes); attributes.put("attr", "b"); - runner.enqueue("Panama".getBytes("UTF-8"), attributes); + runner.enqueue("Panama".getBytes(StandardCharsets.UTF_8), attributes); runner.run(2); @@ -1298,7 +1254,7 @@ public void testUniqueAttributes() { 
         runner.run();
         runner.assertTransferCount(MergeContent.REL_MERGED, 1);

-        final MockFlowFile outFile = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
+        final MockFlowFile outFile = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst();

         outFile.assertAttributeEquals("abc", "xyz");
         outFile.assertAttributeEquals("hello", "good-bye");
@@ -1328,7 +1284,7 @@ public void testCommonAttributesOnly() {
         runner.run();
         runner.assertTransferCount(MergeContent.REL_MERGED, 1);

-        final MockFlowFile outFile = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
+        final MockFlowFile outFile = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst();

         outFile.assertAttributeEquals("abc", "xyz");
         outFile.assertAttributeNotExists("hello");
@@ -1358,8 +1314,8 @@ public void testCountAttribute() throws IOException {
         runner.assertTransferCount(MergeContent.REL_FAILURE, 0);
         runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3);

-        final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
-        bundle.assertContentEquals("Hello, World!".getBytes("UTF-8"));
+        final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).getFirst();
+        bundle.assertContentEquals("Hello, World!".getBytes(StandardCharsets.UTF_8));
         bundle.assertAttributeEquals(MergeContent.MERGE_COUNT_ATTRIBUTE, "3");
         bundle.assertAttributeExists(MergeContent.MERGE_BIN_AGE_ATTRIBUTE);
     }
@@ -1372,7 +1328,7 @@ public void testLeavesSmallBinUnmerged() {
         runner.setProperty(MergeContent.MAX_BIN_COUNT, "3");

         for (int i = 0; i < 17; i++) {
-            runner.enqueue(String.valueOf(i) + "\n");
+            runner.enqueue(i + "\n");
         }

         runner.run(5);
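Before the diff moves on to TestModifyBytes.java, the other recurring substitution deserves a note: getFirst() comes from the SequencedCollection interface that List implements as of Java 21, so runner.getFlowFilesForRelationship(...).getFirst() states the intent ("the single expected result") more directly than get(0); the mingled-segments test above deliberately keeps get(0) and get(1) because it asserts on two merged FlowFiles. A minimal sketch (class and method names are illustrative):

    import java.util.List;

    class FirstElement {
        static String onlyResult(List<String> mergedContents) {
            // getFirst() (Java 21, SequencedCollection) reads as "the first
            // element" rather than an index computation; on an empty list it
            // throws NoSuchElementException instead of IndexOutOfBoundsException.
            return mergedContents.getFirst();
        }
    }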
diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestModifyBytes.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestModifyBytes.java
index 2a2c541ff422..b84a4787a88d 100644
--- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestModifyBytes.java
+++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestModifyBytes.java
@@ -23,9 +23,10 @@
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import java.util.HashMap;
+import java.util.Map;

 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -59,8 +60,8 @@ public void testReturnEmptyFile() throws IOException {
         runner.run();

         runner.assertAllFlowFilesTransferred(ModifyBytes.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).get(0);
-        out.assertContentEquals("".getBytes("UTF-8"));
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).getFirst();
+        out.assertContentEquals("".getBytes(StandardCharsets.UTF_8));
     }

     @Test
@@ -73,7 +74,7 @@ public void testReturnSameFile() throws IOException {
         runner.run();

         runner.assertAllFlowFilesTransferred(ModifyBytes.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).getFirst();
         out.assertContentEquals(testFile);
     }

@@ -87,7 +88,7 @@ public void testRemoveHeader() throws IOException {
         runner.run();

         runner.assertAllFlowFilesTransferred(ModifyBytes.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).getFirst();
         out.assertContentEquals(noHeaderFile);
     }

@@ -97,13 +98,11 @@ public void testRemoveHeaderEL() throws IOException {
         runner.setProperty(ModifyBytes.START_OFFSET, "${numBytes}"); //REMOVE - '<<<HEADER>>>'
         runner.setProperty(ModifyBytes.END_OFFSET, "0 MB");

-        runner.enqueue(testFilePath, new HashMap<String, String>() {{
-            put("numBytes", "12 B");
-        }});
+        runner.enqueue(testFilePath, Map.of("numBytes", "12 B"));
         runner.run();

         runner.assertAllFlowFilesTransferred(ModifyBytes.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).get(0);
+        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).getFirst();
         out.assertContentEquals(noHeaderFile);
     }

@@ -117,8 +116,8 @@ public void testKeepFooter() throws IOException {
         runner.run();

         runner.assertAllFlowFilesTransferred(ModifyBytes.REL_SUCCESS, 1);
-        final MockFlowFile out = runner.getFlowFilesForRelationship(ModifyBytes.REL_SUCCESS).get(0);
-        out.assertContentEquals("<<