From 420b107962d244ebf170cf0983aaf1f6ab99eed9 Mon Sep 17 00:00:00 2001
From: Steve Vaughan Jr
Date: Tue, 2 Apr 2024 12:48:45 -0400
Subject: [PATCH] Clean-up FileWrite

---
 .../spark/sql/execution/datasources/v2/FileWrite.scala | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileWrite.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileWrite.scala
index 7a060a9bc8fe1..52f44e33ea11f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileWrite.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileWrite.scala
@@ -18,7 +18,6 @@ package org.apache.spark.sql.execution.datasources.v2
 
 import java.util.UUID
 
-import scala.collection.mutable
 import scala.jdk.CollectionConverters._
 
 import org.apache.hadoop.conf.Configuration
@@ -33,7 +32,7 @@ import org.apache.spark.sql.catalyst.types.DataTypeUtils.toAttributes
 import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, DateTimeUtils}
 import org.apache.spark.sql.connector.write.{BatchWrite, LogicalWriteInfo, Write}
 import org.apache.spark.sql.errors.QueryCompilationErrors
-import org.apache.spark.sql.execution.datasources.{BasicWriteJobStatsTracker, DataSource, OutputWriterFactory, PartitionTaskStats, WriteJobDescription}
+import org.apache.spark.sql.execution.datasources.{BasicWriteJobStatsTracker, DataSource, OutputWriterFactory, WriteJobDescription}
 import org.apache.spark.sql.execution.metric.SQLMetric
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.{DataType, StructType}
@@ -125,8 +124,6 @@ trait FileWrite extends Write {
       prepareWrite(sparkSession.sessionState.conf, job, caseInsensitiveOptions, schema)
     val allColumns = toAttributes(schema)
     val metrics: Map[String, SQLMetric] = BasicWriteJobStatsTracker.metrics
-    val partitionMetrics: mutable.Map[String, PartitionTaskStats]
-      = BasicWriteJobStatsTracker.partitionMetrics
     val serializableHadoopConf = new SerializableConfiguration(hadoopConf)
     val statsTracker = new BasicWriteJobStatsTracker(serializableHadoopConf, metrics)
     // TODO: after partitioning is supported in V2: