Skip to content

Commit

Permalink
Core, Spark: Remove deprecated code for 1.9.0 (#12336)
Browse files — browse the repository at this point in the history
nastra authored Feb 20, 2025
1 parent de64415 commit 30fd752
Show file tree
Hide file tree
Showing 24 changed files with 58 additions and 131 deletions.
24 changes: 24 additions & 0 deletions .palantir/revapi.yml
Original file line number Diff line number Diff line change
Expand Up @@ -1177,6 +1177,30 @@ acceptedBreaks:
old: "method java.lang.String[] org.apache.iceberg.hadoop.Util::blockLocations(org.apache.iceberg.CombinedScanTask,\
\ org.apache.hadoop.conf.Configuration)"
justification: "Removing deprecated code"
- code: "java.method.removed"
old: "method org.apache.iceberg.TableMetadata org.apache.iceberg.TableMetadata::updateSchema(org.apache.iceberg.Schema,\
\ int)"
justification: "Removing deprecated code"
- code: "java.method.removed"
old: "method org.apache.iceberg.TableMetadata.Builder org.apache.iceberg.TableMetadata.Builder::addSchema(org.apache.iceberg.Schema,\
\ int)"
justification: "Removing deprecated code"
- code: "java.method.removed"
old: "method org.apache.iceberg.TableMetadata.Builder org.apache.iceberg.TableMetadata.Builder::setStatistics(long,\
\ org.apache.iceberg.StatisticsFile)"
justification: "Removing deprecated code"
- code: "java.method.removed"
old: "method org.apache.iceberg.UpdateStatistics org.apache.iceberg.SetStatistics::setStatistics(long,\
\ org.apache.iceberg.StatisticsFile)"
justification: "Removing deprecated code"
- code: "java.method.removed"
old: "method void org.apache.iceberg.MetadataUpdate.AddSchema::<init>(org.apache.iceberg.Schema,\
\ int)"
justification: "Removing deprecated code"
- code: "java.method.removed"
old: "method void org.apache.iceberg.MetadataUpdate.SetStatistics::<init>(long,\
\ org.apache.iceberg.StatisticsFile)"
justification: "Removing deprecated code"
org.apache.iceberg:iceberg-parquet:
- code: "java.class.visibilityReduced"
old: "class org.apache.iceberg.data.parquet.BaseParquetReaders<T extends java.lang.Object>"
Expand Down
7 changes: 4 additions & 3 deletions api/src/main/java/org/apache/iceberg/UpdateStatistics.java
Original file line number Diff line number Diff line change
Expand Up @@ -27,11 +27,12 @@ public interface UpdateStatistics extends PendingUpdate<List<StatisticsFile>> {
* the snapshot if any exists.
*
* @return this for method chaining
* @deprecated since 1.8.0, will be removed 1.9.0 or 2.0.0, use {@link
* #setStatistics(StatisticsFile)}.
* @deprecated since 1.8.0, will be removed in 2.0.0, use {@link #setStatistics(StatisticsFile)}.
*/
@Deprecated
UpdateStatistics setStatistics(long snapshotId, StatisticsFile statisticsFile);
default UpdateStatistics setStatistics(long snapshotId, StatisticsFile statisticsFile) {
return setStatistics(statisticsFile);
}

/**
* Set the table's statistics file for given snapshot, replacing the previous statistics file for
Expand Down
27 changes: 2 additions & 25 deletions core/src/main/java/org/apache/iceberg/MetadataUpdate.java
Original file line number Diff line number Diff line change
Expand Up @@ -84,34 +84,22 @@ public void applyTo(ViewMetadata.Builder viewMetadataBuilder) {

class AddSchema implements MetadataUpdate {
private final Schema schema;
private final int lastColumnId;

public AddSchema(Schema schema) {
this(schema, schema.highestFieldId());
}

/**
* Set the schema
*
* @deprecated since 1.8.0, will be removed 1.9.0 or 2.0.0, use AddSchema(schema).
*/
@Deprecated
public AddSchema(Schema schema, int lastColumnId) {
this.schema = schema;
this.lastColumnId = lastColumnId;
}

public Schema schema() {
return schema;
}

public int lastColumnId() {
return lastColumnId;
return schema.highestFieldId();
}

@Override
public void applyTo(TableMetadata.Builder metadataBuilder) {
metadataBuilder.addSchema(schema, lastColumnId);
metadataBuilder.addSchema(schema);
}

@Override
Expand Down Expand Up @@ -233,17 +221,6 @@ public void applyTo(TableMetadata.Builder metadataBuilder) {
class SetStatistics implements MetadataUpdate {
private final StatisticsFile statisticsFile;

/**
* Set statistics for a snapshot.
*
* @deprecated since 1.8.0, will be removed in 1.9.0 or 2.0.0, use
* SetStatistics(statisticsFile).
*/
@Deprecated
public SetStatistics(long snapshotId, StatisticsFile statisticsFile) {
this.statisticsFile = statisticsFile;
}

public SetStatistics(StatisticsFile statisticsFile) {
this.statisticsFile = statisticsFile;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -481,13 +481,7 @@ private static MetadataUpdate readUpgradeFormatVersion(JsonNode node) {
private static MetadataUpdate readAddSchema(JsonNode node) {
JsonNode schemaNode = JsonUtil.get(SCHEMA, node);
Schema schema = SchemaParser.fromJson(schemaNode);
int lastColumnId;
if (node.has(LAST_COLUMN_ID)) {
lastColumnId = JsonUtil.getInt(LAST_COLUMN_ID, node);
} else {
lastColumnId = schema.highestFieldId();
}
return new MetadataUpdate.AddSchema(schema, lastColumnId);
return new MetadataUpdate.AddSchema(schema);
}

private static MetadataUpdate readSetCurrentSchema(JsonNode node) {
Expand Down
17 changes: 1 addition & 16 deletions core/src/main/java/org/apache/iceberg/SetStatistics.java
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;

public class SetStatistics implements UpdateStatistics {
Expand All @@ -32,20 +31,6 @@ public SetStatistics(TableOperations ops) {
this.ops = ops;
}

/**
* Set the statistics file for a snapshot.
*
* @deprecated since 1.8.0, will be removed in 1.9.0 or 2.0.0, use {@link
* #setStatistics(StatisticsFile)}.
*/
@Deprecated
@Override
public UpdateStatistics setStatistics(long snapshotId, StatisticsFile statisticsFile) {
Preconditions.checkArgument(snapshotId == statisticsFile.snapshotId());
statisticsToSet.put(statisticsFile.snapshotId(), Optional.of(statisticsFile));
return this;
}

@Override
public UpdateStatistics setStatistics(StatisticsFile statisticsFile) {
statisticsToSet.put(statisticsFile.snapshotId(), Optional.of(statisticsFile));
Expand Down Expand Up @@ -75,7 +60,7 @@ private TableMetadata internalApply(TableMetadata base) {
statisticsToSet.forEach(
(snapshotId, statistics) -> {
if (statistics.isPresent()) {
builder.setStatistics(snapshotId, statistics.get());
builder.setStatistics(statistics.get());
} else {
builder.removeStatistics(snapshotId);
}
Expand Down
41 changes: 1 addition & 40 deletions core/src/main/java/org/apache/iceberg/TableMetadata.java
Original file line number Diff line number Diff line change
Expand Up @@ -592,16 +592,6 @@ public long nextRowId() {
return nextRowId;
}

/**
* Updates the schema
*
* @deprecated since 1.8.0, will be removed in 1.9.0 or 2.0.0, use updateSchema(schema).
*/
@Deprecated
public TableMetadata updateSchema(Schema newSchema, int newLastColumnId) {
return new Builder(this).setCurrentSchema(newSchema, newLastColumnId).build();
}

/** Updates the schema */
public TableMetadata updateSchema(Schema newSchema) {
return new Builder(this)
Expand Down Expand Up @@ -1141,17 +1131,6 @@ public Builder addSchema(Schema schema) {
return this;
}

/**
* Add a new schema.
*
* @deprecated since 1.8.0, will be removed in 1.9.0 or 2.0.0, use AddSchema(schema).
*/
@Deprecated
public Builder addSchema(Schema schema, int newLastColumnId) {
addSchemaInternal(schema, newLastColumnId);
return this;
}

public Builder setDefaultPartitionSpec(PartitionSpec spec) {
setDefaultPartitionSpec(addPartitionSpecInternal(spec));
return this;
Expand Down Expand Up @@ -1366,24 +1345,6 @@ public Builder removeRef(String name) {
return this;
}

/**
* Set a statistics file for a snapshot.
*
* @deprecated since 1.8.0, will be removed 1.9.0 or 2.0.0, use setStatistics(statisticsFile).
*/
@Deprecated
public Builder setStatistics(long snapshotId, StatisticsFile statisticsFile) {
Preconditions.checkNotNull(statisticsFile, "statisticsFile is null");
Preconditions.checkArgument(
snapshotId == statisticsFile.snapshotId(),
"snapshotId does not match: %s vs %s",
snapshotId,
statisticsFile.snapshotId());
statisticsFiles.put(statisticsFile.snapshotId(), ImmutableList.of(statisticsFile));
changes.add(new MetadataUpdate.SetStatistics(statisticsFile));
return this;
}

public Builder setStatistics(StatisticsFile statisticsFile) {
Preconditions.checkNotNull(statisticsFile, "statisticsFile is null");
statisticsFiles.put(statisticsFile.snapshotId(), ImmutableList.of(statisticsFile));
Expand Down Expand Up @@ -1665,7 +1626,7 @@ && changes(MetadataUpdate.AddSchema.class)
schemasById.put(newSchema.schemaId(), newSchema);
}

changes.add(new MetadataUpdate.AddSchema(newSchema, lastColumnId));
changes.add(new MetadataUpdate.AddSchema(newSchema));

this.lastAddedSchemaId = newSchemaId;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1748,7 +1748,7 @@ private StatisticsFile reuseStatsFile(long snapshotId, StatisticsFile statistics
}

private void commitStats(Table table, StatisticsFile statisticsFile) {
table.updateStatistics().setStatistics(statisticsFile.snapshotId(), statisticsFile).commit();
table.updateStatistics().setStatistics(statisticsFile).commit();
}

private String statsFileLocation(String tableLocation) {
Expand Down
4 changes: 2 additions & 2 deletions core/src/test/java/org/apache/iceberg/TestSetStatistics.java
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ public void testUpdateStatistics() {
ImmutableList.of(1, 2),
ImmutableMap.of("a-property", "some-property-value"))));

table.updateStatistics().setStatistics(snapshotId, statisticsFile).commit();
table.updateStatistics().setStatistics(statisticsFile).commit();

TableMetadata metadata = readMetadata();
assertThat(version()).isEqualTo(2);
Expand All @@ -100,7 +100,7 @@ public void testRemoveStatistics() {
new GenericStatisticsFile(
snapshotId, "/some/statistics/file.puffin", 100, 42, ImmutableList.of());

table.updateStatistics().setStatistics(snapshotId, statisticsFile).commit();
table.updateStatistics().setStatistics(statisticsFile).commit();

TableMetadata metadata = readMetadata();
assertThat(version()).isEqualTo(2);
Expand Down
4 changes: 0 additions & 4 deletions core/src/test/java/org/apache/iceberg/TestTableMetadata.java
Original file line number Diff line number Diff line change
Expand Up @@ -1341,7 +1341,6 @@ public void testSetStatistics() {
TableMetadata withStatistics =
TableMetadata.buildFrom(meta)
.setStatistics(
43,
new GenericStatisticsFile(
43, "/some/path/to/stats/file", 128, 27, ImmutableList.of()))
.build();
Expand All @@ -1356,7 +1355,6 @@ public void testSetStatistics() {
TableMetadata withStatisticsReplaced =
TableMetadata.buildFrom(withStatistics)
.setStatistics(
43,
new GenericStatisticsFile(
43, "/some/path/to/stats/file2", 128, 27, ImmutableList.of()))
.build();
Expand All @@ -1378,11 +1376,9 @@ public void testRemoveStatistics() {
TableMetadata.newTableMetadata(
schema, PartitionSpec.unpartitioned(), null, ImmutableMap.of()))
.setStatistics(
43,
new GenericStatisticsFile(
43, "/some/path/to/stats/file", 128, 27, ImmutableList.of()))
.setStatistics(
44,
new GenericStatisticsFile(
44, "/some/path/to/stats/file2", 128, 27, ImmutableList.of()))
.build();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -600,7 +600,7 @@ public void setAndRemoveStatistics() {
List<UpdateRequirement> requirements =
UpdateRequirements.forUpdateTable(
metadata,
ImmutableList.of(new MetadataUpdate.SetStatistics(0L, mock(StatisticsFile.class))));
ImmutableList.of(new MetadataUpdate.SetStatistics(mock(StatisticsFile.class))));
requirements.forEach(req -> req.validate(metadata));

assertThat(requirements)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ public void dropTableDataDeletesExpectedFiles() throws IOException {
table.currentSnapshot().sequenceNumber(),
tableLocation + "/metadata/" + UUID.randomUUID() + ".stats",
table.io());
table.updateStatistics().setStatistics(statisticsFile.snapshotId(), statisticsFile).commit();
table.updateStatistics().setStatistics(statisticsFile).commit();

PartitionStatisticsFile partitionStatisticsFile =
writePartitionStatsFile(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -455,7 +455,7 @@ public void testExpireSnapshotsWithStatisticFiles() throws Exception {
table.currentSnapshot().sequenceNumber(),
statsFileLocation1,
table.io());
table.updateStatistics().setStatistics(statisticsFile1.snapshotId(), statisticsFile1).commit();
table.updateStatistics().setStatistics(statisticsFile1).commit();

sql("INSERT INTO %s SELECT 20, 'def'", tableName);
table.refresh();
Expand All @@ -466,7 +466,7 @@ public void testExpireSnapshotsWithStatisticFiles() throws Exception {
table.currentSnapshot().sequenceNumber(),
statsFileLocation2,
table.io());
table.updateStatistics().setStatistics(statisticsFile2.snapshotId(), statisticsFile2).commit();
table.updateStatistics().setStatistics(statisticsFile2).commit();

waitUntilAfter(table.currentSnapshot().timestampMillis());

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -493,10 +493,7 @@ public void testRemoveOrphanFilesWithStatisticFiles() throws Exception {
}

Transaction transaction = table.newTransaction();
transaction
.updateStatistics()
.setStatistics(statisticsFile.snapshotId(), statisticsFile)
.commit();
transaction.updateStatistics().setStatistics(statisticsFile).commit();
transaction.commitTransaction();

// wait to ensure files are old enough
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ private Result doExecute() {
snapshotId());
List<Blob> blobs = generateNDVBlobs();
StatisticsFile statisticsFile = writeStatsFile(blobs);
table.updateStatistics().setStatistics(snapshotId(), statisticsFile).commit();
table.updateStatistics().setStatistics(statisticsFile).commit();
return ImmutableComputeTableStats.Result.builder().statisticsFile(statisticsFile).build();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -931,7 +931,7 @@ public void testRemoveOrphanFilesWithStatisticFiles() throws Exception {
}

Transaction transaction = table.newTransaction();
transaction.updateStatistics().setStatistics(snapshotId, statisticsFile).commit();
transaction.updateStatistics().setStatistics(statisticsFile).commit();
transaction.commitTransaction();

SparkActions.get()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -754,7 +754,6 @@ public void testStatisticFile() throws IOException {
TableMetadata withStatistics =
TableMetadata.buildFrom(metadata)
.setStatistics(
43,
new GenericStatisticsFile(
43, "/some/path/to/stats/file", 128, 27, ImmutableList.of()))
.build();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -186,10 +186,7 @@ public void testPartitionMetadataColumnWithManyColumns() {

TableOperations ops = ((HasTableOperations) table).operations();
TableMetadata base = ops.current();
ops.commit(
base,
base.updateSchema(manyColumnsSchema, manyColumnsSchema.highestFieldId())
.updatePartitionSpec(spec));
ops.commit(base, base.updateSchema(manyColumnsSchema).updatePartitionSpec(spec));

Dataset<Row> df =
spark
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -155,8 +155,7 @@ protected Record writeAndRead(String desc, Schema writeSchema, Schema readSchema
Schema expectedSchema = reassignIds(readSchema, idMapping);

// Set the schema to the expected schema directly to simulate the table schema evolving
TestTables.replaceMetadata(
desc, TestTables.readMetadata(desc).updateSchema(expectedSchema, 100));
TestTables.replaceMetadata(desc, TestTables.readMetadata(desc).updateSchema(expectedSchema));

Dataset<Row> df =
spark
Expand Down
Loading

0 comments on commit 30fd752

Please sign in to comment.