diff --git a/sdks/java/io/iceberg/src/main/java/org/apache/beam/sdk/io/iceberg/ReadUtils.java b/sdks/java/io/iceberg/src/main/java/org/apache/beam/sdk/io/iceberg/ReadUtils.java
index 4b127fcdef22..a4d95ca249b4 100644
--- a/sdks/java/io/iceberg/src/main/java/org/apache/beam/sdk/io/iceberg/ReadUtils.java
+++ b/sdks/java/io/iceberg/src/main/java/org/apache/beam/sdk/io/iceberg/ReadUtils.java
@@ -48,7 +48,6 @@
 import org.apache.iceberg.expressions.Expression;
 import org.apache.iceberg.hadoop.HadoopInputFile;
 import org.apache.iceberg.io.CloseableIterable;
-import org.apache.iceberg.io.FileIO;
 import org.apache.iceberg.io.InputFile;
 import org.apache.iceberg.mapping.NameMapping;
 import org.apache.iceberg.mapping.NameMappingParser;
@@ -74,12 +73,9 @@ public class ReadUtils {
 
   static ParquetReader<Row> createReader(FileScanTask task, Table table, Schema schema) {
     String filePath = task.file().path().toString();
-    InputFile inputFile;
-    try (FileIO io = table.io()) {
-      EncryptedInputFile encryptedInput =
-          EncryptedFiles.encryptedInput(io.newInputFile(filePath), task.file().keyMetadata());
-      inputFile = table.encryption().decrypt(encryptedInput);
-    }
+    EncryptedInputFile encryptedInput =
+        EncryptedFiles.encryptedInput(table.io().newInputFile(filePath), task.file().keyMetadata());
+    InputFile inputFile = table.encryption().decrypt(encryptedInput);
 
     Map<Integer, ?> idToConstants =
         ReadUtils.constantsMap(task, IdentityPartitionConverters::convertConstant, table.schema());