diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
index 292a0f98af1ca..48016c3fdc090 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
@@ -28,12 +28,9 @@
 import java.util.Map;
 import java.util.Set;
 
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.parquet.VersionParser;
-import org.apache.parquet.VersionParser.ParsedVersion;
-import org.apache.parquet.column.page.PageReadStore;
 import scala.Option;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.FileSplit;
@@ -42,6 +39,9 @@
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.parquet.HadoopReadOptions;
 import org.apache.parquet.ParquetReadOptions;
+import org.apache.parquet.VersionParser;
+import org.apache.parquet.VersionParser.ParsedVersion;
+import org.apache.parquet.column.page.PageReadStore;
 import org.apache.parquet.hadoop.BadConfigurationException;
 import org.apache.parquet.hadoop.ParquetFileReader;
 import org.apache.parquet.hadoop.ParquetInputFormat;
@@ -51,6 +51,7 @@
 import org.apache.parquet.hadoop.util.HadoopInputFile;
 import org.apache.parquet.schema.MessageType;
 import org.apache.parquet.schema.Types;
+
 import org.apache.spark.TaskContext;
 import org.apache.spark.TaskContext$;
 import org.apache.spark.sql.internal.SQLConf;