diff --git a/spark/v3.4/build.gradle b/spark/v3.4/build.gradle
index dd01a60c1d80..de1c8345dde8 100644
--- a/spark/v3.4/build.gradle
+++ b/spark/v3.4/build.gradle
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-String sparkVersion = '3.4.0'
+String sparkVersion = '3.4.1'
 String sparkMajorVersion = '3.4'
 String scalaVersion = System.getProperty("scalaVersion") != null ? System.getProperty("scalaVersion") : System.getProperty("defaultScalaVersion")
 
diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/data/TestSparkOrcReader.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/data/TestSparkOrcReader.java
index 4082cd6d441f..b23fe729a187 100644
--- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/data/TestSparkOrcReader.java
+++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/data/TestSparkOrcReader.java
@@ -26,7 +26,6 @@
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Objects;
 import org.apache.iceberg.Files;
 import org.apache.iceberg.Schema;
 import org.apache.iceberg.io.CloseableIterable;
@@ -102,12 +101,6 @@ private void writeAndValidateRecords(Schema schema, Iterable<InternalRow> expect
         assertEquals(schema, expectedRows.next(), actualRows.next());
       }
       Assert.assertFalse("Should not have extra rows", actualRows.hasNext());
-    } catch (UnsupportedOperationException e) {
-      // Fixed in https://github.com/apache/spark/pull/41103
-      // Can be removed once Spark 3.4.1 is released
-      if (!Objects.equals(e.getMessage(), "Datatype not supported TimestampNTZType")) {
-        throw e;
-      }
     }
   }
 
diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/data/parquet/vectorized/TestParquetVectorizedReads.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/data/parquet/vectorized/TestParquetVectorizedReads.java
index 9250f4ba4295..c763b7b7cc12 100644
--- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/data/parquet/vectorized/TestParquetVectorizedReads.java
+++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/data/parquet/vectorized/TestParquetVectorizedReads.java
@@ -60,15 +60,7 @@ public class TestParquetVectorizedReads extends AvroDataTest {
 
   @Override
   protected void writeAndValidate(Schema schema) throws IOException {
-    try {
-      writeAndValidate(schema, getNumRows(), 0L, RandomData.DEFAULT_NULL_PERCENTAGE, true);
-    } catch (UnsupportedOperationException exc) {
-      // Fixed in https://github.com/apache/spark/pull/41103
-      // Can be removed once Spark 3.4.1 is released
-      if (!exc.getMessage().equals("Datatype not supported TimestampNTZType")) {
-        throw exc;
-      }
-    }
+    writeAndValidate(schema, getNumRows(), 0L, RandomData.DEFAULT_NULL_PERCENTAGE, true);
   }
 
   private void writeAndValidate(