From bd3edaa70d5398f795aeb37b7065fc9004156dd1 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Fri, 27 Jan 2023 14:48:14 +0100 Subject: [PATCH] Make retry-related constants more generic Writing through Hive can fail in ways other than just "could only be replicated to 0 nodes instead of minReplication". Let's make the names of constants more generic to support more cases in the future, and also abstract what the underlying problem may be. --- .../hive/TestAbfsSyncPartitionMetadata.java | 20 +++++------ .../product/hive/TestAzureBlobFileSystem.java | 6 ++-- .../io/trino/tests/product/hive/TestCsv.java | 16 ++++----- .../hive/TestHdfsSyncPartitionMetadata.java | 20 +++++------ .../hive/TestHiveBasicTableStatistics.java | 6 ++-- .../product/hive/TestHiveBucketedTables.java | 32 ++++++++--------- .../product/hive/TestHiveCreateTable.java | 8 ++--- .../hive/TestHivePartitionProcedures.java | 34 +++++++++---------- .../product/hive/TestHivePartitionsTable.java | 10 +++--- .../product/hive/TestHiveStorageFormats.java | 14 ++++---- .../product/hive/TestHiveTableStatistics.java | 6 ++-- .../hive/TestHiveTransactionalTable.java | 12 +++---- .../tests/product/utils/HadoopTestUtils.java | 19 +++++++---- 13 files changed, 105 insertions(+), 98 deletions(-) diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestAbfsSyncPartitionMetadata.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestAbfsSyncPartitionMetadata.java index 1c44409a9f86..21fad74372e0 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestAbfsSyncPartitionMetadata.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestAbfsSyncPartitionMetadata.java @@ -21,8 +21,8 @@ import static com.google.common.base.Preconditions.checkArgument; import static io.trino.testing.TestingNames.randomNameSuffix; import static io.trino.tests.product.TestGroups.AZURE; -import static 
io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH; import static io.trino.tests.product.utils.QueryExecutors.onHive; import static io.trino.tests.product.utils.QueryExecutors.onTrino; import static java.lang.String.format; @@ -56,7 +56,7 @@ protected String schemaLocation() } @Test(groups = AZURE) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Override public void testAddPartition() { @@ -64,7 +64,7 @@ public void testAddPartition() } @Test(groups = AZURE) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Override public void testAddPartitionContainingCharactersThatNeedUrlEncoding() { @@ -72,7 +72,7 @@ public void testAddPartitionContainingCharactersThatNeedUrlEncoding() } @Test(groups = AZURE) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Override public void testDropPartition() { @@ -80,7 +80,7 @@ public void testDropPartition() } @Test(groups = AZURE) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Override public void testDropPartitionContainingCharactersThatNeedUrlEncoding() { @@ -88,7 +88,7 @@ public void testDropPartitionContainingCharactersThatNeedUrlEncoding() } @Test(groups = AZURE) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, 
match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Override public void testFullSyncPartition() { @@ -96,7 +96,7 @@ public void testFullSyncPartition() } @Test(groups = AZURE) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Override public void testInvalidSyncMode() { @@ -104,7 +104,7 @@ public void testInvalidSyncMode() } @Test(groups = AZURE) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Override public void testMixedCasePartitionNames() { @@ -112,7 +112,7 @@ public void testMixedCasePartitionNames() } @Test(groups = AZURE) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Override public void testConflictingMixedCasePartitionNames() { diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestAzureBlobFileSystem.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestAzureBlobFileSystem.java index 044e7fe36494..846ce7f01452 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestAzureBlobFileSystem.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestAzureBlobFileSystem.java @@ -27,8 +27,8 @@ import static io.trino.tempto.assertions.QueryAssert.assertThat; import static io.trino.testing.TestingNames.randomNameSuffix; import static io.trino.tests.product.TestGroups.AZURE; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH; +import static 
io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH; import static io.trino.tests.product.utils.QueryExecutors.onHive; import static io.trino.tests.product.utils.QueryExecutors.onTrino; import static java.lang.String.format; @@ -57,7 +57,7 @@ public void tearDown() } @Test(groups = AZURE) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testPathContainsSpecialCharacter() { String tableName = "test_path_special_character" + randomNameSuffix(); diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestCsv.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestCsv.java index cf1debbd18e9..581303f674a8 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestCsv.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestCsv.java @@ -26,8 +26,8 @@ import static io.trino.tempto.assertions.QueryAssert.assertThat; import static io.trino.tests.product.TestGroups.HMS_ONLY; import static io.trino.tests.product.TestGroups.STORAGE_FORMATS; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH; import static io.trino.tests.product.utils.QueryExecutors.onHive; import static io.trino.tests.product.utils.QueryExecutors.onTrino; import static java.lang.String.format; @@ -66,14 +66,14 @@ private void testInsertIntoCsvTable(String tableName, String additionalTableProp } @Test(groups = {STORAGE_FORMATS, HMS_ONLY}) - 
@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testCreateCsvTableAs() { testCreateCsvTableAs(""); } @Test(groups = {STORAGE_FORMATS, HMS_ONLY}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testCreateCsvTableAsWithCustomProperties() { testCreateCsvTableAs(", csv_escape = 'e', csv_separator = 's', csv_quote = 'q'"); @@ -98,14 +98,14 @@ private void testCreateCsvTableAs(String additionalParameters) } @Test(groups = {STORAGE_FORMATS, HMS_ONLY}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testInsertIntoPartitionedCsvTable() { testInsertIntoPartitionedCsvTable("test_partitioned_csv_table", ""); } @Test(groups = {STORAGE_FORMATS, HMS_ONLY}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testInsertIntoPartitionedCsvTableWithCustomProperties() { testInsertIntoPartitionedCsvTable("test_partitioned_csv_table_with_custom_parameters", ", csv_escape = 'e', csv_separator = 's', csv_quote = 'q'"); @@ -132,14 +132,14 @@ private void testInsertIntoPartitionedCsvTable(String tableName, String addition } @Test(groups = {STORAGE_FORMATS, HMS_ONLY}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testCreatePartitionedCsvTableAs() { testCreatePartitionedCsvTableAs("storage_formats_test_create_table_as_select_partitioned_csv", ""); } @Test(groups = {STORAGE_FORMATS, HMS_ONLY}) - 
@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testCreatePartitionedCsvTableAsWithCustomParamters() { testCreatePartitionedCsvTableAs( diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHdfsSyncPartitionMetadata.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHdfsSyncPartitionMetadata.java index 330eccec48e9..3d9646aaf79a 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHdfsSyncPartitionMetadata.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHdfsSyncPartitionMetadata.java @@ -25,8 +25,8 @@ import static io.trino.tests.product.TestGroups.HIVE_PARTITIONING; import static io.trino.tests.product.TestGroups.SMOKE; import static io.trino.tests.product.TestGroups.TRINO_JDBC; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH; import static io.trino.tests.product.utils.QueryExecutors.onTrino; public class TestHdfsSyncPartitionMetadata @@ -47,7 +47,7 @@ protected String schemaLocation() } @Test(groups = {HIVE_PARTITIONING, SMOKE, TRINO_JDBC}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Override public void testAddPartition() { @@ -55,7 +55,7 @@ public void testAddPartition() } @Test(groups = {HIVE_PARTITIONING, SMOKE, TRINO_JDBC}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = 
RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Override public void testAddPartitionContainingCharactersThatNeedUrlEncoding() { @@ -63,7 +63,7 @@ public void testAddPartitionContainingCharactersThatNeedUrlEncoding() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Override public void testDropPartition() { @@ -71,7 +71,7 @@ public void testDropPartition() } @Test(groups = {HIVE_PARTITIONING, SMOKE, TRINO_JDBC}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Override public void testDropPartitionContainingCharactersThatNeedUrlEncoding() { @@ -79,7 +79,7 @@ public void testDropPartitionContainingCharactersThatNeedUrlEncoding() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Override public void testFullSyncPartition() { @@ -87,7 +87,7 @@ public void testFullSyncPartition() } @Test(groups = {HIVE_PARTITIONING, SMOKE, TRINO_JDBC}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Override public void testInvalidSyncMode() { @@ -95,7 +95,7 @@ public void testInvalidSyncMode() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Override public void testMixedCasePartitionNames() { @@ -103,7 +103,7 @@ public void testMixedCasePartitionNames() } @Test(groups = {HIVE_PARTITIONING, 
SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Override public void testConflictingMixedCasePartitionNames() { diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveBasicTableStatistics.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveBasicTableStatistics.java index 7049611c0586..5a018b03cf05 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveBasicTableStatistics.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveBasicTableStatistics.java @@ -28,8 +28,8 @@ import static com.google.common.base.Verify.verify; import static io.trino.tests.product.hive.util.TableLocationUtils.getTableLocation; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH; import static io.trino.tests.product.utils.QueryExecutors.onHive; import static io.trino.tests.product.utils.QueryExecutors.onTrino; import static java.lang.String.format; @@ -294,7 +294,7 @@ public void testInsertPartitioned() } @Test - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testInsertBucketed() { String tableName = "test_basic_statistics_bucketed_insert_presto"; diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveBucketedTables.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveBucketedTables.java index 
cbbca6dbc0de..a1e4429d1ced 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveBucketedTables.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveBucketedTables.java @@ -50,8 +50,8 @@ import static io.trino.tests.product.hive.BucketingType.BUCKETED_V1; import static io.trino.tests.product.hive.BucketingType.BUCKETED_V2; import static io.trino.tests.product.hive.util.TemporaryHiveTable.temporaryHiveTable; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH; import static io.trino.tests.product.utils.QueryExecutors.onHive; import static io.trino.tests.product.utils.QueryExecutors.onTrino; import static io.trino.tests.product.utils.TableDefinitionUtils.mutableTableInstanceOf; @@ -107,7 +107,7 @@ public Requirement getRequirements(Configuration configuration) } @Test - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testSelectStar() { String tableName = mutableTableInstanceOf(BUCKETED_NATION).getNameInDatabase(); @@ -117,7 +117,7 @@ public void testSelectStar() } @Test(groups = LARGE_QUERY) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testIgnorePartitionBucketingIfNotBucketed() { String tableName = mutableTablesState().get(BUCKETED_PARTITIONED_NATION).getNameInDatabase(); @@ -135,7 +135,7 @@ public void testIgnorePartitionBucketingIfNotBucketed() } @Test(groups = LARGE_QUERY) - @Flaky(issue = 
ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testAllowMultipleFilesPerBucket() { String tableName = mutableTablesState().get(BUCKETED_PARTITIONED_NATION).getNameInDatabase(); @@ -152,7 +152,7 @@ public void testAllowMultipleFilesPerBucket() } @Test - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testSelectAfterMultipleInserts() { String tableName = mutableTableInstanceOf(BUCKETED_NATION).getNameInDatabase(); @@ -170,7 +170,7 @@ public void testSelectAfterMultipleInserts() } @Test - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testSelectAfterMultipleInsertsForSortedTable() { String tableName = mutableTableInstanceOf(BUCKETED_SORTED_NATION).getNameInDatabase(); @@ -188,7 +188,7 @@ public void testSelectAfterMultipleInsertsForSortedTable() } @Test - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testSelectAfterMultipleInsertsForPartitionedTable() { String tableName = mutableTableInstanceOf(BUCKETED_PARTITIONED_NATION).getNameInDatabase(); @@ -213,7 +213,7 @@ public void testSelectAfterMultipleInsertsForPartitionedTable() } @Test - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testSelectFromEmptyBucketedTableEmptyTablesAllowed() { String tableName = mutableTableInstanceOf(BUCKETED_NATION).getNameInDatabase(); @@ -222,7 +222,7 @@ public void 
testSelectFromEmptyBucketedTableEmptyTablesAllowed() } @Test - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testSelectFromIncompleteBucketedTableEmptyTablesAllowed() { String tableName = mutableTableInstanceOf(BUCKETED_NATION).getNameInDatabase(); @@ -235,7 +235,7 @@ public void testSelectFromIncompleteBucketedTableEmptyTablesAllowed() } @Test - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testInsertPartitionedBucketed() { String tableName = mutableTablesState().get(BUCKETED_NATION_PREPARED).getNameInDatabase(); @@ -251,7 +251,7 @@ public void testInsertPartitionedBucketed() } @Test - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testCreatePartitionedBucketedTableAsSelect() { String tableName = mutableTablesState().get(BUCKETED_PARTITIONED_NATION).getNameInDatabase(); @@ -265,7 +265,7 @@ public void testCreatePartitionedBucketedTableAsSelect() } @Test - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testInsertIntoBucketedTables() { String tableName = mutableTablesState().get(BUCKETED_NATION).getNameInDatabase(); @@ -279,7 +279,7 @@ public void testInsertIntoBucketedTables() } @Test - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testCreateBucketedTableAsSelect() { String tableName = mutableTablesState().get(BUCKETED_NATION_PREPARED).getNameInDatabase(); @@ -292,7 
+292,7 @@ public void testCreateBucketedTableAsSelect() } @Test - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testBucketingVersion() { String value = "Trino rocks"; @@ -314,7 +314,7 @@ public void testBucketingVersion() } @Test(dataProvider = "testBucketingWithUnsupportedDataTypesDataProvider") - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testBucketingWithUnsupportedDataTypes(BucketingType bucketingType, String columnToBeBucketed) { try (TemporaryHiveTable table = temporaryHiveTable("table_with_unsupported_bucketing_types_" + randomNameSuffix())) { diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveCreateTable.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveCreateTable.java index 40ded126a5cf..de3a28aa47f6 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveCreateTable.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveCreateTable.java @@ -28,8 +28,8 @@ import static io.trino.tests.product.TestGroups.HDP3_ONLY; import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS; import static io.trino.tests.product.TestGroups.STORAGE_FORMATS; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH; import static io.trino.tests.product.utils.QueryExecutors.onHive; import static 
io.trino.tests.product.utils.QueryExecutors.onTrino; import static java.util.Objects.requireNonNull; @@ -38,7 +38,7 @@ public class TestHiveCreateTable extends ProductTest { @Test(groups = STORAGE_FORMATS) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testCreateTable() throws SQLException { @@ -59,7 +59,7 @@ public void testCreateTable() } @Test(groups = STORAGE_FORMATS) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testCreateTableAsSelect() throws SQLException { diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHivePartitionProcedures.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHivePartitionProcedures.java index 5fb4ba6c2a23..df2c1ba13651 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHivePartitionProcedures.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHivePartitionProcedures.java @@ -32,8 +32,8 @@ import static io.trino.tests.product.TestGroups.HIVE_PARTITIONING; import static io.trino.tests.product.TestGroups.SMOKE; import static io.trino.tests.product.hive.util.TableLocationUtils.getTablePath; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH; import static io.trino.tests.product.utils.QueryExecutors.onTrino; import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; @@ -53,7 
+53,7 @@ public class TestHivePartitionProcedures private HdfsDataSourceWriter hdfsDataSourceWriter; @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testUnregisterPartition() throws URISyntaxException { @@ -72,7 +72,7 @@ public void testUnregisterPartition() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testUnregisterViewTableShouldFail() { createPartitionedTable(FIRST_TABLE); @@ -83,7 +83,7 @@ public void testUnregisterViewTableShouldFail() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testUnregisterMissingTableShouldFail() { createPartitionedTable(FIRST_TABLE); @@ -93,7 +93,7 @@ public void testUnregisterMissingTableShouldFail() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testUnregisterUnpartitionedTableShouldFail() { createUnpartitionedTable(SECOND_TABLE); @@ -103,7 +103,7 @@ public void testUnregisterUnpartitionedTableShouldFail() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testUnregisterInvalidPartitionColumnsShouldFail() { createPartitionedTable(FIRST_TABLE); @@ -113,7 +113,7 @@ public void 
testUnregisterInvalidPartitionColumnsShouldFail() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testUnregisterMissingPartitionShouldFail() { createPartitionedTable(FIRST_TABLE); @@ -123,7 +123,7 @@ public void testUnregisterMissingPartitionShouldFail() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testRegisterPartitionMissingTableShouldFail() { QueryAssert.assertQueryFailure(() -> addPartition("missing_table", "col", "f", "/")) @@ -131,7 +131,7 @@ public void testRegisterPartitionMissingTableShouldFail() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testRegisterUnpartitionedTableShouldFail() { createUnpartitionedTable(SECOND_TABLE); @@ -141,7 +141,7 @@ public void testRegisterUnpartitionedTableShouldFail() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testRegisterViewTableShouldFail() { createPartitionedTable(FIRST_TABLE); @@ -152,7 +152,7 @@ public void testRegisterViewTableShouldFail() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testRegisterPartitionCollisionShouldFail() { createPartitionedTable(FIRST_TABLE); @@ -162,7 +162,7 @@ 
public void testRegisterPartitionCollisionShouldFail() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testRegisterPartitionInvalidPartitionColumnsShouldFail() { createPartitionedTable(FIRST_TABLE); @@ -172,7 +172,7 @@ public void testRegisterPartitionInvalidPartitionColumnsShouldFail() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testRegisterPartitionInvalidLocationShouldFail() { createPartitionedTable(FIRST_TABLE); @@ -182,7 +182,7 @@ public void testRegisterPartitionInvalidLocationShouldFail() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testRegisterPartitionWithDefaultPartitionLocation() { createPartitionedTable(FIRST_TABLE); @@ -200,7 +200,7 @@ public void testRegisterPartitionWithDefaultPartitionLocation() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testRegisterPartition() throws URISyntaxException { @@ -221,7 +221,7 @@ public void testRegisterPartition() } @Test(groups = {HIVE_PARTITIONING, SMOKE}) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testRegisterPartitionFromAnyLocation() { createPartitionedTable(FIRST_TABLE); diff --git 
a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHivePartitionsTable.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHivePartitionsTable.java index 0fe1e17c888a..95bbf1f6c17f 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHivePartitionsTable.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHivePartitionsTable.java @@ -43,8 +43,8 @@ import static io.trino.tempto.fulfillment.table.hive.InlineDataSource.createStringDataSource; import static io.trino.tempto.fulfillment.table.hive.tpch.TpchTableDefinitions.NATION; import static io.trino.tests.product.TestGroups.HIVE_PARTITIONING; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH; import static io.trino.tests.product.utils.QueryExecutors.onTrino; import static java.lang.Math.min; import static java.lang.String.format; @@ -103,7 +103,7 @@ private static TableDefinition partitionedTableWithVariablePartitionsDefinition( } @Test(groups = HIVE_PARTITIONING) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testShowPartitionsFromHiveTable() { String tableNameInDatabase = tablesState.get(PARTITIONED_TABLE).getNameInDatabase(); @@ -126,7 +126,7 @@ public void testShowPartitionsFromHiveTable() } @Test(groups = HIVE_PARTITIONING) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void 
testShowPartitionsFromUnpartitionedTable() { assertQueryFailure(() -> onTrino().executeQuery("SELECT * FROM \"nation$partitions\"")) @@ -134,7 +134,7 @@ public void testShowPartitionsFromUnpartitionedTable() } @Test(groups = HIVE_PARTITIONING) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testShowPartitionsFromHiveTableWithTooManyPartitions() { String tableName = tablesState.get(PARTITIONED_TABLE_WITH_VARIABLE_PARTITIONS).getNameInDatabase(); diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveStorageFormats.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveStorageFormats.java index a97c2ddd1056..277f962f5db3 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveStorageFormats.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveStorageFormats.java @@ -66,8 +66,8 @@ import static io.trino.tests.product.TestGroups.HMS_ONLY; import static io.trino.tests.product.TestGroups.STORAGE_FORMATS; import static io.trino.tests.product.TestGroups.STORAGE_FORMATS_DETAILED; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH; import static io.trino.tests.product.utils.JdbcDriverUtils.setSessionProperty; import static io.trino.tests.product.utils.QueryExecutors.onHive; import static io.trino.tests.product.utils.QueryExecutors.onTrino; @@ -459,7 +459,7 @@ public void testInsertAndSelectWithNullFormat(StorageFormat storageFormat) } @Test(dataProvider = "storageFormatsWithZeroByteFile", 
groups = STORAGE_FORMATS_DETAILED) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testSelectFromZeroByteFile(StorageFormat storageFormat) { String tableName = format( @@ -480,7 +480,7 @@ public void testSelectFromZeroByteFile(StorageFormat storageFormat) } @Test(dataProvider = "storageFormatsWithNullFormat", groups = STORAGE_FORMATS_DETAILED) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testSelectWithNullFormat(StorageFormat storageFormat) { String nullFormat = "null_value"; @@ -638,7 +638,7 @@ private void testNestedFields(String format, Engine writer) } @Test(groups = STORAGE_FORMATS_DETAILED) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testOrcStructsWithNonLowercaseFields() throws SQLException { @@ -675,7 +675,7 @@ public void testOrcStructsWithNonLowercaseFields() } @Test(dataProvider = "storageFormatsWithNanosecondPrecision", groups = STORAGE_FORMATS_DETAILED) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testTimestampCreatedFromHive(StorageFormat storageFormat) { String tableName = createSimpleTimestampTable("timestamps_from_hive", storageFormat); @@ -709,7 +709,7 @@ public void testTimestampCreatedFromTrino(StorageFormat storageFormat) } @Test(dataProvider = "storageFormatsWithNanosecondPrecision", groups = STORAGE_FORMATS_DETAILED) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = 
RETRYABLE_FAILURES_MATCH) public void testStructTimestampsFromHive(StorageFormat format) { String tableName = createStructTimestampTable("hive_struct_timestamp", format); diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveTableStatistics.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveTableStatistics.java index 3ea4a96f03a9..19f8484bcfb5 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveTableStatistics.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveTableStatistics.java @@ -39,8 +39,8 @@ import static io.trino.tests.product.hive.AllSimpleTypesTableDefinitions.ALL_HIVE_SIMPLE_TYPES_TEXTFILE; import static io.trino.tests.product.hive.HiveTableDefinitions.NATION_PARTITIONED_BY_BIGINT_REGIONKEY; import static io.trino.tests.product.hive.HiveTableDefinitions.NATION_PARTITIONED_BY_VARCHAR_REGIONKEY; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH; import static io.trino.tests.product.utils.QueryExecutors.onHive; import static io.trino.tests.product.utils.QueryExecutors.onTrino; import static java.lang.String.format; @@ -1377,7 +1377,7 @@ public void testComputeStatisticsForTableWithOnlyDateColumns() } @Test - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testMixedHiveAndPrestoStatistics() { String tableName = "test_mixed_hive_and_presto_statistics"; diff --git 
a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveTransactionalTable.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveTransactionalTable.java index 8c91d8f9a0d4..0fc694d26835 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveTransactionalTable.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveTransactionalTable.java @@ -66,8 +66,8 @@ import static io.trino.tests.product.hive.TransactionalTableType.ACID; import static io.trino.tests.product.hive.TransactionalTableType.INSERT_ONLY; import static io.trino.tests.product.hive.util.TableLocationUtils.getTablePath; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE; -import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES; +import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH; import static io.trino.tests.product.utils.QueryExecutors.onHive; import static io.trino.tests.product.utils.QueryExecutors.onTrino; import static java.lang.String.format; @@ -325,7 +325,7 @@ public void testReadFullAcidWithOriginalFiles(boolean isPartitioned, BucketingTy } @Test(groups = {STORAGE_FORMATS, HIVE_TRANSACTIONAL}, dataProvider = "partitioningAndBucketingTypeDataProvider", timeOut = TEST_TIMEOUT) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testUpdateFullAcidWithOriginalFilesTrinoInserting(boolean isPartitioned, BucketingType bucketingType) { withTemporaryTable("trino_update_full_acid_acid_converted_table_read", true, isPartitioned, bucketingType, tableName -> { @@ -375,7 +375,7 @@ public void testUpdateFullAcidWithOriginalFilesTrinoInserting(boolean 
isPartitio } @Test(groups = {STORAGE_FORMATS, HIVE_TRANSACTIONAL}, dataProvider = "partitioningAndBucketingTypeDataProvider", timeOut = TEST_TIMEOUT) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testUpdateFullAcidWithOriginalFilesTrinoInsertingAndDeleting(boolean isPartitioned, BucketingType bucketingType) { withTemporaryTable("trino_update_full_acid_acid_converted_table_read", true, isPartitioned, bucketingType, tableName -> { @@ -813,7 +813,7 @@ public void testPartitionedInsertAndRowLevelDelete(Engine inserter, Engine delet } @Test(groups = HIVE_TRANSACTIONAL, dataProvider = "inserterAndDeleterProvider", timeOut = TEST_TIMEOUT) - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) public void testBucketedPartitionedDelete(Engine inserter, Engine deleter) { withTemporaryTable("bucketed_partitioned_delete", true, true, NONE, tableName -> { @@ -1611,7 +1611,7 @@ public void testAcidUpdateMultipleDuplicateValues() }); } - @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + @Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH) @Test(groups = HIVE_TRANSACTIONAL, timeOut = TEST_TIMEOUT) public void testInsertDeleteUpdateWithTrinoAndHive() { diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/utils/HadoopTestUtils.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/utils/HadoopTestUtils.java index ef2bf097fbd2..16b00ca4859f 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/utils/HadoopTestUtils.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/utils/HadoopTestUtils.java @@ -33,14 +33,21 @@ private HadoopTestUtils() {} 
@Language("RegExp") public static final String ERROR_READING_FROM_HIVE_MATCH = "FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask"; - public static final String ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE = "https://github.com/trinodb/trino/issues/4936"; + /** + * Link to issues: + * + */ + public static final String RETRYABLE_FAILURES_ISSUES = "https://github.com/trinodb/trino/issues?q=is%3Aissue+issue%3A+4936"; @Language("RegExp") - public static final String ERROR_COMMITTING_WRITE_TO_HIVE_MATCH = + public static final String RETRYABLE_FAILURES_MATCH = // "Error committing write to Hive" is present depending on when the exception is thrown. // It may be absent when the underlying problem manifests earlier (e.g. during RecordFileWriter.appendRow vs RecordFileWriter.commit). - - // "could only be written to 0 of the 1 minReplication" is the error wording used by e.g. HDP 3 - "(could only be replicated to 0 nodes instead of minReplication|could only be written to 0 of the 1 minReplication)"; + "(could only be replicated to 0 nodes instead of minReplication" + + // "could only be written to 0 of the 1 minReplication" is the error wording used by e.g. HDP 3 + "|could only be written to 0 of the 1 minReplication" + + ")"; public static final RetryPolicy ERROR_COMMITTING_WRITE_TO_HIVE_RETRY_POLICY = new RetryPolicy() .handleIf(HadoopTestUtils::isErrorCommittingToHive) @@ -50,7 +57,7 @@ private HadoopTestUtils() {} private static boolean isErrorCommittingToHive(Throwable throwable) { - return Pattern.compile(ERROR_COMMITTING_WRITE_TO_HIVE_MATCH) + return Pattern.compile(RETRYABLE_FAILURES_MATCH) .matcher(Throwables.getStackTraceAsString(throwable)) .find(); }